code (string, lengths 22 to 1.05M) | apis (list, lengths 1 to 3.31k) | extract_api (string, lengths 75 to 3.25M) |
---|---|---|
from tokens import Token, TokenType
whitespace = " \n\t"
digits = "0123456789"
class Lexer:
def __init__(self, code):
self.source = code + '\n'
self.cur_char = None
self.cur_pos = -1
self.advance()
def advance(self, pos=1):
self.cur_pos += pos
try:
self.cur_char = self.source[self.cur_pos]
except IndexError:
self.cur_char = None
def peek(self):
try:
return self.source[self.cur_pos + 1]
except IndexError:
return None
# def advance(self):
# try:
# self.cur_char = next(self.text)
# except StopIteration:
# self.cur_char = None
def generate_tokens(self):
while self.cur_char is not None:
if self.cur_char in whitespace:
self.advance()
elif self.cur_char == "." or self.cur_char.isdigit():
yield self.generate_number()
elif self.cur_char == "+":
self.advance()
yield Token(TokenType.plus_token)
elif self.cur_char == "-":
self.advance()
yield Token(TokenType.minus_token)
elif self.cur_char == "%":
self.advance()
yield Token(TokenType.percent_token)
elif self.cur_char == "*":
if self.peek() == "*":
self.advance(2)
yield Token(TokenType.star_star_token)
else:
self.advance()
yield Token(TokenType.star_token)
elif self.cur_char == "/":
if self.peek() == "/":
self.advance(2)
yield Token(TokenType.slash_slash_token)
else:
self.advance()
yield Token(TokenType.slash_token)
elif self.cur_char == "(":
self.advance()
yield Token(TokenType.left_parentheses_token)
elif self.cur_char == ")":
self.advance()
yield Token(TokenType.right_parentheses_token)
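# note: the "%" branch below duplicates the one above and is unreachable, since "%" is already handled earlier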
elif self.cur_char == "%":
self.advance()
yield Token(TokenType.percent_token)
else:
raise Exception(f"Illegal character '{self.cur_char}'")
def generate_number(self):
decimal_point_count = 0
number_str = self.cur_char
self.advance()
while self.cur_char is not None and (self.cur_char == "." or self.cur_char.isdigit()):
if self.cur_char == ".":
decimal_point_count += 1
if decimal_point_count > 1:
break
number_str += self.cur_char
self.advance()
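# normalize bare leading/trailing decimal points: ".5" -> "0.5", "3." -> "3.0"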
if number_str.startswith("."):
number_str = '0' + number_str
if number_str.endswith("."):
number_str += '0'
return Token(TokenType.number_token, float(number_str))
# 1 + 2 * 3
#
# +
# / \
# a *
# / \
# b c
# (1 + 2) * 3
#
# *
# / \
# + c
# / \
# a b
|
[
"tokens.Token"
] |
[((1044, 1071), 'tokens.Token', 'Token', (['TokenType.plus_token'], {}), '(TokenType.plus_token)\n', (1049, 1071), False, 'from tokens import Token, TokenType\n'), ((1164, 1192), 'tokens.Token', 'Token', (['TokenType.minus_token'], {}), '(TokenType.minus_token)\n', (1169, 1192), False, 'from tokens import Token, TokenType\n'), ((1285, 1315), 'tokens.Token', 'Token', (['TokenType.percent_token'], {}), '(TokenType.percent_token)\n', (1290, 1315), False, 'from tokens import Token, TokenType\n'), ((1456, 1488), 'tokens.Token', 'Token', (['TokenType.star_star_token'], {}), '(TokenType.star_star_token)\n', (1461, 1488), False, 'from tokens import Token, TokenType\n'), ((1572, 1599), 'tokens.Token', 'Token', (['TokenType.star_token'], {}), '(TokenType.star_token)\n', (1577, 1599), False, 'from tokens import Token, TokenType\n'), ((1740, 1774), 'tokens.Token', 'Token', (['TokenType.slash_slash_token'], {}), '(TokenType.slash_slash_token)\n', (1745, 1774), False, 'from tokens import Token, TokenType\n'), ((1858, 1886), 'tokens.Token', 'Token', (['TokenType.slash_token'], {}), '(TokenType.slash_token)\n', (1863, 1886), False, 'from tokens import Token, TokenType\n'), ((1979, 2018), 'tokens.Token', 'Token', (['TokenType.left_parentheses_token'], {}), '(TokenType.left_parentheses_token)\n', (1984, 2018), False, 'from tokens import Token, TokenType\n'), ((2111, 2151), 'tokens.Token', 'Token', (['TokenType.right_parentheses_token'], {}), '(TokenType.right_parentheses_token)\n', (2116, 2151), False, 'from tokens import Token, TokenType\n'), ((2244, 2274), 'tokens.Token', 'Token', (['TokenType.percent_token'], {}), '(TokenType.percent_token)\n', (2249, 2274), False, 'from tokens import Token, TokenType\n')]
|
#!/usr/bin/python3
import sys
import os
libdir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'epd')
if os.path.exists(libdir):
sys.path.append(libdir)
import epd7in5_V2
epd = epd7in5_V2.EPD()
epd.init()
epd.Clear()
epd.sleep()
print("e-Paper clear & sleep done.")
|
[
"sys.path.append",
"os.path.realpath",
"os.path.exists",
"epd7in5_V2.EPD"
] |
[((117, 139), 'os.path.exists', 'os.path.exists', (['libdir'], {}), '(libdir)\n', (131, 139), False, 'import os\n'), ((194, 210), 'epd7in5_V2.EPD', 'epd7in5_V2.EPD', ([], {}), '()\n', (208, 210), False, 'import epd7in5_V2\n'), ((145, 168), 'sys.path.append', 'sys.path.append', (['libdir'], {}), '(libdir)\n', (160, 168), False, 'import sys\n'), ((78, 104), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (94, 104), False, 'import os\n')]
|
from catstuff import core, tools
from .config import mod_name, build
class Filelist(core.plugins.CSTask):
def __init__(self):
super().__init__(mod_name, build)
def main(self, path, max_depth=0, followlinks=False,
include=None, exclude=None, mode='whitelist',
safe_walk=True, **kwargs):
return tools.path.import_file_list(
path, max_depth=max_depth, followlinks=followlinks,
include=include, exclude=exclude, mode=mode,
safe_walk=safe_walk)
|
[
"catstuff.tools.path.import_file_list"
] |
[((347, 502), 'catstuff.tools.path.import_file_list', 'tools.path.import_file_list', (['path'], {'max_depth': 'max_depth', 'followlinks': 'followlinks', 'include': 'include', 'exclude': 'exclude', 'mode': 'mode', 'safe_walk': 'safe_walk'}), '(path, max_depth=max_depth, followlinks=\n followlinks, include=include, exclude=exclude, mode=mode, safe_walk=\n safe_walk)\n', (374, 502), False, 'from catstuff import core, tools\n')]
|
import tornado.gen
from tornado.gen import sleep, Future
from tornado.httpclient import HTTPRequest, HTTPError
from tornado.simple_httpclient import SimpleAsyncHTTPClient
from .. import options as _opts
from anthill.common.internal import Internal, InternalError
from anthill.common.validate import validate_value
from anthill.common.server import Server
from . util import promise, PromiseContext, APIError
API_TIMEOUT = 5
# noinspection PyUnusedLocal
@promise
async def sleep(delay, handler=None):
await tornado.gen.sleep(delay)
# noinspection PyUnusedLocal
@promise
async def moment(handler=None):
await tornado.gen.moment
def log(message):
handler = PromiseContext.current
if handler:
handler.log(message)
class AdminAPI(object):
@promise
async def delete_accounts(self, accounts, gamespace_only=True, handler=None, *args, **kwargs):
application = Server.instance()
publisher = await application.acquire_publisher()
await publisher.publish("DEL", {
"gamespace": handler.env["gamespace"],
"accounts": accounts,
"gamespace_only": gamespace_only
})
# noinspection PyUnusedLocal
class WebAPI(object):
def __init__(self):
self.http_client = SimpleAsyncHTTPClient()
self.rc_cache = {}
@promise
async def get(self, url, headers=None, *args, **kwargs):
request = HTTPRequest(url=url, use_gzip=True, headers=headers)
existing_futures = self.rc_cache.get(url, None)
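# coalesce concurrent fetches of the same URL: if a request is already in flight, later callers wait on a Future that the first caller resolves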
if existing_futures is not None:
future = Future()
existing_futures.append(future)
result = await future
return result
new_futures = []
self.rc_cache[url] = new_futures
try:
response = await self.http_client.fetch(request)
except HTTPError as e:
e = APIError(e.code, e.message)
for future in new_futures:
future.set_exception(e)
del self.rc_cache[url]
raise e
else:
body = response.body
for future in new_futures:
future.set_result(body)
del self.rc_cache[url]
return body
# noinspection PyUnusedLocal
class ConfigAPI(object):
@promise
async def get(self, handler=None, *ignored):
app_name = handler.env["application_name"]
app_version = handler.env["application_version"]
key = "config:" + str(app_name) + ":" + str(app_version)
cached = handler.get_cache(key)
if cached:
return cached
internal = Internal()
try:
info = await internal.request(
"config", "get_configuration",
timeout=API_TIMEOUT,
app_name=app_name,
app_version=app_version,
gamespace=handler.env["gamespace"])
except InternalError as e:
raise APIError(e.code, e.body)
handler.set_cache(key, info)
return info
# noinspection PyUnusedLocal
class StoreAPI(object):
@promise
async def get(self, name, handler=None, *ignored):
if not isinstance(name, str):
raise APIError(400, "name should be a string")
key = "store:" + str(name)
cached = handler.get_cache(key)
if cached:
return cached
internal = Internal()
try:
config = await internal.request(
"store", "get_store",
timeout=API_TIMEOUT,
gamespace=handler.env["gamespace"],
name=name)
except InternalError as e:
raise APIError(e.code, e.body)
handler.set_cache(key, config)
return config
@promise
async def new_order(self, store, item, currency, amount, component, env=None, handler=None, *ignored):
internal = Internal()
try:
result = await internal.request(
"store", "new_order",
timeout=API_TIMEOUT,
gamespace=handler.env["gamespace"],
account=handler.env["account"],
store=store,
item=item,
currency=currency,
amount=amount,
component=component,
env=env)
except InternalError as e:
raise APIError(e.code, e.body)
return result
@promise
async def update_order(self, order_id, handler=None, *ignored):
internal = Internal()
try:
result = await internal.request(
"store", "update_order",
timeout=API_TIMEOUT,
gamespace=handler.env["gamespace"],
account=handler.env["account"],
order_id=order_id)
except InternalError as e:
raise APIError(e.code, e.body)
return result
@promise
async def update_orders(self, handler=None, *ignored):
internal = Internal()
try:
result = await internal.request(
"store", "update_orders",
timeout=API_TIMEOUT,
gamespace=handler.env["gamespace"],
account=handler.env["account"])
except InternalError as e:
raise APIError(e.code, e.body)
return result
# noinspection PyUnusedLocal
class ProfileAPI(object):
@promise
async def get(self, path="", handler=None, *ignored):
if not isinstance(path, str):
raise APIError(400, "Path should be a string")
internal = Internal()
try:
profile = await internal.request(
"profile", "get_my_profile",
timeout=API_TIMEOUT,
gamespace_id=handler.env["gamespace"],
account_id=handler.env["account"],
path=path)
except InternalError as e:
raise APIError(e.code, e.body)
return profile
@promise
async def update(self, profile=None, path="", merge=True, handler=None, *ignored):
if not isinstance(path, str):
raise APIError(400, "Path should be a string")
key = "profile:" + str(path)
if not profile:
profile = {}
internal = Internal()
try:
profile = await internal.request(
"profile", "update_profile",
timeout=API_TIMEOUT,
gamespace_id=handler.env["gamespace"],
account_id=handler.env["account"],
fields=profile,
path=path,
merge=merge)
except InternalError as e:
raise APIError(e.code, e.body)
handler.set_cache(key, profile)
return profile
@promise
async def query(self, query, limit=1000, handler=None, *ignored):
if not validate_value(query, "json_dict"):
raise APIError(400, "Query should be a JSON object")
internal = Internal()
try:
results = await internal.request(
"profile", "query_profiles",
timeout=API_TIMEOUT,
gamespace_id=handler.env["gamespace"],
query=query,
limit=limit)
except InternalError as e:
raise APIError(e.code, e.body)
return results
# noinspection PyUnusedLocal
class SocialAPI(object):
@promise
async def acquire_name(self, kind, name, handler=None, *ignored):
internal = Internal()
try:
profile = await internal.request(
"social", "acquire_name",
gamespace=handler.env["gamespace"],
account=handler.env["account"],
kind=kind,
name=name)
except InternalError as e:
raise APIError(e.code, e.body)
return profile
@promise
async def check_name(self, kind, name, handler=None, *ignored):
internal = Internal()
try:
account_id = await internal.request(
"social", "check_name",
gamespace=handler.env["gamespace"],
kind=kind,
name=name)
except InternalError as e:
raise APIError(e.code, e.body)
return account_id
@promise
async def release_name(self, kind, handler=None, *ignored):
internal = Internal()
try:
released = await internal.request(
"social", "release_name",
gamespace=handler.env["gamespace"],
account=handler.env["account"],
kind=kind)
except InternalError as e:
raise APIError(e.code, e.body)
return released
@promise
async def update_profile(self, group_id, profile=None, path=None, merge=True, handler=None, *ignored):
if path and not isinstance(path, (list, tuple)):
raise APIError(400, "Path should be a list/tuple")
internal = Internal()
try:
profile = await internal.request(
"social", "update_group_profile",
timeout=API_TIMEOUT,
gamespace=handler.env["gamespace"],
group_id=group_id,
profile=profile,
path=path,
merge=merge)
except InternalError as e:
raise APIError(e.code, e.body)
return profile
@promise
async def update_group_profiles(self, group_profiles, path=None, merge=True, synced=False, handler=None, *ignored):
if not isinstance(group_profiles, dict):
raise APIError(400, "Group profiles should be a dict")
if path and not isinstance(path, (list, tuple)):
raise APIError(400, "Path should be a list/tuple")
internal = Internal()
try:
profile = await internal.request(
"social", "update_group_profiles",
timeout=API_TIMEOUT,
gamespace=handler.env["gamespace"],
group_profiles=group_profiles,
path=path or [],
merge=merge,
synced=synced)
except InternalError as e:
raise APIError(e.code, e.body)
return profile
# noinspection PyUnusedLocal
class MessageAPI(object):
@promise
async def send_batch(self, sender, messages, authoritative=True, handler=None, *ignored):
internal = Internal()
try:
await internal.request(
"message", "send_batch",
timeout=API_TIMEOUT,
gamespace=handler.env["gamespace"],
sender=sender,
messages=messages,
authoritative=authoritative)
except InternalError as e:
raise APIError(e.code, e.body)
return "OK"
# noinspection PyUnusedLocal
class PromoAPI(object):
@promise
async def use_code(self, key, handler=None, *ignored):
internal = Internal()
try:
result = await internal.request(
"promo", "use_code",
timeout=API_TIMEOUT,
gamespace=handler.env["gamespace"],
account=handler.env["account"],
key=key)
except InternalError as e:
raise APIError(e.code, e.body)
try:
result = result["result"]
except KeyError:
raise APIError(500, "Response had no 'result' field.")
return result
class EventAPI(object):
@promise
async def update_event_profile(self, event_id, profile, path=None, merge=True, handler=None):
internal = Internal()
try:
events = await internal.request(
"event", "update_event_profile",
event_id=event_id,
profile=profile,
path=path,
merge=merge,
timeout=API_TIMEOUT,
gamespace=handler.env["gamespace"],
account=handler.env["account"])
except InternalError as e:
raise APIError(e.code, e.body)
return events
@promise
async def list(self, extra_start_time=0, extra_end_time=0, handler=None):
internal = Internal()
try:
events = await internal.request(
"event", "get_list",
timeout=API_TIMEOUT,
gamespace=handler.env["gamespace"],
account=handler.env["account"],
extra_start_time=extra_start_time,
extra_end_time=extra_end_time)
except InternalError as e:
raise APIError(e.code, e.body)
return events
class APIS(object):
config = ConfigAPI()
store = StoreAPI()
profile = ProfileAPI()
social = SocialAPI()
message = MessageAPI()
promo = PromoAPI()
web = WebAPI()
event = EventAPI()
admin = AdminAPI()
def expose(context, is_server=False):
expose_objects = {
"log": log,
"sleep": sleep,
"moment": moment,
"web": APIS.web,
"config": APIS.config,
"store": APIS.store,
"profile": APIS.profile,
"social": APIS.social,
"message": APIS.message,
"promo": APIS.promo,
"event": APIS.event
}
if is_server:
expose_objects.update({
"admin": APIS.admin
})
# define them as readonly
for name, callback in expose_objects.items():
context.Object.defineProperty(
context.glob, name, {'value': callback, 'writable': False})
|
[
"anthill.common.server.Server.instance",
"tornado.httpclient.HTTPRequest",
"anthill.common.validate.validate_value",
"tornado.gen.Future",
"anthill.common.internal.Internal",
"tornado.simple_httpclient.SimpleAsyncHTTPClient"
] |
[((904, 921), 'anthill.common.server.Server.instance', 'Server.instance', ([], {}), '()\n', (919, 921), False, 'from anthill.common.server import Server\n'), ((1268, 1291), 'tornado.simple_httpclient.SimpleAsyncHTTPClient', 'SimpleAsyncHTTPClient', ([], {}), '()\n', (1289, 1291), False, 'from tornado.simple_httpclient import SimpleAsyncHTTPClient\n'), ((1412, 1464), 'tornado.httpclient.HTTPRequest', 'HTTPRequest', ([], {'url': 'url', 'use_gzip': '(True)', 'headers': 'headers'}), '(url=url, use_gzip=True, headers=headers)\n', (1423, 1464), False, 'from tornado.httpclient import HTTPRequest, HTTPError\n'), ((2633, 2643), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (2641, 2643), False, 'from anthill.common.internal import Internal, InternalError\n'), ((3412, 3422), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (3420, 3422), False, 'from anthill.common.internal import Internal, InternalError\n'), ((3917, 3927), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (3925, 3927), False, 'from anthill.common.internal import Internal, InternalError\n'), ((4549, 4559), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (4557, 4559), False, 'from anthill.common.internal import Internal, InternalError\n'), ((5026, 5036), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (5034, 5036), False, 'from anthill.common.internal import Internal, InternalError\n'), ((5623, 5633), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (5631, 5633), False, 'from anthill.common.internal import Internal, InternalError\n'), ((6317, 6327), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (6325, 6327), False, 'from anthill.common.internal import Internal, InternalError\n'), ((7027, 7037), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (7035, 7037), False, 'from anthill.common.internal import Internal, InternalError\n'), ((7554, 7564), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (7562, 7564), False, 'from anthill.common.internal import Internal, InternalError\n'), ((8024, 8034), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (8032, 8034), False, 'from anthill.common.internal import Internal, InternalError\n'), ((8446, 8456), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (8454, 8456), False, 'from anthill.common.internal import Internal, InternalError\n'), ((9051, 9061), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (9059, 9061), False, 'from anthill.common.internal import Internal, InternalError\n'), ((9878, 9888), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (9886, 9888), False, 'from anthill.common.internal import Internal, InternalError\n'), ((10516, 10526), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (10524, 10526), False, 'from anthill.common.internal import Internal, InternalError\n'), ((11064, 11074), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (11072, 11074), False, 'from anthill.common.internal import Internal, InternalError\n'), ((11734, 11744), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (11742, 11744), False, 'from anthill.common.internal import Internal, InternalError\n'), ((12326, 12336), 'anthill.common.internal.Internal', 'Internal', ([], {}), '()\n', (12334, 12336), False, 'from anthill.common.internal import Internal, InternalError\n'), ((1585, 1593), 'tornado.gen.Future', 'Future', ([], {}), '()\n', 
(1591, 1593), False, 'from tornado.gen import sleep, Future\n'), ((6906, 6940), 'anthill.common.validate.validate_value', 'validate_value', (['query', '"""json_dict"""'], {}), "(query, 'json_dict')\n", (6920, 6940), False, 'from anthill.common.validate import validate_value\n')]
|
"""
Abstract class for defining scenarios
"""
import random
from typing import Tuple
import numpy as np
from copy import deepcopy
import torch
import stillleben as sl
import nimblephysics as nimble
from sl_cutscenes.room_models import RoomAssembler
from sl_cutscenes.objects.mesh_loader import MeshLoader
from sl_cutscenes.objects.object_loader import ObjectLoader
from sl_cutscenes.objects.decorator_loader import DecoratorLoader
from sl_cutscenes.lighting import get_lightmap
from sl_cutscenes.camera import Camera
import sl_cutscenes.utils.utils as utils
import sl_cutscenes.constants as CONSTANTS
from sl_cutscenes import object_info
class Scenario(object):
""" Abstract class for defining scenarios """
config = dict()
name = 'scenario'
def __init__(self, cfg, scene: sl.Scene, randomize=True):
self.device = cfg.device
self.viewer_mode = cfg.viewer
self.scene = scene
if randomize:
utils.randomize()
self.mesh_loader = MeshLoader()
self.object_loader = ObjectLoader(scenario_reset=True)
self.room_assembler = RoomAssembler(scene=self.scene)
self.decorator_loader = DecoratorLoader(scene=self.scene)
self.meshes_loaded, self.objects_loaded = False, False
self.z_offset = 0.
self.lights = cfg.lights
self.lightmap = cfg.lightmap if self.lights == 0 else None
if getattr(self, "allow_multiple_cameras", True):
self.n_cameras = cfg.cameras
else:
print(f"scenario '{self.name}' supports only 1 camera -> ignoring n_cameras...")
self.n_cameras = 1
self.coplanar_stereo = cfg.coplanar_stereo
self.coplanar_stereo_dist = cfg.coplanar_stereo_dist
self.cam_movement_complexity = cfg.cam_movement_complexity
self.sim_dt = cfg.sim_dt
self.cam_dt = cfg.cam_dt
self.physics_engine = cfg.physics_engine
self.nimble_debug = cfg.nimble_debug
self.reset_sim()
return
def reset_sim(self):
self.meshes_loaded, self.objects_loaded, self.cameras_loaded = False, False, False
if self.physics_engine == "nimble":
self.nimble_loaded = False
self.sim_t = 0
self.setup_scene()
self.setup_lighting()
self.setup_objects()
self.setup_cameras()
self.decorate_scene()
self.finalize_scene()
@property
def all_objects(self):
return self.object_loader.all_objects
@property
def static_objects(self):
return self.object_loader.static_objects
@property
def dynamic_objects(self):
return self.object_loader.dynamic_objects
def set_camera_look_at(self, pos, lookat):
self.scene.set_camera_look_at(position=pos, look_at=lookat)
def can_render(self):
raise NotImplementedError
def decorate_scene(self):
self.room_assembler.add_wall_furniture()
self.decorator_loader.decorate_scene(object_loader=self.object_loader)
return
def finalize_scene(self):
""" Scene setup stuff that has to be done after everything else """
for obj in self.static_objects:
obj.casts_shadows = False
def setup_scene(self):
""" Default setup_scene. Can be overriden from specific scenes """
_ = self.room_assembler.make_room()
def setup_lighting(self):
""" Default setup lighting. """
self.scene.ambient_light = torch.tensor([0.2, 0.2, 0.2])
if self.lightmap is not None:
self.scene.light_map = get_lightmap(self.lightmap)
self.scene.light_directions *= 0. # disable point lights
self.scene.manual_exposure = 5.0
else:
for i in range(self.lights):
# self.scene.choose_random_light_direction()
ori_angle = np.random.uniform(0, 360)
elev_angle = np.random.uniform(30, 90)
light_x = np.cos(ori_angle * np.pi / 180.) * np.cos(elev_angle * np.pi / 180.)
light_y = np.sin(ori_angle * np.pi / 180.) * np.cos(elev_angle * np.pi / 180.)
light_z = np.sin(elev_angle * np.pi / 180.)
light_direction = torch.tensor([-light_x, -light_y, -light_z])
self.scene.light_directions[i] = light_direction
light_color = torch.tensor([4.0, 4.0, 4.0]) + torch.rand(3)
light_color_normalized = 5. * light_color / torch.linalg.norm(light_color)
self.scene.light_colors[i] = light_color_normalized
self.scene.manual_exposure = 3.0
def get_separations(self):
# assert len(self.dynamic_objects) > 0, "Objects must be added to dynamic_objects before computing collisions"
self.scene.check_collisions()
separations = [obj.separation for obj in self.dynamic_objects if hasattr(obj, "separation")]
return separations
def is_there_collision(self):
separations = self.get_separations()
collision = True if np.sum(separations) < 0 else False
return collision
def load_meshes(self):
""" """
if self.meshes_loaded:
return
print("mesh setup...")
self.load_meshes_()
self.meshes_loaded = True
def load_meshes_(self):
"""
Scenario-specific logic
"""
raise NotImplementedError
def setup_objects(self):
""" """
if self.objects_loaded:
return
print("object setup...")
if not self.meshes_loaded:
self.load_meshes() # if objects have not been loaded yet, load them
self.setup_objects_()
self.objects_loaded = True
return
def setup_objects_(self):
"""
Scenario-specific logic
"""
raise NotImplementedError
def setup_cameras(self):
if self.cameras_loaded:
return
print("camera setup...")
self.cameras = []
self.camera_objs = []
cam_config = self.config["camera"]
base_lookat = cam_config["base_lookat"]
# pick default ori. angle and (n_cameras-1) other angles from a linspace of angles that are 5 degrees apart
default_ori_angle = cam_config["orientation_angle_default"]
cam_ori_angles = [0] + random.sample(np.linspace(0, 360, 72+1).tolist()[1:-1], k=self.n_cameras-1)
cam_ori_angles = [(angle + default_ori_angle) % 360 for angle in cam_ori_angles]
# TODO parameters 'orientation_angle_min/max' are not yet used!
for i, cam_ori_angle in enumerate(cam_ori_angles):
cam_elev_angle = random.uniform(cam_config["elevation_angle_min"], cam_config["elevation_angle_max"])
cam_dist = random.uniform(cam_config["distance_min"], cam_config["distance_max"])
cam_lookat = deepcopy(base_lookat)
cam_name = f"cam_{str(i).zfill(2)}"
cam_stereo_positions = ["left", "right"] if self.coplanar_stereo else ["mono"]
self.cameras.append(Camera(cam_name, self.cam_dt, cam_elev_angle, cam_ori_angle, cam_dist, cam_lookat,
self.coplanar_stereo_dist, cam_stereo_positions, self.cam_movement_complexity))
self.setup_cameras_() # e.g. scenario-specific height adjustment
self.setup_camera_objs()
self.cameras_loaded = True
def setup_camera_objs(self):
"""
Setting an object for each of the cameras.
- Viewer mode: A full mesh is displayed at the position and with the pose of the camera
- Normal mode: A tiny-dummy obj is place on the location of the camera to fill the occ-matrix cell
"""
camera_mesh = CONSTANTS.CAMERA_OBJ if self.viewer_mode else CONSTANTS.DUMMY_CAMERA_OBJ
for camera_id, camera in enumerate(self.cameras):
self.mesh_loader.load_meshes(camera_mesh)
camera_pos = camera.get_pos()
camera_info_mesh = self.mesh_loader.get_meshes()[-1]
self.camera_objs.append(self.add_object_to_scene(camera_info_mesh, is_static=True))
pose = torch.eye(4)
pose[:2, -1] = camera_pos[:2]
pose[2, -1] = camera_pos[-1] + self.camera_objs[-1].mesh.bbox.min[-1]
pose[:3, :3] = utils.get_rot_matrix(
yaw=torch.tensor(camera.ori_angle * np.pi / 180),
pitch=torch.tensor(-1 * camera.elev_angle * np.pi / 180),
roll=torch.tensor(0.)
)
self.camera_objs[-1].set_pose(pose)
self.scene.add_object(self.camera_objs[-1])
return
def setup_cameras_(self):
"""
Scenario-specific logic, e.g. height adjustment
"""
raise NotImplementedError
def simulate(self):
'''
Can be overwritten by scenario-specific logic
'''
self.sim_t += self.sim_dt
self.sim_step_()
def sim_step_(self):
'''
Just calls the appropriate simulator; assumes that all other things have been taken care of.
'''
if self.physics_engine == "physx":
self.scene.simulate(self.sim_dt)
elif self.physics_engine == "physx_manipulation_sim":
raise NotImplementedError # TODO implement for gripper sim
elif self.physics_engine == "nimble":
if not self.nimble_loaded:
self.setup_nimble_()
self.simulate_nimble_()
else:
raise ValueError(f"invalid physics_engine parameter: {self.physics_engine}")
def setup_nimble_(self):
'''
Creates a clone of the current stillleben scene for nimblephysics, enabling physics simulation there.
'''
print("initializing nimble scene from sl...")
# utils.dump_sl_scene_to_urdf(self.scene, "scene.urdf")
self.nimble_world = nimble.simulation.World()
self.nimble_world.setTimeStep(self.sim_dt)
positions, velocities = [], []
for obj in self.scene.objects:
obj_info = object_info.get_object_by_class_id(obj.mesh.class_index)
skel, pos, vel = utils.sl_object_to_nimble(obj, obj_info, debug_mode=self.nimble_debug)
self.nimble_world.addSkeleton(skel)
positions.extend(pos)
velocities.extend(vel)
self.nimble_states = [torch.cat(positions + velocities)]
self.nimble_loaded = True
def simulate_nimble_(self, action=None):
'''
Simulates a timestep in nimblephysics.
'''
# simulate timestep in nimble
if action is None:
action = torch.zeros(self.nimble_world.getNumDofs())
new_state = nimble.timestep(self.nimble_world, self.nimble_states[-1], action)
self.nimble_states.append(new_state)
self.nimble_world.setState(new_state)
# transfer object state back into the stillleben context
obj_pos, obj_vel = torch.chunk(new_state.clone(), 2)
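# nimble state layout: 6 DoFs per object (3 rotation as roll-pitch-yaw + 3 translation); the first half of the state holds positions, the second half velocities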
obj_pos = torch.chunk(obj_pos, obj_pos.shape[0] // 6)
obj_vel = torch.chunk(obj_vel, obj_vel.shape[0] // 6)
for obj, pos, vel in zip(self.scene.objects, obj_pos, obj_vel):
obj_pose = obj.pose()
obj_rpy, obj_t = pos.split([3, 3])
obj_pose[:3, :3] = utils.get_mat_from_rpy(obj_rpy)
obj_pose[:3, 3] = obj_t
obj.set_pose(obj_pose)
angular_velocity, obj.linear_velocity = vel.split([3, 3])
obj.angular_velocity = angular_velocity.flip(0) # flip back from ZYX convention
def add_object_to_scene(self, obj_info_mesh: Tuple[object_info.ObjectInfo, sl.Mesh], is_static: bool, **obj_mod):
obj_info, obj_mesh = obj_info_mesh
obj = self.object_loader.create_object(obj_info, obj_mesh, is_static, **obj_mod)
self.scene.add_object(obj)
return obj
def remove_obj_from_scene(self, obj: sl.Object, decrement_ins_idx: bool=True):
self.scene.remove_object(obj)
self.object_loader.remove_object(obj.instance_index, decrement_ins_idx=decrement_ins_idx)
def update_object_height(self, cur_obj, objs=None, scales=None):
""" Updating an object z-position given a list of supporting objects"""
if objs is None:
objs = []
scales = [1.0] * len(objs) if scales is None else scales
assert len(objs) == len(scales), "provided non-matching scales for update_camera_height"
cur_obj_pose = cur_obj.pose()
z_pose = self.get_obj_z_offset(cur_obj)
for obj, scale in zip(objs, scales):
z_pose += self.get_obj_z_offset(obj) * scale
cur_obj_pose[2, -1] = z_pose
cur_obj.set_pose(cur_obj_pose)
return cur_obj
def update_camera_height(self, camera, objs=None, scales=None):
""" Updating the camera position, camera-object position and the look-at parameter"""
if objs is None:
objs = []
scales = [1.0] * len(objs) if scales is None else scales
assert len(objs) == len(scales), "provided non-matching scales for update_camera_height"
z_lookat = deepcopy(camera.start_base_lookat[-1])
for obj, scale in zip(objs, scales):
z_lookat += self.get_obj_z_offset(obj) * scale
camera.start_base_lookat[-1] = z_lookat
return camera
def get_obj_z_offset(self, obj):
""" Obtaining the z_offset (z-pos + height) for a given object"""
obj_pose = obj.pose()
z_offset = obj_pose[2, -1] + (obj.mesh.bbox.max[-1] - obj.mesh.bbox.min[-1]) / 2
return z_offset
def get_obj_offset(self, obj):
""" Obtaining the bbox boundaries (pos + size for x,y,z) for a given object"""
obj_pose = obj.pose()
offset_x, offset_y, offset_z = obj_pose[:3, -1] + obj.mesh.bbox.max
offset = torch.Tensor([-offset_x, -offset_y, offset_z])
return offset
|
[
"torch.eye",
"sl_cutscenes.object_info.get_object_by_class_id",
"numpy.sum",
"sl_cutscenes.camera.Camera",
"torch.cat",
"numpy.sin",
"sl_cutscenes.objects.mesh_loader.MeshLoader",
"sl_cutscenes.utils.utils.sl_object_to_nimble",
"sl_cutscenes.objects.decorator_loader.DecoratorLoader",
"sl_cutscenes.objects.object_loader.ObjectLoader",
"torch.Tensor",
"sl_cutscenes.utils.utils.randomize",
"torch.linalg.norm",
"numpy.linspace",
"copy.deepcopy",
"sl_cutscenes.room_models.RoomAssembler",
"numpy.cos",
"torch.rand",
"numpy.random.uniform",
"random.uniform",
"sl_cutscenes.utils.utils.get_mat_from_rpy",
"nimblephysics.timestep",
"sl_cutscenes.lighting.get_lightmap",
"nimblephysics.simulation.World",
"torch.chunk",
"torch.tensor"
] |
[((999, 1011), 'sl_cutscenes.objects.mesh_loader.MeshLoader', 'MeshLoader', ([], {}), '()\n', (1009, 1011), False, 'from sl_cutscenes.objects.mesh_loader import MeshLoader\n'), ((1041, 1074), 'sl_cutscenes.objects.object_loader.ObjectLoader', 'ObjectLoader', ([], {'scenario_reset': '(True)'}), '(scenario_reset=True)\n', (1053, 1074), False, 'from sl_cutscenes.objects.object_loader import ObjectLoader\n'), ((1105, 1136), 'sl_cutscenes.room_models.RoomAssembler', 'RoomAssembler', ([], {'scene': 'self.scene'}), '(scene=self.scene)\n', (1118, 1136), False, 'from sl_cutscenes.room_models import RoomAssembler\n'), ((1169, 1202), 'sl_cutscenes.objects.decorator_loader.DecoratorLoader', 'DecoratorLoader', ([], {'scene': 'self.scene'}), '(scene=self.scene)\n', (1184, 1202), False, 'from sl_cutscenes.objects.decorator_loader import DecoratorLoader\n'), ((3476, 3505), 'torch.tensor', 'torch.tensor', (['[0.2, 0.2, 0.2]'], {}), '([0.2, 0.2, 0.2])\n', (3488, 3505), False, 'import torch\n'), ((9912, 9937), 'nimblephysics.simulation.World', 'nimble.simulation.World', ([], {}), '()\n', (9935, 9937), True, 'import nimblephysics as nimble\n'), ((10730, 10796), 'nimblephysics.timestep', 'nimble.timestep', (['self.nimble_world', 'self.nimble_states[-1]', 'action'], {}), '(self.nimble_world, self.nimble_states[-1], action)\n', (10745, 10796), True, 'import nimblephysics as nimble\n'), ((11033, 11076), 'torch.chunk', 'torch.chunk', (['obj_pos', '(obj_pos.shape[0] // 6)'], {}), '(obj_pos, obj_pos.shape[0] // 6)\n', (11044, 11076), False, 'import torch\n'), ((11095, 11138), 'torch.chunk', 'torch.chunk', (['obj_vel', '(obj_vel.shape[0] // 6)'], {}), '(obj_vel, obj_vel.shape[0] // 6)\n', (11106, 11138), False, 'import torch\n'), ((13152, 13190), 'copy.deepcopy', 'deepcopy', (['camera.start_base_lookat[-1]'], {}), '(camera.start_base_lookat[-1])\n', (13160, 13190), False, 'from copy import deepcopy\n'), ((13866, 13912), 'torch.Tensor', 'torch.Tensor', (['[-offset_x, -offset_y, offset_z]'], {}), '([-offset_x, -offset_y, offset_z])\n', (13878, 13912), False, 'import torch\n'), ((953, 970), 'sl_cutscenes.utils.utils.randomize', 'utils.randomize', ([], {}), '()\n', (968, 970), True, 'import sl_cutscenes.utils.utils as utils\n'), ((3579, 3606), 'sl_cutscenes.lighting.get_lightmap', 'get_lightmap', (['self.lightmap'], {}), '(self.lightmap)\n', (3591, 3606), False, 'from sl_cutscenes.lighting import get_lightmap\n'), ((6664, 6753), 'random.uniform', 'random.uniform', (["cam_config['elevation_angle_min']", "cam_config['elevation_angle_max']"], {}), "(cam_config['elevation_angle_min'], cam_config[\n 'elevation_angle_max'])\n", (6678, 6753), False, 'import random\n'), ((6772, 6842), 'random.uniform', 'random.uniform', (["cam_config['distance_min']", "cam_config['distance_max']"], {}), "(cam_config['distance_min'], cam_config['distance_max'])\n", (6786, 6842), False, 'import random\n'), ((6868, 6889), 'copy.deepcopy', 'deepcopy', (['base_lookat'], {}), '(base_lookat)\n', (6876, 6889), False, 'from copy import deepcopy\n'), ((8148, 8160), 'torch.eye', 'torch.eye', (['(4)'], {}), '(4)\n', (8157, 8160), False, 'import torch\n'), ((10090, 10146), 'sl_cutscenes.object_info.get_object_by_class_id', 'object_info.get_object_by_class_id', (['obj.mesh.class_index'], {}), '(obj.mesh.class_index)\n', (10124, 10146), False, 'from sl_cutscenes import object_info\n'), ((10176, 10246), 'sl_cutscenes.utils.utils.sl_object_to_nimble', 'utils.sl_object_to_nimble', (['obj', 'obj_info'], {'debug_mode': 'self.nimble_debug'}), '(obj, obj_info, 
debug_mode=self.nimble_debug)\n', (10201, 10246), True, 'import sl_cutscenes.utils.utils as utils\n'), ((10394, 10427), 'torch.cat', 'torch.cat', (['(positions + velocities)'], {}), '(positions + velocities)\n', (10403, 10427), False, 'import torch\n'), ((11323, 11354), 'sl_cutscenes.utils.utils.get_mat_from_rpy', 'utils.get_mat_from_rpy', (['obj_rpy'], {}), '(obj_rpy)\n', (11345, 11354), True, 'import sl_cutscenes.utils.utils as utils\n'), ((3866, 3891), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(360)'], {}), '(0, 360)\n', (3883, 3891), True, 'import numpy as np\n'), ((3921, 3946), 'numpy.random.uniform', 'np.random.uniform', (['(30)', '(90)'], {}), '(30, 90)\n', (3938, 3946), True, 'import numpy as np\n'), ((4163, 4197), 'numpy.sin', 'np.sin', (['(elev_angle * np.pi / 180.0)'], {}), '(elev_angle * np.pi / 180.0)\n', (4169, 4197), True, 'import numpy as np\n'), ((4231, 4275), 'torch.tensor', 'torch.tensor', (['[-light_x, -light_y, -light_z]'], {}), '([-light_x, -light_y, -light_z])\n', (4243, 4275), False, 'import torch\n'), ((5048, 5067), 'numpy.sum', 'np.sum', (['separations'], {}), '(separations)\n', (5054, 5067), True, 'import numpy as np\n'), ((7061, 7231), 'sl_cutscenes.camera.Camera', 'Camera', (['cam_name', 'self.cam_dt', 'cam_elev_angle', 'cam_ori_angle', 'cam_dist', 'cam_lookat', 'self.coplanar_stereo_dist', 'cam_stereo_positions', 'self.cam_movement_complexity'], {}), '(cam_name, self.cam_dt, cam_elev_angle, cam_ori_angle, cam_dist,\n cam_lookat, self.coplanar_stereo_dist, cam_stereo_positions, self.\n cam_movement_complexity)\n', (7067, 7231), False, 'from sl_cutscenes.camera import Camera\n'), ((3973, 4006), 'numpy.cos', 'np.cos', (['(ori_angle * np.pi / 180.0)'], {}), '(ori_angle * np.pi / 180.0)\n', (3979, 4006), True, 'import numpy as np\n'), ((4008, 4042), 'numpy.cos', 'np.cos', (['(elev_angle * np.pi / 180.0)'], {}), '(elev_angle * np.pi / 180.0)\n', (4014, 4042), True, 'import numpy as np\n'), ((4068, 4101), 'numpy.sin', 'np.sin', (['(ori_angle * np.pi / 180.0)'], {}), '(ori_angle * np.pi / 180.0)\n', (4074, 4101), True, 'import numpy as np\n'), ((4103, 4137), 'numpy.cos', 'np.cos', (['(elev_angle * np.pi / 180.0)'], {}), '(elev_angle * np.pi / 180.0)\n', (4109, 4137), True, 'import numpy as np\n'), ((4372, 4401), 'torch.tensor', 'torch.tensor', (['[4.0, 4.0, 4.0]'], {}), '([4.0, 4.0, 4.0])\n', (4384, 4401), False, 'import torch\n'), ((4404, 4417), 'torch.rand', 'torch.rand', (['(3)'], {}), '(3)\n', (4414, 4417), False, 'import torch\n'), ((4478, 4508), 'torch.linalg.norm', 'torch.linalg.norm', (['light_color'], {}), '(light_color)\n', (4495, 4508), False, 'import torch\n'), ((8358, 8402), 'torch.tensor', 'torch.tensor', (['(camera.ori_angle * np.pi / 180)'], {}), '(camera.ori_angle * np.pi / 180)\n', (8370, 8402), False, 'import torch\n'), ((8430, 8480), 'torch.tensor', 'torch.tensor', (['(-1 * camera.elev_angle * np.pi / 180)'], {}), '(-1 * camera.elev_angle * np.pi / 180)\n', (8442, 8480), False, 'import torch\n'), ((8507, 8524), 'torch.tensor', 'torch.tensor', (['(0.0)'], {}), '(0.0)\n', (8519, 8524), False, 'import torch\n'), ((6352, 6379), 'numpy.linspace', 'np.linspace', (['(0)', '(360)', '(72 + 1)'], {}), '(0, 360, 72 + 1)\n', (6363, 6379), True, 'import numpy as np\n')]
|
from edna.ingest.streaming import TwitterStreamingIngest
from edna.process import BaseProcess
from edna.emit import KafkaEmit
from edna.serializers.EmptySerializer import EmptyStringSerializer
from edna.core.execution.context import SimpleStreamingContext
def main():
context = SimpleStreamingContext()
ingest_serializer = EmptyStringSerializer()
emit_serializer = EmptyStringSerializer() # twitter already provides byte encoded message
ingest = TwitterStreamingIngest(serializer=ingest_serializer, bearer_token=context.getVariable("bearer_token"),
tweet_fields=context.getVariable("tweet_fields"),
user_fields=context.getVariable("user_fields"),
place_fields=context.getVariable("place_fields"),
media_fields=context.getVariable("media_fields"))
process = BaseProcess()
emit = KafkaEmit(serializer=emit_serializer,
kafka_topic=context.getVariable("kafka_topic"),
bootstrap_server=context.getVariable("bootstrap_server"),
bootstrap_port=context.getVariable("bootstrap_port"))
context.addIngest(ingest)
context.addProcess(process)
context.addEmit(emit)
context.execute()
if __name__=="__main__":
main()
|
[
"edna.core.execution.context.SimpleStreamingContext",
"edna.process.BaseProcess",
"edna.serializers.EmptySerializer.EmptyStringSerializer"
] |
[((288, 312), 'edna.core.execution.context.SimpleStreamingContext', 'SimpleStreamingContext', ([], {}), '()\n', (310, 312), False, 'from edna.core.execution.context import SimpleStreamingContext\n'), ((337, 360), 'edna.serializers.EmptySerializer.EmptyStringSerializer', 'EmptyStringSerializer', ([], {}), '()\n', (358, 360), False, 'from edna.serializers.EmptySerializer import EmptyStringSerializer\n'), ((383, 406), 'edna.serializers.EmptySerializer.EmptyStringSerializer', 'EmptyStringSerializer', ([], {}), '()\n', (404, 406), False, 'from edna.serializers.EmptySerializer import EmptyStringSerializer\n'), ((853, 866), 'edna.process.BaseProcess', 'BaseProcess', ([], {}), '()\n', (864, 866), False, 'from edna.process import BaseProcess\n')]
|
import schedule
import threading
import datetime as dt
import subprocess
import time
import os
# run auto commit
def auto_commit():
print("Running auto commit")
subprocess.call(['sh', './continue.sh'])
subprocess.call(['sh', './TimeAutoCommitProcess.sh'])
# run auto_commit every n minutes
def time_based_autocommit(num):
schedule.every(num).minutes.do(auto_commit)
while 1:
schedule.run_pending()
# compute the file creation time
def createtime(file):
if os.path.isfile(file):
ctime = os.path.getctime(file) # get the creation time
ymd_ctime = dt.datetime.fromtimestamp(ctime) # convert to a year-month-day datetime
return ymd_ctime
# convert the file creation time to a float timestamp, then cast the float to int
def start(filename):
start_time = createtime(filename)
start_time_timestamp = int(start_time.timestamp())
return start_time_timestamp
# convert the current time to a float timestamp, then cast the float to int
def stop():
stop_time = dt.datetime.now()
stop_time_timestamp = int(stop_time.timestamp())
return stop_time_timestamp
# compute the remainder via (current time - file creation time) % (60 * n)
def remainder(filename, start, stop, n):
time_remainder = (stop - start(filename)) % (60 * n)
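# e.g. with n == 5 this remainder is 0 every 300 seconds after the file's creation time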
return time_remainder
# run autocommit when the remainder becomes 0
def ctime_based_autocommit(filename, start, stop, n):
print("시도 중") # 함수가 실행될 때마다 '시도 중'을 출력
print(remainder(filename, start, stop, n)) # 나머지 출력
if remainder(filename, start, stop, n) == 0:
# auto commit 실행 - subprocess에서 에러가 계속 나서 주석처리해놓음
subprocess.call(['sh', './addFile.sh', filename])
subprocess.call(['sh', './TimeAutoCommitProcess.sh'])
print("백업되었습니다.")
choice = 0
while choice != 8:
print("Menu")
print("1. New")
print("2. Continue")
print("3. Time backup mode")
print("4. Create time backup mode")
print("5. Error Backup mode")
print("6. Git add file")
print("7. Git push to branch")
print("8. Exit")
choice = int(input(">> "))
if choice == 1:
subprocess.call(['sh', './setting.sh'])
subprocess.call(['sh', './autoCommitProcess.sh'])
elif choice == 2:
subprocess.call(['sh', './continue.sh'])
subprocess.call(['sh', './autoCommitProcess.sh'])
elif choice == 3:
subprocess.call(['bash', './killProcess.sh'])
subprocess.call(['sh', './setting.sh'])
filename = str(input("Enter your file name : "))
num = int(input('Enter the minutes you want to set up : ')) # assume the user set the minutes in the GUI
try:
print("시도")
time_based_autocommit(num)
except Exception as ex: # GUI에서 체크버튼 해제되었다고 가정
print(ex)
elif choice == 4:
subprocess.call(['bash', './killProcess.sh'])
subprocess.call(['sh', './setting.sh'])
filename = str(input('Enter your file name : ')) # assume the user selected a specific file in the GUI
n = int(input('Enter the minutes you want to set up : ')) # assume the user set the minutes to n in the GUI
while True:
try:
print("시도")
ctime_based_autocommit(filename, start, stop(), n) # 파일 생성 시간을 기준으로 n분마다 auto commit하는 걸 백그라운드에서 실행
except Exception as ex: # GUI에서 체크버튼 해제되었다고 가정
print(ex)
#if : # GUI에서 체크버튼 해제되었다고 가정
#print("버튼 해제2")
#break
elif choice == 5:
path = "./code/"
file_list = os.listdir(path)
py_list = [file for file in file_list if file.endswith(".py")]
# c_list = [file for file in file_list if file.endswith(".c")]
# java_list = [file for file in file_list if file.endswith(".java")]
for i in range(len(py_list)):
try:
subprocess.check_output(['python', path + py_list[0]], universal_newlines=True)
except Exception as ex:
branch = str("error")
msg = str(ex)
subprocess.call(['sh', './continue.sh'])
subprocess.call(['sh', './autoCommitProcess.sh'])
elif choice == 6:
subprocess.call(['bash', './killProcess.sh'])
filename = str(input("What file to add?(file_name) "))
subprocess.call(['sh', './addFile.sh', filename])
subprocess.call(['sh', './continue.sh'])
subprocess.call(['sh', './autoCommitProcess.sh'])
elif choice == 7:
subprocess.call(['bash', './killProcess.sh'])
branch = str(input("Where to push?(branch_name) "))
msg = str(input("Write commit message: "))
subprocess.call(['sh', './userCommit.sh', branch, msg])
subprocess.call(['sh', './continue.sh'])
subprocess.call(['sh', './autoCommitProcess.sh'])
elif choice == 8:
subprocess.call(['bash', './killProcess.sh'])
else:
print("Wrong Input! Please input again")
|
[
"schedule.run_pending",
"subprocess.check_output",
"os.path.isfile",
"subprocess.call",
"schedule.every",
"datetime.datetime.fromtimestamp",
"os.path.getctime",
"datetime.datetime.now",
"os.listdir"
] |
[((168, 208), 'subprocess.call', 'subprocess.call', (["['sh', './continue.sh']"], {}), "(['sh', './continue.sh'])\n", (183, 208), False, 'import subprocess\n'), ((213, 266), 'subprocess.call', 'subprocess.call', (["['sh', './TimeAutoCommitProcess.sh']"], {}), "(['sh', './TimeAutoCommitProcess.sh'])\n", (228, 266), False, 'import subprocess\n'), ((457, 477), 'os.path.isfile', 'os.path.isfile', (['file'], {}), '(file)\n', (471, 477), False, 'import os\n'), ((937, 954), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (952, 954), True, 'import datetime as dt\n'), ((391, 413), 'schedule.run_pending', 'schedule.run_pending', ([], {}), '()\n', (411, 413), False, 'import schedule\n'), ((495, 517), 'os.path.getctime', 'os.path.getctime', (['file'], {}), '(file)\n', (511, 517), False, 'import os\n'), ((556, 588), 'datetime.datetime.fromtimestamp', 'dt.datetime.fromtimestamp', (['ctime'], {}), '(ctime)\n', (581, 588), True, 'import datetime as dt\n'), ((1501, 1550), 'subprocess.call', 'subprocess.call', (["['sh', './addFile.sh', filename]"], {}), "(['sh', './addFile.sh', filename])\n", (1516, 1550), False, 'import subprocess\n'), ((1559, 1612), 'subprocess.call', 'subprocess.call', (["['sh', './TimeAutoCommitProcess.sh']"], {}), "(['sh', './TimeAutoCommitProcess.sh'])\n", (1574, 1612), False, 'import subprocess\n'), ((1986, 2025), 'subprocess.call', 'subprocess.call', (["['sh', './setting.sh']"], {}), "(['sh', './setting.sh'])\n", (2001, 2025), False, 'import subprocess\n'), ((2034, 2083), 'subprocess.call', 'subprocess.call', (["['sh', './autoCommitProcess.sh']"], {}), "(['sh', './autoCommitProcess.sh'])\n", (2049, 2083), False, 'import subprocess\n'), ((2115, 2155), 'subprocess.call', 'subprocess.call', (["['sh', './continue.sh']"], {}), "(['sh', './continue.sh'])\n", (2130, 2155), False, 'import subprocess\n'), ((2164, 2213), 'subprocess.call', 'subprocess.call', (["['sh', './autoCommitProcess.sh']"], {}), "(['sh', './autoCommitProcess.sh'])\n", (2179, 2213), False, 'import subprocess\n'), ((326, 345), 'schedule.every', 'schedule.every', (['num'], {}), '(num)\n', (340, 345), False, 'import schedule\n'), ((2245, 2290), 'subprocess.call', 'subprocess.call', (["['bash', './killProcess.sh']"], {}), "(['bash', './killProcess.sh'])\n", (2260, 2290), False, 'import subprocess\n'), ((2299, 2338), 'subprocess.call', 'subprocess.call', (["['sh', './setting.sh']"], {}), "(['sh', './setting.sh'])\n", (2314, 2338), False, 'import subprocess\n'), ((2676, 2721), 'subprocess.call', 'subprocess.call', (["['bash', './killProcess.sh']"], {}), "(['bash', './killProcess.sh'])\n", (2691, 2721), False, 'import subprocess\n'), ((2730, 2769), 'subprocess.call', 'subprocess.call', (["['sh', './setting.sh']"], {}), "(['sh', './setting.sh'])\n", (2745, 2769), False, 'import subprocess\n'), ((3387, 3403), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (3397, 3403), False, 'import os\n'), ((4036, 4081), 'subprocess.call', 'subprocess.call', (["['bash', './killProcess.sh']"], {}), "(['bash', './killProcess.sh'])\n", (4051, 4081), False, 'import subprocess\n'), ((4154, 4203), 'subprocess.call', 'subprocess.call', (["['sh', './addFile.sh', filename]"], {}), "(['sh', './addFile.sh', filename])\n", (4169, 4203), False, 'import subprocess\n'), ((4213, 4253), 'subprocess.call', 'subprocess.call', (["['sh', './continue.sh']"], {}), "(['sh', './continue.sh'])\n", (4228, 4253), False, 'import subprocess\n'), ((4262, 4311), 'subprocess.call', 'subprocess.call', (["['sh', './autoCommitProcess.sh']"], {}), "(['sh', 
'./autoCommitProcess.sh'])\n", (4277, 4311), False, 'import subprocess\n'), ((3696, 3775), 'subprocess.check_output', 'subprocess.check_output', (["['python', path + py_list[0]]"], {'universal_newlines': '(True)'}), "(['python', path + py_list[0]], universal_newlines=True)\n", (3719, 3775), False, 'import subprocess\n'), ((4343, 4388), 'subprocess.call', 'subprocess.call', (["['bash', './killProcess.sh']"], {}), "(['bash', './killProcess.sh'])\n", (4358, 4388), False, 'import subprocess\n'), ((4509, 4564), 'subprocess.call', 'subprocess.call', (["['sh', './userCommit.sh', branch, msg]"], {}), "(['sh', './userCommit.sh', branch, msg])\n", (4524, 4564), False, 'import subprocess\n'), ((4574, 4614), 'subprocess.call', 'subprocess.call', (["['sh', './continue.sh']"], {}), "(['sh', './continue.sh'])\n", (4589, 4614), False, 'import subprocess\n'), ((4623, 4672), 'subprocess.call', 'subprocess.call', (["['sh', './autoCommitProcess.sh']"], {}), "(['sh', './autoCommitProcess.sh'])\n", (4638, 4672), False, 'import subprocess\n'), ((3897, 3937), 'subprocess.call', 'subprocess.call', (["['sh', './continue.sh']"], {}), "(['sh', './continue.sh'])\n", (3912, 3937), False, 'import subprocess\n'), ((3954, 4003), 'subprocess.call', 'subprocess.call', (["['sh', './autoCommitProcess.sh']"], {}), "(['sh', './autoCommitProcess.sh'])\n", (3969, 4003), False, 'import subprocess\n'), ((4704, 4749), 'subprocess.call', 'subprocess.call', (["['bash', './killProcess.sh']"], {}), "(['bash', './killProcess.sh'])\n", (4719, 4749), False, 'import subprocess\n')]
|
#! /usr/bin/env python
import argparse
import requests
arg_parser = argparse.ArgumentParser(
prog="get-weather", description="Get weather for entered city."
)
arg_parser.add_argument(
"city", metavar="my_city", type=str, help="City for which you want to get weather."
)
def get_city_weather(search_city):
api_key = "2fe992c00735713d86f2d6577ff41a3d"
url = f"http://api.openweathermap.org/data/2.5/weather?appid={api_key}&q="
response = requests.get(url + search_city)
return response.json()
if __name__ == "__main__":
args = arg_parser.parse_args()
try:
weather = get_city_weather(args.city)
print(f"The weather in {args.city}: {weather['weather'][0]['description']}")
except KeyError:
print("City no found.")
|
[
"argparse.ArgumentParser",
"requests.get"
] |
[((69, 162), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""get-weather"""', 'description': '"""Get weather for entered city."""'}), "(prog='get-weather', description=\n 'Get weather for entered city.')\n", (92, 162), False, 'import argparse\n'), ((460, 491), 'requests.get', 'requests.get', (['(url + search_city)'], {}), '(url + search_city)\n', (472, 491), False, 'import requests\n')]
|
# Generated by Django 3.0.7 on 2020-06-14 15:47
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0003_auto_20200614_1506'),
]
operations = [
migrations.AlterField(
model_name='urlmapping',
name='original_url',
field=models.TextField(validators=[django.core.validators.URLValidator]),
),
]
|
[
"django.db.models.TextField"
] |
[((373, 439), 'django.db.models.TextField', 'models.TextField', ([], {'validators': '[django.core.validators.URLValidator]'}), '(validators=[django.core.validators.URLValidator])\n', (389, 439), False, 'from django.db import migrations, models\n')]
|
from django.core.management.utils import get_random_secret_key
secret_key = get_random_secret_key()
text = 'SECRET_KEY = \'{0}\''.format(secret_key)
print(text)
|
[
"django.core.management.utils.get_random_secret_key"
] |
[((79, 102), 'django.core.management.utils.get_random_secret_key', 'get_random_secret_key', ([], {}), '()\n', (100, 102), False, 'from django.core.management.utils import get_random_secret_key\n')]
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# author: bigfoolliu
"""
Demonstration of the print function
"""
import time
def end_demo():
"""print结尾控制,加flush强制刷新"""
for _ in range(100):
print("#", end="", flush=True)
time.sleep(0.01)
print()
def progress_demo():
"""使用\r展示进度"""
days = 365
for i in range(days):
print("\r", "progress:{}%".format(round((i + 1) * 100 / days)), end="", flush=True)
time.sleep(0.01)
print()
def sep_demo():
"""使用sep参数将结果使用指定分隔符分割"""
print("name", "age", "score", sep=" | ")
def sysout_demo():
"""将print的默认输出改到指定文件,而不是默认的屏幕"""
f = open("print_demo.log", "w")
print("hello, this is print demo.", file=f)
print("hello, this is print demo again.", file=f)
f.close()
if __name__ == "__main__":
# end_demo()
# progress_demo()
sep_demo()
sysout_demo()
|
[
"time.sleep"
] |
[((221, 237), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (231, 237), False, 'import time\n'), ((433, 449), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (443, 449), False, 'import time\n')]
|
from __future__ import annotations
import time
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Optional, List, Tuple, Dict, Any
if TYPE_CHECKING:
from . import *
ScoreBoardItemType = Tuple[User, int]
from . import WithGameLifecycle
def minmax(x: int, a: int, b: int) -> int:
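# clamp x into the closed interval [a, b]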
if x<a: return a
elif x>b: return b
return x
class Board(WithGameLifecycle, ABC):
def __init__(self, board_type: str, name: str):
self.board_type = board_type
self.name = name
@property
@abstractmethod
def summarized(self) -> Dict[str, Any]:
raise NotImplementedError()
class ScoreBoard(Board):
MAX_DISPLAY_USERS = 100
MAX_TOPSTAR_USERS = 10
def __init__(self, name: str, game: Game, group: Optional[List[str]], show_group: bool):
super().__init__('score', name)
self._game = game
self.show_group: bool = show_group
self.group: Optional[List[str]] = group
self.board: List[ScoreBoardItemType] = []
self.uid_to_rank: Dict[int, int] = {}
self._summarized: Dict[str, Any] = self._summarize()
@property
def summarized(self) -> Dict[str, Any]:
return self._summarized
def _update_board(self) -> None:
def is_valid(x: ScoreBoardItemType) -> bool:
user, score = x
return (
((user._store.group in self.group) if self.group is not None else True)
and score>0
)
def sorter(x: ScoreBoardItemType) -> Tuple[Any, ...]:
user, score = x
return (
-score,
-1 if user.last_succ_submission is None else user.last_succ_submission._store.id,
)
b = [(u, u.tot_score) for u in self._game.users.list]
self.board = sorted([x for x in b if is_valid(x)], key=sorter)
self.uid_to_rank = {user._store.id: idx+1 for idx, (user, _score) in enumerate(self.board)}
def _summarize(self) -> Dict[str, Any]:
return {
'challenges': [{
'id': ch._store.id,
'title': ch._store.title,
'category': ch._store.category,
'flags': [f.name for f in ch.flags],
} for ch in self._game.challenges.list],
'list': [{
'rank': idx+1,
'nickname': u._store.profile.nickname_or_null or '--',
'group_disp': u._store.group_disp() if self.show_group else None,
'score': score,
'last_succ_submission_ts': int(u.last_succ_submission._store.timestamp_ms/1000) if u.last_succ_submission else None,
'challenge_status': {
ch._store.id: ch.user_status(u)
for ch in self._game.challenges.list
},
'flag_pass_ts': {
f'{f.challenge._store.id}_{f.idx0}': int(sub._store.timestamp_ms/1000)
for f, sub in u.passed_flags.items()
},
} for idx, (u, score) in enumerate(self.board[:self.MAX_DISPLAY_USERS])],
'topstars': [{
'nickname': u._store.profile.nickname_or_null or '--',
'submissions': [{
'timestamp_ms': sub._store.timestamp_ms,
'gained_score': sub.gained_score(),
} for sub in u.succ_submissions]
} for u, _score in self.board[:self.MAX_TOPSTAR_USERS]],
'time_range': [
self._game.trigger.board_begin_ts,
minmax(int(time.time())+1, self._game.trigger.board_begin_ts+1, self._game.trigger.board_end_ts),
],
}
def on_scoreboard_reset(self) -> None:
self.board = []
self._summarized = self._summarize()
def on_scoreboard_update(self, submission: Submission, in_batch: bool) -> None:
if not in_batch and submission.matched_flag is not None:
if self.group is None or submission.user._store.group in self.group:
self._update_board()
self._summarized = self._summarize()
def on_scoreboard_batch_update_done(self) -> None:
self._update_board()
self._summarized = self._summarize()
class FirstBloodBoard(Board):
def __init__(self, name: str, game: Game, group: Optional[List[str]], show_group: bool):
super().__init__('firstblood', name)
self._game = game
self.show_group: bool = show_group
self.group: Optional[List[str]] = group
self.chall_board: Dict[Challenge, Submission] = {}
self.flag_board: Dict[Flag, Submission] = {}
self._summarized: Dict[str, Any] = self._summarize()
@property
def summarized(self) -> Dict[str, Any]:
return self._summarized
def _summarize(self) -> Dict[str, Any]:
return {
'list': [{
'title': ch._store.title,
'id': ch._store.id,
'flags': [{
'flag_name': None,
'nickname': ch_sub.user._store.profile.nickname_or_null if ch_sub is not None else None,
'group_disp': ch_sub.user._store.group_disp() if (ch_sub is not None and self.show_group) else None,
'timestamp': int(ch_sub._store.timestamp_ms/1000) if ch_sub is not None else None,
}] + ([] if len(ch.flags)<=1 else [{
'flag_name': f.name,
'nickname': f_sub.user._store.profile.nickname_or_null if f_sub is not None else None,
'group_disp': f_sub.user._store.group_disp() if (f_sub is not None and self.show_group) else None,
'timestamp': int(f_sub._store.timestamp_ms/1000) if f_sub is not None else None,
} for f in ch.flags for f_sub in [self.flag_board.get(f, None)]]),
} for ch in self._game.challenges.list for ch_sub in [self.chall_board.get(ch, None)]],
}
def on_scoreboard_reset(self) -> None:
self.chall_board = {}
self.flag_board = {}
self._summarized = self._summarize()
def on_scoreboard_update(self, submission: Submission, in_batch: bool) -> None:
if submission.matched_flag is not None:
assert submission.challenge is not None, 'submission matched flag to no challenge'
if self.group is None or submission.user._store.group in self.group:
passed_all_flags = submission.challenge in submission.user.passed_challs
if submission.matched_flag not in self.flag_board:
self.flag_board[submission.matched_flag] = submission
if not in_batch and not passed_all_flags:
self._game.worker.emit_local_message({
'type': 'flag_first_blood',
'board_name': self.name,
'nickname': submission.user._store.profile.nickname_or_null,
'challenge': submission.challenge._store.title,
'flag': submission.matched_flag.name,
}, self.group)
if submission.challenge not in self.chall_board and passed_all_flags:
self.chall_board[submission.challenge] = submission
if not in_batch:
self._game.worker.emit_local_message({
'type': 'challenge_first_blood',
'board_name': self.name,
'nickname': submission.user._store.profile.nickname_or_null,
'challenge': submission.challenge._store.title,
}, self.group)
self._summarized = self._summarize()
|
[
"time.time"
] |
[((3589, 3600), 'time.time', 'time.time', ([], {}), '()\n', (3598, 3600), False, 'import time\n')]
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import subprocess
import os
import json
import sys
from ansible.errors import AnsibleError
import qb
from qb.ipc.rpc import client as rpc_client
def get_semver_path():
bin_path = os.path.join(qb.ROOT, 'node_modules', 'semver', 'bin', 'semver')
if not os.path.isfile(bin_path):
raise Exception("can't find semver at %s" % bin_path)
return bin_path
# get_semver_path()
def semver_inc(version, level = None, preid = None):
'''increment the version at level, with optional preid for pre- levels.
runs
semver --increment <level> [--preid <preid>] <version>
This does **not** do what you probably want... `preid` is ignored:
>>> semver_inc('1.0.0', 'patch', preid = 'dev')
'1.0.1'
>>> semver_inc('1.0.0', 'minor', preid = 'dev')
'1.1.0'
The only way to get the `preid` appended is to increment the prerelease:
>>> semver_inc('1.0.0', 'prerelease', preid = 'dev')
'1.0.1-dev.0'
'''
cmd = [
get_semver_path(),
'--increment',
]
if not (level is None):
cmd.append(level)
if not (preid is None):
cmd.append('--preid')
cmd.append(preid)
cmd.append(version)
out = subprocess.check_output(cmd)
return out.rstrip()
# semver_inc()
def semver_parse(version):
'''parse semver.
'''
stmt = (
'''console.log(JSON.stringify(require('semver')(%s), null, 2))''' %
json.dumps(version)
)
cmd = ['node', '--eval', stmt]
out = subprocess.check_output(
cmd,
cwd = qb.ROOT
)
version = json.loads(out)
version['is_release'] = len(version['prerelease']) == 0
version['is_dev'] = (
len(version['prerelease']) > 0 and
version['prerelease'][0] == 'dev'
)
version['is_rc'] = (
len(version['prerelease']) > 0 and
version['prerelease'][0] == 'rc'
)
if version['is_release']:
version['level'] = 'release'
else:
version['level'] = version['prerelease'][0]
    # deprecated name for level
version['type'] = version['level']
version['release'] = "%(major)s.%(minor)s.%(patch)s" % version
return version
# semver_parse()
def qb_version_parse(version_string):
'''Parse version into QB::Package::Version
'''
return rpc_client.send('QB::Package::Version', 'from', version_string)
def qb_read_version(file_path):
'''Read a QB::Package::Version from a file.
'''
with open(file_path, 'r') as file:
return qb_version_parse(file.read())
class FilterModule(object):
''' version manipulation filters '''
def filters(self):
return {
'semver_inc': semver_inc,
'semver_parse': semver_parse,
'qb_version_parse': qb_version_parse,
'qb_read_version': qb_read_version,
}
# filters()
# FilterModule
# testing - run the module doctests when executed directly
if __name__ == '__main__':
import doctest
doctest.testmod()
|
[
"json.loads",
"subprocess.check_output",
"qb.ipc.rpc.client.send",
"json.dumps",
"os.path.isfile",
"os.path.join",
"doctest.testmod"
] |
[((276, 340), 'os.path.join', 'os.path.join', (['qb.ROOT', '"""node_modules"""', '"""semver"""', '"""bin"""', '"""semver"""'], {}), "(qb.ROOT, 'node_modules', 'semver', 'bin', 'semver')\n", (288, 340), False, 'import os\n'), ((1358, 1386), 'subprocess.check_output', 'subprocess.check_output', (['cmd'], {}), '(cmd)\n', (1381, 1386), False, 'import subprocess\n'), ((1672, 1713), 'subprocess.check_output', 'subprocess.check_output', (['cmd'], {'cwd': 'qb.ROOT'}), '(cmd, cwd=qb.ROOT)\n', (1695, 1713), False, 'import subprocess\n'), ((1757, 1772), 'json.loads', 'json.loads', (['out'], {}), '(out)\n', (1767, 1772), False, 'import json\n'), ((2515, 2578), 'qb.ipc.rpc.client.send', 'rpc_client.send', (['"""QB::Package::Version"""', '"""from"""', 'version_string'], {}), "('QB::Package::Version', 'from', version_string)\n", (2530, 2578), True, 'from qb.ipc.rpc import client as rpc_client\n'), ((3207, 3224), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (3222, 3224), False, 'import doctest\n'), ((357, 381), 'os.path.isfile', 'os.path.isfile', (['bin_path'], {}), '(bin_path)\n', (371, 381), False, 'import os\n'), ((1591, 1610), 'json.dumps', 'json.dumps', (['version'], {}), '(version)\n', (1601, 1610), False, 'import json\n')]
|
from datetime import datetime
from collections import Counter, defaultdict, OrderedDict
from itertools import chain
from random import random
import numpy as np
from cma import CMAEvolutionStrategy, CMAOptions
from loguru import logger
from math import sqrt
from sklearn.preprocessing import MinMaxScaler
from sortedcontainers import SortedDict
from trueskill import BETA, global_env, rate_1vs1, Rating
from xgboost import XGBRegressor
from .data import DATA
from .data_2016 import DATA_2016
from .data_2017 import DATA_2017
from .data_2018 import DATA_2018
def win_probability(team1, team2):
delta_mu = sum(r.mu for r in team1) - sum(r.mu for r in team2)
sum_sigma = sum(r.sigma ** 2 for r in chain(team1, team2))
size = len(team1) + len(team2)
denom = sqrt(size * (BETA * BETA) + sum_sigma)
ts = global_env()
return ts.cdf(delta_mu / denom)
def to_decimal_odds(us_odds):
if us_odds > 0:
return us_odds / 100 + 1
else:
return 100 / us_odds + 1
def to_implied_odds(us_odds: float) -> float:
decimal_odds = to_decimal_odds(us_odds)
try:
return 1 / decimal_odds
except ZeroDivisionError:
return 1
def get_regressor(X_train, y_train, X_test=None, y_test=None, **reg_params):
"""get regressor"""
logger.info('')
logger.info('Training model...')
eval_set = [(np.array(X_train), y_train)]
if X_test and y_test:
eval_set.append((np.array(X_test), y_test))
reg = XGBRegressor(objective='reg:squarederror', n_jobs=4, **reg_params)
reg = reg.fit(X_train, y_train, eval_set=eval_set, eval_metric='auc', verbose=0)
return reg
def main(hyper_params, train=0):
logger.info('Starting main training')
all_data = DATA_2016 + DATA_2017 + DATA_2018 + DATA
# estimators, learning_rate = hyper_params
# gamma, max_depth, min_child_weight = hyper_params
# max_delta_step, subsample, scale_pos_weight = hyper_params
reg_params = {
'n_estimators': 100 if train else 1000,
# 'learning_rate': 0.09426181829690375, # 0.24678854038938264
# 'gamma': 0.1860088097748791, # 0.0012826703538762253,
# 'max_depth': int(round(2.1956102758009424)), # 2.5506573766936533)),
# 'min_child_weight': 3.5802932556001426,
# 'max_delta_step': 0.10779250505931337,
# 'subsample': 0.9859889452465481,
# 'scale_pos_weight': 1.2283288967549404,
}
# bet_pred_a, bet_pred_b, bet_odds_a, bet_odds_b, bet_wnl_a, bet_wnl_b = hyper_params
bet_pred_a = 1.713980438805089 # -3.55
bet_pred_b = -4.065137791049565 # -17.93
bet_odds_a = 3.122323263774503 # -12.44
bet_odds_b = 0.0837110561236318 # -16.17
bet_wnl_a = 15.100288654913749 # -3.52 # -8.01
bet_wnl_b = -10.111913271763338 # -4.96 # 2.50
# bet_ts_a, bet_ts_b, bet_tmi_a, bet_tmi_b, bet_tma_a, bet_tma_b = hyper_params
bet_ts_a = -50.59979897765422 # -26.88 # -3.52 # -8.01
bet_ts_b = -69.5794588139756 # -72.60 # -3.52 # -8.01
bet_tmi_a = -45.94904856923797
bet_tmi_b = -1.128236337281963
bet_tma_a = -28.62283185173976
bet_tma_b = -26.933801584409544
# init
reg = None
scaler = MinMaxScaler()
cutoff = int(len(all_data) * 0.6)
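    # the first 60% of scenes are used for training; the rest are walked forward as the test set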
start_date = None
ratings = defaultdict(lambda: Rating())
wins_losses = defaultdict(lambda: [])
early_fights = defaultdict(lambda: 0.5)
last_fights = defaultdict(lambda: 0.5)
X_train = []
y_train = []
X_test = []
y_test = []
payouts = []
bet_amts = []
accuracy = (0, 0)
tab = []
tab_amts = []
actual = (0, 0)
actual_debug = []
bet_multis = []
bet_multis_cat = []
preds_flipped = []
odds_outcomes = []
# loop through scenes
for i, scene in enumerate(all_data):
is_training = i < cutoff
if not is_training:
if not reg:
start_date = datetime.strptime(scene['date'], '%Y-%m-%d')
# scale
scaler.partial_fit(X_train)
X_train = scaler.transform(X_train)
reg = get_regressor(X_train, y_train, **reg_params)
logger.info('')
logger.info(f'{scene["date"]} {scene["name"]}')
for fight in scene['fights']:
bet_size = 1
# skip if no odds:
if 'odds' not in fight:
continue
f1 = fight['fighters'][0]['name']
f2 = fight['fighters'][1]['name']
# trueskill data
f1_ts = ratings[f1].mu
f1_sigma = ratings[f1].sigma
f2_ts = ratings[f2].mu
f2_sigma = ratings[f2].sigma
f1_ts_min = f1_ts - f1_sigma * 2
f2_ts_min = f2_ts - f2_sigma * 2
f1_ts_max = f1_ts + f1_sigma * 2
f2_ts_max = f2_ts + f2_sigma * 2
# odds data
f1_odds = fight['odds'][f1]
f2_odds = fight['odds'][f2]
if not -50 < f1_odds < 50 or not -50 < f2_odds < 50:
raise ValueError(f'surely these odds are wrong? {f1_odds} {f2_odds}')
win1_prob = win_probability([ratings[f1]], [ratings[f2]])
win2_prob = win_probability([ratings[f2]], [ratings[f1]])
# wins losses data
f1_wins_losses = Counter(wins_losses[f1])
f1_wnl_winrate = f1_wins_losses[1] / max(1, len(wins_losses[f1]))
f2_wins_losses = Counter(wins_losses[f2])
f2_wnl_winrate = f2_wins_losses[1] / max(1, len(wins_losses[f2]))
fight_data = [
[
win1_prob,
f1_odds,
f2_odds,
f1_ts,
f2_ts,
f1_sigma,
f2_sigma,
f1_ts_min - f2_ts_min,
f1_ts - f2_ts,
f1_ts_max - f2_ts_max,
last_fights[f1],
last_fights[f2],
early_fights[f1],
early_fights[f2],
f1_wins_losses[1],
f1_wins_losses[-1],
f1_wnl_winrate,
f2_wins_losses[1],
f2_wins_losses[-1],
f2_wnl_winrate,
],
[
win2_prob,
f2_odds,
f1_odds,
f2_ts,
f1_ts,
f2_sigma,
f1_sigma,
f2_ts_min - f1_ts_min,
f2_ts - f1_ts,
f2_ts_max - f1_ts_max,
last_fights[f2],
last_fights[f1],
early_fights[f2],
early_fights[f1],
f2_wins_losses[1],
f2_wins_losses[-1],
f2_wnl_winrate,
f1_wins_losses[1],
f1_wins_losses[-1],
f1_wnl_winrate,
]
]
##########################################
# update data
if 'winner' in fight:
# get winner
fw = fight['winner']['fighter']
is_win_1 = fw == f1
fl = f2 if is_win_1 else f1
if not is_win_1 and fw != f2 and fw is not None:
raise ValueError(f'unknown winner {fw}')
drawn = fw is None
# update wins losses
wins_losses[f1] += [1]
wins_losses[f2] += [-1]
# update fights
early_fights[fw] = last_fights[fw]
early_fights[fl] = last_fights[fl]
last_fights[fw] = 1
last_fights[fl] = 0
# update ratings
ratings[fw], ratings[fl] = rate_1vs1(ratings[fw], ratings[fl], drawn=drawn)
###################################
# train
if is_training:
if 'winner' in fight:
X_train.extend(fight_data)
y_train.extend([is_win_1, not is_win_1])
###################################
# test
else:
scaled_fight_data = scaler.transform(fight_data)
f1_pred, f2_pred = reg.predict(scaled_fight_data)
#############################
# bet scaling
bet_multi = 1
# pred max
if f1_pred > f2_pred:
f_pred = f1_pred - f2_pred
else:
f_pred = f2_pred - f1_pred
bet_pred_multi = np.polyval([bet_pred_a, bet_pred_b], [f_pred])[0]
bet_pred_multi = round(min(1, max(0, bet_pred_multi)))
bet_multi += bet_pred_multi
bet_multis_cat.append(f'pred:{bet_pred_multi:.0f}')
# odds diff
if f1_pred > f2_pred:
f_odds = 1 / f1_odds - 1 / f2_odds
else:
f_odds = 1 / f2_odds - 1 / f1_odds
bet_odds_multi = np.polyval([bet_odds_a, bet_odds_b], [f_odds])[0]
bet_odds_multi = round(min(1, max(0, bet_odds_multi)))
bet_multi += bet_odds_multi
bet_multis_cat.append(f'odds:{bet_odds_multi:.0f}')
# wins and losses
if f1_pred > f2_pred:
f_wnl = f1_wnl_winrate - f2_wnl_winrate
else:
f_wnl = f2_wnl_winrate - f1_wnl_winrate
bet_wnl_multi = np.polyval([bet_wnl_a, bet_wnl_b], [f_wnl])[0]
bet_wnl_multi = round(min(1, max(0, bet_wnl_multi)))
bet_multi += bet_wnl_multi
bet_multis_cat.append(f'wnl:{bet_wnl_multi:.0f}')
# trueskill mu
if f1_pred > f2_pred:
f_ts = f1_ts - f2_ts
else:
f_ts = f2_ts - f1_ts
bet_ts_multi = np.polyval([bet_ts_a, bet_ts_b], [f_ts])[0]
bet_ts_multi = round(min(1, max(0, bet_ts_multi)))
bet_multi += bet_ts_multi
bet_multis_cat.append(f'ts:{bet_ts_multi:.0f}')
# trueskill min
if f1_pred > f2_pred:
f_ts_min = f1_ts_min - f2_ts_min
else:
f_ts_min = f2_ts_min - f1_ts_min
bet_tmi_multi = np.polyval([bet_tmi_a, bet_tmi_b], [f_ts_min])[0]
bet_tmi_multi = round(min(1, max(0, bet_tmi_multi)))
bet_multi += bet_tmi_multi
bet_multis_cat.append(f'tmi:{bet_tmi_multi:.0f}')
# trueskill max
if f1_pred > f2_pred:
f_ts_max = f1_ts_max - f2_ts_max
else:
f_ts_max = f2_ts_max - f1_ts_max
bet_tma_multi = np.polyval([bet_tma_a, bet_tma_b], [f_ts_max])[0]
bet_tma_multi = round(min(1, max(0, bet_tma_multi)))
bet_multi += bet_tma_multi
bet_multis_cat.append(f'tma:{bet_tma_multi:.0f}')
bet_size *= round(bet_multi)
bet_amt = round(bet_size * bet_multi)
assert bet_amt >= 1, f'bet multi is fucked: {bet_multi}'
bet_amts.append(bet_size)
bet_multis.append(int(round(bet_multi)))
#############################
# prediction made
if 'prediction' in fight and fight['prediction'] is None:
if f1_pred > f2_pred:
exp_winner = f1
pred_exp_winner = f1_pred
exp_loser = f2
pred_exp_loser = f2_pred
else:
exp_winner = f2
pred_exp_winner = f2_pred
exp_loser = f1
pred_exp_loser = f1_pred
logger.warning(f'[{pred_exp_winner * 100:.0f}% vs {pred_exp_loser * 100:.0f}%] Bet x{bet_multi} on {exp_winner} to beat {exp_loser} [{ratings[exp_winner].mu:.0f} vs {ratings[exp_loser].mu:.0f}]')
continue
# good luck with your bets
elif 'winner' not in fight:
logger.warning(f'Pending {f1} vs {f2}')
continue
if is_win_1:
fw_pred = f1_pred
fl_pred = f2_pred
else:
fw_pred = f2_pred
fl_pred = f1_pred
# add test data
X_test.extend(scaled_fight_data)
y_test.extend([is_win_1, not is_win_1])
# testing outcome
correct = 0
payout = -bet_size
if is_win_1 and f1_pred > f2_pred:
correct = 1
payout += f1_odds * bet_size
elif not is_win_1 and f2_pred > f1_pred:
correct = 1
payout += f2_odds * bet_size
odds_outcomes.append(int((f1_odds < f2_odds and is_win_1) or (f2_odds > f1_odds and not is_win_1)))
payouts.append(round(payout, 2))
accuracy = (accuracy[0] + correct, accuracy[1] + 1)
# actual outcome
pred_flipped = False
if 'bet' in fight:
is_actual_correct = fight['prediction'] == fw
actual = (actual[0] + is_actual_correct, actual[1] + 1)
cash = -fight['bet']
if is_actual_correct:
fw_odds = f1_odds if is_win_1 else f2_odds
cash += fw_odds * fight['bet']
else:
fw_odds = f2_odds if is_win_1 else f1_odds
tab.append(round(cash, 2))
tab_amts.append(fight['bet'])
# pred flipped?
pred_flipped = (f1_pred > f2_pred and fight['prediction'] != f1) or (
f2_pred > f1_pred and fight['prediction'] != f2)
actual_debug.append(f'${fight["bet"]} {fw_odds:.2f}: {cash:.2f} {fight["prediction"]} {fight["date"]}')
preds_flipped.append(int(pred_flipped))
log_balance = f'{"!!" if pred_flipped else " "}[{sum(payouts):.0f}|{payout:.0f}]'
log_pred = f'[{fw_pred * 100:.0f}% vs {fl_pred * 100:.0f}%]'
log_fight = f'x{bet_multi} {fw} {fight["winner"]["by"]} {fl}'
log_ratings = f'[{ratings[fw].mu:.0f} vs {ratings[fl].mu:.0f}]'
logger.info(f'{log_balance} {log_pred} {log_fight} {log_ratings}')
if train:
total_payouts = sum(payouts)
roi = total_payouts / sum(bet_amts)
res = -roi - (total_payouts / 5000)
print(f'Score: {-res*100:.2f} ROI {roi * 100:.1f}% Profit ${total_payouts:.0f}')
return res
else:
summary(reg, accuracy, payouts, start_date, bet_amts, bet_multis, bet_multis_cat, actual, tab, tab_amts, odds_outcomes)
def summary(reg, accuracy, payouts, start_date, bet_amts, bet_multis, bet_multis_cat, actual, tab, tab_amts, odds_outcomes):
logger.info('')
logger.info('Tree info:')
# reg = get_regressor(X_train, y_train, X_test, y_test, estimators=estimators, max_depth=max_depth)
reg_score = reg.evals_result()
params = reg.get_params()
logger.info(f'Num estimators: {params["n_estimators"]}')
logger.info(f'Learning rate: {params["learning_rate"]:.2f}')
logger.info(f'Max depth: {params["max_depth"]}')
logger.info(f'Accuracy: training={reg_score["validation_0"]["auc"][-1]*100:.0f}%')
feature_names = [
'win%',
'odds', '~odds',
'ts', '~ts', 'sigma', '~sigma',
'ts_min_diff', 'ts_diff', 'ts_max_diff',
'last', '~last',
'early', '~early',
'wins', '~wins', 'losses', '~losses', 'winrate', '~winrate',
]
assert len(feature_names) == len(reg.feature_importances_), f'{len(feature_names)} features vs {len(reg.feature_importances_)} reg values'
logger.info('')
logger.info(f'Features:')
features = SortedDict({v: k for k, v in zip(feature_names, reg.feature_importances_)})
for k in features.keys():
logger.info(f'{features[k]}: {k*1000:.0f}')
continue
if accuracy[1]:
payouts = np.array(payouts)
logger.info('')
logger.info('Testing:')
odds_acc = sum([t for t in odds_outcomes if t > 0]) / len(odds_outcomes)
logger.info(f'Accuracy {accuracy[0]}/{accuracy[1]} = {accuracy[0]/accuracy[1]*100:.1f}% Odds: {odds_acc*100:.1f}%')
logger.info(f'ROI {sum(payouts) / sum(bet_amts) * 100:.1f}% Profit ${sum(payouts):.0f}')
days = (datetime.now() - start_date).days
logger.info(f'Profit: per day: ${sum(payouts) / days:.2f} per bet ${payouts.mean():.2f}')
logger.info(f'Common multis: {Counter(bet_multis).most_common(4)}')
logger.info(f'cat multis: {Counter(bet_multis_cat).most_common()}')
if actual[1]:
tab = np.array(tab)
logger.info('')
logger.info('Actual:')
logger.info(f'Accuracy {actual[0]}/{actual[1]} = {actual[0]/actual[1] * 100:.1f}%')
logger.info(f'ROI {sum(tab) / sum(tab_amts) * 100:.2f}% Profit ${sum(tab):.0f}')
days = (datetime.now() - datetime(2019, 7, 13)).days
logger.info(f'Profit: per day: ${sum(tab) / days:.2f} per bet ${tab.mean():.2f}')
sheet = -62.62
if abs(sum(tab) - sheet) > 0.01:
for l in actual_debug:
logger.warning(l)
logger.error(f'debug! {sheet:.2f} != {sum(tab):.2f} diff {sum(tab) - sheet:.2f}')
def run():
train = 0
names = [
# 'bet_pred_a', 'bet_pred_b', 'bet_odds_a', 'bet_odds_b', 'bet_wnl_a', 'bet_wnl_b',
'bet_ts_a', 'bet_ts_b', 'bet_tmi_a', 'bet_tmi_b', 'bet_tma_a', 'bet_tma_b',
]
params = [
0, 0, 0, 0, 0, 0
]
bounds = [[-np.inf],
[np.inf]]
assert len(params) == len(names)
# assert len(params) == len(bounds[0])
if train:
sigma = 1
opts = CMAOptions()
# opts['tolx'] = 1E-2
opts['bounds'] = bounds
es = CMAEvolutionStrategy(params, sigma, inopts=opts)
while not es.stop():
solutions = es.ask()
fitness = [main(x, train=1) for x in solutions]
es.tell(solutions, fitness)
es.disp()
print(list(es.result[0]))
print(list(es.result[5]))
es.result_pretty()
print('')
print('best')
print(list(es.result[0]))
print('')
print('xfavorite: distribution mean in "phenotype" space, to be considered as current best estimate of the optimum')
print(list(es.result[5]))
else:
main(params)
if __name__ == '__main__':
run()
|
[
"trueskill.global_env",
"math.sqrt",
"numpy.polyval",
"sklearn.preprocessing.MinMaxScaler",
"loguru.logger.warning",
"cma.CMAEvolutionStrategy",
"datetime.datetime.now",
"datetime.datetime",
"collections.defaultdict",
"loguru.logger.info",
"datetime.datetime.strptime",
"numpy.array",
"xgboost.XGBRegressor",
"cma.CMAOptions",
"collections.Counter",
"trueskill.rate_1vs1",
"itertools.chain",
"trueskill.Rating"
] |
[((774, 812), 'math.sqrt', 'sqrt', (['(size * (BETA * BETA) + sum_sigma)'], {}), '(size * (BETA * BETA) + sum_sigma)\n', (778, 812), False, 'from math import sqrt\n'), ((822, 834), 'trueskill.global_env', 'global_env', ([], {}), '()\n', (832, 834), False, 'from trueskill import BETA, global_env, rate_1vs1, Rating\n'), ((1286, 1301), 'loguru.logger.info', 'logger.info', (['""""""'], {}), "('')\n", (1297, 1301), False, 'from loguru import logger\n'), ((1306, 1338), 'loguru.logger.info', 'logger.info', (['"""Training model..."""'], {}), "('Training model...')\n", (1317, 1338), False, 'from loguru import logger\n'), ((1475, 1541), 'xgboost.XGBRegressor', 'XGBRegressor', ([], {'objective': '"""reg:squarederror"""', 'n_jobs': '(4)'}), "(objective='reg:squarederror', n_jobs=4, **reg_params)\n", (1487, 1541), False, 'from xgboost import XGBRegressor\n'), ((1682, 1719), 'loguru.logger.info', 'logger.info', (['"""Starting main training"""'], {}), "('Starting main training')\n", (1693, 1719), False, 'from loguru import logger\n'), ((3209, 3223), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {}), '()\n', (3221, 3223), False, 'from sklearn.preprocessing import MinMaxScaler\n'), ((3346, 3370), 'collections.defaultdict', 'defaultdict', (['(lambda : [])'], {}), '(lambda : [])\n', (3357, 3370), False, 'from collections import Counter, defaultdict, OrderedDict\n'), ((3389, 3414), 'collections.defaultdict', 'defaultdict', (['(lambda : 0.5)'], {}), '(lambda : 0.5)\n', (3400, 3414), False, 'from collections import Counter, defaultdict, OrderedDict\n'), ((3432, 3457), 'collections.defaultdict', 'defaultdict', (['(lambda : 0.5)'], {}), '(lambda : 0.5)\n', (3443, 3457), False, 'from collections import Counter, defaultdict, OrderedDict\n'), ((15420, 15435), 'loguru.logger.info', 'logger.info', (['""""""'], {}), "('')\n", (15431, 15435), False, 'from loguru import logger\n'), ((15440, 15465), 'loguru.logger.info', 'logger.info', (['"""Tree info:"""'], {}), "('Tree info:')\n", (15451, 15465), False, 'from loguru import logger\n'), ((15639, 15695), 'loguru.logger.info', 'logger.info', (['f"""Num estimators: {params[\'n_estimators\']}"""'], {}), '(f"Num estimators: {params[\'n_estimators\']}")\n', (15650, 15695), False, 'from loguru import logger\n'), ((15700, 15760), 'loguru.logger.info', 'logger.info', (['f"""Learning rate: {params[\'learning_rate\']:.2f}"""'], {}), '(f"Learning rate: {params[\'learning_rate\']:.2f}")\n', (15711, 15760), False, 'from loguru import logger\n'), ((15765, 15813), 'loguru.logger.info', 'logger.info', (['f"""Max depth: {params[\'max_depth\']}"""'], {}), '(f"Max depth: {params[\'max_depth\']}")\n', (15776, 15813), False, 'from loguru import logger\n'), ((15818, 15907), 'loguru.logger.info', 'logger.info', (['f"""Accuracy: training={reg_score[\'validation_0\'][\'auc\'][-1] * 100:.0f}%"""'], {}), '(\n f"Accuracy: training={reg_score[\'validation_0\'][\'auc\'][-1] * 100:.0f}%")\n', (15829, 15907), False, 'from loguru import logger\n'), ((16327, 16342), 'loguru.logger.info', 'logger.info', (['""""""'], {}), "('')\n", (16338, 16342), False, 'from loguru import logger\n'), ((16347, 16372), 'loguru.logger.info', 'logger.info', (['f"""Features:"""'], {}), "(f'Features:')\n", (16358, 16372), False, 'from loguru import logger\n'), ((4194, 4241), 'loguru.logger.info', 'logger.info', (['f"""{scene[\'date\']} {scene[\'name\']}"""'], {}), '(f"{scene[\'date\']} {scene[\'name\']}")\n', (4205, 4241), False, 'from loguru import logger\n'), ((16502, 16547), 'loguru.logger.info', 'logger.info', 
(['f"""{features[k]}: {k * 1000:.0f}"""'], {}), "(f'{features[k]}: {k * 1000:.0f}')\n", (16513, 16547), False, 'from loguru import logger\n'), ((16602, 16619), 'numpy.array', 'np.array', (['payouts'], {}), '(payouts)\n', (16610, 16619), True, 'import numpy as np\n'), ((16628, 16643), 'loguru.logger.info', 'logger.info', (['""""""'], {}), "('')\n", (16639, 16643), False, 'from loguru import logger\n'), ((16652, 16675), 'loguru.logger.info', 'logger.info', (['"""Testing:"""'], {}), "('Testing:')\n", (16663, 16675), False, 'from loguru import logger\n'), ((16765, 16897), 'loguru.logger.info', 'logger.info', (['f"""Accuracy {accuracy[0]}/{accuracy[1]} = {accuracy[0] / accuracy[1] * 100:.1f}% Odds: {odds_acc * 100:.1f}%"""'], {}), "(\n f'Accuracy {accuracy[0]}/{accuracy[1]} = {accuracy[0] / accuracy[1] * 100:.1f}% Odds: {odds_acc * 100:.1f}%'\n )\n", (16776, 16897), False, 'from loguru import logger\n'), ((17314, 17327), 'numpy.array', 'np.array', (['tab'], {}), '(tab)\n', (17322, 17327), True, 'import numpy as np\n'), ((17336, 17351), 'loguru.logger.info', 'logger.info', (['""""""'], {}), "('')\n", (17347, 17351), False, 'from loguru import logger\n'), ((17360, 17382), 'loguru.logger.info', 'logger.info', (['"""Actual:"""'], {}), "('Actual:')\n", (17371, 17382), False, 'from loguru import logger\n'), ((17391, 17481), 'loguru.logger.info', 'logger.info', (['f"""Accuracy {actual[0]}/{actual[1]} = {actual[0] / actual[1] * 100:.1f}%"""'], {}), "(\n f'Accuracy {actual[0]}/{actual[1]} = {actual[0] / actual[1] * 100:.1f}%')\n", (17402, 17481), False, 'from loguru import logger\n'), ((18396, 18408), 'cma.CMAOptions', 'CMAOptions', ([], {}), '()\n', (18406, 18408), False, 'from cma import CMAEvolutionStrategy, CMAOptions\n'), ((18484, 18532), 'cma.CMAEvolutionStrategy', 'CMAEvolutionStrategy', (['params', 'sigma'], {'inopts': 'opts'}), '(params, sigma, inopts=opts)\n', (18504, 18532), False, 'from cma import CMAEvolutionStrategy, CMAOptions\n'), ((1357, 1374), 'numpy.array', 'np.array', (['X_train'], {}), '(X_train)\n', (1365, 1374), True, 'import numpy as np\n'), ((3318, 3326), 'trueskill.Rating', 'Rating', ([], {}), '()\n', (3324, 3326), False, 'from trueskill import BETA, global_env, rate_1vs1, Rating\n'), ((4170, 4185), 'loguru.logger.info', 'logger.info', (['""""""'], {}), "('')\n", (4181, 4185), False, 'from loguru import logger\n'), ((5311, 5335), 'collections.Counter', 'Counter', (['wins_losses[f1]'], {}), '(wins_losses[f1])\n', (5318, 5335), False, 'from collections import Counter, defaultdict, OrderedDict\n'), ((5443, 5467), 'collections.Counter', 'Counter', (['wins_losses[f2]'], {}), '(wins_losses[f2])\n', (5450, 5467), False, 'from collections import Counter, defaultdict, OrderedDict\n'), ((706, 725), 'itertools.chain', 'chain', (['team1', 'team2'], {}), '(team1, team2)\n', (711, 725), False, 'from itertools import chain\n'), ((1437, 1453), 'numpy.array', 'np.array', (['X_test'], {}), '(X_test)\n', (1445, 1453), True, 'import numpy as np\n'), ((3925, 3969), 'datetime.datetime.strptime', 'datetime.strptime', (["scene['date']", '"""%Y-%m-%d"""'], {}), "(scene['date'], '%Y-%m-%d')\n", (3942, 3969), False, 'from datetime import datetime\n'), ((7904, 7952), 'trueskill.rate_1vs1', 'rate_1vs1', (['ratings[fw]', 'ratings[fl]'], {'drawn': 'drawn'}), '(ratings[fw], ratings[fl], drawn=drawn)\n', (7913, 7952), False, 'from trueskill import BETA, global_env, rate_1vs1, Rating\n'), ((14834, 14900), 'loguru.logger.info', 'logger.info', (['f"""{log_balance} {log_pred} {log_fight} {log_ratings}"""'], {}), 
"(f'{log_balance} {log_pred} {log_fight} {log_ratings}')\n", (14845, 14900), False, 'from loguru import logger\n'), ((16996, 17010), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (17008, 17010), False, 'from datetime import datetime\n'), ((17581, 17595), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (17593, 17595), False, 'from datetime import datetime\n'), ((17598, 17619), 'datetime.datetime', 'datetime', (['(2019)', '(7)', '(13)'], {}), '(2019, 7, 13)\n', (17606, 17619), False, 'from datetime import datetime\n'), ((17832, 17849), 'loguru.logger.warning', 'logger.warning', (['l'], {}), '(l)\n', (17846, 17849), False, 'from loguru import logger\n'), ((8735, 8781), 'numpy.polyval', 'np.polyval', (['[bet_pred_a, bet_pred_b]', '[f_pred]'], {}), '([bet_pred_a, bet_pred_b], [f_pred])\n', (8745, 8781), True, 'import numpy as np\n'), ((9200, 9246), 'numpy.polyval', 'np.polyval', (['[bet_odds_a, bet_odds_b]', '[f_odds]'], {}), '([bet_odds_a, bet_odds_b], [f_odds])\n', (9210, 9246), True, 'import numpy as np\n'), ((9680, 9723), 'numpy.polyval', 'np.polyval', (['[bet_wnl_a, bet_wnl_b]', '[f_wnl]'], {}), '([bet_wnl_a, bet_wnl_b], [f_wnl])\n', (9690, 9723), True, 'import numpy as np\n'), ((10110, 10150), 'numpy.polyval', 'np.polyval', (['[bet_ts_a, bet_ts_b]', '[f_ts]'], {}), '([bet_ts_a, bet_ts_b], [f_ts])\n', (10120, 10150), True, 'import numpy as np\n'), ((10558, 10604), 'numpy.polyval', 'np.polyval', (['[bet_tmi_a, bet_tmi_b]', '[f_ts_min]'], {}), '([bet_tmi_a, bet_tmi_b], [f_ts_min])\n', (10568, 10604), True, 'import numpy as np\n'), ((11017, 11063), 'numpy.polyval', 'np.polyval', (['[bet_tma_a, bet_tma_b]', '[f_ts_max]'], {}), '([bet_tma_a, bet_tma_b], [f_ts_max])\n', (11027, 11063), True, 'import numpy as np\n'), ((12116, 12321), 'loguru.logger.warning', 'logger.warning', (['f"""[{pred_exp_winner * 100:.0f}% vs {pred_exp_loser * 100:.0f}%] Bet x{bet_multi} on {exp_winner} to beat {exp_loser} [{ratings[exp_winner].mu:.0f} vs {ratings[exp_loser].mu:.0f}]"""'], {}), "(\n f'[{pred_exp_winner * 100:.0f}% vs {pred_exp_loser * 100:.0f}%] Bet x{bet_multi} on {exp_winner} to beat {exp_loser} [{ratings[exp_winner].mu:.0f} vs {ratings[exp_loser].mu:.0f}]'\n )\n", (12130, 12321), False, 'from loguru import logger\n'), ((12449, 12488), 'loguru.logger.warning', 'logger.warning', (['f"""Pending {f1} vs {f2}"""'], {}), "(f'Pending {f1} vs {f2}')\n", (12463, 12488), False, 'from loguru import logger\n'), ((17167, 17186), 'collections.Counter', 'Counter', (['bet_multis'], {}), '(bet_multis)\n', (17174, 17186), False, 'from collections import Counter, defaultdict, OrderedDict\n'), ((17240, 17263), 'collections.Counter', 'Counter', (['bet_multis_cat'], {}), '(bet_multis_cat)\n', (17247, 17263), False, 'from collections import Counter, defaultdict, OrderedDict\n')]
|
from flask_script import Manager
from kron import Kron, db, restore_from_file
from kron import Tag, Post, Archive, Box, Document, Person, Topic
app = Kron(__name__)
manager = Manager(app)
@manager.command
def restore(file):
restore_from_file(file)
@manager.shell
def _make_shell_context():
return dict(
app=app, db=db, Tag=Tag, Post=Post,
Archive=Archive, Box=Box, Document=Document, Person=Person,
Topic=Topic
)
if __name__ == "__main__":
manager.run()
|
[
"kron.restore_from_file",
"flask_script.Manager",
"kron.Kron"
] |
[((153, 167), 'kron.Kron', 'Kron', (['__name__'], {}), '(__name__)\n', (157, 167), False, 'from kron import Kron, db, restore_from_file\n'), ((178, 190), 'flask_script.Manager', 'Manager', (['app'], {}), '(app)\n', (185, 190), False, 'from flask_script import Manager\n'), ((233, 256), 'kron.restore_from_file', 'restore_from_file', (['file'], {}), '(file)\n', (250, 256), False, 'from kron import Kron, db, restore_from_file\n')]
|
import requests
from bs4 import BeautifulSoup
linklertoplam=[]
# first of all, it matters that the news site is paginated
# here we collect the links and build a list of links
# anyone who knows HTML knows the links we need sit in the a href attributes, but not every link on the site is useful to us
# so you first have to inspect the page; from my own inspection, the useful links
# turned out to sit between the https://play.google.com/store/apps/details?id=hurriyet.mobil.android&hl=tr link and the /seyahat/konu/gezgin/?p= links
# so I collected the links with code written to match that pattern
# to tidy the resulting corpus I also leave a corpus-cleaning .py, a program that removes the punctuation from your text; to give an example,
# it turns the sentence "Fenerbahçe'nin takım otobüsü." into "Fenerbahçe takım otobüsü", which gains you the words Fenerbahçe and otobüsü
# otherwise, for your model to recognise these words, the tokens "otobüsü." and "Fenerbahçe'nin" would have to appear many times; this way we get rid of possessive suffixes on proper nouns and of punctuation
for p in range(2,49):
    r = requests.get("https://www.hurriyet.com.tr/seyahat/konu/gezgin/?p={}".format(p))  # page number passed via the ?p= query parameter noted above
soup=BeautifulSoup(r.content)
linkler=soup.find_all("a")
linklist=[]
list_min=0
list_max=0
counter=0
counter2=1
for link in linkler:
s=link.get("href")
linklist.append(s)
counter=counter+1
str_s=str(s)
if str_s[:]=="https://play.google.com/store/apps/details?id=hurriyet.mobil.android&hl=tr":
print(counter)
list_min = counter
if str_s[0:24]=='/seyahat/konu/gezgin/?p=' and counter2==1:
counter2=counter2+1
print(counter)
list_max = counter
for i in range(list_min,list_max-1):
linklertoplam.append(linklist[i])
print(len(linklertoplam))
dosya= open('turklink.txt', 'a', encoding='utf8')
for d in range(len(linklertoplam)):
dosya.write('https://www.hurriyet.com.tr'+str(linklertoplam[d])+'\n')
|
[
"bs4.BeautifulSoup"
] |
[((1168, 1192), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.content'], {}), '(r.content)\n', (1181, 1192), False, 'from bs4 import BeautifulSoup\n')]
|
import os
import time
import json
import arxiv
def get_paper_list(query="cat:cs.CL", domain='cscl',
latest_year=2021, end_year=2010, max_results=1000):
outs = {}
year = latest_year
i = 0
while year > end_year:
print(f"Results {i} - {i+max_results}:")
result = arxiv.query(query = query,
start = i,
sort_by = 'submittedDate',
sort_order = 'descending',
max_results = max_results)
new_papers = 0
for paper in result:
arxiv_id = paper.id.split('/')[-1]
N = int(arxiv_id[-1])
if '.' in arxiv_id and N > 1:
arxiv_id = arxiv_id.replace(f'v{N}', '')
print(arxiv_id)
new_papers += 1
year = int(paper.updated[:4])
if arxiv_id not in outs.keys():
outs[arxiv_id] = [N]
else:
outs[arxiv_id].append(N)
i += max_results
time.sleep(3)
print(year)
if new_papers == 0: break
with open(f'../data/arxiv/list/{domain}_list_{len(outs)}.json', 'w') as json_file:
json.dump(outs, json_file, indent=2)
return outs
def generate_json_file(preprint_list, tmp_file_path, domain):
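    # for every arXiv id, fetch each pair of consecutive versions and append their before/after abstracts as JSON lines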
with open(f'{tmp_file_path}/raw_revisions_{domain}.json', 'a') as json_file:
for ID in preprint_list.keys():
max_ver = max(preprint_list[ID])
for i in range(1, max_ver):
print(ID)
preprint_v1 = ID+f'v{i}'
preprint_v2 = ID+f'v{i+1}'
papers = arxiv.query(query="",
id_list=[preprint_v1,preprint_v2],
max_results=2)
try:
source_abs = papers[0].summary
target_abs = papers[1].summary
except:
print(f'Fail to get paper {ID}!!!')
continue
tmp = {
"arxiv_id": ID,
"before_version": i,
"after_version": i+1,
"before_raw_txt": source_abs,
"after_raw_txt": target_abs,
}
time.sleep(3)
json_file.write(json.dumps(tmp)+'\n')
if __name__ == '__main__':
tmp_path = '../data/arxiv'
tmp_list_path = '../data/arxiv/list'
tmp_file_path = '../data/arxiv/raw'
if not os.path.isdir(tmp_path):
os.mkdir(tmp_path)
if not os.path.isdir(tmp_list_path):
os.mkdir(tmp_list_path)
if not os.path.isdir(tmp_file_path):
os.mkdir(tmp_file_path)
# get raw paper id list (paper version >= 2)
cates = ['econ.EM', 'econ.GN', 'econ.TH']
cates += ['q-fin.CP', 'q-fin.EC', 'q-fin.GN', 'q-fin.MF', 'q-fin.PM', 'q-fin.PR',
'q-fin.RM', 'q-fin.ST', 'q-fin.TR']
cates += ['q-bio.BM', 'q-bio.CB', 'q-bio.GN', 'q-bio.MN', 'q-bio.NC', 'q-bio.OT',
'q-bio.PE', 'q-bio.QM', 'q-bio.SC', 'q-bio.TO']
cates += ['cs.AI', 'cs.CC', 'cs.CE', 'cs.CG', 'cs.GT', 'cs.CV', 'cs.CY', 'cs.CR',
'cs.DS', 'cs.DB', 'cs.DL', 'cs.DM', 'cs.DC', 'cs.ET', 'cs.FL', 'cs.GL',
'cs.GR', 'cs.AR', 'cs.HC', 'cs.IR', 'cs.IT', 'cs.LO', 'cs.LG', 'cs.MS',
'cs.MA', 'cs.MM', 'cs.NI', 'cs.NE', 'cs.NA', 'cs.OS', 'cs.OH', 'cs.PF',
'cs.PL', 'cs.RO', 'cs.SI', 'cs.SE', 'cs.SD', 'cs.SC', 'cs.SY']
for cate in cates:
preprint_list = get_paper_list(query=f"cat:{cate}", domain=f'{cate}',
latest_year=2021, end_year=1900, max_results=1000)
# extract paper abstract by paper id
files = os.listdir(tmp_list_path)
for fname in files:
if fname == '.DS_Store': continue
domain = fname.split('_')[0]
print(domain)
with open(f'{tmp_list_path}/{fname}', 'r') as f:
preprint_list = json.load(f)
outs = generate_json_file(preprint_list, tmp_file_path, domain)
|
[
"json.dump",
"os.mkdir",
"json.load",
"os.path.isdir",
"arxiv.query",
"json.dumps",
"time.sleep",
"os.listdir"
] |
[((3882, 3907), 'os.listdir', 'os.listdir', (['tmp_list_path'], {}), '(tmp_list_path)\n', (3892, 3907), False, 'import os\n'), ((314, 427), 'arxiv.query', 'arxiv.query', ([], {'query': 'query', 'start': 'i', 'sort_by': '"""submittedDate"""', 'sort_order': '"""descending"""', 'max_results': 'max_results'}), "(query=query, start=i, sort_by='submittedDate', sort_order=\n 'descending', max_results=max_results)\n", (325, 427), False, 'import arxiv\n'), ((1080, 1093), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1090, 1093), False, 'import time\n'), ((1252, 1288), 'json.dump', 'json.dump', (['outs', 'json_file'], {'indent': '(2)'}), '(outs, json_file, indent=2)\n', (1261, 1288), False, 'import json\n'), ((2626, 2649), 'os.path.isdir', 'os.path.isdir', (['tmp_path'], {}), '(tmp_path)\n', (2639, 2649), False, 'import os\n'), ((2659, 2677), 'os.mkdir', 'os.mkdir', (['tmp_path'], {}), '(tmp_path)\n', (2667, 2677), False, 'import os\n'), ((2689, 2717), 'os.path.isdir', 'os.path.isdir', (['tmp_list_path'], {}), '(tmp_list_path)\n', (2702, 2717), False, 'import os\n'), ((2727, 2750), 'os.mkdir', 'os.mkdir', (['tmp_list_path'], {}), '(tmp_list_path)\n', (2735, 2750), False, 'import os\n'), ((2762, 2790), 'os.path.isdir', 'os.path.isdir', (['tmp_file_path'], {}), '(tmp_file_path)\n', (2775, 2790), False, 'import os\n'), ((2800, 2823), 'os.mkdir', 'os.mkdir', (['tmp_file_path'], {}), '(tmp_file_path)\n', (2808, 2823), False, 'import os\n'), ((4118, 4130), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4127, 4130), False, 'import json\n'), ((1709, 1781), 'arxiv.query', 'arxiv.query', ([], {'query': '""""""', 'id_list': '[preprint_v1, preprint_v2]', 'max_results': '(2)'}), "(query='', id_list=[preprint_v1, preprint_v2], max_results=2)\n", (1720, 1781), False, 'import arxiv\n'), ((2393, 2406), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (2403, 2406), False, 'import time\n'), ((2439, 2454), 'json.dumps', 'json.dumps', (['tmp'], {}), '(tmp)\n', (2449, 2454), False, 'import json\n')]
|
import copy
from aerosandbox import *
opti = cas.Opti() # Initialize an optimization environment
def variable(init_val, lb=None, ub=None):
"""
    Initialize a scalar design variable.
:param init_val: Initial guess
:param lb: Optional lower bound
:param ub: Optional upper bound
:return: The created variable
"""
var = opti.variable()
opti.set_initial(var, init_val)
if lb is not None:
opti.subject_to(var >= lb)
if ub is not None:
opti.subject_to(var <= ub)
return var
def quasi_variable(val):
"""
    Initialize a scalar design variable that is constrained to a fixed value.
    :param val: The value to fix the variable at
    :return: The created variable
"""
var = opti.variable()
opti.set_initial(var, val)
opti.subject_to(var == val)
return var
airplane = Airplane(
name="AVL's plane.avl",
x_ref=0.02463, # CG location
y_ref=0, # CG location
z_ref=0.2239, # CG location
s_ref=12,
c_ref=1,
b_ref=15,
wings=[
Wing(
name="Main Wing",
x_le=0, # Coordinates of the wing's leading edge
y_le=0, # Coordinates of the wing's leading edge
z_le=0, # Coordinates of the wing's leading edge
symmetric=True,
chordwise_panels=1,
xsecs=[ # The wing's cross ("X") sections
WingXSec( # Root
x_le=-0.25, # Coordinates of the XSec's leading edge, relative to the wing's leading edge.
y_le=0, # Coordinates of the XSec's leading edge, relative to the wing's leading edge.
z_le=0, # Coordinates of the XSec's leading edge, relative to the wing's leading edge.
chord=1, # 0.18,
twist=4, # variable(0,-10,10), # degrees
airfoil=Airfoil(name="naca0012"),
control_surface_type='symmetric_problem',
                # Flap # Control surfaces are applied between a given XSec and the next one.
control_surface_deflection=0, # degrees
control_surface_hinge_point=0.75, # as chord fraction
spanwise_panels=16,
),
WingXSec( # Mid
x_le=-0.175,
y_le=7.5,
z_le=0.5,
chord=0.7, # 0.16,
twist=4, # variable(0,-10,10),
airfoil=Airfoil(name="naca0012"),
control_surface_type='asymmetric', # Aileron
control_surface_deflection=0,
control_surface_hinge_point=0.75
),
# WingXSec( # Tip
# x_c=0.08,#variable(0.08, 0, 0.16),
# y_c=1,#variable(1, 0.5, 1.25),
# z_c=0.1,#variable(0.1, 0, 0.2),
# chord=variable(0.08,0,1),#0.08,#variable(0.08, 0.01, 1),
# twist=0,#variable(0,-10,10),
# airfoil=Airfoil(name="naca4412"),
# )
]
),
Wing(
name="Horizontal Stabilizer",
x_le=6,
y_le=0,
z_le=0.5,
symmetric=True,
chordwise_panels=1,
xsecs=[
WingXSec( # root
x_le=-0.1,
y_le=0,
z_le=0,
chord=0.4,
twist=variable(0, -60, 60),
airfoil=Airfoil(name="naca0012"),
control_surface_type='symmetric_problem', # Elevator
control_surface_deflection=0,
control_surface_hinge_point=0.75,
spanwise_panels=10
),
WingXSec( # tip
x_le=-0.075,
y_le=2,
z_le=0,
chord=0.3,
twist=variable(0, -60, 60),
airfoil=Airfoil(name="naca0012")
)
]
),
Wing(
name="Vertical Stabilizer",
x_le=6,
y_le=0,
z_le=0.5,
symmetric=False,
chordwise_panels=1,
xsecs=[
WingXSec(
x_le=-0.1,
y_le=0,
z_le=0,
chord=0.4,
twist=0,
airfoil=Airfoil(name="naca0012"),
control_surface_type='symmetric_problem', # Rudder
control_surface_deflection=0,
control_surface_hinge_point=0.75,
spanwise_panels=10
),
WingXSec(
x_le=-0.075,
y_le=0,
z_le=1,
chord=0.3,
twist=0,
airfoil=Airfoil(name="naca0012")
)
]
)
]
)
# airplane.set_paneling_everywhere(6, 10)
ap = Casvlm1( # Set up the AeroProblem
airplane=airplane,
op_point=OperatingPoint(
velocity=65,
density=0.002377,
alpha=variable(0),
beta=quasi_variable(0),
p=quasi_variable(0),
q=quasi_variable(0),
r=quasi_variable(0),
),
opti=opti
)
# Set up the VLM optimization submatrix
ap.setup()
# Extra constraints
# Trim constraint
opti.subject_to([
ap.CL == 0.390510,
ap.airplane.wings[1].xsecs[0].twist == ap.airplane.wings[1].xsecs[1].twist,
ap.Cm == 0,
# -ap.force_total_inviscid_wind[2] == 9.81 * 0.5,
# # ap.CY == 0,
# # ap.Cl == 0,
# ap.Cm == 0,
# # ap.Cn == 0,
])
# Cmalpha constraint
# opti.subject_to(cas.gradient(ap.Cm, ap.op_point.alpha) * 180/np.pi == -1)
# Objective
# opti.minimize(-ap.force_total_inviscid_wind[0])
# Solver options
p_opts = {}
s_opts = {}
s_opts["max_iter"] = 1e6 # If you need to interrupt, just use ctrl+c
# s_opts["mu_strategy"] = "adaptive"
# s_opts["start_with_resto"] = "yes"
# s_opts["required_infeasibility_reduction"] = 0.1
opti.solver('ipopt', p_opts, s_opts)
# Solve
try:
sol = opti.solve()
except RuntimeError:
sol = opti.debug
# Create solved object
ap_sol = copy.deepcopy(ap)
ap_sol.substitute_solution(sol)
# Postprocess
ap_sol.draw()
# Answer you should get: (XFLR5)
# CL = 0.797
# CDi = 0.017
# CL/CDi = 47.211
|
[
"copy.deepcopy"
] |
[((6399, 6416), 'copy.deepcopy', 'copy.deepcopy', (['ap'], {}), '(ap)\n', (6412, 6416), False, 'import copy\n')]
|
import math
num = int(input('Enter a number to find its square root: '))
raiz = math.sqrt(num)
print('The square root of {} is {}.'.format(num, raiz))
|
[
"math.sqrt"
] |
[((80, 94), 'math.sqrt', 'math.sqrt', (['num'], {}), '(num)\n', (89, 94), False, 'import math\n')]
|
from __future__ import division
import json
from django.template import Library
register = Library()
def global_weight(criterion, report):
"""
Formula:
Global Weight = Criterion W value / Criterion Count
For example:
W Value = 1
Criterion Count = 5
Global Weight = 1 / 5 = 0.2
"""
criterion_count = criterion['parent']['count']
data = report.criterion_compare[str(criterion['parent']['id'])]
criterion_index = 0
columns = filter(lambda x: x != 'criterion_0',
data['main_table'][0])
# get column index from matris
for index, column in enumerate(columns):
if 'criterion_%s' % criterion['id'] == column:
criterion_index = index
break
w_value = data['w'][criterion_index]
return json.dumps(round(w_value / criterion_count, 4))
@register.simple_tag
def criterion_w(criterion, report, index):
"""
Get W value for given index.
"""
data = report.supplier_compare[str(criterion['id'])]
return data['w'][index - 1]
@register.simple_tag
def calculate_supplier_score(report, index):
"""
Calculate supplier score for given report and index.
"""
total = 0
for cr_id, data in report.supplier_compare.items():
criterion = list(filter(lambda x: str(x['id']) == str(cr_id),
report.get_child_criterions()))[0]
w = float(data['w'][index - 1])
weight = w * float(global_weight(criterion, report))
total += weight
return '%.3f' % total
@register.simple_tag
def get_supplier_criterion_score(report, supplier, criterion):
"""
Vikor Step 1 Calculation.
"""
result = filter(
lambda x: x['criterion_id'] == str(criterion['id']) and
x['supplier_id'] == str(supplier['id']),
report.criterion_supplier_score)
if len(result) > 0:
return result[0]['score']
return 0
@register.simple_tag
def get_supplier_normalized_criterion_score(report, supplier, criterion):
"""
Vikor Step 1 Calculation.
"""
result = filter(
lambda x: x['criterion_id'] == str(criterion['id']) and
x['supplier_id'] == str(supplier['id']),
report.criterion_supplier_score)
if len(result) > 0:
score = int(result[0]['score'])
best = best_criterion_score(report, criterion)
worst = worst_criterion_score(report, criterion)
result = float((best - score) / (best - worst))
return '%.3f' % result
return 0
@register.simple_tag
def get_supplier_weighted_criterion_score(report, supplier, criterion):
"""
Vikor Step 1 Calculation.
"""
normalized = float(get_supplier_normalized_criterion_score(
report, supplier, criterion))
w = float(global_weight(criterion, report))
result = normalized * w
return '%.3f' % result
@register.simple_tag
def best_criterion_score(report, criterion):
"""
Vikor Step 1 Calculation.
"""
max_score = 0
for item in report.criterion_supplier_score:
if item['criterion_id'] == str(criterion['id']) and \
int(item['score']) > max_score:
max_score = int(item['score'])
return max_score
@register.simple_tag
def get_si_value(report, supplier):
"""
Vikor Step 1 Calculation.
"""
total = 0
for criterion in report.get_child_criterions():
total += float(get_supplier_weighted_criterion_score(
report, supplier, criterion))
return '%.4f' % total
@register.simple_tag
def get_min_si_value(report):
"""
Vikor Step 1 Calculation.
"""
min_value = 0
for supplier in report.suppliers:
value = float(get_si_value(report, supplier))
if min_value == 0 or value < min_value:
min_value = value
return '%.4f' % min_value
@register.simple_tag
def get_max_si_value(report):
"""
Vikor Step 1 Calculation.
"""
max_value = 0
for supplier in report.suppliers:
value = float(get_si_value(report, supplier))
if value > max_value:
max_value = value
return '%.4f' % max_value
@register.simple_tag
def get_min_ri_value(report):
"""
Vikor Step 1 Calculation.
"""
min_value = 0
for supplier in report.suppliers:
value = float(get_ri_value(report, supplier))
if min_value == 0 or value < min_value:
min_value = value
return '%.4f' % min_value
@register.simple_tag
def get_max_ri_value(report):
"""
Vikor Step 1 Calculation.
"""
max_value = 0
for supplier in report.suppliers:
value = float(get_ri_value(report, supplier))
if value > max_value:
max_value = value
return '%.4f' % max_value
@register.simple_tag
def get_ri_value(report, supplier):
"""
Vikor Step 1 Calculation.
"""
max_value = 0
for criterion in report.get_child_criterions():
score = float(get_supplier_weighted_criterion_score(
report, supplier, criterion))
if score > max_value:
max_value = score
return '%.4f' % max_value
@register.simple_tag
def get_qi_value(report, supplier, weight, min_si, max_si,
min_ri, max_ri, si, ri):
"""
Vikor Step 1 Calculation.
"""
si = float(si)
ri = float(ri)
min_si = float(min_si)
max_si = float(max_si)
min_ri = float(min_ri)
max_ri = float(max_ri)
total = ((weight * (si - min_si)) / (max_si - min_si)) + \
(((1 - weight) * (ri - min_ri)) / (max_ri - min_ri))
return '%.4f' % total
@register.simple_tag
def worst_criterion_score(report, criterion):
"""
Vikor Step 1 Calculation.
"""
min_score = 0
for item in report.criterion_supplier_score:
if item['criterion_id'] == str(criterion['id']) and \
(min_score == 0 or int(item['score']) < min_score):
min_score = int(item['score'])
return min_score
register.filter('global_weight', global_weight)
|
[
"django.template.Library"
] |
[((95, 104), 'django.template.Library', 'Library', ([], {}), '()\n', (102, 104), False, 'from django.template import Library\n')]
|
"""Test searching for shows."""
import datetime
from tests.context import BaseTVDBTest
from libtvdb.model.enums import AirDay, ShowStatus
class ShowTestSuite(BaseTVDBTest):
"""Show test cases."""
def test_show_parse(self):
"""Test that a show is parsed as we'd expect."""
show = self.client().show_info(73739)
self.assertEqual(show.added, None, f"'{show.added}' was not equal to expected added 'None'")
self.assertEqual(show.added_by, None, f"'{show.added_by}' was not equal to expected added by 'None'")
self.assertEqual(show.air_day, AirDay.tuesday, f"'{show.air_day}' was not equal to expected air_day '{AirDay.tuesday}'")
self.assertEqual(show.air_time, '9:00 PM', f"'{show.air_time}' was not equal to expected air time '9:00 PM'")
self.assertEqual(show.aliases, ['Lost: Missing Pieces'], f"'{show.aliases}' was not equal to expected aliases '{['Lost: Missing Pieces']}'")
        self.assertEqual(show.banner, 'graphical/73739-g4.jpg', f"'{show.banner}' was not equal to expected banner 'graphical/73739-g4.jpg'")
self.assertEqual(show.first_aired, datetime.date(2004, 9, 22), f"'{show.first_aired}' was not equal to expected first_aired '{datetime.date(2004, 9, 22)}'")
self.assertEqual(show.genres, ['Action', 'Adventure', 'Drama', 'Science-Fiction'], f"'{show.genres}' was not equal to expected genres '{['Action', 'Adventure', 'Drama', 'Science-Fiction']}'")
self.assertEqual(show.identifier, 73739, f"'{show.identifier}' was not equal to expected identifier '73739'")
self.assertEqual(show.imdb_id, 'tt0411008', f"'{show.imdb_id}' was not equal to expected imdb_id 'tt0411008'")
        self.assertEqual(show.name, 'Lost', f"'{show.name}' was not equal to expected name 'Lost'")
self.assertEqual(show.network, 'ABC (US)', f"'{show.network}' was not equal to expected network 'ABC (US)'")
self.assertEqual(show.network_identifier, '', f"'{show.network_identifier}' was not equal to expected network_identifier ''")
self.assertEqual(show.rating, 'TV-14', f"'{show.rating}' was not equal to expected rating 'TV-14'")
self.assertEqual(show.runtime, '45', f"'{show.runtime}' was not equal to expected runtime '45'")
self.assertEqual(show.series_identifier, '24313', f"'{show.series_identifier}' was not equal to expected series_identifier '24313'")
self.assertEqual(show.site_rating, 9.1, f"'{show.site_rating}' was not equal to expected site_rating '9.1'")
self.assertEqual(show.site_rating_count, 768, f"'{show.site_rating_count}' was not equal to expected site_rating_count '768'")
        self.assertEqual(show.slug, 'lost', f"'{show.slug}' was not equal to expected slug 'lost'")
self.assertEqual(show.status, ShowStatus.ended, f"'{show.status}' was not equal to expected status '{ShowStatus.ended}'")
self.assertEqual(show.zap2it_id, 'SH672362', f"'{show.zap2it_id}' was not equal to expected zap2it_id 'SH672362'")
self.assertGreaterEqual(
show.last_updated,
datetime.datetime(2018, 11, 23, 0, 28, 59),
f"'{show.last_updated}' was not greater or equal to expected last_updated '{datetime.datetime(2018, 11, 23, 0, 28, 59)}'"
)
#pylint: disable=line-too-long
self.assertEqual(show.overview, 'After their plane, Oceanic Air flight 815, tore apart whilst thousands of miles off course, the survivors find themselves on a mysterious deserted island where they soon find out they are not alone.', f"'{show.overview}' was not equal to expected overview 'After their plane, Oceanic Air flight 815, tore apart whilst thousands of miles off course, the survivors find themselves on a mysterious deserted island where they soon find out they are not alone.'")
#pylint: enable=line-too-long
|
[
"datetime.date",
"datetime.datetime"
] |
[((1133, 1159), 'datetime.date', 'datetime.date', (['(2004)', '(9)', '(22)'], {}), '(2004, 9, 22)\n', (1146, 1159), False, 'import datetime\n'), ((3077, 3119), 'datetime.datetime', 'datetime.datetime', (['(2018)', '(11)', '(23)', '(0)', '(28)', '(59)'], {}), '(2018, 11, 23, 0, 28, 59)\n', (3094, 3119), False, 'import datetime\n'), ((1224, 1250), 'datetime.date', 'datetime.date', (['(2004)', '(9)', '(22)'], {}), '(2004, 9, 22)\n', (1237, 1250), False, 'import datetime\n'), ((3209, 3251), 'datetime.datetime', 'datetime.datetime', (['(2018)', '(11)', '(23)', '(0)', '(28)', '(59)'], {}), '(2018, 11, 23, 0, 28, 59)\n', (3226, 3251), False, 'import datetime\n')]
|
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/00_core.ipynb (unless otherwise specified).
__all__ = ['ExactGPModel', 'MultitaskGPModel', 'nv_cost', 'week_of_month']
# Cell
import gpytorch
from math import ceil
import datetime
# Cell
class ExactGPModel(gpytorch.models.ExactGP):
def __init__(self, train_x, train_y, likelihood):
super(ExactGPModel, self).__init__(train_x, train_y, likelihood)
self.mean_module = gpytorch.means.ConstantMean()
self.covar_module = gpytorch.kernels.ScaleKernel(gpytorch.kernels.PeriodicKernel())
def forward(self, x):
mean_x = self.mean_module(x)
covar_x = self.covar_module(x)
return gpytorch.distributions.MultivariateNormal(mean_x, covar_x)
# Cell
class MultitaskGPModel(gpytorch.models.ExactGP):
def __init__(self, train_x, train_y, likelihood, num_tasks):
super(MultitaskGPModel, self).__init__(train_x, train_y, likelihood)
self.mean_module = gpytorch.means.MultitaskMean(
gpytorch.means.ConstantMean(), num_tasks=num_tasks
)
self.covar_module = gpytorch.kernels.MultitaskKernel(
gpytorch.kernels.RBFKernel(), num_tasks=num_tasks, rank=1
)
def forward(self, x):
mean_x = self.mean_module(x)
covar_x = self.covar_module(x)
return gpytorch.distributions.MultitaskMultivariateNormal(mean_x, covar_x)
# Cell
def nv_cost(q, y, cu, co):
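    # newsvendor cost: co per unit over-ordered (q > y), cu per unit of unmet demand (q < y)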
if q>y:
cost = (q-y)*co
else:
cost = (y-q)*cu
return cost
# Cell
def week_of_month(dt_str):
""" Returns the week of the month for the specified date.
"""
dt = datetime.datetime.strptime(dt_str, '%Y-%m-%d')
first_day = dt.replace(day=1)
dom = dt.day
adjusted_dom = dom + first_day.weekday()
return int(ceil(adjusted_dom/7.0))
|
[
"gpytorch.distributions.MultivariateNormal",
"math.ceil",
"gpytorch.distributions.MultitaskMultivariateNormal",
"gpytorch.kernels.RBFKernel",
"gpytorch.kernels.PeriodicKernel",
"datetime.datetime.strptime",
"gpytorch.means.ConstantMean"
] |
[((1629, 1675), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['dt_str', '"""%Y-%m-%d"""'], {}), "(dt_str, '%Y-%m-%d')\n", (1655, 1675), False, 'import datetime\n'), ((437, 466), 'gpytorch.means.ConstantMean', 'gpytorch.means.ConstantMean', ([], {}), '()\n', (464, 466), False, 'import gpytorch\n'), ((677, 735), 'gpytorch.distributions.MultivariateNormal', 'gpytorch.distributions.MultivariateNormal', (['mean_x', 'covar_x'], {}), '(mean_x, covar_x)\n', (718, 735), False, 'import gpytorch\n'), ((1326, 1393), 'gpytorch.distributions.MultitaskMultivariateNormal', 'gpytorch.distributions.MultitaskMultivariateNormal', (['mean_x', 'covar_x'], {}), '(mean_x, covar_x)\n', (1376, 1393), False, 'import gpytorch\n'), ((1790, 1814), 'math.ceil', 'ceil', (['(adjusted_dom / 7.0)'], {}), '(adjusted_dom / 7.0)\n', (1794, 1814), False, 'from math import ceil\n'), ((524, 557), 'gpytorch.kernels.PeriodicKernel', 'gpytorch.kernels.PeriodicKernel', ([], {}), '()\n', (555, 557), False, 'import gpytorch\n'), ((1005, 1034), 'gpytorch.means.ConstantMean', 'gpytorch.means.ConstantMean', ([], {}), '()\n', (1032, 1034), False, 'import gpytorch\n'), ((1140, 1168), 'gpytorch.kernels.RBFKernel', 'gpytorch.kernels.RBFKernel', ([], {}), '()\n', (1166, 1168), False, 'import gpytorch\n')]
|
import unittest
import pickle
import tempfile
import os
import math
from datetime import datetime
import numpy as np
import quaternion
import cv2
from visnav.algo.model import Camera
from visnav.algo.odometry import VisualOdometry, Pose
from visnav.algo import tools
class TestOdometry(unittest.TestCase):
def setUp(self, verbose=False):
self.cam = get_cam()
params = {
'min_keypoint_dist': 10,
'min_inliers': 12,
'min_2d2d_inliers': 24,
}
self.odo = VisualOdometry(self.cam, self.cam.width/4, verbose=verbose, pause=False,
use_scale_correction=False, est_cam_pose=False, **params)
def tearDown(self):
pass
def assertQuatAlmostEqual(self, quat0, quat1, delta=1e-4, msg=None):
if quat0 is None and quat1 is None:
return
diff = math.degrees(tools.angle_between_q(quat0, quat1))
self.assertAlmostEqual(0, diff, delta=delta,
msg=None if msg is None else (msg + ': angle[deg] %f > %f' % (diff, delta)))
def assertArrayAlmostEqual(self, arr0, arr1, delta=1e-7, ord=np.inf, msg=None):
if arr0 is None and arr1 is None:
return
norm = np.linalg.norm(np.array(arr0)-np.array(arr1), ord=ord)
self.assertAlmostEqual(0, norm, delta=delta,
msg=None if msg is None else (msg + ': norm(%s) %f > %f' % (ord, norm, delta)))
def assertPoseAlmostEqual(self, pose0: Pose, pose1: Pose, delta_v=1e-7, delta_q=1e-4, msg=None):
if pose0 is None and pose1 is None:
return
self.assertArrayAlmostEqual(pose0.loc, pose1.loc, delta=delta_v, ord=2,
msg=None if msg is None else (msg + ': loc %s vs %s'%(pose0.loc, pose1.loc)))
self.assertQuatAlmostEqual(pose0.quat, pose1.quat, delta=delta_q,
msg=None if msg is None else (msg + ': quat %s vs %s'%(pose0.quat, pose1.quat)))
def assertOdomResultAlmostEqual(self, result0, result1):
pose0, bias_sds0, scale_sd0 = result0
pose1, bias_sds1, scale_sd1 = result1
msg = '%s deviate(s) too much from the expected value(s)'
self.assertPoseAlmostEqual(pose0, pose1, delta_v=0.02, delta_q=1, msg=msg%'estimated poses')
self.assertArrayAlmostEqual(bias_sds0, bias_sds1, delta=0.1, ord=np.inf, msg=msg%'error estimates')
self.assertAlmostEqual(scale_sd0, scale_sd1, delta=0.01, msg=msg%'scale error estimate')
def test_rotating_object(self, inputs=None, results=None):
pickle_file = os.path.join(os.path.dirname(__file__), 'data', 'test_rotating_object.pickle')
record = inputs is not None and results is None
if not record and results is None:
inputs, results = self._load_recording(pickle_file)
else:
results = []
cam_q = quaternion.one
orig_time = datetime.strptime('2020-07-01 15:42:00', '%Y-%m-%d %H:%M:%S').timestamp()
for i, (img, cam_obj_v, cam_obj_q) in enumerate(inputs):
time = datetime.fromtimestamp(orig_time + i*60)
prior = Pose(cam_obj_v, cam_obj_q, np.ones((3,)) * 0.1, np.ones((3,)) * 0.01)
res = self.odo.process(img, time, prior, cam_q)
if record:
results.append(res)
elif 0:
self.assertOdomResultAlmostEqual(results[i], res)
if i > 1 and 0:
self.assertIsNotNone(res[0], msg='failed to get pose estimate')
self.assertPoseAlmostEqual(prior, res[0], delta_v=0.1, delta_q=10,
msg='estimated pose deviates too much from the real one')
if record:
self._save_recording(pickle_file, inputs, results)
def _save_recording(self, fname, inputs, results):
tf = tempfile.NamedTemporaryFile(suffix='.png', delete=False)
tf.close()
for i in range(len(inputs)):
cv2.imwrite(tf.name, inputs[i][0], (cv2.IMWRITE_PNG_COMPRESSION, 9))
with open(tf.name, 'br') as fh:
inputs[i][0] = fh.read()
os.unlink(tf.name)
with open(fname, 'wb') as fh:
pickle.dump((inputs, results), fh)
def _load_recording(self, fname):
with open(fname, 'rb') as fh:
inputs, results = pickle.load(fh)
tf = tempfile.NamedTemporaryFile(suffix='.png', delete=False)
tf.close()
for i in range(len(inputs)):
with open(tf.name, 'wb') as fh:
fh.write(inputs[i][0])
inputs[i][0] = cv2.imread(tf.name, cv2.IMREAD_GRAYSCALE)
os.unlink(tf.name)
return inputs, results
def get_rot_imgs():
pass
def get_cam():
common_kwargs_worst = {
'sensor_size': (2048 * 0.0022, 1944 * 0.0022),
'quantum_eff': 0.30,
'px_saturation_e': 2200, # snr_max = 20*log10(sqrt(sat_e)) dB
'lambda_min': 350e-9, 'lambda_eff': 580e-9, 'lambda_max': 800e-9,
'dark_noise_mu': 40, 'dark_noise_sd': 6.32, 'readout_noise_sd': 15,
# dark_noise_sd should be sqrt(dark_noise_mu)
        'emp_coef': 1, # dynamic range = 20*log10(sat_e/readout_noise)
'exclusion_angle_x': 55,
'exclusion_angle_y': 90,
}
common_kwargs_best = dict(common_kwargs_worst)
common_kwargs_best.update({
'quantum_eff': 0.4,
'px_saturation_e': 3500,
'dark_noise_mu': 25, 'dark_noise_sd': 5, 'readout_noise_sd': 5,
})
common_kwargs = common_kwargs_best
return Camera(
2048, # width in pixels
1944, # height in pixels
7.7, # x fov in degrees (could be 6 & 5.695, 5.15 & 4.89, 7.7 & 7.309)
7.309, # y fov in degrees
f_stop=5, # TODO: put better value here
point_spread_fn=0.50, # ratio of brightness in center pixel
scattering_coef=2e-10, # affects strength of haze/veil when sun shines on the lens
**common_kwargs
)
if __name__ == '__main__':
import sys
if len(sys.argv) > 1 and sys.argv[1] == 'record':
from visnav.algo.model import SystemModel
from visnav.missions.didymos import DidymosSystemModel
from visnav.render.render import RenderEngine
from visnav.settings import *
sm = DidymosSystemModel(use_narrow_cam=False, target_primary=False, hi_res_shape_model=True)
re = RenderEngine(sm.cam.width, sm.cam.height, antialias_samples=0)
re.set_frustum(sm.cam.x_fov, sm.cam.y_fov, 0.05, 2)
obj = sm.asteroid.real_shape_model
obj_idx = re.load_object(obj)
light = np.array([1, 0, -0.5])
light /= np.linalg.norm(light)
cam_ast_v0 = np.array([0, 0, -sm.min_med_distance * 0.7])
cam_ast_q0 = quaternion.one
dq = tools.angleaxis_to_q((math.radians(1), 0, 1, 0))
inputs = []
for i in range(60):
cam_ast_v = cam_ast_v0
cam_ast_q = dq**i * cam_ast_q0
image = re.render(obj_idx, cam_ast_v, cam_ast_q, light, gamma=1.8, get_depth=False)
cam_ast_cv_v = tools.q_times_v(SystemModel.cv2gl_q, cam_ast_v)
cam_ast_cv_q = SystemModel.cv2gl_q * cam_ast_q * SystemModel.cv2gl_q.conj()
inputs.append([image, cam_ast_cv_v, cam_ast_cv_q])
if 0:
for image, _, _ in inputs:
cv2.imshow('t', cv2.resize(image, None, fx=0.5, fy=0.5))
cv2.waitKey()
else:
t = TestOdometry()
t.setUp(verbose=True)
t.test_rotating_object(inputs=inputs)
else:
unittest.main()
|
[
"pickle.dump",
"os.unlink",
"numpy.ones",
"pickle.load",
"numpy.linalg.norm",
"unittest.main",
"visnav.render.render.RenderEngine",
"math.radians",
"cv2.imwrite",
"os.path.dirname",
"visnav.algo.odometry.VisualOdometry",
"visnav.missions.didymos.DidymosSystemModel",
"cv2.resize",
"cv2.waitKey",
"datetime.datetime.strptime",
"datetime.datetime.fromtimestamp",
"visnav.algo.tools.angle_between_q",
"tempfile.NamedTemporaryFile",
"visnav.algo.model.SystemModel.cv2gl_q.conj",
"visnav.algo.model.Camera",
"cv2.imread",
"numpy.array",
"visnav.algo.tools.q_times_v"
] |
[((5755, 5860), 'visnav.algo.model.Camera', 'Camera', (['(2048)', '(1944)', '(7.7)', '(7.309)'], {'f_stop': '(5)', 'point_spread_fn': '(0.5)', 'scattering_coef': '(2e-10)'}), '(2048, 1944, 7.7, 7.309, f_stop=5, point_spread_fn=0.5,\n scattering_coef=2e-10, **common_kwargs)\n', (5761, 5860), False, 'from visnav.algo.model import Camera\n'), ((551, 687), 'visnav.algo.odometry.VisualOdometry', 'VisualOdometry', (['self.cam', '(self.cam.width / 4)'], {'verbose': 'verbose', 'pause': '(False)', 'use_scale_correction': '(False)', 'est_cam_pose': '(False)'}), '(self.cam, self.cam.width / 4, verbose=verbose, pause=False,\n use_scale_correction=False, est_cam_pose=False, **params)\n', (565, 687), False, 'from visnav.algo.odometry import VisualOdometry, Pose\n'), ((3996, 4052), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'suffix': '""".png"""', 'delete': '(False)'}), "(suffix='.png', delete=False)\n", (4023, 4052), False, 'import tempfile\n'), ((4289, 4307), 'os.unlink', 'os.unlink', (['tf.name'], {}), '(tf.name)\n', (4298, 4307), False, 'import os\n'), ((4540, 4596), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'suffix': '""".png"""', 'delete': '(False)'}), "(suffix='.png', delete=False)\n", (4567, 4596), False, 'import tempfile\n'), ((4823, 4841), 'os.unlink', 'os.unlink', (['tf.name'], {}), '(tf.name)\n', (4832, 4841), False, 'import os\n'), ((6525, 6616), 'visnav.missions.didymos.DidymosSystemModel', 'DidymosSystemModel', ([], {'use_narrow_cam': '(False)', 'target_primary': '(False)', 'hi_res_shape_model': '(True)'}), '(use_narrow_cam=False, target_primary=False,\n hi_res_shape_model=True)\n', (6543, 6616), False, 'from visnav.missions.didymos import DidymosSystemModel\n'), ((6627, 6689), 'visnav.render.render.RenderEngine', 'RenderEngine', (['sm.cam.width', 'sm.cam.height'], {'antialias_samples': '(0)'}), '(sm.cam.width, sm.cam.height, antialias_samples=0)\n', (6639, 6689), False, 'from visnav.render.render import RenderEngine\n'), ((6853, 6875), 'numpy.array', 'np.array', (['[1, 0, -0.5]'], {}), '([1, 0, -0.5])\n', (6861, 6875), True, 'import numpy as np\n'), ((6894, 6915), 'numpy.linalg.norm', 'np.linalg.norm', (['light'], {}), '(light)\n', (6908, 6915), True, 'import numpy as np\n'), ((6938, 6982), 'numpy.array', 'np.array', (['[0, 0, -sm.min_med_distance * 0.7]'], {}), '([0, 0, -sm.min_med_distance * 0.7])\n', (6946, 6982), True, 'import numpy as np\n'), ((7856, 7871), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7869, 7871), False, 'import unittest\n'), ((928, 963), 'visnav.algo.tools.angle_between_q', 'tools.angle_between_q', (['quat0', 'quat1'], {}), '(quat0, quat1)\n', (949, 963), False, 'from visnav.algo import tools\n'), ((2708, 2733), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2723, 2733), False, 'import os\n'), ((3198, 3240), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['(orig_time + i * 60)'], {}), '(orig_time + i * 60)\n', (3220, 3240), False, 'from datetime import datetime\n'), ((4124, 4192), 'cv2.imwrite', 'cv2.imwrite', (['tf.name', 'inputs[i][0]', '(cv2.IMWRITE_PNG_COMPRESSION, 9)'], {}), '(tf.name, inputs[i][0], (cv2.IMWRITE_PNG_COMPRESSION, 9))\n', (4135, 4192), False, 'import cv2\n'), ((4362, 4396), 'pickle.dump', 'pickle.dump', (['(inputs, results)', 'fh'], {}), '((inputs, results), fh)\n', (4373, 4396), False, 'import pickle\n'), ((4508, 4523), 'pickle.load', 'pickle.load', (['fh'], {}), '(fh)\n', (4519, 4523), False, 'import pickle\n'), ((7340, 7387), 
'visnav.algo.tools.q_times_v', 'tools.q_times_v', (['SystemModel.cv2gl_q', 'cam_ast_v'], {}), '(SystemModel.cv2gl_q, cam_ast_v)\n', (7355, 7387), False, 'from visnav.algo import tools\n'), ((1309, 1323), 'numpy.array', 'np.array', (['arr0'], {}), '(arr0)\n', (1317, 1323), True, 'import numpy as np\n'), ((1324, 1338), 'numpy.array', 'np.array', (['arr1'], {}), '(arr1)\n', (1332, 1338), True, 'import numpy as np\n'), ((3036, 3097), 'datetime.datetime.strptime', 'datetime.strptime', (['"""2020-07-01 15:42:00"""', '"""%Y-%m-%d %H:%M:%S"""'], {}), "('2020-07-01 15:42:00', '%Y-%m-%d %H:%M:%S')\n", (3053, 3097), False, 'from datetime import datetime\n'), ((4772, 4813), 'cv2.imread', 'cv2.imread', (['tf.name', 'cv2.IMREAD_GRAYSCALE'], {}), '(tf.name, cv2.IMREAD_GRAYSCALE)\n', (4782, 4813), False, 'import cv2\n'), ((7056, 7071), 'math.radians', 'math.radians', (['(1)'], {}), '(1)\n', (7068, 7071), False, 'import math\n'), ((7450, 7476), 'visnav.algo.model.SystemModel.cv2gl_q.conj', 'SystemModel.cv2gl_q.conj', ([], {}), '()\n', (7474, 7476), False, 'from visnav.algo.model import SystemModel\n'), ((7689, 7702), 'cv2.waitKey', 'cv2.waitKey', ([], {}), '()\n', (7700, 7702), False, 'import cv2\n'), ((3287, 3300), 'numpy.ones', 'np.ones', (['(3,)'], {}), '((3,))\n', (3294, 3300), True, 'import numpy as np\n'), ((3308, 3321), 'numpy.ones', 'np.ones', (['(3,)'], {}), '((3,))\n', (3315, 3321), True, 'import numpy as np\n'), ((7631, 7670), 'cv2.resize', 'cv2.resize', (['image', 'None'], {'fx': '(0.5)', 'fy': '(0.5)'}), '(image, None, fx=0.5, fy=0.5)\n', (7641, 7670), False, 'import cv2\n')]
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="books_info-bsodhi",
version="0.0.2",
author="<NAME>",
author_email="<EMAIL>",
description="Books data scraper.",
long_description="Scrapes books and articles informatio from Goodreads and Google Scholar",
long_description_content_type="text/markdown",
url="https://github.com/bsodhi/books_scraper",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=["requests",
"cryptography",
"pyOpenSSL",
"lxml",
"argparse",
"beautifulsoup4",
"fake_useragent",
"scholarly",
"selenium", ],
entry_points={
'console_scripts': [
'bscrape=books_scraper.scraper:main',
],
},
)
|
[
"setuptools.find_packages"
] |
[((456, 482), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (480, 482), False, 'import setuptools\n')]
|
"""
utils.py
@author: developmentseed
Functions used to generate a list of tiles via recursion
"""
from os import path as op
import json
from shapely.geometry import Polygon
from pygeotile.tile import Tile
def _get_quadrant_tiles(tile):
"""Return indicies of tiles at one higher zoom (in google tiling scheme)"""
ul = (tile.google[0] * 2, tile.google[1] * 2)
return [Tile.from_google(ul[0], ul[1], tile.zoom + 1), # UL
Tile.from_google(ul[0], ul[1] + 1, tile.zoom + 1), # LL
Tile.from_google(ul[0] + 1, ul[1], tile.zoom + 1), # UR
Tile.from_google(ul[0] + 1, ul[1] + 1, tile.zoom + 1)] # LR
def _calc_overlap(geom1, geom2):
"""Return area overlap"""
return geom1.intersection(geom2).area
def load_geojson(geojson_fpath):
"""Load geojson and return all contained polygons.
Parameters:
----------
geojson_fpath: str
        Filepath to geojson containing boundaries.
Returns:
-------
bounds: list
List of geometries read from geojson file."""
if not op.exists(geojson_fpath):
raise FileNotFoundError('{} does not exist'.format(geojson_fpath))
    if op.splitext(geojson_fpath)[1] not in ['.geojson', '.json']:
raise ValueError('{} should be a .geojson or .json file'.format(geojson_fpath))
bounds = None
with open(geojson_fpath, 'r') as geojson_f:
raw_json = json.loads(geojson_f.read())
features = raw_json['features']
bounds = [feat['geometry'] for feat in features
if feat['geometry']['type'] in ['Polygon', 'MultiPolygon']]
return bounds
def format_tile(tile, tile_format, format_str='{x} {y} {z}'):
"""Convert tile to necessary format.
Parameters
----------
tile: pygeotile.tile.Tile
Tile object to be formatted.
tile_format: str
Desired tile format. `google`, `tms`, or `quad_tree`
format_str: str
String to guide formatting. Only used for `google` or `tms`
(as quad_tree is one value).
Default: "{x} {y} {z}". Example: "{z}-{x}-{y}"
"""
if tile_format == 'google':
td = {key: val for key, val
in zip(['x', 'y', 'z'], list(tile.google) + [tile.zoom])}
return format_str.format(**td)
elif tile_format == 'tms':
td = {key: val for key, val
in zip(['x', 'y', 'z'], list(tile.tms) + [tile.zoom])}
return format_str.format(**td)
elif tile_format == 'quad_tree':
return tile.quad_tree
else:
raise ValueError('`tile_format`: {} not recognized'.format(tile_format))
def get_overlap_child_tiles(tile, roi_geom, completely_contained=False):
"""Find all children tiles that overlap a boundary
Parameters
----------
tile: pygeotile.tile.Tile
Tile that is checked for overlap with `roi_geom`.
roi_geom: shapely.geometry.shape
Boundary of region-of-interest.
completely_contained: bool
Whether or not a tile is completely contained in the boundary.
If a tile is found to have 100% overlap with boundary, set to `True`
and algorithm can avoid calculating overlap for all future child tiles.
Default False.
Returns:
-------
return_tiles: list of pygeotile.tile.Tile, bool
Tiles that are children of `tile` and overlap the boundary
"""
return_tiles = []
quad_tiles = _get_quadrant_tiles(tile) # Compute four contained tiles
# If sub-tiles are completely contained within boundary, no need to compute overlap
if completely_contained:
return [[qt, True] for qt in quad_tiles]
# For each tile, compute overlap with ROI boundary
for qt in quad_tiles:
ll, ur = qt.bounds # Get lower-left and upper-right points
tile_pts = ((ll[1], ll[0]), (ur[1], ll[0]),
(ur[1], ur[0]), (ll[1], ur[0]))
tile_polygon = Polygon(tile_pts)
# Calculate overlap of tile with ROI
overlap_area = _calc_overlap(roi_geom, tile_polygon)
# If 100% overlap, indicate this to avoid checking overlap in future
if overlap_area == tile_polygon.area:
return_tiles.append([qt, True])
elif overlap_area > 0:
return_tiles.append([qt, False])
return return_tiles
|
[
"pygeotile.tile.Tile.from_google",
"os.path.exists",
"os.path.splitext",
"shapely.geometry.Polygon"
] |
[((385, 430), 'pygeotile.tile.Tile.from_google', 'Tile.from_google', (['ul[0]', 'ul[1]', '(tile.zoom + 1)'], {}), '(ul[0], ul[1], tile.zoom + 1)\n', (401, 430), False, 'from pygeotile.tile import Tile\n'), ((459, 508), 'pygeotile.tile.Tile.from_google', 'Tile.from_google', (['ul[0]', '(ul[1] + 1)', '(tile.zoom + 1)'], {}), '(ul[0], ul[1] + 1, tile.zoom + 1)\n', (475, 508), False, 'from pygeotile.tile import Tile\n'), ((533, 582), 'pygeotile.tile.Tile.from_google', 'Tile.from_google', (['(ul[0] + 1)', 'ul[1]', '(tile.zoom + 1)'], {}), '(ul[0] + 1, ul[1], tile.zoom + 1)\n', (549, 582), False, 'from pygeotile.tile import Tile\n'), ((607, 660), 'pygeotile.tile.Tile.from_google', 'Tile.from_google', (['(ul[0] + 1)', '(ul[1] + 1)', '(tile.zoom + 1)'], {}), '(ul[0] + 1, ul[1] + 1, tile.zoom + 1)\n', (623, 660), False, 'from pygeotile.tile import Tile\n'), ((1085, 1109), 'os.path.exists', 'op.exists', (['geojson_fpath'], {}), '(geojson_fpath)\n', (1094, 1109), True, 'from os import path as op\n'), ((3943, 3960), 'shapely.geometry.Polygon', 'Polygon', (['tile_pts'], {}), '(tile_pts)\n', (3950, 3960), False, 'from shapely.geometry import Polygon\n'), ((1197, 1223), 'os.path.splitext', 'op.splitext', (['geojson_fpath'], {}), '(geojson_fpath)\n', (1208, 1223), True, 'from os import path as op\n')]
|
# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
import codecs
import re
from .process import load_word_list
from .process import load_word_pattern
from .process import remove_urls_and_tags
from .process import remove_escapes_and_non_printable
smiles = load_word_list("emoticons-smile.dat")
laughs = load_word_list("emoticons-laugh.dat")
winks = load_word_list("emoticons-wink.dat")
cheekys = load_word_list("emoticons-wink.dat")
kisses = load_word_list("emoticons-kiss.dat")
happycrys = load_word_list("emoticons-happy-cry.dat")
crys = load_word_list("emoticons-cry.dat")
sads = load_word_list("emoticons-sad.dat")
shocks = load_word_list("emoticons-shock.dat")
sceptics = load_word_list("emoticons-sceptical.dat")
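# fwd_regex matches "eyes-first" emoticons such as :-) or ;D; bck_regex below matches the mirrored "mouth-first" forms such as (-: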
fwd_regex = "[:;8BX]['’`]{0,1}[-=^oc]{0,2}[DPO0J3ox,Þþb@*\\|/()<>{}\[\]]{1,2}"
fwd_re = re.compile(fwd_regex)
bck_regex = "[@*\\|/()<>{}\[\]]{1,2}[-=^]{0,2}['’`]{0,1}[:;]"
bck_re = re.compile(bck_regex)
"""
texturizer.emoticons: Emoticon Recognition Text Features
The functions in this library will add columns to a dataframe that indicate
whether there are emoticons in certain columns of text, and whether those
emoticons represent one of the more common emotions.
NOTE: In developing these regexes I have deliberately ignored certain emoticons
because of the likelihood of false positive matches in text containing brackets.
For example, emoticons such as 8) or (B will not be matched.
To avoid matching characters inside document markup language tags there is a
rudimentary regex-based tag removal and an unescaped version of the text that is
expected to have been generated in the initial simple text function run by the program.
This will remove URLs and HTML tags before trying to match emoticons.
Some references used when considering which emoticons to include:
https://www.unglobalpulse.org/2014/10/emoticon-use-in-arabic-spanish-and-english-tweets/
https://www.researchgate.net/publication/266269913_From_Emoticon_to_Universal_Symbolic_Signs_Can_Written_Language_Survive_in_Cyberspace
https://www.sciencedirect.com/science/article/abs/pii/S0950329317300939
https://www.semanticscholar.org/paper/An-Approach-towards-Text-to-Emoticon-Conversion-and-Jha/3b81505fa7fec81563b2dafae3939fa1b07f3a98
https://www.qualitative-research.net/index.php/fqs/article/view/175/391
https://www.researchgate.net/publication/221622114_M_Textual_Affect_Sensing_for_Sociable_and_Expressive_Online_Communication
"""
########################################################################################
def add_text_emoticon_features(df, columns):
"""
Given a pandas dataframe and a set of column names.
Add features that detect the presence of emoticons.
"""
rez = df.copy()
for col in columns:
rez = add_emoticon_features(rez, col)
return rez
########################################################################################
def add_emoticon_features(df, col):
"""
Given a pandas dataframe and a column name.
Check for emoticons in the column and add a set of features
that indicate both the presence and emotional flavour of the emoticon.
"""
def cal_features(x, col):
emos = 0
smiley = 0
wink = 0
kiss = 0
happycry = 0
laugh = 0
cheeky = 0
crying = 0
sad = 0
shock = 0
sceptic = 0
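        # x[col] == x[col] is False only when the value is NaN, so this skips missing text entries.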
if x[col]==x[col]:
text = remove_urls_and_tags( remove_escapes_and_non_printable( x[col] ) )
matches = fwd_re.findall(text)
bck_matches = bck_re.findall(text)
if len(matches)>0 or len(bck_matches)>0:
matches.extend(bck_matches)
emos = len(matches)
if set(matches).intersection( smiles ):
smiley = 1
if set(matches).intersection( crys ):
crying = 1
if set(matches).intersection( winks ):
wink = 1
if set(matches).intersection( kisses ):
kiss = 1
if set(matches).intersection( sads ):
sad = 1
if set(matches).intersection( shocks ):
shock = 1
if set(matches).intersection( sceptics ):
sceptic = 1
if set(matches).intersection( laughs ):
laugh = 1
if set(matches).intersection( cheekys ):
cheeky = 1
if set(matches).intersection( happycrys ):
happycry = 1
pos = smiley + wink + kiss + happycry + laugh + cheeky
neg = crying + sad + shock + sceptic
sent = pos - neg
return emos,smiley,wink,kiss,happycry,laugh,cheeky,crying,sad,shock,sceptic,pos,neg,sent
df[ get_emoticon_col_list(col) ] = df.apply(cal_features, col=col, axis=1, result_type="expand")
return df
########################################################################################
def get_emoticon_col_list(col):
return [col+'_emoticons', col+'_emo_smiley', col+'_emo_wink', col+'_emo_kiss', col+'_emo_happycry', col+'_emo_laugh', col+'_emo_cheeky', col+'_emo_cry', col+'_emo_sad', col+'_emo_shock', col+'_emo_sceptic', col+'_emo_pos', col+'_emo_neg', col+'_emo_sentiment']
|
[
"re.compile"
] |
[((823, 844), 're.compile', 're.compile', (['fwd_regex'], {}), '(fwd_regex)\n', (833, 844), False, 'import re\n'), ((916, 937), 're.compile', 're.compile', (['bck_regex'], {}), '(bck_regex)\n', (926, 937), False, 'import re\n')]
|
from collections import namedtuple
import itertools
import logging
import os
import time
from multiprocessing.dummy import Pool
import multiprocessing
import uuid
from elasticsearch import helpers
import elasticsearch
from elasticsearch_dsl import Index
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.db import connection, transaction
import requests
from imageledger import models, search
console = logging.StreamHandler()
log = logging.getLogger(__name__)
log.addHandler(console)
log.setLevel(logging.INFO)
MAX_CONNECTION_RETRIES = 50
RETRY_WAIT = 5 # Number of seconds to wait before retrying
DEFAULT_CHUNK_SIZE = 1000
DEFAULT_NUM_ITERATIONS = 10000
DEFAULT_NUM_THREADS = 4
class Command(BaseCommand):
can_import_settings = True
requires_migrations_checks = True
def add_arguments(self, parser):
parser.add_argument("--verbose",
action="store_true",
default=False,
help="Be very chatty and run logging at DEBUG")
parser.add_argument("--chunk-size",
dest="chunk_size",
default=DEFAULT_CHUNK_SIZE,
type=int,
help="The number of records to batch process at once")
parser.add_argument("--num-iterations",
dest="num_iterations",
default=DEFAULT_NUM_ITERATIONS,
type=int,
help="The number of times to loop through `chunk_size` records")
parser.add_argument("--num-threads",
dest="num_threads",
default=DEFAULT_NUM_THREADS,
type=int,
help="The number of threads to start up at once")
def handle(self, *args, **options):
if options['verbose'] or settings.DEBUG:
log.setLevel(logging.DEBUG)
self.index_all_images(chunk_size=options['chunk_size'],
num_iterations=options['num_iterations'],
num_threads=options['num_threads']
)
def index_all_images(self, chunk_size=DEFAULT_CHUNK_SIZE, num_iterations=DEFAULT_NUM_ITERATIONS,
num_threads=DEFAULT_NUM_THREADS):
"""Index every record in the database with a server-side cursor"""
index = Index(settings.ELASTICSEARCH_INDEX)
if not index.exists():
log.info("Creating new index %s", settings.ELASTICSEARCH_INDEX)
search.Image.init()
mapping = search.Image._doc_type.mapping
mapping.save(settings.ELASTICSEARCH_INDEX)
log.info("Done creating new index")
with Pool(num_threads) as pool:
starts = [i * chunk_size for i in range(0, num_iterations)]
pool.starmap(do_index, zip(starts, itertools.repeat(chunk_size, len(starts))))
def do_index(start, chunk_size):
end = start + chunk_size + 1
batches = []
retries = 0
try:
es = search.init(timeout=2000)
if not settings.DEBUG:
es.cluster.health(wait_for_status='green', request_timeout=2000)
except (requests.exceptions.ReadTimeout, elasticsearch.exceptions.TransportError) as e:
log.warn(e)
log.warn("Skipping batch and retrying after wait")
time.sleep(RETRY_WAIT)
return
log.debug("Starting index in range from %d to %d...", start, end)
qs = models.Image.objects.filter(removed_from_source=False, id__gt=start).order_by('id')[0:chunk_size]
for db_image in server_cursor_query(qs, chunk_size=chunk_size):
log.debug("Indexing database record %s", db_image.identifier)
image = search.db_image_to_index(db_image)
try:
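            # Once the buffer reaches chunk_size, push it to Elasticsearch and reset it; otherwise add the current document to the buffer.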
if len(batches) >= chunk_size:
if not settings.DEBUG:
log.debug("Waiting for green status...")
es.cluster.health(wait_for_status='green', request_timeout=2000)
helpers.bulk(es, batches)
log.debug("Pushed batch of %d records to ES", len(batches))
batches = [] # Clear the batch size
else:
batches.append(image.to_dict(include_meta=True))
except (requests.exceptions.ReadTimeout,
elasticsearch.exceptions.TransportError,
elasticsearch.helpers.BulkIndexError) as e:
if retries < MAX_CONNECTION_RETRIES:
log.warn("Got timeout: retrying with %d retries remaining", MAX_CONNECTION_RETRIES - retries)
retries += 1
time.sleep(RETRY_WAIT)
else:
raise
helpers.bulk(es, batches)
def server_cursor_query(queryset, cursor_id=0, chunk_size=DEFAULT_CHUNK_SIZE):
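    # Opening a throwaway cursor appears intended to ensure Django has established the underlying DB connection before connection.connection is used below.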
connection.cursor()
compiler = queryset.query.get_compiler(using=queryset.db)
sql, params = compiler.as_sql()
model = compiler.klass_info['model']
select_fields = compiler.klass_info['select_fields']
fields = [field[0].target.attname
for field in compiler.select[select_fields[0]:select_fields[-1] + 1]]
cursor_name = 'cursor-large-%d' % cursor_id
cursor = connection.connection.cursor(name=cursor_name)
with transaction.atomic(savepoint=False):
cursor.execute(sql, params)
while True:
rows = cursor.fetchmany(chunk_size)
if not rows:
break
for row in rows:
DBObj = namedtuple('DBObj', fields)
obj = DBObj(*row[select_fields[0]:select_fields[-1] + 1])
yield obj
def grouper_it(n, iterable):
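    # Lazily yield iterable in chunks of size n; each chunk is itself a lazy iterator (islice/chain), so nothing is materialized up front.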
it = iter(iterable)
while True:
chunk_it = itertools.islice(it, n)
try:
first_el = next(chunk_it)
except StopIteration:
return
yield itertools.chain((first_el,), chunk_it)
|
[
"imageledger.models.Image.objects.filter",
"multiprocessing.dummy.Pool",
"imageledger.search.init",
"elasticsearch.helpers.bulk",
"logging.StreamHandler",
"imageledger.search.db_image_to_index",
"imageledger.search.Image.init",
"django.db.connection.cursor",
"time.sleep",
"collections.namedtuple",
"django.db.connection.connection.cursor",
"itertools.islice",
"elasticsearch_dsl.Index",
"itertools.chain",
"django.db.transaction.atomic",
"logging.getLogger"
] |
[((467, 490), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (488, 490), False, 'import logging\n'), ((497, 524), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (514, 524), False, 'import logging\n'), ((4844, 4869), 'elasticsearch.helpers.bulk', 'helpers.bulk', (['es', 'batches'], {}), '(es, batches)\n', (4856, 4869), False, 'from elasticsearch import helpers\n'), ((4954, 4973), 'django.db.connection.cursor', 'connection.cursor', ([], {}), '()\n', (4971, 4973), False, 'from django.db import connection, transaction\n'), ((5356, 5402), 'django.db.connection.connection.cursor', 'connection.connection.cursor', ([], {'name': 'cursor_name'}), '(name=cursor_name)\n', (5384, 5402), False, 'from django.db import connection, transaction\n'), ((2533, 2568), 'elasticsearch_dsl.Index', 'Index', (['settings.ELASTICSEARCH_INDEX'], {}), '(settings.ELASTICSEARCH_INDEX)\n', (2538, 2568), False, 'from elasticsearch_dsl import Index\n'), ((3191, 3216), 'imageledger.search.init', 'search.init', ([], {'timeout': '(2000)'}), '(timeout=2000)\n', (3202, 3216), False, 'from imageledger import models, search\n'), ((3877, 3911), 'imageledger.search.db_image_to_index', 'search.db_image_to_index', (['db_image'], {}), '(db_image)\n', (3901, 3911), False, 'from imageledger import models, search\n'), ((5412, 5447), 'django.db.transaction.atomic', 'transaction.atomic', ([], {'savepoint': '(False)'}), '(savepoint=False)\n', (5430, 5447), False, 'from django.db import connection, transaction\n'), ((5871, 5894), 'itertools.islice', 'itertools.islice', (['it', 'n'], {}), '(it, n)\n', (5887, 5894), False, 'import itertools\n'), ((2688, 2707), 'imageledger.search.Image.init', 'search.Image.init', ([], {}), '()\n', (2705, 2707), False, 'from imageledger import models, search\n'), ((2878, 2895), 'multiprocessing.dummy.Pool', 'Pool', (['num_threads'], {}), '(num_threads)\n', (2882, 2895), False, 'from multiprocessing.dummy import Pool\n'), ((3505, 3527), 'time.sleep', 'time.sleep', (['RETRY_WAIT'], {}), '(RETRY_WAIT)\n', (3515, 3527), False, 'import time\n'), ((6009, 6047), 'itertools.chain', 'itertools.chain', (['(first_el,)', 'chunk_it'], {}), '((first_el,), chunk_it)\n', (6024, 6047), False, 'import itertools\n'), ((3624, 3692), 'imageledger.models.Image.objects.filter', 'models.Image.objects.filter', ([], {'removed_from_source': '(False)', 'id__gt': 'start'}), '(removed_from_source=False, id__gt=start)\n', (3651, 3692), False, 'from imageledger import models, search\n'), ((4169, 4194), 'elasticsearch.helpers.bulk', 'helpers.bulk', (['es', 'batches'], {}), '(es, batches)\n', (4181, 4194), False, 'from elasticsearch import helpers\n'), ((5654, 5681), 'collections.namedtuple', 'namedtuple', (['"""DBObj"""', 'fields'], {}), "('DBObj', fields)\n", (5664, 5681), False, 'from collections import namedtuple\n'), ((4777, 4799), 'time.sleep', 'time.sleep', (['RETRY_WAIT'], {}), '(RETRY_WAIT)\n', (4787, 4799), False, 'import time\n')]
|
# -*- coding: utf-8 -*-
"""Constants for the Mantarray File Manager."""
from typing import Dict
import uuid
from immutabledict import immutabledict
from labware_domain_models import LabwareDefinition
try:
from importlib import metadata
except ImportError: # pragma: no cover
import importlib_metadata as metadata # type: ignore
PACKAGE_VERSION = metadata.version("pulse3D")
CURI_BIO_ACCOUNT_UUID = uuid.UUID("73f52be0-368c-42d8-a1fd-660d49ba5604")
CURI_BIO_USER_ACCOUNT_ID = uuid.UUID("<KEY>")
TWENTY_FOUR_WELL_PLATE = LabwareDefinition(row_count=4, column_count=6)
MIN_SUPPORTED_FILE_VERSION = "0.1.1"
CURRENT_BETA1_HDF5_FILE_FORMAT_VERSION = "0.4.2"
CURRENT_BETA2_HDF5_FILE_FORMAT_VERSION = "1.0.0"
FILE_FORMAT_VERSION_METADATA_KEY = "File Format Version"
FILE_MIGRATION_PATHS = immutabledict({"0.3.1": "0.4.1", "0.4.1": "0.4.2"})
NOT_APPLICABLE_H5_METADATA = uuid.UUID(
"59d92e00-99d5-4460-9a28-5a1a0fe9aecf"
) # Eli (1/19/21): H5 files can't store the concept of `None` in their metadata, so using this value to denote that a particular piece of metadata is not available (i.e. after migrating to a newer file format version)
HARDWARE_TEST_RECORDING_UUID = uuid.UUID("a2e76058-08cd-475d-a55d-31d401c3cb34")
UTC_BEGINNING_DATA_ACQUISTION_UUID = uuid.UUID("98c67f22-013b-421a-831b-0ea55df4651e")
START_RECORDING_TIME_INDEX_UUID = uuid.UUID("e41422b3-c903-48fd-9856-46ff56a6534c")
UTC_BEGINNING_RECORDING_UUID = uuid.UUID("d2449271-0e84-4b45-a28b-8deab390b7c2")
UTC_FIRST_TISSUE_DATA_POINT_UUID = uuid.UUID("b32fb8cb-ebf8-4378-a2c0-f53a27bc77cc")
UTC_FIRST_REF_DATA_POINT_UUID = uuid.UUID("7cc07b2b-4146-4374-b8f3-1c4d40ff0cf7")
CUSTOMER_ACCOUNT_ID_UUID = uuid.UUID("4927c810-fbf4-406f-a848-eba5308576e6")
USER_ACCOUNT_ID_UUID = uuid.UUID("7282cf00-2b6e-4202-9d9e-db0c73c3a71f")
SOFTWARE_BUILD_NUMBER_UUID = uuid.UUID("b4db8436-10a4-4359-932d-aa80e6de5c76")
SOFTWARE_RELEASE_VERSION_UUID = uuid.UUID("432fc3c1-051b-4604-bc3d-cc0d0bd75368")
MAIN_FIRMWARE_VERSION_UUID = uuid.UUID("faa48a0c-0155-4234-afbf-5e5dbaa59537")
SLEEP_FIRMWARE_VERSION_UUID = uuid.UUID("3a816076-90e4-4437-9929-dc910724a49d")
XEM_SERIAL_NUMBER_UUID = uuid.UUID("e5f5b134-60c7-4881-a531-33aa0edba540")
MANTARRAY_NICKNAME_UUID = uuid.UUID("0cdec9bb-d2b4-4c5b-9dd5-6a49766c5ed4")
MANTARRAY_SERIAL_NUMBER_UUID = uuid.UUID("83720d36-b941-4d85-9b39-1d817799edd6")
REFERENCE_VOLTAGE_UUID = uuid.UUID("0b3f3f56-0cc7-45f0-b748-9b9de480cba8")
WELL_NAME_UUID = uuid.UUID("6d78f3b9-135a-4195-b014-e74dee70387b")
WELL_ROW_UUID = uuid.UUID("da82fe73-16dd-456a-ac05-0b70fb7e0161")
WELL_COLUMN_UUID = uuid.UUID("7af25a0a-8253-4d32-98c4-3c2ca0d83906")
WELL_INDEX_UUID = uuid.UUID("cd89f639-1e36-4a13-a5ed-7fec6205f779")
TOTAL_WELL_COUNT_UUID = uuid.UUID("7ca73e1c-9555-4eca-8281-3f844b5606dc")
REF_SAMPLING_PERIOD_UUID = uuid.UUID("48aa034d-8775-453f-b135-75a983d6b553")
TISSUE_SAMPLING_PERIOD_UUID = uuid.UUID("f629083a-3724-4100-8ece-c03e637ac19c")
ADC_GAIN_SETTING_UUID = uuid.UUID("a3c3bb32-9b92-4da1-8ed8-6c09f9c816f8")
ADC_TISSUE_OFFSET_UUID = uuid.UUID("41069860-159f-49f2-a59d-401783c1ecb4")
ADC_REF_OFFSET_UUID = uuid.UUID("dc10066c-abf2-42b6-9b94-5e52d1ea9bfc")
PLATE_BARCODE_UUID = uuid.UUID("cf60afef-a9f0-4bc3-89e9-c665c6bb6941")
STIM_BARCODE_UUID = uuid.UUID("6fa67db1-c8b9-4937-b93f-6fe8bdc7e6d7")
BACKEND_LOG_UUID = uuid.UUID("87533deb-2495-4430-bce7-12fdfc99158e")
COMPUTER_NAME_HASH_UUID = uuid.UUID("fefd0675-35c2-45f6-855a-9500ad3f100d")
PLATE_BARCODE_IS_FROM_SCANNER_UUID = uuid.UUID("7d026e86-da70-4464-9181-dc0ce2d47bd1")
STIM_BARCODE_IS_FROM_SCANNER_UUID = uuid.UUID("6e5a4b3e-f766-4638-80f7-d95c417c0fc2")
IS_FILE_ORIGINAL_UNTRIMMED_UUID = uuid.UUID("52231a24-97a3-497a-917c-86c780d9993f")
TRIMMED_TIME_FROM_ORIGINAL_START_UUID = uuid.UUID("371996e6-5e2d-4183-a5cf-06de7058210a")
TRIMMED_TIME_FROM_ORIGINAL_END_UUID = uuid.UUID("55f6770d-c369-42ce-a437-5ed89c3cb1f8")
ORIGINAL_FILE_VERSION_UUID = uuid.UUID("cd1b4063-4a87-4a57-bc12-923ff4890844")
UTC_TIMESTAMP_OF_FILE_VERSION_MIGRATION_UUID = uuid.UUID("399b2148-09d4-418b-a132-e37df2721938")
FILE_VERSION_PRIOR_TO_MIGRATION_UUID = uuid.UUID("11b4945b-3cf3-4f67-8bee-7abc3c449756")
BOOTUP_COUNTER_UUID = uuid.UUID("b9ccc724-a39d-429a-be6d-3fd29be5037d")
TOTAL_WORKING_HOURS_UUID = uuid.UUID("f8108718-2fa0-40ce-a51a-8478e5edd4b8")
TAMPER_FLAG_UUID = uuid.UUID("68d0147f-9a84-4423-9c50-228da16ba895")
PCB_SERIAL_NUMBER_UUID = uuid.UUID("5103f995-19d2-4880-8a2e-2ce9080cd2f5")
MAGNETOMETER_CONFIGURATION_UUID = uuid.UUID("921121e9-4191-4536-bedd-03186fa1e117")
UTC_BEGINNING_STIMULATION_UUID = uuid.UUID("4b310594-ded4-45fd-a1b4-b829aceb416c")
STIMULATION_PROTOCOL_UUID = uuid.UUID("ede638ce-544e-427a-b1d9-c40784d7c82d")
IS_CALIBRATION_FILE_UUID = uuid.UUID("9a6f90eb-fe34-423b-bfed-fb441d6d9e5f")
CHANNEL_FIRMWARE_VERSION_UUID = uuid.UUID("d9694cfe-824c-41f8-915e-91e41ce7af32")
BOOT_FLAGS_UUID = uuid.UUID("762f6715-ffcd-4e8d-b707-638dd5777841")
INITIAL_MAGNET_FINDING_PARAMS = uuid.UUID("da5f2f6d-6874-4e53-be10-90c4bfbd3d45")
METADATA_UUID_DESCRIPTIONS = immutabledict(
{
# General values
HARDWARE_TEST_RECORDING_UUID: "Is Hardware Test Recording",
START_RECORDING_TIME_INDEX_UUID: "Timepoint of Beginning of Recording",
UTC_BEGINNING_DATA_ACQUISTION_UUID: "UTC Timestamp of Beginning of Data Acquisition",
UTC_BEGINNING_RECORDING_UUID: "UTC Timestamp of Beginning of Recording",
UTC_FIRST_TISSUE_DATA_POINT_UUID: "UTC Timestamp of Beginning of Recorded Tissue Sensor Data",
UTC_FIRST_REF_DATA_POINT_UUID: "UTC Timestamp of Beginning of Recorded Reference Sensor Data",
CUSTOMER_ACCOUNT_ID_UUID: "Customer Account ID",
USER_ACCOUNT_ID_UUID: "User Account ID",
SOFTWARE_BUILD_NUMBER_UUID: "Software Build Number",
SOFTWARE_RELEASE_VERSION_UUID: "Software Release Version",
MAIN_FIRMWARE_VERSION_UUID: "Firmware Version (Main Controller)",
SLEEP_FIRMWARE_VERSION_UUID: "Firmware Version (Sleep Mode)",
MANTARRAY_NICKNAME_UUID: "Mantarray Nickname",
MANTARRAY_SERIAL_NUMBER_UUID: "Mantarray Serial Number",
REFERENCE_VOLTAGE_UUID: "Reference Voltage",
WELL_NAME_UUID: "Well Name",
WELL_ROW_UUID: "Well Row (zero-based)",
WELL_COLUMN_UUID: "Well Column (zero-based)",
WELL_INDEX_UUID: "Well Index (zero-based)",
TOTAL_WELL_COUNT_UUID: "Total Wells in Plate",
REF_SAMPLING_PERIOD_UUID: "Reference Sensor Sampling Period (microseconds)",
TISSUE_SAMPLING_PERIOD_UUID: "Tissue Sensor Sampling Period (microseconds)",
ADC_GAIN_SETTING_UUID: "ADC Gain Setting",
ADC_TISSUE_OFFSET_UUID: "ADC Tissue Sensor Offset",
ADC_REF_OFFSET_UUID: "ADC Reference Sensor Offset",
PLATE_BARCODE_UUID: "Plate Barcode",
BACKEND_LOG_UUID: "Backend log file identifier",
COMPUTER_NAME_HASH_UUID: "SHA512 digest of computer name",
PLATE_BARCODE_IS_FROM_SCANNER_UUID: "Is this plate barcode obtained from the scanner",
IS_FILE_ORIGINAL_UNTRIMMED_UUID: "Is this an original file straight from the instrument and untrimmed",
TRIMMED_TIME_FROM_ORIGINAL_START_UUID: "Number of centimilliseconds that has been trimmed off the beginning of when the original data started",
TRIMMED_TIME_FROM_ORIGINAL_END_UUID: "Number of centimilliseconds that has been trimmed off the end of when the original data ended",
ORIGINAL_FILE_VERSION_UUID: "The original version of the file when recorded, prior to any migrations to newer versions/formats.",
UTC_TIMESTAMP_OF_FILE_VERSION_MIGRATION_UUID: "Timestamp when this file was migrated from an earlier version.",
FILE_VERSION_PRIOR_TO_MIGRATION_UUID: "File format version that this file was migrated from",
# Beta 1 specific values
XEM_SERIAL_NUMBER_UUID: "XEM Serial Number",
# Beta 2 specific values
BOOTUP_COUNTER_UUID: "The number of times this Mantarray Instrument has booted up",
TOTAL_WORKING_HOURS_UUID: "The total number of hours this Mantarray Instrument has been powered on and running",
TAMPER_FLAG_UUID: "Is it suspected the internals of the Mantarray enclosure have been tampered with",
PCB_SERIAL_NUMBER_UUID: "The serial number of the Mantarray PCB",
MAGNETOMETER_CONFIGURATION_UUID: "The state (on/off) of the board's magnetometers",
UTC_BEGINNING_STIMULATION_UUID: "UTC Timestamp of Beginning of Stimulation",
STIMULATION_PROTOCOL_UUID: "The stimulation protocol that was running on this well during recording. Empty string if stimulation was not active",
STIM_BARCODE_UUID: "Stim Lid Barcode",
STIM_BARCODE_IS_FROM_SCANNER_UUID: "Is this stim lid barcode obtained from the scanner",
IS_CALIBRATION_FILE_UUID: "Is this file a calibration (empty plate) recording",
CHANNEL_FIRMWARE_VERSION_UUID: "Firmware Version (Channel Controller)",
BOOT_FLAGS_UUID: "Hardware/firmware flags present on device bootup",
INITIAL_MAGNET_FINDING_PARAMS: "JSON string of the initial magnet finding params that should be used in Pulse3D",
}
)
DATETIME_STR_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
CENTIMILLISECONDS_PER_SECOND = int(1e5)
MICRO_TO_BASE_CONVERSION = int(1e6)
MICROSECONDS_PER_CENTIMILLISECOND = 10
TISSUE_SENSOR_READINGS = "tissue_sensor_readings"
REFERENCE_SENSOR_READINGS = "reference_sensor_readings"
STIMULATION_READINGS = "stimulation_readings"
TIME_INDICES = "time_indices"
TIME_OFFSETS = "time_offsets"
"""
constants from mantarray_waveform_analysis library
"""
MILLI_TO_BASE_CONVERSION = 1000
TWITCH_PERIOD_UUID = uuid.UUID("6e0cd81c-7861-4c49-ba14-87b2739d65fb")
# This is just the reciprocal of twitch period, but is pre-computed to make downstream pipelines
# simpler. Frequency is reported in Hz
TWITCH_FREQUENCY_UUID = uuid.UUID("472d0707-ff87-4198-9374-c28900bb216c")
AMPLITUDE_UUID = uuid.UUID("89cf1105-a015-434f-b527-4169b9400e26")
AUC_UUID = uuid.UUID("e7b9a6e4-c43d-4e8b-af7e-51742e252030")
WIDTH_UUID = uuid.UUID("c4c60d55-017a-4783-9600-f19606de26f3")
WIDTH_VALUE_UUID = uuid.UUID("05041f4e-c77d-42d9-a2ae-8902f912e9ac")
WIDTH_RISING_COORDS_UUID = uuid.UUID("2a16acb6-4df7-4064-9d47-5d27ea7a98ad")
WIDTH_FALLING_COORDS_UUID = uuid.UUID("26e5637d-42c9-4060-aa5d-52209b349c84")
RELAXATION_VELOCITY_UUID = uuid.UUID("0fcc0dc3-f9aa-4f1b-91b3-e5b5924279a9")
CONTRACTION_VELOCITY_UUID = uuid.UUID("73961e7c-17ec-42b0-b503-a23195ec249c")
IRREGULARITY_INTERVAL_UUID = uuid.UUID("61046076-66b9-4b8b-bfec-1e00603743c0")
# Kristian 9/15/21
FRACTION_MAX_UUID = uuid.UUID("8fe142e2-2504-4c9e-b3dc-817b24c7447e")
# Kristian 10/29/21: for contraction to % width, or peak to % relaxation
TIME_VALUE_UUID = uuid.UUID("32f5ce6b-e311-4434-8a2a-c2b6bbd81ee6")
RELAXATION_TIME_UUID = uuid.UUID("0ad56cd1-7bcc-4b57-8076-14366d7f3c6a")
CONTRACTION_TIME_UUID = uuid.UUID("33b5b0a8-f197-46ef-a451-a254e530757b")
AMPLITUDE_UUID = uuid.UUID("89cf1105-a015-434f-b527-4169b9400e26")
AUC_UUID = uuid.UUID("e7b9a6e4-c43d-4e8b-af7e-51742e252030")
WIDTH_UUID = uuid.UUID("c4c60d55-017a-4783-9600-f19606de26f3")
WIDTH_VALUE_UUID = uuid.UUID("05041f4e-c77d-42d9-a2ae-8902f912e9ac")
WIDTH_RISING_COORDS_UUID = uuid.UUID("2a16acb6-4df7-4064-9d47-5d27ea7a98ad")
WIDTH_FALLING_COORDS_UUID = uuid.UUID("26e5637d-42c9-4060-aa5d-52209b349c84")
RELAXATION_VELOCITY_UUID = uuid.UUID("0fcc0dc3-f9aa-4f1b-91b3-e5b5924279a9")
CONTRACTION_VELOCITY_UUID = uuid.UUID("73961e7c-17ec-42b0-b503-a23195ec249c")
IRREGULARITY_INTERVAL_UUID = uuid.UUID("61046076-66b9-4b8b-bfec-1e00603743c0")
FRACTION_MAX_UUID = uuid.UUID("8fe142e2-2504-4c9e-b3dc-817b24c7447e")
TIME_DIFFERENCE_UUID = uuid.UUID("1363817a-b1fb-468e-9f1c-ec54fce72dfe")
TIME_VALUE_UUID = uuid.UUID("32f5ce6b-e311-4434-8a2a-c2b6bbd81ee6")
RELAXATION_TIME_UUID = uuid.UUID("0ad56cd1-7bcc-4b57-8076-14366d7f3c6a")
CONTRACTION_TIME_UUID = uuid.UUID("33b5b0a8-f197-46ef-a451-a254e530757b")
# Kristian 11/9/21: full contraction or full relaxation metrics
BASELINE_TO_PEAK_UUID = uuid.UUID("03ce2d30-3580-4129-9913-2fc2e35eddb7")
PEAK_TO_BASELINE_UUID = uuid.UUID("1ac2589d-4713-41c0-8dd0-1e6c98600e37")
ALL_METRICS = [
TWITCH_PERIOD_UUID,
FRACTION_MAX_UUID,
AMPLITUDE_UUID,
AUC_UUID,
TWITCH_FREQUENCY_UUID,
CONTRACTION_VELOCITY_UUID,
RELAXATION_VELOCITY_UUID,
IRREGULARITY_INTERVAL_UUID,
BASELINE_TO_PEAK_UUID,
PEAK_TO_BASELINE_UUID,
WIDTH_UUID,
RELAXATION_TIME_UUID,
CONTRACTION_TIME_UUID,
]
PRIOR_PEAK_INDEX_UUID = uuid.UUID("80df90dc-21f8-4cad-a164-89436909b30a")
PRIOR_VALLEY_INDEX_UUID = uuid.UUID("72ba9466-c203-41b6-ac30-337b4a17a124")
SUBSEQUENT_PEAK_INDEX_UUID = uuid.UUID("7e37325b-6681-4623-b192-39f154350f36")
SUBSEQUENT_VALLEY_INDEX_UUID = uuid.UUID("fd47ba6b-ee4d-4674-9a89-56e0db7f3d97")
BESSEL_BANDPASS_UUID = uuid.UUID("0ecf0e52-0a29-453f-a6ff-46f5ec3ae783")
BESSEL_LOWPASS_10_UUID = uuid.UUID("7d64cac3-b841-4912-b734-c0cf20a81e7a")
BESSEL_LOWPASS_30_UUID = uuid.UUID("eee66c75-4dc4-4eb4-8d48-6c608bf28d91")
BUTTERWORTH_LOWPASS_30_UUID = uuid.UUID("de8d8cef-65bf-4119-ada7-bdecbbaa897a")
# General magnetic field to force conversion factor. Obtained 03/09/2021 by <NAME>, Valid as of 11/19/21
MILLIMETERS_PER_MILLITESLA = 23.25
NEWTONS_PER_MILLIMETER = 0.000159
# Beta 1 GMR to magnetic field conversion values. Valid as of 11/19/21
MILLIVOLTS_PER_MILLITESLA = 1073.6 # Obtained 03/09/2021 by <NAME>
MIDSCALE_CODE = 0x800000
RAW_TO_SIGNED_CONVERSION_VALUE = 2 ** 23 # subtract this value from raw hardware data
REFERENCE_VOLTAGE = 2.5
ADC_GAIN = 2
# Beta 2 Memsic to magnetic field conversion factors. Valid as of 11/19/21
MEMSIC_CENTER_OFFSET = 2 ** 15
MEMSIC_MSB = 2 ** 16
MEMSIC_FULL_SCALE = 16
GAUSS_PER_MILLITESLA = 10
MIN_NUMBER_PEAKS = 3
MIN_NUMBER_VALLEYS = 3
"""
pulse3D constants
"""
METADATA_EXCEL_SHEET_NAME = "metadata"
METADATA_RECORDING_ROW_START = 0
METADATA_INSTRUMENT_ROW_START = METADATA_RECORDING_ROW_START + 4
METADATA_OUTPUT_FILE_ROW_START = METADATA_INSTRUMENT_ROW_START + 6
CONTINUOUS_WAVEFORM_SHEET_NAME = "continuous-waveforms"
AGGREGATE_METRICS_SHEET_NAME = "aggregate-metrics"
PER_TWITCH_METRICS_SHEET_NAME = "per-twitch-metrics"
NUMBER_OF_PER_TWITCH_METRICS = 45
SNAPSHOT_CHART_SHEET_NAME = "continuous-waveform-snapshots"
FULL_CHART_SHEET_NAME = "full-continuous-waveform-plots"
TWITCH_FREQUENCIES_CHART_SHEET_NAME = "twitch-frequencies-plots"
FORCE_FREQUENCY_RELATIONSHIP_SHEET = "force-frequency-relationship"
INTERPOLATED_DATA_PERIOD_SECONDS = 1 / 100
INTERPOLATED_DATA_PERIOD_US = INTERPOLATED_DATA_PERIOD_SECONDS * MICRO_TO_BASE_CONVERSION
TSP_TO_DEFAULT_FILTER_UUID = { # Tissue Sampling Period (µs) to default Pipeline Filter UUID
9600: BESSEL_LOWPASS_10_UUID,
1600: BUTTERWORTH_LOWPASS_30_UUID,
}
DEFAULT_CELL_WIDTH = 64
CHART_ALPHA = 60 # for full/snapshots -- num pixels between left figure edge and plot area
CHART_GAMMA = 150 # for full/snapshots -- num pixels between right figure edge and plot area
CHART_PIXELS_PER_SECOND = 35 # for full/snapshots -- number of pixels per second
CHART_MAXIMUM_SNAPSHOT_LENGTH = 10
CHART_HEIGHT = 300
CHART_HEIGHT_CELLS = 15
CHART_FIXED_WIDTH_CELLS = 8
CHART_FIXED_WIDTH = DEFAULT_CELL_WIDTH * CHART_FIXED_WIDTH_CELLS
PEAK_VALLEY_COLUMN_START = 100
SECONDS_PER_CELL = 2.5
CALCULATED_METRIC_DISPLAY_NAMES = {
TWITCH_PERIOD_UUID: "Twitch Period (seconds)",
TWITCH_FREQUENCY_UUID: "Twitch Frequency (Hz)",
AMPLITUDE_UUID: "Active Twitch Force (μN)",
FRACTION_MAX_UUID: "Fraction of Maximum Active Twitch Force (μN)",
AUC_UUID: "Energy (μJ)",
CONTRACTION_VELOCITY_UUID: "Twitch Contraction Velocity (μN/second)",
RELAXATION_VELOCITY_UUID: "Twitch Relaxation Velocity (μN/second)",
IRREGULARITY_INTERVAL_UUID: "Twitch Interval Irregularity (seconds)",
TIME_DIFFERENCE_UUID: "Time Difference (seconds)",
WIDTH_UUID: "Twitch Width {} (seconds)",
RELAXATION_TIME_UUID: "Time From Peak to Relaxation {} (seconds)",
CONTRACTION_TIME_UUID: "Time From Contraction {} to Peak (seconds)",
BASELINE_TO_PEAK_UUID: "Time From Baseline to Peak (seconds)",
PEAK_TO_BASELINE_UUID: "Time From Peak to Baseline (seconds)",
}
CALCULATED_METRICS = immutabledict(
{
"by_width": (WIDTH_UUID, CONTRACTION_TIME_UUID, RELAXATION_TIME_UUID),
"scalar": (
AMPLITUDE_UUID,
AUC_UUID,
BASELINE_TO_PEAK_UUID,
CONTRACTION_VELOCITY_UUID,
FRACTION_MAX_UUID,
IRREGULARITY_INTERVAL_UUID,
PEAK_TO_BASELINE_UUID,
RELAXATION_VELOCITY_UUID,
TWITCH_FREQUENCY_UUID,
TWITCH_PERIOD_UUID,
),
}
)
COORDS = (10, 25, 50, 75, 90)
TWITCH_WIDTH_METRIC_DISPLAY_NAMES: Dict[int, str] = immutabledict(
(coord, f"Twitch Width {coord} (seconds)") for coord in reversed(COORDS)
)
CONTRACTION_COORDINATES_DISPLAY_NAMES: Dict[int, str] = immutabledict(
(coord, f"Contraction Coordinates {coord}") for coord in reversed(COORDS)
)
RELAXATION_COORDINATES_DISPLAY_NAMES: Dict[int, str] = immutabledict(
(coord, f"Relaxation Coordinates {coord}") for coord in COORDS
)
CONTRACTION_TIME_DIFFERENCE_DISPLAY_NAMES: Dict[int, str] = immutabledict(
(coord, f"Time From Contraction {coord} to Peak (seconds)") for coord in reversed(COORDS)
)
RELAXATION_TIME_DIFFERENCE_DISPLAY_NAMES: Dict[int, str] = immutabledict(
(coord, f"Time From Peak to Relaxation {coord} (seconds)") for coord in COORDS
)
ALL_FORMATS = immutabledict({"CoV": {"num_format": "0.00%"}})
TWITCHES_POINT_UP_UUID = uuid.UUID("97f69f56-f1c6-4c50-8590-7332570ed3c5")
INTERPOLATION_VALUE_UUID = uuid.UUID("466d0131-06b7-4f0f-ba1e-062a771cb280")
mutable_metadata_uuid_descriptions = dict(
METADATA_UUID_DESCRIPTIONS
) # create a mutable version to add in the new values specific to the SDK (.update is an in-place operation that doesn't return the dictionary, so chaining is difficult)
mutable_metadata_uuid_descriptions.update(
{
TWITCHES_POINT_UP_UUID: "Flag indicating whether or not the twitches in the data point up or not",
INTERPOLATION_VALUE_UUID: "Desired value for optical well data interpolation",
}
)
METADATA_UUID_DESCRIPTIONS = immutabledict(mutable_metadata_uuid_descriptions)
EXCEL_OPTICAL_METADATA_CELLS = immutabledict(
{
WELL_NAME_UUID: "E2",
UTC_BEGINNING_RECORDING_UUID: "E3",
PLATE_BARCODE_UUID: "E4",
TISSUE_SAMPLING_PERIOD_UUID: "E5",
TWITCHES_POINT_UP_UUID: "E6",
MANTARRAY_SERIAL_NUMBER_UUID: "E7",
INTERPOLATION_VALUE_UUID: "E8",
}
)
"""
Magnet Finding
"""
# 10 seconds at sampling rate of 100Hz
BASELINE_MEAN_NUM_DATA_POINTS = 10 * 100
|
[
"labware_domain_models.LabwareDefinition",
"uuid.UUID",
"immutabledict.immutabledict",
"importlib_metadata.version"
] |
[((358, 385), 'importlib_metadata.version', 'metadata.version', (['"""pulse3D"""'], {}), "('pulse3D')\n", (374, 385), True, 'import importlib_metadata as metadata\n'), ((411, 460), 'uuid.UUID', 'uuid.UUID', (['"""73f52be0-368c-42d8-a1fd-660d49ba5604"""'], {}), "('73f52be0-368c-42d8-a1fd-660d49ba5604')\n", (420, 460), False, 'import uuid\n'), ((488, 506), 'uuid.UUID', 'uuid.UUID', (['"""<KEY>"""'], {}), "('<KEY>')\n", (497, 506), False, 'import uuid\n'), ((533, 579), 'labware_domain_models.LabwareDefinition', 'LabwareDefinition', ([], {'row_count': '(4)', 'column_count': '(6)'}), '(row_count=4, column_count=6)\n', (550, 579), False, 'from labware_domain_models import LabwareDefinition\n'), ((796, 847), 'immutabledict.immutabledict', 'immutabledict', (["{'0.3.1': '0.4.1', '0.4.1': '0.4.2'}"], {}), "({'0.3.1': '0.4.1', '0.4.1': '0.4.2'})\n", (809, 847), False, 'from immutabledict import immutabledict\n'), ((878, 927), 'uuid.UUID', 'uuid.UUID', (['"""59d92e00-99d5-4460-9a28-5a1a0fe9aecf"""'], {}), "('59d92e00-99d5-4460-9a28-5a1a0fe9aecf')\n", (887, 927), False, 'import uuid\n'), ((1184, 1233), 'uuid.UUID', 'uuid.UUID', (['"""a2e76058-08cd-475d-a55d-31d401c3cb34"""'], {}), "('a2e76058-08cd-475d-a55d-31d401c3cb34')\n", (1193, 1233), False, 'import uuid\n'), ((1271, 1320), 'uuid.UUID', 'uuid.UUID', (['"""98c67f22-013b-421a-831b-0ea55df4651e"""'], {}), "('98c67f22-013b-421a-831b-0ea55df4651e')\n", (1280, 1320), False, 'import uuid\n'), ((1355, 1404), 'uuid.UUID', 'uuid.UUID', (['"""e41422b3-c903-48fd-9856-46ff56a6534c"""'], {}), "('e41422b3-c903-48fd-9856-46ff56a6534c')\n", (1364, 1404), False, 'import uuid\n'), ((1436, 1485), 'uuid.UUID', 'uuid.UUID', (['"""d2449271-0e84-4b45-a28b-8deab390b7c2"""'], {}), "('d2449271-0e84-4b45-a28b-8deab390b7c2')\n", (1445, 1485), False, 'import uuid\n'), ((1521, 1570), 'uuid.UUID', 'uuid.UUID', (['"""b32fb8cb-ebf8-4378-a2c0-f53a27bc77cc"""'], {}), "('b32fb8cb-ebf8-4378-a2c0-f53a27bc77cc')\n", (1530, 1570), False, 'import uuid\n'), ((1603, 1652), 'uuid.UUID', 'uuid.UUID', (['"""7cc07b2b-4146-4374-b8f3-1c4d40ff0cf7"""'], {}), "('7cc07b2b-4146-4374-b8f3-1c4d40ff0cf7')\n", (1612, 1652), False, 'import uuid\n'), ((1680, 1729), 'uuid.UUID', 'uuid.UUID', (['"""4927c810-fbf4-406f-a848-eba5308576e6"""'], {}), "('4927c810-fbf4-406f-a848-eba5308576e6')\n", (1689, 1729), False, 'import uuid\n'), ((1753, 1802), 'uuid.UUID', 'uuid.UUID', (['"""7282cf00-2b6e-4202-9d9e-db0c73c3a71f"""'], {}), "('7282cf00-2b6e-4202-9d9e-db0c73c3a71f')\n", (1762, 1802), False, 'import uuid\n'), ((1832, 1881), 'uuid.UUID', 'uuid.UUID', (['"""b4db8436-10a4-4359-932d-aa80e6de5c76"""'], {}), "('b4db8436-10a4-4359-932d-aa80e6de5c76')\n", (1841, 1881), False, 'import uuid\n'), ((1914, 1963), 'uuid.UUID', 'uuid.UUID', (['"""432fc3c1-051b-4604-bc3d-cc0d0bd75368"""'], {}), "('432fc3c1-051b-4604-bc3d-cc0d0bd75368')\n", (1923, 1963), False, 'import uuid\n'), ((1993, 2042), 'uuid.UUID', 'uuid.UUID', (['"""faa48a0c-0155-4234-afbf-5e5dbaa59537"""'], {}), "('faa48a0c-0155-4234-afbf-5e5dbaa59537')\n", (2002, 2042), False, 'import uuid\n'), ((2073, 2122), 'uuid.UUID', 'uuid.UUID', (['"""3a816076-90e4-4437-9929-dc910724a49d"""'], {}), "('3a816076-90e4-4437-9929-dc910724a49d')\n", (2082, 2122), False, 'import uuid\n'), ((2148, 2197), 'uuid.UUID', 'uuid.UUID', (['"""e5f5b134-60c7-4881-a531-33aa0edba540"""'], {}), "('e5f5b134-60c7-4881-a531-33aa0edba540')\n", (2157, 2197), False, 'import uuid\n'), ((2224, 2273), 'uuid.UUID', 'uuid.UUID', (['"""0cdec9bb-d2b4-4c5b-9dd5-6a49766c5ed4"""'], {}), 
"('0cdec9bb-d2b4-4c5b-9dd5-6a49766c5ed4')\n", (2233, 2273), False, 'import uuid\n'), ((2305, 2354), 'uuid.UUID', 'uuid.UUID', (['"""83720d36-b941-4d85-9b39-1d817799edd6"""'], {}), "('83720d36-b941-4d85-9b39-1d817799edd6')\n", (2314, 2354), False, 'import uuid\n'), ((2380, 2429), 'uuid.UUID', 'uuid.UUID', (['"""0b3f3f56-0cc7-45f0-b748-9b9de480cba8"""'], {}), "('0b3f3f56-0cc7-45f0-b748-9b9de480cba8')\n", (2389, 2429), False, 'import uuid\n'), ((2447, 2496), 'uuid.UUID', 'uuid.UUID', (['"""6d78f3b9-135a-4195-b014-e74dee70387b"""'], {}), "('6d78f3b9-135a-4195-b014-e74dee70387b')\n", (2456, 2496), False, 'import uuid\n'), ((2513, 2562), 'uuid.UUID', 'uuid.UUID', (['"""da82fe73-16dd-456a-ac05-0b70fb7e0161"""'], {}), "('da82fe73-16dd-456a-ac05-0b70fb7e0161')\n", (2522, 2562), False, 'import uuid\n'), ((2582, 2631), 'uuid.UUID', 'uuid.UUID', (['"""7af25a0a-8253-4d32-98c4-3c2ca0d83906"""'], {}), "('7af25a0a-8253-4d32-98c4-3c2ca0d83906')\n", (2591, 2631), False, 'import uuid\n'), ((2650, 2699), 'uuid.UUID', 'uuid.UUID', (['"""cd89f639-1e36-4a13-a5ed-7fec6205f779"""'], {}), "('cd89f639-1e36-4a13-a5ed-7fec6205f779')\n", (2659, 2699), False, 'import uuid\n'), ((2724, 2773), 'uuid.UUID', 'uuid.UUID', (['"""7ca73e1c-9555-4eca-8281-3f844b5606dc"""'], {}), "('7ca73e1c-9555-4eca-8281-3f844b5606dc')\n", (2733, 2773), False, 'import uuid\n'), ((2801, 2850), 'uuid.UUID', 'uuid.UUID', (['"""48aa034d-8775-453f-b135-75a983d6b553"""'], {}), "('48aa034d-8775-453f-b135-75a983d6b553')\n", (2810, 2850), False, 'import uuid\n'), ((2881, 2930), 'uuid.UUID', 'uuid.UUID', (['"""f629083a-3724-4100-8ece-c03e637ac19c"""'], {}), "('f629083a-3724-4100-8ece-c03e637ac19c')\n", (2890, 2930), False, 'import uuid\n'), ((2955, 3004), 'uuid.UUID', 'uuid.UUID', (['"""a3c3bb32-9b92-4da1-8ed8-6c09f9c816f8"""'], {}), "('a3c3bb32-9b92-4da1-8ed8-6c09f9c816f8')\n", (2964, 3004), False, 'import uuid\n'), ((3030, 3079), 'uuid.UUID', 'uuid.UUID', (['"""41069860-159f-49f2-a59d-401783c1ecb4"""'], {}), "('41069860-159f-49f2-a59d-401783c1ecb4')\n", (3039, 3079), False, 'import uuid\n'), ((3102, 3151), 'uuid.UUID', 'uuid.UUID', (['"""dc10066c-abf2-42b6-9b94-5e52d1ea9bfc"""'], {}), "('dc10066c-abf2-42b6-9b94-5e52d1ea9bfc')\n", (3111, 3151), False, 'import uuid\n'), ((3173, 3222), 'uuid.UUID', 'uuid.UUID', (['"""cf60afef-a9f0-4bc3-89e9-c665c6bb6941"""'], {}), "('cf60afef-a9f0-4bc3-89e9-c665c6bb6941')\n", (3182, 3222), False, 'import uuid\n'), ((3243, 3292), 'uuid.UUID', 'uuid.UUID', (['"""6fa67db1-c8b9-4937-b93f-6fe8bdc7e6d7"""'], {}), "('6fa67db1-c8b9-4937-b93f-6fe8bdc7e6d7')\n", (3252, 3292), False, 'import uuid\n'), ((3312, 3361), 'uuid.UUID', 'uuid.UUID', (['"""87533deb-2495-4430-bce7-12fdfc99158e"""'], {}), "('87533deb-2495-4430-bce7-12fdfc99158e')\n", (3321, 3361), False, 'import uuid\n'), ((3388, 3437), 'uuid.UUID', 'uuid.UUID', (['"""fefd0675-35c2-45f6-855a-9500ad3f100d"""'], {}), "('fefd0675-35c2-45f6-855a-9500ad3f100d')\n", (3397, 3437), False, 'import uuid\n'), ((3475, 3524), 'uuid.UUID', 'uuid.UUID', (['"""7d026e86-da70-4464-9181-dc0ce2d47bd1"""'], {}), "('7d026e86-da70-4464-9181-dc0ce2d47bd1')\n", (3484, 3524), False, 'import uuid\n'), ((3561, 3610), 'uuid.UUID', 'uuid.UUID', (['"""6e5a4b3e-f766-4638-80f7-d95c417c0fc2"""'], {}), "('6e5a4b3e-f766-4638-80f7-d95c417c0fc2')\n", (3570, 3610), False, 'import uuid\n'), ((3645, 3694), 'uuid.UUID', 'uuid.UUID', (['"""52231a24-97a3-497a-917c-86c780d9993f"""'], {}), "('52231a24-97a3-497a-917c-86c780d9993f')\n", (3654, 3694), False, 'import uuid\n'), ((3735, 3784), 'uuid.UUID', 'uuid.UUID', 
(['"""371996e6-5e2d-4183-a5cf-06de7058210a"""'], {}), "('371996e6-5e2d-4183-a5cf-06de7058210a')\n", (3744, 3784), False, 'import uuid\n'), ((3823, 3872), 'uuid.UUID', 'uuid.UUID', (['"""55f6770d-c369-42ce-a437-5ed89c3cb1f8"""'], {}), "('55f6770d-c369-42ce-a437-5ed89c3cb1f8')\n", (3832, 3872), False, 'import uuid\n'), ((3902, 3951), 'uuid.UUID', 'uuid.UUID', (['"""cd1b4063-4a87-4a57-bc12-923ff4890844"""'], {}), "('cd1b4063-4a87-4a57-bc12-923ff4890844')\n", (3911, 3951), False, 'import uuid\n'), ((3999, 4048), 'uuid.UUID', 'uuid.UUID', (['"""399b2148-09d4-418b-a132-e37df2721938"""'], {}), "('399b2148-09d4-418b-a132-e37df2721938')\n", (4008, 4048), False, 'import uuid\n'), ((4088, 4137), 'uuid.UUID', 'uuid.UUID', (['"""11b4945b-3cf3-4f67-8bee-7abc3c449756"""'], {}), "('11b4945b-3cf3-4f67-8bee-7abc3c449756')\n", (4097, 4137), False, 'import uuid\n'), ((4160, 4209), 'uuid.UUID', 'uuid.UUID', (['"""b9ccc724-a39d-429a-be6d-3fd29be5037d"""'], {}), "('b9ccc724-a39d-429a-be6d-3fd29be5037d')\n", (4169, 4209), False, 'import uuid\n'), ((4237, 4286), 'uuid.UUID', 'uuid.UUID', (['"""f8108718-2fa0-40ce-a51a-8478e5edd4b8"""'], {}), "('f8108718-2fa0-40ce-a51a-8478e5edd4b8')\n", (4246, 4286), False, 'import uuid\n'), ((4306, 4355), 'uuid.UUID', 'uuid.UUID', (['"""68d0147f-9a84-4423-9c50-228da16ba895"""'], {}), "('68d0147f-9a84-4423-9c50-228da16ba895')\n", (4315, 4355), False, 'import uuid\n'), ((4381, 4430), 'uuid.UUID', 'uuid.UUID', (['"""5103f995-19d2-4880-8a2e-2ce9080cd2f5"""'], {}), "('5103f995-19d2-4880-8a2e-2ce9080cd2f5')\n", (4390, 4430), False, 'import uuid\n'), ((4465, 4514), 'uuid.UUID', 'uuid.UUID', (['"""921121e9-4191-4536-bedd-03186fa1e117"""'], {}), "('921121e9-4191-4536-bedd-03186fa1e117')\n", (4474, 4514), False, 'import uuid\n'), ((4548, 4597), 'uuid.UUID', 'uuid.UUID', (['"""4b310594-ded4-45fd-a1b4-b829aceb416c"""'], {}), "('4b310594-ded4-45fd-a1b4-b829aceb416c')\n", (4557, 4597), False, 'import uuid\n'), ((4626, 4675), 'uuid.UUID', 'uuid.UUID', (['"""ede638ce-544e-427a-b1d9-c40784d7c82d"""'], {}), "('ede638ce-544e-427a-b1d9-c40784d7c82d')\n", (4635, 4675), False, 'import uuid\n'), ((4703, 4752), 'uuid.UUID', 'uuid.UUID', (['"""9a6f90eb-fe34-423b-bfed-fb441d6d9e5f"""'], {}), "('9a6f90eb-fe34-423b-bfed-fb441d6d9e5f')\n", (4712, 4752), False, 'import uuid\n'), ((4785, 4834), 'uuid.UUID', 'uuid.UUID', (['"""d9694cfe-824c-41f8-915e-91e41ce7af32"""'], {}), "('d9694cfe-824c-41f8-915e-91e41ce7af32')\n", (4794, 4834), False, 'import uuid\n'), ((4853, 4902), 'uuid.UUID', 'uuid.UUID', (['"""762f6715-ffcd-4e8d-b707-638dd5777841"""'], {}), "('762f6715-ffcd-4e8d-b707-638dd5777841')\n", (4862, 4902), False, 'import uuid\n'), ((4935, 4984), 'uuid.UUID', 'uuid.UUID', (['"""da5f2f6d-6874-4e53-be10-90c4bfbd3d45"""'], {}), "('da5f2f6d-6874-4e53-be10-90c4bfbd3d45')\n", (4944, 4984), False, 'import uuid\n'), ((5014, 8881), 'immutabledict.immutabledict', 'immutabledict', (['{HARDWARE_TEST_RECORDING_UUID: \'Is Hardware Test Recording\',\n START_RECORDING_TIME_INDEX_UUID: \'Timepoint of Beginning of Recording\',\n UTC_BEGINNING_DATA_ACQUISTION_UUID:\n \'UTC Timestamp of Beginning of Data Acquisition\',\n UTC_BEGINNING_RECORDING_UUID: \'UTC Timestamp of Beginning of Recording\',\n UTC_FIRST_TISSUE_DATA_POINT_UUID:\n \'UTC Timestamp of Beginning of Recorded Tissue Sensor Data\',\n UTC_FIRST_REF_DATA_POINT_UUID:\n \'UTC Timestamp of Beginning of Recorded Reference Sensor Data\',\n CUSTOMER_ACCOUNT_ID_UUID: \'Customer Account ID\', USER_ACCOUNT_ID_UUID:\n \'User Account ID\', SOFTWARE_BUILD_NUMBER_UUID: \'Software 
Build Number\',\n SOFTWARE_RELEASE_VERSION_UUID: \'Software Release Version\',\n MAIN_FIRMWARE_VERSION_UUID: \'Firmware Version (Main Controller)\',\n SLEEP_FIRMWARE_VERSION_UUID: \'Firmware Version (Sleep Mode)\',\n MANTARRAY_NICKNAME_UUID: \'Mantarray Nickname\',\n MANTARRAY_SERIAL_NUMBER_UUID: \'Mantarray Serial Number\',\n REFERENCE_VOLTAGE_UUID: \'Reference Voltage\', WELL_NAME_UUID:\n \'Well Name\', WELL_ROW_UUID: \'Well Row (zero-based)\', WELL_COLUMN_UUID:\n \'Well Column (zero-based)\', WELL_INDEX_UUID: \'Well Index (zero-based)\',\n TOTAL_WELL_COUNT_UUID: \'Total Wells in Plate\', REF_SAMPLING_PERIOD_UUID:\n \'Reference Sensor Sampling Period (microseconds)\',\n TISSUE_SAMPLING_PERIOD_UUID:\n \'Tissue Sensor Sampling Period (microseconds)\', ADC_GAIN_SETTING_UUID:\n \'ADC Gain Setting\', ADC_TISSUE_OFFSET_UUID: \'ADC Tissue Sensor Offset\',\n ADC_REF_OFFSET_UUID: \'ADC Reference Sensor Offset\', PLATE_BARCODE_UUID:\n \'Plate Barcode\', BACKEND_LOG_UUID: \'Backend log file identifier\',\n COMPUTER_NAME_HASH_UUID: \'SHA512 digest of computer name\',\n PLATE_BARCODE_IS_FROM_SCANNER_UUID:\n \'Is this plate barcode obtained from the scanner\',\n IS_FILE_ORIGINAL_UNTRIMMED_UUID:\n \'Is this an original file straight from the instrument and untrimmed\',\n TRIMMED_TIME_FROM_ORIGINAL_START_UUID:\n \'Number of centimilliseconds that has been trimmed off the beginning of when the original data started\'\n , TRIMMED_TIME_FROM_ORIGINAL_END_UUID:\n \'Number of centimilliseconds that has been trimmed off the end of when the original data ended\'\n , ORIGINAL_FILE_VERSION_UUID:\n \'The original version of the file when recorded, prior to any migrations to newer versions/formats.\'\n , UTC_TIMESTAMP_OF_FILE_VERSION_MIGRATION_UUID:\n \'Timestamp when this file was migrated from an earlier version.\',\n FILE_VERSION_PRIOR_TO_MIGRATION_UUID:\n \'File format version that this file was migrated from\',\n XEM_SERIAL_NUMBER_UUID: \'XEM Serial Number\', BOOTUP_COUNTER_UUID:\n \'The number of times this Mantarray Instrument has booted up\',\n TOTAL_WORKING_HOURS_UUID:\n \'The total number of hours this Mantarray Instrument has been powered on and running\'\n , TAMPER_FLAG_UUID:\n \'Is it suspected the internals of the Mantarray enclosure have been tampered with\'\n , PCB_SERIAL_NUMBER_UUID: \'The serial number of the Mantarray PCB\',\n MAGNETOMETER_CONFIGURATION_UUID:\n "The state (on/off) of the board\'s magnetometers",\n UTC_BEGINNING_STIMULATION_UUID:\n \'UTC Timestamp of Beginning of Stimulation\', STIMULATION_PROTOCOL_UUID:\n \'The stimulation protocol that was running on this well during recording. 
Empty string if stimulation was not active\'\n , STIM_BARCODE_UUID: \'Stim Lid Barcode\',\n STIM_BARCODE_IS_FROM_SCANNER_UUID:\n \'Is this stim lid barcode obtained from the scanner\',\n IS_CALIBRATION_FILE_UUID:\n \'Is this file a calibration (empty plate) recording\',\n CHANNEL_FIRMWARE_VERSION_UUID: \'Firmware Version (Channel Controller)\',\n BOOT_FLAGS_UUID: \'Hardware/firmware flags present on device bootup\',\n INITIAL_MAGNET_FINDING_PARAMS:\n \'JSON string of the initial magnet finding params that should be used in Pulse3D\'\n }'], {}), '({HARDWARE_TEST_RECORDING_UUID: \'Is Hardware Test Recording\',\n START_RECORDING_TIME_INDEX_UUID: \'Timepoint of Beginning of Recording\',\n UTC_BEGINNING_DATA_ACQUISTION_UUID:\n \'UTC Timestamp of Beginning of Data Acquisition\',\n UTC_BEGINNING_RECORDING_UUID: \'UTC Timestamp of Beginning of Recording\',\n UTC_FIRST_TISSUE_DATA_POINT_UUID:\n \'UTC Timestamp of Beginning of Recorded Tissue Sensor Data\',\n UTC_FIRST_REF_DATA_POINT_UUID:\n \'UTC Timestamp of Beginning of Recorded Reference Sensor Data\',\n CUSTOMER_ACCOUNT_ID_UUID: \'Customer Account ID\', USER_ACCOUNT_ID_UUID:\n \'User Account ID\', SOFTWARE_BUILD_NUMBER_UUID: \'Software Build Number\',\n SOFTWARE_RELEASE_VERSION_UUID: \'Software Release Version\',\n MAIN_FIRMWARE_VERSION_UUID: \'Firmware Version (Main Controller)\',\n SLEEP_FIRMWARE_VERSION_UUID: \'Firmware Version (Sleep Mode)\',\n MANTARRAY_NICKNAME_UUID: \'Mantarray Nickname\',\n MANTARRAY_SERIAL_NUMBER_UUID: \'Mantarray Serial Number\',\n REFERENCE_VOLTAGE_UUID: \'Reference Voltage\', WELL_NAME_UUID:\n \'Well Name\', WELL_ROW_UUID: \'Well Row (zero-based)\', WELL_COLUMN_UUID:\n \'Well Column (zero-based)\', WELL_INDEX_UUID: \'Well Index (zero-based)\',\n TOTAL_WELL_COUNT_UUID: \'Total Wells in Plate\', REF_SAMPLING_PERIOD_UUID:\n \'Reference Sensor Sampling Period (microseconds)\',\n TISSUE_SAMPLING_PERIOD_UUID:\n \'Tissue Sensor Sampling Period (microseconds)\', ADC_GAIN_SETTING_UUID:\n \'ADC Gain Setting\', ADC_TISSUE_OFFSET_UUID: \'ADC Tissue Sensor Offset\',\n ADC_REF_OFFSET_UUID: \'ADC Reference Sensor Offset\', PLATE_BARCODE_UUID:\n \'Plate Barcode\', BACKEND_LOG_UUID: \'Backend log file identifier\',\n COMPUTER_NAME_HASH_UUID: \'SHA512 digest of computer name\',\n PLATE_BARCODE_IS_FROM_SCANNER_UUID:\n \'Is this plate barcode obtained from the scanner\',\n IS_FILE_ORIGINAL_UNTRIMMED_UUID:\n \'Is this an original file straight from the instrument and untrimmed\',\n TRIMMED_TIME_FROM_ORIGINAL_START_UUID:\n \'Number of centimilliseconds that has been trimmed off the beginning of when the original data started\'\n , TRIMMED_TIME_FROM_ORIGINAL_END_UUID:\n \'Number of centimilliseconds that has been trimmed off the end of when the original data ended\'\n , ORIGINAL_FILE_VERSION_UUID:\n \'The original version of the file when recorded, prior to any migrations to newer versions/formats.\'\n , UTC_TIMESTAMP_OF_FILE_VERSION_MIGRATION_UUID:\n \'Timestamp when this file was migrated from an earlier version.\',\n FILE_VERSION_PRIOR_TO_MIGRATION_UUID:\n \'File format version that this file was migrated from\',\n XEM_SERIAL_NUMBER_UUID: \'XEM Serial Number\', BOOTUP_COUNTER_UUID:\n \'The number of times this Mantarray Instrument has booted up\',\n TOTAL_WORKING_HOURS_UUID:\n \'The total number of hours this Mantarray Instrument has been powered on and running\'\n , TAMPER_FLAG_UUID:\n \'Is it suspected the internals of the Mantarray enclosure have been tampered with\'\n , PCB_SERIAL_NUMBER_UUID: \'The serial number of the Mantarray 
PCB\',\n MAGNETOMETER_CONFIGURATION_UUID:\n "The state (on/off) of the board\'s magnetometers",\n UTC_BEGINNING_STIMULATION_UUID:\n \'UTC Timestamp of Beginning of Stimulation\', STIMULATION_PROTOCOL_UUID:\n \'The stimulation protocol that was running on this well during recording. Empty string if stimulation was not active\'\n , STIM_BARCODE_UUID: \'Stim Lid Barcode\',\n STIM_BARCODE_IS_FROM_SCANNER_UUID:\n \'Is this stim lid barcode obtained from the scanner\',\n IS_CALIBRATION_FILE_UUID:\n \'Is this file a calibration (empty plate) recording\',\n CHANNEL_FIRMWARE_VERSION_UUID: \'Firmware Version (Channel Controller)\',\n BOOT_FLAGS_UUID: \'Hardware/firmware flags present on device bootup\',\n INITIAL_MAGNET_FINDING_PARAMS:\n \'JSON string of the initial magnet finding params that should be used in Pulse3D\'\n })\n', (5027, 8881), False, 'from immutabledict import immutabledict\n'), ((9611, 9660), 'uuid.UUID', 'uuid.UUID', (['"""6e0cd81c-7861-4c49-ba14-87b2739d65fb"""'], {}), "('6e0cd81c-7861-4c49-ba14-87b2739d65fb')\n", (9620, 9660), False, 'import uuid\n'), ((9822, 9871), 'uuid.UUID', 'uuid.UUID', (['"""472d0707-ff87-4198-9374-c28900bb216c"""'], {}), "('472d0707-ff87-4198-9374-c28900bb216c')\n", (9831, 9871), False, 'import uuid\n'), ((9889, 9938), 'uuid.UUID', 'uuid.UUID', (['"""89cf1105-a015-434f-b527-4169b9400e26"""'], {}), "('89cf1105-a015-434f-b527-4169b9400e26')\n", (9898, 9938), False, 'import uuid\n'), ((9950, 9999), 'uuid.UUID', 'uuid.UUID', (['"""e7b9a6e4-c43d-4e8b-af7e-51742e252030"""'], {}), "('e7b9a6e4-c43d-4e8b-af7e-51742e252030')\n", (9959, 9999), False, 'import uuid\n'), ((10013, 10062), 'uuid.UUID', 'uuid.UUID', (['"""c4c60d55-017a-4783-9600-f19606de26f3"""'], {}), "('c4c60d55-017a-4783-9600-f19606de26f3')\n", (10022, 10062), False, 'import uuid\n'), ((10082, 10131), 'uuid.UUID', 'uuid.UUID', (['"""05041f4e-c77d-42d9-a2ae-8902f912e9ac"""'], {}), "('05041f4e-c77d-42d9-a2ae-8902f912e9ac')\n", (10091, 10131), False, 'import uuid\n'), ((10159, 10208), 'uuid.UUID', 'uuid.UUID', (['"""2a16acb6-4df7-4064-9d47-5d27ea7a98ad"""'], {}), "('2a16acb6-4df7-4064-9d47-5d27ea7a98ad')\n", (10168, 10208), False, 'import uuid\n'), ((10237, 10286), 'uuid.UUID', 'uuid.UUID', (['"""26e5637d-42c9-4060-aa5d-52209b349c84"""'], {}), "('26e5637d-42c9-4060-aa5d-52209b349c84')\n", (10246, 10286), False, 'import uuid\n'), ((10314, 10363), 'uuid.UUID', 'uuid.UUID', (['"""0fcc0dc3-f9aa-4f1b-91b3-e5b5924279a9"""'], {}), "('0fcc0dc3-f9aa-4f1b-91b3-e5b5924279a9')\n", (10323, 10363), False, 'import uuid\n'), ((10392, 10441), 'uuid.UUID', 'uuid.UUID', (['"""73961e7c-17ec-42b0-b503-a23195ec249c"""'], {}), "('73961e7c-17ec-42b0-b503-a23195ec249c')\n", (10401, 10441), False, 'import uuid\n'), ((10471, 10520), 'uuid.UUID', 'uuid.UUID', (['"""61046076-66b9-4b8b-bfec-1e00603743c0"""'], {}), "('61046076-66b9-4b8b-bfec-1e00603743c0')\n", (10480, 10520), False, 'import uuid\n'), ((10561, 10610), 'uuid.UUID', 'uuid.UUID', (['"""8fe142e2-2504-4c9e-b3dc-817b24c7447e"""'], {}), "('8fe142e2-2504-4c9e-b3dc-817b24c7447e')\n", (10570, 10610), False, 'import uuid\n'), ((10703, 10752), 'uuid.UUID', 'uuid.UUID', (['"""32f5ce6b-e311-4434-8a2a-c2b6bbd81ee6"""'], {}), "('32f5ce6b-e311-4434-8a2a-c2b6bbd81ee6')\n", (10712, 10752), False, 'import uuid\n'), ((10776, 10825), 'uuid.UUID', 'uuid.UUID', (['"""0ad56cd1-7bcc-4b57-8076-14366d7f3c6a"""'], {}), "('0ad56cd1-7bcc-4b57-8076-14366d7f3c6a')\n", (10785, 10825), False, 'import uuid\n'), ((10850, 10899), 'uuid.UUID', 'uuid.UUID', (['"""33b5b0a8-f197-46ef-a451-a254e530757b"""'], {}), 
"('33b5b0a8-f197-46ef-a451-a254e530757b')\n", (10859, 10899), False, 'import uuid\n'), ((10918, 10967), 'uuid.UUID', 'uuid.UUID', (['"""89cf1105-a015-434f-b527-4169b9400e26"""'], {}), "('89cf1105-a015-434f-b527-4169b9400e26')\n", (10927, 10967), False, 'import uuid\n'), ((10979, 11028), 'uuid.UUID', 'uuid.UUID', (['"""e7b9a6e4-c43d-4e8b-af7e-51742e252030"""'], {}), "('e7b9a6e4-c43d-4e8b-af7e-51742e252030')\n", (10988, 11028), False, 'import uuid\n'), ((11042, 11091), 'uuid.UUID', 'uuid.UUID', (['"""c4c60d55-017a-4783-9600-f19606de26f3"""'], {}), "('c4c60d55-017a-4783-9600-f19606de26f3')\n", (11051, 11091), False, 'import uuid\n'), ((11111, 11160), 'uuid.UUID', 'uuid.UUID', (['"""05041f4e-c77d-42d9-a2ae-8902f912e9ac"""'], {}), "('05041f4e-c77d-42d9-a2ae-8902f912e9ac')\n", (11120, 11160), False, 'import uuid\n'), ((11188, 11237), 'uuid.UUID', 'uuid.UUID', (['"""2a16acb6-4df7-4064-9d47-5d27ea7a98ad"""'], {}), "('2a16acb6-4df7-4064-9d47-5d27ea7a98ad')\n", (11197, 11237), False, 'import uuid\n'), ((11266, 11315), 'uuid.UUID', 'uuid.UUID', (['"""26e5637d-42c9-4060-aa5d-52209b349c84"""'], {}), "('26e5637d-42c9-4060-aa5d-52209b349c84')\n", (11275, 11315), False, 'import uuid\n'), ((11343, 11392), 'uuid.UUID', 'uuid.UUID', (['"""0fcc0dc3-f9aa-4f1b-91b3-e5b5924279a9"""'], {}), "('0fcc0dc3-f9aa-4f1b-91b3-e5b5924279a9')\n", (11352, 11392), False, 'import uuid\n'), ((11421, 11470), 'uuid.UUID', 'uuid.UUID', (['"""73961e7c-17ec-42b0-b503-a23195ec249c"""'], {}), "('73961e7c-17ec-42b0-b503-a23195ec249c')\n", (11430, 11470), False, 'import uuid\n'), ((11500, 11549), 'uuid.UUID', 'uuid.UUID', (['"""61046076-66b9-4b8b-bfec-1e00603743c0"""'], {}), "('61046076-66b9-4b8b-bfec-1e00603743c0')\n", (11509, 11549), False, 'import uuid\n'), ((11570, 11619), 'uuid.UUID', 'uuid.UUID', (['"""8fe142e2-2504-4c9e-b3dc-817b24c7447e"""'], {}), "('8fe142e2-2504-4c9e-b3dc-817b24c7447e')\n", (11579, 11619), False, 'import uuid\n'), ((11643, 11692), 'uuid.UUID', 'uuid.UUID', (['"""1363817a-b1fb-468e-9f1c-ec54fce72dfe"""'], {}), "('1363817a-b1fb-468e-9f1c-ec54fce72dfe')\n", (11652, 11692), False, 'import uuid\n'), ((11711, 11760), 'uuid.UUID', 'uuid.UUID', (['"""32f5ce6b-e311-4434-8a2a-c2b6bbd81ee6"""'], {}), "('32f5ce6b-e311-4434-8a2a-c2b6bbd81ee6')\n", (11720, 11760), False, 'import uuid\n'), ((11784, 11833), 'uuid.UUID', 'uuid.UUID', (['"""0ad56cd1-7bcc-4b57-8076-14366d7f3c6a"""'], {}), "('0ad56cd1-7bcc-4b57-8076-14366d7f3c6a')\n", (11793, 11833), False, 'import uuid\n'), ((11858, 11907), 'uuid.UUID', 'uuid.UUID', (['"""33b5b0a8-f197-46ef-a451-a254e530757b"""'], {}), "('33b5b0a8-f197-46ef-a451-a254e530757b')\n", (11867, 11907), False, 'import uuid\n'), ((11997, 12046), 'uuid.UUID', 'uuid.UUID', (['"""03ce2d30-3580-4129-9913-2fc2e35eddb7"""'], {}), "('03ce2d30-3580-4129-9913-2fc2e35eddb7')\n", (12006, 12046), False, 'import uuid\n'), ((12071, 12120), 'uuid.UUID', 'uuid.UUID', (['"""1ac2589d-4713-41c0-8dd0-1e6c98600e37"""'], {}), "('1ac2589d-4713-41c0-8dd0-1e6c98600e37')\n", (12080, 12120), False, 'import uuid\n'), ((12489, 12538), 'uuid.UUID', 'uuid.UUID', (['"""80df90dc-21f8-4cad-a164-89436909b30a"""'], {}), "('80df90dc-21f8-4cad-a164-89436909b30a')\n", (12498, 12538), False, 'import uuid\n'), ((12565, 12614), 'uuid.UUID', 'uuid.UUID', (['"""72ba9466-c203-41b6-ac30-337b4a17a124"""'], {}), "('72ba9466-c203-41b6-ac30-337b4a17a124')\n", (12574, 12614), False, 'import uuid\n'), ((12644, 12693), 'uuid.UUID', 'uuid.UUID', (['"""7e37325b-6681-4623-b192-39f154350f36"""'], {}), "('7e37325b-6681-4623-b192-39f154350f36')\n", (12653, 
12693), False, 'import uuid\n'), ((12725, 12774), 'uuid.UUID', 'uuid.UUID', (['"""fd47ba6b-ee4d-4674-9a89-56e0db7f3d97"""'], {}), "('fd47ba6b-ee4d-4674-9a89-56e0db7f3d97')\n", (12734, 12774), False, 'import uuid\n'), ((12799, 12848), 'uuid.UUID', 'uuid.UUID', (['"""0ecf0e52-0a29-453f-a6ff-46f5ec3ae783"""'], {}), "('0ecf0e52-0a29-453f-a6ff-46f5ec3ae783')\n", (12808, 12848), False, 'import uuid\n'), ((12874, 12923), 'uuid.UUID', 'uuid.UUID', (['"""7d64cac3-b841-4912-b734-c0cf20a81e7a"""'], {}), "('7d64cac3-b841-4912-b734-c0cf20a81e7a')\n", (12883, 12923), False, 'import uuid\n'), ((12949, 12998), 'uuid.UUID', 'uuid.UUID', (['"""eee66c75-4dc4-4eb4-8d48-6c608bf28d91"""'], {}), "('eee66c75-4dc4-4eb4-8d48-6c608bf28d91')\n", (12958, 12998), False, 'import uuid\n'), ((13029, 13078), 'uuid.UUID', 'uuid.UUID', (['"""de8d8cef-65bf-4119-ada7-bdecbbaa897a"""'], {}), "('de8d8cef-65bf-4119-ada7-bdecbbaa897a')\n", (13038, 13078), False, 'import uuid\n'), ((16174, 16503), 'immutabledict.immutabledict', 'immutabledict', (["{'by_width': (WIDTH_UUID, CONTRACTION_TIME_UUID, RELAXATION_TIME_UUID),\n 'scalar': (AMPLITUDE_UUID, AUC_UUID, BASELINE_TO_PEAK_UUID,\n CONTRACTION_VELOCITY_UUID, FRACTION_MAX_UUID,\n IRREGULARITY_INTERVAL_UUID, PEAK_TO_BASELINE_UUID,\n RELAXATION_VELOCITY_UUID, TWITCH_FREQUENCY_UUID, TWITCH_PERIOD_UUID)}"], {}), "({'by_width': (WIDTH_UUID, CONTRACTION_TIME_UUID,\n RELAXATION_TIME_UUID), 'scalar': (AMPLITUDE_UUID, AUC_UUID,\n BASELINE_TO_PEAK_UUID, CONTRACTION_VELOCITY_UUID, FRACTION_MAX_UUID,\n IRREGULARITY_INTERVAL_UUID, PEAK_TO_BASELINE_UUID,\n RELAXATION_VELOCITY_UUID, TWITCH_FREQUENCY_UUID, TWITCH_PERIOD_UUID)})\n", (16187, 16503), False, 'from immutabledict import immutabledict\n'), ((17031, 17108), 'immutabledict.immutabledict', 'immutabledict', (["((coord, f'Relaxation Coordinates {coord}') for coord in COORDS)"], {}), "((coord, f'Relaxation Coordinates {coord}') for coord in COORDS)\n", (17044, 17108), False, 'from immutabledict import immutabledict\n'), ((17345, 17442), 'immutabledict.immutabledict', 'immutabledict', (["((coord, f'Time From Peak to Relaxation {coord} (seconds)') for coord in COORDS\n )"], {}), "((coord, f'Time From Peak to Relaxation {coord} (seconds)') for\n coord in COORDS)\n", (17358, 17442), False, 'from immutabledict import immutabledict\n'), ((17460, 17507), 'immutabledict.immutabledict', 'immutabledict', (["{'CoV': {'num_format': '0.00%'}}"], {}), "({'CoV': {'num_format': '0.00%'}})\n", (17473, 17507), False, 'from immutabledict import immutabledict\n'), ((17534, 17583), 'uuid.UUID', 'uuid.UUID', (['"""97f69f56-f1c6-4c50-8590-7332570ed3c5"""'], {}), "('97f69f56-f1c6-4c50-8590-7332570ed3c5')\n", (17543, 17583), False, 'import uuid\n'), ((17611, 17660), 'uuid.UUID', 'uuid.UUID', (['"""466d0131-06b7-4f0f-ba1e-062a771cb280"""'], {}), "('466d0131-06b7-4f0f-ba1e-062a771cb280')\n", (17620, 17660), False, 'import uuid\n'), ((18186, 18235), 'immutabledict.immutabledict', 'immutabledict', (['mutable_metadata_uuid_descriptions'], {}), '(mutable_metadata_uuid_descriptions)\n', (18199, 18235), False, 'from immutabledict import immutabledict\n'), ((18268, 18512), 'immutabledict.immutabledict', 'immutabledict', (["{WELL_NAME_UUID: 'E2', UTC_BEGINNING_RECORDING_UUID: 'E3',\n PLATE_BARCODE_UUID: 'E4', TISSUE_SAMPLING_PERIOD_UUID: 'E5',\n TWITCHES_POINT_UP_UUID: 'E6', MANTARRAY_SERIAL_NUMBER_UUID: 'E7',\n INTERPOLATION_VALUE_UUID: 'E8'}"], {}), "({WELL_NAME_UUID: 'E2', UTC_BEGINNING_RECORDING_UUID: 'E3',\n PLATE_BARCODE_UUID: 'E4', TISSUE_SAMPLING_PERIOD_UUID: 'E5',\n 
TWITCHES_POINT_UP_UUID: 'E6', MANTARRAY_SERIAL_NUMBER_UUID: 'E7',\n INTERPOLATION_VALUE_UUID: 'E8'})\n", (18281, 18512), False, 'from immutabledict import immutabledict\n')]
|
from flask import current_app
import pytest
import json
from base64 import b64encode
import basis_set_exchange as bse
headers = {'Content-Type': 'application/json'}
def get_ref_formats():
    return [fmt for fmt in bse.get_reference_formats()]
@pytest.mark.usefixtures("app", "client", autouse=True) # to use fixtures from conftest
class TestAPIs(object):
"""
Testing the APIs by connecting to the flask app from a client.
"""
@classmethod
def setup_class(cls):
cls.api_url = '/api/'
cls.template_url = '/'
def test_app_exists(self):
assert current_app is not None
def get_api_headers(self, username, password):
return {
'Authorization': 'Basic ' + b64encode(
(username + ':' + password).encode('utf-8')).decode('utf-8'),
'Accept': 'application/json',
'Content-Type': 'application/json'
}
def test_get_formats(self, client):
"""Get the supported formats of the basis sets
"""
response = client.get(self.api_url + 'formats/')
assert response.status_code == 200
data = json.loads(response.get_data(as_text=True))
assert type(data) == dict
assert data['gamess_us'] == 'GAMESS US'
def test_get_references_formats(self, client):
"""Get the supported references formats
"""
response = client.get(self.api_url + 'reference_formats/')
assert response.status_code == 200
data = json.loads(response.get_data(as_text=True))
assert type(data) == dict
assert data['bib'] == 'BibTeX'
def test_get_metadata(self, client):
"""Get the bs metadata
"""
response = client.get(self.api_url + 'metadata/')
assert response.status_code == 200
data = json.loads(response.get_data(as_text=True))
assert type(data) == dict
# get the basis data of any basis set
basis_set_name = list(data.keys())[0]
basis_set = data[basis_set_name]
assert 'auxiliaries' in basis_set
assert 'functiontypes' in basis_set
assert 'latest_version' in basis_set
assert 'display_name' in basis_set
assert 'family' in basis_set
assert 'role' in basis_set
@pytest.mark.parametrize('bs_format,output',[
('gaussian94', 'Basis set: 3-21G'),
('json', '"name": "3-21G"')
])
def test_get_simple_basis(self, bs_format, output, client):
"""Get a simple basis set"""
bs_name = '3-21g'
url = self.api_url + 'basis/{}/format/{}/'.format(bs_name, bs_format)
response = client.get(url)
assert response.status_code == 200
data = response.get_data(as_text=True)
assert output in data
if bs_format == 'json':
assert json.loads(data)
def test_get_basis_elements(self, client):
"""Get a simple basis set"""
bs_name = '3-21g'
bs_format = 'gaussian94'
params = dict(elements='1,3')
url = self.api_url + 'basis/{}/format/{}/'.format(bs_name, bs_format)
response = client.get(url, query_string=params)
assert response.status_code == 200
data = response.get_data(as_text=True)
assert 'Basis set: 3-21G' in data
assert 'H' in data and 'Li' in data
@pytest.mark.parametrize('rf_format', get_ref_formats())
def test_get_references(self, rf_format, client):
"""Get references for a basis set with different formats"""
bs_name = '3-21g'
params = dict(elements='1,3')
url = self.api_url + 'references/{}/format/{}/'.format(bs_name, rf_format)
print(url)
response = client.get(url, query_string=params)
assert response.status_code == 200
data = response.get_data(as_text=True)
assert data
if rf_format == 'json':
assert json.loads(data)
# without elements
response = client.get(url)
assert response.status_code == 200
def test_get_notes(self, client):
"""Get notes of a basis set"""
bs_name = '3-21g'
url = self.api_url + 'notes/{}/'.format(bs_name)
response = client.get(url)
assert response.status_code == 200
assert response.get_data(as_text=True)
@pytest.mark.parametrize('family_name', ['pople', 'sto'])
def test_bs_family_notes(self, family_name, client):
"""Get basis set family notes"""
url = self.api_url + 'family_notes/{}/'.format(family_name)
response = client.get(url)
assert response.status_code == 200
assert response.get_data(as_text=True)
@pytest.mark.parametrize('bs_format', bse.get_formats().keys())
@pytest.mark.parametrize('archive_type', bse.get_archive_types().keys())
def test_download(self, bs_format, archive_type, client):
"""Get basis set family notes"""
ver = bse.version()
url = self.api_url + 'download/{}/{}/{}'.format(ver, bs_format, archive_type)
response = client.head(url)
assert response.status_code == 200
|
[
"basis_set_exchange.get_archive_types",
"basis_set_exchange.get_reference_formats",
"json.loads",
"basis_set_exchange.version",
"pytest.mark.parametrize",
"pytest.mark.usefixtures",
"basis_set_exchange.get_formats"
] |
[((259, 313), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""app"""', '"""client"""'], {'autouse': '(True)'}), "('app', 'client', autouse=True)\n", (282, 313), False, 'import pytest\n'), ((2303, 2417), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""bs_format,output"""', '[(\'gaussian94\', \'Basis set: 3-21G\'), (\'json\', \'"name": "3-21G"\')]'], {}), '(\'bs_format,output\', [(\'gaussian94\',\n \'Basis set: 3-21G\'), (\'json\', \'"name": "3-21G"\')])\n', (2326, 2417), False, 'import pytest\n'), ((4341, 4397), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""family_name"""', "['pople', 'sto']"], {}), "('family_name', ['pople', 'sto'])\n", (4364, 4397), False, 'import pytest\n'), ((4954, 4967), 'basis_set_exchange.version', 'bse.version', ([], {}), '()\n', (4965, 4967), True, 'import basis_set_exchange as bse\n'), ((227, 254), 'basis_set_exchange.get_reference_formats', 'bse.get_reference_formats', ([], {}), '()\n', (252, 254), True, 'import basis_set_exchange as bse\n'), ((2847, 2863), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (2857, 2863), False, 'import json\n'), ((3925, 3941), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (3935, 3941), False, 'import json\n'), ((4733, 4750), 'basis_set_exchange.get_formats', 'bse.get_formats', ([], {}), '()\n', (4748, 4750), True, 'import basis_set_exchange as bse\n'), ((4804, 4827), 'basis_set_exchange.get_archive_types', 'bse.get_archive_types', ([], {}), '()\n', (4825, 4827), True, 'import basis_set_exchange as bse\n')]
|
from flask import Flask, render_template, Response, request
from video import Video
import requests
app = Flask(__name__)
vid=Video(0)
geste = ""
data = ""
def gen():
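    # MJPEG streaming generator: grab camera frames, report the detected gesture
    # to the external API, and yield each frame as a multipart JPEG chunk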
global geste
while True:
frame, geste = vid.get_frame()
        requests.post('http://localhost:8090/getAPI', data={'geste': geste, 'position': '0123'})
yield (b'--frame\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
@app.route('/')
def video():
return render_template('index.html',geste = geste)
@app.route('/pptDisplay')
def ppt():
return render_template('pptDisplay.html')
@app.route('/video_feed')
def video_feed():
return Response(gen(),
mimetype='multipart/x-mixed-replace; boundary=frame')
@app.route('/geste')
def geste_route():
    # test endpoint: report a hard-coded gesture to the external API
    requests.post('http://localhost:8090/getAPI', data={'geste': "Main Ouverte", 'position': '0123'})
    return "geste"
@app.route('/data', methods=['GET', 'POST'])
def data_route():
    # store the last POSTed payload and return it on subsequent GETs
    global data
    if request.method == 'POST':
        data = request.get_data(as_text=True)
        return data
    if request.method == 'GET':
        return data
if __name__ == '__main__':
app.run(debug=False, port=8080)
|
[
"video.Video",
"requests.post",
"flask.Flask",
"flask.render_template"
] |
[((107, 122), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (112, 122), False, 'from flask import Flask, render_template, Response, request\n'), ((127, 135), 'video.Video', 'Video', (['(0)'], {}), '(0)\n', (132, 135), False, 'from video import Video\n'), ((477, 519), 'flask.render_template', 'render_template', (['"""index.html"""'], {'geste': 'geste'}), "('index.html', geste=geste)\n", (492, 519), False, 'from flask import Flask, render_template, Response, request\n'), ((570, 604), 'flask.render_template', 'render_template', (['"""pptDisplay.html"""'], {}), "('pptDisplay.html')\n", (585, 604), False, 'from flask import Flask, render_template, Response, request\n'), ((782, 883), 'requests.post', 'requests.post', (['"""http://localhost:8090/getAPI"""'], {'data': "{'geste': 'Main Ouverte', 'position': '0123'}"}), "('http://localhost:8090/getAPI', data={'geste': 'Main Ouverte',\n 'position': '0123'})\n", (795, 883), False, 'import requests\n'), ((251, 343), 'requests.post', 'requests.post', (['"""http://localhost:8090/getAPI"""'], {'data': "{'geste': geste, 'position': '0123'}"}), "('http://localhost:8090/getAPI', data={'geste': geste,\n 'position': '0123'})\n", (264, 343), False, 'import requests\n')]
|
__author__ = "Altertech Group, https://www.altertech.com/"
__copyright__ = "Copyright (C) 2012-2018 Altertech Group"
__license__ = "Apache License 2.0"
__version__ = "2.0.0"
__description__ = "Denkovi smartDEN IP-32IN"
__api__ = 4
__required__ = ['value', 'events']
__mods_required__ = []
__lpi_default__ = 'sensor'
__equipment__ = 'smartDEN IP-32IN'
__features__ = []
__config_help__ = [{
'name': 'host',
'help': 'module host/ip',
'type': 'str',
'required': True
}, {
'name': 'community',
'help': 'snmp default community (default: public)',
'type': 'str',
'required': False
}, {
'name': 'retries',
    'help': 'snmp retry attempts (default: 0)',
'type': 'int',
'required': False
}]
__get_help__ = []
__set_help__ = []
__help__ = """
PHI for Denkovi smartDEN IP-32IN
Sensors should have their port set to 1-16 for digital inputs, a1-a8 for analog
inputs, or t1-t8 for temperature inputs.
DIN events can be received via SNMP traps.
For production use it is recommended to install the Python "python3-netsnmp" module.
"""
try:
import netsnmp
except:
netsnmp = None
from eva.uc.drivers.phi.generic_phi import PHI as GenericPHI
from eva.uc.driverapi import log_traceback
from eva.uc.driverapi import get_timeout
from eva.uc.driverapi import handle_phi_event
from eva.tools import parse_host_port
import eva.uc.drivers.tools.snmp as snmp
import eva.traphandler
from eva.uc.driverapi import phi_constructor
class PHI(GenericPHI):
@phi_constructor
def __init__(self, **kwargs):
self.snmp_host, self.snmp_port = parse_host_port(
self.phi_cfg.get('host'), 161)
self.port_state = {}
if not self.snmp_host:
self.log_error('no host specified')
self.ready = False
self.community = self.phi_cfg.get('community') if self.phi_cfg.get(
'community') else 'public'
try:
            self.snmp_tries = int(self.phi_cfg.get('retries')) + 1
except:
self.snmp_tries = 1
self.oid_din = '.1.3.6.1.4.1.42505.7.2.1.1.7'
self.oid_ain = '.1.3.6.1.4.1.42505.7.2.2.1.6'
self.oid_temp = '.1.3.6.1.4.1.42505.7.2.3.1.7'
self.oid_name = '.1.3.6.1.4.1.42505.7.1.1.0'
self.oid_version = '.1.3.6.1.4.1.42505.7.1.2.0'
def start(self):
eva.traphandler.subscribe(self)
def stop(self):
eva.traphandler.unsubscribe(self)
def get(self, port=None, cfg=None, timeout=0):
if cfg:
host, snmp_port = parse_host_port(cfg.get('host'), 161)
community = cfg.get('community')
tries = cfg.get('retries')
try:
tries = int(tries) + 1
except:
tries = None
else:
host = None
community = None
tries = None
if not host:
host = self.snmp_host
snmp_port = self.snmp_port
if not community:
community = self.community
if tries is None: tries = self.snmp_tries
if not host or not community: return None
_timeout = timeout / tries
port = str(port)
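        # port naming: plain number = digital input (1-16), 'aN' = analog input
        # (a1-a8), 'tN' = temperature input (t1-t8)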
if port.startswith('a'):
oid = self.oid_ain
port_max = 8
port = port[1:]
ret = 1
elif port.startswith('t'):
oid = self.oid_temp
port_max = 8
port = port[1:]
ret = 2
else:
oid = self.oid_din
port_max = 16
ret = 0
try:
port = int(port)
except:
return None
if port < 1 or port > port_max: return None
if netsnmp:
try:
sess = netsnmp.Session(Version=2,
DestHost=host,
RemotePort=snmp_port,
Community=community,
Timeout=int(_timeout * 1000000),
Retries=self.snmp_tries - 1)
o = netsnmp.VarList('%s.%u' % (oid, port - 1))
result = sess.get(o)[0].decode()
except Exception as e:
self.log_error(e)
log_traceback()
return None
else:
result = snmp.get('%s.%u' % (oid, port - 1),
host,
snmp_port,
community,
_timeout,
tries - 1,
rf=int)
if ret == 0:
return result
elif ret == 1:
return int(result) / 100
elif ret == 2:
return None if result == '---' else result
def get_ports(self):
l = self.generate_port_list(port_max=16,
name='DIN port #{}',
description='digital input port #{}')
for i in range(1, 9):
l.append({
'port': 'a{}'.format(i),
'name': 'AIN port #{}'.format(i),
'description': 'analog input port #{}'.format(i)
})
for i in range(1, 9):
l.append({
'port': 't{}'.format(i),
'name': 'Temp port #{}'.format(i),
'description': 'temperature input port #{}'.format(i)
})
return l
def process_snmp_trap(self, host, data):
if host != self.snmp_host: return
if data.get('1.3.6.1.6.3.1.1.4.1.0') != '1.3.6.1.4.1.42505.7.0.1':
return
for i in range(16):
value = data.get('1.3.6.1.4.1.42505.7.2.1.1.7.{}'.format(i))
if value:
port = 'din{}'.format(i + 1)
self.log_debug('event {} = {}'.format(port, value))
self.port_state[port] = value
handle_phi_event(self, port, {port: value})
return
def test(self, cmd=None):
if cmd == 'module':
return 'default' if not netsnmp else 'netsnmp'
if cmd == 'self' and self.snmp_host is None: return 'OK'
if cmd == 'info' or cmd == 'self':
if netsnmp:
try:
sess = netsnmp.Session(Version=2,
DestHost=self.snmp_host,
RemotePort=self.snmp_port,
Community=self.community,
Timeout=int(get_timeout() * 1000000),
Retries=self.snmp_tries - 1)
except:
log_traceback()
sess = None
if netsnmp:
try:
name = sess.get(netsnmp.VarList(self.oid_name))[0].decode()
except:
log_traceback()
name = None
else:
name = snmp.get(self.oid_name,
self.snmp_host,
self.snmp_port,
self.community,
timeout=get_timeout(),
retries=self.snmp_tries - 1)
if not name: return 'FAILED'
if name and cmd == 'self': return 'OK'
if netsnmp:
try:
version = sess.get(netsnmp.VarList(
self.oid_version))[0].decode()
except:
version = None
else:
version = snmp.get(self.oid_version,
self.snmp_host,
self.snmp_port,
self.community,
timeout=get_timeout())
if not version: return 'FAILED'
return '%s %s' % (name.strip(), version.strip())
return {
'info': 'returns relay ip module name and version',
'module': 'current SNMP module'
}
|
[
"netsnmp.VarList",
"eva.uc.driverapi.log_traceback",
"eva.uc.drivers.tools.snmp.get",
"eva.uc.driverapi.handle_phi_event",
"eva.uc.driverapi.get_timeout"
] |
[((4313, 4410), 'eva.uc.drivers.tools.snmp.get', 'snmp.get', (["('%s.%u' % (oid, port - 1))", 'host', 'snmp_port', 'community', '_timeout', '(tries - 1)'], {'rf': 'int'}), "('%s.%u' % (oid, port - 1), host, snmp_port, community, _timeout, \n tries - 1, rf=int)\n", (4321, 4410), True, 'import eva.uc.drivers.tools.snmp as snmp\n'), ((4057, 4099), 'netsnmp.VarList', 'netsnmp.VarList', (["('%s.%u' % (oid, port - 1))"], {}), "('%s.%u' % (oid, port - 1))\n", (4072, 4099), False, 'import netsnmp\n'), ((5928, 5971), 'eva.uc.driverapi.handle_phi_event', 'handle_phi_event', (['self', 'port', '{port: value}'], {}), '(self, port, {port: value})\n', (5944, 5971), False, 'from eva.uc.driverapi import handle_phi_event\n'), ((4234, 4249), 'eva.uc.driverapi.log_traceback', 'log_traceback', ([], {}), '()\n', (4247, 4249), False, 'from eva.uc.driverapi import log_traceback\n'), ((6716, 6731), 'eva.uc.driverapi.log_traceback', 'log_traceback', ([], {}), '()\n', (6729, 6731), False, 'from eva.uc.driverapi import log_traceback\n'), ((6933, 6948), 'eva.uc.driverapi.log_traceback', 'log_traceback', ([], {}), '()\n', (6946, 6948), False, 'from eva.uc.driverapi import log_traceback\n'), ((7230, 7243), 'eva.uc.driverapi.get_timeout', 'get_timeout', ([], {}), '()\n', (7241, 7243), False, 'from eva.uc.driverapi import get_timeout\n'), ((7880, 7893), 'eva.uc.driverapi.get_timeout', 'get_timeout', ([], {}), '()\n', (7891, 7893), False, 'from eva.uc.driverapi import get_timeout\n'), ((6574, 6587), 'eva.uc.driverapi.get_timeout', 'get_timeout', ([], {}), '()\n', (6585, 6587), False, 'from eva.uc.driverapi import get_timeout\n'), ((6845, 6875), 'netsnmp.VarList', 'netsnmp.VarList', (['self.oid_name'], {}), '(self.oid_name)\n', (6860, 6875), False, 'import netsnmp\n'), ((7482, 7515), 'netsnmp.VarList', 'netsnmp.VarList', (['self.oid_version'], {}), '(self.oid_version)\n', (7497, 7515), False, 'import netsnmp\n')]
|
from datahandler import ConnData
import pandas
import os
#influx
import influxdb_client
from influxdb_client.client.write_api import SYNCHRONOUS
class DataLoader(ConnData):
def init(self, fileName):
data = pandas.read_csv(fileName)
write_api = self.influx_client.write_api(write_options=SYNCHRONOUS)
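        # build one InfluxDB point per CSV row and write it synchronously;
        # rows that fail to parse or write are reported and skipped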
        for i in range(len(data)):
try:
p = influxdb_client.Point("test_Mote")\
.tag("type", "multi_sensor_dev") \
.field("power", data["power"][i])\
.field("temp", data["temp"][i])\
.field("humidity", data["humidity"][i])\
.field("light", data["light"][i])\
.field("CO2", data["CO2"][i])\
.field("dust", data["dust"][i])\
.time(data["time"][i])
write_api.write(bucket=self.influx_bucket, org=self.influx_org, record=p)
except:
print("failed at entry: "+ str(i))
continue
return True
|
[
"pandas.read_csv",
"influxdb_client.Point"
] |
[((223, 248), 'pandas.read_csv', 'pandas.read_csv', (['fileName'], {}), '(fileName)\n', (238, 248), False, 'import pandas\n'), ((398, 432), 'influxdb_client.Point', 'influxdb_client.Point', (['"""test_Mote"""'], {}), "('test_Mote')\n", (419, 432), False, 'import influxdb_client\n')]
|
"""
Show differences between WT and STFT
"""
from scipy import signal
import matplotlib.pyplot as plt
import numpy as np
import pywt
waveletname = 'morl'
scales = range(1,200)
t = np.linspace(-1, 1, 200, endpoint=False)
sig = np.cos(2 * np.pi * 7 * t) + signal.gausspulse(t - 0.4, fc=2)
t = np.linspace(-1, 1, 50, endpoint=False)
sig1 = np.sin(2 * np.pi * 16 * t)+100*np.sin(2 * np.pi *0.1 * t)
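# embed the short higher-frequency burst sig1 into samples 50-99 of sig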
for i in range(50):
sig[50+i] = sig1[i] + sig[50+i]
coeff, freq = pywt.cwt(sig, scales, waveletname, 1)
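# coeff has shape (len(scales), len(sig)): one row of CWT coefficients per scale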
t = np.linspace(0, 200, 200, endpoint=False)
plt.plot(t,sig,color='k')
plt.title('Composite test signal')
plt.ylabel('Amplitude')
plt.xlabel('t [s]')
plt.figure()
plt.pcolormesh(coeff, cmap='plasma')
plt.title('Wavelet Transform (Morlet kernel)')
plt.ylabel('f [Hz]')
plt.xlabel('t [s]')
f, t, Zxx = signal.stft(sig, fs=400, nperseg=8)
t = t*400
plt.figure()
plt.pcolormesh(t, f, np.abs(Zxx), cmap='plasma')
plt.title('Short Time Fourier Transform (STFT)')
plt.ylabel('f [Hz]')
plt.xlabel('t [s]')
plt.show()
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"scipy.signal.gausspulse",
"matplotlib.pyplot.plot",
"numpy.abs",
"pywt.cwt",
"matplotlib.pyplot.figure",
"numpy.sin",
"numpy.linspace",
"matplotlib.pyplot.pcolormesh",
"numpy.cos",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"scipy.signal.stft"
] |
[((183, 222), 'numpy.linspace', 'np.linspace', (['(-1)', '(1)', '(200)'], {'endpoint': '(False)'}), '(-1, 1, 200, endpoint=False)\n', (194, 222), True, 'import numpy as np\n'), ((295, 333), 'numpy.linspace', 'np.linspace', (['(-1)', '(1)', '(50)'], {'endpoint': '(False)'}), '(-1, 1, 50, endpoint=False)\n', (306, 333), True, 'import numpy as np\n'), ((470, 507), 'pywt.cwt', 'pywt.cwt', (['sig', 'scales', 'waveletname', '(1)'], {}), '(sig, scales, waveletname, 1)\n', (478, 507), False, 'import pywt\n'), ((512, 552), 'numpy.linspace', 'np.linspace', (['(0)', '(200)', '(200)'], {'endpoint': '(False)'}), '(0, 200, 200, endpoint=False)\n', (523, 552), True, 'import numpy as np\n'), ((553, 580), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'sig'], {'color': '"""k"""'}), "(t, sig, color='k')\n", (561, 580), True, 'import matplotlib.pyplot as plt\n'), ((579, 610), 'matplotlib.pyplot.title', 'plt.title', (['"""Transformed signal"""'], {}), "('Transformed signal')\n", (588, 610), True, 'import matplotlib.pyplot as plt\n'), ((611, 634), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Amplitude"""'], {}), "('Amplitude')\n", (621, 634), True, 'import matplotlib.pyplot as plt\n'), ((635, 654), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""t [s]"""'], {}), "('t [s]')\n", (645, 654), True, 'import matplotlib.pyplot as plt\n'), ((655, 667), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (665, 667), True, 'import matplotlib.pyplot as plt\n'), ((668, 704), 'matplotlib.pyplot.pcolormesh', 'plt.pcolormesh', (['coeff'], {'cmap': '"""plasma"""'}), "(coeff, cmap='plasma')\n", (682, 704), True, 'import matplotlib.pyplot as plt\n'), ((705, 752), 'matplotlib.pyplot.title', 'plt.title', (['"""Wavelet Transform (Morlett kernel)"""'], {}), "('Wavelet Transform (Morlett kernel)')\n", (714, 752), True, 'import matplotlib.pyplot as plt\n'), ((753, 773), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""f [Hz]"""'], {}), "('f [Hz]')\n", (763, 773), True, 'import matplotlib.pyplot as plt\n'), ((774, 793), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""t [s]"""'], {}), "('t [s]')\n", (784, 793), True, 'import matplotlib.pyplot as plt\n'), ((806, 841), 'scipy.signal.stft', 'signal.stft', (['sig'], {'fs': '(400)', 'nperseg': '(8)'}), '(sig, fs=400, nperseg=8)\n', (817, 841), False, 'from scipy import signal\n'), ((853, 865), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (863, 865), True, 'import matplotlib.pyplot as plt\n'), ((915, 963), 'matplotlib.pyplot.title', 'plt.title', (['"""Short Time Fourier Transform (STFT)"""'], {}), "('Short Time Fourier Transform (STFT)')\n", (924, 963), True, 'import matplotlib.pyplot as plt\n'), ((964, 984), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""f [Hz]"""'], {}), "('f [Hz]')\n", (974, 984), True, 'import matplotlib.pyplot as plt\n'), ((985, 1004), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""t [s]"""'], {}), "('t [s]')\n", (995, 1004), True, 'import matplotlib.pyplot as plt\n'), ((1005, 1015), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1013, 1015), True, 'import matplotlib.pyplot as plt\n'), ((230, 255), 'numpy.cos', 'np.cos', (['(2 * np.pi * 7 * t)'], {}), '(2 * np.pi * 7 * t)\n', (236, 255), True, 'import numpy as np\n'), ((258, 290), 'scipy.signal.gausspulse', 'signal.gausspulse', (['(t - 0.4)'], {'fc': '(2)'}), '(t - 0.4, fc=2)\n', (275, 290), False, 'from scipy import signal\n'), ((342, 368), 'numpy.sin', 'np.sin', (['(2 * np.pi * 16 * t)'], {}), '(2 * np.pi * 16 * t)\n', (348, 368), True, 'import numpy as np\n'), ((887, 898), 
'numpy.abs', 'np.abs', (['Zxx'], {}), '(Zxx)\n', (893, 898), True, 'import numpy as np\n'), ((373, 400), 'numpy.sin', 'np.sin', (['(2 * np.pi * 0.1 * t)'], {}), '(2 * np.pi * 0.1 * t)\n', (379, 400), True, 'import numpy as np\n')]
|
import math
import numpy as np
#1-a: sum of the integers 1..1000
def function1():
value=0
for i in range(1,1000+1):
value+=i
return value
#1-b: sum of the integers 1..m
def function2(m):
value=0
for i in range(1,m+1):
value+=i
return value
#2: sum of sqrt(x)*sin(x) at x = i*pi/100 for i = 1..100
def function3():
value=0
for i in range(1,100+1):
value+=math.sqrt(i*math.pi/100)*math.sin(i*math.pi/100)
return value
print(function1())
print(function2(1000))
print(function3())
# 500500
# 500500
# 77.51389798916512
#oriented object programming
class physic_calculation:
def __init__(self):
pass
def function_a(self):
value1=0
for i in range(1,1000+1):
value1+=i
return value1
def function_b(self,m):
self.m=m
value2=0
for i in range(1,self.m+1):
value2+=i
return value2
def function_c(self):
value3=0
for i in range(1,100+1):
value3+=math.sqrt(i*math.pi/100)*math.sin(i*math.pi/100)
return value3
pc=physic_calculation()
print("---------------OOP----------------")
print(pc.function_a())
print(pc.function_b(1000))
print(pc.function_c())
# 500500
# 500500
# 77.51389798916512
print("---------------numpy----------------")
a=np.arange(1,26).reshape(5,5)
print(a)
# [[ 1 2 3 4 5]
# [ 6 7 8 9 10]
# [11 12 13 14 15]
# [16 17 18 19 20]
# [21 22 23 24 25]]
|
[
"math.sin",
"numpy.arange",
"math.sqrt"
] |
[((1229, 1245), 'numpy.arange', 'np.arange', (['(1)', '(26)'], {}), '(1, 26)\n', (1238, 1245), True, 'import numpy as np\n'), ((308, 336), 'math.sqrt', 'math.sqrt', (['(i * math.pi / 100)'], {}), '(i * math.pi / 100)\n', (317, 336), False, 'import math\n'), ((333, 360), 'math.sin', 'math.sin', (['(i * math.pi / 100)'], {}), '(i * math.pi / 100)\n', (341, 360), False, 'import math\n'), ((929, 957), 'math.sqrt', 'math.sqrt', (['(i * math.pi / 100)'], {}), '(i * math.pi / 100)\n', (938, 957), False, 'import math\n'), ((954, 981), 'math.sin', 'math.sin', (['(i * math.pi / 100)'], {}), '(i * math.pi / 100)\n', (962, 981), False, 'import math\n')]
|
from PyQt5.QtWidgets import QWidget
from PyQt5.QtGui import QPainter, QBrush, QPen, QColor
from PyQt5.QtCore import Qt
import math
class QBarPlot(QWidget):
def __init__(self):
super().__init__()
self.horizontal_margin = 10
self.vertical_margin = 10
self.data = None
self.data_index = None
self.data_color = None
self.title = None
self.title_size = 32
self.title_margin = 5
self.hlines = None
self.hlines_style = None
self.ymin = None
self.ymax = None
self.x_label_height = 50
# Set window background color
self.setAutoFillBackground(True)
palette = self.palette()
palette.setColor(self.backgroundRole(), Qt.white)
self.setPalette(palette)
def paintEvent(self, event):
qp = QPainter(self)
try:
num_bar = len(self.data)
except:
num_bar = 0
if self.data_index is not None and len(self.data_index) != len(self.data):
raise ValueError("len(data_index) != len(data)") # TODO
size = self.size()
widget_width = size.width()
widget_height = size.height()
if num_bar > 0:
plot_area_width = max(0, widget_width - 2 * self.horizontal_margin)
plot_area_height = max(0, widget_height - 2 * self.vertical_margin)
# Set antialiasing ################################################
# Set anti-aliasing See https://wiki.python.org/moin/PyQt/Painting%20and%20clipping%20demonstration
qp.setRenderHint(QPainter.Antialiasing)
# Set Font ########################################################
font = qp.font()
font.setPointSize(self.title_size)
qp.setFont(font)
# Draw title ######################################################
title_x_start = self.title_margin
title_y_start = self.title_margin
title_width = widget_width - 2 * self.title_margin
title_height = self.title_size
title_x_end = title_x_start + title_width
title_y_end = title_y_start + title_height
qp.drawText(title_x_start, title_y_start, title_width, title_height, Qt.AlignCenter, self.title)
# Prepare coordinates transform ###################################
filtered_data = [data_value for data_value in self.data if data_value is not None]
self.top_ordinate_value = max(filtered_data) if self.ymax is None else self.ymax
self.bottom_ordinate_value = min(filtered_data) if self.ymin is None else self.ymin
plot_area_x_start = self.horizontal_margin
plot_area_x_end = widget_width - self.horizontal_margin
plot_area_width = plot_area_x_end - plot_area_x_start
self.plot_area_y_start = title_y_end + self.title_margin + self.vertical_margin
self.plot_area_y_end = widget_height - self.vertical_margin - self.x_label_height
plot_area_height = self.plot_area_y_end - self.plot_area_y_start
brush = QBrush(Qt.white, Qt.SolidPattern)
qp.setBrush(brush)
qp.drawRect(plot_area_x_start, self.plot_area_y_start, plot_area_width, plot_area_height) # TODO
# Set Pen and Brush ###############################################
#see https://hci.isir.upmc.fr/wp-content/uploads/2018/03/PyQt-Dessin.pdf
#pen = QPen(Qt.black, 3, Qt.SolidLine)
pen = QPen()
pen.setStyle(Qt.SolidLine) # Qt.DotLine Qt.DashLine Qt.DashDotLine
pen.setWidth(2)
pen.setBrush(Qt.black) # Qt.green
pen.setCapStyle(Qt.RoundCap)
pen.setJoinStyle(Qt.RoundJoin)
qp.setPen(pen)
# See https://en.wikipedia.org/wiki/Tango_Desktop_Project#Palette and https://web.archive.org/web/20160202102503/http://tango.freedesktop.org/Tango_Icon_Theme_Guidelines
black_pen = QPen(Qt.black, Qt.SolidLine)
green_pen = QPen(QColor("#4e9a06"), Qt.SolidLine)
yellow_pen = QPen(QColor("#c4a000"), Qt.SolidLine)
red_pen = QPen(QColor("#a40000"), Qt.SolidLine)
white_brush = QBrush(Qt.white, Qt.SolidPattern)
green_brush = QBrush(QColor("#73d216"), Qt.SolidPattern)
yellow_brush = QBrush(QColor("#edd400"), Qt.SolidPattern)
red_brush = QBrush(QColor("#cc0000"), Qt.SolidPattern)
#green_brush = QBrush(QColor("#8ae234"), Qt.SolidPattern)
#yellow_brush = QBrush(QColor("#fce94f"), Qt.SolidPattern)
#red_brush = QBrush(QColor("#ef2929"), Qt.SolidPattern)
# Draw horizontal lines ###########################################
if self.hlines is not None:
for hline_index, hline_value in enumerate(self.hlines):
hline_position = self.ordinateTransform(hline_value)
if hline_position is not None:
try:
hline_style = self.hlines_style[hline_index]
if hline_style == ":":
pen = qp.pen()
pen.setStyle(Qt.DotLine)
qp.setPen(pen)
else:
pen = qp.pen()
pen.setStyle(Qt.SolidLine)
qp.setPen(pen)
except:
pen = qp.pen()
pen.setStyle(Qt.SolidLine)
qp.setPen(pen)
qp.drawLine(plot_area_x_start, hline_position, plot_area_x_end, hline_position) # x_start, y_start, x_end, y_end
# Draw bars #######################################################
pen = qp.pen()
pen.setStyle(Qt.SolidLine)
qp.setPen(pen)
if self.data_color is None:
self.data_color = [None for data_value in self.data]
for data_index, (data_value, data_color) in enumerate(zip(self.data, self.data_color)):
if data_value is not None:
if data_color == "green":
qp.setBrush(green_brush)
qp.setPen(green_pen)
elif data_color == "yellow":
qp.setBrush(yellow_brush)
qp.setPen(yellow_pen)
elif data_color == "red":
qp.setBrush(red_brush)
qp.setPen(red_pen)
else:
qp.setBrush(white_brush)
qp.setPen(black_pen)
x_length = math.floor(plot_area_width / num_bar)
x_start = self.horizontal_margin + data_index * x_length
                    y_start = self.ordinateTransform(data_value)  # None if data_value is outside the plotted range (handled below)
if y_start is None:
if data_value > self.bottom_ordinate_value:
y_start = self.plot_area_y_start
else:
y_start = self.plot_area_y_end
y_end = self.ordinateTransform(0)
if y_end is None:
y_end = self.plot_area_y_end
y_length = y_end - y_start
# Draw bar
qp.drawRect(x_start, y_start, x_length, y_length)
def ordinateTransform(self, data_ordinate):
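        # map a data value to a pixel y coordinate inside the plot area;
        # returns None when the value falls outside the displayed range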
# self.top_ordinate_value -> self.plot_area_y_start
# self.bottom_ordinate_value -> self.plot_area_y_end
if self.bottom_ordinate_value <= data_ordinate <= self.top_ordinate_value:
data_ordinate_ratio = (self.top_ordinate_value - data_ordinate) / (self.top_ordinate_value - self.bottom_ordinate_value)
data_ordinate_position = self.plot_area_y_start + data_ordinate_ratio * (self.plot_area_y_end - self.plot_area_y_start)
return math.floor(data_ordinate_position)
else:
return None
|
[
"PyQt5.QtGui.QPainter",
"PyQt5.QtGui.QColor",
"math.floor",
"PyQt5.QtGui.QPen",
"PyQt5.QtGui.QBrush"
] |
[((854, 868), 'PyQt5.QtGui.QPainter', 'QPainter', (['self'], {}), '(self)\n', (862, 868), False, 'from PyQt5.QtGui import QPainter, QBrush, QPen, QColor\n'), ((3187, 3220), 'PyQt5.QtGui.QBrush', 'QBrush', (['Qt.white', 'Qt.SolidPattern'], {}), '(Qt.white, Qt.SolidPattern)\n', (3193, 3220), False, 'from PyQt5.QtGui import QPainter, QBrush, QPen, QColor\n'), ((3612, 3618), 'PyQt5.QtGui.QPen', 'QPen', ([], {}), '()\n', (3616, 3618), False, 'from PyQt5.QtGui import QPainter, QBrush, QPen, QColor\n'), ((4110, 4138), 'PyQt5.QtGui.QPen', 'QPen', (['Qt.black', 'Qt.SolidLine'], {}), '(Qt.black, Qt.SolidLine)\n', (4114, 4138), False, 'from PyQt5.QtGui import QPainter, QBrush, QPen, QColor\n'), ((4351, 4384), 'PyQt5.QtGui.QBrush', 'QBrush', (['Qt.white', 'Qt.SolidPattern'], {}), '(Qt.white, Qt.SolidPattern)\n', (4357, 4384), False, 'from PyQt5.QtGui import QPainter, QBrush, QPen, QColor\n'), ((8257, 8291), 'math.floor', 'math.floor', (['data_ordinate_position'], {}), '(data_ordinate_position)\n', (8267, 8291), False, 'import math\n'), ((4168, 4185), 'PyQt5.QtGui.QColor', 'QColor', (['"""#4e9a06"""'], {}), "('#4e9a06')\n", (4174, 4185), False, 'from PyQt5.QtGui import QPainter, QBrush, QPen, QColor\n'), ((4231, 4248), 'PyQt5.QtGui.QColor', 'QColor', (['"""#c4a000"""'], {}), "('#c4a000')\n", (4237, 4248), False, 'from PyQt5.QtGui import QPainter, QBrush, QPen, QColor\n'), ((4291, 4308), 'PyQt5.QtGui.QColor', 'QColor', (['"""#a40000"""'], {}), "('#a40000')\n", (4297, 4308), False, 'from PyQt5.QtGui import QPainter, QBrush, QPen, QColor\n'), ((4418, 4435), 'PyQt5.QtGui.QColor', 'QColor', (['"""#73d216"""'], {}), "('#73d216')\n", (4424, 4435), False, 'from PyQt5.QtGui import QPainter, QBrush, QPen, QColor\n'), ((4488, 4505), 'PyQt5.QtGui.QColor', 'QColor', (['"""#edd400"""'], {}), "('#edd400')\n", (4494, 4505), False, 'from PyQt5.QtGui import QPainter, QBrush, QPen, QColor\n'), ((4555, 4572), 'PyQt5.QtGui.QColor', 'QColor', (['"""#cc0000"""'], {}), "('#cc0000')\n", (4561, 4572), False, 'from PyQt5.QtGui import QPainter, QBrush, QPen, QColor\n'), ((6926, 6963), 'math.floor', 'math.floor', (['(plot_area_width / num_bar)'], {}), '(plot_area_width / num_bar)\n', (6936, 6963), False, 'import math\n')]
|
#!/usr/bin/python3
import sys
import os
import signal
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from replay_sorcery import ReplaySorcery
signal.signal(signal.SIGINT, signal.SIG_DFL)
app = QApplication([])
dir_name = os.path.dirname(sys.argv[0])
full_path = os.path.abspath(dir_name)
icon_filename = 'icon.png'
icon_path = os.path.join(full_path, icon_filename)
class ReplaySorceryGUI(QWidget):
def __init__(self, debug):
self.debug = debug
self.rs = ReplaySorcery(self.debug)
QWidget.__init__(self)
self.setWindowTitle('ReplaySorceryGUI')
self.setWindowIcon(QIcon(icon_path))
self.setMinimumWidth(300)
app_layout = QHBoxLayout()
# left side
left_layout = QVBoxLayout()
left_layout.setAlignment(Qt.AlignCenter)
self.icon = QPixmap(icon_path)
self.icon = self.icon.scaled(92, 92)
self.icon_label = QLabel()
self.icon_label.setPixmap(self.icon)
self.icon_label.setAlignment(Qt.AlignCenter)
left_layout.addWidget(self.icon_label)
self.instructions_text = QLabel()
self.instructions_text.setText("Ctrl+Super+R to save\nthe last 30 seconds\n")
left_layout.addWidget(self.instructions_text)
self.status_text = QLabel()
self.update_status_text()
left_layout.addWidget(self.status_text)
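        # refresh the displayed replay-sorcery status once per second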
self.timer = QTimer(self)
self.timer.timeout.connect(self.update_status_text)
self.timer.start(1000)
button_size = QSize(150, 40)
buttons = []
turn_on_button = QPushButton("Turn on")
buttons.append(turn_on_button)
turn_on_button.clicked.connect(self.turn_on_action)
turn_off_button = QPushButton("Turn off")
buttons.append(turn_off_button)
turn_off_button.clicked.connect(self.turn_off_action)
refresh_button = QPushButton("Refresh")
buttons.append(refresh_button)
refresh_button.clicked.connect(self.refresh_action)
quit_button = QPushButton("Quit")
buttons.append(quit_button)
quit_button.clicked.connect(self.quit_action)
for button in buttons:
button.setFixedSize(button_size)
left_layout.addWidget(button)
# right side
right_layout = QVBoxLayout()
right_layout.setAlignment(Qt.AlignCenter)
# both sides
app_layout.addLayout(left_layout)
app_layout.addLayout(right_layout)
self.setLayout(app_layout)
def update_status_text(self):
text_string = "ReplaySorcery: %s" % self.rs.current_status["name"]
self.status_text.setText(text_string)
color_string = 'color: %s' % self.rs.current_status["color"]
self.status_text.setStyleSheet(color_string)
self.rs.get_status()
def turn_on_action(self):
self.rs.turn_on()
def turn_off_action(self):
self.rs.turn_off()
def refresh_action(self):
self.rs.get_status()
def quit_action(self):
if self.debug > 0:
print("Exiting ReplaySorceryGUI")
sys.exit()
window = ReplaySorceryGUI(1)
window.show()
if window.debug > 0:
print("ReplaySorceryGUI started")
app.exec_()
|
[
"os.path.abspath",
"os.path.dirname",
"replay_sorcery.ReplaySorcery",
"signal.signal",
"os.path.join",
"sys.exit"
] |
[((182, 226), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal.SIG_DFL'], {}), '(signal.SIGINT, signal.SIG_DFL)\n', (195, 226), False, 'import signal\n'), ((263, 291), 'os.path.dirname', 'os.path.dirname', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (278, 291), False, 'import os\n'), ((312, 337), 'os.path.abspath', 'os.path.abspath', (['dir_name'], {}), '(dir_name)\n', (327, 337), False, 'import os\n'), ((377, 415), 'os.path.join', 'os.path.join', (['full_path', 'icon_filename'], {}), '(full_path, icon_filename)\n', (389, 415), False, 'import os\n'), ((522, 547), 'replay_sorcery.ReplaySorcery', 'ReplaySorcery', (['self.debug'], {}), '(self.debug)\n', (535, 547), False, 'from replay_sorcery import ReplaySorcery\n'), ((3027, 3037), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3035, 3037), False, 'import sys\n')]
|
import torch
import numpy as np
from time import time
from os.path import join
import lpips
from Hessian.GAN_hessian_compute import hessian_compute
#%%
ImDist = lpips.LPIPS(net='squeeze').cuda()
use_gpu = True if torch.cuda.is_available() else False
model = torch.hub.load('facebookresearch/pytorch_GAN_zoo:hub',
'PGAN', model_name='celebAHQ-256',
pretrained=True, useGPU=use_gpu)
num_images = 1
noise, _ = model.buildNoiseData(num_images)
noise.requires_grad_(True)
# with torch.no_grad():
generated_images = model.test(noise)
#%%
img = model.avgG.forward(noise)
#%%
class PGGAN_wrapper(): # nn.Module
def __init__(self, PGGAN, ):
self.PGGAN = PGGAN
def visualize(self, code, scale=1):
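        # rescale generator output from [-1, 1] to [0, 1] (the image range expected
        # by the perceptual distance metric)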
imgs = self.PGGAN.forward(code,) # Matlab version default to 0.7
return torch.clamp((imgs + 1.0) / 2.0, 0, 1) * scale
G = PGGAN_wrapper(model.avgG)
#%%
feat = noise.detach().clone().cuda()
EPS = 1E-2
T0 = time()
eva_BI, evc_BI, H_BI = hessian_compute(G, feat, ImDist, hessian_method="BackwardIter")
print("%.2f sec" % (time() - T0)) # 95.7 sec
T0 = time()
eva_FI, evc_FI, H_FI = hessian_compute(G, feat, ImDist, hessian_method="ForwardIter")
print("%.2f sec" % (time() - T0)) # 61.8 sec
T0 = time()
eva_BP, evc_BP, H_BP = hessian_compute(G, feat, ImDist, hessian_method="BP")
print("%.2f sec" % (time() - T0)) # 95.4 sec
#%%
print("Correlation of Flattened Hessian matrix BP vs BackwardIter %.3f" % np.corrcoef(H_BP.flatten(), H_BI.flatten())[0, 1])
print("Correlation of Flattened Hessian matrix BP vs ForwardIter %.3f" %
np.corrcoef(H_BP.flatten(), H_FI.flatten())[0, 1])
print("Correlation of Flattened Hessian matrix ForwardIter vs BackwardIter %.3f"%
np.corrcoef(H_FI.flatten(), H_BI.flatten())[0, 1])
# Correlation of Flattened Hessian matrix BP vs BackwardIter 1.000
# Correlation of Flattened Hessian matrix BP vs ForwardIter 0.877
# Correlation of Flattened Hessian matrix ForwardIter vs BackwardIter 0.877
#%%
H_col = []
for EPS in [1E-5, 1E-4, 1E-3, 1E-2, 1E-1, 1, 2, 10]:
T0 = time()
eva_FI, evc_FI, H_FI = hessian_compute(G, feat, ImDist, hessian_method="ForwardIter", EPS=EPS)
print("%.2f sec" % (time() - T0)) # 325.83 sec
print("EPS %.1e Correlation of Flattened Hessian matrix BP vs ForwardIter %.3f" % (EPS, np.corrcoef(H_BP.flatten(), H_FI.flatten())[0, 1]))
H_col.append((eva_FI, evc_FI, H_FI))
# EPS 1.0e-05 Correlation of Flattened Hessian matrix BP vs ForwardIter 1.000
# EPS 1.0e-04 Correlation of Flattened Hessian matrix BP vs ForwardIter 0.999
# EPS 1.0e-03 Correlation of Flattened Hessian matrix BP vs ForwardIter 0.989
# EPS 1.0e-02 Correlation of Flattened Hessian matrix BP vs ForwardIter 0.901
# EPS 1.0e-01 Correlation of Flattened Hessian matrix BP vs ForwardIter 0.398
# EPS 1.0e+00 Correlation of Flattened Hessian matrix BP vs ForwardIter 0.046
# EPS 2.0e+00 Correlation of Flattened Hessian matrix BP vs ForwardIter 0.008
# EPS 1.0e+01 Correlation of Flattened Hessian matrix BP vs ForwardIter -0.003
#%%
#%% Visualize Spectra
figdir = r"E:\OneDrive - Washington University in St. Louis\Hessian_summary\PGGAN"
savedir = r"E:\Cluster_Backup\PGGAN"
# eva_col = []
# evc_col = []
# for triali in tqdm(range(400)):
# data = np.load(join(savedir, "Hessian_cmp_%d.npz" % triali))
# eva_BP = data["eva_BP"]
# evc_BP = data["evc_BP"]
# eva_col.append(eva_BP)
# evc_col.append(evc_BP)
#
# eva_col = np.array(eva_col)
from Hessian.hessian_analysis_tools import plot_spectra, compute_hess_corr, plot_consistency_example, plot_consistentcy_mat, average_H, scan_hess_npz
eva_col, evc_col, feat_col, meta = scan_hess_npz(savedir, r"Hessian_cmp_(\d*).npz", featkey="feat")
feat_col = np.array(feat_col).squeeze()
H_avg, eva_avg, evc_avg = average_H(eva_col, evc_col)
np.savez(join(figdir, "H_avg_%s.npz"%"PGGAN"), H_avg=H_avg, eva_avg=eva_avg, evc_avg=evc_avg, feats=feat_col)
#%%
fig = plot_spectra(eva_col, figdir=figdir, titstr="PGGAN", )
#%%
corr_mat_log, corr_mat_lin = compute_hess_corr(eva_col, evc_col, figdir=figdir, use_cuda=True)
# without cuda 12:11 mins, with cuda 8:21
# corr_mat_log, corr_mat_lin = compute_hess_corr(eva_col, evc_col, figdir=figdir, use_cuda=False)
#%%
fig1, fig2 = plot_consistentcy_mat(corr_mat_log, corr_mat_lin, figdir=figdir, titstr="PGGAN")
#%%
fig3 = plot_consistency_example(eva_col, evc_col, figdir=figdir, nsamp=5, titstr="PGGAN",)
fig3.show()
|
[
"Hessian.hessian_analysis_tools.plot_consistency_example",
"Hessian.hessian_analysis_tools.average_H",
"Hessian.GAN_hessian_compute.hessian_compute",
"Hessian.hessian_analysis_tools.plot_spectra",
"Hessian.hessian_analysis_tools.scan_hess_npz",
"time.time",
"torch.clamp",
"torch.cuda.is_available",
"lpips.LPIPS",
"numpy.array",
"Hessian.hessian_analysis_tools.plot_consistentcy_mat",
"torch.hub.load",
"os.path.join",
"Hessian.hessian_analysis_tools.compute_hess_corr"
] |
[((259, 386), 'torch.hub.load', 'torch.hub.load', (['"""facebookresearch/pytorch_GAN_zoo:hub"""', '"""PGAN"""'], {'model_name': '"""celebAHQ-256"""', 'pretrained': '(True)', 'useGPU': 'use_gpu'}), "('facebookresearch/pytorch_GAN_zoo:hub', 'PGAN', model_name=\n 'celebAHQ-256', pretrained=True, useGPU=use_gpu)\n", (273, 386), False, 'import torch\n'), ((975, 981), 'time.time', 'time', ([], {}), '()\n', (979, 981), False, 'from time import time\n'), ((1005, 1068), 'Hessian.GAN_hessian_compute.hessian_compute', 'hessian_compute', (['G', 'feat', 'ImDist'], {'hessian_method': '"""BackwardIter"""'}), "(G, feat, ImDist, hessian_method='BackwardIter')\n", (1020, 1068), False, 'from Hessian.GAN_hessian_compute import hessian_compute\n'), ((1120, 1126), 'time.time', 'time', ([], {}), '()\n', (1124, 1126), False, 'from time import time\n'), ((1150, 1212), 'Hessian.GAN_hessian_compute.hessian_compute', 'hessian_compute', (['G', 'feat', 'ImDist'], {'hessian_method': '"""ForwardIter"""'}), "(G, feat, ImDist, hessian_method='ForwardIter')\n", (1165, 1212), False, 'from Hessian.GAN_hessian_compute import hessian_compute\n'), ((1264, 1270), 'time.time', 'time', ([], {}), '()\n', (1268, 1270), False, 'from time import time\n'), ((1294, 1347), 'Hessian.GAN_hessian_compute.hessian_compute', 'hessian_compute', (['G', 'feat', 'ImDist'], {'hessian_method': '"""BP"""'}), "(G, feat, ImDist, hessian_method='BP')\n", (1309, 1347), False, 'from Hessian.GAN_hessian_compute import hessian_compute\n'), ((3660, 3724), 'Hessian.hessian_analysis_tools.scan_hess_npz', 'scan_hess_npz', (['savedir', '"""Hessian_cmp_(\\\\d*).npz"""'], {'featkey': '"""feat"""'}), "(savedir, 'Hessian_cmp_(\\\\d*).npz', featkey='feat')\n", (3673, 3724), False, 'from Hessian.hessian_analysis_tools import plot_spectra, compute_hess_corr, plot_consistency_example, plot_consistentcy_mat, average_H, scan_hess_npz\n'), ((3790, 3817), 'Hessian.hessian_analysis_tools.average_H', 'average_H', (['eva_col', 'evc_col'], {}), '(eva_col, evc_col)\n', (3799, 3817), False, 'from Hessian.hessian_analysis_tools import plot_spectra, compute_hess_corr, plot_consistency_example, plot_consistentcy_mat, average_H, scan_hess_npz\n'), ((3938, 3990), 'Hessian.hessian_analysis_tools.plot_spectra', 'plot_spectra', (['eva_col'], {'figdir': 'figdir', 'titstr': '"""PGGAN"""'}), "(eva_col, figdir=figdir, titstr='PGGAN')\n", (3950, 3990), False, 'from Hessian.hessian_analysis_tools import plot_spectra, compute_hess_corr, plot_consistency_example, plot_consistentcy_mat, average_H, scan_hess_npz\n'), ((4026, 4091), 'Hessian.hessian_analysis_tools.compute_hess_corr', 'compute_hess_corr', (['eva_col', 'evc_col'], {'figdir': 'figdir', 'use_cuda': '(True)'}), '(eva_col, evc_col, figdir=figdir, use_cuda=True)\n', (4043, 4091), False, 'from Hessian.hessian_analysis_tools import plot_spectra, compute_hess_corr, plot_consistency_example, plot_consistentcy_mat, average_H, scan_hess_npz\n'), ((4249, 4334), 'Hessian.hessian_analysis_tools.plot_consistentcy_mat', 'plot_consistentcy_mat', (['corr_mat_log', 'corr_mat_lin'], {'figdir': 'figdir', 'titstr': '"""PGGAN"""'}), "(corr_mat_log, corr_mat_lin, figdir=figdir, titstr='PGGAN'\n )\n", (4270, 4334), False, 'from Hessian.hessian_analysis_tools import plot_spectra, compute_hess_corr, plot_consistency_example, plot_consistentcy_mat, average_H, scan_hess_npz\n'), ((4341, 4428), 'Hessian.hessian_analysis_tools.plot_consistency_example', 'plot_consistency_example', (['eva_col', 'evc_col'], {'figdir': 'figdir', 'nsamp': '(5)', 'titstr': 
'"""PGGAN"""'}), "(eva_col, evc_col, figdir=figdir, nsamp=5, titstr=\n 'PGGAN')\n", (4365, 4428), False, 'from Hessian.hessian_analysis_tools import plot_spectra, compute_hess_corr, plot_consistency_example, plot_consistentcy_mat, average_H, scan_hess_npz\n'), ((214, 239), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (237, 239), False, 'import torch\n'), ((2079, 2085), 'time.time', 'time', ([], {}), '()\n', (2083, 2085), False, 'from time import time\n'), ((2113, 2184), 'Hessian.GAN_hessian_compute.hessian_compute', 'hessian_compute', (['G', 'feat', 'ImDist'], {'hessian_method': '"""ForwardIter"""', 'EPS': 'EPS'}), "(G, feat, ImDist, hessian_method='ForwardIter', EPS=EPS)\n", (2128, 2184), False, 'from Hessian.GAN_hessian_compute import hessian_compute\n'), ((3827, 3865), 'os.path.join', 'join', (['figdir', "('H_avg_%s.npz' % 'PGGAN')"], {}), "(figdir, 'H_avg_%s.npz' % 'PGGAN')\n", (3831, 3865), False, 'from os.path import join\n'), ((162, 188), 'lpips.LPIPS', 'lpips.LPIPS', ([], {'net': '"""squeeze"""'}), "(net='squeeze')\n", (173, 188), False, 'import lpips\n'), ((3735, 3753), 'numpy.array', 'np.array', (['feat_col'], {}), '(feat_col)\n', (3743, 3753), True, 'import numpy as np\n'), ((841, 878), 'torch.clamp', 'torch.clamp', (['((imgs + 1.0) / 2.0)', '(0)', '(1)'], {}), '((imgs + 1.0) / 2.0, 0, 1)\n', (852, 878), False, 'import torch\n'), ((1089, 1095), 'time.time', 'time', ([], {}), '()\n', (1093, 1095), False, 'from time import time\n'), ((1233, 1239), 'time.time', 'time', ([], {}), '()\n', (1237, 1239), False, 'from time import time\n'), ((1368, 1374), 'time.time', 'time', ([], {}), '()\n', (1372, 1374), False, 'from time import time\n'), ((2209, 2215), 'time.time', 'time', ([], {}), '()\n', (2213, 2215), False, 'from time import time\n')]
|
from django.contrib import admin
from django.urls import include, path
from django.conf.urls.static import static
from django.conf import settings
from django.conf.urls import handler404, handler500
from . import views
handler404 = "foodgram.views.page_not_found"
handler500 = "foodgram.views.server_error"
urlpatterns = [
path('admin/', admin.site.urls),
path("auth/", include("users.urls")),
path("auth/", include("django.contrib.auth.urls")),
path("favorites/", include("favorites.urls")),
path("followings/", include("follows.urls")),
path("shopping-list/", include("shopping_list.urls")),
path("", include("recipes.urls")),
]+ static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG: # new
urlpatterns += static(
settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT
)
|
[
"django.conf.urls.static.static",
"django.urls.path",
"django.urls.include"
] |
[((666, 729), 'django.conf.urls.static.static', 'static', (['settings.STATIC_URL'], {'document_root': 'settings.STATIC_ROOT'}), '(settings.STATIC_URL, document_root=settings.STATIC_ROOT)\n', (672, 729), False, 'from django.conf.urls.static import static\n'), ((776, 837), 'django.conf.urls.static.static', 'static', (['settings.MEDIA_URL'], {'document_root': 'settings.MEDIA_ROOT'}), '(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n', (782, 837), False, 'from django.conf.urls.static import static\n'), ((333, 364), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (337, 364), False, 'from django.urls import include, path\n'), ((384, 405), 'django.urls.include', 'include', (['"""users.urls"""'], {}), "('users.urls')\n", (391, 405), False, 'from django.urls import include, path\n'), ((426, 461), 'django.urls.include', 'include', (['"""django.contrib.auth.urls"""'], {}), "('django.contrib.auth.urls')\n", (433, 461), False, 'from django.urls import include, path\n'), ((487, 512), 'django.urls.include', 'include', (['"""favorites.urls"""'], {}), "('favorites.urls')\n", (494, 512), False, 'from django.urls import include, path\n'), ((539, 562), 'django.urls.include', 'include', (['"""follows.urls"""'], {}), "('follows.urls')\n", (546, 562), False, 'from django.urls import include, path\n'), ((592, 621), 'django.urls.include', 'include', (['"""shopping_list.urls"""'], {}), "('shopping_list.urls')\n", (599, 621), False, 'from django.urls import include, path\n'), ((637, 660), 'django.urls.include', 'include', (['"""recipes.urls"""'], {}), "('recipes.urls')\n", (644, 660), False, 'from django.urls import include, path\n')]
|
from tkinter import *
import sys
class Panel(Frame):
def __init__(self, master, *args, **kw):
super().__init__(master, *args, **kw)
def hide(self):
self.grid_forget()
def show(self):
self.grid()
# http://tkinter.unpythonic.net/wiki/VerticalScrolledFrame
#https://code.activestate.com/recipes/578894-mousewheel-binding-to-scrolling-area-tkinter-multi/
class ScrollingArea:
def __init__(self, root, factor = 2):
self.activeArea = None
if type(factor) == int:
self.factor = factor
else:
raise Exception("Factor must be an integer.")
if sys.platform.startswith('linux') :
root.bind_all('<4>', self._on_mouse_wheel, add='+')
root.bind_all('<5>', self._on_mouse_wheel, add='+')
else:
root.bind_all("<MouseWheel>", self._on_mouse_wheel, add='+')
def _on_mouse_wheel(self,event):
if self.activeArea and self.activeArea != self:
self.activeArea._on_mouse_wheel(event)
def _mouse_wheel_bind(self, widget):
self.activeArea = widget
def _mouse_wheel_unbind(self):
self.activeArea = None
def build_function__on_mouse_wheel(self, widget, orient, factor = 1):
view_command = getattr(widget, orient+'view')
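        # X11 reports wheel motion as Button-4/Button-5 events; Windows delivers
        # <MouseWheel> with a delta in multiples of 120, macOS with small signed deltas.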
if sys.platform.startswith('linux'):
def _on_mouse_wheel(event):
if event.num == 4:
view_command("scroll",(-1)*factor,"units" )
elif event.num == 5:
view_command("scroll",factor,"units" )
elif sys.platform == 'win32' or sys.platform == 'cygwin':
def _on_mouse_wheel(event):
view_command("scroll",(-1)*int((event.delta/120)*factor),"units" )
elif sys.platform == 'darwin':
def _on_mouse_wheel(event):
view_command("scroll",event.delta,"units" )
return _on_mouse_wheel
def add_scrolling(self, scrollingArea, xscrollbar=None, yscrollbar=None):
        if yscrollbar:
            scrollingArea.configure(yscrollcommand=yscrollbar.set)
            yscrollbar['command'] = scrollingArea.yview
        if xscrollbar:
            scrollingArea.configure(xscrollcommand=xscrollbar.set)
            xscrollbar['command'] = scrollingArea.xview
scrollingArea.bind('<Enter>',lambda event: self._mouse_wheel_bind(scrollingArea))
scrollingArea.bind('<Leave>', lambda event: self._mouse_wheel_unbind())
if xscrollbar and not hasattr(xscrollbar, '_on_mouse_wheel'):
xscrollbar._on_mouse_wheel = self.build_function__on_mouse_wheel(scrollingArea,'x', self.factor)
if yscrollbar and not hasattr(yscrollbar, '_on_mouse_wheel'):
yscrollbar._on_mouse_wheel = self.build_function__on_mouse_wheel(scrollingArea,'y', self.factor)
main_scrollbar = yscrollbar or xscrollbar
if main_scrollbar:
scrollingArea._on_mouse_wheel = main_scrollbar._on_mouse_wheel
for scrollbar in (xscrollbar, yscrollbar):
if scrollbar:
scrollbar.bind('<Enter>', lambda event, scrollbar=scrollbar: self._mouse_wheel_bind(scrollbar) )
scrollbar.bind('<Leave>', lambda event: self._mouse_wheel_unbind())
class VScrolledPanel(Panel):
"""A pure Tkinter scrollable frame that actually works!
* Use the 'interior' attribute to place widgets inside the scrollable frame
* Construct and pack/place/grid normally
* This frame only allows vertical scrolling
"""
def __init__(self, master, *args, **kw):
super().__init__(master, *args, **kw)
# create a canvas object and a vertical scrollbar for scrolling it
self.vscrollbar = Scrollbar(self, orient=VERTICAL)
self.vscrollbar.pack(fill=Y, side=RIGHT, expand=FALSE)
self.canvas = Canvas(self, bd=0, highlightthickness=0,
yscrollcommand=self.vscrollbar.set)
self.canvas.pack(side=LEFT, fill=BOTH, expand=TRUE)
self.vscrollbar.config(command=self.canvas.yview)
# reset the view
self.canvas.xview_moveto(0)
self.canvas.yview_moveto(0)
# create a frame inside the canvas which will be scrolled with it
self.interior = interior = Frame(self.canvas, bg="white",)
interior_id = self.canvas.create_window(0, 0, window=interior,
anchor=NW)
# track changes to the canvas and frame width and sync them,
# also updating the scrollbar
def _configure_interior(event):
# update the scrollbars to match the size of the inner frame
size = (interior.winfo_reqwidth(), interior.winfo_reqheight())
self.canvas.config(scrollregion="0 0 %s %s" % size)
if interior.winfo_reqwidth() != self.canvas.winfo_width():
# update the canvas's width to fit the inner frame
self.canvas.config(width=interior.winfo_reqwidth())
interior.bind('<Configure>', _configure_interior)
def _configure_canvas(event):
if interior.winfo_reqwidth() != self.canvas.winfo_width():
# update the inner frame's width to fill the canvas
self.canvas.itemconfigure(interior_id, width=self.canvas.winfo_width())
self.canvas.bind('<Configure>', _configure_canvas)
ScrollingArea(self).add_scrolling(self.canvas, yscrollbar=self.vscrollbar)
def reset(self):
self.canvas.xview_moveto(0)
self.canvas.yview_moveto(0)
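# Minimal usage sketch (illustrative addition, not part of the original recipe):
# place widgets on the panel's `interior` frame and pack/grid the panel itself.
if __name__ == "__main__":
    root = Tk()
    panel = VScrolledPanel(root)
    panel.pack(fill=BOTH, expand=TRUE)
    for i in range(50):
        Label(panel.interior, text="Row %d" % i).pack(anchor=W)
    root.mainloop()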
|
[
"sys.platform.startswith"
] |
[((672, 704), 'sys.platform.startswith', 'sys.platform.startswith', (['"""linux"""'], {}), "('linux')\n", (695, 704), False, 'import sys\n'), ((1368, 1400), 'sys.platform.startswith', 'sys.platform.startswith', (['"""linux"""'], {}), "('linux')\n", (1391, 1400), False, 'import sys\n')]
|
#!/usr/bin/env python3
import sys, os, shutil, subprocess, re, difflib
os.environ['LC_ALL'] = 'C' # otherwise 'nm' prints in wrong order
builddir = os.getenv('builddir', os.path.dirname(__file__))
libs = os.getenv('libs', '.libs')
IGNORED_SYMBOLS = '|'.join(['_fini', '_init', '_fdata', '_ftext', '_fbss',
'__bss_start', '__bss_start__', '__bss_end__', '_edata', '_end', '_bss_end__',
'__end__', '__gcov_.*', 'llvm_.*', 'flush_fn_list', 'writeout_fn_list', 'mangle_path'])
nm = os.getenv('NM', shutil.which('nm'))
if not nm:
print('check-symbols.py: \'nm\' not found; skipping test')
sys.exit(77)
cxxflit = shutil.which('c++filt')
tested = False
stat = 0
for soname in ['harfbuzz', 'harfbuzz-subset', 'harfbuzz-icu', 'harfbuzz-gobject']:
for suffix in ['so', 'dylib']:
so = os.path.join(builddir, libs, 'lib%s.%s' % (soname, suffix))
if not os.path.exists(so): continue
# On macOS, C symbols are prefixed with _
symprefix = '_' if suffix == 'dylib' else ''
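        # Collect exported global symbols (nm types B/C/D/G/I/R/S/T), skipping toolchain internals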
EXPORTED_SYMBOLS = [s.split()[2]
for s in re.findall(r'^.+ [BCDGIRST] .+$',
subprocess.check_output(nm.split() + [so]).decode('utf-8'),
re.MULTILINE)
if not re.match(r'.* %s(%s)\b' % (symprefix, IGNORED_SYMBOLS), s)]
        # also run the symbols through c++filt, if it is available
if cxxflit:
EXPORTED_SYMBOLS = subprocess.check_output(
[cxxflit], input='\n'.join(EXPORTED_SYMBOLS).encode()
).decode('utf-8').splitlines()
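        # Expected public symbol prefix, e.g. libharfbuzz-subset.so -> hb_subset ('_hb_subset' on macOS)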
prefix = (symprefix + os.path.basename(so)).replace('libharfbuzz', 'hb').replace('-', '_').split('.')[0]
print('Checking that %s does not expose internal symbols' % so)
suspicious_symbols = [x for x in EXPORTED_SYMBOLS if not re.match(r'^%s(_|$)' % prefix, x)]
if suspicious_symbols:
print('Ouch, internal symbols exposed:', suspicious_symbols)
stat = 1
def_path = os.path.join(builddir, soname + '.def')
if not os.path.exists(def_path):
print('\'%s\' not found; skipping' % def_path)
else:
print('Checking that %s has the same symbol list as %s' % (so, def_path))
with open(def_path, 'r', encoding='utf-8') as f:
def_file = f.read()
diff_result = list(difflib.context_diff(
def_file.splitlines(),
['EXPORTS'] + [re.sub('^%shb' % symprefix, 'hb', x) for x in EXPORTED_SYMBOLS] +
# cheat: copy the last line from the def file!
[def_file.splitlines()[-1]]
))
if diff_result:
print('\n'.join(diff_result))
stat = 1
tested = True
if not tested:
    print('check-symbols.py: no shared libraries found; skipping test')
sys.exit(77)
sys.exit(stat)
|
[
"os.path.basename",
"os.path.dirname",
"os.path.exists",
"shutil.which",
"re.match",
"re.sub",
"os.path.join",
"os.getenv",
"sys.exit"
] |
[((208, 234), 'os.getenv', 'os.getenv', (['"""libs"""', '""".libs"""'], {}), "('libs', '.libs')\n", (217, 234), False, 'import sys, os, shutil, subprocess, re, difflib\n'), ((678, 701), 'shutil.which', 'shutil.which', (['"""c++filt"""'], {}), "('c++filt')\n", (690, 701), False, 'import sys, os, shutil, subprocess, re, difflib\n'), ((2997, 3011), 'sys.exit', 'sys.exit', (['stat'], {}), '(stat)\n', (3005, 3011), False, 'import sys, os, shutil, subprocess, re, difflib\n'), ((174, 199), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (189, 199), False, 'import sys, os, shutil, subprocess, re, difflib\n'), ((556, 574), 'shutil.which', 'shutil.which', (['"""nm"""'], {}), "('nm')\n", (568, 574), False, 'import sys, os, shutil, subprocess, re, difflib\n'), ((654, 666), 'sys.exit', 'sys.exit', (['(77)'], {}), '(77)\n', (662, 666), False, 'import sys, os, shutil, subprocess, re, difflib\n'), ((2983, 2995), 'sys.exit', 'sys.exit', (['(77)'], {}), '(77)\n', (2991, 2995), False, 'import sys, os, shutil, subprocess, re, difflib\n'), ((859, 918), 'os.path.join', 'os.path.join', (['builddir', 'libs', "('lib%s.%s' % (soname, suffix))"], {}), "(builddir, libs, 'lib%s.%s' % (soname, suffix))\n", (871, 918), False, 'import sys, os, shutil, subprocess, re, difflib\n'), ((2116, 2155), 'os.path.join', 'os.path.join', (['builddir', "(soname + '.def')"], {}), "(builddir, soname + '.def')\n", (2128, 2155), False, 'import sys, os, shutil, subprocess, re, difflib\n'), ((934, 952), 'os.path.exists', 'os.path.exists', (['so'], {}), '(so)\n', (948, 952), False, 'import sys, os, shutil, subprocess, re, difflib\n'), ((2171, 2195), 'os.path.exists', 'os.path.exists', (['def_path'], {}), '(def_path)\n', (2185, 2195), False, 'import sys, os, shutil, subprocess, re, difflib\n'), ((1385, 1443), 're.match', 're.match', (["('.* %s(%s)\\\\b' % (symprefix, IGNORED_SYMBOLS))", 's'], {}), "('.* %s(%s)\\\\b' % (symprefix, IGNORED_SYMBOLS), s)\n", (1393, 1443), False, 'import sys, os, shutil, subprocess, re, difflib\n'), ((1936, 1968), 're.match', 're.match', (["('^%s(_|$)' % prefix)", 'x'], {}), "('^%s(_|$)' % prefix, x)\n", (1944, 1968), False, 'import sys, os, shutil, subprocess, re, difflib\n'), ((2576, 2612), 're.sub', 're.sub', (["('^%shb' % symprefix)", '"""hb"""', 'x'], {}), "('^%shb' % symprefix, 'hb', x)\n", (2582, 2612), False, 'import sys, os, shutil, subprocess, re, difflib\n'), ((1715, 1735), 'os.path.basename', 'os.path.basename', (['so'], {}), '(so)\n', (1731, 1735), False, 'import sys, os, shutil, subprocess, re, difflib\n')]
|
import torch
from torch.utils.data import Dataset
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import collections
from chr import coverage
import pdb
class RegressionDataset(Dataset):
def __init__(self, X_data, y_data):
self.X_data = torch.from_numpy(X_data).float()
self.y_data = torch.from_numpy(y_data).float()
def __getitem__(self, index):
return self.X_data[index], self.y_data[index]
def __len__ (self):
return len(self.X_data)
def evaluate_predictions(pred, Y, X=None):
# Extract lower and upper prediction bands
pred_l = np.min(pred,1)
pred_h = np.max(pred,1)
# Marginal coverage
cover = (Y>=pred_l)*(Y<=pred_h)
marg_coverage = np.mean(cover)
if X is None:
wsc_coverage = None
else:
        # Estimated conditional coverage (worst-case slab)
wsc_coverage = coverage.wsc_unbiased(X, Y, pred, M=100)
# Marginal length
lengths = pred_h-pred_l
length = np.mean(lengths)
# Length conditional on coverage
idx_cover = np.where(cover)[0]
    length_cover = np.mean(lengths[idx_cover])
# Combine results
out = pd.DataFrame({'Coverage': [marg_coverage], 'Conditional coverage': [wsc_coverage],
'Length': [length], 'Length cover': [length_cover]})
return out
def plot_histogram(breaks, weights, S=None, fig=None, limits=None, i=0, colors=None, linestyles=None, xlim=None, filename=None):
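    # Step-plot row i of `weights` over `breaks`; optionally shade the entries indexed
    # by S[i] and mark each value in limits[i] with a vertical line.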
if colors is None:
if limits is not None:
colors = ['tab:blue'] * len(limits)
if linestyles is None:
if limits is not None:
linestyles = ['-'] * len(limits)
if fig is None:
fig = plt.figure()
plt.step(breaks, weights[i], where='pre', color='black')
if S is not None:
idx = S[i]
z = np.zeros(len(breaks),)
z[idx] = weights[i,idx]
plt.fill_between(breaks, z, step="pre", alpha=0.4, color='gray')
if limits is not None:
for q_idx in range(len(limits[i])):
q = limits[i][q_idx]
plt.axvline(q, 0, 1, linestyle=linestyles[q_idx], color=colors[q_idx])
plt.xlabel('$Y$')
plt.ylabel('Density')
if xlim is not None:
plt.xlim(xlim)
if filename is not None:
fig.set_size_inches(4.5, 3)
plt.savefig(filename, bbox_inches='tight', dpi=300)
plt.show()
|
[
"pandas.DataFrame",
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.axvline",
"matplotlib.pyplot.show",
"matplotlib.pyplot.step",
"numpy.min",
"numpy.mean",
"numpy.max",
"chr.coverage.wsc_unbiased",
"numpy.where",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.fill_between",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.savefig",
"torch.from_numpy"
] |
[((614, 629), 'numpy.min', 'np.min', (['pred', '(1)'], {}), '(pred, 1)\n', (620, 629), True, 'import numpy as np\n'), ((642, 657), 'numpy.max', 'np.max', (['pred', '(1)'], {}), '(pred, 1)\n', (648, 657), True, 'import numpy as np\n'), ((737, 751), 'numpy.mean', 'np.mean', (['cover'], {}), '(cover)\n', (744, 751), True, 'import numpy as np\n'), ((995, 1011), 'numpy.mean', 'np.mean', (['lengths'], {}), '(lengths)\n', (1002, 1011), True, 'import numpy as np\n'), ((1103, 1140), 'numpy.mean', 'np.mean', (['[lengths for i in idx_cover]'], {}), '([lengths for i in idx_cover])\n', (1110, 1140), True, 'import numpy as np\n'), ((1174, 1314), 'pandas.DataFrame', 'pd.DataFrame', (["{'Coverage': [marg_coverage], 'Conditional coverage': [wsc_coverage],\n 'Length': [length], 'Length cover': [length_cover]}"], {}), "({'Coverage': [marg_coverage], 'Conditional coverage': [\n wsc_coverage], 'Length': [length], 'Length cover': [length_cover]})\n", (1186, 1314), True, 'import pandas as pd\n'), ((1736, 1792), 'matplotlib.pyplot.step', 'plt.step', (['breaks', 'weights[i]'], {'where': '"""pre"""', 'color': '"""black"""'}), "(breaks, weights[i], where='pre', color='black')\n", (1744, 1792), True, 'import matplotlib.pyplot as plt\n'), ((2166, 2183), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$Y$"""'], {}), "('$Y$')\n", (2176, 2183), True, 'import matplotlib.pyplot as plt\n'), ((2188, 2209), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Density"""'], {}), "('Density')\n", (2198, 2209), True, 'import matplotlib.pyplot as plt\n'), ((2390, 2400), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2398, 2400), True, 'import matplotlib.pyplot as plt\n'), ((890, 930), 'chr.coverage.wsc_unbiased', 'coverage.wsc_unbiased', (['X', 'Y', 'pred'], {'M': '(100)'}), '(X, Y, pred, M=100)\n', (911, 930), False, 'from chr import coverage\n'), ((1065, 1080), 'numpy.where', 'np.where', (['cover'], {}), '(cover)\n', (1073, 1080), True, 'import numpy as np\n'), ((1719, 1731), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1729, 1731), True, 'import matplotlib.pyplot as plt\n'), ((1909, 1973), 'matplotlib.pyplot.fill_between', 'plt.fill_between', (['breaks', 'z'], {'step': '"""pre"""', 'alpha': '(0.4)', 'color': '"""gray"""'}), "(breaks, z, step='pre', alpha=0.4, color='gray')\n", (1925, 1973), True, 'import matplotlib.pyplot as plt\n'), ((2244, 2258), 'matplotlib.pyplot.xlim', 'plt.xlim', (['xlim'], {}), '(xlim)\n', (2252, 2258), True, 'import matplotlib.pyplot as plt\n'), ((2333, 2384), 'matplotlib.pyplot.savefig', 'plt.savefig', (['filename'], {'bbox_inches': '"""tight"""', 'dpi': '(300)'}), "(filename, bbox_inches='tight', dpi=300)\n", (2344, 2384), True, 'import matplotlib.pyplot as plt\n'), ((2090, 2160), 'matplotlib.pyplot.axvline', 'plt.axvline', (['q', '(0)', '(1)'], {'linestyle': 'linestyles[q_idx]', 'color': 'colors[q_idx]'}), '(q, 0, 1, linestyle=linestyles[q_idx], color=colors[q_idx])\n', (2101, 2160), True, 'import matplotlib.pyplot as plt\n'), ((276, 300), 'torch.from_numpy', 'torch.from_numpy', (['X_data'], {}), '(X_data)\n', (292, 300), False, 'import torch\n'), ((331, 355), 'torch.from_numpy', 'torch.from_numpy', (['y_data'], {}), '(y_data)\n', (347, 355), False, 'import torch\n')]
|
import os
if not os.path.exists("data"):
os.mkdir("data")
if not os.path.exists("data/about"):
os.mkdir("data/about")
if not os.path.exists("data/father"):
os.mkdir("data/father")
for filename in os.listdir("raw_data"):
full_filename = f"raw_data/{filename}"
if "About" in filename:
dest_file = f"data/about/{filename}"
elif "Father" in filename:
dest_file = f"data/father/{filename}"
    else:
        # skip files that belong to neither category
        continue
    os.rename(full_filename, dest_file)
|
[
"os.mkdir",
"os.rename",
"os.path.exists",
"os.listdir"
] |
[((208, 230), 'os.listdir', 'os.listdir', (['"""raw_data"""'], {}), "('raw_data')\n", (218, 230), False, 'import os\n'), ((17, 39), 'os.path.exists', 'os.path.exists', (['"""data"""'], {}), "('data')\n", (31, 39), False, 'import os\n'), ((45, 61), 'os.mkdir', 'os.mkdir', (['"""data"""'], {}), "('data')\n", (53, 61), False, 'import os\n'), ((69, 97), 'os.path.exists', 'os.path.exists', (['"""data/about"""'], {}), "('data/about')\n", (83, 97), False, 'import os\n'), ((103, 125), 'os.mkdir', 'os.mkdir', (['"""data/about"""'], {}), "('data/about')\n", (111, 125), False, 'import os\n'), ((133, 162), 'os.path.exists', 'os.path.exists', (['"""data/father"""'], {}), "('data/father')\n", (147, 162), False, 'import os\n'), ((168, 191), 'os.mkdir', 'os.mkdir', (['"""data/father"""'], {}), "('data/father')\n", (176, 191), False, 'import os\n'), ((429, 464), 'os.rename', 'os.rename', (['full_filename', 'dest_file'], {}), '(full_filename, dest_file)\n', (438, 464), False, 'import os\n')]
|
from django.contrib import admin
from shipment.models import Shipment
# Register your models here.
@admin.register(Shipment)
class ShipmentAdmin(admin.ModelAdmin):
list_display = ("name", "surname", "email", "phone","city","district","neighborhood","others")
|
[
"django.contrib.admin.register"
] |
[((101, 125), 'django.contrib.admin.register', 'admin.register', (['Shipment'], {}), '(Shipment)\n', (115, 125), False, 'from django.contrib import admin\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2010 <NAME>, h<EMAIL>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "$Revision: 194 $"
__author__ = "$Author: holtwick $"
__date__ = "$Date: 2008-04-18 18:59:53 +0200 (Fr, 18 Apr 2008) $"
import ho.pisa as pisa
import os
import logging
log = logging.getLogger(__file__)
def dummyLoader(name):
return '\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00F\x00\x00\x00\x89\x04\x03\x00\x00\x00c\xbeS\xd6\x00\x00' \
'\x000PLTE\x00\x00\x00\n\x06\x04\x18\x14\x0f-&\x1eLB6w`E\x8f\x80q\xb2\x9c\x82\xbe\xa1{' \
'\xc7\xb0\x96\xd1\xbd\xa9\xd9\xd0\xc6\xef\xeb\xe6\xf8\xf3\xef\xff\xfb\xf7\xff\xff\xffZ\x83\x0b|\x00\x00' \
'\x0c\xedIDATx^u\x97]l\x1bWv\xc7g\xe2`\x81\xbe\xcd%Gr\xd3\xa7P\x12e\xb7\x01\x8a\xd0")E\x01\x02\x8f\xf8' \
'!\x8bI\x17\x10\xc5!))5`\xf1C\xb4\xb25`S\xb2l\xb95\x90H\xa4.\xb9/u$K3\xe3\xa2\x80W\x12\xc59L\xf6a\xb3' \
'\x8dcN\xd6@\xb7\x1f\x01\x8a\x85\x16\x9b-\xfa\x81M\xb8@\x83l\xd1\xd8\xbc|)\xd0\x97\x82\xea\xb93\x92\xec' \
'"\xce\x11 \t3?\xfe\xcf\xff\x9e{\xce\x01(' \
'\x1c>7\x18\xfb\xc2\xfaE\xffk_\xb6\x18\xeb\x1e>\x8f\xe92d\xfe%T\xa8\x98\xfa\x07\x1f ' \
'$<\x0f\xe1\x91\xabT\xc1\xacT\xf2\xbfd\xec\xbb\x98\xdfM\xeb\x86aYP\xfa\xd3\xd6\xf3\x98C[' \
'\xa6\xaaU\xa1a5\xe9\x1b\xad\xef\xd0i}\x91\xccy+\xc8X\xf5E\xf6]:\xff0\xd8\x97\xce7\xb9P\xf1\xd1\xb7\x98' \
'\xaec\xe7/\xd3\xa1\xeb\x81{\x96e5\xd7.\xb6\x85\xe7\x99aO\x94\xf1R(' \
'\xfeC\xce\xd4F\xbf\xc50\x1b\xfa\xefS\xa9\xb2\x12p\x98({' \
'\x8eN\x9b\xb1\xbf\xf5O\xa5\xd7\x0b\xb4\xc9\x0f\x96\xec<G\xa7\xc5\x1e\xbf\xfa\xe2b\x90\x16\xb2\x00\x96E' \
'\x93O\x9e\xe7\xe77\x8b\xd2@ \xa3\xa7\x96\xe6\r\xab\xb9\x97\xfc\xf6\xb90WV\x0e\x8d(' \
'\xa1\xa5dd*\x06PL\xa2\xe7g\xdfw\xba\xe8\xe6o\x06\xc6\xd5\x80\xc7\xe5s\xbb|\xbd\x91\xd2\xb9 ' \
'\x13\x9e1\xc2\x13\xb5\xfeN\rn\xa5\xd5a\xc5+\xe7\xb7\xf5\xa2\xcbC\xde>a\x9c\xd2\xb5\xad\x07\xdbS\x0b\xb0' \
'\xa5z\xeb\x94\xd2y\x80kD\xee<e\x10h\x7fs]\xf4g\xa7\x01\xb6\x12\x91z\xa9P\x8a\\\xcfg\xfdQ\xf6\x0c\x83' \
'\xb1CD?\x05\x80\xf2\xa4;z)\xb8\x11\xf1\x11\xf7\xe5\x8b\x9d\xff\xcf\\\x92H\x846\x80f\x91Ys/\x11\xe2r\x85' \
'\xfe\x98u\x9e\xf5\xf3_\x1eB\xd2U\x00\x9a\xf3\xc9\xc92\xb9\xbc\xbc\xec\x93N?:\xce\xd59\xect\xdb\xec_\xbdC' \
'\xa4\x1f\x99\xb9\x81\x97\xddj\xb9g\x8c\xf4\xaf\xe8\x8f\xba\xc8\x1cwy\xbb\xd3\xb8\xab.\xfb\x0bU\xd03S\xa2' \
'\xac\x96\x03k\xe1\x02\xe4\x19\xbe\x12N\xcc|3<U\xd8O\x02\xd4iQ\x12\\j\x81R\x80\xbd\x14\x16\xed\x88\xc1' \
'\xfavw&\x02isj\xa2\xa9\xd1\x12\x91\xc4\xfe$\xa5\xe1\xbc\xf2f\xbbs\xcc ' \
'\xc2\xb2\xc6\xcd\xec\xe8\xfe\xa2\x05\xb4F$A\x0c\x94\n\xee\x9b\xc5\xec_\xb3\xa7\x0c\xfb\xf7q\xad\xb2\xb6b5' \
'?h\xea\xe6$\x11\t\xe9\xebs\r\xbdv\xf5\xf6\t\xd3a\xec#5\xb8\x9c\x08\xdf\xb4\xc0J\xc1\x9a$\x11\x7f8\x1c\x01' \
'\xb8\xf4\x17\xec\xb0s\xe29\x93\x18\x08\xa5\xcc\xa4eA\xaep\xd7#\xca\xa0\xeb\xd7o\xd5\x8a\xb7\x19;a:.\x1f' \
'\x11\xdd7\x1b8R\xcb\x83\xf5\xac<\xbf\x1e.,\xce~<\xff\xe3N\x9b\x1d3m\x0f\xea\x8b\x85{' \
'\xd6\xa7\xd6\xc3\xf8e}\xd9\xdc C\xd1\xd9f\xfe\x9d\x16;f\xba\x7f/\x12A\x10\xce\xe2\x88[' \
'\xffT\x9a\x99\xc8\x0co\xf5\xf5\x05g\xad\xda\x0fX\xeb\xa4\xceqQ\x10$\xb1\xb7\xd2@\xa86x\x7f8>h._\x9dh4\x8d' \
'\xa7:\x8f#X\x13At\xdb3nF\xee\xc8\x19wV^\xf4\x1b\xd6\xdc\xed\x13\xe6w\x01I\x90\x90\xa1F\x05\x99\xdc}B\x88(' \
'\x87}\xb7\xac\xda\x99\x13\xe6\xa7\xa1\xf3\x02fs\xa5)\xbd\xd70\r\xceH"\x91\xc2\x15\xc8\x1e\x9f\xbd\xbd\x17' \
'\xf7\x8b\x04m\x07\xd2\xb4\x02\xc8 !\xcf\xe1\x83\x0b\xc6\x9d+\\\x87u;\xedl\xdc{' \
'^\x12\x05\x89$\x0b\xd40\xef\x12\tu\xd2\x99!\xec\xc4\xab\x17\x8f\x98\xc7/\xc6\x07\xc6$;\xc1YZ\xd1+\n\x11E' \
'\x12\xa0\xe0\x1b\x18G\xd3\x0e\xf3\xb57\xeeN\xbc,\x89\xa2@z\xd0\x12]\xc34C\x11d\xbct\x809\x0c\xfbU ' \
'N"\x1eA\x92\xf0l\x03\xd8]\xeb\nq/\xc9\xb4\xe6\x91\x13\xf2\x97\xc8t\x1dF\xea#\xa2\xc0\xebH\x06)\x98\x8b' \
'\xc4\xbd\xd73\x12\x17e\xe5\x956g\xb0C~\x15P\x89(' \
'\t<\x08\xe9\xbda\xc0]\xcf\x1f\xed\x91\xbcBd\xe5\rv\xc4\xfc:\xac\xe2Qlf\xc8G\x82\x95\xc6\'\xf1\x18(' \
'><\xa6\xfb\xc0\xf6\x83\xcc\xe7\t\xd5G\x1c&\x8d\xc3E\x1b\x0fK\x00\x8a"\xc8\xd9\xde\x93\xfb\xfa\\U\xa7\x08' \
'\xcf\x85\x96\xd3\xf9\xb1\xf4\x0f\x9b\x9c\x11\xa4q_\xf8\xe0)3\xa5\x9e\x97\x1c;^\xbaU\xa8Z[' \
'1x\x9f\xbcX$3_v9\xd3\xedt?W\xe3^\x14r\xa04T\xc0\xfad\x14\xc6r\x83\xf7\xa5\xc4\x91\x1f\xc6\x90!r\x9fs0\xb1' \
'\xa76\xdd\xb0\x1e\xc66\xcf\\\x9ay\xf5\x85\xc4\xc1aW\xb0\x97\xd355A\x88,' \
'8AjA\x1d\x1b-S\x98Ly\xe4\xe4m\xe7\xec-\xe6WU\x82%\x94\x1cF\xed\xa1Uk/\xa2\xb9\xb3\xe4T\xee\r\xf6[' \
'dZ-\x16@F\xc2{w\x92\x05C#\xd4\x1a\x1f\xae\xcbe\x8f\xff\\\xaf\xe3\xa7\xfd\xf5\xd9\xb2:\x89wu\x14\xb2\xe2' \
'\xbeqO_\xa9\x0f\xaf\xfb\xfa\x06\xe7\xae\xb4m?\xff\xdc[\x8a\xa8\xca1$\x8a!\xf2Zc\x13\xea\x17\xd6\\I(' \
'\xcd\xb4\x84\xeea\x9b}\xe4\xce\x8f\x85\x13\xce\x8d\x89\xc8HR\x10\xb2P\xa7\x19w\x0c\xf6\x93\xbf\xe4L\xeb' \
'\x12\x89\x95\\\x11\xc5\xbe1" *\xca\xc6\x80Ik\xbe\xf0\x02\xd4s\x8f\xb8\x9fo|\xbd\x83\xda\x80+\xc7\xdbPD' \
'\x10\x8f\xf8\xc2B?\xadlD\x8b\x00\x943]\xf6?\xa9\xfe\x1e\xdc\xd6\x83\x08\t\xbc\x00\xc3\x8aH\xd2\xfd\x85' \
'\x8a_\x1b?a~\xb4\xb0\x99\xf1-g\xfc\x86\x11\x1a\x1a:\xd7G\x00\xce\x8b\xbd\xef\x176a\xed\xb5f\xb3\x9e{' \
'\x9b\xe7\xda\xbde\xc1^h\x1cj\x97s*\xc69\x80]B2\x05]\xcb.\x00\xd4\xcb\xafs\x9d\xfb\xef\xe0\x90\xefG\r\x8d' \
'\xaa\xe10\x9aA\x8eH\xee\x02-\xab^\x00\xd3f\xba\xbb\xc6\xa7V\xb3\xa9Uu]\xcf\x86\xb1\xda\xf6\x8c\xbe\x90,' \
'\xe4\x16]Q\xd08s\xd8\xde\xc5=\xd0\x040\xa0\x01e\x1f\x8e\xab\xcd\x90Hr\xdd\xf4yS\xb0\xc5\x99\xc71\x04@\xdf' \
'\x1c6\x00\xeeb\x89$\xde\xb5\xc4C\xfa\x01v\x86\xd2\xb0\x8f\x9e\xbb\xffV\x05\x93\x96\t\x99\x9b\x013DPG$R' \
'\xdf\xa9bx\x85\x7f\x12\xac\x07\x9c\xf9\xa4\n:\x8d\xe3h\xcfC.\xcb\xcbH\xdc\x03j\x90\xa2]\xdd\xc0\x9de\xfe' \
'\x00\x99T\x15\xa0\xe6!\x0159\x9f\xcf\xc7\t"I\x7f\xb9@\xab\x1a\xa5Z\xf5SK{\x13\x99\xf1*\xd4\xe7\xc8 ' \
'\x8e\xf0\xe5\x89p\xde#{\xe3\xe9<\xb5\xa3R\xbfgY\x9a\x1f=GQg{' \
'\xfe\x06\xc5X\xd0\xebD.\xac\xf3\xff\xcb\xaa\x9a\xac\\\xc0\x9a\x94\\\x8e\x0e\x0f\xcd\xf9\xa4G.P\x8cuU' \
'\x8dxw\x0b\r0Koq\x86\x1aO!\x9a\x90\xd3\x1c\xc9*\x84\x8c\x16/7\xabu\xfa\xe7\xc8Di\xc5fL\x8a&\xe9v8\x89' \
'\x7fscD\x92\x17&W\x1e\xde\xd3J\xaf\xd8\x0c\xad\xd8\x14\xbe\x03C_T\xf3\xf9\\\xe2eB\xdc\xb1\x84F\xf5\xf0' \
'\x1a?{\x84[D\xa4\x01u\x8a\xbf\xf6T\x1e\xb83\xce\x04\xbd\xa6\xaa\xcd\xaf}\x88\xe7:?L\xb5\xfcM\'\x1b`(' \
'X*\xf5UQL-\xf5>\x18\xce\x8c$\x99\xc0\x98\x12\xa4tJ\xbd\xac\xeb<\x1bX\xcd\x1d{w\xf2\xae\x1d\xfeI\x94,' \
'q\xa6\xa3\x04\n\xebJ\x00\x97.\xcc\xeb\xb4\n\xf0>2|d%\x12\xfbI\xbe\'\x94\xecp\x9d@j]q\x0f\x8d\xd3\x9a?\xa6' \
'\x1b\x00\xef\x11I\xe0\xbb\x91\xb8\xa6wj\xd3\xc1 \xcf\xf5sY\xcdM\x11\x12(' \
'\x94\x88\\\xb1>K\xbf\xe7\x91\x88\xc8\xb5\xdc\xc9\xd0\xb5\xec\x99\xb78\xf3\xebS\xaa\x8a\x03\x88\x8c\x87' \
'\\\xf8\xf4\xfe\xcc5\xb4\x83\x86\x029\xf7\xd4\xe9\x9b\xa1\xa5/\xb9\x9f\xff\x15#jbh(' \
'\x92\xc6\x06\t6\xe6.\xfb\xb1\xc4\xfdb\x8fV\xf2\x89\xa2\x1c\xb9\xd2\xe6\xcc\x93\xc9\x80\x8a\x81\xf5\xc5d' \
'\xd5D\xed\x0f\xefr\xdd\x0b\xb4<\x89\xae\xc8\x15\xc6\x84\x0e\xeb~\x16Bh\x8a\xa8\xe5\xb0+Y\xd9\xdc\x9b\xb5,' \
'S!7hi\nG\x92\x1cp\xe6\xf0\xb7\x1fo\xf7\xf5\xf5\xbdL\x06K\x02\xb9P\x9d\xd8\xbbeY;\xa4\x07\xef,' \
'!\x89\xd2\xe9N\xf7\x10\x99v\x13\xee\xa0K\xd2[' \
'"nZ\x81M\xec\xab;\x9e42\x93\x82$\xbe\xd29\xe4\xcc\x93\x18lp\xd5`\x89\x04\x0bU\x98Z\xb1\x9a\xfex\x9a\x96' \
'\xf9\xfa#\xb79\xc3\xba\xc8\x94\xf9|\xde(' \
'\x91\xe84@\xb2a}\x9c\x0c\xdb\xa9\x04\xe1\xd4#\x9ba\xc8`k\x89\xb2^"\x91\n\xec\xa7,' \
'kiKFF\xc1\x91\xc5m\x88\xcc!{2\x08\xb4\xe4\x11\'\x00sU\xeb\xc5\xd9fx\xa6&\xd3r\x02\'Q|\xb3c3\x87\xed\xbbP_' \
'#d\xc6\x98\x93\xd3\xd5\xd5\xc0\xec\xc3\x01(' \
'\xcbeu\n\x19r\x91ul\xa6\xb3\x07u\xac\xde\xeeK\x97\x08\xf6Vpv\'\x06\xef\x8e\xe4T\x85\x88\x92\xcc\x1c\xa6' \
'\xcb\x90YC\xe6\xb4B\xc2!wa=\x07\xf5w\xc7U,\x0e\x91\xfe\xa4\xd5:a\xcc\xb2O\xde\xed%\x18=t{' \
'\x06\xb4w\x83\t\x9f\x84%\xfbY\xf7(\x17\xdbY\x00\xaa\xc8\xbbI>\xea\x11\xdee\x9a\x12T\xb0b\xe2\xf7\x0eP\xc7' \
'\xf1|\x9f3$Q\xe4\xdb9J\rd\xce\xe5}\x9c\xf9\xb36;\xd6\xb9?\x83\x8c\x18\xbe\x86\x0c\x19__\x01s\xcd\xbd\xf8' \
'\x02\xf6*\x16\x87\xb5\x8f\xfc\xd8:b\xe2\x9a$H\xaedy\x01\xccLOv@\xb2\xdb\x82u\x1d\xa6\xbd\xb3b3s(' \
'\xe3N\xa1\x9fm_$\x11\x97D^c\xac\xa0\xe3g\x0f\x00\xeb<4\x87\x1f\x95SK\xbcX\xc3XA\xe9-4s\xc4t\x9f\xf8\x01' \
'\xd6\xf0H\xd8\xc7DNfM:\xd7sF\x9d\x12\xe5\x1f?\xcb\x8c\xa2K\x91\xb8\xe6DI\x94\xd3\xa3Z\x9ex\x83\x81\xb1' \
'\x84\xf7g\xfcP\xc7L\x8c\xdf\xa9\xf0\xa2\xffUQ\x08\xa4\xce\xe6|$\x91\x95U5\xf8\x08\x99\xae\xc3`\x8f\x99' \
'\x94*\x828\x91\x11p\x80\x06}\xe2)\xf5\xd2@^M\x7f\x88\x9e\x9f\xea\xd4)\x9d#\xe2BV\x10\x02\xd9~\\\x18\xd7' \
'\xc7\x92TM\xbf\xdd:a\x0e\xbf\x18EfU ' \
'+\x8b\xc8d\xb0\xbe\xc1\xa4/J\xf37^G\xe4X\xe7q\xcc\x04Z&\xc2K\x0eC\\Y\x1a\xb8`,' \
'\x9a\xb7Z\xad\xa7\xb9Fu\x13u\xa4\x97\xb26#}\xcfK#\xd4\xd85W\xdb\xec\x19\xc6\x00\r\xeb\xfaR\xc9a\xc6F\xea' \
'\xab\x9aQ\x87U\xf6\x8cN\x0c\x1a\xday"\xfe\x9e\xc3\x90k#\xf52gJWX\x17\xef\xeb\x98\x01\x9a\xc7\xfa\x95\x88' \
'\xcd\xcc\x05\xa3U\xce\xd4\xdf\xc0+\xed:3\xf8x\x14\x99u\t\xbd\x12\x11\x19W1\xd0c\xd8\x8c\xcaX\x8b9\xf3\xf5' \
'\x1f1\xa8\xd3UIt\xe1p\xb8\xb3~Z\xf1\x91\r\xcd\xa85\xcc\xdc\x01k\x1f33\x00\xda\xaa\xe4\x0e/\x12\x89\xa4' \
'\xb1V\x8b\xbe\xa2\x06\xc5\x15(\xf1\x9b?\xb4\x99\xaf\x00\x80\xc6\xdd)\xc8\x12B\xfc\xcd\n\xad\x14s\xbay\x15' \
'\'|\x98\xb1\x13\x1d\x03h$U\x1b?\'\x86C\xa4\x01\x94\xee\x8e\xe8p\x15\x1b8\x8c\xd7\xeax\xfe\xeaF\xb5^\xd1k' \
'\xe7z\xb13\xae\xfb\x1aVS\xd39\x13\x03\x9ayttv\x16\xa2\x06\x98EQ\xec\x15"xo\xb8\xa1\x00Ftc\xaf\x17\x05\xdf' \
'\xec:\xf3\xce\xa2\x94\xc2&\x1f?\x92\xa6\xd5\xcd3M\x1d`\xa62\xbf\x13Df\x03\r\xd9~\xc2i\n\x97H8\xac\x88i' \
'\xdd0\x07,]\xdfZ\xd9^\xd9\xcf\x1b\x94\x96n\x1f1\xf7\xbdUXR)}\xcf\xfe\xa27`\x81V6\xf6rZn\x85\xd2\xf2\xf7' \
'\x8f\xcf%\xc3\x05\n\xf8@\xec\x1f1`\xee\x9df}j\xc5\xdc\x18Voit\xf5\xfb-\xc7\xf3\xcf\'\x8a\x7f\x00\x1a\xa5' \
'\xeb\xc4C&\xe0\xfdY\x0b&\x0bK\x99A\xafQ\xa7k\x07-\x9e\xab\xc3\xc6\xb6\x94\xd3\x00uZ\x96T%X\xd9\x8b!\x93t' \
'\'\x06\xaf\x83I\xd7o\xb7\x9c\\\x91\xc5p\xbfa\xeat]I\xff\xc8O\xf7\x83M\xc8\x10w\xc0\xbb\xb4b\xd2\xf2\xa8' \
'\xc3\xfc\xe7|\x94\xc6\xa7ML\x86_m\xb3\x14\x96\x8cz9G\xc8\xd9\xaca\x96\xe6C\x1fr\xa6\xf5@+\x18\xa5A\xd3' \
'\x04\x9a\xed\xd9\xc8j\xb0\x1f\xa6\xd4X"\xeei0\xd6\n\xea\x01g\xday\x8dB=~\x06\x1d\x95zV\xb7\xab`\xea\x1aB' \
'\xba\xc9\x1d\x06\xdf\xb6\xeb\xf3\x9b\n4\xf9N\xd8\xc6c(Y\xb3\x02{\xf3\x0f\n\x15@\xc3\x18\xfeN\xd7f(' \
'>\xc0\x9e\xbf3\x0e\x1a\xda\xd2\xa1\xe6\xc9O\xa0\xa8\x81H\xeeb\xdb\xd6\xf9G.\x0c\xb0zU\x9e\x81\xcd\xdf7' \
'\x00\x96<\xde( \xab\xd1l\xe0\xc0\xe9\xc3\x8f\x90G\xa9\xf8\xc6\xbc\x1fv\xe5J\xb5\xba\xd9#\'\x81K\xaf\xc5' \
'>hu\xed>\xfc)\xe5a\x8cm\xc2F\xcc\x1cZ\xde\xdc\x9f\x0ef\xd1\xf8:-\xfd\xd5\x01;\xea\xc3S\xd4\x8e\xdd\xe5' \
'\x19\x80\x86\x8fd\xca\x13\xd1\x1e\xa3\x9e\x0fEX\x1b\x7f\x1c\x1dU-\xd8\xd9F5t\x95 ' \
'\xa1\xa5\x89\xa8:\xddTg\xf9N\xc5\xc9\xb1\x99\xc7J\xc4\x16\x9a\xd6\xd0\x95\x99 ' \
'J4\xb5\x7f\xab\x85D\x8b\xffr\xf6<{\xb8\x1d\x0e\xf9\xa9\x13\xb0GnZ\xd6/Z\xfc%\xb3\x99\xae\xcd0f\xe1c\x1e' \
'\x9f\r\r\x05\xad\x16{&\x10\xc0\xf8?Z\n\xf1+\xfb\x81\xd5F\x00\x00\x00\x00IEND\xaeB`\x82 '
class myLinkLoader:
"""
    This object is just a wrapper to track additional information
    and clean up temporary files once they are no longer needed.
"""
def __init__(self, **kw):
"""
The self.kw could be used in getFileName if you like
"""
self.kw = kw
self.tmpFileList = []
def __del__(self):
for path in self.tmpFileList:
os.remove(path)
self.tmpFileList = []
def getFileName(self, path, relative=None):
import os
import tempfile
log.info("myLinkLoader.getFileName: %r %r %r", path, relative, self.kw)
try:
if "." in path:
new_suffix = "." + path.split(".")[-1].lower()
if new_suffix in (".css", ".gif", ".jpg", ".png"):
suffix = new_suffix
tmpPath = tempfile.mktemp(prefix="pisa-", suffix=suffix)
tmpFile = file(tmpPath, "wb")
try:
# Here you may add your own stuff
tmpFile.write(dummyLoader(path))
finally:
tmpFile.close()
self.tmpFileList.append(tmpPath)
return tmpPath
except Exception as e:
log.exception("myLinkLoader.getFileName")
return None
def helloWorld():
filename = __file__ + ".pdf"
lc = myLinkLoader(database="some_name", port=666).getFileName
pdf = pisa.CreatePDF(
u"""
<p>
Hello <strong>World</strong>
<p>
<img src="apath/some.png">
""",
file(filename, "wb"),
link_callback=lc,
)
if not pdf.err:
pisa.startViewer(filename)
if __name__ == "__main__":
pisa.showLogging()
helloWorld()
# print repr(open("img/denker.png", "rb").read())
|
[
"os.remove",
"ho.pisa.showLogging",
"ho.pisa.startViewer",
"tempfile.mktemp",
"logging.getLogger"
] |
[((797, 824), 'logging.getLogger', 'logging.getLogger', (['__file__'], {}), '(__file__)\n', (814, 824), False, 'import logging\n'), ((14133, 14151), 'ho.pisa.showLogging', 'pisa.showLogging', ([], {}), '()\n', (14149, 14151), True, 'import ho.pisa as pisa\n'), ((14073, 14099), 'ho.pisa.startViewer', 'pisa.startViewer', (['filename'], {}), '(filename)\n', (14089, 14099), True, 'import ho.pisa as pisa\n'), ((12812, 12827), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (12821, 12827), False, 'import os\n'), ((13263, 13309), 'tempfile.mktemp', 'tempfile.mktemp', ([], {'prefix': '"""pisa-"""', 'suffix': 'suffix'}), "(prefix='pisa-', suffix=suffix)\n", (13278, 13309), False, 'import tempfile\n')]
|
import asyncio
import subprocess
from quart import session
from async_http import AsyncHTTP
from config import twilio_available_sms_numbers_base_uri
from config import twilio_purchase_sms_number_base_uri
from config import twilio_create_subaccount_base_uri
from config import twilio_assign_new_number_base_uri
from config import auth_token
from config import admin_sid
class CreateTwilioAccount:
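    """Provision a Twilio subaccount end to end: create the subaccount, look up
    an available SMS-enabled number, purchase it, and assign it to the account.
    """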
def __init__(self, friendly_name):
self.friendly_name = friendly_name
self.async_http = AsyncHTTP()
async def create_user_account(self):
"""A method for creating a system user account.
Args:
self: an instance of the CreateTwilioAccount class
"""
params = {
'friendly_name': self.friendly_name,
'auth_token': auth_token
}
request_uri = twilio_create_subaccount_base_uri
print(request_uri)
print(params)
user_account = await self.async_http.post(
base_uri=request_uri,
params=params)
user_sid = user_account.sid
try:
signed_up_user = await self._get_sms_user(user_sid)
except Exception as e:
print('Twilio sign up error: {}'.format(str(e)))
return signed_up_user
async def _get_sms_user(self, user_sid):
"""A private method for getting a list of available,
sms-enabled phone numbers in a given area code. Calls
private helper methods to complete the process of
purchasing a number from the list, and assigning
it to the Twilio subaccount.
NOTE: the area code is set on the session.
Args:
self: an instance of the CreateTwilioAccount class
user_sid: string
"""
request_uri = twilio_available_sms_numbers_base_uri.format(
auth_token=auth_token)
response = await self.async_http.get(base_uri=request_uri)
sms_number = response.available_phone_numbers[0].friendly_name
response = await self._purchase_sms_number(user_sid, sms_number)
return response
async def _purchase_sms_number(self, user_sid, sms_number):
"""A private method for purchasing a given sms-enabled number.
Args:
self: an instance of the CreateTwilioAccount class
user_sid: string
sms_number: string: the sms number to buy
"""
params = {'phone_number':sms_number}
request_uri = twilio_purchase_sms_number_base_uri.format(
auth_token=auth_token)
response = await self.async_http.post(
base_uri=request_uri,
params=params)
response = await self._assign_sms_number_to_user(user_sid, sms_number)
return response
async def _assign_sms_number_to_user(self, user_sid, sms_number):
"""A private method for assigning a sms-enabled number
to a Twilio subaccount.
Args:
self: an instance of the CreateTwilioAccount class
user_sid: string
sms_number: string: the number that was just purchased.
"""
params = {
'phone_number':sms_number,
'auth_token': auth_token,
'AddressSid': user_sid
}
request_uri = twilio_assign_new_number_base_uri.format(
admin_sid=admin_sid,
sms_number=sms_number)
response = await self.async_http.post(
base_uri=request_uri,
params=params)
return response
|
[
"config.twilio_assign_new_number_base_uri.format",
"async_http.AsyncHTTP",
"config.twilio_purchase_sms_number_base_uri.format",
"config.twilio_available_sms_numbers_base_uri.format"
] |
[((511, 522), 'async_http.AsyncHTTP', 'AsyncHTTP', ([], {}), '()\n', (520, 522), False, 'from async_http import AsyncHTTP\n'), ((1817, 1884), 'config.twilio_available_sms_numbers_base_uri.format', 'twilio_available_sms_numbers_base_uri.format', ([], {'auth_token': 'auth_token'}), '(auth_token=auth_token)\n', (1861, 1884), False, 'from config import twilio_available_sms_numbers_base_uri\n'), ((2520, 2585), 'config.twilio_purchase_sms_number_base_uri.format', 'twilio_purchase_sms_number_base_uri.format', ([], {'auth_token': 'auth_token'}), '(auth_token=auth_token)\n', (2562, 2585), False, 'from config import twilio_purchase_sms_number_base_uri\n'), ((3344, 3433), 'config.twilio_assign_new_number_base_uri.format', 'twilio_assign_new_number_base_uri.format', ([], {'admin_sid': 'admin_sid', 'sms_number': 'sms_number'}), '(admin_sid=admin_sid, sms_number=\n sms_number)\n', (3384, 3433), False, 'from config import twilio_assign_new_number_base_uri\n')]
|
import keras
from keras.models import model_from_json
from keras.utils import plot_model
import pandas as pd
import numpy as np
from ann_visualizer.visualize import ann_viz
import glob
import os
#batch visualize:
for model in glob.glob('./models/*.json'):
#load json model:
classifier_name = os.path.splitext(model)[0]
classifier_weights = classifier_name+'.h5'
model = classifier_name+'.json'
json_file = open(model,'r')
classifier_json = json_file.read()
json_file.close()
classifier = model_from_json(classifier_json)
#load weights:
classifier.load_weights(classifier_weights)
print("Loaded model from disk")
#try visualizing it
#view = True will result in .pdf files of visualization
ann_viz(classifier,view=True, filename='Visualize_'+classifier_name,title='English German Classifier: Simple ANN')
plot_model(classifier,to_file='VisualizeLayers_'+classifier_name+'.png',show_shapes=True,show_layer_names=True)
print("Models have been visualized")
|
[
"ann_visualizer.visualize.ann_viz",
"keras.utils.plot_model",
"keras.models.model_from_json",
"os.path.splitext",
"glob.glob"
] |
[((228, 256), 'glob.glob', 'glob.glob', (['"""./models/*.json"""'], {}), "('./models/*.json')\n", (237, 256), False, 'import glob\n'), ((527, 559), 'keras.models.model_from_json', 'model_from_json', (['classifier_json'], {}), '(classifier_json)\n', (542, 559), False, 'from keras.models import model_from_json\n'), ((752, 874), 'ann_visualizer.visualize.ann_viz', 'ann_viz', (['classifier'], {'view': '(True)', 'filename': "('Visualize_' + classifier_name)", 'title': '"""English German Classifier: Simple ANN"""'}), "(classifier, view=True, filename='Visualize_' + classifier_name,\n title='English German Classifier: Simple ANN')\n", (759, 874), False, 'from ann_visualizer.visualize import ann_viz\n'), ((871, 993), 'keras.utils.plot_model', 'plot_model', (['classifier'], {'to_file': "('VisualizeLayers_' + classifier_name + '.png')", 'show_shapes': '(True)', 'show_layer_names': '(True)'}), "(classifier, to_file='VisualizeLayers_' + classifier_name +\n '.png', show_shapes=True, show_layer_names=True)\n", (881, 993), False, 'from keras.utils import plot_model\n'), ((307, 330), 'os.path.splitext', 'os.path.splitext', (['model'], {}), '(model)\n', (323, 330), False, 'import os\n')]
|
import pathlib
import re
from setuptools import find_packages, setup
about = {}
with open(pathlib.Path("rikai") / "__version__.py", "r") as fh:
exec(fh.read(), about)
with open(
pathlib.Path(__file__).absolute().parent.parent / "README.md",
"r",
) as fh:
long_description = fh.read()
# extras
test = ["pytest"]
torch = ["torch>=1.5.0", "torchvision"]
jupyter = ["matplotlib", "jupyterlab"]
aws = ["boto"]
docs = ["sphinx"]
youtube = ["pafy", "youtube_dl", "ffmpeg-python"]
all = test + torch + jupyter + aws + docs + youtube
setup(
name="rikai",
version=about["version"],
license="Apache License, Version 2.0",
author="<NAME>",
author_email="<EMAIL>",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/eto-ai/rikai",
packages=find_packages(),
include_package_data=True,
python_requires=">=3.7",
install_requires=[
"antlr4-python3-runtime",
"ipython",
"jsonschema",
"numpy",
"opencv-python",
"pandas",
"Pillow",
"pyarrow>=2.0",
"pyspark>=3.1,<3.2",
"pyyaml",
"requests",
],
extras_require={
"test": test,
"pytorch": torch,
"jupyter": jupyter,
"aws": aws,
"docs": docs,
"youtube": youtube,
"all": all,
},
classifiers=[
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Libraries",
],
)
|
[
"pathlib.Path",
"setuptools.find_packages"
] |
[((839, 854), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (852, 854), False, 'from setuptools import find_packages, setup\n'), ((91, 112), 'pathlib.Path', 'pathlib.Path', (['"""rikai"""'], {}), "('rikai')\n", (103, 112), False, 'import pathlib\n'), ((188, 210), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (200, 210), False, 'import pathlib\n')]
|
from datetime import timedelta
from unittest import TestCase
from unittest.mock import patch
from path import Path
from dakara_feeder.directory import SongPaths
from dakara_feeder.metadata import FFProbeMetadataParser, MediaParseError
from dakara_feeder.song import BaseSong
from dakara_feeder.subtitle.parsing import Pysubs2SubtitleParser, SubtitleParseError
class BaseSongTestCase(TestCase):
"""Test the BaseSong class."""
@patch.object(Pysubs2SubtitleParser, "parse", autoset=True)
@patch.object(FFProbeMetadataParser, "parse", autoset=True)
def test_subtitle_parser_error(self, mocked_metadata_parse, mocked_subtitle_parse):
"""Test an invalid subtitle file raises no exception but logs error."""
# setup mocks
mocked_metadata_parse.return_value.get_duration.return_value = timedelta(
seconds=1
)
mocked_metadata_parse.return_value.get_audio_tracks_count.return_value = 1
mocked_subtitle_parse.side_effect = SubtitleParseError("invalid")
# create paths
paths = SongPaths(Path("file.mp4"), subtitle=Path("file.ass"))
# create BaseSong instance
song = BaseSong(Path("/base-dir"), paths)
# get song representation
with self.assertLogs("dakara_feeder.song") as logger:
representation = song.get_representation()
# check no lyrics has been found
self.assertEqual(representation["lyrics"], "")
# assert logs
self.assertListEqual(
logger.output, ["ERROR:dakara_feeder.song:Lyrics not parsed: invalid"]
)
@patch.object(Pysubs2SubtitleParser, "parse", autoset=True)
@patch.object(FFProbeMetadataParser, "parse", autoset=True)
def test_metadata_error(self, mocked_metadata_parse, mocked_subtitle_parse):
"""Test an invalid video file raises no exception but logs error."""
# setup mocks
mocked_metadata_parse.side_effect = MediaParseError("invalid")
mocked_subtitle_parse.return_value.get_lyrics.return_value = ""
# create paths
paths = SongPaths(Path("file.mp4"), subtitle=Path("file.ass"))
# create BaseSong instance
song = BaseSong(Path("/base-dir"), paths)
# get song representation
with self.assertLogs("dakara_feeder.song") as logger:
representation = song.get_representation()
# check duration defaults to zero
self.assertEqual(representation["duration"], 0)
# assert logs
self.assertListEqual(
logger.output, ["ERROR:dakara_feeder.song:Cannot parse metadata: invalid"]
)
|
[
"unittest.mock.patch.object",
"dakara_feeder.subtitle.parsing.SubtitleParseError",
"path.Path",
"datetime.timedelta",
"dakara_feeder.metadata.MediaParseError"
] |
[((439, 497), 'unittest.mock.patch.object', 'patch.object', (['Pysubs2SubtitleParser', '"""parse"""'], {'autoset': '(True)'}), "(Pysubs2SubtitleParser, 'parse', autoset=True)\n", (451, 497), False, 'from unittest.mock import patch\n'), ((503, 561), 'unittest.mock.patch.object', 'patch.object', (['FFProbeMetadataParser', '"""parse"""'], {'autoset': '(True)'}), "(FFProbeMetadataParser, 'parse', autoset=True)\n", (515, 561), False, 'from unittest.mock import patch\n'), ((1606, 1664), 'unittest.mock.patch.object', 'patch.object', (['Pysubs2SubtitleParser', '"""parse"""'], {'autoset': '(True)'}), "(Pysubs2SubtitleParser, 'parse', autoset=True)\n", (1618, 1664), False, 'from unittest.mock import patch\n'), ((1670, 1728), 'unittest.mock.patch.object', 'patch.object', (['FFProbeMetadataParser', '"""parse"""'], {'autoset': '(True)'}), "(FFProbeMetadataParser, 'parse', autoset=True)\n", (1682, 1728), False, 'from unittest.mock import patch\n'), ((823, 843), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (832, 843), False, 'from datetime import timedelta\n'), ((993, 1022), 'dakara_feeder.subtitle.parsing.SubtitleParseError', 'SubtitleParseError', (['"""invalid"""'], {}), "('invalid')\n", (1011, 1022), False, 'from dakara_feeder.subtitle.parsing import Pysubs2SubtitleParser, SubtitleParseError\n'), ((1953, 1979), 'dakara_feeder.metadata.MediaParseError', 'MediaParseError', (['"""invalid"""'], {}), "('invalid')\n", (1968, 1979), False, 'from dakara_feeder.metadata import FFProbeMetadataParser, MediaParseError\n'), ((1073, 1089), 'path.Path', 'Path', (['"""file.mp4"""'], {}), "('file.mp4')\n", (1077, 1089), False, 'from path import Path\n'), ((1178, 1195), 'path.Path', 'Path', (['"""/base-dir"""'], {}), "('/base-dir')\n", (1182, 1195), False, 'from path import Path\n'), ((2102, 2118), 'path.Path', 'Path', (['"""file.mp4"""'], {}), "('file.mp4')\n", (2106, 2118), False, 'from path import Path\n'), ((2207, 2224), 'path.Path', 'Path', (['"""/base-dir"""'], {}), "('/base-dir')\n", (2211, 2224), False, 'from path import Path\n'), ((1100, 1116), 'path.Path', 'Path', (['"""file.ass"""'], {}), "('file.ass')\n", (1104, 1116), False, 'from path import Path\n'), ((2129, 2145), 'path.Path', 'Path', (['"""file.ass"""'], {}), "('file.ass')\n", (2133, 2145), False, 'from path import Path\n')]
|
""" Training from scratch with different conditions. """
import os
import sys
import argparse
import copy
import time
import shutil
import json
import logging
logging.getLogger().setLevel(logging.DEBUG)
import numpy as np
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data as data
import torchvision.transforms as transforms
import torchvision.datasets as datasets
from gumi import model_utils
from gumi.ops import *
from gumi.pruning.export import GroupExporter
from gumi.model_runner import utils
from gumi.model_runner.model_runner import ModelRunner
from gumi.model_runner.parser import create_cli_parser
# CLI parser
parser = create_cli_parser(prog="CLI tool for pruning")
parser.add_argument(
"--skip-train",
action="store_true",
default=False,
help="Whether to skip the training step.",
)
parser.add_argument(
"--fine-tune",
action="store_true",
default=False,
help="Whether to fine-tune ONLY the linear classifiers.",
)
args = parser.parse_args()
# CUDA
os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu_id
use_cuda = torch.cuda.is_available()
cudnn.benchmark = True
class TransferRunner(ModelRunner):
""" Runner for transfer learning. """
def validate_args(self, args):
pass
def create_update_state_dict_fn():
def update_state_dict(state_dict):
""" Here are several update rules:
- In this new script, we won't have "module." prefix
- There won't be any '.conv2d' in the module
"""
state_dict_ = copy.deepcopy(state_dict)
for key, val in state_dict.items():
key_ = key
if "module" in key_:
del state_dict_[key_]
key_ = key_.replace("module.", "")
state_dict_[key_] = val
if "fc" in key_:
del state_dict_[key_]
return state_dict_
return update_state_dict
def main():
""" Main """
# initialise runner
logging.info("==> Initializing TransferRunner ...")
runner = TransferRunner(args)
# load model
logging.info("==> Loading model ...")
model = runner.load_model(
update_state_dict_fn=create_update_state_dict_fn(), fine_tune=args.fine_tune
)
# Validate
logging.info("==> Validating the loaded model ...")
loss1, acc1 = runner.validate(model)
# Train
if args.skip_train:
logging.info("==> Training has been skipped.")
else:
logging.info("==> Run training ...")
best_acc = runner.train(model) # parameters are in args
# Validate again
logging.info("==> Validating the trained model ...")
loss2, acc2 = runner.validate(model)
if __name__ == "__main__":
main()
|
[
"copy.deepcopy",
"gumi.model_runner.parser.create_cli_parser",
"logging.info",
"torch.cuda.is_available",
"logging.getLogger"
] |
[((765, 811), 'gumi.model_runner.parser.create_cli_parser', 'create_cli_parser', ([], {'prog': '"""CLI tool for pruning"""'}), "(prog='CLI tool for pruning')\n", (782, 811), False, 'from gumi.model_runner.parser import create_cli_parser\n'), ((1208, 1233), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1231, 1233), False, 'import torch\n'), ((2130, 2181), 'logging.info', 'logging.info', (['"""==> Initializing TransferRunner ..."""'], {}), "('==> Initializing TransferRunner ...')\n", (2142, 2181), False, 'import logging\n'), ((2242, 2279), 'logging.info', 'logging.info', (['"""==> Loading model ..."""'], {}), "('==> Loading model ...')\n", (2254, 2279), False, 'import logging\n'), ((2428, 2479), 'logging.info', 'logging.info', (['"""==> Validating the loaded model ..."""'], {}), "('==> Validating the loaded model ...')\n", (2440, 2479), False, 'import logging\n'), ((2770, 2822), 'logging.info', 'logging.info', (['"""==> Validating the trained model ..."""'], {}), "('==> Validating the trained model ...')\n", (2782, 2822), False, 'import logging\n'), ((172, 191), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (189, 191), False, 'import logging\n'), ((1667, 1692), 'copy.deepcopy', 'copy.deepcopy', (['state_dict'], {}), '(state_dict)\n', (1680, 1692), False, 'import copy\n'), ((2571, 2617), 'logging.info', 'logging.info', (['"""==> Training has been skipped."""'], {}), "('==> Training has been skipped.')\n", (2583, 2617), False, 'import logging\n'), ((2638, 2674), 'logging.info', 'logging.info', (['"""==> Run training ..."""'], {}), "('==> Run training ...')\n", (2650, 2674), False, 'import logging\n')]
|
#!/usr/bin/env python3
from aws_cdk import core
import os
from ec2_ialb_aga_custom_r53.network_stack import NetworkingStack
from ec2_ialb_aga_custom_r53.aga_stack import AgaStack
from ec2_ialb_aga_custom_r53.alb_stack import ALBStack
from ec2_ialb_aga_custom_r53.certs_stack import CertsStack
from ec2_ialb_aga_custom_r53.ec2_stack import EC2Stack
deploy_env = core.Environment(
account=os.environ["CDK_DEFAULT_ACCOUNT"],
region=os.environ["CDK_DEFAULT_REGION"])
# These need to be injected at synth/deployment time
CIDR = os.getenv("VPC_CIDR", "")
DOMAIN = os.getenv("R53_DOMAIN", "")
SUB_DOMAIN = "code-server"
app = core.App()
net = NetworkingStack(app, "GravitonBlog-NetworkingStack", CIDR, env=deploy_env)
ec2 = EC2Stack(app, "GravitonBlog-EC2Stack", net.vpc, env=deploy_env)
ec2.add_dependency(net)
cert = CertsStack(app, "GravitonBlog-CertsStack",
DOMAIN, SUB_DOMAIN, env=deploy_env)
alb = ALBStack(app, "GravitonBlog-ALBStack", net.vpc, ec2.instance,
cert.domain_cert, env=deploy_env)
alb.add_dependency(net)
alb.add_dependency(ec2)
alb.add_dependency(cert)
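# The AGA stack (AWS Global Accelerator) fronts the ALB; it also receives the hosted zone
# and sub-domain, presumably to create the DNS records pointing at the accelerator.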
aga = AgaStack(app, "GravitonBlog-AGAStack", net.vpc, alb.alb,
cert.blog_hosted_zone, SUB_DOMAIN, env=deploy_env)
aga.add_dependency(net)
aga.add_dependency(cert)
aga.add_dependency(alb)
app.synth()
|
[
"ec2_ialb_aga_custom_r53.aga_stack.AgaStack",
"ec2_ialb_aga_custom_r53.alb_stack.ALBStack",
"ec2_ialb_aga_custom_r53.certs_stack.CertsStack",
"ec2_ialb_aga_custom_r53.network_stack.NetworkingStack",
"aws_cdk.core.Environment",
"aws_cdk.core.App",
"ec2_ialb_aga_custom_r53.ec2_stack.EC2Stack",
"os.getenv"
] |
[((364, 469), 'aws_cdk.core.Environment', 'core.Environment', ([], {'account': "os.environ['CDK_DEFAULT_ACCOUNT']", 'region': "os.environ['CDK_DEFAULT_REGION']"}), "(account=os.environ['CDK_DEFAULT_ACCOUNT'], region=os.\n environ['CDK_DEFAULT_REGION'])\n", (380, 469), False, 'from aws_cdk import core\n'), ((535, 560), 'os.getenv', 'os.getenv', (['"""VPC_CIDR"""', '""""""'], {}), "('VPC_CIDR', '')\n", (544, 560), False, 'import os\n'), ((570, 597), 'os.getenv', 'os.getenv', (['"""R53_DOMAIN"""', '""""""'], {}), "('R53_DOMAIN', '')\n", (579, 597), False, 'import os\n'), ((632, 642), 'aws_cdk.core.App', 'core.App', ([], {}), '()\n', (640, 642), False, 'from aws_cdk import core\n'), ((650, 724), 'ec2_ialb_aga_custom_r53.network_stack.NetworkingStack', 'NetworkingStack', (['app', '"""GravitonBlog-NetworkingStack"""', 'CIDR'], {'env': 'deploy_env'}), "(app, 'GravitonBlog-NetworkingStack', CIDR, env=deploy_env)\n", (665, 724), False, 'from ec2_ialb_aga_custom_r53.network_stack import NetworkingStack\n'), ((732, 795), 'ec2_ialb_aga_custom_r53.ec2_stack.EC2Stack', 'EC2Stack', (['app', '"""GravitonBlog-EC2Stack"""', 'net.vpc'], {'env': 'deploy_env'}), "(app, 'GravitonBlog-EC2Stack', net.vpc, env=deploy_env)\n", (740, 795), False, 'from ec2_ialb_aga_custom_r53.ec2_stack import EC2Stack\n'), ((828, 906), 'ec2_ialb_aga_custom_r53.certs_stack.CertsStack', 'CertsStack', (['app', '"""GravitonBlog-CertsStack"""', 'DOMAIN', 'SUB_DOMAIN'], {'env': 'deploy_env'}), "(app, 'GravitonBlog-CertsStack', DOMAIN, SUB_DOMAIN, env=deploy_env)\n", (838, 906), False, 'from ec2_ialb_aga_custom_r53.certs_stack import CertsStack\n'), ((932, 1032), 'ec2_ialb_aga_custom_r53.alb_stack.ALBStack', 'ALBStack', (['app', '"""GravitonBlog-ALBStack"""', 'net.vpc', 'ec2.instance', 'cert.domain_cert'], {'env': 'deploy_env'}), "(app, 'GravitonBlog-ALBStack', net.vpc, ec2.instance, cert.\n domain_cert, env=deploy_env)\n", (940, 1032), False, 'from ec2_ialb_aga_custom_r53.alb_stack import ALBStack\n'), ((1123, 1235), 'ec2_ialb_aga_custom_r53.aga_stack.AgaStack', 'AgaStack', (['app', '"""GravitonBlog-AGAStack"""', 'net.vpc', 'alb.alb', 'cert.blog_hosted_zone', 'SUB_DOMAIN'], {'env': 'deploy_env'}), "(app, 'GravitonBlog-AGAStack', net.vpc, alb.alb, cert.\n blog_hosted_zone, SUB_DOMAIN, env=deploy_env)\n", (1131, 1235), False, 'from ec2_ialb_aga_custom_r53.aga_stack import AgaStack\n')]
|
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
from django.http import Http404, HttpResponseRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.utils import translation
from vanilla import TemplateView, DetailView, UpdateView
from deck.models import Event, Proposal
from core.models import Profile
from core.forms import ProfileForm, ProfilePictureForm, ProfileChangeLanguageForm
from core.mixins import LoginRequiredMixin, FormValidRedirectMixing
class IndexView(TemplateView):
template_name = 'index.html'
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context.update(
events=Event.objects.count(),
proposals=Proposal.objects.count(),
users=User.objects.count()
)
return context
class AboutView(TemplateView):
template_name = 'about.html'
class ProfileView(DetailView):
template_name = 'account/profile.html'
model = Profile
lookup_field = 'user__username'
def get_object(self, **kwargs):
queryset = self.get_queryset()
username = self.kwargs.get('user__username')
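        # Fall back to the authenticated user's own profile when no username is given in the URL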
if not username and self.request.user.is_authenticated():
return self.request.user.profile
else:
return get_object_or_404(queryset, user__username=username)
def get_context_data(self, **kwargs):
context = super(ProfileView, self).get_context_data(**kwargs)
self.object = self.get_object()
context.update(
profile_form=ProfileForm(instance=self.object),
language_form=ProfileChangeLanguageForm(instance=self.object),
events=self.object.get_profile_events(),
proposals=self.object.get_profile_proposals(),
)
return context
class ProfileUpdateView(LoginRequiredMixin,
FormValidRedirectMixing,
UpdateView):
template_name = 'account/profile.html'
model = Profile
form_class = ProfileForm
lookup_field = 'user__username'
def get_object(self, **kwargs):
queryset = self.get_queryset()
username = self.kwargs.get('user__username')
if not username and self.request.user.is_authenticated():
return self.request.user.profile
elif (username == self.request.user.username or
self.request.user.is_superuser):
return get_object_or_404(queryset, user__username=username)
else:
raise Http404
def form_valid(self, form):
self.object = form.save()
return self.success_redirect(_(u'Profile updated.'))
def get(self, *args, **kwargs):
self.object = self.get_object()
return HttpResponseRedirect(
self.object.get_absolute_url()
)
def form_invalid(self, form):
for error in form.errors.itervalues():
messages.error(self.request, error.as_data()[0].message)
return self.get()
class ProfileUpdatePictureView(ProfileUpdateView):
form_class = ProfilePictureForm
def form_valid(self, form):
self.object = form.save()
return self.success_redirect(_(u'Photo changed.'))
class ProfileChangeLanguageView(ProfileUpdateView):
form_class = ProfileChangeLanguageForm
def form_valid(self, form):
self.object = form.save()
translation.activate(self.object.language)
self.request.session[
translation.LANGUAGE_SESSION_KEY
] = self.object.language
return self.success_redirect(_(u'Language changed.'))
|
[
"django.utils.translation.activate",
"deck.models.Proposal.objects.count",
"deck.models.Event.objects.count",
"core.forms.ProfileChangeLanguageForm",
"django.shortcuts.get_object_or_404",
"django.utils.translation.ugettext",
"core.forms.ProfileForm",
"django.contrib.auth.models.User.objects.count"
] |
[((3473, 3515), 'django.utils.translation.activate', 'translation.activate', (['self.object.language'], {}), '(self.object.language)\n', (3493, 3515), False, 'from django.utils import translation\n'), ((1386, 1438), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['queryset'], {'user__username': 'username'}), '(queryset, user__username=username)\n', (1403, 1438), False, 'from django.shortcuts import get_object_or_404\n'), ((2716, 2738), 'django.utils.translation.ugettext', '_', (['u"""Profile updated."""'], {}), "(u'Profile updated.')\n", (2717, 2738), True, 'from django.utils.translation import ugettext as _\n'), ((3279, 3299), 'django.utils.translation.ugettext', '_', (['u"""Photo changed."""'], {}), "(u'Photo changed.')\n", (3280, 3299), True, 'from django.utils.translation import ugettext as _\n'), ((3661, 3684), 'django.utils.translation.ugettext', '_', (['u"""Language changed."""'], {}), "(u'Language changed.')\n", (3662, 3684), True, 'from django.utils.translation import ugettext as _\n'), ((771, 792), 'deck.models.Event.objects.count', 'Event.objects.count', ([], {}), '()\n', (790, 792), False, 'from deck.models import Event, Proposal\n'), ((816, 840), 'deck.models.Proposal.objects.count', 'Proposal.objects.count', ([], {}), '()\n', (838, 840), False, 'from deck.models import Event, Proposal\n'), ((860, 880), 'django.contrib.auth.models.User.objects.count', 'User.objects.count', ([], {}), '()\n', (878, 880), False, 'from django.contrib.auth.models import User\n'), ((1641, 1674), 'core.forms.ProfileForm', 'ProfileForm', ([], {'instance': 'self.object'}), '(instance=self.object)\n', (1652, 1674), False, 'from core.forms import ProfileForm, ProfilePictureForm, ProfileChangeLanguageForm\n'), ((1702, 1749), 'core.forms.ProfileChangeLanguageForm', 'ProfileChangeLanguageForm', ([], {'instance': 'self.object'}), '(instance=self.object)\n', (1727, 1749), False, 'from core.forms import ProfileForm, ProfilePictureForm, ProfileChangeLanguageForm\n'), ((2519, 2571), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['queryset'], {'user__username': 'username'}), '(queryset, user__username=username)\n', (2536, 2571), False, 'from django.shortcuts import get_object_or_404\n')]
|
import os
from discord.ext import commands
from dotenv import load_dotenv
import json
import shutil
from datetime import datetime
load_dotenv()
TOKEN = os.getenv('DISCORD_TOKEN')
DST_DIR = os.getenv('DST_DIR')
BACKUP_DIR = os.getenv('BACKUP_DIR')
bot = commands.Bot(command_prefix='!')
@bot.event
async def on_ready():
print(f'{bot.user} has connected to Discord!')
@bot.command()
async def ping(ctx):
await ctx.send(f'Pong! {round(bot.latency * 1000)}ms')
@bot.command(aliases=['clops'])
async def deerclops(ctx, *args):
with open('data/deerclops.txt', 'r+') as f:
day = f.readlines()[0].strip()
if (not len(args)):
await ctx.send(f'Deerclops will spawn on Day {day}.')
elif (args[0] == 'help'):
await ctx.send('Deerclops Usage: ')
elif (len(args) == 1):
            # Update the stored spawn day (assumed intent: args[0] is the next spawn day;
            # the original unfinished code hinted at Deerclops' ~71.8 day respawn interval)
            f.seek(0)
            f.truncate()
            f.write(args[0])
await ctx.send(f'Updated: Deerclops will spawn on Day {args[0]}.')
else:
await ctx.send('Only provide 1 argument! e.g. "!deerclops 10"')
@bot.command(aliases=['mod'])
async def mods(ctx, *args):
with open('data/mods.json', 'r+') as f:
data = json.load(f)
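        # mods.json is expected to hold two lists keyed "server" and "client"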
# Display mods
if (not len(args)):
message = ''
# Add server mods
message += '__**Server Mods:**__\n'
for mod in data['server']:
message += f'- {mod}\n'
# Add client mods
message += '\n__**Client Mods:**__\n'
for mod in data['client']:
message += f'- {mod}\n'
await ctx.send(message)
# Add new mod
elif (args[0] == 'server' or args[0] == 'client'):
mod_type = args[0]
# Format the mod and add it to json
mod = ' '.join(args[1:])
data[mod_type].append(mod)
# Clear the json file before dumping the updated contents
f.seek(0)
f.truncate()
json.dump(data, f, indent=4)
# Send confirmation!
await ctx.send(f'Added "{mod}" to {mod_type} mods!')
# Help
elif (args[0] == 'help'):
await ctx.send('Mods usage:')
@bot.command(aliases=['backup'])
async def save(ctx):
# TODO: take server name as argument
src = f'{DST_DIR}/Cluster_5'
server_name = 'the rust buster'
dest = f'{BACKUP_DIR}/{server_name}/Backup {datetime.now().strftime("%b-%d-%y %H%M")}'
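    # The timestamped folder keeps each backup unique; shutil.copytree fails if the destination already exists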
try:
shutil.copytree(src, dest)
await ctx.send('Server saved!')
print(f'Server saved to {dest}')
except Exception as e:
await ctx.send('Backup failed :( Check console for error')
print(e)
bot.run(TOKEN)
|
[
"json.dump",
"json.load",
"dotenv.load_dotenv",
"discord.ext.commands.Bot",
"shutil.copytree",
"datetime.datetime.now",
"os.getenv"
] |
[((132, 145), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (143, 145), False, 'from dotenv import load_dotenv\n'), ((154, 180), 'os.getenv', 'os.getenv', (['"""DISCORD_TOKEN"""'], {}), "('DISCORD_TOKEN')\n", (163, 180), False, 'import os\n'), ((191, 211), 'os.getenv', 'os.getenv', (['"""DST_DIR"""'], {}), "('DST_DIR')\n", (200, 211), False, 'import os\n'), ((225, 248), 'os.getenv', 'os.getenv', (['"""BACKUP_DIR"""'], {}), "('BACKUP_DIR')\n", (234, 248), False, 'import os\n'), ((256, 288), 'discord.ext.commands.Bot', 'commands.Bot', ([], {'command_prefix': '"""!"""'}), "(command_prefix='!')\n", (268, 288), False, 'from discord.ext import commands\n'), ((1171, 1183), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1180, 1183), False, 'import json\n'), ((2479, 2505), 'shutil.copytree', 'shutil.copytree', (['src', 'dest'], {}), '(src, dest)\n', (2494, 2505), False, 'import shutil\n'), ((1984, 2012), 'json.dump', 'json.dump', (['data', 'f'], {'indent': '(4)'}), '(data, f, indent=4)\n', (1993, 2012), False, 'import json\n'), ((2419, 2433), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2431, 2433), False, 'from datetime import datetime\n')]
|
from typing import List, Tuple
import numpy as np
from PIL import Image
import pytorch_lightning as pl
import torch
from torchvision.models import resnet18
from torchvision import transforms
from ml_models.model_initializer import ModelInitializer
class PredPostureNet(pl.LightningModule):
def __init__(self):
super().__init__()
self.resnet = resnet18(pretrained=True)
self.fc = torch.nn.Linear(1000, 4)
def forward(self, x):
h0 = self.resnet(x)
h1 = self.fc(h0)
return h1
class Inference:
def __init__(self):
BUCKET_NAME: str = 'classify-posture'
MODEL_SOURCE_NAME: str = 'posture_4_classes_model.pt'
MODEL_FILE_PATH: str = f'ml_models/classify_images/{MODEL_SOURCE_NAME}'
initializer: ModelInitializer = ModelInitializer(
BUCKET_NAME, MODEL_SOURCE_NAME, MODEL_FILE_PATH
)
self.net: PredPostureNet = initializer.init_model(network_class=PredPostureNet)
self.class_names: List[str] = ['handstand', 'lying_down', 'sit', 'stand']
    def run(self, image_name: str) -> Tuple[np.ndarray, str]:
path: str = self._image_file_path(image_name)
image = self._prepare_image(path)
with torch.no_grad():
y = self.net(image)
        # NOTE: there is only one row, so index 0 drops the batch dimension
result: np.ndarray = y.softmax(dim=-1).detach().numpy()[0]
cls: int = np.argmax(result)
return np.round(result, decimals=4), self.class_names[cls]
def _image_file_path(self, image_name: str) -> str:
return f'media/images/{image_name}'
def _prepare_image(self, path: str):
transform = transforms.Compose([
            # Resizing to 256 then center-cropping to 224 is the usual preprocessing for ImageNet-trained models
transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
            # These mean/std values are the ones recommended in the official PyTorch docs
transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
])
img = Image.open(path).convert('RGB')
transformed_img = transform(img)
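        # Stack into a batch of one so the network receives a 4-D (N, C, H, W) tensor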
img_torch = torch.stack([transformed_img])
return img_torch
|
[
"torchvision.models.resnet18",
"torch.stack",
"numpy.argmax",
"ml_models.model_initializer.ModelInitializer",
"torchvision.transforms.ToTensor",
"PIL.Image.open",
"torch.nn.Linear",
"torchvision.transforms.CenterCrop",
"torchvision.transforms.Normalize",
"torch.no_grad",
"numpy.round",
"torchvision.transforms.Resize"
] |
[((367, 392), 'torchvision.models.resnet18', 'resnet18', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (375, 392), False, 'from torchvision.models import resnet18\n'), ((411, 435), 'torch.nn.Linear', 'torch.nn.Linear', (['(1000)', '(4)'], {}), '(1000, 4)\n', (426, 435), False, 'import torch\n'), ((805, 870), 'ml_models.model_initializer.ModelInitializer', 'ModelInitializer', (['BUCKET_NAME', 'MODEL_SOURCE_NAME', 'MODEL_FILE_PATH'], {}), '(BUCKET_NAME, MODEL_SOURCE_NAME, MODEL_FILE_PATH)\n', (821, 870), False, 'from ml_models.model_initializer import ModelInitializer\n'), ((1407, 1424), 'numpy.argmax', 'np.argmax', (['result'], {}), '(result)\n', (1416, 1424), True, 'import numpy as np\n'), ((2092, 2122), 'torch.stack', 'torch.stack', (['[transformed_img]'], {}), '([transformed_img])\n', (2103, 2122), False, 'import torch\n'), ((1236, 1251), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1249, 1251), False, 'import torch\n'), ((1440, 1468), 'numpy.round', 'np.round', (['result'], {'decimals': '(4)'}), '(result, decimals=4)\n', (1448, 1468), True, 'import numpy as np\n'), ((1741, 1763), 'torchvision.transforms.Resize', 'transforms.Resize', (['(256)'], {}), '(256)\n', (1758, 1763), False, 'from torchvision import transforms\n'), ((1777, 1803), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['(224)'], {}), '(224)\n', (1798, 1803), False, 'from torchvision import transforms\n'), ((1817, 1838), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1836, 1838), False, 'from torchvision import transforms\n'), ((1897, 1972), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.485, 0.456, 0.406]', 'std': '[0.229, 0.224, 0.225]'}), '(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n', (1917, 1972), False, 'from torchvision import transforms\n'), ((1999, 2015), 'PIL.Image.open', 'Image.open', (['path'], {}), '(path)\n', (2009, 2015), False, 'from PIL import Image\n')]
|
from typing import Dict, TYPE_CHECKING, Type
from django.apps import apps
from django.utils.translation import ugettext_lazy as _
from ..settings import settings
if TYPE_CHECKING:
from django.db.models import Model
__all__ = (
'cart_element_representation_serializer',
'get_base_api_view'
)
def cart_element_representation_serializer(
value: Type['Model'],
serializer_context: Dict
):
serializers = settings.ELEMENT_REPRESENTATION_SERIALIZERS
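    # Walk the configured mapping of model path -> serializer class and use the first match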
for model_path, serializer_class in serializers.items():
model_class = apps.get_model(model_path)
if isinstance(value, model_class):
return serializer_class(
instance=value,
context=serializer_context
)
raise Exception(_('Unexpected type of cart element'))
def get_base_api_view():
"""
    Returns the custom base API view class set in settings (or a plain placeholder class if unset)
"""
BaseAPIView = settings.BASE_API_VIEW
if BaseAPIView is None:
class BaseAPIView:
pass
return BaseAPIView
|
[
"django.utils.translation.ugettext_lazy",
"django.apps.apps.get_model"
] |
[((565, 591), 'django.apps.apps.get_model', 'apps.get_model', (['model_path'], {}), '(model_path)\n', (579, 591), False, 'from django.apps import apps\n'), ((783, 819), 'django.utils.translation.ugettext_lazy', '_', (['"""Unexpected type of cart element"""'], {}), "('Unexpected type of cart element')\n", (784, 819), True, 'from django.utils.translation import ugettext_lazy as _\n')]
|
# -*- coding: utf-8 -*-
"""Tests for PCATransformer."""
import numpy as np
import pytest
from sktime.transformations.panel.pca import PCATransformer
from sktime.utils._testing.panel import _make_nested_from_array
@pytest.mark.parametrize("bad_components", ["str", 1.2, -1.2, -1, 11])
def test_bad_input_args(bad_components):
"""Check that exception is raised for bad input args."""
X = _make_nested_from_array(np.ones(10), n_instances=10, n_columns=1)
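    # A string n_components should raise TypeError; out-of-range numeric values raise ValueError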
if isinstance(bad_components, str):
with pytest.raises(TypeError):
PCATransformer(n_components=bad_components).fit(X)
else:
with pytest.raises(ValueError):
PCATransformer(n_components=bad_components).fit(X)
|
[
"pytest.mark.parametrize",
"pytest.raises",
"sktime.transformations.panel.pca.PCATransformer",
"numpy.ones"
] |
[((217, 286), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""bad_components"""', "['str', 1.2, -1.2, -1, 11]"], {}), "('bad_components', ['str', 1.2, -1.2, -1, 11])\n", (240, 286), False, 'import pytest\n'), ((421, 432), 'numpy.ones', 'np.ones', (['(10)'], {}), '(10)\n', (428, 432), True, 'import numpy as np\n'), ((517, 541), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (530, 541), False, 'import pytest\n'), ((629, 654), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (642, 654), False, 'import pytest\n'), ((555, 598), 'sktime.transformations.panel.pca.PCATransformer', 'PCATransformer', ([], {'n_components': 'bad_components'}), '(n_components=bad_components)\n', (569, 598), False, 'from sktime.transformations.panel.pca import PCATransformer\n'), ((668, 711), 'sktime.transformations.panel.pca.PCATransformer', 'PCATransformer', ([], {'n_components': 'bad_components'}), '(n_components=bad_components)\n', (682, 711), False, 'from sktime.transformations.panel.pca import PCATransformer\n')]
|
from direct.showbase.ShowBase import ShowBase
from panda3d.core import ExecutionEnvironment
from p3dopenvr.p3dopenvr import P3DOpenVR
import openvr
import os
class MinimalOpenVR(P3DOpenVR):
def __init__(self):
P3DOpenVR.__init__(self)
self.left_hand = None
self.right_hand = None
def init_action(self):
main_dir = ExecutionEnvironment.getEnvironmentVariable("MAIN_DIR")
filename = os.path.join(main_dir, "demo_actions.json")
self.load_action_manifest(filename, "/actions/demo")
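        # Resolve handles for the haptic outputs, hand poses and trigger inputs declared in the manifest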
self.action_haptic_left = self.vr_input.getActionHandle('/actions/demo/out/Haptic_Left')
self.source_left = self.vr_input.getInputSourceHandle('/user/hand/left')
self.action_pose_left = self.vr_input.getActionHandle('/actions/demo/in/Hand_Left')
self.action_haptic_right = self.vr_input.getActionHandle('/actions/demo/out/Haptic_Right')
self.source_right = self.vr_input.getInputSourceHandle('/user/hand/right')
self.action_pose_right = self.vr_input.getActionHandle('/actions/demo/in/Hand_Right')
self.action_left_trigger = self.vr_input.getActionHandle('/actions/demo/in/left_trigger')
self.action_right_trigger = self.vr_input.getActionHandle('/actions/demo/in/right_trigger')
def update_action(self):
left_trigger_state, device = self.get_digital_action_rising_edge(self.action_left_trigger)
if left_trigger_state:
print("LEFT")
self.vr_input.triggerHapticVibrationAction(self.action_haptic_left, 0, 1, 4, 1, openvr.k_ulInvalidInputValueHandle)
right_trigger_state, device = self.get_digital_action_rising_edge(self.action_right_trigger)
if right_trigger_state:
print("RIGHT")
self.vr_input.triggerHapticVibrationAction(self.action_haptic_right, 0, 1, 4, 1, openvr.k_ulInvalidInputValueHandle)
left_pose = self.get_action_pose(self.action_pose_left)
if left_pose.pose.bPoseIsValid:
left_matrix = self.get_pose_modelview(left_pose.pose)
if self.left_hand is None:
self.left_hand = self.tracking_space.attach_new_node('left-hand')
model = loader.loadModel("box")
model.reparent_to(self.left_hand)
model.set_scale(0.1)
self.left_hand.show()
self.left_hand.set_mat(left_matrix)
else:
if self.left_hand is not None:
self.left_hand.hide()
right_pose = self.get_action_pose(self.action_pose_right)
if right_pose.pose.bPoseIsValid:
right_matrix = self.get_pose_modelview(right_pose.pose)
if self.right_hand is None:
self.right_hand = self.tracking_space.attach_new_node('right-hand')
model = loader.loadModel("box")
model.reparent_to(self.right_hand)
model.set_scale(0.1)
self.right_hand.show()
self.right_hand.set_mat(right_matrix)
else:
if self.right_hand is not None:
self.right_hand.hide()
base = ShowBase()
base.setFrameRateMeter(True)
myvr = MinimalOpenVR()
myvr.init()
model = loader.loadModel("panda")
model.reparentTo(render)
model.setPos(0, 10, -5)
base.accept('d', myvr.list_devices)
base.accept('b', base.bufferViewer.toggleEnable)
base.run()
|
[
"direct.showbase.ShowBase.ShowBase",
"os.path.join",
"p3dopenvr.p3dopenvr.P3DOpenVR.__init__",
"panda3d.core.ExecutionEnvironment.getEnvironmentVariable"
] |
[((3115, 3125), 'direct.showbase.ShowBase.ShowBase', 'ShowBase', ([], {}), '()\n', (3123, 3125), False, 'from direct.showbase.ShowBase import ShowBase\n'), ((225, 249), 'p3dopenvr.p3dopenvr.P3DOpenVR.__init__', 'P3DOpenVR.__init__', (['self'], {}), '(self)\n', (243, 249), False, 'from p3dopenvr.p3dopenvr import P3DOpenVR\n'), ((358, 413), 'panda3d.core.ExecutionEnvironment.getEnvironmentVariable', 'ExecutionEnvironment.getEnvironmentVariable', (['"""MAIN_DIR"""'], {}), "('MAIN_DIR')\n", (401, 413), False, 'from panda3d.core import ExecutionEnvironment\n'), ((433, 476), 'os.path.join', 'os.path.join', (['main_dir', '"""demo_actions.json"""'], {}), "(main_dir, 'demo_actions.json')\n", (445, 476), False, 'import os\n')]
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for ADCM upgrade"""
# pylint:disable=redefined-outer-name, no-self-use, too-many-arguments
import random
from contextlib import contextmanager
from pathlib import Path
from typing import Tuple, Union, List, Iterable, Any
import allure
import pytest
from adcm_client.base import ObjectNotFound
from adcm_client.objects import ADCMClient, Cluster, Host, Service, Bundle, Component, Provider, Task, Job, Upgrade
from adcm_pytest_plugin import params
from adcm_pytest_plugin.docker_utils import ADCM
from adcm_pytest_plugin.plugin import parametrized_by_adcm_version
from adcm_pytest_plugin.steps.actions import (
run_cluster_action_and_assert_result,
run_service_action_and_assert_result,
run_component_action_and_assert_result,
run_provider_action_and_assert_result,
)
from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string
from tests.upgrade_utils import upgrade_adcm_version
from tests.functional.conftest import only_clean_adcm
from tests.functional.plugin_utils import build_objects_checker, build_objects_comparator
from tests.functional.tools import AnyADCMObject, get_config, get_objects_via_pagination
from tests.library.utils import previous_adcm_version_tag
pytestmark = [only_clean_adcm]
AVAILABLE_ACTIONS = {
"single_state-available",
"state_list-available",
"state_any-available",
}
@pytest.fixture(scope="session")
def upgrade_target(cmd_opts) -> Tuple[str, str]:
"""Actual ADCM version"""
if not cmd_opts.adcm_image:
pytest.fail("CLI parameter adcm_image should be provided")
return tuple(cmd_opts.adcm_image.split(":", maxsplit=2)) # type: ignore
def old_adcm_images() -> Tuple[List[Tuple[str, str]], Any]:
"""A list of old ADCM images"""
return parametrized_by_adcm_version(adcm_min_version="2019.10.08")[0]
def _create_cluster(sdk_client_fs: ADCMClient, bundle_dir: str = "cluster_bundle") -> Cluster:
bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, bundle_dir))
cluster_name = f"test_{random_string()}"
return bundle.cluster_prototype().cluster_create(name=cluster_name)
def _create_host(sdk_client_fs: ADCMClient, bundle_dir: str = "hostprovider") -> Host:
bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, bundle_dir))
provider = bundle.provider_create(name=f"test_{random_string()}")
return provider.host_create(fqdn=f"test_host_{random_string()}")
@allure.step("Check actions availability")
def _assert_available_actions(obj: AnyADCMObject):
obj.reread()
actions = {action.name for action in obj.action_list()}
assert (
actions == AVAILABLE_ACTIONS
), f"Unexpected list of available actions!\nExpected: {AVAILABLE_ACTIONS}\nActual:{actions}"
@allure.step("Check that previously created cluster exists")
def _check_that_cluster_exists(sdk_client_fs: ADCMClient, cluster: Cluster) -> None:
    assert len(sdk_client_fs.cluster_list()) == 1, "Exactly one cluster is expected to exist"
with catch_failed(ObjectNotFound, "Previously created cluster not found"):
sdk_client_fs.cluster(name=cluster.name)
@allure.step("Check that previously created host exists")
def _check_that_host_exists(cluster: Cluster, host: Host) -> None:
    assert len(cluster.host_list()) == 1, "Exactly one host is expected to exist"
with catch_failed(ObjectNotFound, "Previously created host not found"):
cluster.host(fqdn=host.fqdn)
@allure.step("Check encryption")
def _check_encryption(obj: Union[Cluster, Service]) -> None:
assert obj.action(name="check-password").run().wait() == "success"
@pytest.mark.parametrize("adcm_is_upgradable", [True], indirect=True)
@pytest.mark.parametrize("image", old_adcm_images(), ids=repr, indirect=True)
def test_upgrade_adcm(
adcm_fs: ADCM,
sdk_client_fs: ADCMClient,
adcm_api_credentials: dict,
adcm_image_tags: Tuple[str, str],
) -> None:
"""Test adcm upgrade"""
cluster = _create_cluster(sdk_client_fs)
host = _create_host(sdk_client_fs)
cluster.host_add(host)
upgrade_adcm_version(adcm_fs, sdk_client_fs, adcm_api_credentials, adcm_image_tags)
_check_that_cluster_exists(sdk_client_fs, cluster)
_check_that_host_exists(cluster, host)
@pytest.mark.parametrize("adcm_is_upgradable", [True], indirect=True)
@pytest.mark.parametrize("image", old_adcm_images(), ids=repr, indirect=True)
def test_pass_in_config_encryption_after_upgrade(
adcm_fs: ADCM,
sdk_client_fs: ADCMClient,
adcm_api_credentials: dict,
adcm_image_tags: Tuple[str, str],
) -> None:
"""Test adcm upgrade with encrypted fields"""
cluster = _create_cluster(sdk_client_fs, "cluster_with_pass_verify")
service = cluster.service_add(name="PassCheckerService")
config_diff = dict(password="<PASSWORD>")
cluster.config_set_diff(config_diff)
service.config_set_diff(config_diff)
upgrade_adcm_version(adcm_fs, sdk_client_fs, adcm_api_credentials, adcm_image_tags)
_check_encryption(cluster)
_check_encryption(service)
@pytest.mark.parametrize("adcm_is_upgradable", [True], indirect=True)
@pytest.mark.parametrize("image", [["hub.arenadata.io/adcm/adcm", "2021.06.17.06"]], ids=repr, indirect=True)
def test_actions_availability_after_upgrade(
adcm_fs: ADCM,
sdk_client_fs: ADCMClient,
adcm_api_credentials: dict,
upgrade_target: Tuple[str, str],
) -> None:
"""Test that actions availability from old DSL remains the same after update"""
cluster = _create_cluster(sdk_client_fs, "cluster_with_actions")
_assert_available_actions(cluster)
upgrade_adcm_version(adcm_fs, sdk_client_fs, adcm_api_credentials, upgrade_target)
_assert_available_actions(cluster)
# !===== Dirty ADCM upgrade =====!
class TestUpgradeFilledADCM:
"""
Check that ADCM filled with different objects can upgrade correctly:
    - objects didn't lose their configs and "stable" properties
- objects can be manipulated (you can run actions on them)
"""
LONG_TEXT = f'{"Many" * 200}Words\nTo \"Say\"\n To (me)\n"' * 20
# Services
CHEESE_SERVICE = 'cheese_service'
SAUCE_SERVICE = 'sauce_service'
BREAD_SERVICE = 'bread_service'
# Components
# on cheese
MILK_COMPONENT = 'milk'
# on sauce
SPICE_COMPONENT = 'spice'
TOMATO_COMPONENT = 'tomato'
LEMON_COMPONENT = 'lemon'
# fixtures
@pytest.fixture()
def dirty_adcm(self, sdk_client_fs: ADCMClient) -> dict:
"""
Fill ADCM with many different objects: bundles, clusters, providers, hosts, jobs.
All jobs are waited to be finished before returning result dictionary.
:returns: Dictionary with providers, clusters and sometimes bundles.
"""
dirty_dir = Path(get_data_dir(__file__)) / "dirty_upgrade"
simple_provider_bundle, simple_providers, simple_hosts, all_tasks = self.create_simple_providers(
sdk_client_fs, dirty_dir
)
simple_cluster_bundle, simple_clusters, tasks = self.create_simple_clusters(sdk_client_fs, dirty_dir)
complex_objects = self.create_complex_providers_and_clusters(sdk_client_fs, dirty_dir)
upgraded_cluster, not_upgraded_cluster = self.create_upgradable_clusters(sdk_client_fs, dirty_dir)
all_tasks.extend(tasks)
_wait_for_tasks(all_tasks)
with allure.step('Delete one of simple clusters with jobs'):
self._delete_simple_cluster_with_job(simple_clusters)
return {
'simple': {
'providers': tuple(simple_providers),
'hosts': tuple(simple_hosts),
'clusters': tuple(simple_clusters),
'provider_bundle': simple_provider_bundle,
'cluster_bundle': simple_cluster_bundle,
},
'complex': {
'providers': {'host_supplier': complex_objects[0], 'free_hosts': complex_objects[1]},
'clusters': {
'all_services': complex_objects[2],
'config_history': complex_objects[3],
'not_configured': complex_objects[4],
},
},
'upgrade': {'upgraded': upgraded_cluster, 'not_upgraded': not_upgraded_cluster},
}
# Test itself
@params.including_https
@pytest.mark.parametrize("adcm_is_upgradable", [True], indirect=True)
@pytest.mark.parametrize("image", [previous_adcm_version_tag()], indirect=True)
def test_upgrade_dirty_adcm(
self,
adcm_fs: ADCM,
sdk_client_fs: ADCMClient,
adcm_api_credentials: dict,
upgrade_target: Tuple[str, str],
dirty_adcm: dict,
):
"""
Create previous version ADCM with a lot of different objects.
Upgrade ADCM.
Run actions on ADCM.
"""
objects_are_not_changed = build_objects_checker(changed=None, extractor=_get_object_fields)
with allure.step('Upgrade ADCM and expect all objects to be same'), objects_are_not_changed(
sdk_client_fs
), self.check_job_related_objects_are_not_changed(sdk_client_fs):
upgrade_adcm_version(adcm_fs, sdk_client_fs, adcm_api_credentials, upgrade_target)
self.run_actions_after_upgrade(
dirty_adcm['complex']['clusters']['all_services'],
dirty_adcm['complex']['clusters']['config_history'],
dirty_adcm['simple']['providers'][0],
)
# Steps and helpers
@contextmanager
def check_job_related_objects_are_not_changed(self, adcm_client: ADCMClient):
"""Freeze jobs and check that they aren't changed after upgrade"""
def extract_job_info(job: Job) -> dict:
return {
'task_id': job.task_id,
'status': job.status,
'start_date': job.start_date,
'finish_date': job.finish_date,
'log_ids': {log.id for log in job.log_list()},
}
comparator = build_objects_comparator(
get_compare_value=extract_job_info, field_name='Job info', name_composer=lambda obj: f"Job with id {obj.id}"
)
jobs: List[Job] = get_objects_via_pagination(adcm_client.job_list)
frozen_objects = {job.job_id: extract_job_info(job) for job in jobs}
yield
with allure.step('Assert that Jobs have correct info'):
for job_id, job_info in frozen_objects.items():
comparator(adcm_client.job(id=job_id), job_info)
@allure.step('Create simple providers')
def create_simple_providers(
self, adcm_client: ADCMClient, bundle_dir: Path
) -> Tuple[Bundle, List[Provider], List[Host], List[Task]]:
"""
Upload simple_provider bundle
Create 10 providers and 20 hosts on each provider
Change config of one of providers and one of hosts
Run failed actions on 3 of providers
Run install action on hosts of 2 providers
"""
provider_bundle = adcm_client.upload_from_fs(bundle_dir / "simple_provider")
providers = [provider_bundle.provider_create(f'Provider {random_string(6)}') for _ in range(10)]
one_of_providers = providers[-2]
one_of_providers.config_set_diff({'ssh_key': self.LONG_TEXT})
hosts = [
provider.host_create(f'{random_string(6)}-{random_string(6)}') for _ in range(20) for provider in providers
]
one_of_providers.host_list()[-1].config_set_diff({'hosts_file': self.LONG_TEXT})
tasks = [provider.action(name='validate').run() for provider in providers[:3]] + [
host.action(name='install').run() for provider in providers[-2:] for host in provider.host_list()
]
return provider_bundle, providers, hosts, tasks
@allure.step('Create a lot of simple clusters')
def create_simple_clusters(
self, adcm_client: ADCMClient, bundle_dir: Path
) -> Tuple[Bundle, List[Cluster], List[Task]]:
"""
Upload simple_cluster bundle
Create many clusters:
- With one service and launched action on component
- With one service and altered config of cluster, service and component
- With two services and launched cluster install action
:returns: Bundle, created clusters and tasks
"""
amount_of_clusters = 34
params = {
'cluster_altered_config': {'number_of_segments': 2, 'auto_reboot': False, 'textarea': self.LONG_TEXT},
'service_altered_config': {'simple-is-best': False, 'mode': 'fast'},
'component_altered_config': {'simpler-is-better': True},
'cluster_action': 'install',
'service_with_component': 'Tchaikovsky',
'lonely_service': 'Shostakovich',
'component_with_action': 'mazepa',
'component_with_config': 'symphony',
'component_action': 'no_sense_to_run_me',
}
cluster_bundle = adcm_client.upload_from_fs(bundle_dir / "simple_cluster")
tasks = []
with allure.step(f'Create {amount_of_clusters} clusters'):
clusters = [cluster_bundle.cluster_create(f'Cluster {random_string(8)}') for _ in range(amount_of_clusters)]
with allure.step('Add one service to clusters and run action on component'):
for one_service_cluster in clusters[:4]:
service = one_service_cluster.service_add(name=params['service_with_component'])
component: Component = service.component(name=params['component_with_action'])
tasks.append(component.action(name=params['component_action']).run())
with allure.step('Change config of clusters'):
for cluster_to_change_config in clusters[6:10]:
cluster_to_change_config.config_set_diff(params['cluster_altered_config'])
service = cluster_to_change_config.service_add(name=params['service_with_component'])
service.config_set_diff(params['service_altered_config'])
service.component(name=params['component_with_config']).config_set_diff(
params['component_altered_config']
)
with allure.step('Add two services to clusters and run action on them'):
for install_cluster_with_two_services in clusters[12:30]:
install_cluster_with_two_services.service_add(name=params['service_with_component'])
install_cluster_with_two_services.service_add(name=params['lonely_service'])
tasks.append(install_cluster_with_two_services.action(name=params['cluster_action']).run())
return cluster_bundle, clusters, tasks
@allure.step('Create complex provider and {amount_of_hosts} hosts with prefix "{template}" by action')
def create_complex_provider(
self, provider_bundle: Bundle, template: str = 'complex-host', amount_of_hosts: int = 18
) -> Tuple[Provider, Task]:
"""
Create provider, bunch of hosts via action (provide template if you want to use it more than 1 time).
:returns: Create provider and hosts create tasks
"""
provider = provider_bundle.provider_create(name=f'Complex Provider {random_string(6)}')
provider.config_set_diff({'very_important_flag': 54.4})
task = provider.action(name='create_hosts').run(config={'count': amount_of_hosts, 'template': template})
return provider, task
@allure.step('Create two complex providers and three complex clusters')
def create_complex_providers_and_clusters(
self, adcm_client: ADCMClient, bundles_directory: Path
) -> Tuple[Provider, Provider, Cluster, Cluster, Cluster]:
"""
Upload complex_provider and complex_cluster
Create two complex providers:
1. Provider that supply hosts for complex clusters
(all hosts created by provider action and taken by clusters)
2. Provider that create multiple hosts via action, run actions on some of hosts
and then delete multiple of them by host delete action
And three complex clusters:
1. Cluster with all services and finished jobs
2. Cluster with config history (on cluster, one service and its components)
3. Not configured cluster just with hosts and one service added
:returns: Tuple with provider and cluster objects in order that is declared above
"""
provider_bundle = adcm_client.upload_from_fs(bundles_directory / "complex_provider")
provider_bundle.license_accept()
provider, host_create_task = self.create_complex_provider(provider_bundle)
provider_with_free_hosts, _ = self.create_complex_provider(provider_bundle, template='doomed-host')
self._run_actions_on_host_and_delete_with_action(provider)
cluster_bundle = adcm_client.upload_from_fs(bundles_directory / "complex_cluster")
cluster_bundle.license_accept()
cluster_with_history = self._create_cluster_with_config_history(cluster_bundle)
# we want to wait for tasks on provider to be finished (for hosts to be created)
host_create_task.wait()
cluster_with_all_services = self._create_cluster_with_all_services(
cluster_bundle, tuple(provider.host_list())[:3]
)
cluster_with_hosts = self._create_cluster_with_hosts(cluster_bundle, tuple(provider.host_list())[3:])
return provider, provider_with_free_hosts, cluster_with_all_services, cluster_with_history, cluster_with_hosts
@allure.step('Create two upgradable clusters, upgrade one of them')
def create_upgradable_clusters(self, adcm_client: ADCMClient, bundles_directory: Path) -> Tuple[Cluster, Cluster]:
"""
1. Upload two bundles with old and new version with possibility of upgrade
2. Create two clusters of previous version
3. Run dummy actions on both of them
4. Upgrade one of clusters
:returns: Tuple with upgraded cluster and old-version cluster
"""
old_version_bundle = adcm_client.upload_from_fs(bundles_directory / "cluster_to_upgrade")
adcm_client.upload_from_fs(bundles_directory / "cluster_greater_version")
cluster_to_upgrade = old_version_bundle.cluster_create('I will be upgraded')
good_old_cluster = old_version_bundle.cluster_create('I am good the way I am')
_wait_for_tasks((cluster_to_upgrade.action(name='dummy').run(), good_old_cluster.action(name='dummy').run()))
upgrade: Upgrade = cluster_to_upgrade.upgrade()
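        # Apply the upgrade made available by the greater-version bundle uploaded above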
upgrade.do()
return cluster_to_upgrade, good_old_cluster
@allure.step('Run some actions in upgraded ADCM')
def run_actions_after_upgrade(
self, cluster_all_services: Cluster, cluster_config_history: Cluster, simple_provider: Provider
) -> None:
"""
Run successful actions on: cluster, service, component.
Run failed action on provider.
"""
sauce_service = cluster_config_history.service(name=self.SAUCE_SERVICE)
run_cluster_action_and_assert_result(cluster_all_services, 'eat_sandwich')
run_service_action_and_assert_result(sauce_service, 'put_on_bread')
run_component_action_and_assert_result(sauce_service.component(name=self.SPICE_COMPONENT), 'add_more')
run_provider_action_and_assert_result(simple_provider, 'validate', status='failed')
@allure.step('Create complex cluster with all services')
def _create_cluster_with_all_services(self, cluster_bundle: Bundle, hosts: Tuple[Host, Host, Host]) -> Cluster:
"""
Create cluster with three services
Add three hosts on it
Set components on hosts
Run some actions
"""
with allure.step('Create cluster and add services'):
cluster = cluster_bundle.cluster_create(name='With all services')
cluster.config_set_diff({'very_important_flag': 1.6})
cheese_service = cluster.service_add(name=self.CHEESE_SERVICE)
sauce_service = cluster.service_add(name=self.SAUCE_SERVICE)
bread_service = cluster.service_add(name=self.BREAD_SERVICE)
components = {
self.MILK_COMPONENT: cheese_service.component(name=self.MILK_COMPONENT),
self.TOMATO_COMPONENT: sauce_service.component(name=self.TOMATO_COMPONENT),
self.LEMON_COMPONENT: sauce_service.component(name=self.LEMON_COMPONENT),
self.SPICE_COMPONENT: sauce_service.component(name=self.SPICE_COMPONENT),
}
with allure.step('Add hosts'):
for host in hosts:
cluster.host_add(host)
with allure.step('Run actions on the cluster, all components and services'):
self._run_actions_on_components(cluster, sauce_service, components, hosts)
_wait_for_tasks(service.action().run() for service in (cheese_service, sauce_service, bread_service))
cluster.action(name='make_sandwich').run().wait()
return cluster
@allure.step('Create cluster with config history')
def _create_cluster_with_config_history(self, bundle: Bundle) -> Cluster:
"""Create cluster with one service and config history"""
def get_random_config_map() -> dict:
return {
'a_lot_of_text': {'simple_string': random_string(25), 'file_pass': random_<PASSWORD>(16)},
'from_doc': {
'memory_size': random.randint(2, 64),
'person': {
'name': random_string(13),
'age': str(random.randint(14, 80)),
'custom_field': random_string(12),
},
},
'country_codes': [
{'country': random_string(12), 'code': int(random.randint(1, 200))} for _ in range(4)
],
}
def get_component_random_config_map() -> dict:
return {'illicium': random.random()}
config_change_iterations = 100
cluster = bundle.cluster_create(name='Config history')
cluster.config_set_diff({'very_important_flag': 1.6})
with allure.step(f"Change cluster's config {config_change_iterations} times"):
for _ in range(config_change_iterations):
cluster.config_set_diff(get_random_config_map())
with allure.step(f"Add service and change its config {config_change_iterations} times"):
service = cluster.service_add(name=self.SAUCE_SERVICE)
for _ in range(config_change_iterations):
service.config_set_diff(get_random_config_map())
with allure.step(f"Change component's config {config_change_iterations} times"):
component = service.component()
for _ in range(config_change_iterations):
component.config_set_diff(get_component_random_config_map())
return cluster
@allure.step('Create cluster, add service {service_name} and add hosts to cluster')
def _create_cluster_with_hosts(
self, cluster_bundle: Bundle, hosts: Tuple[Host, ...], service_name: str = SAUCE_SERVICE
) -> Cluster:
"""
Create cluster with given amount of hosts.
Cluster is not configured (can't run actions on it).
Cluster has 1 service added.
"""
cluster = cluster_bundle.cluster_create(name='Cluster with hosts')
cluster.service_add(name=service_name)
for host in hosts:
cluster.host_add(host)
return cluster
@allure.step("Run actions on provider's hosts and remove every 4th host by action on host")
def _run_actions_on_host_and_delete_with_action(self, provider: Provider) -> None:
"""Run dummy actions on each second host and delete each fourth host after tasks are finished"""
hosts = tuple(provider.host_list())
_wait_for_tasks(tuple((host.action(name='dummy_action').run() for host in hosts[::2])))
_wait_for_tasks(tuple((host.action(name='remove_host').run() for host in hosts[::4])))
def _run_actions_on_components(self, cluster: Cluster, service: Service, components: dict, hosts: tuple):
"""Utility function to run actions on components (host actions too)"""
cluster.action(name='make_sauce').run(
hc=tuple(
(
{'host_id': host_id, 'service_id': service.id, 'component_id': component_id}
for host_id, component_id in (
(hosts[1].id, components[self.SPICE_COMPONENT].id),
(hosts[1].id, components[self.LEMON_COMPONENT].id),
(hosts[2].id, components[self.TOMATO_COMPONENT].id),
)
)
)
).wait()
cluster.hostcomponent_set(
(hosts[0], components[self.MILK_COMPONENT]),
*[
(cluster.host(id=hc['host_id']), service.component(id=hc['component_id']))
for hc in cluster.hostcomponent()
],
)
_wait_for_tasks(
(
components[self.TOMATO_COMPONENT].action(name='add_more').run(),
components[self.SPICE_COMPONENT].action(name='add_more').run(),
)
)
    def _delete_simple_cluster_with_job(self, simple_clusters: List[Cluster]) -> None:
        """Delete one of the simple clusters where at least one job was run"""
cluster_with_job = next(
filter(lambda cluster: any(len(action.task_list()) for action in cluster.action_list()), simple_clusters),
None,
)
if cluster_with_job is None:
            raise ValueError('At least one of the simple clusters should have a job')
cluster_with_job.delete()
def _get_object_fields(adcm_object: AnyADCMObject) -> dict:
"""
Save all common fields of an object to one big dict
Useful for dirty upgrade
"""
return {
'name_or_fqdn': adcm_object.name if hasattr(adcm_object, 'name') else adcm_object.fqdn,
'display_name': getattr(adcm_object, 'display_name', None),
'edition': getattr(adcm_object, 'edition', None),
'state': adcm_object.state,
'config': get_config(adcm_object),
# if visibility is changed, it may break
'actions': set(action.id for action in adcm_object.action_list()),
}
@allure.step('Wait for tasks')
def _wait_for_tasks(tasks_to_wait: Iterable[Task]):
"""Iterate over `tasks_to_wait` and wait for each to be finished (results aren't checked)"""
for task in tasks_to_wait:
task.wait()
|
[
"adcm_pytest_plugin.utils.catch_failed",
"pytest.mark.parametrize",
"adcm_pytest_plugin.steps.actions.run_service_action_and_assert_result",
"tests.functional.plugin_utils.build_objects_comparator",
"random.randint",
"pytest.fail",
"adcm_pytest_plugin.utils.random_string",
"adcm_pytest_plugin.steps.actions.run_provider_action_and_assert_result",
"allure.step",
"pytest.fixture",
"tests.functional.tools.get_config",
"random.random",
"adcm_pytest_plugin.utils.get_data_dir",
"tests.functional.tools.get_objects_via_pagination",
"adcm_pytest_plugin.steps.actions.run_cluster_action_and_assert_result",
"adcm_pytest_plugin.plugin.parametrized_by_adcm_version",
"tests.library.utils.previous_adcm_version_tag",
"tests.upgrade_utils.upgrade_adcm_version",
"tests.functional.plugin_utils.build_objects_checker"
] |
[((1909, 1940), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (1923, 1940), False, 'import pytest\n'), ((2969, 3010), 'allure.step', 'allure.step', (['"""Check actions availability"""'], {}), "('Check actions availability')\n", (2980, 3010), False, 'import allure\n'), ((3289, 3348), 'allure.step', 'allure.step', (['"""Check that previously created cluster exists"""'], {}), "('Check that previously created cluster exists')\n", (3300, 3348), False, 'import allure\n'), ((3650, 3709), 'allure.step', 'allure.step', (['"""Check that previously created service exists"""'], {}), "('Check that previously created service exists')\n", (3661, 3709), False, 'import allure\n'), ((3966, 3997), 'allure.step', 'allure.step', (['"""Check encryption"""'], {}), "('Check encryption')\n", (3977, 3997), False, 'import allure\n'), ((4133, 4201), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""adcm_is_upgradable"""', '[True]'], {'indirect': '(True)'}), "('adcm_is_upgradable', [True], indirect=True)\n", (4156, 4201), False, 'import pytest\n'), ((4764, 4832), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""adcm_is_upgradable"""', '[True]'], {'indirect': '(True)'}), "('adcm_is_upgradable', [True], indirect=True)\n", (4787, 4832), False, 'import pytest\n'), ((5560, 5628), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""adcm_is_upgradable"""', '[True]'], {'indirect': '(True)'}), "('adcm_is_upgradable', [True], indirect=True)\n", (5583, 5628), False, 'import pytest\n'), ((5630, 5742), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""image"""', "[['hub.arenadata.io/adcm/adcm', '2021.06.17.06']]"], {'ids': 'repr', 'indirect': '(True)'}), "('image', [['hub.arenadata.io/adcm/adcm',\n '2021.06.17.06']], ids=repr, indirect=True)\n", (5653, 5742), False, 'import pytest\n'), ((27033, 27062), 'allure.step', 'allure.step', (['"""Wait for tasks"""'], {}), "('Wait for tasks')\n", (27044, 27062), False, 'import allure\n'), ((4578, 4665), 'tests.upgrade_utils.upgrade_adcm_version', 'upgrade_adcm_version', (['adcm_fs', 'sdk_client_fs', 'adcm_api_credentials', 'adcm_image_tags'], {}), '(adcm_fs, sdk_client_fs, adcm_api_credentials,\n adcm_image_tags)\n', (4598, 4665), False, 'from tests.upgrade_utils import upgrade_adcm_version\n'), ((5410, 5497), 'tests.upgrade_utils.upgrade_adcm_version', 'upgrade_adcm_version', (['adcm_fs', 'sdk_client_fs', 'adcm_api_credentials', 'adcm_image_tags'], {}), '(adcm_fs, sdk_client_fs, adcm_api_credentials,\n adcm_image_tags)\n', (5430, 5497), False, 'from tests.upgrade_utils import upgrade_adcm_version\n'), ((6112, 6198), 'tests.upgrade_utils.upgrade_adcm_version', 'upgrade_adcm_version', (['adcm_fs', 'sdk_client_fs', 'adcm_api_credentials', 'upgrade_target'], {}), '(adcm_fs, sdk_client_fs, adcm_api_credentials,\n upgrade_target)\n', (6132, 6198), False, 'from tests.upgrade_utils import upgrade_adcm_version\n'), ((6917, 6933), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (6931, 6933), False, 'import pytest\n'), ((8843, 8911), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""adcm_is_upgradable"""', '[True]'], {'indirect': '(True)'}), "('adcm_is_upgradable', [True], indirect=True)\n", (8866, 8911), False, 'import pytest\n'), ((11044, 11082), 'allure.step', 'allure.step', (['"""Create simple providers"""'], {}), "('Create simple providers')\n", (11055, 11082), False, 'import allure\n'), ((12322, 12368), 'allure.step', 'allure.step', (['"""Create a lot of simple clusters"""'], {}), "('Create a 
lot of simple clusters')\n", (12333, 12368), False, 'import allure\n'), ((15229, 15340), 'allure.step', 'allure.step', (['"""Create complex provider and {amount_of_hosts} hosts with prefix "{template}" by action"""'], {}), '(\n \'Create complex provider and {amount_of_hosts} hosts with prefix "{template}" by action\'\n )\n', (15240, 15340), False, 'import allure\n'), ((15994, 16064), 'allure.step', 'allure.step', (['"""Create two complex providers and three complex clusters"""'], {}), "('Create two complex providers and three complex clusters')\n", (16005, 16064), False, 'import allure\n'), ((18125, 18191), 'allure.step', 'allure.step', (['"""Create two upgradable clusters, upgrade one of them"""'], {}), "('Create two upgradable clusters, upgrade one of them')\n", (18136, 18191), False, 'import allure\n'), ((19225, 19273), 'allure.step', 'allure.step', (['"""Run some actions in upgraded ADCM"""'], {}), "('Run some actions in upgraded ADCM')\n", (19236, 19273), False, 'import allure\n'), ((20003, 20058), 'allure.step', 'allure.step', (['"""Create complex cluster with all services"""'], {}), "('Create complex cluster with all services')\n", (20014, 20058), False, 'import allure\n'), ((21643, 21692), 'allure.step', 'allure.step', (['"""Create cluster with config history"""'], {}), "('Create cluster with config history')\n", (21654, 21692), False, 'import allure\n'), ((23568, 23655), 'allure.step', 'allure.step', (['"""Create cluster, add service {service_name} and add hosts to cluster"""'], {}), "(\n 'Create cluster, add service {service_name} and add hosts to cluster')\n", (23579, 23655), False, 'import allure\n'), ((24188, 24288), 'allure.step', 'allure.step', (['"""Run actions on provider\'s hosts and remove every 4th host by action on host"""'], {}), '(\n "Run actions on provider\'s hosts and remove every 4th host by action on host"\n )\n', (24199, 24288), False, 'import allure\n'), ((2060, 2118), 'pytest.fail', 'pytest.fail', (['"""CLI parameter adcm_image should be provided"""'], {}), "('CLI parameter adcm_image should be provided')\n", (2071, 2118), False, 'import pytest\n'), ((2305, 2364), 'adcm_pytest_plugin.plugin.parametrized_by_adcm_version', 'parametrized_by_adcm_version', ([], {'adcm_min_version': '"""2019.10.08"""'}), "(adcm_min_version='2019.10.08')\n", (2333, 2364), False, 'from adcm_pytest_plugin.plugin import parametrized_by_adcm_version\n'), ((2507, 2541), 'adcm_pytest_plugin.utils.get_data_dir', 'get_data_dir', (['__file__', 'bundle_dir'], {}), '(__file__, bundle_dir)\n', (2519, 2541), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((2791, 2825), 'adcm_pytest_plugin.utils.get_data_dir', 'get_data_dir', (['__file__', 'bundle_dir'], {}), '(__file__, bundle_dir)\n', (2803, 2825), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((3528, 3596), 'adcm_pytest_plugin.utils.catch_failed', 'catch_failed', (['ObjectNotFound', '"""Previously created cluster not found"""'], {}), "(ObjectNotFound, 'Previously created cluster not found')\n", (3540, 3596), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((3859, 3924), 'adcm_pytest_plugin.utils.catch_failed', 'catch_failed', (['ObjectNotFound', '"""Previously created host not found"""'], {}), "(ObjectNotFound, 'Previously created host not found')\n", (3871, 3924), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((9390, 9455), 
'tests.functional.plugin_utils.build_objects_checker', 'build_objects_checker', ([], {'changed': 'None', 'extractor': '_get_object_fields'}), '(changed=None, extractor=_get_object_fields)\n', (9411, 9455), False, 'from tests.functional.plugin_utils import build_objects_checker, build_objects_comparator\n'), ((10524, 10663), 'tests.functional.plugin_utils.build_objects_comparator', 'build_objects_comparator', ([], {'get_compare_value': 'extract_job_info', 'field_name': '"""Job info"""', 'name_composer': "(lambda obj: f'Job with id {obj.id}')"}), "(get_compare_value=extract_job_info, field_name=\n 'Job info', name_composer=lambda obj: f'Job with id {obj.id}')\n", (10548, 10663), False, 'from tests.functional.plugin_utils import build_objects_checker, build_objects_comparator\n'), ((10707, 10755), 'tests.functional.tools.get_objects_via_pagination', 'get_objects_via_pagination', (['adcm_client.job_list'], {}), '(adcm_client.job_list)\n', (10733, 10755), False, 'from tests.functional.tools import AnyADCMObject, get_config, get_objects_via_pagination\n'), ((19643, 19717), 'adcm_pytest_plugin.steps.actions.run_cluster_action_and_assert_result', 'run_cluster_action_and_assert_result', (['cluster_all_services', '"""eat_sandwich"""'], {}), "(cluster_all_services, 'eat_sandwich')\n", (19679, 19717), False, 'from adcm_pytest_plugin.steps.actions import run_cluster_action_and_assert_result, run_service_action_and_assert_result, run_component_action_and_assert_result, run_provider_action_and_assert_result\n'), ((19726, 19793), 'adcm_pytest_plugin.steps.actions.run_service_action_and_assert_result', 'run_service_action_and_assert_result', (['sauce_service', '"""put_on_bread"""'], {}), "(sauce_service, 'put_on_bread')\n", (19762, 19793), False, 'from adcm_pytest_plugin.steps.actions import run_cluster_action_and_assert_result, run_service_action_and_assert_result, run_component_action_and_assert_result, run_provider_action_and_assert_result\n'), ((19913, 20001), 'adcm_pytest_plugin.steps.actions.run_provider_action_and_assert_result', 'run_provider_action_and_assert_result', (['simple_provider', '"""validate"""'], {'status': '"""failed"""'}), "(simple_provider, 'validate', status=\n 'failed')\n", (19950, 20001), False, 'from adcm_pytest_plugin.steps.actions import run_cluster_action_and_assert_result, run_service_action_and_assert_result, run_component_action_and_assert_result, run_provider_action_and_assert_result\n'), ((26875, 26898), 'tests.functional.tools.get_config', 'get_config', (['adcm_object'], {}), '(adcm_object)\n', (26885, 26898), False, 'from tests.functional.tools import AnyADCMObject, get_config, get_objects_via_pagination\n'), ((2570, 2585), 'adcm_pytest_plugin.utils.random_string', 'random_string', ([], {}), '()\n', (2583, 2585), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((7878, 7932), 'allure.step', 'allure.step', (['"""Delete one of simple clusters with jobs"""'], {}), "('Delete one of simple clusters with jobs')\n", (7889, 7932), False, 'import allure\n'), ((9469, 9530), 'allure.step', 'allure.step', (['"""Upgrade ADCM and expect all objects to be same"""'], {}), "('Upgrade ADCM and expect all objects to be same')\n", (9480, 9530), False, 'import allure\n'), ((9669, 9755), 'tests.upgrade_utils.upgrade_adcm_version', 'upgrade_adcm_version', (['adcm_fs', 'sdk_client_fs', 'adcm_api_credentials', 'upgrade_target'], {}), '(adcm_fs, sdk_client_fs, adcm_api_credentials,\n upgrade_target)\n', (9689, 9755), False, 'from tests.upgrade_utils 
import upgrade_adcm_version\n'), ((8951, 8978), 'tests.library.utils.previous_adcm_version_tag', 'previous_adcm_version_tag', ([], {}), '()\n', (8976, 8978), False, 'from tests.library.utils import previous_adcm_version_tag\n'), ((10862, 10911), 'allure.step', 'allure.step', (['"""Assert that Jobs have correct info"""'], {}), "('Assert that Jobs have correct info')\n", (10873, 10911), False, 'import allure\n'), ((13588, 13640), 'allure.step', 'allure.step', (['f"""Create {amount_of_clusters} clusters"""'], {}), "(f'Create {amount_of_clusters} clusters')\n", (13599, 13640), False, 'import allure\n'), ((13776, 13846), 'allure.step', 'allure.step', (['"""Add one service to clusters and run action on component"""'], {}), "('Add one service to clusters and run action on component')\n", (13787, 13846), False, 'import allure\n'), ((14192, 14232), 'allure.step', 'allure.step', (['"""Change config of clusters"""'], {}), "('Change config of clusters')\n", (14203, 14232), False, 'import allure\n'), ((14736, 14802), 'allure.step', 'allure.step', (['"""Add two services to clusters and run action on them"""'], {}), "('Add two services to clusters and run action on them')\n", (14747, 14802), False, 'import allure\n'), ((20342, 20388), 'allure.step', 'allure.step', (['"""Create cluster and add services"""'], {}), "('Create cluster and add services')\n", (20353, 20388), False, 'import allure\n'), ((21170, 21194), 'allure.step', 'allure.step', (['"""Add hosts"""'], {}), "('Add hosts')\n", (21181, 21194), False, 'import allure\n'), ((21279, 21349), 'allure.step', 'allure.step', (['"""Run actions on the cluster, all components and services"""'], {}), "('Run actions on the cluster, all components and services')\n", (21290, 21349), False, 'import allure\n'), ((22799, 22871), 'allure.step', 'allure.step', (['f"""Change cluster\'s config {config_change_iterations} times"""'], {}), '(f"Change cluster\'s config {config_change_iterations} times")\n', (22810, 22871), False, 'import allure\n'), ((23005, 23092), 'allure.step', 'allure.step', (['f"""Add service and change its config {config_change_iterations} times"""'], {}), "(\n f'Add service and change its config {config_change_iterations} times')\n", (23016, 23092), False, 'import allure\n'), ((23288, 23362), 'allure.step', 'allure.step', (['f"""Change component\'s config {config_change_iterations} times"""'], {}), '(f"Change component\'s config {config_change_iterations} times")\n', (23299, 23362), False, 'import allure\n'), ((7291, 7313), 'adcm_pytest_plugin.utils.get_data_dir', 'get_data_dir', (['__file__'], {}), '(__file__)\n', (7303, 7313), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((22604, 22619), 'random.random', 'random.random', ([], {}), '()\n', (22617, 22619), False, 'import random\n'), ((2878, 2893), 'adcm_pytest_plugin.utils.random_string', 'random_string', ([], {}), '()\n', (2891, 2893), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((2947, 2962), 'adcm_pytest_plugin.utils.random_string', 'random_string', ([], {}), '()\n', (2960, 2962), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((21954, 21971), 'adcm_pytest_plugin.utils.random_string', 'random_string', (['(25)'], {}), '(25)\n', (21967, 21971), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((22075, 22096), 'random.randint', 'random.randint', (['(2)', '(64)'], {}), '(2, 64)\n', (22089, 22096), False, 'import 
random\n'), ((11661, 11677), 'adcm_pytest_plugin.utils.random_string', 'random_string', (['(6)'], {}), '(6)\n', (11674, 11677), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((11866, 11882), 'adcm_pytest_plugin.utils.random_string', 'random_string', (['(6)'], {}), '(6)\n', (11879, 11882), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((11885, 11901), 'adcm_pytest_plugin.utils.random_string', 'random_string', (['(6)'], {}), '(6)\n', (11898, 11901), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((15761, 15777), 'adcm_pytest_plugin.utils.random_string', 'random_string', (['(6)'], {}), '(6)\n', (15774, 15777), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((22162, 22179), 'adcm_pytest_plugin.utils.random_string', 'random_string', (['(13)'], {}), '(13)\n', (22175, 22179), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((22281, 22298), 'adcm_pytest_plugin.utils.random_string', 'random_string', (['(12)'], {}), '(12)\n', (22294, 22298), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((22409, 22426), 'adcm_pytest_plugin.utils.random_string', 'random_string', (['(12)'], {}), '(12)\n', (22422, 22426), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((13707, 13723), 'adcm_pytest_plugin.utils.random_string', 'random_string', (['(8)'], {}), '(8)\n', (13720, 13723), False, 'from adcm_pytest_plugin.utils import catch_failed, get_data_dir, random_string\n'), ((22216, 22238), 'random.randint', 'random.randint', (['(14)', '(80)'], {}), '(14, 80)\n', (22230, 22238), False, 'import random\n'), ((22440, 22462), 'random.randint', 'random.randint', (['(1)', '(200)'], {}), '(1, 200)\n', (22454, 22462), False, 'import random\n')]
|
import numpy as np
from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request
from Frontend import application
# Imports application details from a private file
from details import client_id, client_secret
import requests
from Backend.RequestError import RequestError
from Backend.SptfyApiHndler import SptfyApiHndler
application.config.from_object('configurations.ProductionConfig')
@application.route('/getPlaylists', methods = ['GET'])
def getPlaylists():
try:
assert request.path == '/getPlaylists'
assert request.method == 'GET'
tkn = request.args.get('tkn')
## Get user_id
req = requests.get( "https://api.spotify.com/v1/me",
headers={ 'authorization': "Bearer " + tkn })
if req.status_code != 200:
            print('An error occurred getting the user id, error code: ' + str(req.status_code))
raise RequestError('An Error has occured')
req_Json = req.json()
usr_id = req_Json['id']
## Get user Playlists
playlists = []
i = 0
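        # Page through the user's playlists 50 at a time. Playlists without a
        # cover image are skipped, so the loop condition below also stops paging
        # as soon as a page contributes fewer entries than were requested.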
while(len(playlists)==i):
req = requests.get("https://api.spotify.com/v1/users/"+usr_id+"/playlists?limit="+str(50)+"&offset="+str(i), headers={ 'authorization': "Bearer " + tkn })
if req.status_code != 200:
                print('An error occurred getting user playlists, error code: ' + str(req.status_code))
raise RequestError('An Error has occured')
req_Json = req.json()
for lst in req_Json['items']:
images = lst['images']
if(len(images)==0):
continue
if(len(images)>=2):
image_url = images[1]['url']
else:
image_url = images[0]['url']
playlists.append({'id':lst['id'], 'isActive':False,'image_url':image_url, 'name':lst['name'], 'tracks':lst['tracks']['total']})
i = i+50
return jsonify({'ok':True, 'playlists':playlists})
except RequestError:
return jsonify({'ok':False, 'message':"A requesterror has occured"})
except AssertionError:
return jsonify({'ok':False, 'message':"An invalid request has been made"})
except Exception:
return jsonify({'ok':False, 'message':"An unexpected error has occured"})
@application.route('/CheckMood', methods = ['GET'])
def checkMood():
try:
assert request.path == '/CheckMood'
assert request.method == 'GET'
tkn = request.args.get('tkn')
mood = request.args.get('mood')
## Get playlists on mood
req = requests.get( "https://api.spotify.com/v1/search?q="+mood+"&type=playlist&limit=5",
headers={ 'authorization': "Bearer " + tkn })
if req.status_code != 200:
raise RequestError('An Error has occured')
req_Json = req.json()
playlists = req_Json['playlists']['items']
if(len(playlists)<5):
return jsonify({'ok':True, 'valid':False})
else:
return jsonify({'ok':True, 'valid':True})
except RequestError as e:
return jsonify({'ok':False, 'message':"A requesterror has occured"})
except AssertionError as e:
return jsonify({'ok':False, 'message':"An invalid type of request has been made"})
except Exception as e:
return jsonify({'ok':False, 'message':"An unexpected error has occured"})
@application.route('/createPlaylist', methods = ['GET'])
def createPlaylist():
try:
assert request.path == '/createPlaylist'
assert request.method == 'GET'
tkn = request.args.get('tkn')
mood = request.args.get('mood')
playlists = request.args.get('playlistIDs').split(',')
####### PUT BACKEND CODE METHOD HERE #################
sptfyApi = SptfyApiHndler()
newPlaylistID = sptfyApi.filterPlaylists(client_id,tkn,mood,playlists)
########################
# newPlaylistID = 'https://open.spotify.com/embed/playlist/7xB5RIoWhp2RHVCT43GwWg?si=9XxgO-g9QIS0v4GcIaCH9Q'
return jsonify({'ok':True, 'newPlaylistID':newPlaylistID})
except RequestError as e:
print(e)
return jsonify({'ok':False, 'message':"A requesterror has occured"})
except AssertionError as e:
return jsonify({'ok':False, 'message':"An invalid type of request has been made"})
except Exception as e:
print(e)
return jsonify({'ok':False, 'message':"An unexpected error has occured"})
@application.route('/')
def login():
scopes = ['user-read-private','user-read-email','playlist-read-private','playlist-read-collaborative','playlist-modify-public']
scope = '%20'.join(scopes)
redirect_url = url_for('index')
redirect_url = 'http://127.0.0.1:5000'+redirect_url
url = 'https://accounts.spotify.com/authorize?client_id='+client_id+'&redirect_uri='+redirect_url+'&scope='+scope+'&response_type=token&state=123'
return redirect(url)
@application.route('/welcome')
def index():
resp = make_response(render_template("index.html"))
    resp.set_cookie('cross-site-cookie', 'spotify1', domain='.spotify.com', samesite=None, secure=True)
    resp.set_cookie('cross-site-cookie', 'spotify2', domain='.accounts.spotify.com', samesite=None, secure=True)
    resp.set_cookie('cross-site-cookie', 'spotify3', domain='.community.spotify.com', samesite=None, secure=True)
    resp.set_cookie('cross-site-cookie', 'spotify4', domain='.www.spotify.com', samesite=None, secure=True)
    resp.set_cookie('cross-site-cookie', 'goadjust', domain='go.adjust.com', samesite=None, secure=True)
    resp.set_cookie('cross-site-cookie', 'applicationadjust', domain='application.adjust.<EMAIL>', samesite=None, secure=True)
    resp.set_cookie('cross-site-cookie', 'general', samesite=None, secure=True)
resp.headers.add('Set-Cookie','cross-site-cookie=spotify; SameSite=None; Secure')
return resp
if __name__=='__main__':
application.run()
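
# Hedged usage sketch (not part of the original app): calling the /getPlaylists
# endpoint from a separate client process, assuming the server is running locally
# on port 5000 and `tkn` holds a valid Spotify OAuth access token.
#
#   import requests
#   resp = requests.get('http://127.0.0.1:5000/getPlaylists', params={'tkn': tkn})
#   print(resp.json().get('playlists', []))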
|
[
"Frontend.application.route",
"Frontend.application.run",
"flask.redirect",
"flask.request.args.get",
"Backend.RequestError.RequestError",
"Backend.SptfyApiHndler.SptfyApiHndler",
"flask.url_for",
"flask.jsonify",
"Frontend.application.config.from_object",
"requests.get",
"flask.render_template"
] |
[((353, 418), 'Frontend.application.config.from_object', 'application.config.from_object', (['"""configurations.ProductionConfig"""'], {}), "('configurations.ProductionConfig')\n", (383, 418), False, 'from Frontend import application\n'), ((421, 472), 'Frontend.application.route', 'application.route', (['"""/getPlaylists"""'], {'methods': "['GET']"}), "('/getPlaylists', methods=['GET'])\n", (438, 472), False, 'from Frontend import application\n'), ((2385, 2433), 'Frontend.application.route', 'application.route', (['"""/CheckMood"""'], {'methods': "['GET']"}), "('/CheckMood', methods=['GET'])\n", (2402, 2433), False, 'from Frontend import application\n'), ((3498, 3551), 'Frontend.application.route', 'application.route', (['"""/createPlaylist"""'], {'methods': "['GET']"}), "('/createPlaylist', methods=['GET'])\n", (3515, 3551), False, 'from Frontend import application\n'), ((4584, 4606), 'Frontend.application.route', 'application.route', (['"""/"""'], {}), "('/')\n", (4601, 4606), False, 'from Frontend import application\n'), ((5053, 5082), 'Frontend.application.route', 'application.route', (['"""/welcome"""'], {}), "('/welcome')\n", (5070, 5082), False, 'from Frontend import application\n'), ((4802, 4818), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (4809, 4818), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((5037, 5050), 'flask.redirect', 'redirect', (['url'], {}), '(url)\n', (5045, 5050), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((6042, 6059), 'Frontend.application.run', 'application.run', ([], {}), '()\n', (6057, 6059), False, 'from Frontend import application\n'), ((605, 628), 'flask.request.args.get', 'request.args.get', (['"""tkn"""'], {}), "('tkn')\n", (621, 628), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((666, 760), 'requests.get', 'requests.get', (['"""https://api.spotify.com/v1/me"""'], {'headers': "{'authorization': 'Bearer ' + tkn}"}), "('https://api.spotify.com/v1/me', headers={'authorization': \n 'Bearer ' + tkn})\n", (678, 760), False, 'import requests\n'), ((2023, 2068), 'flask.jsonify', 'jsonify', (["{'ok': True, 'playlists': playlists}"], {}), "({'ok': True, 'playlists': playlists})\n", (2030, 2068), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((2559, 2582), 'flask.request.args.get', 'request.args.get', (['"""tkn"""'], {}), "('tkn')\n", (2575, 2582), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((2598, 2622), 'flask.request.args.get', 'request.args.get', (['"""mood"""'], {}), "('mood')\n", (2614, 2622), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((2671, 2805), 'requests.get', 'requests.get', (["('https://api.spotify.com/v1/search?q=' + mood + '&type=playlist&limit=5')"], {'headers': "{'authorization': 'Bearer ' + tkn}"}), "('https://api.spotify.com/v1/search?q=' + mood +\n '&type=playlist&limit=5', headers={'authorization': 'Bearer ' + tkn})\n", (2683, 2805), False, 'import requests\n'), ((3687, 3710), 'flask.request.args.get', 'request.args.get', (['"""tkn"""'], {}), "('tkn')\n", (3703, 3710), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((3726, 3750), 'flask.request.args.get', 'request.args.get', (['"""mood"""'], {}), 
"('mood')\n", (3742, 3750), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((3896, 3912), 'Backend.SptfyApiHndler.SptfyApiHndler', 'SptfyApiHndler', ([], {}), '()\n', (3910, 3912), False, 'from Backend.SptfyApiHndler import SptfyApiHndler\n'), ((4157, 4210), 'flask.jsonify', 'jsonify', (["{'ok': True, 'newPlaylistID': newPlaylistID}"], {}), "({'ok': True, 'newPlaylistID': newPlaylistID})\n", (4164, 4210), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((5121, 5150), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (5136, 5150), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((941, 977), 'Backend.RequestError.RequestError', 'RequestError', (['"""An Error has occured"""'], {}), "('An Error has occured')\n", (953, 977), False, 'from Backend.RequestError import RequestError\n'), ((2107, 2170), 'flask.jsonify', 'jsonify', (["{'ok': False, 'message': 'A requesterror has occured'}"], {}), "({'ok': False, 'message': 'A requesterror has occured'})\n", (2114, 2170), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((2211, 2280), 'flask.jsonify', 'jsonify', (["{'ok': False, 'message': 'An invalid request has been made'}"], {}), "({'ok': False, 'message': 'An invalid request has been made'})\n", (2218, 2280), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((2316, 2384), 'flask.jsonify', 'jsonify', (["{'ok': False, 'message': 'An unexpected error has occured'}"], {}), "({'ok': False, 'message': 'An unexpected error has occured'})\n", (2323, 2384), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((2886, 2922), 'Backend.RequestError.RequestError', 'RequestError', (['"""An Error has occured"""'], {}), "('An Error has occured')\n", (2898, 2922), False, 'from Backend.RequestError import RequestError\n'), ((3053, 3090), 'flask.jsonify', 'jsonify', (["{'ok': True, 'valid': False}"], {}), "({'ok': True, 'valid': False})\n", (3060, 3090), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((3122, 3158), 'flask.jsonify', 'jsonify', (["{'ok': True, 'valid': True}"], {}), "({'ok': True, 'valid': True})\n", (3129, 3158), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((3202, 3265), 'flask.jsonify', 'jsonify', (["{'ok': False, 'message': 'A requesterror has occured'}"], {}), "({'ok': False, 'message': 'A requesterror has occured'})\n", (3209, 3265), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((3311, 3388), 'flask.jsonify', 'jsonify', (["{'ok': False, 'message': 'An invalid type of request has been made'}"], {}), "({'ok': False, 'message': 'An invalid type of request has been made'})\n", (3318, 3388), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((3429, 3497), 'flask.jsonify', 'jsonify', (["{'ok': False, 'message': 'An unexpected error has occured'}"], {}), "({'ok': False, 'message': 'An unexpected error has occured'})\n", (3436, 3497), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((4271, 4334), 'flask.jsonify', 'jsonify', (["{'ok': 
False, 'message': 'A requesterror has occured'}"], {}), "({'ok': False, 'message': 'A requesterror has occured'})\n", (4278, 4334), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((4380, 4457), 'flask.jsonify', 'jsonify', (["{'ok': False, 'message': 'An invalid type of request has been made'}"], {}), "({'ok': False, 'message': 'An invalid type of request has been made'})\n", (4387, 4457), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((4515, 4583), 'flask.jsonify', 'jsonify', (["{'ok': False, 'message': 'An unexpected error has occured'}"], {}), "({'ok': False, 'message': 'An unexpected error has occured'})\n", (4522, 4583), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n'), ((1469, 1505), 'Backend.RequestError.RequestError', 'RequestError', (['"""An Error has occured"""'], {}), "('An Error has occured')\n", (1481, 1505), False, 'from Backend.RequestError import RequestError\n'), ((3771, 3802), 'flask.request.args.get', 'request.args.get', (['"""playlistIDs"""'], {}), "('playlistIDs')\n", (3787, 3802), False, 'from flask import Flask, render_template, redirect, url_for, jsonify, make_response, request\n')]
|
"""
==================================================
Save image to GeoTIFF
==================================================
This example demonstrates how to save an image
to your local machine in GeoTIFF format.
"""
import descarteslabs as dl
# Create an aoi feature to clip imagery to
box = {
"type": "Polygon",
"coordinates": [
[
[-108.64292971398066, 33.58051349561343],
[-108.27082685426221, 33.58051349561343],
[-108.27082685426221, 33.83925599538719],
[-108.64292971398066, 33.83925599538719],
[-108.64292971398066, 33.58051349561343],
]
],
}
# Two predefined image IDs for mosaic and download. These can be obtained through a Metadata or Scenes API search
images = [
"landsat:LC08:01:RT:TOAR:meta_LC08_L1TP_035037_20180602_20180602_01_RT_v1",
"landsat:LC08:01:RT:TOAR:meta_LC08_L1TP_035036_20180602_20180602_01_RT_v1",
]
# The Raster API call to download an image mosaic. Other parameters are available.
# The file is written to the same directory as the script.
raster_client = dl.Raster()
raster_client.raster(
inputs=images,
bands=["red", "green", "blue", "alpha"],
scales=[[0, 5500], [0, 5500], [0, 5500], None],
data_type="Byte",
cutline=box,
save=True,
outfile_basename="save_local",
resolution=60,
)
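
# Hedged follow-up (not part of the original example): inspecting the downloaded
# GeoTIFF. The filename "save_local.tif" is assumed from outfile_basename above,
# and rasterio is an assumed extra dependency, not required by this example.
#
#   import rasterio
#   with rasterio.open("save_local.tif") as src:
#       print(src.count, src.width, src.height)  # bands, columns, rows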
|
[
"descarteslabs.Raster"
] |
[((1094, 1105), 'descarteslabs.Raster', 'dl.Raster', ([], {}), '()\n', (1103, 1105), True, 'import descarteslabs as dl\n')]
|
from __future__ import absolute_import
import os
import argparse
import cwltool.main
import cwltool.argparser
import cwltool.utils
from .exec_profile import ExecProfileBase, LocalToolExec
from cwltool.executors import MultithreadedJobExecutor, SingleJobExecutor
from . import worker
from .tool_handling import make_custom_tool
from .log_handling import error_message
from copy import copy
import typing_extensions
from inspect import isclass
import importlib
import functools
import yaml
## get cwltool default args:
cwltool_ap = cwltool.argparser.arg_parser()
cwltool_default_args = cwltool_ap.parse_args([])
def main(args=None):
if args is None:
parser = argparse.ArgumentParser(
prog="C2WL-Rocket",
description='Customizable CWL Rocket - A highly flexible CWL execution engine.'
)
subparser = parser.add_subparsers(
help="CWLab sub-commands",
dest='subcommand'
)
## subcommand launch:
parser_launch = subparser.add_parser(
"launch",
help="Start execution of a CWL workflow given run input parameter."
)
parser_launch.add_argument("--debug",
action="store_true",
help="Print debugging level messages."
)
parser_launch.add_argument('-p', '--exec-profile',
help="""Specify an exec profile.
            Please specify the name of a python module and
            a contained exec profile class separated by \":\"
(e.g. the default \"c2wl_rocket.exec_profile:LocalToolExec\").
Alternatively you can specify the full path to a python file
containing an exec profile class
(e.g. \"/path/to/my/exec_profiles.py:CustomExec\").
""",
default="c2wl_rocket.exec_profile:LocalToolExec"
)
parser_launch.add_argument('cwl_document',
help="Provide a CWL workflow or tool."
)
parser_launch.add_argument('input_params',
nargs=argparse.REMAINDER,
help="Provide input parameters in YAML or JSON format."
)
parser_launch.add_argument("--outdir",
type=typing_extensions.Text,
help="Output directory, default current directory",
default=os.path.abspath('.')
)
exgroup = parser_launch.add_mutually_exclusive_group()
exgroup.add_argument("--tmp-outdir-prefix",
type=typing_extensions.Text,
help="Path prefix for intermediate output directories",
default=cwltool.utils.DEFAULT_TMP_PREFIX
)
exgroup.add_argument("--cachedir",
type=typing_extensions.Text,
help="Directory to cache intermediate workflow outputs to avoid recomputing steps.",
default=""
)
exgroup = parser_launch.add_mutually_exclusive_group()
exgroup.add_argument("--move-outputs",
action="store_const",
const="move",
default="move",
help="Move output files to the workflow output directory and delete "
"intermediate output directories (default).",
dest="move_outputs"
)
exgroup.add_argument("--leave-outputs",
action="store_const",
const="leave",
default="move",
help="Leave output files in intermediate output directories.",
dest="move_outputs"
)
exgroup.add_argument("--copy-outputs",
action="store_const",
const="copy",
default="move",
help="""
Copy output files to the workflow output directory,
don't delete intermediate output directories.
""",
dest="move_outputs"
)
# subcommand start_worker:
parser_start_worker = subparser.add_parser(
"start_worker",
help="Start a worker service instance."
)
parser_start_worker.add_argument("-H", "--web_server_host",
type=typing_extensions.Text,
help="""
IP of webserver host.
Specify \"0.0.0.0\" for remote availablity within
the current network.
""",
default="localhost"
)
parser_start_worker.add_argument("-P", "--web_server_port",
type=typing_extensions.Text,
help="""
Port of webserver.
""",
default="5000"
)
args = parser.parse_args()
if args.subcommand == "launch":
if isinstance(args.exec_profile, str):
exec_profile_invalid_message = error_message(
"main",
"""
The specified exec profile is invalid.
Please either specify a class inheriting from
                    ExecProfileBase at c2wl_rocket.exec_profile or,
                    if using the command line, specify the name or path
                    to a module that contains such a class.
                    Please see the command line help for details.
""",
is_known=True
)
assert ":" in args.exec_profile, \
exec_profile_invalid_message
exec_profile_module_name = args.exec_profile.split(":")[0]
exec_profile_class_name = args.exec_profile.split(":")[1]
try:
exec_profile_module = importlib.import_module(exec_profile_module_name)
except:
try:
spec = importlib.util.spec_from_file_location(
"exec_profile_module",
exec_profile_module_name
)
exec_profile_module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(exec_profile_module)
except:
raise AssertionError(
error_message(
"main",
"""
The specified exec profile module \"{exec_profile_module_name}\"
could not be imported.
""",
is_known=True
)
)
assert hasattr(exec_profile_module, exec_profile_class_name), \
error_message(
"main",
f"""
The specified exec profile module \"{exec_profile_module_name}\"
has no class \"{exec_profile_class_name}\".
""",
is_known=True
)
args.exec_profile = getattr(exec_profile_module, exec_profile_class_name)
assert isclass(args.exec_profile) and issubclass(args.exec_profile, ExecProfileBase), \
error_message(
"main",
"""
The specified exec profile class does not inherit
from ExecProfileBase at c2wl_rocket.execprofile.
""",
is_known=True
)
cwltool_args = copy(cwltool_default_args)
cwltool_args.workflow = args.cwl_document
cwltool_args.job_order = args.input_params
cwltool_args.outdir = args.outdir
cwltool_args.tmp_outdir_prefix = args.tmp_outdir_prefix
cwltool_args.cachedir = args.cachedir
cwltool_args.move_outputs = args.move_outputs
cwltool_args.debug = args.debug
loading_context = cwltool.main.LoadingContext(vars(cwltool_args))
with open(args.cwl_document, mode="r") as cwl:
cwl_content = yaml.load(cwl)
assert "cwlVersion" in cwl_content.keys(), error_message(
"main",
"No cwlVersion as specified in the CWL document.",
is_known=True
)
workflow_metadata = {"cwlVersion": cwl_content["cwlVersion"]}
loading_context.construct_tool_object = functools.partial(
make_custom_tool,
exec_profile_class=args.exec_profile,
workflow_metadata=workflow_metadata
)
runtime_context = cwltool.main.RuntimeContext(vars(cwltool_args))
job_executor = MultithreadedJobExecutor() if cwltool_args.parallel \
else SingleJobExecutor()
job_executor.max_ram = job_executor.max_cores = float("inf")
# hand arguments over to main exec function:
cwltool.main.main(
args=cwltool_args,
executor=job_executor,
loadingContext=loading_context,
runtimeContext=runtime_context
)
elif args.subcommand == "start_worker":
worker.start(
web_server_host=args.web_server_host,
web_server_port=int(args.web_server_port)
)
def run(
cwl_document:str,
input_params:str,
    exec_profile=LocalToolExec, # note: the class itself is required here,
                                # not the path to the module
outdir=os.path.abspath('.'),
tmp_outdir_prefix=cwltool.utils.DEFAULT_TMP_PREFIX,
cachedir="",
move_outputs="move", # one of "move", "copy", or "leave"
debug=False
):
"""
    Main API entry point. Executes c2wl_rocket.__main__.main.
"""
args = argparse.Namespace(
debug=debug,
exec_profile=exec_profile,
cwl_document=cwl_document,
input_params=[input_params],
outdir=outdir,
tmp_outdir_prefix=tmp_outdir_prefix,
cachedir=cachedir,
move_outputs=move_outputs
)
main(args)
if __name__ == "__main__":
main()
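
# Hedged usage sketch of the programmatic entry point above; the CWL document and
# job-order file names are placeholders, not files shipped with this project.
#
#   from c2wl_rocket.__main__ import run
#   run(cwl_document="workflow.cwl", input_params="inputs.yml", debug=True)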
|
[
"argparse.Namespace",
"functools.partial",
"os.path.abspath",
"yaml.load",
"argparse.ArgumentParser",
"importlib.import_module",
"inspect.isclass",
"cwltool.executors.SingleJobExecutor",
"copy.copy",
"cwltool.executors.MultithreadedJobExecutor",
"importlib.util.spec_from_file_location",
"importlib.util.module_from_spec"
] |
[((9325, 9345), 'os.path.abspath', 'os.path.abspath', (['"""."""'], {}), "('.')\n", (9340, 9345), False, 'import os\n'), ((9593, 9819), 'argparse.Namespace', 'argparse.Namespace', ([], {'debug': 'debug', 'exec_profile': 'exec_profile', 'cwl_document': 'cwl_document', 'input_params': '[input_params]', 'outdir': 'outdir', 'tmp_outdir_prefix': 'tmp_outdir_prefix', 'cachedir': 'cachedir', 'move_outputs': 'move_outputs'}), '(debug=debug, exec_profile=exec_profile, cwl_document=\n cwl_document, input_params=[input_params], outdir=outdir,\n tmp_outdir_prefix=tmp_outdir_prefix, cachedir=cachedir, move_outputs=\n move_outputs)\n', (9611, 9819), False, 'import argparse\n'), ((672, 801), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""C2WL-Rocket"""', 'description': '"""Customizable CWL Rocket - A highly flexible CWL execution engine."""'}), "(prog='C2WL-Rocket', description=\n 'Customizable CWL Rocket - A highly flexible CWL execution engine.')\n", (695, 801), False, 'import argparse\n'), ((7429, 7455), 'copy.copy', 'copy', (['cwltool_default_args'], {}), '(cwltool_default_args)\n', (7433, 7455), False, 'from copy import copy\n'), ((8278, 8392), 'functools.partial', 'functools.partial', (['make_custom_tool'], {'exec_profile_class': 'args.exec_profile', 'workflow_metadata': 'workflow_metadata'}), '(make_custom_tool, exec_profile_class=args.exec_profile,\n workflow_metadata=workflow_metadata)\n', (8295, 8392), False, 'import functools\n'), ((7016, 7042), 'inspect.isclass', 'isclass', (['args.exec_profile'], {}), '(args.exec_profile)\n', (7023, 7042), False, 'from inspect import isclass\n'), ((7960, 7974), 'yaml.load', 'yaml.load', (['cwl'], {}), '(cwl)\n', (7969, 7974), False, 'import yaml\n'), ((8533, 8559), 'cwltool.executors.MultithreadedJobExecutor', 'MultithreadedJobExecutor', ([], {}), '()\n', (8557, 8559), False, 'from cwltool.executors import MultithreadedJobExecutor, SingleJobExecutor\n'), ((8604, 8623), 'cwltool.executors.SingleJobExecutor', 'SingleJobExecutor', ([], {}), '()\n', (8621, 8623), False, 'from cwltool.executors import MultithreadedJobExecutor, SingleJobExecutor\n'), ((2377, 2397), 'os.path.abspath', 'os.path.abspath', (['"""."""'], {}), "('.')\n", (2392, 2397), False, 'import os\n'), ((5612, 5661), 'importlib.import_module', 'importlib.import_module', (['exec_profile_module_name'], {}), '(exec_profile_module_name)\n', (5635, 5661), False, 'import importlib\n'), ((5730, 5821), 'importlib.util.spec_from_file_location', 'importlib.util.spec_from_file_location', (['"""exec_profile_module"""', 'exec_profile_module_name'], {}), "('exec_profile_module',\n exec_profile_module_name)\n", (5768, 5821), False, 'import importlib\n'), ((5931, 5968), 'importlib.util.module_from_spec', 'importlib.util.module_from_spec', (['spec'], {}), '(spec)\n', (5962, 5968), False, 'import importlib\n')]
|
import numpy as np
import torch
from torch import nn
from torch.utils.data import Dataset
from tqdm import tqdm
class SeqMaskGenerator(object):
def __init__(self, seqconfig):
self.seqconfig = seqconfig
def create_enc_mask(self, enc_inp):
#enc_inp = [N, inp_seq_len]
# N is total number of input sequences
bsize, seq_len = enc_inp.shape
# #enc_mask.shape = [1, 1, inp_seq_len, inp_seq_len]
# enc_mask = np.ones((1, 1, seq_len, seq_len))
# #enc_mask.shape = [1, 1, inp_seq_len, inp_seq_len]
# # enc_mask = enc_mask.reshape(1, 1, seq_len, seq_len)
# #enc_mask.shape = [bsize, 1, inp_seq_len, inp_seq_len]
# enc_mask = np.repeat(enc_mask, bsize, axis=0)
enc_mask = np.full((bsize,1, seq_len, seq_len), 1)
return enc_mask
def create_enc_dec_mask(self, num_samples):
inp_seqlen = self.seqconfig.seq_len
outp_seqlen = self.seqconfig.ewindow_end+1
# enc_dec_mask = np.ones((1,1, outp_seqlen, inp_seqlen))
# enc_dec_mask = np.repeat(enc_dec_mask, num_samples, axis=0)
enc_dec_mask = np.full((num_samples, 1, outp_seqlen, inp_seqlen), 1)
return enc_dec_mask
def create_dec_mask(self, mask_targetbase):
# dec_inp = [num_haplotypes, outcome_seq_len]
# outcome_seq_len is length of haplotype outcome sequence
# mask_targetbase = [num_haplotyptes, outcome_seq_len]
# generate causal mask
seqconfig = self.seqconfig
num_haplotypes = mask_targetbase.shape[0]
ewindow_st, ewindow_end = seqconfig.ewindow_st, seqconfig.ewindow_end
# ewindow_st = 0
# 6-13
# print('ewindow_st:', ewindow_st, 'ewindow_end:', ewindow_end)
tm = mask_targetbase[:, ewindow_st:ewindow_end+1]
        tindx = np.where(tm.astype(bool))
# print('tindx:\n', tindx)
# tindx (array(), array()) representing row and column indices where mask has 1 entries
        target_pos_st = tindx[1][0]  # gives the start of the target base occurrence in the sequence
ew_seqlen = ewindow_end - (target_pos_st + ewindow_st) + 1
# print('ew_seqlen:', ew_seqlen)
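        # Build a strictly lower-triangular sub-mask over the editable window:
        # zeroing the upper triangle (including the diagonal) means each position
        # can only attend to strictly earlier positions within the window.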
sub_mask = np.ones((ew_seqlen, ew_seqlen))
sub_mask_ind = np.triu_indices(ew_seqlen, k=0)
sub_mask[sub_mask_ind[0], sub_mask_ind[1]] = 0
dec_causal_mask = np.ones((ewindow_end+1,ewindow_end+1))
# print('dec_causal_mask.shape', dec_causal_mask.shape)
offset = target_pos_st + ewindow_st
# print('offset:',offset)
for i in range(ewindow_end+1):
if i < offset:
dec_causal_mask[i, offset:] = 0
else:
dec_causal_mask[i, offset:] = sub_mask[i-offset,:]
# print('dec_causal_mask:\n', dec_causal_mask)
#dec_causal_mask.shape = [1, 0:ewindow_end+1, 0:ewindow_end+1]
dec_causal_mask = dec_causal_mask.reshape(1, dec_causal_mask.shape[0], dec_causal_mask.shape[1])
dec_causal_mask = np.repeat(dec_causal_mask, num_haplotypes, axis=0)
return dec_causal_mask
class HaplotypeDataTensor(Dataset):
def __init__(self, seqconfig):
self.seqconfig = seqconfig
# def _encode_to_one_hot(self, mask, n_dims=None):
# """ turn matrix with labels into one-hot encoding using the max number of classes detected"""
# original_mask_shape = mask.shape
# mask = mask.type(torch.LongTensor).view(-1, 1)
# if n_dims is None:
# n_dims = int(torch.max(mask)) + 1
# one_hot = torch.zeros(mask.shape[0], n_dims).scatter_(1, mask, 1)
# one_hot = one_hot.view(*original_mask_shape, -1)
# return one_hot
def generate_tensor_from_df(self, proc_df, tb_cb_nucl, outcome_prop_col):
# create the tensors we need
# N is total number of input sequences
print('Generating tensors using sequence config:\n', self.seqconfig)
Xinp_enc = [] # tensor, (N x inp_sequence_len)
Xinp_dec = [] # list of tensors, (N x num_haplotypes x outp_sequence_len)
mask_inp_targetbase = [] # list of tensors, (N x num_haplotypes x outp_sequence_len)
target_conv = [] # list of tensors, (N x num_haplotypes x outp_sequence_len)
target_conv_onehot = [] # list of tensors (i.e. one-hot encoding), (N x num_haplotypes x outp_sequence_len x 2 x 1)
target_prob = [] # list of tensors, (N x num_haplotypes)
mask_dec = []
indx_seqid_map = {} # dict, int_id:(seqid, target_seq)
inpseq_outpseq_map = {} # dict([]), int_id:[outp_seq1, out_seq2, ....]
seqconfig = self.seqconfig
mask_generator = SeqMaskGenerator(seqconfig)
seq_len = seqconfig.seq_len
tb_nucl, cb_nucl = tb_cb_nucl # target base, conversion base (i.e. A->G for ABE base editor)
# C->T for CBE base editor
# output sequence will be from 0:end of editable window indx
for gr_name, gr_df in tqdm(proc_df.groupby(by=['seq_id', 'Inp_seq'])):
Xinp_enc.append(gr_df[[f'Inp_B{i}' for i in range(1,seq_len+1)]].values[0,:])
Xinp_dec.append(gr_df[[f'Outp_B{i}' for i in range(1,seq_len+1)]].values[:,0:seqconfig.ewindow_end+1])
mask_inp_targetbase.append(gr_df[[f'Inp_M{i}' for i in range(1,seq_len+1)]].values[:,0:seqconfig.ewindow_end+1])
conv = gr_df[[f'conv{tb_nucl}{cb_nucl}_{i}' for i in range(1,seq_len+1)]].values[:,0:seqconfig.ewindow_end+1]
target_conv.append(conv)
if outcome_prop_col is not None:
target_prob.append(gr_df[outcome_prop_col].values)
# print(target_prob[-1])
# compute mask_enc and mask_dec
# print(mask_targetbase[-1])
mask_dec.append(mask_generator.create_dec_mask(mask_inp_targetbase[-1]))
inpseq_id = len(indx_seqid_map)
indx_seqid_map[inpseq_id] = gr_name
inpseq_outpseq_map[inpseq_id] = gr_df['Outp_seq'].values.tolist()
mask_enc = None
mask_encdec = None
# tensorize
print('--- tensorizing ---')
device_cpu = torch.device('cpu')
self.Xinp_enc = torch.tensor(Xinp_enc).long().to(device_cpu)
self.Xinp_enc = self.Xinp_enc.reshape(self.Xinp_enc.shape[0], 1, self.Xinp_enc.shape[1])
self.Xinp_dec = [torch.from_numpy(arr).long().to(device_cpu) for arr in Xinp_dec]
self.mask_inp_targetbase = [torch.from_numpy(arr).long().to(device_cpu) for arr in mask_inp_targetbase]
self.target_conv_onehot = [torch.nn.functional.one_hot(torch.from_numpy(arr).long().to(device_cpu), num_classes=2)
for arr in target_conv]
if outcome_prop_col is not None:
self.target_prob = [torch.from_numpy(arr).float().to(device_cpu) for arr in target_prob]
else:
self.target_prob = None
self.mask_enc = mask_enc
self.mask_encdec = mask_encdec
self.mask_dec = [torch.from_numpy(arr).long().to(device_cpu) for arr in mask_dec]
self.num_samples = len(self.Xinp_enc) # int, number of sequences
self.indx_seqid_map = indx_seqid_map
self.inpseq_outpseq_map = inpseq_outpseq_map
print('--- end ---')
def hap_collate(self, batch):
# pack batches in a list for now
# to be used in dataloader object
return [item for item in batch]
def __getitem__(self, indx):
if self.target_prob is None:
return_target_prob = None
else:
return_target_prob = self.target_prob[indx]
return(self.Xinp_enc[indx],
self.Xinp_dec[indx],
self.mask_enc,
self.mask_dec[indx],
self.mask_encdec,
self.mask_inp_targetbase[indx],
self.target_conv_onehot[indx],
return_target_prob,
indx,
self.indx_seqid_map[indx],
self.inpseq_outpseq_map[indx])
def __len__(self):
return(self.num_samples)
class PartitionDataTensor(Dataset):
def __init__(self, dtensor, partition_ids, dsettype, run_num):
self.dtensor = dtensor # instance of :class:`HaplotypeDataTensor`
self.partition_ids = partition_ids # list of sequence indices
self.dsettype = dsettype # string, dataset type (i.e. train, validation, test)
self.run_num = run_num # int, run number
self.num_samples = len(self.partition_ids[:]) # int, number of docs in the partition
def __getitem__(self, indx):
target_id = self.partition_ids[indx]
return self.dtensor[target_id]
def __len__(self):
return(self.num_samples)
def print_data_example(elm):
    # __getitem__ returns 11 fields; the last one is the list of outcome sequences
    Xinp_enc, Xinp_dec, mask_enc, mask_dec, mask_encdec, mask_targetbase_enc, target_conv_onehot, target_prob, indx, seqid, outp_seqs = elm
print('Xinp_enc:\n', Xinp_enc, 'shape:', Xinp_enc.shape)
print('Xinp_dec:\n',Xinp_dec, 'shape:',Xinp_dec.shape)
if mask_enc is not None:
print('mask_enc:\n', mask_enc, 'shape:',mask_enc.shape)
print('mask_dec:\n',mask_dec, 'shape:',mask_dec.shape)
if mask_encdec is not None:
print('mask_encdec:\n', mask_encdec, 'shape:',mask_encdec.shape)
print('mask_targetbase_enc:\n', mask_targetbase_enc,'shape:', mask_targetbase_enc.shape)
print('target_conv_onehot:\n',target_conv_onehot, 'shape:',target_conv_onehot.shape)
if target_prob is not None:
print('target_prob:\n',target_prob, 'shape:',target_prob.shape)
else:
print('target_prob:None')
print('indx:', indx)
print('seqid:', seqid)
def hap_collate(batch):
# pack batches in a list for now
# to be used in dataloader object
return [item for item in batch]
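
# Hedged usage sketch (assumed wiring, not part of this module): batching a
# PartitionDataTensor with the list-style collate function defined above.
#
#   from torch.utils.data import DataLoader
#   loader = DataLoader(partition_data, batch_size=4, shuffle=True, collate_fn=hap_collate)
#   for batch in loader:
#       print_data_example(batch[0])   # each batch is a plain list of dataset items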
|
[
"numpy.full",
"torch.from_numpy",
"numpy.ones",
"numpy.triu_indices",
"torch.device",
"torch.tensor",
"numpy.repeat"
] |
[((762, 802), 'numpy.full', 'np.full', (['(bsize, 1, seq_len, seq_len)', '(1)'], {}), '((bsize, 1, seq_len, seq_len), 1)\n', (769, 802), True, 'import numpy as np\n'), ((1128, 1181), 'numpy.full', 'np.full', (['(num_samples, 1, outp_seqlen, inp_seqlen)', '(1)'], {}), '((num_samples, 1, outp_seqlen, inp_seqlen), 1)\n', (1135, 1181), True, 'import numpy as np\n'), ((2241, 2272), 'numpy.ones', 'np.ones', (['(ew_seqlen, ew_seqlen)'], {}), '((ew_seqlen, ew_seqlen))\n', (2248, 2272), True, 'import numpy as np\n'), ((2296, 2327), 'numpy.triu_indices', 'np.triu_indices', (['ew_seqlen'], {'k': '(0)'}), '(ew_seqlen, k=0)\n', (2311, 2327), True, 'import numpy as np\n'), ((2418, 2461), 'numpy.ones', 'np.ones', (['(ewindow_end + 1, ewindow_end + 1)'], {}), '((ewindow_end + 1, ewindow_end + 1))\n', (2425, 2461), True, 'import numpy as np\n'), ((3073, 3123), 'numpy.repeat', 'np.repeat', (['dec_causal_mask', 'num_haplotypes'], {'axis': '(0)'}), '(dec_causal_mask, num_haplotypes, axis=0)\n', (3082, 3123), True, 'import numpy as np\n'), ((6317, 6336), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (6329, 6336), False, 'import torch\n'), ((6361, 6383), 'torch.tensor', 'torch.tensor', (['Xinp_enc'], {}), '(Xinp_enc)\n', (6373, 6383), False, 'import torch\n'), ((6528, 6549), 'torch.from_numpy', 'torch.from_numpy', (['arr'], {}), '(arr)\n', (6544, 6549), False, 'import torch\n'), ((6629, 6650), 'torch.from_numpy', 'torch.from_numpy', (['arr'], {}), '(arr)\n', (6645, 6650), False, 'import torch\n'), ((7178, 7199), 'torch.from_numpy', 'torch.from_numpy', (['arr'], {}), '(arr)\n', (7194, 7199), False, 'import torch\n'), ((6768, 6789), 'torch.from_numpy', 'torch.from_numpy', (['arr'], {}), '(arr)\n', (6784, 6789), False, 'import torch\n'), ((6960, 6981), 'torch.from_numpy', 'torch.from_numpy', (['arr'], {}), '(arr)\n', (6976, 6981), False, 'import torch\n')]
|
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\apartments\situations\neighbor_react_to_you_situation.py
# Compiled at: 2018-07-22 23:17:14
# Size of source mod 2**32: 9737 bytes
import random
from event_testing.resolver import DoubleSimResolver
from interactions.utils.loot import LootActions
from relationships.global_relationship_tuning import RelationshipGlobalTuning
from sims4.tuning.instances import lock_instance_tunables
from sims4.tuning.tunable import TunableList
from situations.bouncer.bouncer_types import BouncerExclusivityCategory, RequestSpawningOption, BouncerRequestPriority
from situations.situation import Situation
from situations.situation_complex import SituationComplexCommon, TunableSituationJobAndRoleState, CommonSituationState, SituationStateData, CommonInteractionCompletedSituationState, SituationState
from situations.situation_guest_list import SituationGuestInfo, SituationGuestList
from situations.situation_types import SituationCreationUIOption
from ui.ui_dialog_notification import TunableUiDialogNotificationSnippet
import services
class _StartSituationState(SituationState):
def _on_set_sim_role_state(self, sim, *args, **kwargs):
(super()._on_set_sim_role_state)(sim, *args, **kwargs)
relationship_tracker = sim.sim_info.relationship_tracker
for sim_info in services.active_household():
if relationship_tracker.has_bit(sim_info.sim_id, RelationshipGlobalTuning.NEIGHBOR_GIVEN_KEY_RELATIONSHIP_BIT):
self._change_state(self.owner._hangout_state())
return
self._change_state(self.owner._knock_on_door_state())
class _KnockOnDoorState(CommonInteractionCompletedSituationState):
def _on_interaction_of_interest_complete(self, **kwargs):
self._change_state(self.owner._wait_to_be_greeted())
class _NeighborWaitToBeGreetedState(CommonInteractionCompletedSituationState):
FACTORY_TUNABLES = {'early_exit_loot':TunableList(description='\n A list of loot to apply between the neighbor and the active\n household Sims if this stiuation state times out.\n ',
tunable=LootActions.TunableReference(description='\n A loot action applied to all of the active household Sims if this\n situation state times out.\n ')),
'early_exit_notification':TunableUiDialogNotificationSnippet(description='\n Notification that will be shown when this situation state times\n out.\n ')}
def __init__(self, *args, early_exit_loot=tuple(), early_exit_notification=None, **kwargs):
(super().__init__)(*args, **kwargs)
self._early_exit_loot = early_exit_loot
self._early_exit_notification = early_exit_notification
def _on_interaction_of_interest_complete(self, **kwargs):
self._change_state(self.owner._hangout_state())
def timer_expired(self):
for sim_info in services.active_household():
resolver = DoubleSimResolver(sim_info, self.owner._neighbor_sim.sim_info)
for loot_action in self._early_exit_loot:
loot_action.apply_to_resolver(resolver)
resolver = DoubleSimResolver(services.active_sim_info(), self.owner._neighbor_sim.sim_info)
early_exit_notification = self._early_exit_notification((services.active_sim_info()), resolver=resolver)
early_exit_notification.show_dialog()
self.owner._self_destruct()
class _NeighborHangoutState(CommonSituationState):
def timer_expired(self):
self.owner._self_destruct()
class NeighborReactToYouSituation(SituationComplexCommon):
INSTANCE_TUNABLES = {'_knock_on_door_state':_KnockOnDoorState.TunableFactory(description='\n Situation State for the Sim to knock on the door.\n ',
locked_args={'time_out':None,
'allow_join_situation':True}),
'_wait_to_be_greeted':_NeighborWaitToBeGreetedState.TunableFactory(description='\n Situation State for the Sim to wait to be greeted.\n ',
locked_args={'allow_join_situation': True}),
'_hangout_state':_NeighborHangoutState.TunableFactory(description='\n Situation state for the Sim to hang out for a while.\n ',
locked_args={'allow_join_situation': True}),
'_starting_neighbor_job_and_role_state':TunableSituationJobAndRoleState(description='\n Job and Role State for the neighbor.\n ')}
REMOVE_INSTANCE_TUNABLES = ('_buff', 'targeted_situation', '_resident_job', '_relationship_between_job_members',
'audio_sting_on_start', 'force_invite_only', 'screen_slam_gold',
'screen_slam_silver', 'screen_slam_bronze', 'screen_slam_no_medal') + Situation.SITUATION_START_FROM_UI_REMOVE_INSTANCE_TUNABLES + Situation.SITUATION_USER_FACING_REMOVE_INSTANCE_TUNABLES
@classmethod
def _states(cls):
return (SituationStateData(1, _StartSituationState),
SituationStateData(2, _KnockOnDoorState, factory=(cls._knock_on_door_state)),
SituationStateData(3, _NeighborWaitToBeGreetedState, factory=(cls._wait_to_be_greeted)),
SituationStateData(4, _NeighborHangoutState, factory=(cls._hangout_state)))
@classmethod
def _get_tuned_job_and_default_role_state_tuples(cls):
return [(cls._starting_neighbor_job_and_role_state.job, cls._starting_neighbor_job_and_role_state.role_state)]
@classmethod
def default_job(cls):
pass
def __init__(self, *args, **kwargs):
(super().__init__)(*args, **kwargs)
self._neighbor_sim = None
def _on_set_sim_job(self, sim, job_type):
super()._on_set_sim_job(sim, job_type)
self._neighbor_sim = sim
@classmethod
def get_predefined_guest_list(cls):
active_sim_info = services.active_sim_info()
neighbor_sim_id = cls._get_neighbor()
if neighbor_sim_id is None:
return
guest_list = SituationGuestList(invite_only=True, host_sim_id=neighbor_sim_id,
filter_requesting_sim_id=(active_sim_info.sim_id))
guest_list.add_guest_info(SituationGuestInfo(neighbor_sim_id, (cls._starting_neighbor_job_and_role_state.job),
(RequestSpawningOption.DONT_CARE),
(BouncerRequestPriority.BACKGROUND_MEDIUM),
expectation_preference=True))
return guest_list
@classmethod
def _get_neighbor(cls):
active_sim_info = services.active_sim_info()
neighbors = services.sim_filter_service().submit_filter((cls._starting_neighbor_job_and_role_state.job.filter), callback=None,
requesting_sim_info=active_sim_info,
allow_yielding=False,
blacklist_sim_ids={sim_info.sim_id for sim_info in services.active_household()},
gsi_source_fn=(cls.get_sim_filter_gsi_name))
if not neighbors:
return
neighbor_sim_infos_at_home = [result.sim_info for result in neighbors if result.sim_info.is_at_home]
neighbor_sim_id = random.choice(neighbor_sim_infos_at_home).sim_id if neighbor_sim_infos_at_home else None
return neighbor_sim_id
def start_situation(self):
super().start_situation()
self._change_state(_StartSituationState())
lock_instance_tunables(NeighborReactToYouSituation, exclusivity=(BouncerExclusivityCategory.NORMAL),
creation_ui_option=(SituationCreationUIOption.NOT_AVAILABLE),
duration=0,
_implies_greeted_status=False)
|
[
"event_testing.resolver.DoubleSimResolver",
"situations.situation_complex.SituationStateData",
"sims4.tuning.instances.lock_instance_tunables",
"interactions.utils.loot.LootActions.TunableReference",
"services.sim_filter_service",
"random.choice",
"situations.situation_guest_list.SituationGuestInfo",
"ui.ui_dialog_notification.TunableUiDialogNotificationSnippet",
"situations.situation_complex.TunableSituationJobAndRoleState",
"services.active_household",
"situations.situation_guest_list.SituationGuestList",
"services.active_sim_info"
] |
[((7487, 7702), 'sims4.tuning.instances.lock_instance_tunables', 'lock_instance_tunables', (['NeighborReactToYouSituation'], {'exclusivity': 'BouncerExclusivityCategory.NORMAL', 'creation_ui_option': 'SituationCreationUIOption.NOT_AVAILABLE', 'duration': '(0)', '_implies_greeted_status': '(False)'}), '(NeighborReactToYouSituation, exclusivity=\n BouncerExclusivityCategory.NORMAL, creation_ui_option=\n SituationCreationUIOption.NOT_AVAILABLE, duration=0,\n _implies_greeted_status=False)\n', (7509, 7702), False, 'from sims4.tuning.instances import lock_instance_tunables\n'), ((1501, 1528), 'services.active_household', 'services.active_household', ([], {}), '()\n', (1526, 1528), False, 'import services\n'), ((2537, 2707), 'ui.ui_dialog_notification.TunableUiDialogNotificationSnippet', 'TunableUiDialogNotificationSnippet', ([], {'description': '"""\n Notification that will be shown when this situation state times\n out.\n """'}), '(description=\n """\n Notification that will be shown when this situation state times\n out.\n """\n )\n', (2571, 2707), False, 'from ui.ui_dialog_notification import TunableUiDialogNotificationSnippet\n'), ((3124, 3151), 'services.active_household', 'services.active_household', ([], {}), '()\n', (3149, 3151), False, 'import services\n'), ((4547, 4665), 'situations.situation_complex.TunableSituationJobAndRoleState', 'TunableSituationJobAndRoleState', ([], {'description': '"""\n Job and Role State for the neighbor.\n """'}), '(description=\n """\n Job and Role State for the neighbor.\n """)\n', (4578, 4665), False, 'from situations.situation_complex import SituationComplexCommon, TunableSituationJobAndRoleState, CommonSituationState, SituationStateData, CommonInteractionCompletedSituationState, SituationState\n'), ((6049, 6075), 'services.active_sim_info', 'services.active_sim_info', ([], {}), '()\n', (6073, 6075), False, 'import services\n'), ((6198, 6316), 'situations.situation_guest_list.SituationGuestList', 'SituationGuestList', ([], {'invite_only': '(True)', 'host_sim_id': 'neighbor_sim_id', 'filter_requesting_sim_id': 'active_sim_info.sim_id'}), '(invite_only=True, host_sim_id=neighbor_sim_id,\n filter_requesting_sim_id=active_sim_info.sim_id)\n', (6216, 6316), False, 'from situations.situation_guest_list import SituationGuestInfo, SituationGuestList\n'), ((6681, 6707), 'services.active_sim_info', 'services.active_sim_info', ([], {}), '()\n', (6705, 6707), False, 'import services\n'), ((3176, 3238), 'event_testing.resolver.DoubleSimResolver', 'DoubleSimResolver', (['sim_info', 'self.owner._neighbor_sim.sim_info'], {}), '(sim_info, self.owner._neighbor_sim.sim_info)\n', (3193, 3238), False, 'from event_testing.resolver import DoubleSimResolver\n'), ((3387, 3413), 'services.active_sim_info', 'services.active_sim_info', ([], {}), '()\n', (3411, 3413), False, 'import services\n'), ((3515, 3541), 'services.active_sim_info', 'services.active_sim_info', ([], {}), '()\n', (3539, 3541), False, 'import services\n'), ((5150, 5193), 'situations.situation_complex.SituationStateData', 'SituationStateData', (['(1)', '_StartSituationState'], {}), '(1, _StartSituationState)\n', (5168, 5193), False, 'from situations.situation_complex import SituationComplexCommon, TunableSituationJobAndRoleState, CommonSituationState, SituationStateData, CommonInteractionCompletedSituationState, SituationState\n'), ((5204, 5278), 'situations.situation_complex.SituationStateData', 'SituationStateData', (['(2)', '_KnockOnDoorState'], {'factory': 'cls._knock_on_door_state'}), '(2, _KnockOnDoorState, 
factory=cls._knock_on_door_state)\n', (5222, 5278), False, 'from situations.situation_complex import SituationComplexCommon, TunableSituationJobAndRoleState, CommonSituationState, SituationStateData, CommonInteractionCompletedSituationState, SituationState\n'), ((5291, 5381), 'situations.situation_complex.SituationStateData', 'SituationStateData', (['(3)', '_NeighborWaitToBeGreetedState'], {'factory': 'cls._wait_to_be_greeted'}), '(3, _NeighborWaitToBeGreetedState, factory=cls.\n _wait_to_be_greeted)\n', (5309, 5381), False, 'from situations.situation_complex import SituationComplexCommon, TunableSituationJobAndRoleState, CommonSituationState, SituationStateData, CommonInteractionCompletedSituationState, SituationState\n'), ((5389, 5461), 'situations.situation_complex.SituationStateData', 'SituationStateData', (['(4)', '_NeighborHangoutState'], {'factory': 'cls._hangout_state'}), '(4, _NeighborHangoutState, factory=cls._hangout_state)\n', (5407, 5461), False, 'from situations.situation_complex import SituationComplexCommon, TunableSituationJobAndRoleState, CommonSituationState, SituationStateData, CommonInteractionCompletedSituationState, SituationState\n'), ((6359, 6559), 'situations.situation_guest_list.SituationGuestInfo', 'SituationGuestInfo', (['neighbor_sim_id', 'cls._starting_neighbor_job_and_role_state.job', 'RequestSpawningOption.DONT_CARE', 'BouncerRequestPriority.BACKGROUND_MEDIUM'], {'expectation_preference': '(True)'}), '(neighbor_sim_id, cls.\n _starting_neighbor_job_and_role_state.job, RequestSpawningOption.\n DONT_CARE, BouncerRequestPriority.BACKGROUND_MEDIUM,\n expectation_preference=True)\n', (6377, 6559), False, 'from situations.situation_guest_list import SituationGuestInfo, SituationGuestList\n'), ((2313, 2513), 'interactions.utils.loot.LootActions.TunableReference', 'LootActions.TunableReference', ([], {'description': '"""\n A loot action applied to all of the active household Sims if this\n situation state times out.\n """'}), '(description=\n """\n A loot action applied to all of the active household Sims if this\n situation state times out.\n """\n )\n', (2341, 2513), False, 'from interactions.utils.loot import LootActions\n'), ((6728, 6757), 'services.sim_filter_service', 'services.sim_filter_service', ([], {}), '()\n', (6755, 6757), False, 'import services\n'), ((7248, 7289), 'random.choice', 'random.choice', (['neighbor_sim_infos_at_home'], {}), '(neighbor_sim_infos_at_home)\n', (7261, 7289), False, 'import random\n'), ((6983, 7010), 'services.active_household', 'services.active_household', ([], {}), '()\n', (7008, 7010), False, 'import services\n')]
|
from threading import Thread, Event
import logging
import peewee
import socket
from ..database import CorsikaRun, CeresRun
from ..queries import get_pending_jobs, count_jobs, update_job_status
from .corsika import prepare_corsika_job
from .ceres import prepare_ceres_job
log = logging.getLogger(__name__)
hostname = socket.getfqdn()
class JobSubmitter(Thread):
def __init__(
self,
interval,
max_queued_jobs,
mopro_directory,
host,
port,
cluster,
location=None,
corsika_memory='4G',
ceres_memory='12G',
tmp_dir=None,
):
'''
        Parameters
----------
interval: int
number of seconds to wait between submissions
max_queued_jobs: int
Maximum number of jobs in the queue of the grid engine
No new jobs are submitted if the number of jobs in the queue is
higher than this value
mopro_directory: str
            path to the basic directory structure for erna. Logfiles, jars, xmls and
            analysis output are stored in subdirectories of this directory.
host: str
hostname of the submitter node
port: int
port for the zmq communication
'''
super().__init__()
self.event = Event()
self.interval = interval
self.max_queued_jobs = max_queued_jobs
self.mopro_directory = mopro_directory
self.host = host
self.port = port
self.cluster = cluster
self.location = location or hostname
self.ceres_memory = ceres_memory
self.corsika_memory = corsika_memory
self.tmp_dir = tmp_dir
def run(self):
while not self.event.is_set():
try:
self.process_pending_jobs()
except peewee.OperationalError:
log.exception('Lost database connection')
except Exception as e:
log.exception('Error during submission: {}'.format(e))
self.event.wait(self.interval)
def terminate(self):
self.event.set()
def process_pending_jobs(self):
'''
Fetches pending runs from the processing database
        and submits them using qsub if not too many jobs are already running.
'''
pending_corsika = count_jobs(CorsikaRun, status='created')
pending_ceres = count_jobs(CeresRun, status='created')
n_queued = self.cluster.n_queued
log.debug(f'{self.cluster.n_running} jobs running')
log.debug(f'{n_queued} jobs queued')
log.debug(f'{pending_corsika} pending CORSIKA jobs in database')
log.debug(f'{pending_ceres} pending CERES jobs in database')
new_jobs = self.max_queued_jobs - n_queued
if new_jobs > 0:
pending_jobs = get_pending_jobs(max_jobs=new_jobs, location=self.location)
for job in pending_jobs:
if self.event.is_set():
break
kwargs = {
'mopro_directory': self.mopro_directory,
'submitter_host': self.host,
'submitter_port': self.port,
'tmp_dir': self.tmp_dir
}
try:
if isinstance(job, CorsikaRun):
self.cluster.submit_job(
**prepare_corsika_job(job, **kwargs),
memory=self.corsika_memory
)
log.info(f'Submitted new CORSIKA job with id {job.id}')
elif isinstance(job, CeresRun):
self.cluster.submit_job(
**prepare_ceres_job(job, **kwargs),
memory=self.ceres_memory
)
log.info(f'Submitted new CERES job with id {job.id}')
else:
raise ValueError(f'Unknown job type: {job}')
update_job_status(type(job), job.id, 'queued', location=self.location)
except:
log.exception('Could not submit job')
update_job_status(type(job), job.id, 'failed')
|
[
"socket.getfqdn",
"threading.Event",
"logging.getLogger"
] |
[((280, 307), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (297, 307), False, 'import logging\n'), ((319, 335), 'socket.getfqdn', 'socket.getfqdn', ([], {}), '()\n', (333, 335), False, 'import socket\n'), ((1320, 1327), 'threading.Event', 'Event', ([], {}), '()\n', (1325, 1327), False, 'from threading import Thread, Event\n')]
|
"""
Backend of Narrator web app.
"""
import os
import sys
import shutil
import pandas as pd
import skimage.io as io
import PIL
from flask import render_template, request, redirect, url_for, send_from_directory, session
from app import app
sys.path.append(app.config['COCOAPI_PATH'] + 'PythonAPI')
from pycocotools.coco import COCO
sys.path.append('../src/')
from Narrator import Narrator
# Construct classes
narrator = Narrator(
root_path=app.config['ROOT_PATH'],
coco_vocab_path=app.config['COCO_VOCAB_PATH'],
msrvtt_vocab_path=app.config['MSRVTT_VOCAB_PATH'],
base_model=app.config['ENCODER_MODEL'],
ic_model_path=app.config['IC_MODEL_PATH'],
vc_model_path=app.config['VC_MODEL_PATH'],
ic_rnn_type=app.config['IC_RNN_TYPE'],
vc_rnn_type=app.config['VC_RNN_TYPE']
)
# Load samples from file
try:
samplesDF = pd.read_csv(
app.config['SAMPLES_DIR'] + 'sample_captions.csv', index_col=0)
except:
samplesDF = pd.DataFrame(
columns=['id', 'caption', 'gt'], index=['name']).head()
# Update any existing samples
if len(app.config['SAMPLES_TO_UPDATE']) > 0:
# Load image and video datasets
coco = COCO(app.config[
'COCOAPI_PATH'] + 'annotations/instances_{}.json'.format(app.config['COCO_SET']))
cocoCaptionDF = pd.read_csv(
app.config['COCOAPI_PATH'] + 'annotations/coco_captions.csv')
msrvttCaptionDF = pd.read_csv(app.config['MSRVTT_CAPTION_PATH'])
# Determine images and videos to update
im_names = [x for x in app.config['SAMPLES_TO_UPDATE'] if 'image' in x]
vid_names = [x for x in app.config['SAMPLES_TO_UPDATE'] if 'video' in x]
# Randomly select ids from their respective datasets and reject any that already have been
# chosen
rand_im_ids = cocoCaptionDF[cocoCaptionDF['set'] == app.config[
'COCO_SET']].sample(n=32)['id'].values.tolist()
rand_im_ids = [x for x in rand_im_ids if x not in samplesDF['id'].values.tolist()][
:len(im_names)]
rand_vid_ids = msrvttCaptionDF[msrvttCaptionDF['set'] == 'test'].sample(n=32)[
'vid_id'].values.tolist()
rand_vid_ids = [x for x in rand_vid_ids if x not in samplesDF['id'].values.tolist()][
:len(vid_names)]
# Generate sample information and store to file
for i, (name, im_id) in enumerate(zip(im_names, rand_im_ids)):
# Get image and generated caption
url = coco.loadImgs(im_id)[0]['coco_url']
caption = narrator.gen_caption(url, beam_size=8)
# Get all gt captions and encode/decode using vocabulary
gts = cocoCaptionDF[cocoCaptionDF['id'] == im_id]['caption']
gts = gts.apply(lambda x: narrator.coco_vocab.encode(
x, app.config['MAX_LEN'] + 1))
gts = gts.apply(lambda x: narrator.coco_vocab.decode(x, clean=True))
# Find nearest gt
nearest_gt = ''
best_score = 0.0
for gt in gts:
bleu = narrator.coco_vocab.evaluate([gt], caption)
if bleu > best_score:
best_score = bleu
nearest_gt = gt
gt = ' '.join(nearest_gt).capitalize()
caption = ' '.join(caption).capitalize()
        # Load and save image
im = PIL.Image.fromarray(io.imread(url)).convert('RGB')
im.save(app.config['SAMPLES_DIR'] + name + '.jpg')
# Generate audio files
narrator.gen_audio_file(
gt, app.config['SAMPLES_DIR'] + name + '_gt.ogg')
narrator.gen_audio_file(
caption, app.config['SAMPLES_DIR'] + name + '.ogg')
# Update samples dataframe
samplesDF.loc[name, 'id'] = im_id
samplesDF.loc[name, 'caption'] = caption
samplesDF.loc[name, 'gt'] = gt
print('Images updated!')
for i, (name, vid_id) in enumerate(zip(vid_names, rand_vid_ids)):
# Get video and generated caption
url = app.config['MSRVTT_DATA_PATH'] + vid_id + '.mp4'
caption = narrator.gen_caption(url, beam_size=8)
# Get all gt captions and encode/decode using vocabulary
gts = msrvttCaptionDF[msrvttCaptionDF['vid_id'] == vid_id]['caption']
gts = gts.apply(lambda x: narrator.msrvtt_vocab.encode(
x, app.config['MAX_LEN'] + 1))
gts = gts.apply(lambda x: narrator.msrvtt_vocab.decode(x, clean=True))
# Find the nearest gt
nearest_gt = ''
best_score = 0.0
for gt in gts:
bleu = narrator.msrvtt_vocab.evaluate([gt], caption)
if bleu > best_score:
best_score = bleu
nearest_gt = gt
gt = ' '.join(nearest_gt).capitalize()
caption = ' '.join(caption).capitalize()
        # Copy video to samples directory
shutil.copy(url, app.config['SAMPLES_DIR'] + name + '.mp4')
# Generate audio files
narrator.gen_audio_file(
gt, app.config['SAMPLES_DIR'] + name + '_gt.ogg')
narrator.gen_audio_file(
caption, app.config['SAMPLES_DIR'] + name + '.ogg')
# update samples dataframe
samplesDF.loc[name, 'id'] = vid_id
samplesDF.loc[name, 'caption'] = caption
samplesDF.loc[name, 'gt'] = gt
print('Videos updated!')
# Save samples dataframe
samplesDF.to_csv(app.config['SAMPLES_DIR'] + 'sample_captions.csv')
# Load samples
IM_SAMPLES_DICT = [[], [], [], []]
VID_SAMPLES_DICT = [[], [], [], []]
for i, ix in enumerate(range(16)):
im_sample = samplesDF.loc['image' + str(ix)]
vid_sample = samplesDF.loc['video' + str(ix)]
IM_SAMPLES_DICT[int(i / 4)].append({
'id': im_sample['id'],
'url': 'image' + str(ix) + '.jpg',
'gt': im_sample['gt'],
'gt_audio': 'image' + str(ix) + '_gt.ogg',
'caption': im_sample['caption'],
'cap_audio': 'image' + str(ix) + '.ogg'
})
VID_SAMPLES_DICT[int(i / 4)].append({
'id': vid_sample['id'],
'url': 'video' + str(ix) + '.mp4',
'gt': vid_sample['gt'],
'gt_audio': 'video' + str(ix) + '_gt.ogg',
'caption': vid_sample['caption'],
'cap_audio': 'video' + str(ix) + '.ogg'
})
print("Samples loaded")
# Get filepath for scene example
scene_example_file = app.config[
'SAMPLES_DIR'] + app.config['SCENE_EXAMPLE_FILE']
# Create scene example if it doesn't already exist
if not os.path.exists(scene_example_file + '.csv'):
# Generate captions by scene
captions, scene_change_timecodes = narrator.gen_caption(
scene_example_file + '.mp4', by_scene=True, as_string=True)
# Create dataframe
sceneSamplesDF = pd.DataFrame({
'time': scene_change_timecodes,
'caption': captions
})
# Capitalize
sceneSamplesDF['caption'] = sceneSamplesDF[
'caption'].apply(lambda x: x.capitalize())
# Generate audio files for each caption
for i, caption in enumerate(captions):
narrator.gen_audio_file(
caption, scene_example_file + '.' + str(i) + '.ogg')
# Save samples dataframe
sceneSamplesDF.to_csv(scene_example_file + '.csv', index=False)
# Load samples dataframe
else:
sceneSamplesDF = pd.read_csv(scene_example_file + '.csv')
# Load scene example
SCENE_SAMPLES_DICT = []
for i, row in sceneSamplesDF.iterrows():
SCENE_SAMPLES_DICT.append({
'time': row['time'],
'cap_audio': app.config['SCENE_EXAMPLE_FILE'] + '.' + str(i) + '.ogg',
'caption': row['caption'].capitalize()
})
##############################################################################
##################################### APP ####################################
##############################################################################
def allowed_file(filename):
"""Determine if a file has an allowed extension."""
return '.' in filename and \
filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS']
def save_file(file):
"""Save given file and return path."""
file_path = os.path.join(app.config['UPLOAD_DIR'], file.filename)
file.save(os.path.join(app.config['UPLOAD_DIR'], file.filename))
return file_path
def split_filename(file):
"""Split filename into name and ext."""
*filename, ext = file.filename.split('.')
if isinstance(filename, list):
filename = '_'.join(filename) # Replace existing . with _
return filename, ext
def determine_type(ext, by_scene):
"""Determine if image or video."""
if ext in app.config['VID_EXTENSIONS']:
if by_scene:
return 'scene'
return 'video'
return 'image'
def generate_caption(file, by_scene):
"""Generate caption for given file"""
file.filename = file.filename.replace(' ', '_')
@app.route('/')
@app.route('/index')
def index():
"""Render homepage."""
return render_template('main.html', page='main', title=app.config['TITLE'])
@app.route('/images')
def images():
"""Render image examples page."""
return render_template('images.html', im_dict=IM_SAMPLES_DICT, page='images',
title=app.config['TITLE'])
@app.route('/videos')
def videos():
"""Render video examples page."""
return render_template('videos.html', vid_dict=VID_SAMPLES_DICT, page='videos',
title=app.config['TITLE'])
@app.route('/scenes')
def scenes():
"""Render scene examples page."""
return render_template('scenes.html', page='scenes', scenes_dict=SCENE_SAMPLES_DICT,
title=app.config['TITLE'])
@app.route('/contact')
def contact():
"""Render contact me page."""
return render_template('contact.html', page='contact', title=app.config['TITLE'])
@app.route('/about')
def about():
"""Render about page."""
return render_template('about.html', page='about', title=app.config['TITLE'])
@app.route('/demo', methods=['GET', 'POST'])
def demo():
"""Render demo page."""
# Check if file is uploaded
if request.method == 'POST':
try:
# Grab file, and if by_scene is requested from website
file = request.files['file']
by_scene = 'by_scene' in request.form
# Check if filetype is allowed
if file and allowed_file(file.filename):
# Fix filename, save to file, get ext and determine type
file.filename = file.filename.replace(' ', '_')
file_path = save_file(file)
filename, ext = split_filename(file)
typ = determine_type(ext, by_scene)
if typ == 'image':
by_scene = False
# Generate caption/audio and redirect to demo_output page
if not by_scene:
caption = narrator.gen_caption(file_path,
beam_size=app.config['BEAM_SIZE'],
as_string=True,
by_scene=by_scene).capitalize()
cap_audio = filename + '.ogg'
narrator.gen_audio_file(
caption, app.config['UPLOAD_DIR'] + cap_audio)
return redirect(url_for('uploaded_file',
filename=file.filename,
cap_audio=cap_audio,
caption=caption,
typ=typ))
# Generate caption/audio by scene and redirect to demo_output
# page
captions, time_codes = narrator.gen_caption(file_path,
beam_size=app.config[
'BEAM_SIZE'],
as_string=True,
by_scene=by_scene)
scenes_dict = []
for i, caption in enumerate(captions):
narrator.gen_audio_file(caption,
app.config['UPLOAD_DIR'] +
filename + '.' + str(i) + '.ogg')
scenes_dict.append({
'time': time_codes[i],
'cap_audio': filename + '.' + str(i) + '.ogg',
'caption': caption.capitalize()
})
session['scenes_dict'] = scenes_dict
return redirect(url_for('uploaded_file',
filename=file.filename,
typ='scene',
caption='scene',
cap_audio='scene'))
except KeyError as e:
print(e)
return render_template('demo.html', page='demo', title=app.config['TITLE'])
@app.route('/demo/<filename>&<cap_audio>&<typ>&<caption>', methods=['GET', 'POST'])
def uploaded_file(filename, typ='image', caption="", cap_audio=None):
"""Render demo output page."""
    # Duplicate of above -- allows uploading another file directly from the output page
if request.method == 'POST':
try:
# Grab file, and if by_scene is requested from website
file = request.files['file']
by_scene = 'by_scene' in request.form
# Check if filetype is allowed
if file and allowed_file(file.filename):
# Fix filename, save to file, get ext and determine type
file.filename = file.filename.replace(' ', '_')
file_path = save_file(file)
filename, ext = split_filename(file)
typ = determine_type(ext, by_scene)
if typ == 'image':
by_scene = False
# Generate caption/audio and redirect to demo_output page
if not by_scene:
caption = narrator.gen_caption(file_path,
beam_size=app.config[
'BEAM_SIZE'],
as_string=True,
by_scene=by_scene).capitalize()
cap_audio = filename + '.ogg'
narrator.gen_audio_file(
caption, app.config['UPLOAD_DIR'] + cap_audio)
return redirect(url_for('uploaded_file',
filename=file.filename,
cap_audio=cap_audio,
caption=caption,
typ=typ))
# Generate caption/audio by scene and redirect to demo_output
# page
captions, time_codes = narrator.gen_caption(file_path,
beam_size=app.config[
'BEAM_SIZE'],
as_string=True,
by_scene=by_scene)
scenes_dict = []
for i, caption in enumerate(captions):
narrator.gen_audio_file(caption,
app.config['UPLOAD_DIR'] + filename +
'.' + str(i) + '.ogg')
scenes_dict.append({
'time': time_codes[i],
'cap_audio': filename + '.' + str(i) + '.ogg',
'caption': caption.capitalize()
})
session['scenes_dict'] = scenes_dict
return redirect(url_for('uploaded_file',
filename=file.filename,
typ='scene',
caption='scene',
cap_audio='scene'))
except KeyError as e:
print(e)
return render_template('demo_output.html',
filename=filename,
typ=typ,
caption=caption,
cap_audio=cap_audio,
page='demo',
title=app.config['TITLE'])
@app.route('/uploads/<filename>')
def get_upload(filename):
"""Get path to file in upload directory."""
return send_from_directory(app.config['UPLOAD_DIR'], filename)
@app.route('/samples/<filename>')
def get_sample(filename):
"""Get path to file in samples directory."""
return send_from_directory(app.config['SAMPLES_DIR'], filename)
|
[
"sys.path.append",
"app.app.route",
"pandas.DataFrame",
"pandas.read_csv",
"skimage.io.imread",
"os.path.exists",
"flask.url_for",
"flask.render_template",
"flask.send_from_directory",
"os.path.join",
"shutil.copy",
"Narrator.Narrator"
] |
[((240, 297), 'sys.path.append', 'sys.path.append', (["(app.config['COCOAPI_PATH'] + 'PythonAPI')"], {}), "(app.config['COCOAPI_PATH'] + 'PythonAPI')\n", (255, 297), False, 'import sys\n'), ((333, 359), 'sys.path.append', 'sys.path.append', (['"""../src/"""'], {}), "('../src/')\n", (348, 359), False, 'import sys\n'), ((422, 790), 'Narrator.Narrator', 'Narrator', ([], {'root_path': "app.config['ROOT_PATH']", 'coco_vocab_path': "app.config['COCO_VOCAB_PATH']", 'msrvtt_vocab_path': "app.config['MSRVTT_VOCAB_PATH']", 'base_model': "app.config['ENCODER_MODEL']", 'ic_model_path': "app.config['IC_MODEL_PATH']", 'vc_model_path': "app.config['VC_MODEL_PATH']", 'ic_rnn_type': "app.config['IC_RNN_TYPE']", 'vc_rnn_type': "app.config['VC_RNN_TYPE']"}), "(root_path=app.config['ROOT_PATH'], coco_vocab_path=app.config[\n 'COCO_VOCAB_PATH'], msrvtt_vocab_path=app.config['MSRVTT_VOCAB_PATH'],\n base_model=app.config['ENCODER_MODEL'], ic_model_path=app.config[\n 'IC_MODEL_PATH'], vc_model_path=app.config['VC_MODEL_PATH'],\n ic_rnn_type=app.config['IC_RNN_TYPE'], vc_rnn_type=app.config[\n 'VC_RNN_TYPE'])\n", (430, 790), False, 'from Narrator import Narrator\n'), ((8681, 8695), 'app.app.route', 'app.route', (['"""/"""'], {}), "('/')\n", (8690, 8695), False, 'from app import app\n'), ((8697, 8716), 'app.app.route', 'app.route', (['"""/index"""'], {}), "('/index')\n", (8706, 8716), False, 'from app import app\n'), ((8840, 8860), 'app.app.route', 'app.route', (['"""/images"""'], {}), "('/images')\n", (8849, 8860), False, 'from app import app\n'), ((9052, 9072), 'app.app.route', 'app.route', (['"""/videos"""'], {}), "('/videos')\n", (9061, 9072), False, 'from app import app\n'), ((9266, 9286), 'app.app.route', 'app.route', (['"""/scenes"""'], {}), "('/scenes')\n", (9275, 9286), False, 'from app import app\n'), ((9484, 9505), 'app.app.route', 'app.route', (['"""/contact"""'], {}), "('/contact')\n", (9493, 9505), False, 'from app import app\n'), ((9643, 9662), 'app.app.route', 'app.route', (['"""/about"""'], {}), "('/about')\n", (9652, 9662), False, 'from app import app\n'), ((9790, 9833), 'app.app.route', 'app.route', (['"""/demo"""'], {'methods': "['GET', 'POST']"}), "('/demo', methods=['GET', 'POST'])\n", (9799, 9833), False, 'from app import app\n'), ((12945, 13031), 'app.app.route', 'app.route', (['"""/demo/<filename>&<cap_audio>&<typ>&<caption>"""'], {'methods': "['GET', 'POST']"}), "('/demo/<filename>&<cap_audio>&<typ>&<caption>', methods=['GET',\n 'POST'])\n", (12954, 13031), False, 'from app import app\n'), ((16499, 16531), 'app.app.route', 'app.route', (['"""/uploads/<filename>"""'], {}), "('/uploads/<filename>')\n", (16508, 16531), False, 'from app import app\n'), ((16676, 16708), 'app.app.route', 'app.route', (['"""/samples/<filename>"""'], {}), "('/samples/<filename>')\n", (16685, 16708), False, 'from app import app\n'), ((849, 924), 'pandas.read_csv', 'pd.read_csv', (["(app.config['SAMPLES_DIR'] + 'sample_captions.csv')"], {'index_col': '(0)'}), "(app.config['SAMPLES_DIR'] + 'sample_captions.csv', index_col=0)\n", (860, 924), True, 'import pandas as pd\n'), ((1294, 1367), 'pandas.read_csv', 'pd.read_csv', (["(app.config['COCOAPI_PATH'] + 'annotations/coco_captions.csv')"], {}), "(app.config['COCOAPI_PATH'] + 'annotations/coco_captions.csv')\n", (1305, 1367), True, 'import pandas as pd\n'), ((1399, 1445), 'pandas.read_csv', 'pd.read_csv', (["app.config['MSRVTT_CAPTION_PATH']"], {}), "(app.config['MSRVTT_CAPTION_PATH'])\n", (1410, 1445), True, 'import pandas as pd\n'), ((6317, 6360), 'os.path.exists', 
'os.path.exists', (["(scene_example_file + '.csv')"], {}), "(scene_example_file + '.csv')\n", (6331, 6360), False, 'import os\n'), ((6569, 6636), 'pandas.DataFrame', 'pd.DataFrame', (["{'time': scene_change_timecodes, 'caption': captions}"], {}), "({'time': scene_change_timecodes, 'caption': captions})\n", (6581, 6636), True, 'import pandas as pd\n'), ((7113, 7153), 'pandas.read_csv', 'pd.read_csv', (["(scene_example_file + '.csv')"], {}), "(scene_example_file + '.csv')\n", (7124, 7153), True, 'import pandas as pd\n'), ((7945, 7998), 'os.path.join', 'os.path.join', (["app.config['UPLOAD_DIR']", 'file.filename'], {}), "(app.config['UPLOAD_DIR'], file.filename)\n", (7957, 7998), False, 'import os\n'), ((8768, 8836), 'flask.render_template', 'render_template', (['"""main.html"""'], {'page': '"""main"""', 'title': "app.config['TITLE']"}), "('main.html', page='main', title=app.config['TITLE'])\n", (8783, 8836), False, 'from flask import render_template, request, redirect, url_for, send_from_directory, session\n'), ((8924, 9025), 'flask.render_template', 'render_template', (['"""images.html"""'], {'im_dict': 'IM_SAMPLES_DICT', 'page': '"""images"""', 'title': "app.config['TITLE']"}), "('images.html', im_dict=IM_SAMPLES_DICT, page='images',\n title=app.config['TITLE'])\n", (8939, 9025), False, 'from flask import render_template, request, redirect, url_for, send_from_directory, session\n'), ((9136, 9239), 'flask.render_template', 'render_template', (['"""videos.html"""'], {'vid_dict': 'VID_SAMPLES_DICT', 'page': '"""videos"""', 'title': "app.config['TITLE']"}), "('videos.html', vid_dict=VID_SAMPLES_DICT, page='videos',\n title=app.config['TITLE'])\n", (9151, 9239), False, 'from flask import render_template, request, redirect, url_for, send_from_directory, session\n'), ((9350, 9459), 'flask.render_template', 'render_template', (['"""scenes.html"""'], {'page': '"""scenes"""', 'scenes_dict': 'SCENE_SAMPLES_DICT', 'title': "app.config['TITLE']"}), "('scenes.html', page='scenes', scenes_dict=\n SCENE_SAMPLES_DICT, title=app.config['TITLE'])\n", (9365, 9459), False, 'from flask import render_template, request, redirect, url_for, send_from_directory, session\n'), ((9566, 9640), 'flask.render_template', 'render_template', (['"""contact.html"""'], {'page': '"""contact"""', 'title': "app.config['TITLE']"}), "('contact.html', page='contact', title=app.config['TITLE'])\n", (9581, 9640), False, 'from flask import render_template, request, redirect, url_for, send_from_directory, session\n'), ((9716, 9786), 'flask.render_template', 'render_template', (['"""about.html"""'], {'page': '"""about"""', 'title': "app.config['TITLE']"}), "('about.html', page='about', title=app.config['TITLE'])\n", (9731, 9786), False, 'from flask import render_template, request, redirect, url_for, send_from_directory, session\n'), ((12873, 12941), 'flask.render_template', 'render_template', (['"""demo.html"""'], {'page': '"""demo"""', 'title': "app.config['TITLE']"}), "('demo.html', page='demo', title=app.config['TITLE'])\n", (12888, 12941), False, 'from flask import render_template, request, redirect, url_for, send_from_directory, session\n'), ((16192, 16338), 'flask.render_template', 'render_template', (['"""demo_output.html"""'], {'filename': 'filename', 'typ': 'typ', 'caption': 'caption', 'cap_audio': 'cap_audio', 'page': '"""demo"""', 'title': "app.config['TITLE']"}), "('demo_output.html', filename=filename, typ=typ, caption=\n caption, cap_audio=cap_audio, page='demo', title=app.config['TITLE'])\n", (16207, 16338), False, 'from 
flask import render_template, request, redirect, url_for, send_from_directory, session\n'), ((16617, 16672), 'flask.send_from_directory', 'send_from_directory', (["app.config['UPLOAD_DIR']", 'filename'], {}), "(app.config['UPLOAD_DIR'], filename)\n", (16636, 16672), False, 'from flask import render_template, request, redirect, url_for, send_from_directory, session\n'), ((16795, 16851), 'flask.send_from_directory', 'send_from_directory', (["app.config['SAMPLES_DIR']", 'filename'], {}), "(app.config['SAMPLES_DIR'], filename)\n", (16814, 16851), False, 'from flask import render_template, request, redirect, url_for, send_from_directory, session\n'), ((4717, 4776), 'shutil.copy', 'shutil.copy', (['url', "(app.config['SAMPLES_DIR'] + name + '.mp4')"], {}), "(url, app.config['SAMPLES_DIR'] + name + '.mp4')\n", (4728, 4776), False, 'import shutil\n'), ((8013, 8066), 'os.path.join', 'os.path.join', (["app.config['UPLOAD_DIR']", 'file.filename'], {}), "(app.config['UPLOAD_DIR'], file.filename)\n", (8025, 8066), False, 'import os\n'), ((958, 1019), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['id', 'caption', 'gt']", 'index': "['name']"}), "(columns=['id', 'caption', 'gt'], index=['name'])\n", (970, 1019), True, 'import pandas as pd\n'), ((3229, 3243), 'skimage.io.imread', 'io.imread', (['url'], {}), '(url)\n', (3238, 3243), True, 'import skimage.io as io\n'), ((12552, 12654), 'flask.url_for', 'url_for', (['"""uploaded_file"""'], {'filename': 'file.filename', 'typ': '"""scene"""', 'caption': '"""scene"""', 'cap_audio': '"""scene"""'}), "('uploaded_file', filename=file.filename, typ='scene', caption=\n 'scene', cap_audio='scene')\n", (12559, 12654), False, 'from flask import render_template, request, redirect, url_for, send_from_directory, session\n'), ((15871, 15973), 'flask.url_for', 'url_for', (['"""uploaded_file"""'], {'filename': 'file.filename', 'typ': '"""scene"""', 'caption': '"""scene"""', 'cap_audio': '"""scene"""'}), "('uploaded_file', filename=file.filename, typ='scene', caption=\n 'scene', cap_audio='scene')\n", (15878, 15973), False, 'from flask import render_template, request, redirect, url_for, send_from_directory, session\n'), ((11177, 11276), 'flask.url_for', 'url_for', (['"""uploaded_file"""'], {'filename': 'file.filename', 'cap_audio': 'cap_audio', 'caption': 'caption', 'typ': 'typ'}), "('uploaded_file', filename=file.filename, cap_audio=cap_audio,\n caption=caption, typ=typ)\n", (11184, 11276), False, 'from flask import render_template, request, redirect, url_for, send_from_directory, session\n'), ((14496, 14595), 'flask.url_for', 'url_for', (['"""uploaded_file"""'], {'filename': 'file.filename', 'cap_audio': 'cap_audio', 'caption': 'caption', 'typ': 'typ'}), "('uploaded_file', filename=file.filename, cap_audio=cap_audio,\n caption=caption, typ=typ)\n", (14503, 14595), False, 'from flask import render_template, request, redirect, url_for, send_from_directory, session\n')]
|
from tkinter import *
import cv2
import numpy as np
import urllib.request
import pandas as pd
from tkinter import filedialog
from PIL import ImageTk,Image
import pyperclip as pc
root = Tk()
root.title("Image Color Detection")
root.geometry("936x536+300+130")
root.configure(bg='#243B53')
image_path = ""
def open():
global image_path
root.filename = filedialog.askopenfilename(initialdir=r"C:\Users\7p\Desktop\temp pypro\python-project-color-detection",title="Select an image file", filetypes=(("All files","*.*"),("jpg files","*.jpg"),("png files","*.png")))
image_path = root.filename
print(image_path)
# open select2 btn image
selectimg2 = Image.open("C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/selectbtn2.png")
#resize btn image
resized2 = selectimg2.resize((200,50),Image.ANTIALIAS)
finalimg2 = ImageTk.PhotoImage(resized2)
my_btn.configure(image=finalimg2,state=DISABLED)
my_btn.image=finalimg2
root.configure(bg='#363062')
return image_path
image_url=StringVar()
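# Download the image from the entered URL, decode it with OpenCV and close the selection window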
def urlimg():
imgurl = image_url.get()
url_response = urllib.request.urlopen(imgurl)
img_array = np.array(bytearray(url_response.read()), dtype=np.uint8)
urlimg.image = cv2.imdecode(img_array,-1)
image_url.set("")
root.destroy()
# open urllabel btn image
urllabel = Image.open("C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/urllabel.png")
#resize btn image
resized3 = urllabel.resize((100,50),Image.ANTIALIAS)
finalimg3 = ImageTk.PhotoImage(resized3)
img_label = Label(root, image=finalimg3,borderwidth=0,bg='#243B53').place(x=150,y=260)
# open urlopen btn image
urlopen = Image.open("C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/urlopen.png")
#resize btn image
resized4 = urlopen.resize((200,50),Image.ANTIALIAS)
finalimg4 = ImageTk.PhotoImage(resized4)
url_btn=Button(root,image=finalimg4, command = urlimg,borderwidth=0,bg='#243B53').place(x=590,y=260)
img_entry = Entry(root,textvariable = image_url,width=12,font=('Roboto',26)).place(x=300,y=260)
# open select btn image
selectimg = Image.open("C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/selectbtn.png")
#resize btn image
resized = selectimg.resize((200,50),Image.ANTIALIAS)
finalimg = ImageTk.PhotoImage(resized)
my_btn = Button(root,image=finalimg,command=open,borderwidth=0,bg='#243B53')
my_btn.place(x=100,y=150)
# open start btn image
openimg = Image.open("C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/startbtn1.png")
#resize btn image
resized1 = openimg.resize((118,50),Image.ANTIALIAS)
finalimg1 = ImageTk.PhotoImage(resized1)
strt_btn = Button(root,image=finalimg1,command=root.quit,borderwidth=0,bg='#243B53').place(x=620,y=155)
root.mainloop()
if image_path == "":
img = urlimg.image
else :
#Reading the image with opencv
img = cv2.imread(image_path)
#declaring global variables (are used later on)
clicked = False
r = g = b = hexcode = xpos = ypos = 0
#Reading csv file with pandas and giving names to each column
index=["color","color_name","hex","R","G","B"]
csv = pd.read_csv('C:/Users/7p/Desktop/temp pypro/python-project-color-detection/colors.csv', names=index, header=None)
#function to calculate minimum distance from all colors and get the most matching color
def getColorName(R,G,B):
minimum = 10000
for i in range(len(csv)):
d = abs(R- int(csv.loc[i,"R"])) + abs(G- int(csv.loc[i,"G"]))+ abs(B- int(csv.loc[i,"B"]))
if(d<=minimum):
minimum = d
getColorName.cname = csv.loc[i,"color_name"]
getColorName.hexcode = csv.loc[i,"hex"]
#function to get x,y coordinates of mouse double click
def draw_function(event, x,y,flags,param):
if event == cv2.EVENT_LBUTTONDBLCLK:
global b,g,r,xpos,ypos, clicked
clicked = True
xpos = x
ypos = y
b,g,r = img[y,x]
b = int(b)
g = int(g)
r = int(r)
cv2.namedWindow('Image Color Detection')
cv2.setMouseCallback('Image Color Detection',draw_function)
while(1):
cv2.imshow("Image Color Detection",img)
if (clicked):
#scale text according to image size
imageWidth = img.shape[0]
imageHeight = img.shape[1]
fontScale = min(imageWidth,imageHeight)/(800)
#cv2.rectangle(image, startpoint, endpoint, color, thickness)-1 fills entire rectangle
cv2.rectangle(img,(50,10), (max(imageWidth,imageHeight),50), (b,g,r), -1)
getColorName(r,g,b)
#Creating text string to display( Color name and RGB values )
text = getColorName.cname + ' R='+ str(r) + ' G='+ str(g) + ' B='+ str(b) +" "+ getColorName.hexcode
#copying color code to clipboard
pc.copy(getColorName.hexcode)
#scale text according to image size
imageWidth = img.shape[0]
imageHeight = img.shape[1]
fontScale = min(imageWidth,imageHeight)/(800)
#cv2.putText(img,text,start,font(0-7),fontScale,color,thickness,lineType )
cv2.putText(img, text,(50,40),2,fontScale,(255,255,255),1,cv2.LINE_AA)
#For very light colours we will display text in black colour
if(r+g+b>=600):
cv2.putText(img, text,(50,40),2,fontScale,(0,0,0),1,cv2.LINE_AA)
clicked=False
#Break the loop when User hits 'enter' key
if cv2.waitKey(20) & 0xFF ==13:
break
cv2.destroyAllWindows()
|
[
"PIL.ImageTk.PhotoImage",
"cv2.putText",
"pandas.read_csv",
"cv2.waitKey",
"cv2.imdecode",
"cv2.imshow",
"tkinter.filedialog.askopenfilename",
"PIL.Image.open",
"cv2.imread",
"cv2.setMouseCallback",
"pyperclip.copy",
"cv2.destroyAllWindows",
"cv2.namedWindow"
] |
[((1400, 1506), 'PIL.Image.open', 'Image.open', (['"""C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/urllabel.png"""'], {}), "(\n 'C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/urllabel.png'\n )\n", (1410, 1506), False, 'from PIL import ImageTk, Image\n'), ((1583, 1611), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['resized3'], {}), '(resized3)\n', (1601, 1611), False, 'from PIL import ImageTk, Image\n'), ((1743, 1848), 'PIL.Image.open', 'Image.open', (['"""C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/urlopen.png"""'], {}), "(\n 'C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/urlopen.png'\n )\n", (1753, 1848), False, 'from PIL import ImageTk, Image\n'), ((1924, 1952), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['resized4'], {}), '(resized4)\n', (1942, 1952), False, 'from PIL import ImageTk, Image\n'), ((2202, 2309), 'PIL.Image.open', 'Image.open', (['"""C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/selectbtn.png"""'], {}), "(\n 'C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/selectbtn.png'\n )\n", (2212, 2309), False, 'from PIL import ImageTk, Image\n'), ((2385, 2412), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['resized'], {}), '(resized)\n', (2403, 2412), False, 'from PIL import ImageTk, Image\n'), ((2557, 2664), 'PIL.Image.open', 'Image.open', (['"""C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/startbtn1.png"""'], {}), "(\n 'C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/startbtn1.png'\n )\n", (2567, 2664), False, 'from PIL import ImageTk, Image\n'), ((2740, 2768), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['resized1'], {}), '(resized1)\n', (2758, 2768), False, 'from PIL import ImageTk, Image\n'), ((3260, 3382), 'pandas.read_csv', 'pd.read_csv', (['"""C:/Users/7p/Desktop/temp pypro/python-project-color-detection/colors.csv"""'], {'names': 'index', 'header': 'None'}), "(\n 'C:/Users/7p/Desktop/temp pypro/python-project-color-detection/colors.csv',\n names=index, header=None)\n", (3271, 3382), True, 'import pandas as pd\n'), ((4148, 4188), 'cv2.namedWindow', 'cv2.namedWindow', (['"""Image Color Detection"""'], {}), "('Image Color Detection')\n", (4163, 4188), False, 'import cv2\n'), ((4194, 4254), 'cv2.setMouseCallback', 'cv2.setMouseCallback', (['"""Image Color Detection"""', 'draw_function'], {}), "('Image Color Detection', draw_function)\n", (4214, 4254), False, 'import cv2\n'), ((5659, 5682), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (5680, 5682), False, 'import cv2\n'), ((377, 610), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {'initialdir': '"""C:\\\\Users\\\\7p\\\\Desktop\\\\temp pypro\\\\python-project-color-detection"""', 'title': '"""Select an image file"""', 'filetypes': "(('All files', '*.*'), ('jpg files', '*.jpg'), ('png files', '*.png'))"}), "(initialdir=\n 'C:\\\\Users\\\\7p\\\\Desktop\\\\temp pypro\\\\python-project-color-detection',\n title='Select an image file', filetypes=(('All files', '*.*'), (\n 'jpg files', '*.jpg'), ('png files', '*.png')))\n", (403, 610), False, 'from tkinter import filedialog\n'), ((692, 800), 'PIL.Image.open', 'Image.open', (['"""C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/selectbtn2.png"""'], {}), "(\n 'C:/Users/7p/Desktop/temp pypro/python-project-color-detection/buttons/selectbtn2.png'\n )\n", (702, 800), False, 'from PIL import ImageTk, Image\n'), 
((893, 921), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['resized2'], {}), '(resized2)\n', (911, 921), False, 'from PIL import ImageTk, Image\n'), ((1286, 1313), 'cv2.imdecode', 'cv2.imdecode', (['img_array', '(-1)'], {}), '(img_array, -1)\n', (1298, 1313), False, 'import cv2\n'), ((3008, 3030), 'cv2.imread', 'cv2.imread', (['image_path'], {}), '(image_path)\n', (3018, 3030), False, 'import cv2\n'), ((4274, 4314), 'cv2.imshow', 'cv2.imshow', (['"""Image Color Detection"""', 'img'], {}), "('Image Color Detection', img)\n", (4284, 4314), False, 'import cv2\n'), ((4957, 4986), 'pyperclip.copy', 'pc.copy', (['getColorName.hexcode'], {}), '(getColorName.hexcode)\n', (4964, 4986), True, 'import pyperclip as pc\n'), ((5265, 5344), 'cv2.putText', 'cv2.putText', (['img', 'text', '(50, 40)', '(2)', 'fontScale', '(255, 255, 255)', '(1)', 'cv2.LINE_AA'], {}), '(img, text, (50, 40), 2, fontScale, (255, 255, 255), 1, cv2.LINE_AA)\n', (5276, 5344), False, 'import cv2\n'), ((5446, 5519), 'cv2.putText', 'cv2.putText', (['img', 'text', '(50, 40)', '(2)', 'fontScale', '(0, 0, 0)', '(1)', 'cv2.LINE_AA'], {}), '(img, text, (50, 40), 2, fontScale, (0, 0, 0), 1, cv2.LINE_AA)\n', (5457, 5519), False, 'import cv2\n'), ((5610, 5625), 'cv2.waitKey', 'cv2.waitKey', (['(20)'], {}), '(20)\n', (5621, 5625), False, 'import cv2\n')]
|
import pytest, logging
from read_video import read_video
def test_read_video():
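    # Iterate over all frames of the sample clip and require at least one non-None frame.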
test_data = "/Users/pepper/Projekte/PythonProjects/GM_brightness_metric/resources/video/Brosserness_4sec_h264_1920x1080_24fps_2Ch-stereo.mp4"
#logging.info('ERROR')
i = 0
for frame in read_video(test_data):
logging.info("Output in loop")
assert frame is not None, "fail"
i+=1
assert i>0
|
[
"read_video.read_video",
"logging.info"
] |
[((281, 302), 'read_video.read_video', 'read_video', (['test_data'], {}), '(test_data)\n', (291, 302), False, 'from read_video import read_video\n'), ((312, 342), 'logging.info', 'logging.info', (['"""Output in loop"""'], {}), "('Output in loop')\n", (324, 342), False, 'import pytest, logging\n')]
|
import pyglet
import platform
import struct
from ctypes import addressof,pointer
import ctypes
from compushady import HEAP_UPLOAD, Swapchain, Compute, Texture2D, Buffer
from compushady.formats import B8G8R8A8_UNORM
from compushady.shaders import hlsl
if platform.system() != 'Windows':
raise Exception('only Windows is supported for this example')
window = pyglet.window.Window()
swapchain = Swapchain(window._hwnd, B8G8R8A8_UNORM, 3)
target = Texture2D(window.width, window.height, B8G8R8A8_UNORM)
clear_screen = Compute(hlsl.compile("""
RWTexture2D<float4> target : register(u0);
[numthreads(8,8,1)]
void main(uint3 tid : SV_DispatchThreadID)
{
target[tid.xy] = float4(1, 0, 0, 1);
}
"""), uav=[target])
constant_buffer = Buffer(8, HEAP_UPLOAD)
quad = Compute(hlsl.compile("""
struct Quad
{
uint x;
uint y;
};
ConstantBuffer<Quad> quad : register(b0);
RWTexture2D<float4> target : register(u0);
[numthreads(8,8,1)]
void main(uint3 tid : SV_DispatchThreadID)
{
target[tid.xy + uint2(quad.x, quad.y)] = float4(0, 1, 1, 1);
}
"""), cbv=[constant_buffer], uav=[target])
x = 0
y = 0
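# Advance the quad by one pixel per tick, wrapping at the window edges.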
def update(dt):
global x, y
x += 1
y += 1
if x > window.width:
x = 0
if y > window.height:
y = 0
constant_buffer.upload(struct.pack('II', x, y))
@window.event
def on_draw():
clear_screen.dispatch(window.width // 8, window.height // 8, 1)
quad.dispatch(1, 1, 1)
swapchain.present(target)
pyglet.clock.schedule_interval(update, 1/120.0)
pyglet.app.run()
|
[
"compushady.shaders.hlsl.compile",
"pyglet.app.run",
"compushady.Texture2D",
"struct.pack",
"compushady.Buffer",
"platform.system",
"pyglet.window.Window",
"compushady.Swapchain",
"pyglet.clock.schedule_interval"
] |
[((363, 385), 'pyglet.window.Window', 'pyglet.window.Window', ([], {}), '()\n', (383, 385), False, 'import pyglet\n'), ((399, 441), 'compushady.Swapchain', 'Swapchain', (['window._hwnd', 'B8G8R8A8_UNORM', '(3)'], {}), '(window._hwnd, B8G8R8A8_UNORM, 3)\n', (408, 441), False, 'from compushady import HEAP_UPLOAD, Swapchain, Compute, Texture2D, Buffer\n'), ((451, 505), 'compushady.Texture2D', 'Texture2D', (['window.width', 'window.height', 'B8G8R8A8_UNORM'], {}), '(window.width, window.height, B8G8R8A8_UNORM)\n', (460, 505), False, 'from compushady import HEAP_UPLOAD, Swapchain, Compute, Texture2D, Buffer\n'), ((736, 758), 'compushady.Buffer', 'Buffer', (['(8)', 'HEAP_UPLOAD'], {}), '(8, HEAP_UPLOAD)\n', (742, 758), False, 'from compushady import HEAP_UPLOAD, Swapchain, Compute, Texture2D, Buffer\n'), ((1451, 1500), 'pyglet.clock.schedule_interval', 'pyglet.clock.schedule_interval', (['update', '(1 / 120.0)'], {}), '(update, 1 / 120.0)\n', (1481, 1500), False, 'import pyglet\n'), ((1500, 1516), 'pyglet.app.run', 'pyglet.app.run', ([], {}), '()\n', (1514, 1516), False, 'import pyglet\n'), ((255, 272), 'platform.system', 'platform.system', ([], {}), '()\n', (270, 272), False, 'import platform\n'), ((529, 711), 'compushady.shaders.hlsl.compile', 'hlsl.compile', (['"""\nRWTexture2D<float4> target : register(u0);\n[numthreads(8,8,1)]\nvoid main(uint3 tid : SV_DispatchThreadID)\n{\n target[tid.xy] = float4(1, 0, 0, 1);\n}\n"""'], {}), '(\n """\nRWTexture2D<float4> target : register(u0);\n[numthreads(8,8,1)]\nvoid main(uint3 tid : SV_DispatchThreadID)\n{\n target[tid.xy] = float4(1, 0, 0, 1);\n}\n"""\n )\n', (541, 711), False, 'from compushady.shaders import hlsl\n'), ((775, 1064), 'compushady.shaders.hlsl.compile', 'hlsl.compile', (['"""\nstruct Quad\n{\n uint x;\n uint y;\n};\nConstantBuffer<Quad> quad : register(b0);\nRWTexture2D<float4> target : register(u0);\n[numthreads(8,8,1)]\nvoid main(uint3 tid : SV_DispatchThreadID)\n{\n target[tid.xy + uint2(quad.x, quad.y)] = float4(0, 1, 1, 1);\n}\n"""'], {}), '(\n """\nstruct Quad\n{\n uint x;\n uint y;\n};\nConstantBuffer<Quad> quad : register(b0);\nRWTexture2D<float4> target : register(u0);\n[numthreads(8,8,1)]\nvoid main(uint3 tid : SV_DispatchThreadID)\n{\n target[tid.xy + uint2(quad.x, quad.y)] = float4(0, 1, 1, 1);\n}\n"""\n )\n', (787, 1064), False, 'from compushady.shaders import hlsl\n'), ((1268, 1291), 'struct.pack', 'struct.pack', (['"""II"""', 'x', 'y'], {}), "('II', x, y)\n", (1279, 1291), False, 'import struct\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2018-09-07 21:53
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', 'remove_atmosphereuser_selected_identity'),
]
operations = [
migrations.AlterUniqueTogether(
name='providerdnsserverip',
unique_together=set([]),
),
migrations.RemoveField(
model_name='providerdnsserverip',
name='provider',
),
migrations.DeleteModel(name='ProviderDNSServerIP', ),
]
|
[
"django.db.migrations.RemoveField",
"django.db.migrations.DeleteModel"
] |
[((433, 506), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""providerdnsserverip"""', 'name': '"""provider"""'}), "(model_name='providerdnsserverip', name='provider')\n", (455, 506), False, 'from django.db import migrations\n'), ((551, 601), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""ProviderDNSServerIP"""'}), "(name='ProviderDNSServerIP')\n", (573, 601), False, 'from django.db import migrations\n')]
|
import obspy
import read_event_obspy_file as reof
from getwaveform import *
def get_ev_info(ev_info,iex):
# ===============================================================
# SilwalTape2016 example event (Anchorage)
if iex == 0:
ev_info.use_catalog = 0
ev_info.otime = obspy.UTCDateTime("2009-04-07T20:12:55.351")
ev_info.min_dist = 0
ev_info.max_dist = 300
ev_info.tbefore_sec = 100
ev_info.tafter_sec = 300
#output all proccessing steps
ev_info.ifverbose = True
#keep stations with missing components and fill the missing component with a null trace (MPEN)
#Be sure to set the null component to 0 in the weight file when running cap
#ev_info.icreateNull = 1
ev_info.icreateNull = 0
#RAW and ENZ files can be used when checking if you are receiving all the data ($PYSEP/check_getwaveform.bash)
ev_info.isave_raw = False
ev_info.isave_raw_processed = False
#ev_info.isave_raw = True
#ev_info.isave_raw_processed = True
ev_info.isave_ENZ = False
#ev_info.isave_ENZ = True
#ev_info.min_lat = 59
#ev_info.max_lat = 62
#ev_info.min_lon = -152
#ev_info.max_lon = -147
# default list of Alaska networks
# note 1: cannot use '*' because of IM
# note 2: may want to exclude the mid-band AV network
# note 3: these are temporary:
# XE BEAAR 1999
# XR ARCTIC 2004
# XZ STEEP 2005
# YV MOOS 2006
# XV FLATS 2014
# <NAME> 2015
# XG WVF 2016
# [7C MMEP 2015]
# TA
#ev_info.network = 'AK,AT,AV,CN,II,IU,US,XM,XV,XZ,YV'
#ev_info.network = 'AK' # for testing
ev_info.network = 'AK,AT,AV,CN,II,IU,US,XM,TA,XE,XR,XZ,YV,XV,ZE,XG'
ev_info.channel = 'BH?'
ev_info.use_catalog = 0
ev_info.elat = 61.45420
ev_info.elon = -149.7428
ev_info.edep = 33033.60
# ev_info.rlat = 61.45420
# ev_info.rlon = -149.7428
# ev_info.rtime = obspy.UTCDateTime("2009-04-07T20:12:55.351")
ev_info.emag = 4.6
ev_info.resample_freq = 50
ev_info.scale_factor = 100
#ev_info.phase_window = False
#-------for specfem------------
#ev_info.tbefore_sec = 0
#ev_info.resample_TF = False
#ev_info.scale_factor = 1
#ev_info.outformat = 'DISP'
#------------------------------
return(ev_info)
|
[
"obspy.UTCDateTime"
] |
[((289, 333), 'obspy.UTCDateTime', 'obspy.UTCDateTime', (['"""2009-04-07T20:12:55.351"""'], {}), "('2009-04-07T20:12:55.351')\n", (306, 333), False, 'import obspy\n')]
|
import shutil
import sys
from PIL import Image
def get_term_width():
""" return terminal width
this function depends upon shutil.get_terminal_size
this works only on Python >= 3.3
"""
return shutil.get_terminal_size().columns
def get_aspect_ratio(img):
""" return the aspect ratio of given image
ar = width//height
return an int, we don't care about exact ratios
"""
width, height = img.size
aspect_ratio = width//height
if aspect_ratio == 0:
aspect_ratio = 1
return aspect_ratio
def get_height(width, aspect_ratio):
""" return height with respect to given aspect ratio """
return width//aspect_ratio
def resize_img(img):
""" return a resized image
resize acc. to given terminal width
keeping in mind the original aspect ratio
"""
term_width = get_term_width()
# divide by 2 because we use 2 characters per pixel
width = term_width//2
aspect_ratio = get_aspect_ratio(img)
height = get_height(width, aspect_ratio)
return img.resize((width, height))
def draw_ascii(img):
""" draw ascii art from the provided image
use # for black
use . for white
before drawing, convert the image to black and white
then resize it according to terminal width
"""
# convert image to black and white
img = img.convert('L')
# resize image to match terminal width and aspect ratio
img = resize_img(img)
width, height = img.size
for y in range(height):
for x in range(width):
if img.getpixel((x, y)) < 15:
print('# ', end='')
else:
print('. ', end='')
print()
if __name__ == '__main__':
if len(sys.argv) < 2:
print('Please enter an image name as argument')
sys.exit(1)
img_file = sys.argv[1]
try:
img = Image.open(img_file)
draw_ascii(img)
except IOError:
print('Enter correct file')
sys.exit(1)
|
[
"shutil.get_terminal_size",
"sys.exit",
"PIL.Image.open"
] |
[((223, 249), 'shutil.get_terminal_size', 'shutil.get_terminal_size', ([], {}), '()\n', (247, 249), False, 'import shutil\n'), ((1846, 1857), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1854, 1857), False, 'import sys\n'), ((1909, 1929), 'PIL.Image.open', 'Image.open', (['img_file'], {}), '(img_file)\n', (1919, 1929), False, 'from PIL import Image\n'), ((2018, 2029), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2026, 2029), False, 'import sys\n')]
|
from unittest import TestCase
from unittest.mock import patch, Mock
from rpiatipo.Events import Event, EventService
class EventsTest(TestCase):
@patch('rpiatipo.Events.EventService')
def setUp(self, MockEventService):
self.event = Event(type="test", data={"data": 1})
self.eventService = MockEventService()
self.eventService.create.return_value = self.event
def test_CreateEvent_EventService(self):
response = self.eventService.create()
self.assertIsInstance(response, Event)
def test_GetIdEvent_Success_EventService(self):
self.eventService.getId.side_effect = self.side_effect("1")
response = self.eventService.getId()
self.assertIsInstance(response, Event)
def test_GetIdEvent_NotFound_EventService(self):
self.eventService.getId.side_effect = self.side_effect("0")
response = self.eventService.getId()
self.assertNotIsInstance(response, Event)
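    # Helper: a one-element list makes Mock.side_effect yield the event once; None falls back to the default Mock return value.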
def side_effect(self, id):
if (id=="1"):
return [self.event]
else:
return None
|
[
"unittest.mock.patch",
"rpiatipo.Events.Event"
] |
[((150, 187), 'unittest.mock.patch', 'patch', (['"""rpiatipo.Events.EventService"""'], {}), "('rpiatipo.Events.EventService')\n", (155, 187), False, 'from unittest.mock import patch, Mock\n'), ((248, 284), 'rpiatipo.Events.Event', 'Event', ([], {'type': '"""test"""', 'data': "{'data': 1}"}), "(type='test', data={'data': 1})\n", (253, 284), False, 'from rpiatipo.Events import Event, EventService\n')]
|
""" $lic$
Copyright (C) 2016-2019 by The Board of Trustees of Stanford University
This program is free software: you can redistribute it and/or modify it under
the terms of the Modified BSD-3 License as published by the Open Source
Initiative.
This program is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the BSD-3 License for more details.
You should have received a copy of the Modified BSD-3 License along with this
program. If not, see <https://opensource.org/licenses/BSD-3-Clause>.
"""
import unittest
from nn_dataflow.core import NodeRegion
from nn_dataflow.core import PhyDim2
from nn_dataflow.core import Resource
class TestResource(unittest.TestCase):
''' Tests for Resource. '''
def setUp(self):
self.proc_region = NodeRegion(dim=PhyDim2(2, 2), origin=PhyDim2(0, 0),
type=NodeRegion.PROC)
self.dram_region = NodeRegion(dim=PhyDim2(2, 2), origin=PhyDim2(0, 0),
type=NodeRegion.DRAM)
self.src_data_region = NodeRegion(dim=PhyDim2(2, 1),
origin=PhyDim2(0, 0),
type=NodeRegion.DRAM)
self.dst_data_region = NodeRegion(dim=PhyDim2(2, 1),
origin=PhyDim2(0, 1),
type=NodeRegion.DRAM)
def test_valid_args(self):
''' Valid arguments. '''
resource = Resource(proc_region=self.proc_region,
dram_region=self.dram_region,
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=8,
dram_bandwidth=128,
no_time_mux=False,
)
self.assertTupleEqual(resource.proc_region.dim, (2, 2), 'proc_region')
self.assertTupleEqual(resource.dram_region.dim, (2, 2), 'dram_region')
self.assertTupleEqual(resource.dim_array, (16, 16), 'dim_array')
self.assertEqual(resource.size_gbuf, 131072, 'size_gbuf')
self.assertEqual(resource.size_regf, 512, 'size_regf')
self.assertEqual(resource.array_bus_width, 8, 'array_bus_width')
self.assertEqual(resource.dram_bandwidth, 128, 'dram_bandwidth')
self.assertFalse(resource.no_time_mux, 'no_time_mux')
def test_invalid_proc_region(self):
''' Invalid proc_region. '''
with self.assertRaisesRegexp(TypeError, 'Resource: .*proc_region.*'):
_ = Resource(proc_region=PhyDim2(2, 2),
dram_region=self.dram_region,
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=8,
dram_bandwidth=128,
no_time_mux=False,
)
def test_invalid_proc_region_dram(self):
''' Invalid proc_region with type DRAM. '''
with self.assertRaisesRegexp(ValueError, 'Resource: .*proc_.*type.*'):
_ = Resource(proc_region=NodeRegion(dim=PhyDim2(2, 2),
origin=PhyDim2(0, 0),
type=NodeRegion.DRAM),
dram_region=self.dram_region,
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=8,
dram_bandwidth=128,
no_time_mux=False,
)
def test_invalid_dram_region(self):
''' Invalid dram_region. '''
with self.assertRaisesRegexp(TypeError, 'Resource: .*dram_region.*'):
_ = Resource(proc_region=self.proc_region,
dram_region=PhyDim2(2, 2),
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=8,
dram_bandwidth=128,
no_time_mux=False,
)
def test_invalid_dram_region_proc(self):
''' Invalid dram_region with type DRAM. '''
with self.assertRaisesRegexp(ValueError, 'Resource: .*dram_.*type.*'):
_ = Resource(proc_region=self.proc_region,
dram_region=NodeRegion(dim=PhyDim2(2, 2),
origin=PhyDim2(0, 0),
type=NodeRegion.PROC),
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=8,
dram_bandwidth=128,
no_time_mux=False,
)
def test_invalid_data_region(self):
''' Invalid src/dst_proc_region. '''
with self.assertRaisesRegexp(TypeError, 'Resource: .*src_data_.*'):
_ = Resource(proc_region=self.proc_region,
dram_region=self.dram_region,
src_data_region=PhyDim2(2, 1),
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=8,
dram_bandwidth=128,
no_time_mux=False,
)
with self.assertRaisesRegexp(TypeError, 'Resource: .*dst_data_.*'):
_ = Resource(proc_region=self.proc_region,
dram_region=self.dram_region,
src_data_region=self.src_data_region,
dst_data_region=PhyDim2(2, 1),
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=8,
dram_bandwidth=128,
no_time_mux=False,
)
def test_invalid_dim_array(self):
''' Invalid dim_array. '''
with self.assertRaisesRegexp(TypeError, 'Resource: .*dim_array.*'):
_ = Resource(proc_region=self.proc_region,
dram_region=self.dram_region,
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=8,
dram_bandwidth=128,
no_time_mux=False,
)
def test_invalid_size_gbuf(self):
''' Invalid size_gbuf. '''
with self.assertRaisesRegexp(TypeError, 'Resource: .*size_gbuf.*'):
_ = Resource(proc_region=self.proc_region,
dram_region=self.dram_region,
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=(131072,),
size_regf=512,
array_bus_width=8,
dram_bandwidth=128,
no_time_mux=False,
)
def test_invalid_size_regf(self):
''' Invalid size_regf. '''
with self.assertRaisesRegexp(TypeError, 'Resource: .*size_regf.*'):
_ = Resource(proc_region=self.proc_region,
dram_region=self.dram_region,
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=(512,),
array_bus_width=8,
dram_bandwidth=128,
no_time_mux=False,
)
def test_invalid_array_bus_width(self):
''' Invalid array_bus_width. '''
with self.assertRaisesRegexp(TypeError,
'Resource: .*array_bus_width.*'):
_ = Resource(proc_region=self.proc_region,
dram_region=self.dram_region,
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=1.2,
dram_bandwidth=128,
no_time_mux=False,
)
with self.assertRaisesRegexp(ValueError,
'Resource: .*array_bus_width.*'):
_ = Resource(proc_region=self.proc_region,
dram_region=self.dram_region,
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=-2,
dram_bandwidth=128,
no_time_mux=False,
)
with self.assertRaisesRegexp(ValueError,
'Resource: .*array_bus_width.*'):
_ = Resource(proc_region=self.proc_region,
dram_region=self.dram_region,
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=0,
dram_bandwidth=128,
no_time_mux=False,
)
def test_invalid_dram_bandwidth(self):
''' Invalid dram_bandwidth. '''
with self.assertRaisesRegexp(TypeError,
'Resource: .*dram_bandwidth.*'):
_ = Resource(proc_region=self.proc_region,
dram_region=self.dram_region,
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=8,
dram_bandwidth=None,
no_time_mux=False,
)
with self.assertRaisesRegexp(ValueError,
'Resource: .*dram_bandwidth.*'):
_ = Resource(proc_region=self.proc_region,
dram_region=self.dram_region,
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=8,
dram_bandwidth=-3,
no_time_mux=False,
)
with self.assertRaisesRegexp(ValueError,
'Resource: .*dram_bandwidth.*'):
_ = Resource(proc_region=self.proc_region,
dram_region=self.dram_region,
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=8,
dram_bandwidth=0,
no_time_mux=False,
)
def test_invalid_no_time_mux(self):
''' Invalid no_time_mux. '''
with self.assertRaisesRegexp(TypeError,
'Resource: .*no_time_mux.*'):
_ = Resource(proc_region=self.proc_region,
dram_region=self.dram_region,
src_data_region=self.src_data_region,
dst_data_region=self.dst_data_region,
dim_array=PhyDim2(16, 16),
size_gbuf=131072,
size_regf=512,
array_bus_width=8,
dram_bandwidth=128,
no_time_mux=None,
)
|
[
"nn_dataflow.core.Resource",
"nn_dataflow.core.PhyDim2"
] |
[((7233, 7501), 'nn_dataflow.core.Resource', 'Resource', ([], {'proc_region': 'self.proc_region', 'dram_region': 'self.dram_region', 'src_data_region': 'self.src_data_region', 'dst_data_region': 'self.dst_data_region', 'dim_array': '(16, 16)', 'size_gbuf': '(131072)', 'size_regf': '(512)', 'array_bus_width': '(8)', 'dram_bandwidth': '(128)', 'no_time_mux': '(False)'}), '(proc_region=self.proc_region, dram_region=self.dram_region,\n src_data_region=self.src_data_region, dst_data_region=self.\n dst_data_region, dim_array=(16, 16), size_gbuf=131072, size_regf=512,\n array_bus_width=8, dram_bandwidth=128, no_time_mux=False)\n', (7241, 7501), False, 'from nn_dataflow.core import Resource\n'), ((887, 900), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(2)', '(2)'], {}), '(2, 2)\n', (894, 900), False, 'from nn_dataflow.core import PhyDim2\n'), ((909, 922), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(0)', '(0)'], {}), '(0, 0)\n', (916, 922), False, 'from nn_dataflow.core import PhyDim2\n'), ((1026, 1039), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(2)', '(2)'], {}), '(2, 2)\n', (1033, 1039), False, 'from nn_dataflow.core import PhyDim2\n'), ((1048, 1061), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(0)', '(0)'], {}), '(0, 0)\n', (1055, 1061), False, 'from nn_dataflow.core import PhyDim2\n'), ((1169, 1182), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(2)', '(1)'], {}), '(2, 1)\n', (1176, 1182), False, 'from nn_dataflow.core import PhyDim2\n'), ((1233, 1246), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(0)', '(0)'], {}), '(0, 0)\n', (1240, 1246), False, 'from nn_dataflow.core import PhyDim2\n'), ((1358, 1371), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(2)', '(1)'], {}), '(2, 1)\n', (1365, 1371), False, 'from nn_dataflow.core import PhyDim2\n'), ((1422, 1435), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(0)', '(1)'], {}), '(0, 1)\n', (1429, 1435), False, 'from nn_dataflow.core import PhyDim2\n'), ((1852, 1867), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (1859, 1867), False, 'from nn_dataflow.core import PhyDim2\n'), ((2890, 2903), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(2)', '(2)'], {}), '(2, 2)\n', (2897, 2903), False, 'from nn_dataflow.core import PhyDim2\n'), ((3121, 3136), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (3128, 3136), False, 'from nn_dataflow.core import PhyDim2\n'), ((3981, 3996), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (3988, 3996), False, 'from nn_dataflow.core import PhyDim2\n'), ((4488, 4501), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(2)', '(2)'], {}), '(2, 2)\n', (4495, 4501), False, 'from nn_dataflow.core import PhyDim2\n'), ((4664, 4679), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (4671, 4679), False, 'from nn_dataflow.core import PhyDim2\n'), ((5524, 5539), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (5531, 5539), False, 'from nn_dataflow.core import PhyDim2\n'), ((6096, 6109), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(2)', '(1)'], {}), '(2, 1)\n', (6103, 6109), False, 'from nn_dataflow.core import PhyDim2\n'), ((6209, 6224), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (6216, 6224), False, 'from nn_dataflow.core import PhyDim2\n'), ((6758, 6771), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(2)', '(1)'], {}), '(2, 1)\n', (6765, 6771), False, 'from nn_dataflow.core import PhyDim2\n'), ((6808, 6823), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (6815, 6823), 
False, 'from nn_dataflow.core import PhyDim2\n'), ((8161, 8176), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (8168, 8176), False, 'from nn_dataflow.core import PhyDim2\n'), ((8844, 8859), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (8851, 8859), False, 'from nn_dataflow.core import PhyDim2\n'), ((9582, 9597), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (9589, 9597), False, 'from nn_dataflow.core import PhyDim2\n'), ((10234, 10249), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (10241, 10249), False, 'from nn_dataflow.core import PhyDim2\n'), ((10885, 10900), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (10892, 10900), False, 'from nn_dataflow.core import PhyDim2\n'), ((11617, 11632), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (11624, 11632), False, 'from nn_dataflow.core import PhyDim2\n'), ((12267, 12282), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (12274, 12282), False, 'from nn_dataflow.core import PhyDim2\n'), ((12915, 12930), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (12922, 12930), False, 'from nn_dataflow.core import PhyDim2\n'), ((13636, 13651), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(16)', '(16)'], {}), '(16, 16)\n', (13643, 13651), False, 'from nn_dataflow.core import PhyDim2\n'), ((3609, 3622), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(2)', '(2)'], {}), '(2, 2)\n', (3616, 3622), False, 'from nn_dataflow.core import PhyDim2\n'), ((3679, 3692), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(0)', '(0)'], {}), '(0, 0)\n', (3686, 3692), False, 'from nn_dataflow.core import PhyDim2\n'), ((5207, 5220), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(2)', '(2)'], {}), '(2, 2)\n', (5214, 5220), False, 'from nn_dataflow.core import PhyDim2\n'), ((5277, 5290), 'nn_dataflow.core.PhyDim2', 'PhyDim2', (['(0)', '(0)'], {}), '(0, 0)\n', (5284, 5290), False, 'from nn_dataflow.core import PhyDim2\n')]
|
from .transform import Transform
from torch.nn import Parameter
import torch
class Affine(Transform):
def __init__(self, loc=0.0, scale=1.0, learnable=True):
super().__init__()
if not isinstance(loc, torch.Tensor):
loc = torch.tensor(loc).view(1, -1)
if not isinstance(scale, torch.Tensor):
scale = torch.tensor(scale).view(1, -1)
self.loc = loc.float()
self.scale = scale.float()
self.n_dims = len(loc)
if learnable:
self.loc = Parameter(self.loc)
self.scale = Parameter(self.scale)
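    # Descriptive note (added): the forward map below is the affine bijection
    # y = loc + scale * x, its inverse is x = (y - loc) / scale, and the
    # log|det Jacobian| reduces to sum(log|scale|) over the last dimension,
    # which is exactly what log_abs_det_jacobian computes.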
def forward(self, x):
return self.loc + self.scale * x
def inverse(self, y):
return (y - self.loc) / self.scale
def log_abs_det_jacobian(self, x, y):
return torch.log(torch.abs(self.scale.expand(x.size()))).sum(-1)
def get_parameters(self):
return {'type':'affine', 'loc':self.loc.detach().numpy(),
'scale':self.scale.detach().numpy()}
|
[
"torch.nn.Parameter",
"torch.tensor"
] |
[((528, 547), 'torch.nn.Parameter', 'Parameter', (['self.loc'], {}), '(self.loc)\n', (537, 547), False, 'from torch.nn import Parameter\n'), ((573, 594), 'torch.nn.Parameter', 'Parameter', (['self.scale'], {}), '(self.scale)\n', (582, 594), False, 'from torch.nn import Parameter\n'), ((256, 273), 'torch.tensor', 'torch.tensor', (['loc'], {}), '(loc)\n', (268, 273), False, 'import torch\n'), ((354, 373), 'torch.tensor', 'torch.tensor', (['scale'], {}), '(scale)\n', (366, 373), False, 'import torch\n')]
|
from builtins import sorted
from itertools import cycle
from unittest.mock import patch, Mock
from pyclarity_lims.entities import Sample
from scripts.copy_samples import Container
from scripts.copy_samples import CopySamples
from tests.test_common import TestEPP, FakeEntitiesMaker
class TestCopySamples(TestEPP):
mocked_step = Mock(details=Mock(udf={}), actions=Mock(next_actions=[{}]))
patched_get_workflow_stage = patch('scripts.copy_samples.get_workflow_stage', return_value=Mock(uri='a_uri',
step=mocked_step))
patched_create_batch = patch('lims.copy_samples.create_batch', return_value=True)
@staticmethod
def get_patch_create_container(container):
return patch.object(Container, 'create', return_value=container)
def setUp(self):
self.epp = CopySamples(self.default_argv)
self.fem_params = {
'nb_input': 2,
'project_name': 'X99999',
'process_id': '99-9999',
'input_container_name': 'X99999P001',
'sample_name': cycle(['X99999P001A01',
'X99999P001B01']),
'sample_udfs': {
'Prep Workflow': cycle(['TruSeq Nano DNA Sample Prep', 'TruSeq PCR-Free DNA Sample Prep']),
'Coverage (X)': cycle([30, 60]),
'Required Yield (Gb)': cycle([120, 240]),
'Delivery': cycle(['merged', 'split']),
'Analysis Type': cycle(['Variant Calling gatk', 'None']),
'Rapid Analysis': cycle(['No', 'Yes']),
'User Prepared Library': cycle(['No', 'Yes']),
'Species': cycle(['Homo sapiens', 'Mus musculus']),
'Genome Version': cycle(['hg38', 'hg19']),
},
'step_udfs': {'Container Type': '96 well plate'},
            'output_per_input': 0
}
def test_copy_samples(self):
fem = FakeEntitiesMaker()
self.epp.lims = fem.lims
self.epp.process = fem.create_a_fake_process(**self.fem_params)
self.epp.lims.get_containers = Mock(return_value=[])
self.workflow_stage = Mock(uri='a_uri')
self.patch_Step_create = patch('scripts.copy_samples.Step.create', return_value=self.mocked_step)
with self.get_patch_create_container(fem.create_a_fake_container(container_name='X99999P002')), \
self.patched_get_workflow_stage as pws, self.patch_Step_create as psc:
self.epp._run()
expected_create_samples_list = [{
'container': fem.object_store_per_type['Container'][1],
'project': fem.object_store_per_type['Project'][0],
'name': 'X99999P002A01', 'position': 'A:1',
'udf': {'Prep Workflow': 'TruSeq Nano DNA Sample Prep',
'Coverage (X)': 30,
'Required Yield (Gb)': 120,
'Delivery': 'merged',
'User Prepared Library': 'No',
'Analysis Type': 'Variant Calling gatk',
'Rapid Analysis': 'No',
'Species': 'Homo sapiens',
'Genome Version': 'hg38',
}},
{
'container': fem.object_store_per_type['Container'][1],
'project': fem.object_store_per_type['Project'][0],
'name': 'X99999P002B01',
'position': 'B:1',
'udf': {'Prep Workflow': 'TruSeq PCR-Free DNA Sample Prep',
'Coverage (X)': 60,
'Required Yield (Gb)': 240,
'Delivery': 'split',
'Analysis Type': 'None',
'User Prepared Library': 'Yes',
'Rapid Analysis': 'Yes',
'Species': 'Mus musculus',
'Genome Version': 'hg19',
}},
]
self.epp.lims.create_batch.assert_called_once_with(Sample, expected_create_samples_list)
pws.assert_any_call(self.epp.lims, 'PreSeqLab EG2.1 WF', 'Create Manifest EG 1.0 ST')
pws.assert_any_call(self.epp.lims, "Remove From Processing EG 1.0 WF", "Remove From Processing EG 1.0 ST")
# test step creation
inputs_project_step_creation = []
inputs_project_step_creation_dict = {
self.epp.artifacts[0].samples[0].artifact.name: self.epp.artifacts[0].samples[0].artifact,
self.epp.artifacts[1].samples[0].artifact.name: self.epp.artifacts[1].samples[0].artifact}
for input in sorted(inputs_project_step_creation_dict):
inputs_project_step_creation.append(inputs_project_step_creation_dict[input])
psc.assert_called_with(
self.epp.lims,
inputs=inputs_project_step_creation,
protocol_step=self.mocked_step,
container_type_name='Tube'
)
|
[
"unittest.mock.patch.object",
"builtins.sorted",
"unittest.mock.Mock",
"tests.test_common.FakeEntitiesMaker",
"unittest.mock.patch",
"scripts.copy_samples.CopySamples",
"itertools.cycle"
] |
[((655, 713), 'unittest.mock.patch', 'patch', (['"""lims.copy_samples.create_batch"""'], {'return_value': '(True)'}), "('lims.copy_samples.create_batch', return_value=True)\n", (660, 713), False, 'from unittest.mock import patch, Mock\n'), ((795, 852), 'unittest.mock.patch.object', 'patch.object', (['Container', '"""create"""'], {'return_value': 'container'}), "(Container, 'create', return_value=container)\n", (807, 852), False, 'from unittest.mock import patch, Mock\n'), ((894, 924), 'scripts.copy_samples.CopySamples', 'CopySamples', (['self.default_argv'], {}), '(self.default_argv)\n', (905, 924), False, 'from scripts.copy_samples import CopySamples\n'), ((2036, 2055), 'tests.test_common.FakeEntitiesMaker', 'FakeEntitiesMaker', ([], {}), '()\n', (2053, 2055), False, 'from tests.test_common import TestEPP, FakeEntitiesMaker\n'), ((2200, 2221), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '[]'}), '(return_value=[])\n', (2204, 2221), False, 'from unittest.mock import patch, Mock\n'), ((2252, 2269), 'unittest.mock.Mock', 'Mock', ([], {'uri': '"""a_uri"""'}), "(uri='a_uri')\n", (2256, 2269), False, 'from unittest.mock import patch, Mock\n'), ((2303, 2375), 'unittest.mock.patch', 'patch', (['"""scripts.copy_samples.Step.create"""'], {'return_value': 'self.mocked_step'}), "('scripts.copy_samples.Step.create', return_value=self.mocked_step)\n", (2308, 2375), False, 'from unittest.mock import patch, Mock\n'), ((4690, 4731), 'builtins.sorted', 'sorted', (['inputs_project_step_creation_dict'], {}), '(inputs_project_step_creation_dict)\n', (4696, 4731), False, 'from builtins import sorted\n'), ((349, 361), 'unittest.mock.Mock', 'Mock', ([], {'udf': '{}'}), '(udf={})\n', (353, 361), False, 'from unittest.mock import patch, Mock\n'), ((371, 394), 'unittest.mock.Mock', 'Mock', ([], {'next_actions': '[{}]'}), '(next_actions=[{}])\n', (375, 394), False, 'from unittest.mock import patch, Mock\n'), ((491, 526), 'unittest.mock.Mock', 'Mock', ([], {'uri': '"""a_uri"""', 'step': 'mocked_step'}), "(uri='a_uri', step=mocked_step)\n", (495, 526), False, 'from unittest.mock import patch, Mock\n'), ((1132, 1173), 'itertools.cycle', 'cycle', (["['X99999P001A01', 'X99999P001B01']"], {}), "(['X99999P001A01', 'X99999P001B01'])\n", (1137, 1173), False, 'from itertools import cycle\n'), ((1271, 1344), 'itertools.cycle', 'cycle', (["['TruSeq Nano DNA Sample Prep', 'TruSeq PCR-Free DNA Sample Prep']"], {}), "(['TruSeq Nano DNA Sample Prep', 'TruSeq PCR-Free DNA Sample Prep'])\n", (1276, 1344), False, 'from itertools import cycle\n'), ((1378, 1393), 'itertools.cycle', 'cycle', (['[30, 60]'], {}), '([30, 60])\n', (1383, 1393), False, 'from itertools import cycle\n'), ((1434, 1451), 'itertools.cycle', 'cycle', (['[120, 240]'], {}), '([120, 240])\n', (1439, 1451), False, 'from itertools import cycle\n'), ((1481, 1507), 'itertools.cycle', 'cycle', (["['merged', 'split']"], {}), "(['merged', 'split'])\n", (1486, 1507), False, 'from itertools import cycle\n'), ((1542, 1581), 'itertools.cycle', 'cycle', (["['Variant Calling gatk', 'None']"], {}), "(['Variant Calling gatk', 'None'])\n", (1547, 1581), False, 'from itertools import cycle\n'), ((1617, 1637), 'itertools.cycle', 'cycle', (["['No', 'Yes']"], {}), "(['No', 'Yes'])\n", (1622, 1637), False, 'from itertools import cycle\n'), ((1680, 1700), 'itertools.cycle', 'cycle', (["['No', 'Yes']"], {}), "(['No', 'Yes'])\n", (1685, 1700), False, 'from itertools import cycle\n'), ((1729, 1768), 'itertools.cycle', 'cycle', (["['Homo sapiens', 'Mus musculus']"], {}), "(['Homo 
sapiens', 'Mus musculus'])\n", (1734, 1768), False, 'from itertools import cycle\n'), ((1804, 1827), 'itertools.cycle', 'cycle', (["['hg38', 'hg19']"], {}), "(['hg38', 'hg19'])\n", (1809, 1827), False, 'from itertools import cycle\n')]
|
#
# Object detector (by sequential file read from directory)
#
import os
import sys
import random
import math
import re
import time
import numpy as np
import tensorflow as tf
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from glob import glob
import argparse
import skimage
import shutil
# Root directory of the project
ROOT_DIR = os.path.abspath("../../")
# Import Mask RCNN
sys.path.append(ROOT_DIR) # To find local version of the library
from mrcnn import utils
from mrcnn import visualize
from mrcnn.visualize import display_images
import mrcnn.model as modellib
from mrcnn.model import log
#from samples.cats_dogs import cats_dogs
from samples.miyukiCamera import miyukiCamera
# Directory to save logs and trained model
MODEL_DIR = os.path.join(ROOT_DIR, "logs")
def process():
class InferenceConfig(miyukiCamera.MiyukiCameraConfig):
# Set batch size to 1 since we'll be running inference on
# one image at a time. Batch size = GPU_COUNT * IMAGES_PER_GPU
GPU_COUNT = 1
IMAGES_PER_GPU = 1
config = InferenceConfig()
#config.display()
# Device to load the neural network on.
# Useful if you're training a model on the same
# machine, in which case use CPU and leave the
# GPU for training.
DEVICE = "/gpu:0" # /cpu:0 or /gpu:0
# Inspect the model in training or inference modes
# values: 'inference' or 'training'
# TODO: code for 'training' test mode not ready yet
TEST_MODE = "inference"
# set model
# Create model in inference mode
with tf.device(DEVICE):
model = modellib.MaskRCNN(mode="inference", config=config,
model_dir=MODEL_DIR)
# Or, load the last model you trained
weights_path = model.find_last()
# Load weights
print("Loading weights ", weights_path)
model.load_weights(weights_path, by_name=True)
return model, config
def detector(model,config, dataset, DATA_DIR):
MRCNN_DATA_DIR = "/".join( DATA_DIR.split('/')[:-1] )
    MRCNN_DATA_DIR = os.path.join( MRCNN_DATA_DIR, "mrcnn_image")
    os.makedirs(MRCNN_DATA_DIR, exist_ok=True)  # ensure the copy destination exists before shutil.copy
    print(MRCNN_DATA_DIR)
images = glob( os.path.join(DATA_DIR, "*.jpg") )
print("* total length of images : ", len(images) )
for f in images:
print("Running on {}".format(f))
# Read image
image = skimage.io.imread(f)
# Detect objects
results = model.detect([image], verbose=1)
r = results[0]
print("- " * 40 )
print("Scores --> ", r['scores'])
print("found Class Names --> ", [dataset.class_info[i]["name"] for i in r['class_ids']] )
classes = [dataset.class_info[i]["name"] for i in r['class_ids']]
if "prescription" in classes:
print("found prescription on %s" % f.split("/")[-1])
image_file = f.split("/")[-1]
shutil.copy( f, os.path.join( MRCNN_DATA_DIR, image_file ) )
def main():
# Parse command line arguments
parser = argparse.ArgumentParser(
description='Sequential Reading File Object Detector.')
parser.add_argument('--dataset', required=True,
metavar="/path/to/balloon/dataset",
help='Directory of the target dataset to detect')
args = parser.parse_args()
    assert args.dataset, \
        "Provide --dataset directory to apply detector"
model, config = process()
dataset = miyukiCamera.MiyukiCameraDataset()
DATA_DIR = args.dataset
detector(model, config, dataset, DATA_DIR)
if __name__ == "__main__":
main()
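# Invocation sketch (the script name is whatever this module is saved as;
# only --dataset is required):
#   python <this_script>.py --dataset /path/to/images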
|
[
"sys.path.append",
"os.path.abspath",
"argparse.ArgumentParser",
"skimage.io.imread",
"tensorflow.device",
"mrcnn.model.MaskRCNN",
"os.path.join",
"samples.miyukiCamera.miyukiCamera.MiyukiCameraDataset"
] |
[((374, 399), 'os.path.abspath', 'os.path.abspath', (['"""../../"""'], {}), "('../../')\n", (389, 399), False, 'import os\n'), ((420, 445), 'sys.path.append', 'sys.path.append', (['ROOT_DIR'], {}), '(ROOT_DIR)\n', (435, 445), False, 'import sys\n'), ((785, 815), 'os.path.join', 'os.path.join', (['ROOT_DIR', '"""logs"""'], {}), "(ROOT_DIR, 'logs')\n", (797, 815), False, 'import os\n'), ((2086, 2129), 'os.path.join', 'os.path.join', (['MRCNN_DATA_DIR', '"""mrcnn_image"""'], {}), "(MRCNN_DATA_DIR, 'mrcnn_image')\n", (2098, 2129), False, 'import os\n'), ((3030, 3109), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Sequential Reading File Object Detector."""'}), "(description='Sequential Reading File Object Detector.')\n", (3053, 3109), False, 'import argparse\n'), ((3473, 3507), 'samples.miyukiCamera.miyukiCamera.MiyukiCameraDataset', 'miyukiCamera.MiyukiCameraDataset', ([], {}), '()\n', (3505, 3507), False, 'from samples.miyukiCamera import miyukiCamera\n'), ((1590, 1607), 'tensorflow.device', 'tf.device', (['DEVICE'], {}), '(DEVICE)\n', (1599, 1607), True, 'import tensorflow as tf\n'), ((1629, 1700), 'mrcnn.model.MaskRCNN', 'modellib.MaskRCNN', ([], {'mode': '"""inference"""', 'config': 'config', 'model_dir': 'MODEL_DIR'}), "(mode='inference', config=config, model_dir=MODEL_DIR)\n", (1646, 1700), True, 'import mrcnn.model as modellib\n'), ((2181, 2212), 'os.path.join', 'os.path.join', (['DATA_DIR', '"""*.jpg"""'], {}), "(DATA_DIR, '*.jpg')\n", (2193, 2212), False, 'import os\n'), ((2376, 2396), 'skimage.io.imread', 'skimage.io.imread', (['f'], {}), '(f)\n', (2393, 2396), False, 'import skimage\n'), ((2918, 2958), 'os.path.join', 'os.path.join', (['MRCNN_DATA_DIR', 'image_file'], {}), '(MRCNN_DATA_DIR, image_file)\n', (2930, 2958), False, 'import os\n')]
|
from django.test import TestCase
from django.test import Client
class HelloWorldTestCase(TestCase):
"""Hello world tests"""
def test_successful_case(self):
"""Successful test case"""
self.assertTrue(True)
def test_http_request(self):
client = Client()
response = client.get("/")
self.assertEqual(200, response.status_code)
self.assertEqual(response.content.decode(), "Hello world from Django! You're at the index. Bingo")
|
[
"django.test.Client"
] |
[((283, 291), 'django.test.Client', 'Client', ([], {}), '()\n', (289, 291), False, 'from django.test import Client\n')]
|
import os
import sys
import logging
import importlib
from ast import literal_eval
from copy import deepcopy
from collections import defaultdict
from collections import namedtuple, Counter
from modelmapper.misc import read_csv_gen, load_toml, camel_to_snake
from modelmapper.slack import slack
OVERRIDES_FILE_NAME = "{}_overrides.toml"
COMBINED_FILE_NAME = "{}_combined.py"
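# For example, a setup file named "client_x_setup.toml" yields the identifier
# "client_x", so the templates above expand to "client_x_overrides.toml" and
# "client_x_combined.py" ("client_x" is only an illustrative name).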
class Base:
logger = logging.getLogger(__name__)
SETUP_PATH = None
def __init__(self, setup_path=None, debug=False):
self.setup_path = setup_path or getattr(self, 'SETUP_PATH', None)
if self.setup_path is None:
raise ValueError('setup_path needs to be passed to init or SETUP_PATH needs to be a class attribute.')
if not self.setup_path.endswith('_setup.toml'):
raise ValueError('The path needs to end with _setup.toml')
self.debug = debug
self.setup_dir = os.path.dirname(self.setup_path)
sys.path.append(self.setup_dir)
clean_later = ['field_name_full_conversion', 'ignore_fields_in_signature_calculation',
'identify_header_by_column_names', 'fields_to_be_encrypted', 'fields_to_be_scrubbed']
convert_to_set = ['null_values', 'boolean_true', 'boolean_false', 'datetime_formats',
'ignore_lines_that_include_only_subset_of',
'ignore_fields_in_signature_calculation', 'identify_header_by_column_names']
self._original_settings = load_toml(self.setup_path)['settings']
self.settings = deepcopy(self._original_settings)
for item in clean_later:
self._clean_settings_items(item)
for item in convert_to_set:
self.settings[item] = set(self.settings.get(item, []))
key = 'default_value_for_field_when_casting_error'
self.settings[key] = self.settings.get(key) or r'{}'
self.settings[key] = {self._clean_it(i): v for i, v in literal_eval(self.settings[key]).items()}
slack_http_endpoint = self.settings['slack_http_endpoint']
# attempt to get passed in value from ENV VAR, defaulting to passed in value if not present
slack_http_endpoint = os.environ.get(slack_http_endpoint, slack_http_endpoint)
self.settings['should_reprocess'] = self.settings.get('should_reprocess', False)
self.settings['slack_http_endpoint'] = slack_http_endpoint
self.settings['identifier'] = identifier = os.path.basename(self.setup_path).replace('_setup.toml', '')
self.settings['overrides_file_name'] = OVERRIDES_FILE_NAME.format(identifier)
self.settings['combined_file_name'] = COMBINED_FILE_NAME.format(identifier)
self.settings['booleans'] = self.settings['boolean_true'] | self.settings['boolean_false']
self.settings['datetime_allowed_characters'] = set(self.settings['datetime_allowed_characters'])
for i, v in (('overrides_path', 'overrides_file_name'),
('combined_path', 'combined_file_name'),
('output_model_path', 'output_model_file')):
self.settings[i] = os.path.join(self.setup_dir, self.settings[v])
        # Since we are cleaning up the field_name_part_conversion, special characters
        # such as \n need to be added separately.
# self.settings['field_name_part_conversion'].insert(0, ['\n', '_']).insert(0, ['\r\n', '_'])
_max_int = ((i, int(v)) for i, v in self.settings['max_int'].items())
self.settings['max_int'] = dict(sorted(_max_int, key=lambda x: x[1]))
Settings = namedtuple('Settings', ' '.join(self.settings.keys()))
self.settings = Settings(**self.settings)
self.questionable_fields = {}
self.solid_decisions = {}
self.failed_to_infer_fields = set()
self.empty_fields = set()
def _clean_it(self, item):
conv = (self.settings['field_name_part_conversion'] if isinstance(self.settings, dict)
else self.settings.field_name_part_conversion)
item = item.replace('\r\n', '_').replace('\n', '_')
item = camel_to_snake(item)
for source, to_replace in conv:
item = item.replace(source, to_replace)
return item.strip('_')
def _clean_settings_items(self, item):
"""
Normalizes list or nested lists
"""
if item not in self.settings:
self.settings[item] = []
try:
first_value = self.settings[item][0]
except IndexError:
pass
else:
if isinstance(first_value, list):
self.settings[item] = [[self._clean_it(i), self._clean_it(j)] for i, j in self.settings[item]]
else:
self.settings[item] = list(map(self._clean_it, self.settings[item]))
def _get_clean_field_name(self, name):
item = self._clean_it(name)
for source, to_replace in self.settings.field_name_full_conversion:
if item == source:
item = to_replace
break
return item
def _get_all_clean_field_names_mapping(self, names):
name_mapping = {}
for name in names:
name_mapping[name] = self._get_clean_field_name(name)
return name_mapping
def _get_combined_module(self):
combined_module_str = self.settings.combined_file_name[:-3]
return importlib.import_module(combined_module_str)
def _verify_no_duplicate_clean_names(self, names_mapping):
clean_names_mapping = {}
for name, clean_name in names_mapping.items():
if clean_name in clean_names_mapping:
raise ValueError(f"'{name}' field has a collision with '{clean_names_mapping[clean_name]}'. "
f"They both produce '{clean_name}'")
else:
clean_names_mapping[clean_name] = name
def _does_line_include_data(self, line):
# whether line has any characters in it that are not in ignore_lines_that_include_only_subset_of
return any(filter(lambda x: set(x.strip()) - self.settings.ignore_lines_that_include_only_subset_of, line))
def _verify_no_duplicate_names(self, names):
counter = Counter(names)
duplicates = {i: v for i, v in counter.most_common(10) if v > 1}
if duplicates:
raise ValueError(f'The following fields were repeated in the csv: {duplicates}')
def _get_clean_names_and_csv_data_gen(self, path):
reader = read_csv_gen(path,
identify_header_by_column_names=self.settings.identify_header_by_column_names,
cleaning_func=self._clean_it)
names = next(reader)
self._verify_no_duplicate_names(names)
name_mapping = self._get_all_clean_field_names_mapping(names)
self._verify_no_duplicate_clean_names(name_mapping)
clean_names = list(name_mapping.values())
return clean_names, reader
def _get_all_values_per_clean_name(self, path):
result = defaultdict(list)
clean_names, reader = self._get_clean_names_and_csv_data_gen(path)
# transposing csv and turning into dictionary
for line in reader:
if self._does_line_include_data(line):
for i, v in enumerate(line):
try:
field_name = clean_names[i]
except IndexError:
raise ValueError("Your data might have new lines in the field names. "
"Please fix that and try again.")
else:
if field_name not in self.settings.fields_to_be_scrubbed:
result[field_name].append(v)
return result
def slack(self, text):
if self.settings.slack_username and \
self.settings.slack_channel and \
self.settings.slack_http_endpoint:
return slack(
text,
username=self.settings.slack_username,
channel=self.settings.slack_channel,
slack_http_endpoint=self.settings.slack_http_endpoint
)
|
[
"sys.path.append",
"copy.deepcopy",
"importlib.import_module",
"os.path.basename",
"os.path.dirname",
"modelmapper.misc.camel_to_snake",
"modelmapper.misc.read_csv_gen",
"os.environ.get",
"collections.defaultdict",
"modelmapper.misc.load_toml",
"modelmapper.slack.slack",
"ast.literal_eval",
"collections.Counter",
"os.path.join",
"logging.getLogger"
] |
[((405, 432), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (422, 432), False, 'import logging\n'), ((914, 946), 'os.path.dirname', 'os.path.dirname', (['self.setup_path'], {}), '(self.setup_path)\n', (929, 946), False, 'import os\n'), ((955, 986), 'sys.path.append', 'sys.path.append', (['self.setup_dir'], {}), '(self.setup_dir)\n', (970, 986), False, 'import sys\n'), ((1555, 1588), 'copy.deepcopy', 'deepcopy', (['self._original_settings'], {}), '(self._original_settings)\n', (1563, 1588), False, 'from copy import deepcopy\n'), ((2192, 2248), 'os.environ.get', 'os.environ.get', (['slack_http_endpoint', 'slack_http_endpoint'], {}), '(slack_http_endpoint, slack_http_endpoint)\n', (2206, 2248), False, 'import os\n'), ((4090, 4110), 'modelmapper.misc.camel_to_snake', 'camel_to_snake', (['item'], {}), '(item)\n', (4104, 4110), False, 'from modelmapper.misc import read_csv_gen, load_toml, camel_to_snake\n'), ((5386, 5430), 'importlib.import_module', 'importlib.import_module', (['combined_module_str'], {}), '(combined_module_str)\n', (5409, 5430), False, 'import importlib\n'), ((6221, 6235), 'collections.Counter', 'Counter', (['names'], {}), '(names)\n', (6228, 6235), False, 'from collections import namedtuple, Counter\n'), ((6498, 6630), 'modelmapper.misc.read_csv_gen', 'read_csv_gen', (['path'], {'identify_header_by_column_names': 'self.settings.identify_header_by_column_names', 'cleaning_func': 'self._clean_it'}), '(path, identify_header_by_column_names=self.settings.\n identify_header_by_column_names, cleaning_func=self._clean_it)\n', (6510, 6630), False, 'from modelmapper.misc import read_csv_gen, load_toml, camel_to_snake\n'), ((7047, 7064), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (7058, 7064), False, 'from collections import defaultdict\n'), ((1492, 1518), 'modelmapper.misc.load_toml', 'load_toml', (['self.setup_path'], {}), '(self.setup_path)\n', (1501, 1518), False, 'from modelmapper.misc import read_csv_gen, load_toml, camel_to_snake\n'), ((3114, 3160), 'os.path.join', 'os.path.join', (['self.setup_dir', 'self.settings[v]'], {}), '(self.setup_dir, self.settings[v])\n', (3126, 3160), False, 'import os\n'), ((7975, 8122), 'modelmapper.slack.slack', 'slack', (['text'], {'username': 'self.settings.slack_username', 'channel': 'self.settings.slack_channel', 'slack_http_endpoint': 'self.settings.slack_http_endpoint'}), '(text, username=self.settings.slack_username, channel=self.settings.\n slack_channel, slack_http_endpoint=self.settings.slack_http_endpoint)\n', (7980, 8122), False, 'from modelmapper.slack import slack\n'), ((2456, 2489), 'os.path.basename', 'os.path.basename', (['self.setup_path'], {}), '(self.setup_path)\n', (2472, 2489), False, 'import os\n'), ((1953, 1985), 'ast.literal_eval', 'literal_eval', (['self.settings[key]'], {}), '(self.settings[key])\n', (1965, 1985), False, 'from ast import literal_eval\n')]
|
"""
This file contains the logic to generate the master dataset for the INDDEX reports
Overview
--------
Beneficiaries are asked about their diet in a "recall" session. This results in
a "foodrecall" case. Every food they mention results in the creation of a "food"
case that's a child of this foodrecall.
This dataset has a row for every food, with metadata about the recall session,
calculated nutritional information, and auditing columns reporting on what data
is or isn't available. Some of these foods are recipes, and their ingredients
appear as separate rows in the report.
Standard recipes have their ingredients enumerated in the "recipes" lookup
table. This dataset has additional rows inserted for each ingredient. These
rows are associated with the recipe case, but don't have a case of their own.
Nonstandard recipes are defined by the user and beneficiary during a recall
session. The ingredients of the recipe are entered as additional food cases and
linked to the recipe by `recipe_case_id`.
Beneficiaries may report eating a nonstandard recipe more than once, in which
case subsequent references point to the recipe definition with
already_reported_recipe_case_id and don't enumerate the ingredients again. We
need to insert duplicates of the previously reported ingredients into the
report for them.
Components
----------
FoodData :: This is the interface to this dataset; it glues together all the
component pieces and presents the result as a unified dataset.
FoodRow :: Class responsible for row-wise calculations and indicator definitions.
"""
import operator
import uuid
from collections import defaultdict
from functools import reduce
from memoized import memoized
from corehq.apps.es import users as user_es
from corehq.apps.reports.filters.case_list import CaseListFilter as EMWF
from corehq.apps.reports.standard.cases.utils import get_case_owners
from custom.inddex.ucr_data import FoodCaseData
from .const import (
AGE_RANGES,
FOOD_ITEM,
NON_STANDARD_RECIPE,
STANDARD_RECIPE,
ConvFactorGaps,
FctGaps,
)
from .fixtures import FixtureAccessor
IN_UCR = 'in_ucr'
IN_FOOD_FIXTURE = 'in_food_fixture'
IS_RECALL_META = 'is_recall_meta'
CALCULATED_LATER = 'calculated_later'
class I:
def __init__(self, slug, *tags):
self.slug = slug
tags = set(tags)
self.in_ucr = IN_UCR in tags
self.in_food_fixture = IN_FOOD_FIXTURE in tags
self.is_recall_meta = IS_RECALL_META in tags
self.is_calculated_later = CALCULATED_LATER in tags
# Indicator descriptions can be found here:
# https://docs.google.com/spreadsheets/d/1znPjfQSFEUFP_R_G8VYE-Bd5dg72k5sP-hZPuy-3RZo/edit
INDICATORS = [
I('unique_respondent_id', IN_UCR, IS_RECALL_META),
I('location_id', IN_UCR, IS_RECALL_META),
I('respondent_id', IN_UCR, IS_RECALL_META),
I('recall_case_id', IN_UCR, IS_RECALL_META),
I('opened_by_username', IN_UCR, IS_RECALL_META),
I('owner_name', IN_UCR, IS_RECALL_META),
I('visit_date', IN_UCR, IS_RECALL_META),
I('opened_on', IN_UCR, IS_RECALL_META),
I('recall_status', IN_UCR, IS_RECALL_META),
I('gender', IN_UCR, IS_RECALL_META),
I('age_years_calculated', IN_UCR, IS_RECALL_META),
I('age_months_calculated', IN_UCR, IS_RECALL_META),
I('age_range', IS_RECALL_META),
I('pregnant', IN_UCR, IS_RECALL_META),
I('breastfeeding', IN_UCR, IS_RECALL_META),
I('urban_rural', IN_UCR, IS_RECALL_META),
I('supplements', IN_UCR, IS_RECALL_META),
I('food_code', IN_UCR),
I('food_name', IN_UCR, IN_FOOD_FIXTURE),
I('recipe_name', IN_UCR, CALCULATED_LATER),
I('caseid'),
I('food_type', IN_UCR, IN_FOOD_FIXTURE),
I('food_status', IN_UCR, IS_RECALL_META),
I('reference_food_code'),
I('base_term_food_code', IN_UCR),
I('include_in_analysis'),
I('fao_who_gift_food_group_code'),
I('fao_who_gift_food_group_description'),
I('user_food_group'),
I('eating_time', IN_UCR, IS_RECALL_META),
I('time_block', IN_UCR, IS_RECALL_META),
I('already_reported_food', IN_UCR),
I('already_reported_food_case_id', IN_UCR),
I('already_reported_recipe', IN_UCR),
I('already_reported_recipe_case_id', IN_UCR),
I('already_reported_recipe_name', IN_UCR),
I('is_ingredient', IN_UCR),
I('ingredient_type', CALCULATED_LATER),
I('recipe_case_id', IN_UCR),
I('ingr_recipe_code'),
I('ingr_fraction'),
I('ingr_recipe_total_grams_consumed', CALCULATED_LATER),
I('short_name', IN_UCR),
I('food_base_term', IN_UCR, IN_FOOD_FIXTURE),
I('tag_1', IN_UCR, IN_FOOD_FIXTURE),
I('other_tag_1', IN_UCR),
I('tag_2', IN_UCR, IN_FOOD_FIXTURE),
I('other_tag_2', IN_UCR),
I('tag_3', IN_UCR, IN_FOOD_FIXTURE),
I('other_tag_3', IN_UCR),
I('tag_4', IN_UCR, IN_FOOD_FIXTURE),
I('other_tag_4', IN_UCR),
I('tag_5', IN_UCR, IN_FOOD_FIXTURE),
I('other_tag_5', IN_UCR),
I('tag_6', IN_UCR, IN_FOOD_FIXTURE),
I('other_tag_6', IN_UCR),
I('tag_7', IN_UCR, IN_FOOD_FIXTURE),
I('other_tag_7', IN_UCR),
I('tag_8', IN_UCR, IN_FOOD_FIXTURE),
I('other_tag_8', IN_UCR),
I('tag_9', IN_UCR, IN_FOOD_FIXTURE),
I('other_tag_9', IN_UCR),
I('tag_10', IN_UCR, IN_FOOD_FIXTURE),
I('other_tag_10', IN_UCR),
I('conv_method_code', IN_UCR),
I('conv_method_desc', IN_UCR),
I('conv_option_code', IN_UCR),
I('conv_option_desc', IN_UCR),
I('measurement_amount', IN_UCR),
I('conv_units', IN_UCR),
I('portions', IN_UCR),
I('nsr_conv_method_code_post_cooking', IN_UCR),
I('nsr_conv_method_desc_post_cooking', IN_UCR),
I('nsr_conv_option_code_post_cooking', IN_UCR),
I('nsr_conv_option_desc_post_cooking', IN_UCR),
I('nsr_measurement_amount_post_cooking', IN_UCR),
I('nsr_consumed_cooked_fraction', IN_UCR),
I('recipe_num_ingredients', CALCULATED_LATER),
I('conv_factor_food_code'),
I('conv_factor_base_term_food_code'),
I('conv_factor_used'),
I('conv_factor'),
I('fct_food_code_exists'),
I('fct_base_term_food_code_exists'),
I('fct_reference_food_code_exists'),
I('fct_data_used'),
I('fct_code'),
I('total_grams', CALCULATED_LATER),
I('conv_factor_gap_code'),
I('conv_factor_gap_desc'),
I('fct_gap_code', CALCULATED_LATER),
I('fct_gap_desc', CALCULATED_LATER),
]
_INDICATORS_BY_SLUG = {i.slug: i for i in INDICATORS}
NSR_COLS_TO_COPY = [
'nsr_conv_method_code_post_cooking',
'nsr_conv_method_desc_post_cooking',
'nsr_conv_option_code_post_cooking',
'nsr_conv_option_desc_post_cooking',
'nsr_measurement_amount_post_cooking',
'nsr_consumed_cooked_fraction',
]
class FoodRow:
def __init__(self, ucr_row, fixtures, ingredient=None):
self.uuid = uuid.uuid4()
self.ucr_row = ucr_row
self.fixtures = fixtures
self._is_std_recipe_ingredient = bool(ingredient)
if self._is_std_recipe_ingredient:
self.food_code = ingredient.ingr_code
self._set_ingredient_fields(ingredient)
else:
self.caseid = ucr_row['doc_id']
self.food_code = ucr_row['food_code']
if not self.food_code and self.food_name in self.fixtures.foods_by_name:
self.food_code = self.fixtures.foods_by_name[self.food_name].food_code
if not self.base_term_food_code and self.food_base_term in self.fixtures.foods_by_name:
self.base_term_food_code = self.fixtures.foods_by_name[self.food_base_term].food_code
self._set_composition()
self._set_conversion_factors()
self.is_recipe = self.food_type in (STANDARD_RECIPE, NON_STANDARD_RECIPE)
self.include_in_analysis = not self.is_recipe
self.measurement_amount = _maybe_float(self.measurement_amount)
self.portions = _maybe_float(self.portions)
self.nsr_consumed_cooked_fraction = _maybe_float(self.nsr_consumed_cooked_fraction)
self.enrichment_complete = False
def _set_ingredient_fields(self, ingredient):
if self._is_std_recipe_ingredient:
self.is_ingredient = 'yes'
self.ingr_recipe_code = ingredient.recipe_code
self.ingr_fraction = ingredient.ingr_fraction
def _set_composition(self):
# Get the food composition corresponding to food_code, fall back to base_term_food_code
fct = self.fixtures.food_compositions
self.fct_food_code_exists = bool(self.food_code and self.food_code in fct)
self.fct_base_term_food_code_exists = bool(self.base_term_food_code and self.base_term_food_code in fct)
self.fct_code = None
if self.fct_food_code_exists:
self.fct_code = self.food_code
self.fct_data_used = 'food_code'
elif self.fct_base_term_food_code_exists:
self.fct_code = self.base_term_food_code
self.fct_data_used = 'base_term_food_code'
if self.fct_code:
self.composition = fct[self.fct_code]
self.fao_who_gift_food_group_code = self.composition.fao_who_gift_food_group_code
self.fao_who_gift_food_group_description = self.composition.fao_who_gift_food_group_description
self.user_food_group = self.composition.user_defined_food_group
self.reference_food_code = self.composition.reference_food_code_for_food_composition
if self.fct_data_used == 'food_code' and self.reference_food_code:
self.fct_data_used = 'reference_food_code'
self.fct_reference_food_code_exists = bool(self.reference_food_code)
def set_fct_gap(self, ingredients=None):
if ingredients:
for row in ingredients:
row.set_fct_gap()
self.fct_gap_code = FctGaps.NOT_AVAILABLE
if self.food_type == FOOD_ITEM and self.fct_code:
self.fct_gap_code = {
'food_code': FctGaps.AVAILABLE,
'base_term_food_code': FctGaps.BASE_TERM,
'reference_food_code': FctGaps.REFERENCE,
}[self.fct_data_used]
if self.is_recipe and ingredients:
if all(i.fct_gap_code == FctGaps.AVAILABLE for i in ingredients):
self.fct_gap_code = FctGaps.AVAILABLE
else:
self.fct_gap_code = FctGaps.INGREDIENT_GAPS
self.fct_gap_desc = FctGaps.DESCRIPTIONS[self.fct_gap_code]
def _set_conversion_factors(self):
self.conv_factor_gap_code = ConvFactorGaps.NOT_AVAILABLE
if (self.food_type == FOOD_ITEM and self._is_std_recipe_ingredient
or self.food_type == NON_STANDARD_RECIPE):
self.conv_factor_gap_code = ConvFactorGaps.NOT_APPLICABLE
elif self.food_type in (FOOD_ITEM, STANDARD_RECIPE) and self.conv_method_code:
self.conv_factor_food_code = self.fixtures.conversion_factors.get(
(self.food_code, self.conv_method_code, self.conv_option_code))
self.conv_factor_base_term_food_code = self.fixtures.conversion_factors.get(
(self.base_term_food_code, self.conv_method_code, self.conv_option_code))
if self.conv_factor_food_code:
self.conv_factor_used = 'food_code'
self.conv_factor = self.conv_factor_food_code
self.conv_factor_gap_code = ConvFactorGaps.AVAILABLE
elif self.conv_factor_base_term_food_code:
self.conv_factor_used = 'base_term_food_code'
self.conv_factor = self.conv_factor_base_term_food_code
self.conv_factor_gap_code = ConvFactorGaps.BASE_TERM
self.conv_factor_gap_desc = ConvFactorGaps.DESCRIPTIONS[self.conv_factor_gap_code]
@property
def age_range(self):
if not self.age_months_calculated:
return None
for age_range in AGE_RANGES:
if age_range.lower_bound <= getattr(self, age_range.column) < age_range.upper_bound:
return age_range.name
def get_nutrient_per_100g(self, nutrient_name):
if self.fct_code:
return self.composition.nutrients.get(nutrient_name)
def get_nutrient_amt(self, nutrient_name):
return _multiply(self.get_nutrient_per_100g(nutrient_name), self.total_grams, 0.01)
def __getattr__(self, name):
if name in _INDICATORS_BY_SLUG:
indicator = _INDICATORS_BY_SLUG[name]
if indicator.is_calculated_later:
if not self.enrichment_complete:
raise AttributeError(f"{name} hasn't yet been set. It will be "
"calculated outside the scope of FoodRow.")
return None
if self._is_std_recipe_ingredient:
# If it's an indicator that hasn't been explicitly set, check if it can
# be pulled from the food fixture or from the parent food case's UCR
if indicator.in_food_fixture:
return getattr(self.fixtures.foods[self.food_code], indicator.slug)
if indicator.is_recall_meta:
return self.ucr_row[indicator.slug]
return None
else:
# If it's an indicator in the UCR that hasn't been explicitly set, return that val
return self.ucr_row[indicator.slug] if indicator.in_ucr else None
raise AttributeError(f"FoodRow has no definition for {name}")
class FoodData:
"""Generates the primary dataset for INDDEX reports. See file docstring for more."""
IN_MEMORY_FILTERS = ['gap_type', 'fao_who_gift_food_group_code', 'food_type']
FILTERABLE_COLUMNS = IN_MEMORY_FILTERS + FoodCaseData.FILTERABLE_COLUMNS
def __init__(self, domain, *, datespan, filter_selections):
for slug in filter_selections:
if slug not in self.FILTERABLE_COLUMNS:
raise AssertionError(f"{slug} is not a valid filter slug")
self.fixtures = FixtureAccessor(domain)
self._in_memory_filter_selections = {
slug: filter_selections[slug] for slug in self.IN_MEMORY_FILTERS
if slug in filter_selections
}
self._ucr = FoodCaseData({
'domain': domain,
'startdate': str(datespan.startdate),
'enddate': str(datespan.enddate),
**{k: v for k, v in filter_selections.items()
if k in FoodCaseData.FILTERABLE_COLUMNS}
})
@classmethod
def from_request(cls, domain, request):
return cls(
domain,
datespan=request.datespan,
filter_selections={'owner_id': cls._get_owner_ids(domain, request),
**{k: [v for v in request.GET.getlist(k) if v]
for k in cls.FILTERABLE_COLUMNS if k != 'owner_id'}}
)
@staticmethod
def _get_owner_ids(domain, request):
slugs = request.GET.getlist(EMWF.slug)
if EMWF.no_filters_selected(slugs) or EMWF.show_all_data(slugs) or EMWF.show_project_data(slugs):
return [] # don't filter by owner
if EMWF.show_deactivated_data(slugs):
return (user_es.UserES()
.show_only_inactive()
.domain(domain)
.get_ids())
return get_case_owners(request, domain, slugs)
def _matches_in_memory_filters(self, row):
# If a gap type is specified, show only rows with gaps of that type
gap_type = self._in_memory_filter_selections.get('gap_type')
if gap_type == ConvFactorGaps.slug and row.conv_factor_gap_code == ConvFactorGaps.AVAILABLE:
return False
if gap_type == FctGaps.slug and row.fct_gap_code == FctGaps.AVAILABLE:
return False
food_types = self._in_memory_filter_selections.get('food_type')
if food_types and row.food_type not in food_types:
return False
food_groups = self._in_memory_filter_selections.get('fao_who_gift_food_group_code')
if food_groups and row.fao_who_gift_food_group_code not in food_groups:
return False
return True
def _get_grouped_rows(self):
"""Return raw case rows grouped by recipe"""
rows = defaultdict(lambda: {
'recipe': None,
'references': [],
'ingredients': [],
})
for row in self._ucr.get_data():
if row['food_type'] in (STANDARD_RECIPE, NON_STANDARD_RECIPE):
if row['already_reported_recipe_case_id']:
rows[row['already_reported_recipe_case_id']]['references'].append(row)
else:
rows[row['doc_id']]['recipe'] = row
elif row['recipe_case_id']:
rows[row['recipe_case_id']]['ingredients'].append(row)
else:
# this isn't part of a recipe
rows[row['doc_id']]['ingredients'].append(row)
return rows.values()
def _get_all_rows(self):
for group in self._get_grouped_rows():
master_recipe = group['recipe']
references = group['references']
ingredients = group['ingredients']
if not master_recipe:
yield from self._non_recipe_rows(references + ingredients)
else:
yield from self._recipe_rows(master_recipe, ingredients)
for recipe in references:
recipe = _insert_nsr_cols(recipe, master_recipe)
yield from self._recipe_rows(recipe, ingredients)
@property
@memoized
def rows(self):
rows = []
for row in self._get_all_rows():
if self._matches_in_memory_filters(row):
rows.append(row)
return rows
def _non_recipe_rows(self, rows):
"""These rows aren't part of a recipe, or it wasn't found"""
for raw_row in rows:
row = FoodRow(raw_row, self.fixtures)
row.total_grams = _multiply(row.measurement_amount, row.conv_factor, row.portions)
row.set_fct_gap()
row.enrichment_complete = True
yield row
def _recipe_rows(self, raw_recipe, raw_ingredients):
recipe = FoodRow(raw_recipe, self.fixtures)
if recipe.food_type == STANDARD_RECIPE:
# std recipe ingredients come from the DB, NOT ingredient cases
ingredients = [FoodRow(raw_recipe, self.fixtures, ingredient_data)
for ingredient_data in self.fixtures.recipes[recipe.food_code]]
else: # NON_STANDARD_RECIPE
ingredients = [FoodRow(raw, self.fixtures) for raw in raw_ingredients]
total_grams = _calculate_total_grams(recipe, ingredients)
recipe.set_fct_gap(ingredients)
recipe.recipe_name = recipe.ucr_row['recipe_name']
for row in [recipe] + ingredients:
row.total_grams = total_grams[row.uuid]
row.recipe_num_ingredients = len(ingredients)
row.recipe_case_id = recipe.caseid
if row.is_ingredient == 'yes':
row.recipe_name = recipe.recipe_name
if recipe.food_type == STANDARD_RECIPE:
row.ingredient_type = 'std_recipe_ingredient'
row.ingr_recipe_total_grams_consumed = total_grams[recipe.uuid]
else:
row.ingredient_type = 'non_std_recipe_ingredient'
for col in NSR_COLS_TO_COPY: # Copy these values from the recipe case
setattr(row, col, getattr(recipe, col))
row.enrichment_complete = True
yield row
def _insert_nsr_cols(raw_recipe, master_recipe):
# nsr references are missing some values, insert them from the master recipe
nsr_cols = {col: master_recipe[col] for col in NSR_COLS_TO_COPY}
amount = _maybe_float(raw_recipe['measurement_amount'])
portions = _maybe_float(raw_recipe['portions'])
amount_post_cooking = _maybe_float(master_recipe['nsr_measurement_amount_post_cooking'])
if all(val is not None for val in [amount, portions, amount_post_cooking]):
nsr_cols['nsr_consumed_cooked_fraction'] = amount * portions / amount_post_cooking
else:
nsr_cols['nsr_consumed_cooked_fraction'] = None
return {**raw_recipe, **nsr_cols}
def _calculate_total_grams(recipe, ingredients):
if recipe.food_type == STANDARD_RECIPE:
res = {}
recipe_total = _multiply(recipe.measurement_amount, recipe.conv_factor, recipe.portions)
res[recipe.uuid] = recipe_total
for row in ingredients:
res[row.uuid] = _multiply(recipe_total, row.ingr_fraction)
return res
else: # NON_STANDARD_RECIPE
res = {}
for row in ingredients:
res[row.uuid] = _multiply(row.measurement_amount, row.conv_factor,
row.portions, recipe.nsr_consumed_cooked_fraction)
try:
res[recipe.uuid] = sum(res.values()) if res else None
except TypeError:
res[recipe.uuid] = None
return res
def _multiply(*args):
try:
return reduce(operator.mul, args)
except TypeError:
return None
def _maybe_float(val):
return float(val) if val not in (None, '') else None
|
[
"uuid.uuid4",
"corehq.apps.es.users.UserES",
"collections.defaultdict",
"corehq.apps.reports.filters.case_list.CaseListFilter.show_all_data",
"corehq.apps.reports.standard.cases.utils.get_case_owners",
"functools.reduce",
"corehq.apps.reports.filters.case_list.CaseListFilter.no_filters_selected",
"corehq.apps.reports.filters.case_list.CaseListFilter.show_deactivated_data",
"corehq.apps.reports.filters.case_list.CaseListFilter.show_project_data"
] |
[((6749, 6761), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (6759, 6761), False, 'import uuid\n'), ((15090, 15123), 'corehq.apps.reports.filters.case_list.CaseListFilter.show_deactivated_data', 'EMWF.show_deactivated_data', (['slugs'], {}), '(slugs)\n', (15116, 15123), True, 'from corehq.apps.reports.filters.case_list import CaseListFilter as EMWF\n'), ((15287, 15326), 'corehq.apps.reports.standard.cases.utils.get_case_owners', 'get_case_owners', (['request', 'domain', 'slugs'], {}), '(request, domain, slugs)\n', (15302, 15326), False, 'from corehq.apps.reports.standard.cases.utils import get_case_owners\n'), ((16228, 16303), 'collections.defaultdict', 'defaultdict', (["(lambda : {'recipe': None, 'references': [], 'ingredients': []})"], {}), "(lambda : {'recipe': None, 'references': [], 'ingredients': []})\n", (16239, 16303), False, 'from collections import defaultdict\n'), ((21153, 21179), 'functools.reduce', 'reduce', (['operator.mul', 'args'], {}), '(operator.mul, args)\n', (21159, 21179), False, 'from functools import reduce\n'), ((14937, 14968), 'corehq.apps.reports.filters.case_list.CaseListFilter.no_filters_selected', 'EMWF.no_filters_selected', (['slugs'], {}), '(slugs)\n', (14961, 14968), True, 'from corehq.apps.reports.filters.case_list import CaseListFilter as EMWF\n'), ((14972, 14997), 'corehq.apps.reports.filters.case_list.CaseListFilter.show_all_data', 'EMWF.show_all_data', (['slugs'], {}), '(slugs)\n', (14990, 14997), True, 'from corehq.apps.reports.filters.case_list import CaseListFilter as EMWF\n'), ((15001, 15030), 'corehq.apps.reports.filters.case_list.CaseListFilter.show_project_data', 'EMWF.show_project_data', (['slugs'], {}), '(slugs)\n', (15023, 15030), True, 'from corehq.apps.reports.filters.case_list import CaseListFilter as EMWF\n'), ((15145, 15161), 'corehq.apps.es.users.UserES', 'user_es.UserES', ([], {}), '()\n', (15159, 15161), True, 'from corehq.apps.es import users as user_es\n')]
|
import locale
import os
import re
import string
import unicodedata
import zlib
from datetime import date
from urllib.parse import parse_qs, urlencode
from django.core.exceptions import ObjectDoesNotExist
from django.db import IntegrityError, transaction
from django.urls import reverse_lazy as reverse
from django.utils.encoding import iri_to_uri, smart_text
from django.utils.functional import lazy
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from .conf import settings
MALE = "m"
FEMALE = "f"
def _get_localeconv():
"""
This function loads localeconv during module load.
    It is necessary because using locale.setlocale later may be dangerous
    (it is not thread-safe in most implementations).
"""
original_locale_name = locale.setlocale(locale.LC_ALL)
locale_name = locale.locale_alias[settings.LANGUAGE_CODE].split(".")[0] + ".UTF-8"
locale.setlocale(locale.LC_ALL, str(locale_name))
lc = locale.localeconv()
locale.setlocale(locale.LC_ALL, original_locale_name)
return lc
localeconv = _get_localeconv()
# This function is inspired by python's standard locale.currency().
def currency(val, international=False):
"""Formats val according to the currency settings for current language."""
digits = settings.PRICE_DECIMAL_PLACES
# grouping
groups = []
s = str(abs(int(val)))
for interval in locale._grouping_intervals(localeconv["mon_grouping"]):
if not s:
break
groups.append(s[-interval:])
s = s[:-interval]
if s:
groups.append(s)
groups.reverse()
s = smart_text(localeconv["mon_thousands_sep"]).join(groups)
# display fraction for non integer values
if digits and not isinstance(val, int):
s += smart_text(localeconv["mon_decimal_point"]) + "{{:.{}f}}".format(digits).format(val).split(".")[1]
# '<' and '>' are markers if the sign must be inserted between symbol and value
s = "<" + s + ">"
smb = smart_text(localeconv[international and "int_curr_symbol" or "currency_symbol"])
precedes = localeconv[val < 0 and "n_cs_precedes" or "p_cs_precedes"]
separated = localeconv[val < 0 and "n_sep_by_space" or "p_sep_by_space"]
if precedes:
s = smb + (separated and " " or "") + s
else:
s = s + (separated and " " or "") + smb
sign_pos = localeconv[val < 0 and "n_sign_posn" or "p_sign_posn"]
sign = localeconv[val < 0 and "negative_sign" or "positive_sign"]
if sign_pos == 0:
s = "(" + s + ")"
elif sign_pos == 1:
s = sign + s
elif sign_pos == 2:
s = s + sign
elif sign_pos == 3:
s = s.replace("<", sign)
elif sign_pos == 4:
s = s.replace(">", sign)
else:
# the default if nothing specified;
# this should be the most fitting sign position
s = sign + s
return s.replace("<", "").replace(">", "").replace(" ", "\u00A0")
def amount_color(amount):
if amount > 0:
return settings.LEPRIKON_COLOR_POSITIVE
elif amount < 0:
return settings.LEPRIKON_COLOR_NEGATIVE
else:
return settings.LEPRIKON_COLOR_ZERO
def ascii(value):
return unicodedata.normalize("NFKD", value).encode("ascii", errors="ignore").decode("ascii")
def comma_separated(lst):
lst = list(map(smart_text, lst))
if len(lst) > 2:
return _(", and ").join([", ".join(lst[:-1]), lst[-1]])
else:
return _(", and ").join(lst)
def get_rand_hash(length=32, stringset=string.ascii_letters + string.digits):
return "".join([stringset[i % len(stringset)] for i in [ord(x) for x in os.urandom(length)]])
def current_url(request):
if request.META["QUERY_STRING"]:
return "{}?{}".format(request.path, request.META["QUERY_STRING"])
else:
return request.path
def url_back(request):
return request.POST.get(
settings.LEPRIKON_PARAM_BACK,
request.GET.get(
settings.LEPRIKON_PARAM_BACK,
reverse("leprikon:summary"),
),
)
recursive_back_splitter = re.compile(f"[?&]{settings.LEPRIKON_PARAM_BACK}=")
def url_with_back(url, url_back):
try:
query = url_back.split("?")[1]
except IndexError:
pass
else:
try:
# try to reuse original back url
url_back = parse_qs(query)[settings.LEPRIKON_PARAM_BACK][0]
except KeyError:
pass
# remove recursive back url
url_back = recursive_back_splitter.split(url_back)[0]
return "{}?{}".format(url, urlencode({settings.LEPRIKON_PARAM_BACK: iri_to_uri(url_back)}))
def reverse_with_back(request, *args, **kwargs):
return url_with_back(reverse(*args, **kwargs), current_url(request))
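# The two helpers below work with Czech/Slovak birth numbers (rodné číslo) in
# the form YYMMDD/SSS(C): the month part has 50 added for women, numbers
# issued before 1954 have only nine digits, and an extra offset of 20 may
# appear in the month when a day's serial numbers run out (hence the
# "% 50 % 20" below).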
def get_gender(birth_num):
return FEMALE if birth_num[2:4] > "50" else MALE
def get_birth_date(birth_num):
birth_num = birth_num.replace("/", "")
y = int(birth_num[:2])
if len(birth_num) == 9:
# before 1954
if y < 54:
year = 1900 + y
else:
year = 1800 + y
else:
year = int(date.today().year / 100) * 100 + y
if y > date.today().year % 100:
year -= 100
month = int(birth_num[2:4]) % 50 % 20
day = int(birth_num[4:6])
return date(year, month, day)
def get_age(birth_date, today=None):
today = today or date.today()
try:
birth_day_this_year = date(today.year, birth_date.month, birth_date.day)
except ValueError:
birth_day_this_year = date(today.year, birth_date.month + 1, 1)
if birth_day_this_year > today:
return today.year - birth_date.year - 1
else:
return today.year - birth_date.year
def first_upper(s):
return s[0].upper() + s[1:] if s else ""
def merge_objects(source, target, attributes=None, exclude=()):
attributes = attributes or [f.name for f in source._meta.fields if f.name not in exclude]
for attr in attributes:
if not getattr(target, attr):
setattr(target, attr, getattr(source, attr))
return target
@transaction.atomic
def merge_users(source, target):
from .models.subjects import SubjectRegistration
target = merge_objects(source, target, ("first_name", "last_name", "email"))
target.date_joined = (
min(source.date_joined, target.date_joined)
if source.date_joined and target.date_joined
else source.date_joined or target.date_joined
)
target.last_login = (
max(source.last_login, target.last_login)
if source.last_login and target.last_login
else source.last_login or target.last_login
)
try:
leader = source.leprikon_leader
leader.user = target
leader.save()
except ObjectDoesNotExist:
pass
except IntegrityError:
# both users are leaders
raise
for attr in (
"user",
"created_by",
"approved_by",
"payment_requested_by",
"refund_offered_by",
"cancelation_requested_by",
"canceled_by",
):
SubjectRegistration.objects.filter(**{attr: source}).update(**{attr: target})
for sp in source.leprikon_participants.all():
tp = target.leprikon_participants.filter(birth_num=sp.birth_num).first()
if tp:
tp = merge_objects(sp, tp, exclude=("id", "user", "birth_num"))
tp.save()
else:
sp.user = target
sp.save()
for sp in source.leprikon_parents.all():
tp = target.leprikon_parents.filter(first_name=sp.first_name, last_name=sp.last_name).first()
if tp:
tp = merge_objects(sp, tp, exclude=("id", "user"))
tp.save()
else:
sp.user = target
sp.save()
for sbi in source.leprikon_billing_info.all():
tbi = target.leprikon_billing_info.filter(name=sbi.name).first()
if tbi:
tbi = merge_objects(sbi, tbi, exclude=("id", "user"))
tbi.save()
else:
sbi.user = target
sbi.save()
for mr in source.leprikon_messages.all():
if not target.leprikon_messages.filter(message=mr.message).exists():
mr.recipient = target
mr.save()
try:
# support social auth
source.social_auth.update(user=target)
except AttributeError:
pass
from .rocketchat import RocketChat
RocketChat().merge_users(source, target)
source.delete()
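# spayd() assembles a "Short Payment Descriptor" (SPD/SPAYD) string as used in
# Czech payment QR codes: the key/value pairs are ASCII-normalised, sorted,
# joined with '*' and terminated with a CRC32 checksum.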
def spayd(*items):
s = "SPD*1.0*" + "*".join(
"%s:%s" % (k, unicodedata.normalize("NFKD", str(v).replace("*", "")).encode("ascii", "ignore").upper().decode())
for k, v in sorted(items)
)
s += "*CRC32:%x" % zlib.crc32(s.encode())
return s.upper()
def paragraph(text):
return mark_safe(f"<p>{text.strip()}</p>".replace("\n", "<br/>\n").replace("<br/>\n<br/>\n", "</p>\n\n<p>"))
lazy_paragraph = lazy(paragraph, str)
|
[
"unicodedata.normalize",
"os.urandom",
"django.urls.reverse_lazy",
"datetime.date",
"datetime.date.today",
"django.utils.encoding.iri_to_uri",
"urllib.parse.parse_qs",
"django.utils.functional.lazy",
"locale._grouping_intervals",
"locale.setlocale",
"django.utils.translation.ugettext_lazy",
"django.utils.encoding.smart_text",
"locale.localeconv",
"re.compile"
] |
[((4113, 4163), 're.compile', 're.compile', (['f"""[?&]{settings.LEPRIKON_PARAM_BACK}="""'], {}), "(f'[?&]{settings.LEPRIKON_PARAM_BACK}=')\n", (4123, 4163), False, 'import re\n'), ((8946, 8966), 'django.utils.functional.lazy', 'lazy', (['paragraph', 'str'], {}), '(paragraph, str)\n', (8950, 8966), False, 'from django.utils.functional import lazy\n'), ((814, 845), 'locale.setlocale', 'locale.setlocale', (['locale.LC_ALL'], {}), '(locale.LC_ALL)\n', (830, 845), False, 'import locale\n'), ((996, 1015), 'locale.localeconv', 'locale.localeconv', ([], {}), '()\n', (1013, 1015), False, 'import locale\n'), ((1020, 1073), 'locale.setlocale', 'locale.setlocale', (['locale.LC_ALL', 'original_locale_name'], {}), '(locale.LC_ALL, original_locale_name)\n', (1036, 1073), False, 'import locale\n'), ((1432, 1486), 'locale._grouping_intervals', 'locale._grouping_intervals', (["localeconv['mon_grouping']"], {}), "(localeconv['mon_grouping'])\n", (1458, 1486), False, 'import locale\n'), ((2029, 2114), 'django.utils.encoding.smart_text', 'smart_text', (["localeconv[international and 'int_curr_symbol' or 'currency_symbol']"], {}), "(localeconv[international and 'int_curr_symbol' or 'currency_symbol']\n )\n", (2039, 2114), False, 'from django.utils.encoding import iri_to_uri, smart_text\n'), ((5311, 5333), 'datetime.date', 'date', (['year', 'month', 'day'], {}), '(year, month, day)\n', (5315, 5333), False, 'from datetime import date\n'), ((4728, 4752), 'django.urls.reverse_lazy', 'reverse', (['*args'], {}), '(*args, **kwargs)\n', (4735, 4752), True, 'from django.urls import reverse_lazy as reverse\n'), ((5394, 5406), 'datetime.date.today', 'date.today', ([], {}), '()\n', (5404, 5406), False, 'from datetime import date\n'), ((5446, 5496), 'datetime.date', 'date', (['today.year', 'birth_date.month', 'birth_date.day'], {}), '(today.year, birth_date.month, birth_date.day)\n', (5450, 5496), False, 'from datetime import date\n'), ((1651, 1694), 'django.utils.encoding.smart_text', 'smart_text', (["localeconv['mon_thousands_sep']"], {}), "(localeconv['mon_thousands_sep'])\n", (1661, 1694), False, 'from django.utils.encoding import iri_to_uri, smart_text\n'), ((1812, 1855), 'django.utils.encoding.smart_text', 'smart_text', (["localeconv['mon_decimal_point']"], {}), "(localeconv['mon_decimal_point'])\n", (1822, 1855), False, 'from django.utils.encoding import iri_to_uri, smart_text\n'), ((4039, 4066), 'django.urls.reverse_lazy', 'reverse', (['"""leprikon:summary"""'], {}), "('leprikon:summary')\n", (4046, 4066), True, 'from django.urls import reverse_lazy as reverse\n'), ((5550, 5591), 'datetime.date', 'date', (['today.year', '(birth_date.month + 1)', '(1)'], {}), '(today.year, birth_date.month + 1, 1)\n', (5554, 5591), False, 'from datetime import date\n'), ((3417, 3428), 'django.utils.translation.ugettext_lazy', '_', (['""", and """'], {}), "(', and ')\n", (3418, 3428), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3491, 3502), 'django.utils.translation.ugettext_lazy', '_', (['""", and """'], {}), "(', and ')\n", (3492, 3502), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4628, 4648), 'django.utils.encoding.iri_to_uri', 'iri_to_uri', (['url_back'], {}), '(url_back)\n', (4638, 4648), False, 'from django.utils.encoding import iri_to_uri, smart_text\n'), ((3230, 3266), 'unicodedata.normalize', 'unicodedata.normalize', (['"""NFKD"""', 'value'], {}), "('NFKD', value)\n", (3251, 3266), False, 'import unicodedata\n'), ((4375, 4390), 'urllib.parse.parse_qs', 'parse_qs', 
(['query'], {}), '(query)\n', (4383, 4390), False, 'from urllib.parse import parse_qs, urlencode\n'), ((5179, 5191), 'datetime.date.today', 'date.today', ([], {}), '()\n', (5189, 5191), False, 'from datetime import date\n'), ((3669, 3687), 'os.urandom', 'os.urandom', (['length'], {}), '(length)\n', (3679, 3687), False, 'import os\n'), ((5129, 5141), 'datetime.date.today', 'date.today', ([], {}), '()\n', (5139, 5141), False, 'from datetime import date\n')]
|
'''OpenGL extension NV.texture_expand_normal
This module customises the behaviour of the
OpenGL.raw.GL.NV.texture_expand_normal to provide a more
Python-friendly API
Overview (from the spec)
This extension provides a remapping mode where unsigned texture
components (in the range [0,1]) can be treated as though they
contained signed data (in the range [-1,+1]). This allows
applications to easily encode signed data into unsigned texture
formats.
The functionality of this extension is nearly identical to the
EXPAND_NORMAL_NV remapping mode provided in the NV_register_combiners
extension, although it applies even if register combiners are used.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/NV/texture_expand_normal.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.NV.texture_expand_normal import *
from OpenGL.raw.GL.NV.texture_expand_normal import _EXTENSION_NAME
def glInitTextureExpandNormalNV():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION
|
[
"OpenGL.extensions.hasGLExtension"
] |
[((1243, 1285), 'OpenGL.extensions.hasGLExtension', 'extensions.hasGLExtension', (['_EXTENSION_NAME'], {}), '(_EXTENSION_NAME)\n', (1268, 1285), False, 'from OpenGL import extensions\n')]
|
#!/usr/bin/env python3
"""An abstract _Runner_ module.
"""
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from enum import Enum, auto, unique
import threading
import uuid
RUNNER_IMAGE = 'alanbchristie/pydatalister'
RUNNER_TAG = 'latest'
@unique
class RunnerState(Enum):
"""Runner execution states.
"""
# The first event is always BEGIN.
# The last and final event is always END.
#
# The normal event sequence, which relates to a runner
# that's successfully created, runs, completes and is then
# automatically deleted is represented by the following sequence:
#
# BEGIN - PREPARING - RUNNING - COMPLETE - END
BEGIN = auto() # The Runner initial state (assigned in begin())
PREPARING = auto() # The Runner is preparing to run
RUNNING = auto() # The Runner is Running
COMPLETE = auto() # The Runner has completed its actions (naturally)
STOPPING = auto() # The Runner is stopping - in response to a stop()
STOPPED = auto() # The runner has stopped - in response to a stop()
FAILED = auto() # There has been a problem
END = auto() # The last event, issued when the runner's gone
RunnerStateTuple = namedtuple('Runner', ['state', 'context', 'msg'])
class Runner(threading.Thread, metaclass=ABCMeta):
"""The ``Runner`` base class, from which all Runners are derived.
"""
def __init__(self, callback, callback_context):
"""The basic Runner initialser.
"""
threading.Thread.__init__(self)
assert callable(callback)
self._state_callback = callback
self._callback_context = callback_context
self._runner_state = None
self._stopping = False
self._runner_uuid = uuid.uuid4()
# A synchronisation lock
self.lock = threading.Lock()
print('New Runner() {%s}' % self._runner_uuid)
def _set_runner_state(self, runner_state, msg=None):
"""Sets the runner state and informs the user.
:param runner_state: The new Runner state
        :type runner_state: ``RunnerState``
"""
assert isinstance(runner_state, RunnerState)
self._runner_state = runner_state
print('New RunnerState (%s) {%s}' % (runner_state, self._runner_uuid))
# Inform the user of each state change.
# The receiver must expect a `RunnerStateTuple` as the first argument
# in the callback method.
assert self._state_callback
rso = RunnerStateTuple(runner_state, self._callback_context, msg)
self._state_callback(rso, self._callback_context)
@abstractmethod
def begin(self):
"""Starts the Runner. The state_callback will be supplied with
instances of the RunnerState as the runner progresses. This
method must only be called once.
This method must not block.
"""
assert self._runner_state is None
self._set_runner_state(RunnerState.BEGIN)
def end(self):
"""Stops the Runner. This method should be called only of a Runner is
to be prematurely stopped. Runners have a built-in lifetime and are
normally left to complete naturally.
If the Runner is still running this method introduces the
``RunnerState`` values of ``STOPPING`` and ``STOPPED``, normally not
seen.
This method does nothing if the Runner is already stopping or has
completed.
This method must not block.
"""
print('End called... {%s}' % self._runner_uuid)
if self._stopping:
print('Ignoring (already in progress). {%s}' %
self._runner_uuid)
return
elif self._runner_state in [RunnerState.COMPLETE,
RunnerState.END]:
print('Ignoring (Runner already gone). {%s}' %
self._runner_uuid)
return
self._set_runner_state(RunnerState.STOPPING)
# Just set the 'stopping' field (and change the state).
# This should cause the main thread to exit - it's
# the responsibility of the implementing class.
self._stopping = True
print('End is nigh! {%s}' % self._runner_uuid)
|
[
"threading.Thread.__init__",
"uuid.uuid4",
"threading.Lock",
"collections.namedtuple",
"enum.auto"
] |
[((1264, 1313), 'collections.namedtuple', 'namedtuple', (['"""Runner"""', "['state', 'context', 'msg']"], {}), "('Runner', ['state', 'context', 'msg'])\n", (1274, 1313), False, 'from collections import namedtuple\n'), ((697, 703), 'enum.auto', 'auto', ([], {}), '()\n', (701, 703), False, 'from enum import Enum, auto, unique\n'), ((778, 784), 'enum.auto', 'auto', ([], {}), '()\n', (782, 784), False, 'from enum import Enum, auto, unique\n'), ((837, 843), 'enum.auto', 'auto', ([], {}), '()\n', (841, 843), False, 'from enum import Enum, auto, unique\n'), ((890, 896), 'enum.auto', 'auto', ([], {}), '()\n', (894, 896), False, 'from enum import Enum, auto, unique\n'), ((969, 975), 'enum.auto', 'auto', ([], {}), '()\n', (973, 975), False, 'from enum import Enum, auto, unique\n'), ((1047, 1053), 'enum.auto', 'auto', ([], {}), '()\n', (1051, 1053), False, 'from enum import Enum, auto, unique\n'), ((1125, 1131), 'enum.auto', 'auto', ([], {}), '()\n', (1129, 1131), False, 'from enum import Enum, auto, unique\n'), ((1177, 1183), 'enum.auto', 'auto', ([], {}), '()\n', (1181, 1183), False, 'from enum import Enum, auto, unique\n'), ((1558, 1589), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (1583, 1589), False, 'import threading\n'), ((1809, 1821), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1819, 1821), False, 'import uuid\n'), ((1876, 1892), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (1890, 1892), False, 'import threading\n')]
|
from chinormfilter import __version__
from chinormfilter.cli import Filter
def test_version():
assert __version__ == '0.5.0'
def test_kuro2sudachi_cli(capsys):
f = Filter(dict_type="full")
assert f.duplicated("林檎,りんご") is True
assert f.duplicated("レナリドミド, レナリドマイド") is False
assert f.duplicated("エダマメ,枝豆") is True
assert f.duplicated("えだまめ,枝豆") is True
assert f.duplicated("飲む,呑む") is True
assert f.duplicated("エダマメ => 枝豆") is True
assert f.duplicated("tlc => tlc,全肺気量") is False
assert f.duplicated("リンたんぱく質,リン蛋白質,リンタンパク質") is True
assert f.duplicated("グルタチオン => グルタチオン,タチオン,ランデールチオン") is False
|
[
"chinormfilter.cli.Filter"
] |
[((176, 200), 'chinormfilter.cli.Filter', 'Filter', ([], {'dict_type': '"""full"""'}), "(dict_type='full')\n", (182, 200), False, 'from chinormfilter.cli import Filter\n')]
|
#!/usr/bin/env python3
import json
import os
import sys
NAME = "{VERSION_FROM}-{VERSION_CURRENT}-{VERSION_TO}-{STAGE}.json"
with open(NAME.format(**os.environ), "w") as fd:
json.dump(sys.argv, fd)
|
[
"json.dump"
] |
[((179, 202), 'json.dump', 'json.dump', (['sys.argv', 'fd'], {}), '(sys.argv, fd)\n', (188, 202), False, 'import json\n')]
|
# Author: <NAME>
# Date: 2020-01-10
# Function: Run training
#-------------------------- import package --------------------------#
from __future__ import division
from __future__ import print_function
import time
import tensorflow as tf
import winsound
from Code_utils import *
from Code_models import GCN, MLP, MORE
#--------------------------- main process ---------------------------#
# Set random seed
seed = 123
np.random.seed(seed)
tf.set_random_seed(seed)
# Settings
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_string('dataset', 'cora', 'Dataset string.') # 'cora', 'citeseer', 'pubmed'
flags.DEFINE_string('model', 'MORE', 'Model string.') # 'gcn', 'gcn_cheby', 'dense', 'MORE'
flags.DEFINE_float('learning_rate', 0.001, 'Initial learning rate.')
flags.DEFINE_integer('epochs', 500, 'Number of epochs to train.')
flags.DEFINE_float('dropout', 0.5, 'Dropout rate (1 - keep probability).')
flags.DEFINE_float('weight_decay', 5e-4, 'Weight for L2 loss on embedding matrix.')
flags.DEFINE_integer('early_stopping', 30, 'Tolerance for early stopping (# of epochs).')
flags.DEFINE_integer('hidden1', 16, 'Number of units in hidden layer 1.')
flags.DEFINE_integer('max_degree', 3, 'Maximum Chebyshev polynomial degree.')
flags.DEFINE_integer('motif_feature_dim', 6, 'the dim of motif features')
flags.DEFINE_list('property_embedding_hidden', [256], 'the hidden layer number of property embedding')
flags.DEFINE_list('motif_embedding_hidden', [256], 'the hidden layer number of motif embedding')
flags.DEFINE_list('integration_hidden', [], 'the hidden layer number of integration')
flags.DEFINE_string('embeding_combination_method', "Hadamard", 'the method of embedding combination')
# embeding_combination_method ---- "Hadamard", "Summation", "Connection"
# batch_run
use_batch = False
if use_batch:
FLAGS.model = 'MotifGCN'
lr = [0.01, 0.001, 0.0003, 0.003]
le = [300, 500, 1000, 2000]
mo = ["Hadamard", "Summation", "Connection"]
la = [32, 64, 128, 256, 512]
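    # build the full hyper-parameter grid: each (learning-rate, epochs) pair is
    # combined with every embedding-combination method and every hidden width,
    # and a single configuration is then picked out of mode_list by index below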
mode_list = []
for i in range(0, 4):
temp1 = [lr[i], le[i]]
for j in range(0, 3):
temp2 = temp1 + [mo[j]]
for k in range(0, 5):
temp3 = temp2 + [la[k]]
mode_list.append(temp3)
mode = mode_list[59] # 0-14, 15-29, 30-44, 45-59
print(mode)
FLAGS.learning_rate = mode[0]
FLAGS.epochs = mode[1]
FLAGS.embeding_combination_method = mode[2]
FLAGS.motif_embedding_hidden = [mode[3]]
FLAGS.property_embedding_hidden = [mode[3]]
# Load data
adj, features, y_train, y_val, y_test, train_mask, val_mask, test_mask, motiffeatures = load_data(FLAGS.dataset)
# Some preprocessing
features = preprocess_features(features)
motiffeatures = preprocess_features(motiffeatures)
if FLAGS.model == 'gcn':
support = [preprocess_adj(adj)]
num_supports = 1
model_func = GCN
elif FLAGS.model == 'gcn_cheby':
support = chebyshev_polynomials(adj, FLAGS.max_degree)
num_supports = 1 + FLAGS.max_degree
model_func = GCN
elif FLAGS.model == 'dense':
support = [preprocess_adj(adj)] # Not used
num_supports = 1
model_func = MLP
elif FLAGS.model == 'MORE':
support = [preprocess_adj(adj)] # Not used
num_supports = 1
model_func = MORE
else:
raise ValueError('Invalid argument for model: ' + str(FLAGS.model))
# Define placeholders
placeholders = {
'support': [tf.sparse_placeholder(tf.float32) for _ in range(num_supports)],
'features': tf.sparse_placeholder(tf.float32, shape=tf.constant(features[2], dtype=tf.int64)),
'motiffeatures': tf.sparse_placeholder(tf.float32, shape=tf.constant(motiffeatures[2], dtype=tf.int64)),
'labels': tf.placeholder(tf.float32, shape=(None, y_train.shape[1])),
'labels_mask': tf.placeholder(tf.int32),
'dropout': tf.placeholder_with_default(0., shape=()),
'num_features_nonzero': tf.placeholder(tf.int32), # helper variable for sparse dropout
'num_motif_features_nonzero': tf.placeholder(tf.int32)
}
# Create model
model = model_func(placeholders, input_dim=features[2][1], logging=True)
# Initialize session
sess = tf.Session()
# Define model evaluation function
def evaluate(features, support, labels, mask, motiffeatures, placeholders):
t_test = time.time()
feed_dict_val = construct_feed_dict(features, support, labels, mask, motiffeatures, placeholders)
outs_val = sess.run([model.loss, model.accuracy], feed_dict=feed_dict_val)
return outs_val[0], outs_val[1], (time.time() - t_test)
# Init variables
sess.run(tf.global_variables_initializer())
cost_val = []
train_acc, val_acc, Tacc = [], [], []
train_loss, val_loss, Tloss = [], [], []
# Train model
train_starttime = time.time()
train_time_list = []
stop_epoch = 0
for epoch in range(FLAGS.epochs):
t = time.time()
stop_epoch = epoch + 1
# Construct feed dictionary
feed_dict = construct_feed_dict(features, support, y_train, train_mask, motiffeatures, placeholders)
feed_dict.update({placeholders['dropout']: FLAGS.dropout})
# Training step
outs = sess.run([model.opt_op, model.loss, model.accuracy], feed_dict=feed_dict)
train_acc.append(outs[2])
train_loss.append(outs[1])
# Validation
cost, acc, duration = evaluate(features, support, y_val, val_mask, motiffeatures, placeholders)
cost_val.append(cost)
val_acc.append(acc)
val_loss.append(cost)
# Print results
print("Epoch:", '%04d' % (epoch + 1), "train_loss=", "{:.5f}".format(outs[1]),
"train_acc=", "{:.5f}".format(outs[2]), "val_loss=", "{:.5f}".format(cost),
"val_acc=", "{:.5f}".format(acc), "time=", "{:.5f}".format(time.time() - t))
train_time_list.append(time.time() - t)
test_cost, test_acc, test_duration = evaluate(features, support, y_test, test_mask, motiffeatures, placeholders)
Tacc.append(test_acc)
Tloss.append(test_cost)
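    # early stopping: abort training once the newest validation loss exceeds the
    # mean of the previous `early_stopping` validation losses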
if epoch > FLAGS.early_stopping and cost_val[-1] > np.mean(cost_val[-(FLAGS.early_stopping+1):-1]):
print("Early stopping...")
break
print("Optimization Finished!")
train_time = time.time() - train_starttime
# Tacc = Tacc[-max(FLAGS.early_stopping * 2, 20):]
# Testing
test_starttime = time.time()
test_cost, test_acc, test_duration = evaluate(features, support, y_test, test_mask, motiffeatures, placeholders)
print("Test set results:", "cost=", "{:.5f}".format(test_cost),
"accuracy=", "{:.5f}".format(test_acc), "time=", "{:.5f}".format(test_duration))
print("Max test acc = {:.5f}".format(max(Tacc)))
test_time = time.time() - test_starttime
# Save
with open("Result\\Train_log.csv", mode='a') as f:
f.write("{},{},{},{},{},{},{},{},{},{:.4f},{:.4f},(best={:.4f}),{:.4f},{},{:.6f},{:.6f},{:.6f}\n".\
format(seed,FLAGS.dataset,FLAGS.model,FLAGS.learning_rate, FLAGS.dropout, FLAGS.embeding_combination_method,\
str(FLAGS.property_embedding_hidden), str(FLAGS.motif_embedding_hidden), str(FLAGS.integration_hidden),\
test_acc,test_cost, max(Tacc),test_duration,stop_epoch, train_time, np.mean(train_time_list), test_time))
with open("Result\\Loss.csv", mode='a') as f:
for i in train_loss:
f.write("{:.6f},".format(i))
f.write("\n")
for i in val_loss:
f.write("{:.6f},".format(i))
f.write("\n")
for i in Tloss:
f.write("{:.6f},".format(i))
f.write("\n")
with open("Result\\Acc.csv", mode='a') as f:
for i in train_acc:
f.write("{:.6f},".format(i))
f.write("\n")
for i in val_acc:
f.write("{:.6f},".format(i))
f.write("\n")
for i in Tacc:
f.write("{:.6f},".format(i))
f.write("\n")
# Sound
duration = 500 # millisecond
freq = 600 # Hz
winsound.Beep(freq, duration)
|
[
"tensorflow.global_variables_initializer",
"winsound.Beep",
"tensorflow.placeholder_with_default",
"tensorflow.Session",
"tensorflow.constant",
"time.time",
"tensorflow.set_random_seed",
"tensorflow.placeholder",
"tensorflow.sparse_placeholder"
] |
[((465, 489), 'tensorflow.set_random_seed', 'tf.set_random_seed', (['seed'], {}), '(seed)\n', (483, 489), True, 'import tensorflow as tf\n'), ((4150, 4162), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (4160, 4162), True, 'import tensorflow as tf\n'), ((4730, 4741), 'time.time', 'time.time', ([], {}), '()\n', (4739, 4741), False, 'import time\n'), ((6221, 6232), 'time.time', 'time.time', ([], {}), '()\n', (6230, 6232), False, 'import time\n'), ((7722, 7751), 'winsound.Beep', 'winsound.Beep', (['freq', 'duration'], {}), '(freq, duration)\n', (7735, 7751), False, 'import winsound\n'), ((3713, 3771), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '(None, y_train.shape[1])'}), '(tf.float32, shape=(None, y_train.shape[1]))\n', (3727, 3771), True, 'import tensorflow as tf\n'), ((3792, 3816), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32'], {}), '(tf.int32)\n', (3806, 3816), True, 'import tensorflow as tf\n'), ((3833, 3875), 'tensorflow.placeholder_with_default', 'tf.placeholder_with_default', (['(0.0)'], {'shape': '()'}), '(0.0, shape=())\n', (3860, 3875), True, 'import tensorflow as tf\n'), ((3904, 3928), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32'], {}), '(tf.int32)\n', (3918, 3928), True, 'import tensorflow as tf\n'), ((4005, 4029), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32'], {}), '(tf.int32)\n', (4019, 4029), True, 'import tensorflow as tf\n'), ((4288, 4299), 'time.time', 'time.time', ([], {}), '()\n', (4297, 4299), False, 'import time\n'), ((4568, 4601), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (4599, 4601), True, 'import tensorflow as tf\n'), ((4821, 4832), 'time.time', 'time.time', ([], {}), '()\n', (4830, 4832), False, 'import time\n'), ((6112, 6123), 'time.time', 'time.time', ([], {}), '()\n', (6121, 6123), False, 'import time\n'), ((6558, 6569), 'time.time', 'time.time', ([], {}), '()\n', (6567, 6569), False, 'import time\n'), ((3426, 3459), 'tensorflow.sparse_placeholder', 'tf.sparse_placeholder', (['tf.float32'], {}), '(tf.float32)\n', (3447, 3459), True, 'import tensorflow as tf\n'), ((3547, 3587), 'tensorflow.constant', 'tf.constant', (['features[2]'], {'dtype': 'tf.int64'}), '(features[2], dtype=tf.int64)\n', (3558, 3587), True, 'import tensorflow as tf\n'), ((3651, 3696), 'tensorflow.constant', 'tf.constant', (['motiffeatures[2]'], {'dtype': 'tf.int64'}), '(motiffeatures[2], dtype=tf.int64)\n', (3662, 3696), True, 'import tensorflow as tf\n'), ((4519, 4530), 'time.time', 'time.time', ([], {}), '()\n', (4528, 4530), False, 'import time\n'), ((5723, 5734), 'time.time', 'time.time', ([], {}), '()\n', (5732, 5734), False, 'import time\n'), ((5678, 5689), 'time.time', 'time.time', ([], {}), '()\n', (5687, 5689), False, 'import time\n')]
|
from datetime import datetime
from twick.tweet import Tweet
import twick.settings as settings
class Response(object):
def __init__(self, raw):
self.raw = raw
self.tweets = list(map(Tweet, raw["statuses"]))
self.metadata = dict(raw["search_metadata"])
self.timestamp = datetime.now()
def to_row(self):
return self.metadata
|
[
"datetime.datetime.now"
] |
[((305, 319), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (317, 319), False, 'from datetime import datetime\n')]
|
# Generated by Django 2.1 on 2019-05-15 12:47
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('article_tag', '0001_initial'),
('articles', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='readingstats',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='favoritearticle',
name='article',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='articles.Article'),
),
migrations.AddField(
model_name='favoritearticle',
name='favorited_by',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='article',
name='author',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, to_field='username'),
),
migrations.AddField(
model_name='article',
name='tagList',
field=models.ManyToManyField(related_name='articles', to='article_tag.ArticleTag'),
),
migrations.AlterUniqueTogether(
name='readingstats',
unique_together={('article', 'user')},
),
]
|
[
"django.db.models.ForeignKey",
"django.db.migrations.swappable_dependency",
"django.db.migrations.AlterUniqueTogether",
"django.db.models.ManyToManyField"
] |
[((245, 302), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (276, 302), False, 'from django.db import migrations, models\n'), ((1504, 1599), 'django.db.migrations.AlterUniqueTogether', 'migrations.AlterUniqueTogether', ([], {'name': '"""readingstats"""', 'unique_together': "{('article', 'user')}"}), "(name='readingstats', unique_together={(\n 'article', 'user')})\n", (1534, 1599), False, 'from django.db import migrations, models\n'), ((520, 616), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n', (537, 616), False, 'from django.db import migrations, models\n'), ((741, 831), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""articles.Article"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'articles.Article')\n", (758, 831), False, 'from django.db import migrations, models\n'), ((961, 1057), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n', (978, 1057), False, 'from django.db import migrations, models\n'), ((1173, 1290), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL', 'to_field': '"""username"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL, to_field='username')\n", (1190, 1290), False, 'from django.db import migrations, models\n'), ((1407, 1483), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': '"""articles"""', 'to': '"""article_tag.ArticleTag"""'}), "(related_name='articles', to='article_tag.ArticleTag')\n", (1429, 1483), False, 'from django.db import migrations, models\n')]
|
from django.db import models
from tagging.fields import TagField
class Perch(models.Model):
size = models.IntegerField()
smelly = models.BooleanField(default=True)
class Parrot(models.Model):
state = models.CharField(maxlength=50)
perch = models.ForeignKey(Perch, null=True)
def __str__(self):
return self.state
class Meta:
ordering = ['state']
class Link(models.Model):
name = models.CharField(maxlength=50)
def __str__(self):
return self.name
class Meta:
ordering = ['name']
class Article(models.Model):
name = models.CharField(maxlength=50)
def __str__(self):
return self.name
class Meta:
ordering = ['name']
class FormTest(models.Model):
tags = TagField()
|
[
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.IntegerField",
"tagging.fields.TagField"
] |
[((104, 125), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (123, 125), False, 'from django.db import models\n'), ((139, 172), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (158, 172), False, 'from django.db import models\n'), ((214, 244), 'django.db.models.CharField', 'models.CharField', ([], {'maxlength': '(50)'}), '(maxlength=50)\n', (230, 244), False, 'from django.db import models\n'), ((257, 292), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Perch'], {'null': '(True)'}), '(Perch, null=True)\n', (274, 292), False, 'from django.db import models\n'), ((427, 457), 'django.db.models.CharField', 'models.CharField', ([], {'maxlength': '(50)'}), '(maxlength=50)\n', (443, 457), False, 'from django.db import models\n'), ((593, 623), 'django.db.models.CharField', 'models.CharField', ([], {'maxlength': '(50)'}), '(maxlength=50)\n', (609, 623), False, 'from django.db import models\n'), ((760, 770), 'tagging.fields.TagField', 'TagField', ([], {}), '()\n', (768, 770), False, 'from tagging.fields import TagField\n')]
|
import bpy
from bpy_extras.mesh_utils import ngon_tessellate
from . import se3
def get_se3_mesh_form_file(filepath):
file_query = se3.ASCIIFileQuery(filepath)
version = file_query.get_num_value("SE_MESH")
mesh = se3.Mesh(version)
num_of_layers = file_query.get_num_value("LAYERS")
file_query.follows_block_begin_decl() #open layers block
processed_layers = 0
while processed_layers < num_of_layers:
layer_name = file_query.get_str_value("LAYER_NAME")
layer_index = file_query.get_long_value("LAYER_INDEX")
file_query.follows_block_begin_decl() #open layer block
layer = se3.Layer(layer_name, layer_index)
mesh.layers.append(layer)
num_of_maps = file_query.get_long_value("VERTEX_MAPS")
file_query.follows_block_begin_decl() #open vertex maps block
processed_maps = 0
num_of_texcoord_maps = 0
num_of_weight_maps = 0
num_of_morph_maps = 0
while processed_maps < num_of_maps:
map_type = file_query.get_map_type()
map_name = file_query.get_map_name()
            file_query.follows_block_begin_decl() #open vertex map block
            map_is_relative = False #default; only morph maps define a RELATIVE flag
            if map_type == se3.VERTEX_MAP_TYPE_MORPH:
type_index = num_of_morph_maps
get_map_elem = file_query.get_morph_elem
map_is_relative = file_query.get_bool_value("RELATIVE")
num_of_morph_maps += 1
elif map_type == se3.VERTEX_MAP_TYPE_TEXCOORD:
type_index = num_of_texcoord_maps
get_map_elem = file_query.get_texcoord_elem
num_of_texcoord_maps += 1
elif map_type == se3.VERTEX_MAP_TYPE_WEIGHT:
type_index = num_of_weight_maps
get_map_elem = file_query.get_weight_elem
num_of_weight_maps += 1
map = se3.VertexMap(map_type, map_name, map_is_relative)
map.type_index = type_index
num_of_map_elems = file_query.get_long_value("ELEMENTS")
file_query.follows_block_begin_decl() # open elements block
processed_elems = 0
while processed_elems < num_of_map_elems:
file_query.follows_block_begin_decl() #open element block
map.elements.append(get_map_elem())
file_query.follows_block_end_decl() #close element block
processed_elems += 1
file_query.follows_block_end_decl() #close elements block
processed_maps += 1
layer.vertex_maps_append(map)
file_query.follows_block_end_decl() #close vertex map block
file_query.follows_block_end_decl() #close vertex maps block
num_of_verts = file_query.get_long_value("VERTICES")
file_query.follows_block_begin_decl() #open vertices block
num_of_processed_vertices = 0
while num_of_processed_vertices < num_of_verts:
vertex = se3.Vertex()
morph_pointers = vertex.morph_pointers
weight_pointers = vertex.weight_pointers
uv_pointers = vertex.uv_pointers
file_query.follows_block_begin_decl() #open vertex block
num_of_pointers = file_query.get_num_of_values()
num_of_processed_pointers = 0
is_last_pointer = False
last_pointer_index = num_of_pointers - 1
while num_of_processed_pointers < num_of_pointers:
if num_of_processed_pointers == last_pointer_index:
is_last_pointer = True
vertex_data_pointer = file_query.get_vertex_data_pointer(is_last_pointer)
vertex_map_index = vertex_data_pointer[0]
vertex_map_type = layer.vertex_maps[vertex_map_index].type
if vertex_map_type == se3.VERTEX_MAP_TYPE_MORPH:
morph_pointers.append(vertex_data_pointer)
elif vertex_map_type == se3.VERTEX_MAP_TYPE_WEIGHT:
weight_pointers.append(vertex_data_pointer)
elif vertex_map_type == se3.VERTEX_MAP_TYPE_TEXCOORD:
uv_pointers.append(vertex_data_pointer)
num_of_processed_pointers += 1
layer.vertices.append(vertex)
file_query.follows_block_end_decl() #close vertex block
num_of_processed_vertices += 1
file_query.follows_block_end_decl() #close vertices block
num_of_polys = file_query.get_long_value("POLYGONS")
file_query.follows_block_begin_decl() #open polygons block
processed_polys = 0
while processed_polys < num_of_polys:
poly = []
file_query.follows_block_begin_decl() #open polygon block
num_of_values = file_query.get_num_of_values()
processed_values = 0
is_last_value = False
last_value_idx = num_of_values - 1
while processed_values < num_of_values:
if processed_values == last_value_idx:
is_last_value = True
poly.append(file_query.get_vert_idx(is_last_value))
processed_values += 1
file_query.follows_block_end_decl() #close polygon block
layer.polygons.append(tuple(poly))
processed_polys += 1
file_query.follows_block_end_decl() #close polygons block
num_of_poly_maps = file_query.get_long_value("POLYGON_MAPS")
file_query.follows_block_begin_decl() #open polygon maps block
processed_poly_maps = 0
while processed_poly_maps < num_of_poly_maps:
map_type = file_query.get_map_type(False)
map_name = file_query.get_map_name()
map_smoothing_angle = file_query.get_num_value("POLYGON_MAP_SMOOTHING_ANGLE")
polygon_count = file_query.get_long_value("POLYGONS_COUNT")
file_query.follows_block_begin_decl() #open polygon count block
poly_map = se3.PolygonMap(map_type, map_name, map_smoothing_angle)
processed_poly_idxs = 0
while processed_poly_idxs < polygon_count:
poly_map.polygons.append(file_query.get_poly_idx())
processed_poly_idxs += 1
file_query.follows_block_end_decl() #close polygon count block
processed_poly_maps += 1
layer.polygon_maps.append(poly_map)
layer.surface_maps.append(poly_map)
file_query.follows_block_end_decl() #close polygon maps block
file_query.follows_block_end_decl() #close layer block
processed_layers += 1
file_query.follows_block_end_decl() #close layers block
return mesh
def get_bl_face(se3_layer, se3_vertex_indices):
new_indices = []
num_of_texcoord_maps = len(se3_layer.texcoord_maps)
uvs_data = []
for i in range(num_of_texcoord_maps):
uvs_data.append([])
se3_texcoord_maps = se3_layer.texcoord_maps
for index in se3_vertex_indices:
se3_vertex = se3_layer.vertices[index]
new_indices.append(se3_vertex.basic_morph_pointer[1])
for uv_index, uv_pointer in enumerate(se3_vertex.uv_pointers):
elem = se3_texcoord_maps[uv_index].elements[uv_pointer[1]]
uvs_data[uv_index].append(tuple([elem[0], (-elem[1]) + 1]) )
return tuple([tuple(new_indices), uvs_data])
def get_bl_edges(se3_vertex_indices):
edges = []
num_of_indices = len(se3_vertex_indices)
last_index = num_of_indices - 1
for current_index in range(num_of_indices):
next_index = se3_vertex_indices[current_index + 1] if current_index != last_index else se3_vertex_indices[0]
edges.append((se3_vertex_indices[current_index], next_index))
return edges
def get_bl_fgons(vertices, ngon_face):
fgon_faces = []
    tessed_faces = ngon_tessellate(vertices, ngon_face) #split the n-gon into triangles
for tessed_face in tessed_faces:
fgon_face = []
for tessed_index in tessed_face:
fgon_face.append(ngon_face[tessed_index])
fgon_faces.append(tuple(fgon_face))
return fgon_faces
def edge_not_in(which_edge, edges):
for edge in edges:
edge_rev = (edge[0], edge[1])
if which_edge == edge or which_edge == edge_rev:
return False
return True
def get_bl_face_uv_data(real_face, bl_face):
num_of_uv_tex = len(real_face[1])
uvs_data = []
for i in range(num_of_uv_tex):
uvs_data.append([])
for vert_index in bl_face:
real_index = real_face[0].index(vert_index)
for idx, uv_data in enumerate(uvs_data):
try:
data = real_face[1][idx][real_index]
except:
data = (1,0)
uv_data.append(data)
return uvs_data
def read_file(operator, context):
from mathutils import (Matrix, Vector)
import math
filepath = operator.filepath
se3_mesh = get_se3_mesh_form_file(filepath)
for se3_layer in se3_mesh.layers:
fgon_edge_indices = []
vertices = se3_layer.vertex_maps[0].elements
edges = []
real_faces = []
se3_surface_map_indices = [0] * len(se3_layer.polygons)
material_indices = []
for se3_surface_map_index, se3_surface_map in enumerate(se3_layer.surface_maps):
for polygon_index in se3_surface_map.polygons:
se3_surface_map_indices[polygon_index] = se3_surface_map_index
edge_index_count = 0
for se3_polygon_index, se3_polygon in enumerate(se3_layer.polygons):
se3_num_of_vertex_indices = len(se3_polygon)
se3_is_tri_or_quad = se3_num_of_vertex_indices <= 4
se3_surface_map_index = se3_surface_map_indices[se3_polygon_index]
if se3_is_tri_or_quad:
material_indices.append(se3_surface_map_index)
face = get_bl_face(se3_layer, se3_polygon)
real_faces.append(face)
face_edges = get_bl_edges(face[0])
for face_edge in face_edges:
"""
if edge_not_in(face_edge, edges):
edges.append(face_edge)
edge_index_count += 1
"""
edges.append(face_edge)
edge_index_count += 1
else:
ngon_face = get_bl_face(se3_layer, se3_polygon)
bound_edges = get_bl_edges(ngon_face[0])
fgon_faces = get_bl_fgons(vertices, ngon_face[0])
for fgon_face in fgon_faces:
material_indices.append(se3_surface_map_index)
real_faces.append(tuple( [fgon_face, get_bl_face_uv_data(ngon_face, fgon_face)] ))
face_edges = get_bl_edges(fgon_face)
for face_edge in face_edges:
is_fgon_edge = edge_not_in(face_edge, bound_edges)
edges.append(face_edge)
if is_fgon_edge:
fgon_edge_indices.append(edge_index_count)
edge_index_count += 1
faces = [real_face[0] for real_face in real_faces]
mesh = bpy.data.meshes.new("Test mesh")
mesh.from_pydata(vertices, edges, faces)
for fgon_edge_index in fgon_edge_indices:
mesh.edges[fgon_edge_index].is_fgon = True
for uv_index, se3_texcoord_map in enumerate(se3_layer.texcoord_maps):
uv_tex = mesh.uv_layers.new(se3_texcoord_map.name)
uv_loop = mesh.uv_layers[0]
for face_index, tex_data in enumerate(uv_tex.data):
real_tex_face = real_faces[face_index][1][uv_index]
poly = mesh.polygons[face_index]
for j, k in enumerate(poly.loop_indices):
uv_loop.data[k].uv = real_tex_face[j]
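        # axis-conversion matrix: mirrors X and swaps the Y/Z axes, presumably to
        # map the SE3 file's coordinate system onto Blender's Z-up convention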
tranf_mat = Matrix(((-1.0, 0.0, 0.0, 0.0),
( 0.0, 0.0, 1.0, 0.0),
( 0.0, 1.0, 0.0, 0.0),
( 0.0, 0.0, 0.0, 1.0)))
obj = bpy.data.objects.new(se3_layer.name, mesh)
context.collection.objects.link(obj)
context.view_layer.objects.active = obj
obj.select_set(True)
se3_non_basic_morph_map = se3_layer.non_basic_morph_maps
se3_vertices = se3_layer.vertices
if se3_non_basic_morph_map:
obj.shape_key_add("position")
shape_keys = []
for se3_other_mmap in se3_non_basic_morph_map:
shape_keys.append(obj.shape_key_add(se3_other_mmap.name))
for se3_vertex in se3_vertices:
other_morph_pnts = se3_vertex.non_basic_morph_pointers
if other_morph_pnts:
for idx, other_mp in enumerate(other_morph_pnts):
type_idx = se3_layer.vertex_maps[other_mp[0]].type_index
se3_disp = se3_layer.vertex_maps[other_mp[0]].elements[other_mp[1]]
se3_vert = se3_vertex.basic_morph_pointer[1]
vert_data = se3_layer.vertex_maps[se3_vertex.basic_morph_pointer[0]].elements[se3_vertex.basic_morph_pointer[1]]
shape_keys[type_idx - 1].data[se3_vert].co = Vector(vert_data) + Vector(se3_disp)
se3_weight_maps = se3_layer.weight_maps
if se3_weight_maps:
vertex_groups = []
for se3_weight_map in se3_weight_maps:
vertex_groups.append(obj.vertex_groups.new(se3_weight_map.name))
for se3_vertex in se3_vertices:
se3_weight_pointers = se3_vertex.weight_pointers
if se3_weight_pointers:
for se3_weight_pointer in se3_weight_pointers:
vertex_index = se3_vertex.basic_morph_pointer[1]
se3_vertex_map_index = se3_weight_pointer[0]
se3_vertex_weight = se3_layer.vertex_maps[se3_vertex_map_index].elements[se3_weight_pointer[1]]
vertex_group_index = se3_layer.vertex_maps[se3_vertex_map_index].type_index
vertex_groups[vertex_group_index].add([vertex_index], se3_vertex_weight, 'REPLACE')
if se3_layer.surface_maps:
materials = []
for se3_surface_map in se3_layer.surface_maps:
material = bpy.data.materials.new(se3_surface_map.name)
materials.append(material)
bpy.ops.object.material_slot_add()
obj.material_slots[-1].material = material
for face in mesh.polygons:
face.material_index = material_indices[face.index]
obj.matrix_world = tranf_mat
bpy.ops.object.transform_apply(rotation=True)
context.view_layer.update()
return {'FINISHED'}
|
[
"bpy.ops.object.material_slot_add",
"bpy.ops.object.transform_apply",
"mathutils.Vector",
"bpy.data.materials.new",
"bpy.data.objects.new",
"bpy.data.meshes.new",
"mathutils.Matrix"
] |
[((11951, 11983), 'bpy.data.meshes.new', 'bpy.data.meshes.new', (['"""Test mesh"""'], {}), "('Test mesh')\n", (11970, 11983), False, 'import bpy\n'), ((12677, 12778), 'mathutils.Matrix', 'Matrix', (['((-1.0, 0.0, 0.0, 0.0), (0.0, 0.0, 1.0, 0.0), (0.0, 1.0, 0.0, 0.0), (0.0, \n 0.0, 0.0, 1.0))'], {}), '(((-1.0, 0.0, 0.0, 0.0), (0.0, 0.0, 1.0, 0.0), (0.0, 1.0, 0.0, 0.0),\n (0.0, 0.0, 0.0, 1.0)))\n', (12683, 12778), False, 'from mathutils import Matrix, Vector\n'), ((12885, 12927), 'bpy.data.objects.new', 'bpy.data.objects.new', (['se3_layer.name', 'mesh'], {}), '(se3_layer.name, mesh)\n', (12905, 12927), False, 'import bpy\n'), ((15743, 15788), 'bpy.ops.object.transform_apply', 'bpy.ops.object.transform_apply', ([], {'rotation': '(True)'}), '(rotation=True)\n', (15773, 15788), False, 'import bpy\n'), ((15372, 15416), 'bpy.data.materials.new', 'bpy.data.materials.new', (['se3_surface_map.name'], {}), '(se3_surface_map.name)\n', (15394, 15416), False, 'import bpy\n'), ((15476, 15510), 'bpy.ops.object.material_slot_add', 'bpy.ops.object.material_slot_add', ([], {}), '()\n', (15508, 15510), False, 'import bpy\n'), ((14179, 14196), 'mathutils.Vector', 'Vector', (['vert_data'], {}), '(vert_data)\n', (14185, 14196), False, 'from mathutils import Matrix, Vector\n'), ((14199, 14215), 'mathutils.Vector', 'Vector', (['se3_disp'], {}), '(se3_disp)\n', (14205, 14215), False, 'from mathutils import Matrix, Vector\n')]
|
import ply.lex as lex
import ply.yacc as yacc
import global_var
# token definitions
tokens = (
'STRING', 'NUMBER', 'FIELD', 'GRE', 'LOE', 'EQ', 'NEQ',
)
# literals
literals = ['+', '-', '*', '/', '>', '<']
# token rules
t_FIELD = r'[a-zA-Z0-9_\.][a-zA-Z0-9_\.]*'
t_STRING = r'\".*\"'
t_GRE = '>='
t_LOE = '<='
t_EQ = '=='
t_NEQ = '!='
t_ignore = " \t"
def t_NUMBER(t):
r'\d+'
t.value = int(t.value)
return t
# invalid token
def t_error(t):
    print("Unsupported character '%s'" % t.value[0])
global_var.parseerror = True
t.lexer.skip(1)
# grammar
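# the accepted input is a single comparison of two arithmetic expressions, e.g.
# a FIELD/NUMBER chain such as price * 2 >= limit, or a quoted STRING literal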
def p_inp(p):
'''inp : exp '>' exp
| exp '<' exp
| exp GRE exp
| exp LOE exp
| exp NEQ exp
| exp EQ exp'''
def p_exp(p):
'''exp : STRING
| x exp2'''
def p_exp2(p):
'''exp2 : '+' x exp2
| '-' x exp2
| '*' x exp2
| '/' x exp2
| '''
def p_x1(p):
'''x : NUMBER'''
def p_x2(p):
'''x : FIELD'''
    global_var.fields.append(p[1]) # add the field to the list of fields
# on a grammar error, print the error and set the global variable marking the failure
def p_error(p):
if p:
print("Syntakticka chyba pri '%s'" % p.value)
global_var.parseerror = True
else:
print("Syntakticka chyba pri EOF")
global_var.parseerror = True
# builds the lexer and the parser
# takes an expression; on a lexical or syntax error returns False, otherwise True
def myparse(expression):
lexer = lex.lex()
    parser = yacc.yacc(debug=False) # disable the generated debug file
    # lexical check
lexer.input(expression)
while True:
tok = lexer.token()
        if global_var.parseerror: # an error occurred
            global_var.parseerror = False # reset the flag
return False
if not tok:
break
    # syntax check
parser.parse(expression)
    if global_var.parseerror: # an error occurred
        global_var.parseerror = False # reset the flag
return False
return True
|
[
"global_var.fields.append",
"ply.yacc.yacc",
"ply.lex.lex"
] |
[((1017, 1047), 'global_var.fields.append', 'global_var.fields.append', (['p[1]'], {}), '(p[1])\n', (1041, 1047), False, 'import global_var\n'), ((1543, 1552), 'ply.lex.lex', 'lex.lex', ([], {}), '()\n', (1550, 1552), True, 'import ply.lex as lex\n'), ((1566, 1588), 'ply.yacc.yacc', 'yacc.yacc', ([], {'debug': '(False)'}), '(debug=False)\n', (1575, 1588), True, 'import ply.yacc as yacc\n')]
|
# Generated by Django 3.1 on 2020-08-29 02:18
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('interview', '0003_auto_20200828_2215'),
]
operations = [
migrations.AlterModelOptions(
name='candidate',
options={'permissions': [('export', 'Can export candidate list'), ('notify', 'notify interviewer for candidate review')], 'verbose_name': '应聘者', 'verbose_name_plural': '应聘者'},
),
]
|
[
"django.db.migrations.AlterModelOptions"
] |
[((227, 462), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""candidate"""', 'options': "{'permissions': [('export', 'Can export candidate list'), ('notify',\n 'notify interviewer for candidate review')], 'verbose_name': '应聘者',\n 'verbose_name_plural': '应聘者'}"}), "(name='candidate', options={'permissions': [(\n 'export', 'Can export candidate list'), ('notify',\n 'notify interviewer for candidate review')], 'verbose_name': '应聘者',\n 'verbose_name_plural': '应聘者'})\n", (255, 462), False, 'from django.db import migrations\n')]
|
"""Training script for the WaveNet network on the VCTK corpus.
This script trains a network with the WaveNet using data from the VCTK corpus,
which can be freely downloaded at the following site (~10 GB):
http://homepages.inf.ed.ac.uk/jyamagis/page3/page58/page58.html
"""
from __future__ import print_function
import argparse
import glob
import json
from dataclasses import dataclass
import numpy as np
import os
import sys
import time
from datetime import datetime
from pathlib import Path
import tensorflow as tf
from tensorflow.python.client import timeline
from tqdm.auto import tqdm
from wavenet_tf import WaveNetModel, optimizer_factory
from wavenet_tf.data_io import get_train_dataset
ROOT_DIR = Path(os.path.abspath(__file__)).parent.parent
STARTED_DATESTRING = "{0:%Y-%m-%dT%H-%M-%S}".format(datetime.now())
@dataclass
class TrainParams:
data_dir: str = str(ROOT_DIR / 'data' / 'fma_small_25_16000')
log_dir: str = str(ROOT_DIR / "logdir")
checkpoint_every: int = 1000
num_steps: int = int(1e5)
batch_size: int = 1
sample_size: int = 100000
learning_rate: float = 1e-4
max_to_keep: int = 5
store_metadata: bool = False
l2_regularization_strength: float = 0.0
max_checkpoints: int = 5
def train(args: TrainParams, net, optimizer):
# Load raw waveform from VCTK corpus.
with tf.name_scope('create_inputs'):
        # Allow silence trimming to be skipped by specifying a threshold near zero.
dataset, n_examples = get_train_dataset(os.path.join(args.data_dir, "*.npz"), args.sample_size)
epoch_size = n_examples // args.batch_size
dataset = dataset.repeat().batch(args.batch_size)
iterator = tf.compat.v1.data.make_initializable_iterator(dataset)
audio_batch = iterator.get_next()
if args.l2_regularization_strength == 0:
args.l2_regularization_strength = None
loss = net.loss(input_batch=audio_batch,
global_condition_batch=None,
l2_regularization_strength=args.l2_regularization_strength)
trainable = tf.compat.v1.trainable_variables()
optim = optimizer.minimize(loss, var_list=trainable)
# Set up logging for TensorBoard.
writer = tf.compat.v1.summary.FileWriter(args.log_dir)
writer.add_graph(tf.compat.v1.get_default_graph())
run_metadata = tf.compat.v1.RunMetadata()
summaries = tf.compat.v1.summary.merge_all()
# Set up session
sess = tf.compat.v1.Session(config=tf.compat.v1.ConfigProto(log_device_placement=False))
init = tf.compat.v1.global_variables_initializer()
sess.run(init)
sess.run(iterator.initializer)
# Saver for storing checkpoints of the model.
saver = tf.compat.v1.train.Saver(var_list=tf.compat.v1.trainable_variables(), max_to_keep=args.max_checkpoints)
try:
saved_global_step = load(saver, sess, args.log_dir)
if saved_global_step is None:
# The first training step will be saved_global_step + 1,
# therefore we put -1 here for new or overwritten trainings.
saved_global_step = -1
except:
print("Something went wrong while restoring checkpoint. "
"We will terminate training to avoid accidentally overwriting "
"the previous model.")
raise
step = None
last_saved_step = saved_global_step
try:
total = args.num_steps - saved_global_step - 1
pbar = tqdm(
total=total,
initial=saved_global_step + 1,
desc=f'train (epoch-size={epoch_size}, #epoch={total // epoch_size})')
for step in range(saved_global_step + 1, args.num_steps):
if args.store_metadata and step % 50 == 0:
# Slow run that stores extra information for debugging.
print('Storing metadata')
run_options = tf.RunOptions(
trace_level=tf.RunOptions.FULL_TRACE)
summary, loss_value, _ = sess.run(
[summaries, loss, optim],
options=run_options,
run_metadata=run_metadata)
writer.add_summary(summary, step)
writer.add_run_metadata(run_metadata,
'step_{:04d}'.format(step))
tl = timeline.Timeline(run_metadata.step_stats)
timeline_path = os.path.join(args.log_dir, 'timeline.trace')
with open(timeline_path, 'w') as f:
f.write(tl.generate_chrome_trace_format(show_memory=True))
else:
summary, loss_value, _ = sess.run([summaries, loss, optim])
writer.add_summary(summary, step)
pbar.update(1)
pbar.set_postfix(step=step, loss=loss_value, epoch=step // epoch_size)
if step > 0 and step % args.checkpoint_every == 0:
save(saver, sess, args.log_dir, step)
last_saved_step = step
except KeyboardInterrupt:
# Introduce a line break after ^C is displayed so save message
# is on its own line.
print()
finally:
if step > last_saved_step:
save(saver, sess, args.log_dir, step)
def save(saver, sess, logdir, step):
model_name = 'model.ckpt'
checkpoint_path = os.path.join(logdir, model_name)
print('Storing checkpoint to {} ...'.format(logdir), end="")
sys.stdout.flush()
if not os.path.exists(logdir):
os.makedirs(logdir)
saver.save(sess, checkpoint_path, global_step=step)
print(' Done.')
def load(saver, sess, logdir):
print("Trying to restore saved checkpoints from {} ...".format(logdir),
end="")
ckpt = tf.train.get_checkpoint_state(logdir)
if ckpt:
print(" Checkpoint found: {}".format(ckpt.model_checkpoint_path))
global_step = int(ckpt.model_checkpoint_path
.split('/')[-1]
.split('-')[-1])
print(" Global step was: {}".format(global_step))
print(" Restoring...", end="")
saver.restore(sess, ckpt.model_checkpoint_path)
print(" Done.")
return global_step
else:
print(" No checkpoint found.")
return None
def get_default_logdir(logdir_root):
logdir = os.path.join(logdir_root, 'train', STARTED_DATESTRING)
return logdir
if __name__ == '__main__':
args = TrainParams()
with open('./data/tf_wavenet_params.json', 'r') as f:
wavenet_params = json.load(f)
model = WaveNetModel(
batch_size=args.batch_size,
dilations=wavenet_params["dilations"],
filter_width=wavenet_params["filter_width"],
residual_channels=wavenet_params["residual_channels"],
dilation_channels=wavenet_params["dilation_channels"],
skip_channels=wavenet_params["skip_channels"],
quantization_channels=wavenet_params["quantization_channels"],
use_biases=wavenet_params["use_biases"],
scalar_input=wavenet_params["scalar_input"],
initial_filter_width=wavenet_params["initial_filter_width"],
histograms=False,
global_condition_channels=None,
global_condition_cardinality=None)
optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate=1e-4,
epsilon=1e-4)
train(args, model, optimizer)
|
[
"wavenet_tf.WaveNetModel",
"sys.stdout.flush",
"tensorflow.RunOptions",
"os.path.join",
"tensorflow.compat.v1.global_variables_initializer",
"os.path.abspath",
"os.path.exists",
"tensorflow.compat.v1.RunMetadata",
"tensorflow.name_scope",
"datetime.datetime.now",
"tensorflow.train.get_checkpoint_state",
"tensorflow.compat.v1.get_default_graph",
"tqdm.auto.tqdm",
"tensorflow.compat.v1.summary.FileWriter",
"tensorflow.compat.v1.ConfigProto",
"tensorflow.compat.v1.trainable_variables",
"json.load",
"os.makedirs",
"tensorflow.compat.v1.data.make_initializable_iterator",
"tensorflow.python.client.timeline.Timeline",
"tensorflow.compat.v1.train.AdamOptimizer",
"tensorflow.compat.v1.summary.merge_all"
] |
[((809, 823), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (821, 823), False, 'from datetime import datetime\n'), ((2069, 2103), 'tensorflow.compat.v1.trainable_variables', 'tf.compat.v1.trainable_variables', ([], {}), '()\n', (2101, 2103), True, 'import tensorflow as tf\n'), ((2213, 2258), 'tensorflow.compat.v1.summary.FileWriter', 'tf.compat.v1.summary.FileWriter', (['args.log_dir'], {}), '(args.log_dir)\n', (2244, 2258), True, 'import tensorflow as tf\n'), ((2333, 2359), 'tensorflow.compat.v1.RunMetadata', 'tf.compat.v1.RunMetadata', ([], {}), '()\n', (2357, 2359), True, 'import tensorflow as tf\n'), ((2376, 2408), 'tensorflow.compat.v1.summary.merge_all', 'tf.compat.v1.summary.merge_all', ([], {}), '()\n', (2406, 2408), True, 'import tensorflow as tf\n'), ((2535, 2578), 'tensorflow.compat.v1.global_variables_initializer', 'tf.compat.v1.global_variables_initializer', ([], {}), '()\n', (2576, 2578), True, 'import tensorflow as tf\n'), ((5301, 5333), 'os.path.join', 'os.path.join', (['logdir', 'model_name'], {}), '(logdir, model_name)\n', (5313, 5333), False, 'import os\n'), ((5403, 5421), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (5419, 5421), False, 'import sys\n'), ((5702, 5739), 'tensorflow.train.get_checkpoint_state', 'tf.train.get_checkpoint_state', (['logdir'], {}), '(logdir)\n', (5731, 5739), True, 'import tensorflow as tf\n'), ((6293, 6347), 'os.path.join', 'os.path.join', (['logdir_root', '"""train"""', 'STARTED_DATESTRING'], {}), "(logdir_root, 'train', STARTED_DATESTRING)\n", (6305, 6347), False, 'import os\n'), ((6529, 7141), 'wavenet_tf.WaveNetModel', 'WaveNetModel', ([], {'batch_size': 'args.batch_size', 'dilations': "wavenet_params['dilations']", 'filter_width': "wavenet_params['filter_width']", 'residual_channels': "wavenet_params['residual_channels']", 'dilation_channels': "wavenet_params['dilation_channels']", 'skip_channels': "wavenet_params['skip_channels']", 'quantization_channels': "wavenet_params['quantization_channels']", 'use_biases': "wavenet_params['use_biases']", 'scalar_input': "wavenet_params['scalar_input']", 'initial_filter_width': "wavenet_params['initial_filter_width']", 'histograms': '(False)', 'global_condition_channels': 'None', 'global_condition_cardinality': 'None'}), "(batch_size=args.batch_size, dilations=wavenet_params[\n 'dilations'], filter_width=wavenet_params['filter_width'],\n residual_channels=wavenet_params['residual_channels'],\n dilation_channels=wavenet_params['dilation_channels'], skip_channels=\n wavenet_params['skip_channels'], quantization_channels=wavenet_params[\n 'quantization_channels'], use_biases=wavenet_params['use_biases'],\n scalar_input=wavenet_params['scalar_input'], initial_filter_width=\n wavenet_params['initial_filter_width'], histograms=False,\n global_condition_channels=None, global_condition_cardinality=None)\n", (6541, 7141), False, 'from wavenet_tf import WaveNetModel, optimizer_factory\n'), ((7228, 7298), 'tensorflow.compat.v1.train.AdamOptimizer', 'tf.compat.v1.train.AdamOptimizer', ([], {'learning_rate': '(0.0001)', 'epsilon': '(0.0001)'}), '(learning_rate=0.0001, epsilon=0.0001)\n', (7260, 7298), True, 'import tensorflow as tf\n'), ((1346, 1376), 'tensorflow.name_scope', 'tf.name_scope', (['"""create_inputs"""'], {}), "('create_inputs')\n", (1359, 1376), True, 'import tensorflow as tf\n'), ((1689, 1743), 'tensorflow.compat.v1.data.make_initializable_iterator', 'tf.compat.v1.data.make_initializable_iterator', (['dataset'], {}), '(dataset)\n', (1734, 1743), True, 'import tensorflow 
as tf\n'), ((2280, 2312), 'tensorflow.compat.v1.get_default_graph', 'tf.compat.v1.get_default_graph', ([], {}), '()\n', (2310, 2312), True, 'import tensorflow as tf\n'), ((3428, 3552), 'tqdm.auto.tqdm', 'tqdm', ([], {'total': 'total', 'initial': '(saved_global_step + 1)', 'desc': 'f"""train (epoch-size={epoch_size}, #epoch={total // epoch_size})"""'}), "(total=total, initial=saved_global_step + 1, desc=\n f'train (epoch-size={epoch_size}, #epoch={total // epoch_size})')\n", (3432, 3552), False, 'from tqdm.auto import tqdm\n'), ((5434, 5456), 'os.path.exists', 'os.path.exists', (['logdir'], {}), '(logdir)\n', (5448, 5456), False, 'import os\n'), ((5466, 5485), 'os.makedirs', 'os.makedirs', (['logdir'], {}), '(logdir)\n', (5477, 5485), False, 'import os\n'), ((6503, 6515), 'json.load', 'json.load', (['f'], {}), '(f)\n', (6512, 6515), False, 'import json\n'), ((716, 741), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (731, 741), False, 'import os\n'), ((1504, 1540), 'os.path.join', 'os.path.join', (['args.data_dir', '"""*.npz"""'], {}), "(args.data_dir, '*.npz')\n", (1516, 1540), False, 'import os\n'), ((2470, 2522), 'tensorflow.compat.v1.ConfigProto', 'tf.compat.v1.ConfigProto', ([], {'log_device_placement': '(False)'}), '(log_device_placement=False)\n', (2494, 2522), True, 'import tensorflow as tf\n'), ((2730, 2764), 'tensorflow.compat.v1.trainable_variables', 'tf.compat.v1.trainable_variables', ([], {}), '()\n', (2762, 2764), True, 'import tensorflow as tf\n'), ((3851, 3902), 'tensorflow.RunOptions', 'tf.RunOptions', ([], {'trace_level': 'tf.RunOptions.FULL_TRACE'}), '(trace_level=tf.RunOptions.FULL_TRACE)\n', (3864, 3902), True, 'import tensorflow as tf\n'), ((4302, 4344), 'tensorflow.python.client.timeline.Timeline', 'timeline.Timeline', (['run_metadata.step_stats'], {}), '(run_metadata.step_stats)\n', (4319, 4344), False, 'from tensorflow.python.client import timeline\n'), ((4377, 4421), 'os.path.join', 'os.path.join', (['args.log_dir', '"""timeline.trace"""'], {}), "(args.log_dir, 'timeline.trace')\n", (4389, 4421), False, 'import os\n')]
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
print(setuptools.find_packages())
setuptools.setup(
name="pymortar",
version="0.1.4",
author="<NAME>",
author_email="<EMAIL>",
description="Python3 Mortar",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/mortar-frontend",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
],
)
|
[
"setuptools.find_packages"
] |
[((94, 120), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (118, 120), False, 'import setuptools\n'), ((414, 440), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (438, 440), False, 'import setuptools\n')]
|
import cv2
import numpy as np
def detect(img):
# finds and fills the located robots
img = cv2.convertScaleAbs(img, 1, 1.5)
structure = np.ones((3, 3))
canny = np.copy(cv2.Canny(img, 20, 120))
dilated = cv2.dilate(canny, structure)
contours, hier = cv2.findContours(dilated, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
filled = cv2.drawContours(np.zeros(img.shape, dtype=np.uint8), contours, -1, 1, -1, 0, hier, 1)
return np.copy(filled)
def get_large_contours(detect):
# take a detection mask, and contour information add circles
contours, hier = cv2.findContours(detect, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
large_contours = []
contour_area_minimum = 2000
for c in contours:
if cv2.contourArea(c) > contour_area_minimum:
large_contours.append(c)
return large_contours
def get_robot_angle(contour, center):
contour = np.squeeze(np.copy(contour))
contour -= center
theta = np.arctan2(contour[:, 1], contour[:, 0])
# rho = np.sqrt(contour[:, 0] ** 2 + contour[:, 1] ** 2)
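    # histogram the point angles around the centroid: bins left empty mark the
    # angular gap in the contour, and the mean of those empty-bin centres is
    # returned as the robot's orientation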
val, bin_edges = np.histogram(theta, bins=50, range=[-np.pi, np.pi])
bin_centers = bin_edges[:-1] + np.diff(bin_edges) / 2
return np.nanmean(np.where(val == 0, bin_centers, np.nan))
def get_robots(large_contours, detect, objective):
# get memory
robot_control_mask = np.zeros(detect.shape)
large_contour_image = cv2.drawContours(np.copy(robot_control_mask), large_contours, -1, 1, -1)
# probably needs more adjustment in the future, so will make a dict for now
objective_calibration_dict = {'2x': 4,
'4x': 2,
'10x': 1,
'20x': 1,
'40x': 1}
robot_angles = []
contours_towards_center = []
contour_range_border_limit = 100 * objective_calibration_dict[objective]
contours_in_limits = []
for contour in large_contours:
xs = np.squeeze(contour)[:, 0]
ys = np.squeeze(contour)[:, 1]
# check that our contours are within acceptable limits, draw their circle if they are
if np.all(xs > contour_range_border_limit) and np.all(
xs < large_contour_image.shape[0] - contour_range_border_limit):
if np.all(ys > contour_range_border_limit) and np.all(
ys < large_contour_image.shape[0] - contour_range_border_limit):
contours_in_limits.append(contour)
M = cv2.moments(contour)
cx = int(M["m10"] / M["m00"])
cy = int(M["m01"] / M["m00"])
contours_towards_center.append(contour)
angle = get_robot_angle(contour, (cx, cy))
robot_angles.append(angle)
return contours_towards_center, robot_angles
def get_robot_control(img, objective):
detected = detect(img)
large_contours = get_large_contours(detected)
robots, robot_angles = get_robots(large_contours,
detected,
objective)
return robots, robot_angles
|
[
"cv2.Canny",
"cv2.contourArea",
"numpy.arctan2",
"numpy.copy",
"cv2.dilate",
"cv2.moments",
"numpy.zeros",
"numpy.ones",
"numpy.all",
"numpy.histogram",
"numpy.where",
"numpy.diff",
"cv2.convertScaleAbs",
"numpy.squeeze",
"cv2.findContours"
] |
[((106, 138), 'cv2.convertScaleAbs', 'cv2.convertScaleAbs', (['img', '(1)', '(1.5)'], {}), '(img, 1, 1.5)\n', (125, 138), False, 'import cv2\n'), ((156, 171), 'numpy.ones', 'np.ones', (['(3, 3)'], {}), '((3, 3))\n', (163, 171), True, 'import numpy as np\n'), ((233, 261), 'cv2.dilate', 'cv2.dilate', (['canny', 'structure'], {}), '(canny, structure)\n', (243, 261), False, 'import cv2\n'), ((284, 349), 'cv2.findContours', 'cv2.findContours', (['dilated', 'cv2.RETR_TREE', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(dilated, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n', (300, 349), False, 'import cv2\n'), ((463, 478), 'numpy.copy', 'np.copy', (['filled'], {}), '(filled)\n', (470, 478), True, 'import numpy as np\n'), ((604, 668), 'cv2.findContours', 'cv2.findContours', (['detect', 'cv2.RETR_TREE', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(detect, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n', (620, 668), False, 'import cv2\n'), ((996, 1036), 'numpy.arctan2', 'np.arctan2', (['contour[:, 1]', 'contour[:, 0]'], {}), '(contour[:, 1], contour[:, 0])\n', (1006, 1036), True, 'import numpy as np\n'), ((1121, 1172), 'numpy.histogram', 'np.histogram', (['theta'], {'bins': '(50)', 'range': '[-np.pi, np.pi]'}), '(theta, bins=50, range=[-np.pi, np.pi])\n', (1133, 1172), True, 'import numpy as np\n'), ((1399, 1421), 'numpy.zeros', 'np.zeros', (['detect.shape'], {}), '(detect.shape)\n', (1407, 1421), True, 'import numpy as np\n'), ((193, 216), 'cv2.Canny', 'cv2.Canny', (['img', '(20)', '(120)'], {}), '(img, 20, 120)\n', (202, 216), False, 'import cv2\n'), ((381, 416), 'numpy.zeros', 'np.zeros', (['img.shape'], {'dtype': 'np.uint8'}), '(img.shape, dtype=np.uint8)\n', (389, 416), True, 'import numpy as np\n'), ((942, 958), 'numpy.copy', 'np.copy', (['contour'], {}), '(contour)\n', (949, 958), True, 'import numpy as np\n'), ((1257, 1296), 'numpy.where', 'np.where', (['(val == 0)', 'bin_centers', 'np.nan'], {}), '(val == 0, bin_centers, np.nan)\n', (1265, 1296), True, 'import numpy as np\n'), ((1466, 1493), 'numpy.copy', 'np.copy', (['robot_control_mask'], {}), '(robot_control_mask)\n', (1473, 1493), True, 'import numpy as np\n'), ((763, 781), 'cv2.contourArea', 'cv2.contourArea', (['c'], {}), '(c)\n', (778, 781), False, 'import cv2\n'), ((1209, 1227), 'numpy.diff', 'np.diff', (['bin_edges'], {}), '(bin_edges)\n', (1216, 1227), True, 'import numpy as np\n'), ((2047, 2066), 'numpy.squeeze', 'np.squeeze', (['contour'], {}), '(contour)\n', (2057, 2066), True, 'import numpy as np\n'), ((2087, 2106), 'numpy.squeeze', 'np.squeeze', (['contour'], {}), '(contour)\n', (2097, 2106), True, 'import numpy as np\n'), ((2220, 2259), 'numpy.all', 'np.all', (['(xs > contour_range_border_limit)'], {}), '(xs > contour_range_border_limit)\n', (2226, 2259), True, 'import numpy as np\n'), ((2264, 2334), 'numpy.all', 'np.all', (['(xs < large_contour_image.shape[0] - contour_range_border_limit)'], {}), '(xs < large_contour_image.shape[0] - contour_range_border_limit)\n', (2270, 2334), True, 'import numpy as np\n'), ((2370, 2409), 'numpy.all', 'np.all', (['(ys > contour_range_border_limit)'], {}), '(ys > contour_range_border_limit)\n', (2376, 2409), True, 'import numpy as np\n'), ((2414, 2484), 'numpy.all', 'np.all', (['(ys < large_contour_image.shape[0] - contour_range_border_limit)'], {}), '(ys < large_contour_image.shape[0] - contour_range_border_limit)\n', (2420, 2484), True, 'import numpy as np\n'), ((2581, 2601), 'cv2.moments', 'cv2.moments', (['contour'], {}), '(contour)\n', (2592, 2601), False, 'import cv2\n')]
|
import socket
import re
import concurrent.futures
from pprint import pformat
from .unrealserver import UnrealServer
# Setup Logger
import logging
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class UnrealMasterServer(object):
def __init__(
self,
hostname,
port,
**kwargs,
):
"""
UnrealMasterServer class init statement
Args:
hostname (str): Resolvable DNS name or IP address for the
                Master Server you wish to poll.
port (int): The port number the master server is
listening on
"""
self.hostname = hostname
self.port = port
self.servers = []
if 'logger' not in kwargs:
self.logger = logger
else:
self.logger = kwargs['logger']
if 'timeout' not in kwargs:
self.timeout = 5
else:
self.timeout = kwargs['timeout']
self.logger.debug(f'Passed kwargs: {kwargs}')
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(self.timeout)
self.server = (self.hostname, self.port)
def get_servers(self):
"""
        Poll the Master Server for a client list and set the class
attribute of 'servers' to a list of Server objects.
Returns: None
"""
# The Quake style queries to end clients (whether they be Master Servers
# or server clients), need a header of 4 \xFF bytes
command = b"\\list\\gamename\\ut\\final\\"
self.logger.debug(
f'Sending command \'\\{command}\\\' to {self.hostname}:{self.port}'
)
self.sock.connect(self.server)
self.sock.sendto(command, self.server)
fullmsg = ''
try:
while True:
msg = self.sock.recv(4096)
if len(msg) <= 0:
break
fullmsg += msg.decode('utf-8')
except socket.timeout as e:
raise e
self.logger.debug(f'Raw data received:\n\n{fullmsg}')
data = fullmsg.split('\\')[5:]
for item in data[1::2][:-1]:
self.servers.append(
UnrealServer(item.split(':')[0], int(item.split(':')[1]), logger=self.logger)
)
self.logger.info(
f'Found {len(self.servers)} servers running.'
)
def search_servers(self, query):
"""
        Search for a given query in any of the values of each server's
        info dict.
Args:
query (str): the search query to look for in the dictionary keys
Returns: A list of Servers
"""
if not self.servers:
return
return_list = []
self.logger.info(
f'Searching {len(self.servers)} servers for keyword \'{query}\'.'
)
for server in self.servers:
self.logger.info(f"Scanning {server} for keyword.")
self.logger.debug(f"{pformat(server.info)}")
info_results = [
key for key, val in server.info.items()
if re.search(
query,
str(val),
re.IGNORECASE
)
]
if info_results:
return_list.append(server)
return return_list
def poll_now(self):
"""
Concurrently poll all servers captured from the Master Server and
capture info and status headers.
Returns: None
"""
def get_server_info(server):
server.poll_server()
with concurrent.futures.ThreadPoolExecutor() as executor:
executor.map(get_server_info, self.servers)
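# --- Editor's hedged usage sketch (illustrative only; not part of the original module) ---
# Shows how calling code might drive the class above. The hostname below is a
# placeholder and 28900 is only an assumed master-server port; adjust both for real use.
def _example_usage():
    master = UnrealMasterServer('master.example.invalid', 28900, timeout=5)
    master.get_servers()                 # download and parse the \list\ response
    master.poll_now()                    # poll every discovered server concurrently
    return master.search_servers('CTF')  # servers whose info mentions 'CTF'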
|
[
"socket.socket",
"pprint.pformat",
"logging.getLogger",
"logging.NullHandler"
] |
[((156, 183), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (173, 183), False, 'import logging\n'), ((202, 223), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (221, 223), False, 'import logging\n'), ((1087, 1136), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (1100, 1136), False, 'import socket\n'), ((3108, 3128), 'pprint.pformat', 'pformat', (['server.info'], {}), '(server.info)\n', (3115, 3128), False, 'from pprint import pformat\n')]
|
from flask import render_template, flash, redirect, url_for, request
from app import app, db
from flask_sqlalchemy import sqlalchemy
from app.forms import RegisterForm, LoginForm
from app.models import User
from flask_login import current_user, login_user, logout_user, login_required
@app.before_first_request
def initDB(*args, **kwargs):
db.create_all()
@app.route('/', methods=['GET'])
@app.route('/index', methods=['GET'])
def index():
return render_template('base.html')
@app.route('/register', methods=['GET', 'POST'])
def register():
form = RegisterForm()
if form.validate_on_submit():
acc = User(username=form.username.data, email=form.email.data, firstname=form.firstname.data,
lastname=form.lastname.data)
acc.get_password(form.password2.data)
db.session.add(acc)
db.session.commit()
return redirect(url_for('index'))
return render_template('user_registration.html', form=form)
@login_required
@app.route('/logout')
def logout():
logout_user()
return redirect(url_for('index'))
@app.route('/login', methods=['GET', 'POST'])
def login():
if current_user.is_authenticated:
return redirect(url_for('index'))
form = LoginForm()
if form.validate_on_submit():
student = User.query.filter_by(username=form.username.data).first()
if student is None or not student.check_password(form.password.data):
flash('Not a username or incorrect password!')
return redirect(url_for('login'))
login_user(student, remember=form.rememberme.data)
return redirect(url_for('index'))
return render_template('login.html', title='Login Page', form=form)
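# --- Editor's hedged sketch (illustrative; not the project's actual app/models.py) ---
# The views above assume the User model exposes get_password() to hash and store a
# password and check_password() to verify one. With Werkzeug (installed alongside
# Flask) those two methods might look roughly like this; 'password_hash' is an
# assumed column name, not taken from the original project:
#
#     from werkzeug.security import generate_password_hash, check_password_hash
#
#     def get_password(self, password):
#         self.password_hash = generate_password_hash(password)
#
#     def check_password(self, password):
#         return check_password_hash(self.password_hash, password)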
|
[
"app.models.User",
"app.app.route",
"flask.flash",
"flask_login.login_user",
"flask_login.logout_user",
"flask.url_for",
"app.models.User.query.filter_by",
"app.forms.LoginForm",
"app.db.session.commit",
"flask.render_template",
"app.forms.RegisterForm",
"app.db.create_all",
"app.db.session.add"
] |
[((363, 394), 'app.app.route', 'app.route', (['"""/"""'], {'methods': "['GET']"}), "('/', methods=['GET'])\n", (372, 394), False, 'from app import app, db\n'), ((396, 432), 'app.app.route', 'app.route', (['"""/index"""'], {'methods': "['GET']"}), "('/index', methods=['GET'])\n", (405, 432), False, 'from app import app, db\n'), ((488, 535), 'app.app.route', 'app.route', (['"""/register"""'], {'methods': "['GET', 'POST']"}), "('/register', methods=['GET', 'POST'])\n", (497, 535), False, 'from app import app, db\n'), ((981, 1001), 'app.app.route', 'app.route', (['"""/logout"""'], {}), "('/logout')\n", (990, 1001), False, 'from app import app, db\n'), ((1074, 1118), 'app.app.route', 'app.route', (['"""/login"""'], {'methods': "['GET', 'POST']"}), "('/login', methods=['GET', 'POST'])\n", (1083, 1118), False, 'from app import app, db\n'), ((345, 360), 'app.db.create_all', 'db.create_all', ([], {}), '()\n', (358, 360), False, 'from app import app, db\n'), ((457, 485), 'flask.render_template', 'render_template', (['"""base.html"""'], {}), "('base.html')\n", (472, 485), False, 'from flask import render_template, flash, redirect, url_for, request\n'), ((563, 577), 'app.forms.RegisterForm', 'RegisterForm', ([], {}), '()\n', (575, 577), False, 'from app.forms import RegisterForm, LoginForm\n'), ((910, 962), 'flask.render_template', 'render_template', (['"""user_registration.html"""'], {'form': 'form'}), "('user_registration.html', form=form)\n", (925, 962), False, 'from flask import render_template, flash, redirect, url_for, request\n'), ((1020, 1033), 'flask_login.logout_user', 'logout_user', ([], {}), '()\n', (1031, 1033), False, 'from flask_login import current_user, login_user, logout_user, login_required\n'), ((1223, 1234), 'app.forms.LoginForm', 'LoginForm', ([], {}), '()\n', (1232, 1234), False, 'from app.forms import RegisterForm, LoginForm\n'), ((1640, 1700), 'flask.render_template', 'render_template', (['"""login.html"""'], {'title': '"""Login Page"""', 'form': 'form'}), "('login.html', title='Login Page', form=form)\n", (1655, 1700), False, 'from flask import render_template, flash, redirect, url_for, request\n'), ((626, 747), 'app.models.User', 'User', ([], {'username': 'form.username.data', 'email': 'form.email.data', 'firstname': 'form.firstname.data', 'lastname': 'form.lastname.data'}), '(username=form.username.data, email=form.email.data, firstname=form.\n firstname.data, lastname=form.lastname.data)\n', (630, 747), False, 'from app.models import User\n'), ((809, 828), 'app.db.session.add', 'db.session.add', (['acc'], {}), '(acc)\n', (823, 828), False, 'from app import app, db\n'), ((837, 856), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (854, 856), False, 'from app import app, db\n'), ((1054, 1070), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (1061, 1070), False, 'from flask import render_template, flash, redirect, url_for, request\n'), ((1536, 1586), 'flask_login.login_user', 'login_user', (['student'], {'remember': 'form.rememberme.data'}), '(student, remember=form.rememberme.data)\n', (1546, 1586), False, 'from flask_login import current_user, login_user, logout_user, login_required\n'), ((881, 897), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (888, 897), False, 'from flask import render_template, flash, redirect, url_for, request\n'), ((1194, 1210), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (1201, 1210), False, 'from flask import render_template, flash, redirect, url_for, request\n'), ((1435, 1481), 'flask.flash', 'flash', (['"""Not a username or incorrect password!"""'], {}), "('Not a username or incorrect password!')\n", (1440, 1481), False, 'from flask import render_template, flash, redirect, url_for, request\n'), ((1611, 1627), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (1618, 1627), False, 'from flask import render_template, flash, redirect, url_for, request\n'), ((1287, 1336), 'app.models.User.query.filter_by', 'User.query.filter_by', ([], {'username': 'form.username.data'}), '(username=form.username.data)\n', (1307, 1336), False, 'from app.models import User\n'), ((1510, 1526), 'flask.url_for', 'url_for', (['"""login"""'], {}), "('login')\n", (1517, 1526), False, 'from flask import render_template, flash, redirect, url_for, request\n')]
|
import cv2
import numpy as np
cap = cv2.VideoCapture(0)
while True:
_, frame = cap.read()
hsv_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
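    # OpenCV stores 8-bit HSV with H in [0, 179] and S, V in [0, 255]; each low/high
    # pair below is an inclusive bound passed to cv2.inRange in that colour space.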
# Red color
low_red = np.array([161, 155, 84])
high_red = np.array([179, 255, 255])
red_mask = cv2.inRange(hsv_frame, low_red, high_red)
red = cv2.bitwise_and(frame, frame, mask=red_mask)
# Blue color
low_blue = np.array([94, 80, 2])
high_blue = np.array([126, 255, 255])
blue_mask = cv2.inRange(hsv_frame, low_blue, high_blue)
blue = cv2.bitwise_and(frame, frame, mask=blue_mask)
# Green color
low_green = np.array([25, 52, 72])
high_green = np.array([102, 255, 255])
green_mask = cv2.inRange(hsv_frame, low_green, high_green)
green = cv2.bitwise_and(frame, frame, mask=green_mask)
#yellow
low_yellow = np.array([21, 39, 64])
high_yellow = np.array([40, 255, 255])
yellow_mask = cv2.inRange(hsv_frame, low_yellow, high_yellow)
yellow = cv2.bitwise_and(frame, frame, mask=yellow_mask)
# Every color except white
low = np.array([0, 42, 0])
high = np.array([179, 255, 255])
mask = cv2.inRange(hsv_frame, low, high)
result = cv2.bitwise_and(frame, frame, mask=mask)
#cv2.imshow("Frame", frame)
#cv2.imshow("Red", red)
#cv2.imshow("Blue", blue)
cv2.imshow("Green", green)
cv2.imshow("Yellow", yellow)
#cv2.imshow("Result", result)
key = cv2.waitKey(1)
    if key == 27:
        break
# Release the webcam and close the preview windows once the loop exits.
cap.release()
cv2.destroyAllWindows()
|
[
"cv2.bitwise_and",
"cv2.cvtColor",
"cv2.waitKey",
"cv2.VideoCapture",
"numpy.array",
"cv2.imshow",
"cv2.inRange"
] |
[((37, 56), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (53, 56), False, 'import cv2\n'), ((112, 150), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2HSV'], {}), '(frame, cv2.COLOR_BGR2HSV)\n', (124, 150), False, 'import cv2\n'), ((182, 206), 'numpy.array', 'np.array', (['[161, 155, 84]'], {}), '([161, 155, 84])\n', (190, 206), True, 'import numpy as np\n'), ((222, 247), 'numpy.array', 'np.array', (['[179, 255, 255]'], {}), '([179, 255, 255])\n', (230, 247), True, 'import numpy as np\n'), ((263, 304), 'cv2.inRange', 'cv2.inRange', (['hsv_frame', 'low_red', 'high_red'], {}), '(hsv_frame, low_red, high_red)\n', (274, 304), False, 'import cv2\n'), ((315, 359), 'cv2.bitwise_and', 'cv2.bitwise_and', (['frame', 'frame'], {'mask': 'red_mask'}), '(frame, frame, mask=red_mask)\n', (330, 359), False, 'import cv2\n'), ((393, 414), 'numpy.array', 'np.array', (['[94, 80, 2]'], {}), '([94, 80, 2])\n', (401, 414), True, 'import numpy as np\n'), ((431, 456), 'numpy.array', 'np.array', (['[126, 255, 255]'], {}), '([126, 255, 255])\n', (439, 456), True, 'import numpy as np\n'), ((473, 516), 'cv2.inRange', 'cv2.inRange', (['hsv_frame', 'low_blue', 'high_blue'], {}), '(hsv_frame, low_blue, high_blue)\n', (484, 516), False, 'import cv2\n'), ((528, 573), 'cv2.bitwise_and', 'cv2.bitwise_and', (['frame', 'frame'], {'mask': 'blue_mask'}), '(frame, frame, mask=blue_mask)\n', (543, 573), False, 'import cv2\n'), ((609, 631), 'numpy.array', 'np.array', (['[25, 52, 72]'], {}), '([25, 52, 72])\n', (617, 631), True, 'import numpy as np\n'), ((649, 674), 'numpy.array', 'np.array', (['[102, 255, 255]'], {}), '([102, 255, 255])\n', (657, 674), True, 'import numpy as np\n'), ((692, 737), 'cv2.inRange', 'cv2.inRange', (['hsv_frame', 'low_green', 'high_green'], {}), '(hsv_frame, low_green, high_green)\n', (703, 737), False, 'import cv2\n'), ((750, 796), 'cv2.bitwise_and', 'cv2.bitwise_and', (['frame', 'frame'], {'mask': 'green_mask'}), '(frame, frame, mask=green_mask)\n', (765, 796), False, 'import cv2\n'), ((827, 849), 'numpy.array', 'np.array', (['[21, 39, 64]'], {}), '([21, 39, 64])\n', (835, 849), True, 'import numpy as np\n'), ((868, 892), 'numpy.array', 'np.array', (['[40, 255, 255]'], {}), '([40, 255, 255])\n', (876, 892), True, 'import numpy as np\n'), ((911, 958), 'cv2.inRange', 'cv2.inRange', (['hsv_frame', 'low_yellow', 'high_yellow'], {}), '(hsv_frame, low_yellow, high_yellow)\n', (922, 958), False, 'import cv2\n'), ((972, 1019), 'cv2.bitwise_and', 'cv2.bitwise_and', (['frame', 'frame'], {'mask': 'yellow_mask'}), '(frame, frame, mask=yellow_mask)\n', (987, 1019), False, 'import cv2\n'), ((1061, 1081), 'numpy.array', 'np.array', (['[0, 42, 0]'], {}), '([0, 42, 0])\n', (1069, 1081), True, 'import numpy as np\n'), ((1093, 1118), 'numpy.array', 'np.array', (['[179, 255, 255]'], {}), '([179, 255, 255])\n', (1101, 1118), True, 'import numpy as np\n'), ((1130, 1163), 'cv2.inRange', 'cv2.inRange', (['hsv_frame', 'low', 'high'], {}), '(hsv_frame, low, high)\n', (1141, 1163), False, 'import cv2\n'), ((1177, 1217), 'cv2.bitwise_and', 'cv2.bitwise_and', (['frame', 'frame'], {'mask': 'mask'}), '(frame, frame, mask=mask)\n', (1192, 1217), False, 'import cv2\n'), ((1312, 1338), 'cv2.imshow', 'cv2.imshow', (['"""Green"""', 'green'], {}), "('Green', green)\n", (1322, 1338), False, 'import cv2\n'), ((1343, 1371), 'cv2.imshow', 'cv2.imshow', (['"""Yellow"""', 'yellow'], {}), "('Yellow', yellow)\n", (1353, 1371), False, 'import cv2\n'), ((1417, 1431), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1428, 1431), False, 'import cv2\n')]
|
from django.shortcuts import render
from django.conf.urls import url
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.debug import sensitive_post_parameters
from django.views.decorators.cache import never_cache
from django import shortcuts
from django.conf import settings
from . import forms
import requests
import json
from django.http import Http404
from django.shortcuts import redirect
from django.contrib import messages
@sensitive_post_parameters()
@csrf_protect
@never_cache
def signup(request):
if request.user.is_authenticated():
return shortcuts.redirect(settings.LOGIN_REDIRECT_URL)
if request.method == 'POST':
form = forms.UserCreationForm(request.POST)
if form.is_valid():
messages.success(request, 'Successfully created account. Please check your email to verify your account before logging in.')
return shortcuts.redirect(settings.LOGIN_REDIRECT_URL)
else:
form = forms.UserCreationForm()
return render(request, 'registration/signup.html', {'form': form})
def verify(request):
if request.GET.get('token') is not None:
payload = {'token': request.GET.get('token')}
headers = {'Content-type': 'application/json'}
res = requests.post(settings.EMAIL_VERIFICATION_URL, data=json.dumps(payload), headers=headers)
if res.status_code == 200:
messages.success(request, 'Email Address Verified! Please log in.')
return redirect('/auth/login')
raise Http404()
def reset_password(request):
if request.user.is_authenticated():
return shortcuts.redirect(settings.LOGIN_REDIRECT_URL)
if request.method == 'POST':
form = forms.UserResetPasswordForm(request.POST)
if form.is_valid():
messages.success(request, "We have sent a password reset link. Please check your email.")
return shortcuts.redirect(settings.LOGIN_REDIRECT_URL)
else:
form = forms.UserResetPasswordForm()
return render(request, 'registration/reset_password.html', {'form': form})
@sensitive_post_parameters()
@csrf_protect
@never_cache
def password_update(request):
if request.user.is_authenticated():
return shortcuts.redirect(settings.LOGIN_REDIRECT_URL)
if request.method == 'POST':
form = forms.UserPasswordUpdateForm(request.POST)
if form.is_valid():
token = request.path.rsplit('/', 1)[-1]
password = form.cleaned_data['<PASSWORD>']
payload = {'token': token, 'password': password}
headers = {'Content-type': 'application/json'}
res = requests.post(settings.PASSWORD_UPDATE_URL, data=json.dumps(payload), headers=headers)
if res.status_code == 200:
messages.success(request, "Password updated successfully. Please log in.")
else:
messages.error(request, "That reset link does not exist or has expired. Please request a new reset password link by going to the reset password page.")
return redirect('/auth/login')
else:
form = forms.UserPasswordUpdateForm()
return render(request, 'registration/password_update.html', {'form': form})
|
[
"django.contrib.messages.success",
"django.shortcuts.redirect",
"django.contrib.messages.error",
"json.dumps",
"django.http.Http404",
"django.shortcuts.render",
"django.views.decorators.debug.sensitive_post_parameters"
] |
[((502, 529), 'django.views.decorators.debug.sensitive_post_parameters', 'sensitive_post_parameters', ([], {}), '()\n', (527, 529), False, 'from django.views.decorators.debug import sensitive_post_parameters\n'), ((2145, 2172), 'django.views.decorators.debug.sensitive_post_parameters', 'sensitive_post_parameters', ([], {}), '()\n', (2170, 2172), False, 'from django.views.decorators.debug import sensitive_post_parameters\n'), ((1062, 1121), 'django.shortcuts.render', 'render', (['request', '"""registration/signup.html"""', "{'form': form}"], {}), "(request, 'registration/signup.html', {'form': form})\n", (1068, 1121), False, 'from django.shortcuts import render\n'), ((1572, 1581), 'django.http.Http404', 'Http404', ([], {}), '()\n', (1579, 1581), False, 'from django.http import Http404\n'), ((2075, 2142), 'django.shortcuts.render', 'render', (['request', '"""registration/reset_password.html"""', "{'form': form}"], {}), "(request, 'registration/reset_password.html', {'form': form})\n", (2081, 2142), False, 'from django.shortcuts import render\n'), ((633, 680), 'django.shortcuts.redirect', 'shortcuts.redirect', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (651, 680), False, 'from django import shortcuts\n'), ((1667, 1714), 'django.shortcuts.redirect', 'shortcuts.redirect', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (1685, 1714), False, 'from django import shortcuts\n'), ((2285, 2332), 'django.shortcuts.redirect', 'shortcuts.redirect', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (2303, 2332), False, 'from django import shortcuts\n'), ((3216, 3284), 'django.shortcuts.render', 'render', (['request', '"""registration/password_update.html"""', "{'form': form}"], {}), "(request, 'registration/password_update.html', {'form': form})\n", (3222, 3284), False, 'from django.shortcuts import render\n'), ((807, 940), 'django.contrib.messages.success', 'messages.success', (['request', '"""Successfully created account. Please check your email to verify your account before logging in."""'], {}), "(request,\n 'Successfully created account. Please check your email to verify your account before logging in.'\n )\n", (823, 940), False, 'from django.contrib import messages\n'), ((951, 998), 'django.shortcuts.redirect', 'shortcuts.redirect', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (969, 998), False, 'from django import shortcuts\n'), ((1450, 1517), 'django.contrib.messages.success', 'messages.success', (['request', '"""Email Address Verified! Please log in."""'], {}), "(request, 'Email Address Verified! Please log in.')\n", (1466, 1517), False, 'from django.contrib import messages\n'), ((1537, 1560), 'django.shortcuts.redirect', 'redirect', (['"""/auth/login"""'], {}), "('/auth/login')\n", (1545, 1560), False, 'from django.shortcuts import redirect\n'), ((1850, 1943), 'django.contrib.messages.success', 'messages.success', (['request', '"""We have sent a password reset link. Please check your email."""'], {}), "(request,\n 'We have sent a password reset link. Please check your email.')\n", (1866, 1943), False, 'from django.contrib import messages\n'), ((1959, 2006), 'django.shortcuts.redirect', 'shortcuts.redirect', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (1977, 2006), False, 'from django import shortcuts\n'), ((3120, 3143), 'django.shortcuts.redirect', 'redirect', (['"""/auth/login"""'], {}), "('/auth/login')\n", (3128, 3143), False, 'from django.shortcuts import redirect\n'), ((1364, 1383), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (1374, 1383), False, 'import json\n'), ((2840, 2914), 'django.contrib.messages.success', 'messages.success', (['request', '"""Password updated successfully. Please log in."""'], {}), "(request, 'Password updated successfully. Please log in.')\n", (2856, 2914), False, 'from django.contrib import messages\n'), ((2949, 3109), 'django.contrib.messages.error', 'messages.error', (['request', '"""That reset link does not exist or has expired. Please request a new reset password link by going to the reset password page."""'], {}), "(request,\n 'That reset link does not exist or has expired. Please request a new reset password link by going to the reset password page.'\n )\n", (2963, 3109), False, 'from django.contrib import messages\n'), ((2747, 2766), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (2757, 2766), False, 'import json\n')]
|
# -*- coding: utf-8 -*-
import os
import copy
import time
import shutil
from nipype import config
from nipype import logging
import nipype.pipeline.engine as pe
import nipype.interfaces.afni as afni
import nipype.interfaces.fsl as fsl
import nipype.interfaces.io as nio
from nipype.interfaces.utility import Merge, IdentityInterface
import nipype.interfaces.utility as util
from indi_aws import aws_utils
from CPAC.utils.utils import concat_list
from CPAC.utils.interfaces.datasink import DataSink
from CPAC.utils.interfaces.function import Function
import CPAC
from CPAC.registration import (
create_fsl_flirt_linear_reg,
create_fsl_fnirt_nonlinear_reg,
create_register_func_to_anat,
create_bbregister_func_to_anat,
create_wf_calculate_ants_warp,
connect_func_to_anat_init_reg,
connect_func_to_anat_bbreg,
connect_func_to_template_reg,
output_func_to_standard
)
from CPAC.registration.utils import run_ants_apply_warp
from CPAC.utils.datasource import (
resolve_resolution,
create_anat_datasource,
create_func_datasource,
create_check_for_s3_node
)
from CPAC.anat_preproc.anat_preproc import (
create_anat_preproc
)
from CPAC.seg_preproc.seg_preproc import (
connect_anat_segmentation
)
from CPAC.func_preproc.func_ingress import (
connect_func_ingress
)
from CPAC.func_preproc.func_preproc import (
connect_func_init,
connect_func_preproc,
create_func_preproc,
create_wf_edit_func
)
from CPAC.distortion_correction.distortion_correction import (
connect_distortion_correction
)
from CPAC.longitudinal_pipeline.longitudinal_preproc import (
subject_specific_template
)
from CPAC.utils import Strategy, find_files, function, Outputs
from CPAC.utils.utils import (
check_config_resources,
check_system_deps,
get_scan_params,
get_tr
)
logger = logging.getLogger('nipype.workflow')
def register_anat_longitudinal_template_to_standard(longitudinal_template_node, c, workflow, strat_init, strat_name):
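    """Register the anatomical longitudinal template to the standard template.

    Parameters
    ----------
    longitudinal_template_node : Node
        node exposing the 'brain_template' and 'skull_template' outputs of the
        subject-specific template workflow
    c : configuration
        pipeline configuration object
    workflow : Workflow
        main longitudinal workflow
    strat_init : Strategy
        strategy that is forked and populated with the longitudinal template resources
    strat_name : str
        name of the current skull-stripping strategy
    Returns
    -------
    strat_list : list of Strategy
        strategies updated with the registration and segmentation outputs
    """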
brain_mask = pe.Node(interface=fsl.maths.MathsCommand(),
name=f'longitudinal_anatomical_brain_mask_{strat_name}')
brain_mask.inputs.args = '-bin'
workflow.connect(longitudinal_template_node, 'brain_template',
brain_mask, 'in_file')
strat_init_new = strat_init.fork()
strat_init_new.update_resource_pool({
'anatomical_brain': (longitudinal_template_node, 'brain_template'),
'anatomical_skull_leaf': (longitudinal_template_node, 'skull_template'),
'anatomical_brain_mask': (brain_mask, 'out_file')
})
strat_list = [strat_init_new]
# only need to run once for each subject
already_skullstripped = c.already_skullstripped[0]
if already_skullstripped == 2:
already_skullstripped = 0
elif already_skullstripped == 3:
already_skullstripped = 1
sub_mem_gb, num_cores_per_sub, num_ants_cores = \
check_config_resources(c)
new_strat_list = []
# either run FSL anatomical-to-MNI registration, or...
if 'FSL' in c.regOption:
for num_strat, strat in enumerate(strat_list):
# this is to prevent the user from running FNIRT if they are
# providing already-skullstripped inputs. this is because
# FNIRT requires an input with the skull still on
if already_skullstripped == 1:
err_msg = '\n\n[!] CPAC says: FNIRT (for anatomical ' \
'registration) will not work properly if you ' \
'are providing inputs that have already been ' \
'skull-stripped.\n\nEither switch to using ' \
'ANTS for registration or provide input ' \
'images that have not been already ' \
'skull-stripped.\n\n'
logger.info(err_msg)
raise Exception
flirt_reg_anat_mni = create_fsl_flirt_linear_reg(
'anat_mni_flirt_register_%s_%d' % (strat_name, num_strat)
)
# if someone doesn't have anatRegFSLinterpolation in their pipe config,
# sinc will be default option
if not hasattr(c, 'anatRegFSLinterpolation'):
setattr(c, 'anatRegFSLinterpolation', 'sinc')
if c.anatRegFSLinterpolation not in ["trilinear", "sinc", "spline"]:
                err_msg = 'The selected FSL interpolation method must be one of: "trilinear", "sinc", "spline"'
raise Exception(err_msg)
# Input registration parameters
flirt_reg_anat_mni.inputs.inputspec.interp = c.anatRegFSLinterpolation
node, out_file = strat['anatomical_brain']
workflow.connect(node, out_file,
flirt_reg_anat_mni, 'inputspec.input_brain')
# pass the reference files
node, out_file = strat['template_brain_for_anat']
workflow.connect(node, out_file, flirt_reg_anat_mni,
'inputspec.reference_brain')
if 'ANTS' in c.regOption:
strat = strat.fork()
new_strat_list.append(strat)
strat.append_name(flirt_reg_anat_mni.name)
strat.update_resource_pool({
'registration_method': 'FSL',
'anatomical_to_mni_linear_xfm': (flirt_reg_anat_mni, 'outputspec.linear_xfm'),
'mni_to_anatomical_linear_xfm': (flirt_reg_anat_mni, 'outputspec.invlinear_xfm'),
'anat_longitudinal_template_to_standard': (flirt_reg_anat_mni, 'outputspec.output_brain')
})
strat_list += new_strat_list
new_strat_list = []
try:
fsl_linear_reg_only = c.fsl_linear_reg_only
except AttributeError:
fsl_linear_reg_only = [0]
if 'FSL' in c.regOption and 0 in fsl_linear_reg_only:
for num_strat, strat in enumerate(strat_list):
if strat.get('registration_method') == 'FSL':
fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg(
'anat_mni_fnirt_register_%s_%d' % (strat_name, num_strat)
)
# brain input
node, out_file = strat['anatomical_brain']
workflow.connect(node, out_file,
fnirt_reg_anat_mni, 'inputspec.input_brain')
# brain reference
node, out_file = strat['template_brain_for_anat']
workflow.connect(node, out_file,
fnirt_reg_anat_mni, 'inputspec.reference_brain')
# skull input
node, out_file = strat['anatomical_skull_leaf']
workflow.connect(node, out_file,
fnirt_reg_anat_mni, 'inputspec.input_skull')
# skull reference
node, out_file = strat['template_skull_for_anat']
workflow.connect(node, out_file,
fnirt_reg_anat_mni, 'inputspec.reference_skull')
node, out_file = strat['anatomical_to_mni_linear_xfm']
workflow.connect(node, out_file,
fnirt_reg_anat_mni, 'inputspec.linear_aff')
node, out_file = strat['template_ref_mask']
workflow.connect(node, out_file,
fnirt_reg_anat_mni, 'inputspec.ref_mask')
# assign the FSL FNIRT config file specified in pipeline
# config.yml
fnirt_reg_anat_mni.inputs.inputspec.fnirt_config = c.fnirtConfig
if 1 in fsl_linear_reg_only:
strat = strat.fork()
new_strat_list.append(strat)
strat.append_name(fnirt_reg_anat_mni.name)
strat.update_resource_pool({
'anatomical_to_mni_nonlinear_xfm': (fnirt_reg_anat_mni, 'outputspec.nonlinear_xfm'),
'anat_longitudinal_template_to_standard': (fnirt_reg_anat_mni, 'outputspec.output_brain')
}, override=True)
strat_list += new_strat_list
new_strat_list = []
for num_strat, strat in enumerate(strat_list):
# or run ANTS anatomical-to-MNI registration instead
if 'ANTS' in c.regOption and \
strat.get('registration_method') != 'FSL':
ants_reg_anat_mni = \
create_wf_calculate_ants_warp(
'anat_mni_ants_register_%s_%d' % (strat_name, num_strat),
num_threads=num_ants_cores,
reg_ants_skull=c.regWithSkull
)
# if someone doesn't have anatRegANTSinterpolation in their pipe config,
# it will default to LanczosWindowedSinc
if not hasattr(c, 'anatRegANTSinterpolation'):
setattr(c, 'anatRegANTSinterpolation', 'LanczosWindowedSinc')
if c.anatRegANTSinterpolation not in ['Linear', 'BSpline', 'LanczosWindowedSinc']:
                err_msg = 'The selected ANTS interpolation method must be one of: "Linear", "BSpline", "LanczosWindowedSinc"'
raise Exception(err_msg)
# Input registration parameters
ants_reg_anat_mni.inputs.inputspec.interp = c.anatRegANTSinterpolation
# calculating the transform with the skullstripped is
# reported to be better, but it requires very high
            # quality skullstripping. If skullstripping is imprecise,
# registration with skull is preferred
if 1 in c.regWithSkull:
if already_skullstripped == 1:
err_msg = '\n\n[!] CPAC says: You selected ' \
'to run anatomical registration with ' \
'the skull, but you also selected to ' \
'use already-skullstripped images as ' \
'your inputs. This can be changed ' \
'in your pipeline configuration ' \
'editor.\n\n'
logger.info(err_msg)
raise Exception
# get the skull-stripped anatomical from resource pool
node, out_file = strat['anatomical_brain']
# pass the anatomical to the workflow
workflow.connect(node, out_file,
ants_reg_anat_mni, 'inputspec.moving_brain')
# get the reorient skull-on anatomical from resource pool
node, out_file = strat['anatomical_skull_leaf']
# pass the anatomical to the workflow
workflow.connect(node, out_file,
ants_reg_anat_mni, 'inputspec.moving_skull')
# pass the reference file
node, out_file = strat['template_brain_for_anat']
workflow.connect(node, out_file,
ants_reg_anat_mni, 'inputspec.reference_brain')
# pass the reference file
node, out_file = strat['template_skull_for_anat']
workflow.connect(node, out_file,
ants_reg_anat_mni, 'inputspec.reference_skull')
else:
node, out_file = strat['anatomical_brain']
workflow.connect(node, out_file,
ants_reg_anat_mni, 'inputspec.moving_brain')
# pass the reference file
node, out_file = strat['template_brain_for_anat']
workflow.connect(node, out_file,
ants_reg_anat_mni, 'inputspec.reference_brain')
ants_reg_anat_mni.inputs.inputspec.ants_para = c.ANTs_para_T1_registration
ants_reg_anat_mni.inputs.inputspec.fixed_image_mask = None
strat.append_name(ants_reg_anat_mni.name)
strat.update_resource_pool({
'registration_method': 'ANTS',
'ants_initial_xfm': (ants_reg_anat_mni, 'outputspec.ants_initial_xfm'),
'ants_rigid_xfm': (ants_reg_anat_mni, 'outputspec.ants_rigid_xfm'),
'ants_affine_xfm': (ants_reg_anat_mni, 'outputspec.ants_affine_xfm'),
'anatomical_to_mni_nonlinear_xfm': (ants_reg_anat_mni, 'outputspec.warp_field'),
'mni_to_anatomical_nonlinear_xfm': (ants_reg_anat_mni, 'outputspec.inverse_warp_field'),
'anat_to_mni_ants_composite_xfm': (ants_reg_anat_mni, 'outputspec.composite_transform'),
'anat_longitudinal_template_to_standard': (ants_reg_anat_mni, 'outputspec.normalized_output_brain')
})
strat_list += new_strat_list
# [SYMMETRIC] T1 -> Symmetric Template, Non-linear registration (FNIRT/ANTS)
new_strat_list = []
if 1 in c.runVMHC and 1 in getattr(c, 'runFunctional', [1]):
for num_strat, strat in enumerate(strat_list):
if 'FSL' in c.regOption and \
strat.get('registration_method') != 'ANTS':
# this is to prevent the user from running FNIRT if they are
# providing already-skullstripped inputs. this is because
# FNIRT requires an input with the skull still on
# TODO ASH normalize w schema validation to bool
if already_skullstripped == 1:
err_msg = '\n\n[!] CPAC says: FNIRT (for anatomical ' \
'registration) will not work properly if you ' \
'are providing inputs that have already been ' \
'skull-stripped.\n\nEither switch to using ' \
'ANTS for registration or provide input ' \
'images that have not been already ' \
'skull-stripped.\n\n'
logger.info(err_msg)
raise Exception
flirt_reg_anat_symm_mni = create_fsl_flirt_linear_reg(
'anat_symmetric_mni_flirt_register_%s_%d' % (strat_name, num_strat)
)
flirt_reg_anat_symm_mni.inputs.inputspec.interp = c.anatRegFSLinterpolation
node, out_file = strat['anatomical_brain']
workflow.connect(node, out_file,
flirt_reg_anat_symm_mni, 'inputspec.input_brain')
node, out_file = strat['template_symmetric_brain']
workflow.connect(node, out_file,
flirt_reg_anat_symm_mni, 'inputspec.reference_brain')
# if 'ANTS' in c.regOption:
# strat = strat.fork()
# new_strat_list.append(strat)
strat.append_name(flirt_reg_anat_symm_mni.name)
strat.update_resource_pool({
'anatomical_to_symmetric_mni_linear_xfm': (
flirt_reg_anat_symm_mni, 'outputspec.linear_xfm'),
'symmetric_mni_to_anatomical_linear_xfm': (
flirt_reg_anat_symm_mni, 'outputspec.invlinear_xfm'),
'symmetric_anatomical_to_standard': (
flirt_reg_anat_symm_mni, 'outputspec.output_brain')
})
strat_list += new_strat_list
new_strat_list = []
try:
fsl_linear_reg_only = c.fsl_linear_reg_only
except AttributeError:
fsl_linear_reg_only = [0]
if 'FSL' in c.regOption and 0 in fsl_linear_reg_only:
for num_strat, strat in enumerate(strat_list):
if strat.get('registration_method') == 'FSL':
fnirt_reg_anat_symm_mni = create_fsl_fnirt_nonlinear_reg(
'anat_symmetric_mni_fnirt_register_%s_%d' % (strat_name, num_strat)
)
node, out_file = strat['anatomical_brain']
workflow.connect(node, out_file,
fnirt_reg_anat_symm_mni,
'inputspec.input_brain')
node, out_file = strat['anatomical_skull_leaf']
workflow.connect(node, out_file,
fnirt_reg_anat_symm_mni,
'inputspec.input_skull')
node, out_file = strat['template_brain_for_anat']
workflow.connect(node, out_file,
fnirt_reg_anat_symm_mni, 'inputspec.reference_brain')
node, out_file = strat['template_symmetric_skull']
workflow.connect(node, out_file,
fnirt_reg_anat_symm_mni, 'inputspec.reference_skull')
node, out_file = strat['anatomical_to_mni_linear_xfm']
workflow.connect(node, out_file,
fnirt_reg_anat_symm_mni,
'inputspec.linear_aff')
node, out_file = strat['template_dilated_symmetric_brain_mask']
workflow.connect(node, out_file,
fnirt_reg_anat_symm_mni, 'inputspec.ref_mask')
strat.append_name(fnirt_reg_anat_symm_mni.name)
strat.update_resource_pool({
'anatomical_to_symmetric_mni_nonlinear_xfm': (
fnirt_reg_anat_symm_mni, 'outputspec.nonlinear_xfm'),
'symmetric_anatomical_to_standard': (
fnirt_reg_anat_symm_mni, 'outputspec.output_brain')
}, override=True)
strat_list += new_strat_list
new_strat_list = []
for num_strat, strat in enumerate(strat_list):
if 'ANTS' in c.regOption and \
strat.get('registration_method') != 'FSL':
ants_reg_anat_symm_mni = \
create_wf_calculate_ants_warp(
'anat_symmetric_mni_ants_register_%s_%d' % (strat_name, num_strat),
num_threads=num_ants_cores,
reg_ants_skull=c.regWithSkull
)
# Input registration parameters
ants_reg_anat_symm_mni.inputs.inputspec.interp = c.anatRegANTSinterpolation
# calculating the transform with the skullstripped is
# reported to be better, but it requires very high
            # quality skullstripping. If skullstripping is imprecise,
# registration with skull is preferred
if 1 in c.regWithSkull:
if already_skullstripped == 1:
err_msg = '\n\n[!] CPAC says: You selected ' \
'to run anatomical registration with ' \
'the skull, but you also selected to ' \
'use already-skullstripped images as ' \
'your inputs. This can be changed ' \
'in your pipeline configuration ' \
'editor.\n\n'
logger.info(err_msg)
raise Exception
# get the skullstripped anatomical from resource pool
node, out_file = strat['anatomical_brain']
# pass the anatomical to the workflow
workflow.connect(node, out_file,
ants_reg_anat_symm_mni, 'inputspec.moving_brain')
# pass the reference file
node, out_file = strat['template_symmetric_brain']
workflow.connect(node, out_file,
ants_reg_anat_symm_mni, 'inputspec.reference_brain')
# get the reorient skull-on anatomical from resource pool
node, out_file = strat['anatomical_skull_leaf']
# pass the anatomical to the workflow
workflow.connect(node, out_file,
ants_reg_anat_symm_mni, 'inputspec.moving_skull')
# pass the reference file
node, out_file = strat['template_symmetric_skull']
workflow.connect(node, out_file,
ants_reg_anat_symm_mni, 'inputspec.reference_skull')
else:
# get the skullstripped anatomical from resource pool
node, out_file = strat['anatomical_brain']
workflow.connect(node, out_file,
ants_reg_anat_symm_mni, 'inputspec.moving_brain')
# pass the reference file
node, out_file = strat['template_symmetric_brain']
workflow.connect(node, out_file,
ants_reg_anat_symm_mni, 'inputspec.reference_brain')
ants_reg_anat_symm_mni.inputs.inputspec.ants_para = c.ANTs_para_T1_registration
ants_reg_anat_symm_mni.inputs.inputspec.fixed_image_mask = None
strat.append_name(ants_reg_anat_symm_mni.name)
strat.update_resource_pool({
'ants_symmetric_initial_xfm': (ants_reg_anat_symm_mni, 'outputspec.ants_initial_xfm'),
'ants_symmetric_rigid_xfm': (ants_reg_anat_symm_mni, 'outputspec.ants_rigid_xfm'),
'ants_symmetric_affine_xfm': (ants_reg_anat_symm_mni, 'outputspec.ants_affine_xfm'),
'anatomical_to_symmetric_mni_nonlinear_xfm': (ants_reg_anat_symm_mni, 'outputspec.warp_field'),
'symmetric_mni_to_anatomical_nonlinear_xfm': (
ants_reg_anat_symm_mni, 'outputspec.inverse_warp_field'),
'anat_to_symmetric_mni_ants_composite_xfm': (
ants_reg_anat_symm_mni, 'outputspec.composite_transform'),
'symmetric_anatomical_to_standard': (ants_reg_anat_symm_mni, 'outputspec.normalized_output_brain')
})
strat_list += new_strat_list
# Inserting Segmentation Preprocessing Workflow
workflow, strat_list = connect_anat_segmentation(workflow, strat_list, c, strat_name)
return strat_list
def create_datasink(datasink_name, config, subject_id, session_id='', strat_name='', map_node_iterfield=None):
"""
Parameters
----------
    datasink_name : str
        label used to build the DataSink node name ('sinker_<datasink_name>')
    config : configuration
        pipeline configuration object (output directory, S3 credentials, pipeline name)
    subject_id : str
        the id of the subject
    session_id : str
        the id of the session (optional)
    strat_name : str
        name of the strategy, used in the output container path
    map_node_iterfield : list
        if given, a MapNode iterating over these fields is created instead of a Node
    Returns
    -------
    ds : Node or MapNode
        a configured CPAC DataSink node
"""
try:
encrypt_data = bool(config.s3Encryption[0])
except:
encrypt_data = False
# TODO Enforce value with schema validation
# Extract credentials path for output if it exists
try:
# Get path to creds file
creds_path = ''
if config.awsOutputBucketCredentials:
creds_path = str(config.awsOutputBucketCredentials)
creds_path = os.path.abspath(creds_path)
if config.outputDirectory.lower().startswith('s3://'):
# Test for s3 write access
s3_write_access = \
aws_utils.test_bucket_access(creds_path,
config.outputDirectory)
if not s3_write_access:
raise Exception('Not able to write to bucket!')
except Exception as e:
if config.outputDirectory.lower().startswith('s3://'):
err_msg = 'There was an error processing credentials or ' \
'accessing the S3 bucket. Check and try again.\n' \
'Error: %s' % e
raise Exception(err_msg)
if map_node_iterfield is not None:
ds = pe.MapNode(
DataSink(infields=map_node_iterfield),
name='sinker_{}'.format(datasink_name),
iterfield=map_node_iterfield
)
else:
ds = pe.Node(
DataSink(),
name='sinker_{}'.format(datasink_name)
)
ds.inputs.base_directory = config.outputDirectory
ds.inputs.creds_path = creds_path
ds.inputs.encrypt_bucket_keys = encrypt_data
ds.inputs.container = os.path.join(
'pipeline_%s_%s' % (config.pipelineName, strat_name),
subject_id, session_id
)
return ds
def connect_anat_preproc_inputs(strat, anat_preproc, strat_name, strat_nodes_list_list, workflow):
"""
Parameters
----------
strat : Strategy
the strategy object you want to fork
anat_preproc : Workflow
the anat_preproc workflow node to be connected and added to the resource pool
strat_name : str
name of the strategy
strat_nodes_list_list : list
a list of strat_nodes_list
workflow: Workflow
main longitudinal workflow
Returns
-------
new_strat : Strategy
the fork of strat with the resource pool updated
strat_nodes_list_list : list
a list of strat_nodes_list
"""
new_strat = strat.fork()
tmp_node, out_key = new_strat['anatomical']
workflow.connect(tmp_node, out_key, anat_preproc, 'inputspec.anat')
tmp_node, out_key = new_strat['template_cmass']
workflow.connect(tmp_node, out_key, anat_preproc, 'inputspec.template_cmass')
new_strat.append_name(anat_preproc.name)
new_strat.update_resource_pool({
'anatomical_brain': (
anat_preproc, 'outputspec.brain'),
'anatomical_skull_leaf': (
anat_preproc, 'outputspec.reorient'),
'anatomical_brain_mask': (
anat_preproc, 'outputspec.brain_mask'),
})
try:
strat_nodes_list_list[strat_name].append(new_strat)
except KeyError:
strat_nodes_list_list[strat_name] = [new_strat]
return new_strat, strat_nodes_list_list
def pick_map(file_list, index, file_type):
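    """Return the entry of file_list whose name ends with '<file_type>_<index>.nii.gz'.

    A singleton list is unwrapped one level before searching. Returns None when
    file_list is not a list or when no entry matches.
    """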
if isinstance(file_list, list):
if len(file_list) == 1:
file_list = file_list[0]
for file_name in file_list:
if file_name.endswith(f"{file_type}_{index}.nii.gz"):
return file_name
return None
def anat_longitudinal_wf(subject_id, sub_list, config):
"""
Parameters
----------
subject_id : str
the id of the subject
sub_list : list of dict
        this is a list of sessions for one subject and each session is the same dictionary as the one given to
prep_workflow
config : configuration
a configuration object containing the information of the pipeline config. (Same as for prep_workflow)
Returns
-------
None
"""
workflow = pe.Workflow(name="anat_longitudinal_template_" + str(subject_id))
workflow.base_dir = config.workingDirectory
workflow.config['execution'] = {
'hash_method': 'timestamp',
'crashdump_dir': os.path.abspath(config.crashLogDirectory)
}
# For each participant we have a list of dict (each dict is a session)
already_skullstripped = config.already_skullstripped[0]
if already_skullstripped == 2:
already_skullstripped = 0
elif already_skullstripped == 3:
already_skullstripped = 1
resampled_template = pe.Node(Function(input_names=['resolution', 'template', 'template_name', 'tag'],
output_names=['resampled_template'],
function=resolve_resolution,
as_module=True),
name='template_skull_for_anat')
resampled_template.inputs.resolution = config.resolution_for_anat
resampled_template.inputs.template = config.template_skull_for_anat
resampled_template.inputs.template_name = 'template_skull_for_anat'
resampled_template.inputs.tag = 'resolution_for_anat'
# Node to calculate the center of mass of the standard template to align the images with it.
template_center_of_mass = pe.Node(
interface=afni.CenterMass(),
name='template_skull_for_anat_center_of_mass'
)
template_center_of_mass.inputs.cm_file = "template_center_of_mass.txt"
workflow.connect(resampled_template, 'resampled_template',
template_center_of_mass, 'in_file')
# list of lists for every strategy
strat_nodes_list_list = {}
# list of the data config dictionaries to be updated during the preprocessing
# creds_list = []
session_id_list = []
# Loop over the sessions to create the input for the longitudinal algorithm
for session in sub_list:
unique_id = session['unique_id']
session_id_list.append(unique_id)
try:
creds_path = session['creds_path']
if creds_path and 'none' not in creds_path.lower():
if os.path.exists(creds_path):
input_creds_path = os.path.abspath(creds_path)
else:
err_msg = 'Credentials path: "%s" for subject "%s" session "%s" ' \
'was not found. Check this path and try ' \
'again.' % (creds_path, subject_id, unique_id)
raise Exception(err_msg)
else:
input_creds_path = None
except KeyError:
input_creds_path = None
template_keys = [
("anat", "PRIORS_CSF"),
("anat", "PRIORS_GRAY"),
("anat", "PRIORS_WHITE"),
("other", "configFileTwomm"),
("anat", "template_based_segmentation_CSF"),
("anat", "template_based_segmentation_GRAY"),
("anat", "template_based_segmentation_WHITE"),
]
for key_type, key in template_keys:
if isinstance(getattr(config, key), str):
node = create_check_for_s3_node(
name=key,
file_path=getattr(config, key),
img_type=key_type,
creds_path=input_creds_path,
dl_dir=config.workingDirectory
)
setattr(config, key, node)
strat = Strategy()
strat_list = []
node_suffix = '_'.join([subject_id, unique_id])
anat_rsc = create_anat_datasource('anat_gather_%s' % node_suffix)
anat_rsc.inputs.inputnode.set(
subject = subject_id,
anat = session['anat'],
creds_path = input_creds_path,
dl_dir = config.workingDirectory,
img_type = 'anat'
)
strat.update_resource_pool({
'anatomical': (anat_rsc, 'outputspec.anat')
})
strat.update_resource_pool({
'template_cmass': (template_center_of_mass, 'cm')
})
# Here we have the same strategies for the skull stripping as in prep_workflow
if 'brain_mask' in session.keys() and session['brain_mask'] and \
session['brain_mask'].lower() != 'none':
brain_rsc = create_anat_datasource(
'brain_gather_%s' % unique_id)
brain_rsc.inputs.inputnode.set(
subject = subject_id,
anat = session['brain_mask'],
creds_path = input_creds_path,
dl_dir = config.workingDirectory,
img_type = 'anat'
)
skullstrip_method = 'mask'
preproc_wf_name = 'anat_preproc_mask_%s' % node_suffix
strat.append_name(brain_rsc.name)
strat.update_resource_pool({
'anatomical_brain_mask': (brain_rsc, 'outputspec.anat')
})
anat_preproc = create_anat_preproc(
method=skullstrip_method,
config=config,
wf_name=preproc_wf_name)
workflow.connect(brain_rsc, 'outputspec.brain_mask',
anat_preproc, 'inputspec.brain_mask')
new_strat, strat_nodes_list_list = connect_anat_preproc_inputs(
strat, anat_preproc, skullstrip_method + "_skullstrip", strat_nodes_list_list, workflow)
strat_list.append(new_strat)
elif already_skullstripped:
skullstrip_method = None
preproc_wf_name = 'anat_preproc_already_%s' % node_suffix
anat_preproc = create_anat_preproc(
method=skullstrip_method,
already_skullstripped=True,
config=config,
wf_name=preproc_wf_name
)
new_strat, strat_nodes_list_list = connect_anat_preproc_inputs(
strat, anat_preproc, 'already_skullstripped', strat_nodes_list_list, workflow)
strat_list.append(new_strat)
else:
# TODO add other SS methods
if "AFNI" in config.skullstrip_option:
skullstrip_method = 'afni'
preproc_wf_name = 'anat_preproc_afni_%s' % node_suffix
anat_preproc = create_anat_preproc(
method=skullstrip_method,
config=config,
wf_name=preproc_wf_name)
anat_preproc.inputs.AFNI_options.set(
shrink_factor=config.skullstrip_shrink_factor,
var_shrink_fac=config.skullstrip_var_shrink_fac,
shrink_fac_bot_lim=config.skullstrip_shrink_factor_bot_lim,
avoid_vent=config.skullstrip_avoid_vent,
niter=config.skullstrip_n_iterations,
pushout=config.skullstrip_pushout,
touchup=config.skullstrip_touchup,
fill_hole=config.skullstrip_fill_hole,
avoid_eyes=config.skullstrip_avoid_eyes,
use_edge=config.skullstrip_use_edge,
exp_frac=config.skullstrip_exp_frac,
smooth_final=config.skullstrip_smooth_final,
push_to_edge=config.skullstrip_push_to_edge,
use_skull=config.skullstrip_use_skull,
perc_int=config.skullstrip_perc_int,
max_inter_iter=config.skullstrip_max_inter_iter,
blur_fwhm=config.skullstrip_blur_fwhm,
fac=config.skullstrip_fac,
monkey=config.skullstrip_monkey,
mask_vol=config.skullstrip_mask_vol
)
new_strat, strat_nodes_list_list = connect_anat_preproc_inputs(
strat, anat_preproc, skullstrip_method + "_skullstrip", strat_nodes_list_list, workflow)
strat_list.append(new_strat)
if "BET" in config.skullstrip_option:
skullstrip_method = 'fsl'
preproc_wf_name = 'anat_preproc_fsl_%s' % node_suffix
anat_preproc = create_anat_preproc(
method=skullstrip_method,
config=config,
wf_name=preproc_wf_name)
anat_preproc.inputs.BET_options.set(
frac=config.bet_frac,
mask_boolean=config.bet_mask_boolean,
mesh_boolean=config.bet_mesh_boolean,
outline=config.bet_outline,
padding=config.bet_padding,
radius=config.bet_radius,
reduce_bias=config.bet_reduce_bias,
remove_eyes=config.bet_remove_eyes,
robust=config.bet_robust,
skull=config.bet_skull,
surfaces=config.bet_surfaces,
threshold=config.bet_threshold,
vertical_gradient=config.bet_vertical_gradient,
)
new_strat, strat_nodes_list_list = connect_anat_preproc_inputs(
strat, anat_preproc, skullstrip_method + "_skullstrip", strat_nodes_list_list, workflow)
strat_list.append(new_strat)
if not any(o in config.skullstrip_option for o in
["AFNI", "BET"]):
err = '\n\n[!] C-PAC says: Your skull-stripping ' \
'method options setting does not include either' \
' \'AFNI\' or \'BET\'.\n\n Options you ' \
'provided:\nskullstrip_option: {0}\n\n'.format(
str(config.skullstrip_option))
raise Exception(err)
# Here we have all the anat_preproc set up for every session of the subject
strat_init = Strategy()
templates_for_resampling = [
(config.resolution_for_anat, config.template_brain_only_for_anat, 'template_brain_for_anat', 'resolution_for_anat'),
(config.resolution_for_anat, config.template_skull_for_anat, 'template_skull_for_anat', 'resolution_for_anat'),
(config.resolution_for_anat, config.template_symmetric_brain_only, 'template_symmetric_brain', 'resolution_for_anat'),
(config.resolution_for_anat, config.template_symmetric_skull, 'template_symmetric_skull', 'resolution_for_anat'),
(config.resolution_for_anat, config.dilated_symmetric_brain_mask, 'template_dilated_symmetric_brain_mask',
'resolution_for_anat'),
(config.resolution_for_anat, config.ref_mask, 'template_ref_mask', 'resolution_for_anat'),
(config.resolution_for_func_preproc, config.template_brain_only_for_func, 'template_brain_for_func_preproc',
'resolution_for_func_preproc'),
(config.resolution_for_func_preproc, config.template_skull_for_func, 'template_skull_for_func_preproc',
'resolution_for_func_preproc'),
(config.resolution_for_func_derivative, config.template_brain_only_for_func, 'template_brain_for_func_derivative',
'resolution_for_func_preproc'),
(config.resolution_for_func_derivative, config.template_skull_for_func, 'template_skull_for_func_derivative',
'resolution_for_func_preproc')
]
# update resampled template to resource pool
for resolution, template, template_name, tag in templates_for_resampling:
resampled_template = pe.Node(Function(input_names=['resolution', 'template', 'template_name', 'tag'],
output_names=['resampled_template'],
function=resolve_resolution,
as_module=True),
name='resampled_' + template_name)
resampled_template.inputs.resolution = resolution
resampled_template.inputs.template = template
resampled_template.inputs.template_name = template_name
resampled_template.inputs.tag = tag
strat_init.update_resource_pool({
template_name: (resampled_template, 'resampled_template')
})
# loop over the different skull stripping strategies
for strat_name, strat_nodes_list in strat_nodes_list_list.items():
node_suffix = '_'.join([strat_name, subject_id])
# Merge node to feed the anat_preproc outputs to the longitudinal template generation
brain_merge_node = pe.Node(
interface=Merge(len(strat_nodes_list)),
name="anat_longitudinal_brain_merge_" + node_suffix)
skull_merge_node = pe.Node(
interface=Merge(len(strat_nodes_list)),
name="anat_longitudinal_skull_merge_" + node_suffix)
# This node will generate the longitudinal template (the functions are in longitudinal_preproc)
# Later other algorithms could be added to calculate it, like the multivariate template from ANTS
# It would just require to change it here.
template_node = subject_specific_template(
workflow_name='subject_specific_anat_template_' + node_suffix
)
unique_id_list = [i.get_name()[0].split('_')[-1] for i in strat_nodes_list]
template_node.inputs.set(
avg_method=config.longitudinal_template_average_method,
dof=config.longitudinal_template_dof,
interp=config.longitudinal_template_interp,
cost=config.longitudinal_template_cost,
convergence_threshold=config.longitudinal_template_convergence_threshold,
thread_pool=config.longitudinal_template_thread_pool,
unique_id_list=unique_id_list
)
workflow.connect(brain_merge_node, 'out', template_node, 'input_brain_list')
workflow.connect(skull_merge_node, 'out', template_node, 'input_skull_list')
reg_strat_list = register_anat_longitudinal_template_to_standard(template_node, config, workflow, strat_init, strat_name)
# Register T1 to the standard template
# TODO add session information in node name
for num_reg_strat, reg_strat in enumerate(reg_strat_list):
if reg_strat.get('registration_method') == 'FSL':
fsl_apply_warp = pe.MapNode(interface=fsl.ApplyWarp(),
name='fsl_apply_warp_anat_longitudinal_to_standard_{0}_'.format(strat_name),
iterfield=['in_file'])
workflow.connect(template_node, "output_brain_list",
fsl_apply_warp, 'in_file')
node, out_file = reg_strat['template_brain_for_anat']
workflow.connect(node, out_file,
fsl_apply_warp, 'ref_file')
# TODO how to include linear xfm?
# node, out_file = reg_strat['anatomical_to_mni_linear_xfm']
# workflow.connect(node, out_file, fsl_apply_warp, 'premat')
node, out_file = reg_strat['anatomical_to_mni_nonlinear_xfm']
workflow.connect(node, out_file,
fsl_apply_warp, 'field_file')
reg_strat.update_resource_pool({
'anatomical_to_standard': (fsl_apply_warp, 'out_file')
})
elif reg_strat.get('registration_method') == 'ANTS':
ants_apply_warp = pe.MapNode(util.Function(input_names=['moving_image',
'reference',
'initial',
'rigid',
'affine',
'nonlinear',
'interp'],
output_names=['out_image'],
function=run_ants_apply_warp),
name='ants_apply_warp_anat_longitudinal_to_standard_{0}_'.format(strat_name),
iterfield=['moving_image'])
workflow.connect(template_node, "output_brain_list", ants_apply_warp, 'moving_image')
node, out_file = reg_strat['template_brain_for_anat']
workflow.connect(node, out_file, ants_apply_warp, 'reference')
node, out_file = reg_strat['ants_initial_xfm']
workflow.connect(node, out_file, ants_apply_warp, 'initial')
node, out_file = reg_strat['ants_rigid_xfm']
workflow.connect(node, out_file, ants_apply_warp, 'rigid')
node, out_file = reg_strat['ants_affine_xfm']
workflow.connect(node, out_file, ants_apply_warp, 'affine')
node, out_file = reg_strat['anatomical_to_mni_nonlinear_xfm']
workflow.connect(node, out_file, ants_apply_warp, 'nonlinear')
ants_apply_warp.inputs.interp = config.anatRegANTSinterpolation
reg_strat.update_resource_pool({
'anatomical_to_standard': (ants_apply_warp, 'out_image')
})
# Register tissue segmentation from longitudinal template space to native space
fsl_convert_xfm = pe.MapNode(interface=fsl.ConvertXFM(),
name=f'fsl_xfm_longitudinal_to_native_{strat_name}',
iterfield=['in_file'])
fsl_convert_xfm.inputs.invert_xfm = True
workflow.connect(template_node, "warp_list",
fsl_convert_xfm, 'in_file')
def seg_apply_warp(strat_name, resource, type='str', file_type=None):
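            # Bring a segmentation resource from longitudinal-template space back to each
            # session's native space by applying the inverted FLIRT matrices computed by
            # fsl_convert_xfm above, using nearest-neighbour interpolation. type='str'
            # warps a single file per resource; type='list' warps three-map outputs
            # (e.g. probability or partial-volume files), picking the map for each index
            # and re-concatenating the per-index results into a list.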
if type == 'str':
fsl_apply_xfm = pe.MapNode(interface=fsl.ApplyXFM(),
name=f'fsl_apply_xfm_longitudinal_to_native_{resource}_{strat_name}',
iterfield=['reference', 'in_matrix_file'])
fsl_apply_xfm.inputs.interp = 'nearestneighbour'
node, out_file = reg_strat[resource]
workflow.connect(node, out_file,
fsl_apply_xfm, 'in_file')
workflow.connect(brain_merge_node, 'out',
fsl_apply_xfm, 'reference')
workflow.connect(fsl_convert_xfm, "out_file",
fsl_apply_xfm, 'in_matrix_file')
reg_strat.update_resource_pool({
resource:(fsl_apply_xfm, 'out_file')
}, override=True)
elif type == 'list':
for index in range(3):
fsl_apply_xfm = pe.MapNode(interface=fsl.ApplyXFM(),
name=f'fsl_apply_xfm_longitudinal_to_native_{resource}_{index}_{strat_name}',
iterfield=['reference', 'in_matrix_file'])
fsl_apply_xfm.inputs.interp = 'nearestneighbour'
pick_seg_map = pe.Node(Function(input_names=['file_list', 'index', 'file_type'],
output_names=['file_name'],
function=pick_map),
name=f'pick_{file_type}_{index}_{strat_name}')
node, out_file = reg_strat[resource]
workflow.connect(node, out_file,
pick_seg_map, 'file_list')
pick_seg_map.inputs.index=index
pick_seg_map.inputs.file_type=file_type
workflow.connect(pick_seg_map, 'file_name',
fsl_apply_xfm, 'in_file')
workflow.connect(brain_merge_node, 'out',
fsl_apply_xfm, 'reference')
workflow.connect(fsl_convert_xfm, 'out_file',
fsl_apply_xfm, 'in_matrix_file')
concat_seg_map = pe.Node(Function(input_names=['in_list1', 'in_list2'],
output_names=['out_list'],
function=concat_list),
name=f'concat_{file_type}_{index}_{strat_name}')
if index == 0:
workflow.connect(fsl_apply_xfm, 'out_file',
concat_seg_map, 'in_list1')
reg_strat.update_resource_pool({
f'temporary_{resource}_list':(concat_seg_map, 'out_list')
})
else:
workflow.connect(fsl_apply_xfm, 'out_file',
concat_seg_map, 'in_list2')
node, out_file = reg_strat[f'temporary_{resource}_list']
workflow.connect(node, out_file,
concat_seg_map, 'in_list1')
reg_strat.update_resource_pool({
f'temporary_{resource}_list':(concat_seg_map, 'out_list')
}, override=True)
reg_strat.update_resource_pool({
resource:(concat_seg_map, 'out_list')
}, override=True)
for seg in ['anatomical_gm_mask', 'anatomical_csf_mask', 'anatomical_wm_mask',
'seg_mixeltype', 'seg_partial_volume_map']:
seg_apply_warp(strat_name=strat_name, resource=seg)
# apply warp on list
seg_apply_warp(strat_name=strat_name, resource='seg_probability_maps', type='list', file_type='prob')
seg_apply_warp(strat_name=strat_name, resource='seg_partial_volume_files', type='list', file_type='pve')
# Update resource pool
# longitudinal template
rsc_key = 'anatomical_longitudinal_template_'
ds_template = create_datasink(rsc_key + node_suffix, config, subject_id, strat_name='longitudinal_'+strat_name)
workflow.connect(template_node, 'brain_template',
ds_template, rsc_key)
# T1 to longitudinal template warp
rsc_key = 'anatomical_to_longitudinal_template_warp_'
ds_warp_list = create_datasink(rsc_key + node_suffix, config, subject_id, strat_name='longitudinal_'+strat_name,
map_node_iterfield=['anatomical_to_longitudinal_template_warp'])
workflow.connect(template_node, "warp_list",
ds_warp_list, 'anatomical_to_longitudinal_template_warp')
# T1 in longitudinal template space
rsc_key = 'anatomical_to_longitudinal_template_'
t1_list = create_datasink(rsc_key + node_suffix, config, subject_id, strat_name='longitudinal_'+strat_name,
map_node_iterfield=['anatomical_to_longitudinal_template'])
workflow.connect(template_node, "output_brain_list",
t1_list, 'anatomical_to_longitudinal_template')
# longitudinal to standard registration items
for num_strat, strat in enumerate(reg_strat_list):
for rsc_key in strat.resource_pool.keys():
rsc_nodes_suffix = '_'.join(['_longitudinal_to_standard', strat_name, str(num_strat)])
if rsc_key in Outputs.any:
node, rsc_name = strat[rsc_key]
ds = create_datasink(rsc_key + rsc_nodes_suffix, config, subject_id, strat_name='longitudinal_'+strat_name)
workflow.connect(node, rsc_name, ds, rsc_key)
# individual minimal preprocessing items
for i in range(len(strat_nodes_list)):
rsc_nodes_suffix = "_%s_%d" % (node_suffix, i)
for rsc_key in strat_nodes_list[i].resource_pool.keys():
if rsc_key in Outputs.any:
node, rsc_name = strat_nodes_list[i][rsc_key]
ds = create_datasink(rsc_key + rsc_nodes_suffix, config, subject_id,
session_id_list[i], 'longitudinal_'+strat_name)
workflow.connect(node, rsc_name, ds, rsc_key)
rsc_key = 'anatomical_brain'
anat_preproc_node, rsc_name = strat_nodes_list[i][rsc_key]
workflow.connect(anat_preproc_node,
rsc_name, brain_merge_node,
                         'in{}'.format(i + 1))  # 'in{}'.format takes i + 1 because the Merge node's inputs start at 1
rsc_key = 'anatomical_skull_leaf'
anat_preproc_node, rsc_name = strat_nodes_list[i][rsc_key]
workflow.connect(anat_preproc_node,
rsc_name, skull_merge_node,
'in{}'.format(i + 1))
workflow.run()
return reg_strat_list # strat_nodes_list_list # for func wf?
# TODO check:
# 1 func alone works
# 2 anat + func works, pass anat strategy list?
def func_preproc_longitudinal_wf(subject_id, sub_list, config):
"""
Parameters
----------
subject_id : string
the id of the subject
sub_list : list of dict
        this is a list of sessions for one subject and each session is the same dictionary as the one given to
        prep_workflow
config : configuration
a configuration object containing the information of the pipeline config. (Same as for prep_workflow)
Returns
-------
strat_list_ses_list : list of list
a list of strategies; within each strategy, a list of sessions
"""
datasink = pe.Node(nio.DataSink(), name='sinker')
datasink.inputs.base_directory = config.workingDirectory
session_id_list = []
ses_list_strat_list = {}
workflow_name = 'func_preproc_longitudinal_' + str(subject_id)
workflow = pe.Workflow(name=workflow_name)
workflow.base_dir = config.workingDirectory
workflow.config['execution'] = {
'hash_method': 'timestamp',
'crashdump_dir': os.path.abspath(config.crashLogDirectory)
}
for sub_dict in sub_list:
if 'func' in sub_dict or 'rest' in sub_dict:
if 'func' in sub_dict:
func_paths_dict = sub_dict['func']
else:
func_paths_dict = sub_dict['rest']
unique_id = sub_dict['unique_id']
session_id_list.append(unique_id)
try:
creds_path = sub_dict['creds_path']
if creds_path and 'none' not in creds_path.lower():
if os.path.exists(creds_path):
input_creds_path = os.path.abspath(creds_path)
else:
err_msg = 'Credentials path: "%s" for subject "%s" was not ' \
'found. Check this path and try again.' % (
creds_path, subject_id)
raise Exception(err_msg)
else:
input_creds_path = None
except KeyError:
input_creds_path = None
strat = Strategy()
strat_list = [strat]
node_suffix = '_'.join([subject_id, unique_id])
# Functional Ingress Workflow
# add optional flag
workflow, diff, blip, fmap_rp_list = connect_func_ingress(workflow,
strat_list,
config,
sub_dict,
subject_id,
input_creds_path,
node_suffix)
# Functional Initial Prep Workflow
workflow, strat_list = connect_func_init(workflow, strat_list, config, node_suffix)
# Functional Image Preprocessing Workflow
workflow, strat_list = connect_func_preproc(workflow, strat_list, config, node_suffix)
# Distortion Correction
workflow, strat_list = connect_distortion_correction(workflow,
strat_list,
config,
diff,
blip,
fmap_rp_list,
node_suffix)
ses_list_strat_list[node_suffix] = strat_list
# Here we have all the func_preproc set up for every session of the subject
# TODO create a list of list ses_list_strat_list
# a list of skullstripping strategies,
# a list of sessions within each strategy list
# TODO rename and reorganize dict
# TODO update strat name
strat_list_ses_list = {}
strat_list_ses_list['func_default'] = []
for sub_ses_id, strat_nodes_list in ses_list_strat_list.items():
strat_list_ses_list['func_default'].append(strat_nodes_list[0])
workflow.run()
return strat_list_ses_list
def merge_func_preproc(working_directory):
"""
Parameters
----------
working_directory : string
a path to the working directory
Returns
-------
    brain_list : list
        a list of preprocessed functional brain files
    skull_list : list
        a list of preprocessed functional skull (whole-head) files
"""
brain_list = []
skull_list = []
for dirpath, dirnames, filenames in os.walk(working_directory):
for f in filenames:
if 'func_get_preprocessed_median' in dirpath and '.nii.gz' in f:
filepath = os.path.join(dirpath, f)
brain_list.append(filepath)
if 'func_get_motion_correct_median' in dirpath and '.nii.gz' in f:
filepath = os.path.join(dirpath, f)
skull_list.append(filepath)
brain_list.sort()
skull_list.sort()
return brain_list, skull_list
def register_func_longitudinal_template_to_standard(longitudinal_template_node, c, workflow, strat_init, strat_name):
sub_mem_gb, num_cores_per_sub, num_ants_cores = \
check_config_resources(c)
strat_init_new = strat_init.fork()
strat_init_new.update_resource_pool({
'functional_preprocessed_median': (longitudinal_template_node, 'brain_template'),
'motion_correct_median': (longitudinal_template_node, 'skull_template')
})
strat_list = [strat_init_new]
new_strat_list = []
if 'FSL' in c.regOption:
for num_strat, strat in enumerate(strat_list):
flirt_reg_func_mni = create_fsl_flirt_linear_reg(
'func_mni_flirt_register_%s_%d' % (strat_name, num_strat)
)
            # if someone doesn't have funcRegFSLinterpolation in their pipe config,
            # sinc will be the default option
if not hasattr(c, 'funcRegFSLinterpolation'):
setattr(c, 'funcRegFSLinterpolation', 'sinc')
if c.funcRegFSLinterpolation not in ["trilinear", "sinc", "spline"]:
                err_msg = 'The selected FSL interpolation method must be one of: "trilinear", "sinc", "spline"'
raise Exception(err_msg)
# Input registration parameters
flirt_reg_func_mni.inputs.inputspec.interp = c.funcRegFSLinterpolation
node, out_file = strat['functional_preprocessed_median']
workflow.connect(node, out_file,
flirt_reg_func_mni, 'inputspec.input_brain')
# pass the reference files
node, out_file = strat['template_brain_for_func_preproc']
workflow.connect(node, out_file, flirt_reg_func_mni,
'inputspec.reference_brain')
if 'ANTS' in c.regOption:
strat = strat.fork()
new_strat_list.append(strat)
strat.append_name(flirt_reg_func_mni.name)
strat.update_resource_pool({
'registration_method': 'FSL',
'func_longitudinal_to_mni_linear_xfm': (flirt_reg_func_mni, 'outputspec.linear_xfm'),
'mni_to_func_longitudinal_linear_xfm': (flirt_reg_func_mni, 'outputspec.invlinear_xfm'),
'func_longitudinal_template_to_standard': (flirt_reg_func_mni, 'outputspec.output_brain')
})
strat_list += new_strat_list
new_strat_list = []
try:
fsl_linear_reg_only = c.fsl_linear_reg_only
except AttributeError:
fsl_linear_reg_only = [0]
if 'FSL' in c.regOption and 0 in fsl_linear_reg_only:
for num_strat, strat in enumerate(strat_list):
if strat.get('registration_method') == 'FSL':
fnirt_reg_func_mni = create_fsl_fnirt_nonlinear_reg(
'func_mni_fnirt_register_%s_%d' % (strat_name, num_strat)
)
# brain input
node, out_file = strat['functional_preprocessed_median']
workflow.connect(node, out_file,
fnirt_reg_func_mni, 'inputspec.input_brain')
# brain reference
node, out_file = strat['template_brain_for_func_preproc']
workflow.connect(node, out_file,
fnirt_reg_func_mni, 'inputspec.reference_brain')
# skull input
node, out_file = strat['motion_correct_median']
workflow.connect(node, out_file,
fnirt_reg_func_mni, 'inputspec.input_skull')
# skull reference
node, out_file = strat['template_skull_for_func_preproc']
workflow.connect(node, out_file,
fnirt_reg_func_mni, 'inputspec.reference_skull')
node, out_file = strat['func_longitudinal_to_mni_linear_xfm']
workflow.connect(node, out_file,
fnirt_reg_func_mni, 'inputspec.linear_aff')
node, out_file = strat['template_ref_mask']
workflow.connect(node, out_file,
fnirt_reg_func_mni, 'inputspec.ref_mask')
# assign the FSL FNIRT config file specified in pipeline
# config.yml
fnirt_reg_func_mni.inputs.inputspec.fnirt_config = c.fnirtConfig
if 1 in fsl_linear_reg_only:
strat = strat.fork()
new_strat_list.append(strat)
strat.append_name(fnirt_reg_func_mni.name)
strat.update_resource_pool({
'func_longitudinal_to_mni_nonlinear_xfm': (fnirt_reg_func_mni, 'outputspec.nonlinear_xfm'),
'func_longitudinal_template_to_standard': (fnirt_reg_func_mni, 'outputspec.output_brain')
}, override=True)
strat_list += new_strat_list
new_strat_list = []
for num_strat, strat in enumerate(strat_list):
        # or run ANTS registration to the MNI template instead
if 'ANTS' in c.regOption and \
strat.get('registration_method') != 'FSL':
ants_reg_func_mni = \
create_wf_calculate_ants_warp(
'func_mni_ants_register_%s_%d' % (strat_name, num_strat),
num_threads=num_ants_cores,
reg_ants_skull=c.regWithSkull
)
if not hasattr(c, 'funcRegANTSinterpolation'):
setattr(c, 'funcRegANTSinterpolation', 'LanczosWindowedSinc')
if c.funcRegANTSinterpolation not in ['Linear', 'BSpline', 'LanczosWindowedSinc']:
                err_msg = 'The selected ANTS interpolation method must be one of: "Linear", "BSpline", "LanczosWindowedSinc"'
raise Exception(err_msg)
# Input registration parameters
ants_reg_func_mni.inputs.inputspec.interp = c.funcRegANTSinterpolation
            # calculating the transform with the skull-stripped image is
            # reported to be better, but it requires very high quality
            # skull-stripping. If skull-stripping is imprecise,
            # registration with the skull is preferred
if 1 in c.regWithSkull:
                # get the skull-stripped median functional from the resource pool
node, out_file = strat['functional_preprocessed_median']
                # pass it to the registration workflow
workflow.connect(node, out_file,
ants_reg_func_mni, 'inputspec.moving_brain')
                # get the motion-corrected, skull-on median functional from the resource pool
node, out_file = strat['motion_correct_median']
                # pass it to the registration workflow
workflow.connect(node, out_file,
ants_reg_func_mni, 'inputspec.moving_skull')
# pass the reference file
node, out_file = strat['template_brain_for_func_preproc']
workflow.connect(node, out_file,
ants_reg_func_mni, 'inputspec.reference_brain')
# pass the reference file
node, out_file = strat['template_skull_for_func_preproc']
workflow.connect(node, out_file,
ants_reg_func_mni, 'inputspec.reference_skull')
else:
node, out_file = strat['functional_preprocessed_median']
workflow.connect(node, out_file,
ants_reg_func_mni, 'inputspec.moving_brain')
# pass the reference file
node, out_file = strat['template_brain_for_func_preproc']
workflow.connect(node, out_file,
ants_reg_func_mni, 'inputspec.reference_brain')
ants_reg_func_mni.inputs.inputspec.ants_para = c.ANTs_para_T1_registration
ants_reg_func_mni.inputs.inputspec.fixed_image_mask = None
strat.append_name(ants_reg_func_mni.name)
strat.update_resource_pool({
'registration_method': 'ANTS',
'ants_initial_xfm': (ants_reg_func_mni, 'outputspec.ants_initial_xfm'),
'ants_rigid_xfm': (ants_reg_func_mni, 'outputspec.ants_rigid_xfm'),
'ants_affine_xfm': (ants_reg_func_mni, 'outputspec.ants_affine_xfm'),
'func_longitudinal_to_mni_nonlinear_xfm': (ants_reg_func_mni, 'outputspec.warp_field'),
'mni_to_func_longitudinal_nonlinear_xfm': (ants_reg_func_mni, 'outputspec.inverse_warp_field'),
'func_longitudinal_to_mni_ants_composite_xfm': (ants_reg_func_mni, 'outputspec.composite_transform'),
'func_longitudinal_template_to_standard': (ants_reg_func_mni, 'outputspec.normalized_output_brain')
})
strat_list += new_strat_list
'''
# Func -> T1 Registration (Initial Linear Reg)
workflow, strat_list, diff_complete = connect_func_to_anat_init_reg(workflow, strat_list, c)
# Func -> T1 Registration (BBREG)
workflow, strat_list = connect_func_to_anat_bbreg(workflow, strat_list, c, diff_complete)
# Func -> T1/EPI Template
workflow, strat_list = connect_func_to_template_reg(workflow, strat_list, c)
'''
return workflow, strat_list
def func_longitudinal_template_wf(subject_id, strat_list, config):
'''
Parameters
----------
subject_id : string
the id of the subject
strat_list : list of list
first level strategy, second level session
config : configuration
a configuration object containing the information of the pipeline config.
Returns
-------
None
'''
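    # Illustrative call sequence (variable names are hypothetical): the per-session
    # strategies returned by func_preproc_longitudinal_wf feed this workflow, e.g.
    #   strat_list_ses_list = func_preproc_longitudinal_wf(subject_id, sub_list, config)
    #   func_longitudinal_template_wf(subject_id, strat_list_ses_list, config)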
workflow_name = 'func_longitudinal_template_' + str(subject_id)
workflow = pe.Workflow(name=workflow_name)
workflow.base_dir = config.workingDirectory
workflow.config['execution'] = {
'hash_method': 'timestamp',
'crashdump_dir': os.path.abspath(config.crashLogDirectory)
}
# strat_nodes_list = strat_list['func_default']
strat_init = Strategy()
templates_for_resampling = [
(config.resolution_for_func_preproc, config.template_brain_only_for_func, 'template_brain_for_func_preproc', 'resolution_for_func_preproc'),
(config.resolution_for_func_preproc, config.template_skull_for_func, 'template_skull_for_func_preproc', 'resolution_for_func_preproc'),
(config.resolution_for_func_preproc, config.ref_mask_for_func, 'template_ref_mask', 'resolution_for_func_preproc'), # TODO check float resolution
(config.resolution_for_func_preproc, config.template_epi, 'template_epi', 'resolution_for_func_preproc'),
(config.resolution_for_func_derivative, config.template_epi, 'template_epi_derivative', 'resolution_for_func_derivative'),
(config.resolution_for_func_derivative, config.template_brain_only_for_func, 'template_brain_for_func_derivative', 'resolution_for_func_preproc'),
(config.resolution_for_func_derivative, config.template_skull_for_func, 'template_skull_for_func_derivative', 'resolution_for_func_preproc'),
]
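    # Each tuple holds (output resolution, template file, resource pool key, resolution tag)
    # and is unpacked as (resolution, template, template_name, tag) in the loop below.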
for resolution, template, template_name, tag in templates_for_resampling:
resampled_template = pe.Node(Function(input_names=['resolution', 'template', 'template_name', 'tag'],
output_names=['resampled_template'],
function=resolve_resolution,
as_module=True),
name='resampled_' + template_name)
resampled_template.inputs.resolution = resolution
resampled_template.inputs.template = template
resampled_template.inputs.template_name = template_name
resampled_template.inputs.tag = tag
strat_init.update_resource_pool({
template_name: (resampled_template, 'resampled_template')
})
merge_func_preproc_node = pe.Node(Function(input_names=['working_directory'],
output_names=['brain_list', 'skull_list'],
function=merge_func_preproc,
as_module=True),
name='merge_func_preproc')
merge_func_preproc_node.inputs.working_directory = config.workingDirectory
template_node = subject_specific_template(
workflow_name='subject_specific_func_template_' + subject_id
)
template_node.inputs.set(
avg_method=config.longitudinal_template_average_method,
dof=config.longitudinal_template_dof,
interp=config.longitudinal_template_interp,
cost=config.longitudinal_template_cost,
convergence_threshold=config.longitudinal_template_convergence_threshold,
thread_pool=config.longitudinal_template_thread_pool,
)
workflow.connect(merge_func_preproc_node, 'brain_list',
template_node, 'input_brain_list')
workflow.connect(merge_func_preproc_node, 'skull_list',
template_node, 'input_skull_list')
workflow, strat_list = register_func_longitudinal_template_to_standard(
template_node,
config,
workflow,
strat_init,
'default'
)
workflow.run()
return
|
[
"CPAC.anat_preproc.anat_preproc.create_anat_preproc",
"CPAC.utils.interfaces.datasink.DataSink",
"os.walk",
"CPAC.registration.create_fsl_flirt_linear_reg",
"CPAC.seg_preproc.seg_preproc.connect_anat_segmentation",
"CPAC.func_preproc.func_ingress.connect_func_ingress",
"os.path.join",
"CPAC.utils.interfaces.function.Function",
"CPAC.func_preproc.func_preproc.connect_func_init",
"CPAC.utils.utils.check_config_resources",
"CPAC.distortion_correction.distortion_correction.connect_distortion_correction",
"os.path.abspath",
"CPAC.func_preproc.func_preproc.connect_func_preproc",
"os.path.exists",
"CPAC.utils.Strategy",
"nipype.interfaces.afni.CenterMass",
"nipype.logging.getLogger",
"nipype.pipeline.engine.Workflow",
"nipype.interfaces.fsl.ConvertXFM",
"nipype.interfaces.fsl.ApplyXFM",
"nipype.interfaces.io.DataSink",
"nipype.config.outputDirectory.lower",
"nipype.interfaces.utility.Function",
"CPAC.registration.create_fsl_fnirt_nonlinear_reg",
"CPAC.longitudinal_pipeline.longitudinal_preproc.subject_specific_template",
"nipype.interfaces.fsl.maths.MathsCommand",
"nipype.interfaces.fsl.ApplyWarp",
"indi_aws.aws_utils.test_bucket_access",
"CPAC.registration.create_wf_calculate_ants_warp",
"CPAC.utils.datasource.create_anat_datasource"
] |
[((1863, 1899), 'nipype.logging.getLogger', 'logging.getLogger', (['"""nipype.workflow"""'], {}), "('nipype.workflow')\n", (1880, 1899), False, 'from nipype import logging\n'), ((2967, 2992), 'CPAC.utils.utils.check_config_resources', 'check_config_resources', (['c'], {}), '(c)\n', (2989, 2992), False, 'from CPAC.utils.utils import check_config_resources, check_system_deps, get_scan_params, get_tr\n'), ((22644, 22706), 'CPAC.seg_preproc.seg_preproc.connect_anat_segmentation', 'connect_anat_segmentation', (['workflow', 'strat_list', 'c', 'strat_name'], {}), '(workflow, strat_list, c, strat_name)\n', (22669, 22706), False, 'from CPAC.seg_preproc.seg_preproc import connect_anat_segmentation\n'), ((24626, 24720), 'os.path.join', 'os.path.join', (["('pipeline_%s_%s' % (config.pipelineName, strat_name))", 'subject_id', 'session_id'], {}), "('pipeline_%s_%s' % (config.pipelineName, strat_name),\n subject_id, session_id)\n", (24638, 24720), False, 'import os\n'), ((37026, 37036), 'CPAC.utils.Strategy', 'Strategy', ([], {}), '()\n', (37034, 37036), False, 'from CPAC.utils import Strategy, find_files, function, Outputs\n'), ((53492, 53523), 'nipype.pipeline.engine.Workflow', 'pe.Workflow', ([], {'name': 'workflow_name'}), '(name=workflow_name)\n', (53503, 53523), True, 'import nipype.pipeline.engine as pe\n'), ((57425, 57451), 'os.walk', 'os.walk', (['working_directory'], {}), '(working_directory)\n', (57432, 57451), False, 'import os\n'), ((58096, 58121), 'CPAC.utils.utils.check_config_resources', 'check_config_resources', (['c'], {}), '(c)\n', (58118, 58121), False, 'from CPAC.utils.utils import check_config_resources, check_system_deps, get_scan_params, get_tr\n'), ((67836, 67867), 'nipype.pipeline.engine.Workflow', 'pe.Workflow', ([], {'name': 'workflow_name'}), '(name=workflow_name)\n', (67847, 67867), True, 'import nipype.pipeline.engine as pe\n'), ((68132, 68142), 'CPAC.utils.Strategy', 'Strategy', ([], {}), '()\n', (68140, 68142), False, 'from CPAC.utils import Strategy, find_files, function, Outputs\n'), ((70531, 70622), 'CPAC.longitudinal_pipeline.longitudinal_preproc.subject_specific_template', 'subject_specific_template', ([], {'workflow_name': "('subject_specific_func_template_' + subject_id)"}), "(workflow_name='subject_specific_func_template_' +\n subject_id)\n", (70556, 70622), False, 'from CPAC.longitudinal_pipeline.longitudinal_preproc import subject_specific_template\n'), ((27278, 27319), 'os.path.abspath', 'os.path.abspath', (['config.crashLogDirectory'], {}), '(config.crashLogDirectory)\n', (27293, 27319), False, 'import os\n'), ((27636, 27798), 'CPAC.utils.interfaces.function.Function', 'Function', ([], {'input_names': "['resolution', 'template', 'template_name', 'tag']", 'output_names': "['resampled_template']", 'function': 'resolve_resolution', 'as_module': '(True)'}), "(input_names=['resolution', 'template', 'template_name', 'tag'],\n output_names=['resampled_template'], function=resolve_resolution,\n as_module=True)\n", (27644, 27798), False, 'from CPAC.utils.interfaces.function import Function\n'), ((30632, 30642), 'CPAC.utils.Strategy', 'Strategy', ([], {}), '()\n', (30640, 30642), False, 'from CPAC.utils import Strategy, find_files, function, Outputs\n'), ((30743, 30797), 'CPAC.utils.datasource.create_anat_datasource', 'create_anat_datasource', (["('anat_gather_%s' % node_suffix)"], {}), "('anat_gather_%s' % node_suffix)\n", (30765, 30797), False, 'from CPAC.utils.datasource import resolve_resolution, create_anat_datasource, create_func_datasource, 
create_check_for_s3_node\n'), ((40206, 40298), 'CPAC.longitudinal_pipeline.longitudinal_preproc.subject_specific_template', 'subject_specific_template', ([], {'workflow_name': "('subject_specific_anat_template_' + node_suffix)"}), "(workflow_name='subject_specific_anat_template_' +\n node_suffix)\n", (40231, 40298), False, 'from CPAC.longitudinal_pipeline.longitudinal_preproc import subject_specific_template\n'), ((53261, 53275), 'nipype.interfaces.io.DataSink', 'nio.DataSink', ([], {}), '()\n', (53273, 53275), True, 'import nipype.interfaces.io as nio\n'), ((53670, 53711), 'os.path.abspath', 'os.path.abspath', (['config.crashLogDirectory'], {}), '(config.crashLogDirectory)\n', (53685, 53711), False, 'import os\n'), ((68014, 68055), 'os.path.abspath', 'os.path.abspath', (['config.crashLogDirectory'], {}), '(config.crashLogDirectory)\n', (68029, 68055), False, 'import os\n'), ((70049, 70184), 'CPAC.utils.interfaces.function.Function', 'Function', ([], {'input_names': "['working_directory']", 'output_names': "['brain_list', 'skull_list']", 'function': 'merge_func_preproc', 'as_module': '(True)'}), "(input_names=['working_directory'], output_names=['brain_list',\n 'skull_list'], function=merge_func_preproc, as_module=True)\n", (70057, 70184), False, 'from CPAC.utils.interfaces.function import Function\n'), ((2056, 2080), 'nipype.interfaces.fsl.maths.MathsCommand', 'fsl.maths.MathsCommand', ([], {}), '()\n', (2078, 2080), True, 'import nipype.interfaces.fsl as fsl\n'), ((3997, 4087), 'CPAC.registration.create_fsl_flirt_linear_reg', 'create_fsl_flirt_linear_reg', (["('anat_mni_flirt_register_%s_%d' % (strat_name, num_strat))"], {}), "('anat_mni_flirt_register_%s_%d' % (strat_name,\n num_strat))\n", (4024, 4087), False, 'from CPAC.registration import create_fsl_flirt_linear_reg, create_fsl_fnirt_nonlinear_reg, create_register_func_to_anat, create_bbregister_func_to_anat, create_wf_calculate_ants_warp, connect_func_to_anat_init_reg, connect_func_to_anat_bbreg, connect_func_to_template_reg, output_func_to_standard\n'), ((8535, 8685), 'CPAC.registration.create_wf_calculate_ants_warp', 'create_wf_calculate_ants_warp', (["('anat_mni_ants_register_%s_%d' % (strat_name, num_strat))"], {'num_threads': 'num_ants_cores', 'reg_ants_skull': 'c.regWithSkull'}), "('anat_mni_ants_register_%s_%d' % (strat_name,\n num_strat), num_threads=num_ants_cores, reg_ants_skull=c.regWithSkull)\n", (8564, 8685), False, 'from CPAC.registration import create_fsl_flirt_linear_reg, create_fsl_fnirt_nonlinear_reg, create_register_func_to_anat, create_bbregister_func_to_anat, create_wf_calculate_ants_warp, connect_func_to_anat_init_reg, connect_func_to_anat_bbreg, connect_func_to_template_reg, output_func_to_standard\n'), ((23420, 23447), 'os.path.abspath', 'os.path.abspath', (['creds_path'], {}), '(creds_path)\n', (23435, 23447), False, 'import os\n'), ((23599, 23663), 'indi_aws.aws_utils.test_bucket_access', 'aws_utils.test_bucket_access', (['creds_path', 'config.outputDirectory'], {}), '(creds_path, config.outputDirectory)\n', (23627, 23663), False, 'from indi_aws import aws_utils\n'), ((24199, 24236), 'CPAC.utils.interfaces.datasink.DataSink', 'DataSink', ([], {'infields': 'map_node_iterfield'}), '(infields=map_node_iterfield)\n', (24207, 24236), False, 'from CPAC.utils.interfaces.datasink import DataSink\n'), ((24385, 24395), 'CPAC.utils.interfaces.datasink.DataSink', 'DataSink', ([], {}), '()\n', (24393, 24395), False, 'from CPAC.utils.interfaces.datasink import DataSink\n'), ((28429, 28446), 
'nipype.interfaces.afni.CenterMass', 'afni.CenterMass', ([], {}), '()\n', (28444, 28446), True, 'import nipype.interfaces.afni as afni\n'), ((31497, 31550), 'CPAC.utils.datasource.create_anat_datasource', 'create_anat_datasource', (["('brain_gather_%s' % unique_id)"], {}), "('brain_gather_%s' % unique_id)\n", (31519, 31550), False, 'from CPAC.utils.datasource import resolve_resolution, create_anat_datasource, create_func_datasource, create_check_for_s3_node\n'), ((32151, 32241), 'CPAC.anat_preproc.anat_preproc.create_anat_preproc', 'create_anat_preproc', ([], {'method': 'skullstrip_method', 'config': 'config', 'wf_name': 'preproc_wf_name'}), '(method=skullstrip_method, config=config, wf_name=\n preproc_wf_name)\n', (32170, 32241), False, 'from CPAC.anat_preproc.anat_preproc import create_anat_preproc\n'), ((38620, 38782), 'CPAC.utils.interfaces.function.Function', 'Function', ([], {'input_names': "['resolution', 'template', 'template_name', 'tag']", 'output_names': "['resampled_template']", 'function': 'resolve_resolution', 'as_module': '(True)'}), "(input_names=['resolution', 'template', 'template_name', 'tag'],\n output_names=['resampled_template'], function=resolve_resolution,\n as_module=True)\n", (38628, 38782), False, 'from CPAC.utils.interfaces.function import Function\n'), ((54780, 54790), 'CPAC.utils.Strategy', 'Strategy', ([], {}), '()\n', (54788, 54790), False, 'from CPAC.utils import Strategy, find_files, function, Outputs\n'), ((55008, 55115), 'CPAC.func_preproc.func_ingress.connect_func_ingress', 'connect_func_ingress', (['workflow', 'strat_list', 'config', 'sub_dict', 'subject_id', 'input_creds_path', 'node_suffix'], {}), '(workflow, strat_list, config, sub_dict, subject_id,\n input_creds_path, node_suffix)\n', (55028, 55115), False, 'from CPAC.func_preproc.func_ingress import connect_func_ingress\n'), ((55605, 55665), 'CPAC.func_preproc.func_preproc.connect_func_init', 'connect_func_init', (['workflow', 'strat_list', 'config', 'node_suffix'], {}), '(workflow, strat_list, config, node_suffix)\n', (55622, 55665), False, 'from CPAC.func_preproc.func_preproc import connect_func_init, connect_func_preproc, create_func_preproc, create_wf_edit_func\n'), ((55756, 55819), 'CPAC.func_preproc.func_preproc.connect_func_preproc', 'connect_func_preproc', (['workflow', 'strat_list', 'config', 'node_suffix'], {}), '(workflow, strat_list, config, node_suffix)\n', (55776, 55819), False, 'from CPAC.func_preproc.func_preproc import connect_func_init, connect_func_preproc, create_func_preproc, create_wf_edit_func\n'), ((55892, 55994), 'CPAC.distortion_correction.distortion_correction.connect_distortion_correction', 'connect_distortion_correction', (['workflow', 'strat_list', 'config', 'diff', 'blip', 'fmap_rp_list', 'node_suffix'], {}), '(workflow, strat_list, config, diff, blip,\n fmap_rp_list, node_suffix)\n', (55921, 55994), False, 'from CPAC.distortion_correction.distortion_correction import connect_distortion_correction\n'), ((58562, 58652), 'CPAC.registration.create_fsl_flirt_linear_reg', 'create_fsl_flirt_linear_reg', (["('func_mni_flirt_register_%s_%d' % (strat_name, num_strat))"], {}), "('func_mni_flirt_register_%s_%d' % (strat_name,\n num_strat))\n", (58589, 58652), False, 'from CPAC.registration import create_fsl_flirt_linear_reg, create_fsl_fnirt_nonlinear_reg, create_register_func_to_anat, create_bbregister_func_to_anat, create_wf_calculate_ants_warp, connect_func_to_anat_init_reg, connect_func_to_anat_bbreg, connect_func_to_template_reg, output_func_to_standard\n'), ((63180, 
63330), 'CPAC.registration.create_wf_calculate_ants_warp', 'create_wf_calculate_ants_warp', (["('func_mni_ants_register_%s_%d' % (strat_name, num_strat))"], {'num_threads': 'num_ants_cores', 'reg_ants_skull': 'c.regWithSkull'}), "('func_mni_ants_register_%s_%d' % (strat_name,\n num_strat), num_threads=num_ants_cores, reg_ants_skull=c.regWithSkull)\n", (63209, 63330), False, 'from CPAC.registration import create_fsl_flirt_linear_reg, create_fsl_fnirt_nonlinear_reg, create_register_func_to_anat, create_bbregister_func_to_anat, create_wf_calculate_ants_warp, connect_func_to_anat_init_reg, connect_func_to_anat_bbreg, connect_func_to_template_reg, output_func_to_standard\n'), ((69296, 69458), 'CPAC.utils.interfaces.function.Function', 'Function', ([], {'input_names': "['resolution', 'template', 'template_name', 'tag']", 'output_names': "['resampled_template']", 'function': 'resolve_resolution', 'as_module': '(True)'}), "(input_names=['resolution', 'template', 'template_name', 'tag'],\n output_names=['resampled_template'], function=resolve_resolution,\n as_module=True)\n", (69304, 69458), False, 'from CPAC.utils.interfaces.function import Function\n'), ((6112, 6206), 'CPAC.registration.create_fsl_fnirt_nonlinear_reg', 'create_fsl_fnirt_nonlinear_reg', (["('anat_mni_fnirt_register_%s_%d' % (strat_name, num_strat))"], {}), "('anat_mni_fnirt_register_%s_%d' % (\n strat_name, num_strat))\n", (6142, 6206), False, 'from CPAC.registration import create_fsl_flirt_linear_reg, create_fsl_fnirt_nonlinear_reg, create_register_func_to_anat, create_bbregister_func_to_anat, create_wf_calculate_ants_warp, connect_func_to_anat_init_reg, connect_func_to_anat_bbreg, connect_func_to_template_reg, output_func_to_standard\n'), ((14206, 14307), 'CPAC.registration.create_fsl_flirt_linear_reg', 'create_fsl_flirt_linear_reg', (["('anat_symmetric_mni_flirt_register_%s_%d' % (strat_name, num_strat))"], {}), "('anat_symmetric_mni_flirt_register_%s_%d' % (\n strat_name, num_strat))\n", (14233, 14307), False, 'from CPAC.registration import create_fsl_flirt_linear_reg, create_fsl_fnirt_nonlinear_reg, create_register_func_to_anat, create_bbregister_func_to_anat, create_wf_calculate_ants_warp, connect_func_to_anat_init_reg, connect_func_to_anat_bbreg, connect_func_to_template_reg, output_func_to_standard\n'), ((18266, 18432), 'CPAC.registration.create_wf_calculate_ants_warp', 'create_wf_calculate_ants_warp', (["('anat_symmetric_mni_ants_register_%s_%d' % (strat_name, num_strat))"], {'num_threads': 'num_ants_cores', 'reg_ants_skull': 'c.regWithSkull'}), "('anat_symmetric_mni_ants_register_%s_%d' % (\n strat_name, num_strat), num_threads=num_ants_cores, reg_ants_skull=c.\n regWithSkull)\n", (18295, 18432), False, 'from CPAC.registration import create_fsl_flirt_linear_reg, create_fsl_fnirt_nonlinear_reg, create_register_func_to_anat, create_bbregister_func_to_anat, create_wf_calculate_ants_warp, connect_func_to_anat_init_reg, connect_func_to_anat_bbreg, connect_func_to_template_reg, output_func_to_standard\n'), ((23460, 23490), 'nipype.config.outputDirectory.lower', 'config.outputDirectory.lower', ([], {}), '()\n', (23488, 23490), False, 'from nipype import config\n'), ((29249, 29275), 'os.path.exists', 'os.path.exists', (['creds_path'], {}), '(creds_path)\n', (29263, 29275), False, 'import os\n'), ((32813, 32930), 'CPAC.anat_preproc.anat_preproc.create_anat_preproc', 'create_anat_preproc', ([], {'method': 'skullstrip_method', 'already_skullstripped': '(True)', 'config': 'config', 'wf_name': 'preproc_wf_name'}), 
'(method=skullstrip_method, already_skullstripped=True,\n config=config, wf_name=preproc_wf_name)\n', (32832, 32930), False, 'from CPAC.anat_preproc.anat_preproc import create_anat_preproc\n'), ((44770, 44786), 'nipype.interfaces.fsl.ConvertXFM', 'fsl.ConvertXFM', ([], {}), '()\n', (44784, 44786), True, 'import nipype.interfaces.fsl as fsl\n'), ((57585, 57609), 'os.path.join', 'os.path.join', (['dirpath', 'f'], {}), '(dirpath, f)\n', (57597, 57609), False, 'import os\n'), ((57760, 57784), 'os.path.join', 'os.path.join', (['dirpath', 'f'], {}), '(dirpath, f)\n', (57772, 57784), False, 'import os\n'), ((60713, 60807), 'CPAC.registration.create_fsl_fnirt_nonlinear_reg', 'create_fsl_fnirt_nonlinear_reg', (["('func_mni_fnirt_register_%s_%d' % (strat_name, num_strat))"], {}), "('func_mni_fnirt_register_%s_%d' % (\n strat_name, num_strat))\n", (60743, 60807), False, 'from CPAC.registration import create_fsl_flirt_linear_reg, create_fsl_fnirt_nonlinear_reg, create_register_func_to_anat, create_bbregister_func_to_anat, create_wf_calculate_ants_warp, connect_func_to_anat_init_reg, connect_func_to_anat_bbreg, connect_func_to_template_reg, output_func_to_standard\n'), ((15966, 16069), 'CPAC.registration.create_fsl_fnirt_nonlinear_reg', 'create_fsl_fnirt_nonlinear_reg', (["('anat_symmetric_mni_fnirt_register_%s_%d' % (strat_name, num_strat))"], {}), "('anat_symmetric_mni_fnirt_register_%s_%d' %\n (strat_name, num_strat))\n", (15996, 16069), False, 'from CPAC.registration import create_fsl_flirt_linear_reg, create_fsl_fnirt_nonlinear_reg, create_register_func_to_anat, create_bbregister_func_to_anat, create_wf_calculate_ants_warp, connect_func_to_anat_init_reg, connect_func_to_anat_bbreg, connect_func_to_template_reg, output_func_to_standard\n'), ((23849, 23879), 'nipype.config.outputDirectory.lower', 'config.outputDirectory.lower', ([], {}), '()\n', (23877, 23879), False, 'from nipype import config\n'), ((29316, 29343), 'os.path.abspath', 'os.path.abspath', (['creds_path'], {}), '(creds_path)\n', (29331, 29343), False, 'import os\n'), ((33470, 33560), 'CPAC.anat_preproc.anat_preproc.create_anat_preproc', 'create_anat_preproc', ([], {'method': 'skullstrip_method', 'config': 'config', 'wf_name': 'preproc_wf_name'}), '(method=skullstrip_method, config=config, wf_name=\n preproc_wf_name)\n', (33489, 33560), False, 'from CPAC.anat_preproc.anat_preproc import create_anat_preproc\n'), ((35329, 35419), 'CPAC.anat_preproc.anat_preproc.create_anat_preproc', 'create_anat_preproc', ([], {'method': 'skullstrip_method', 'config': 'config', 'wf_name': 'preproc_wf_name'}), '(method=skullstrip_method, config=config, wf_name=\n preproc_wf_name)\n', (35348, 35419), False, 'from CPAC.anat_preproc.anat_preproc import create_anat_preproc\n'), ((54223, 54249), 'os.path.exists', 'os.path.exists', (['creds_path'], {}), '(creds_path)\n', (54237, 54249), False, 'import os\n'), ((41470, 41485), 'nipype.interfaces.fsl.ApplyWarp', 'fsl.ApplyWarp', ([], {}), '()\n', (41483, 41485), True, 'import nipype.interfaces.fsl as fsl\n'), ((42638, 42814), 'nipype.interfaces.utility.Function', 'util.Function', ([], {'input_names': "['moving_image', 'reference', 'initial', 'rigid', 'affine', 'nonlinear',\n 'interp']", 'output_names': "['out_image']", 'function': 'run_ants_apply_warp'}), "(input_names=['moving_image', 'reference', 'initial', 'rigid',\n 'affine', 'nonlinear', 'interp'], output_names=['out_image'], function=\n run_ants_apply_warp)\n", (42651, 42814), True, 'import nipype.interfaces.utility as util\n'), ((45269, 45283), 
'nipype.interfaces.fsl.ApplyXFM', 'fsl.ApplyXFM', ([], {}), '()\n', (45281, 45283), True, 'import nipype.interfaces.fsl as fsl\n'), ((54294, 54321), 'os.path.abspath', 'os.path.abspath', (['creds_path'], {}), '(creds_path)\n', (54309, 54321), False, 'import os\n'), ((46579, 46688), 'CPAC.utils.interfaces.function.Function', 'Function', ([], {'input_names': "['file_list', 'index', 'file_type']", 'output_names': "['file_name']", 'function': 'pick_map'}), "(input_names=['file_list', 'index', 'file_type'], output_names=[\n 'file_name'], function=pick_map)\n", (46587, 46688), False, 'from CPAC.utils.interfaces.function import Function\n'), ((47614, 47713), 'CPAC.utils.interfaces.function.Function', 'Function', ([], {'input_names': "['in_list1', 'in_list2']", 'output_names': "['out_list']", 'function': 'concat_list'}), "(input_names=['in_list1', 'in_list2'], output_names=['out_list'],\n function=concat_list)\n", (47622, 47713), False, 'from CPAC.utils.interfaces.function import Function\n'), ((46280, 46294), 'nipype.interfaces.fsl.ApplyXFM', 'fsl.ApplyXFM', ([], {}), '()\n', (46292, 46294), True, 'import nipype.interfaces.fsl as fsl\n')]
|
import sys
import time
import constants
import parser.correct as correct
import parser.export as export
import parser.read as read
class App:
"""
Base container class to divert all export
file conversions and error handling to
their respective packages and
libraries.
"""
def __init__(self, expansion=None, filename=None):
"""
Initialise the command line session, using
the correct expansion type to convert
and output readable data for our
visualiser to parse.
Args:
expansion (String): FamiTracker expansion chip to use as reference for parsing channel data. Defaults to None.
filename (String): Name of local file to be housed in same directory as script execution. Defaults to None.
"""
self.expansion = expansion
self.filename = filename
self.validateParameters()
correct.FixExport(self.filename)
"""Rewrite FamiTracker export file as there are existing problems that mask required data."""
self.reader = read.FileReader(self.filename)
full_path = self.reader.start()
"""Attempt to start reading the file if validation passes."""
timestamp = int(time.time())
"""Remove decimal places created by time.time() floating point precision for clean filenames."""
self.exporter = export.DataExporter(timestamp, full_path, self.expansion)
self.exporter.start()
"""Attempt to start writing to JSON config and CSV data files."""
def validateParameters(self):
"""
Ensure that the information passed to the
parser by the user via the command line
is in the correct format.
"""
if self.expansion is None:
"""Terminate execution if no expansion chip is provided."""
sys.stdout.write('Please provide a valid expansion chip name for parsing. Terminating...\n')
sys.exit()
elif self.expansion.lower() not in constants.expansions():
"""Ensure case-sensitivity doesn't get in the way of conversions."""
sys.stdout.write('Invalid expansion chip provided. Please reference the README for accepted formats. Terminating...\n')
sys.exit()
if self.filename is None:
"""Terminate execution if no filename is provided."""
sys.stdout.write('Please provide a valid .txt file for parsing. Terminating...\n')
sys.exit()
elif not self.filename.lower().endswith('.txt'):
"""Ensure case-sensitivity doesn't get in the way of conversions."""
sys.stdout.write('Invalid filename provided. Please reference the README for accepted formats. Terminating...\n')
sys.exit()
if '__main__' == __name__:
"""Initialise root app when file is executed via the command line."""
if 2 < len(sys.argv):
app = App(sys.argv[1], sys.argv[2])
elif 1 < len(sys.argv):
app = App(sys.argv[1])
else:
app = App()
|
[
"sys.stdout.write",
"time.time",
"parser.correct.FixExport",
"constants.expansions",
"parser.export.DataExporter",
"parser.read.FileReader",
"sys.exit"
] |
[((923, 955), 'parser.correct.FixExport', 'correct.FixExport', (['self.filename'], {}), '(self.filename)\n', (940, 955), True, 'import parser.correct as correct\n'), ((1081, 1111), 'parser.read.FileReader', 'read.FileReader', (['self.filename'], {}), '(self.filename)\n', (1096, 1111), True, 'import parser.read as read\n'), ((1389, 1446), 'parser.export.DataExporter', 'export.DataExporter', (['timestamp', 'full_path', 'self.expansion'], {}), '(timestamp, full_path, self.expansion)\n', (1408, 1446), True, 'import parser.export as export\n'), ((1247, 1258), 'time.time', 'time.time', ([], {}), '()\n', (1256, 1258), False, 'import time\n'), ((1861, 1958), 'sys.stdout.write', 'sys.stdout.write', (['"""Please provide a valid expansion chip name for parsing. Terminating...\n"""'], {}), "(\n 'Please provide a valid expansion chip name for parsing. Terminating...\\n')\n", (1877, 1958), False, 'import sys\n'), ((1966, 1976), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1974, 1976), False, 'import sys\n'), ((2393, 2480), 'sys.stdout.write', 'sys.stdout.write', (['"""Please provide a valid .txt file for parsing. Terminating...\n"""'], {}), "(\n 'Please provide a valid .txt file for parsing. Terminating...\\n')\n", (2409, 2480), False, 'import sys\n'), ((2488, 2498), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2496, 2498), False, 'import sys\n'), ((2020, 2042), 'constants.expansions', 'constants.expansions', ([], {}), '()\n', (2040, 2042), False, 'import constants\n'), ((2137, 2269), 'sys.stdout.write', 'sys.stdout.write', (['"""Invalid expansion chip provided. Please reference the README for accepted formats. Terminating...\n"""'], {}), '(\n """Invalid expansion chip provided. Please reference the README for accepted formats. Terminating...\n"""\n )\n', (2153, 2269), False, 'import sys\n'), ((2269, 2279), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2277, 2279), False, 'import sys\n'), ((2649, 2775), 'sys.stdout.write', 'sys.stdout.write', (['"""Invalid filename provided. Please reference the README for accepted formats. Terminating...\n"""'], {}), '(\n """Invalid filename provided. Please reference the README for accepted formats. Terminating...\n"""\n )\n', (2665, 2775), False, 'import sys\n'), ((2775, 2785), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2783, 2785), False, 'import sys\n')]
|
#!/usr/bin/env python
import sys
import loco
import tinymath as tm
import numpy as np
PHYSICS_BACKEND = loco.sim.PHYSICS_NONE
RENDERING_BACKEND = loco.sim.RENDERING_GLVIZ_GLFW
COM_TETRAHEDRON = [ 1.0 / 3.0, 1.0 / 3.0, 1.0 / 3.0 ]
TETRAHEDRON_VERTICES = [ 0.0 - COM_TETRAHEDRON[0], 0.0 - COM_TETRAHEDRON[1], 0.0 - COM_TETRAHEDRON[2],
1.0 - COM_TETRAHEDRON[0], 0.0 - COM_TETRAHEDRON[1], 0.0 - COM_TETRAHEDRON[2],
0.0 - COM_TETRAHEDRON[0], 1.0 - COM_TETRAHEDRON[1], 0.0 - COM_TETRAHEDRON[2],
0.0 - COM_TETRAHEDRON[0], 0.0 - COM_TETRAHEDRON[1], 1.0 - COM_TETRAHEDRON[2] ]
TETRAHEDRON_FACES = [ 0, 1, 3,
0, 2, 1,
0, 3, 2,
1, 2, 3 ]
COM_RAMP = [ 0.0, 7.0 / 9.0, 4.0 / 9.0 ]
RAMP_VERTICES = [ 1.0 - COM_RAMP[0], 0.0 - COM_RAMP[1], 0.0 - COM_RAMP[2],
1.0 - COM_RAMP[0], 2.0 - COM_RAMP[1], 0.0 - COM_RAMP[2],
1.0 - COM_RAMP[0], 1.0 - COM_RAMP[1], 1.0 - COM_RAMP[2],
1.0 - COM_RAMP[0], 0.0 - COM_RAMP[1], 1.0 - COM_RAMP[2],
-1.0 - COM_RAMP[0], 0.0 - COM_RAMP[1], 0.0 - COM_RAMP[2],
-1.0 - COM_RAMP[0], 2.0 - COM_RAMP[1], 0.0 - COM_RAMP[2],
-1.0 - COM_RAMP[0], 1.0 - COM_RAMP[1], 1.0 - COM_RAMP[2],
-1.0 - COM_RAMP[0], 0.0 - COM_RAMP[1], 1.0 - COM_RAMP[2] ]
RAMP_FACES = [ 0, 1, 2,
0, 2, 3,
0, 4, 5,
0, 5, 1,
0, 3, 7,
0, 7, 4,
2, 6, 7,
2, 7, 3,
1, 5, 6,
1, 6, 2,
4, 7, 6,
4, 6, 5 ]
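# Both vertex lists above are shifted by their COM_* offsets, so each mesh is
# expressed relative to its (assumed) center of mass, i.e. the body's local origin.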
def create_path_part( idx ) :
height = 1.0
inner_rad = 2.0
outer_rad = 3.0
dtheta = 2.0 * np.pi / 12.0
ctheta = np.cos( dtheta * idx )
stheta = np.sin( dtheta * idx )
ctheta_n = np.cos( dtheta * ( idx + 1 ) )
stheta_n = np.sin( dtheta * ( idx + 1 ) )
half_rad = 0.5* ( inner_rad + outer_rad )
com_position = [ half_rad * np.cos( ( idx + 0.5 ) * dtheta ),
half_rad * np.sin( ( idx + 0.5 ) * dtheta ),
0.5 * height ]
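    # Illustrative numbers: dtheta = 2*pi/12 is a 30-degree slice, so 12 parts close the
    # ring; for idx = 0 the segment spans 0..30 degrees and com_position is roughly
    # [2.41, 0.65, 0.5] (15 degrees around, at the mid radius 2.5).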
vertices = [ inner_rad * ctheta - com_position[0], inner_rad * stheta - com_position[1], 0.5 * height,
outer_rad * ctheta - com_position[0], outer_rad * stheta - com_position[1], 0.5 * height,
outer_rad * ctheta_n - com_position[0], outer_rad * stheta_n - com_position[1], 0.5 * height,
inner_rad * ctheta_n - com_position[0], inner_rad * stheta_n - com_position[1], 0.5 * height,
inner_rad * ctheta - com_position[0], inner_rad * stheta - com_position[1], -0.5 * height,
outer_rad * ctheta - com_position[0], outer_rad * stheta - com_position[1], -0.5 * height,
outer_rad * ctheta_n - com_position[0], outer_rad * stheta_n - com_position[1], -0.5 * height,
inner_rad * ctheta_n - com_position[0], inner_rad * stheta_n - com_position[1], -0.5 * height ]
faces = [ 0, 1, 2,
0, 2, 3,
0, 4, 5,
0, 5, 1,
0, 3, 7,
0, 7, 4,
2, 6, 7,
2, 7, 3,
1, 5, 6,
1, 6, 2,
4, 7, 6,
4, 6, 5 ]
return vertices, faces
if __name__ == '__main__' :
if len( sys.argv ) > 1 :
choice_backend = sys.argv[1]
if choice_backend == 'mujoco' :
PHYSICS_BACKEND = loco.sim.PHYSICS_MUJOCO
elif choice_backend == 'bullet' :
PHYSICS_BACKEND = loco.sim.PHYSICS_BULLET
elif choice_backend == 'dart' :
PHYSICS_BACKEND = loco.sim.PHYSICS_DART
elif choice_backend == 'raisim' :
PHYSICS_BACKEND = loco.sim.PHYSICS_RAISIM
print( 'Physics backend: {}'.format( PHYSICS_BACKEND ) )
print( 'Rendering backend: {}'.format( RENDERING_BACKEND ) )
#### rotation = tm.rotation( tm.Vector3f( [ np.pi / 3, np.pi / 4, np.pi / 6 ] ) )
#### rotation = tm.rotation( tm.Vector3f( [ np.pi / 2, 0.0, 0.0 ] ) )
rotation = tm.rotation( tm.Vector3f( [ 0.0, 0.0, 0.0 ] ) )
scenario = loco.sim.Scenario()
scenario.AddSingleBody( loco.sim.Plane( "floor", 10.0, 10.0, tm.Vector3f(), tm.Matrix3f() ) )
scenario.AddSingleBody( loco.sim.Sphere( "sphere", 0.1, [ 1.0, -1.0, 2.0 ], rotation ) )
scenario.AddSingleBody( loco.sim.Mesh( "tetrahedron_0",
TETRAHEDRON_VERTICES,
TETRAHEDRON_FACES,
1.0, [ -1.0, -1.0, 1.0 ], rotation ) )
scenario.AddSingleBody( loco.sim.Mesh( "tetrahedron_1",
TETRAHEDRON_VERTICES,
TETRAHEDRON_FACES,
0.5, [ -1.0, 1.0, 1.0 ], rotation ) )
scenario.AddSingleBody( loco.sim.Mesh( "ramp_0",
RAMP_VERTICES,
RAMP_FACES,
0.3, [ 1.0, 1.0, 1.0 ], rotation ) )
scenario.AddSingleBody( loco.sim.Mesh( "ramp_1",
RAMP_VERTICES,
RAMP_FACES,
0.5, [ 1.0, -1.0, 1.0 ], rotation ) )
for i in range( 0, 12 ) :
height = 1.0
inner_rad = 2.0
outer_rad = 3.0
half_rad = 0.5* ( inner_rad + outer_rad )
dtheta = 2.0 * np.pi / 12.0
com_position = [ half_rad * np.cos( ( i + 0.5 ) * dtheta ),
half_rad * np.sin( ( i + 0.5 ) * dtheta ),
0.5 * height ]
vertices, faces = create_path_part( i )
scenario.AddSingleBody( loco.sim.Mesh( "path_part_{}".format( i ),
vertices, faces,
1.0, com_position, tm.Matrix3f(),
loco.sim.DynamicsType.STATIC ) )
runtime = loco.sim.Runtime( PHYSICS_BACKEND, RENDERING_BACKEND )
simulation = runtime.CreateSimulation( scenario )
visualizer = runtime.CreateVisualizer( scenario )
sphere = scenario.GetSingleBodyByName( "sphere" )
floor = scenario.GetSingleBodyByName( "floor" )
floor.drawable.texture = 'built_in_chessboard'
floor.drawable.ambient = [ 0.3, 0.5, 0.7 ]
floor.drawable.diffuse = [ 0.3, 0.5, 0.7 ]
floor.drawable.specular = [ 0.3, 0.5, 0.7 ]
while visualizer.IsActive() :
if visualizer.CheckSingleKeyPress( loco.sim.Keys.KEY_ESCAPE ) :
break
elif visualizer.CheckSingleKeyPress( loco.sim.Keys.KEY_R ) :
simulation.Reset()
elif visualizer.CheckSingleKeyPress( loco.sim.Keys.KEY_P ) :
simulation.Pause() if simulation.running else simulation.Resume()
elif visualizer.CheckSingleKeyPress( loco.sim.Keys.KEY_SPACE ) :
sphere.AddForceCOM( [ 0.0, 0.0, 1000.0 ] )
elif visualizer.CheckSingleKeyPress( loco.sim.Keys.KEY_UP ) :
sphere.AddForceCOM( [ 0.0, 200.0, 0.0 ] )
elif visualizer.CheckSingleKeyPress( loco.sim.Keys.KEY_DOWN ) :
sphere.AddForceCOM( [ 0.0, -200.0, 0.0 ] )
elif visualizer.CheckSingleKeyPress( loco.sim.Keys.KEY_RIGHT ) :
sphere.AddForceCOM( [ 200.0, 0.0, 0.0 ] )
elif visualizer.CheckSingleKeyPress( loco.sim.Keys.KEY_LEFT ) :
sphere.AddForceCOM( [ -200.0, 0.0, 0.0 ] )
simulation.Step()
visualizer.Update()
runtime.DestroySimulation()
runtime.DestroyVisualizer()
|
[
"loco.sim.Scenario",
"tinymath.Matrix3f",
"numpy.sin",
"tinymath.Vector3f",
"loco.sim.Runtime",
"numpy.cos",
"loco.sim.Sphere",
"loco.sim.Mesh"
] |
[((1846, 1866), 'numpy.cos', 'np.cos', (['(dtheta * idx)'], {}), '(dtheta * idx)\n', (1852, 1866), True, 'import numpy as np\n'), ((1882, 1902), 'numpy.sin', 'np.sin', (['(dtheta * idx)'], {}), '(dtheta * idx)\n', (1888, 1902), True, 'import numpy as np\n'), ((1920, 1946), 'numpy.cos', 'np.cos', (['(dtheta * (idx + 1))'], {}), '(dtheta * (idx + 1))\n', (1926, 1946), True, 'import numpy as np\n'), ((1966, 1992), 'numpy.sin', 'np.sin', (['(dtheta * (idx + 1))'], {}), '(dtheta * (idx + 1))\n', (1972, 1992), True, 'import numpy as np\n'), ((4251, 4270), 'loco.sim.Scenario', 'loco.sim.Scenario', ([], {}), '()\n', (4268, 4270), False, 'import loco\n'), ((6219, 6271), 'loco.sim.Runtime', 'loco.sim.Runtime', (['PHYSICS_BACKEND', 'RENDERING_BACKEND'], {}), '(PHYSICS_BACKEND, RENDERING_BACKEND)\n', (6235, 6271), False, 'import loco\n'), ((4200, 4228), 'tinymath.Vector3f', 'tm.Vector3f', (['[0.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 0.0])\n', (4211, 4228), True, 'import tinymath as tm\n'), ((4397, 4455), 'loco.sim.Sphere', 'loco.sim.Sphere', (['"""sphere"""', '(0.1)', '[1.0, -1.0, 2.0]', 'rotation'], {}), "('sphere', 0.1, [1.0, -1.0, 2.0], rotation)\n", (4412, 4455), False, 'import loco\n'), ((4490, 4599), 'loco.sim.Mesh', 'loco.sim.Mesh', (['"""tetrahedron_0"""', 'TETRAHEDRON_VERTICES', 'TETRAHEDRON_FACES', '(1.0)', '[-1.0, -1.0, 1.0]', 'rotation'], {}), "('tetrahedron_0', TETRAHEDRON_VERTICES, TETRAHEDRON_FACES, 1.0,\n [-1.0, -1.0, 1.0], rotation)\n", (4503, 4599), False, 'import loco\n'), ((4759, 4867), 'loco.sim.Mesh', 'loco.sim.Mesh', (['"""tetrahedron_1"""', 'TETRAHEDRON_VERTICES', 'TETRAHEDRON_FACES', '(0.5)', '[-1.0, 1.0, 1.0]', 'rotation'], {}), "('tetrahedron_1', TETRAHEDRON_VERTICES, TETRAHEDRON_FACES, 0.5,\n [-1.0, 1.0, 1.0], rotation)\n", (4772, 4867), False, 'import loco\n'), ((5027, 5113), 'loco.sim.Mesh', 'loco.sim.Mesh', (['"""ramp_0"""', 'RAMP_VERTICES', 'RAMP_FACES', '(0.3)', '[1.0, 1.0, 1.0]', 'rotation'], {}), "('ramp_0', RAMP_VERTICES, RAMP_FACES, 0.3, [1.0, 1.0, 1.0],\n rotation)\n", (5040, 5113), False, 'import loco\n'), ((5273, 5360), 'loco.sim.Mesh', 'loco.sim.Mesh', (['"""ramp_1"""', 'RAMP_VERTICES', 'RAMP_FACES', '(0.5)', '[1.0, -1.0, 1.0]', 'rotation'], {}), "('ramp_1', RAMP_VERTICES, RAMP_FACES, 0.5, [1.0, -1.0, 1.0],\n rotation)\n", (5286, 5360), False, 'import loco\n'), ((2076, 2104), 'numpy.cos', 'np.cos', (['((idx + 0.5) * dtheta)'], {}), '((idx + 0.5) * dtheta)\n', (2082, 2104), True, 'import numpy as np\n'), ((2142, 2170), 'numpy.sin', 'np.sin', (['((idx + 0.5) * dtheta)'], {}), '((idx + 0.5) * dtheta)\n', (2148, 2170), True, 'import numpy as np\n'), ((4336, 4349), 'tinymath.Vector3f', 'tm.Vector3f', ([], {}), '()\n', (4347, 4349), True, 'import tinymath as tm\n'), ((4351, 4364), 'tinymath.Matrix3f', 'tm.Matrix3f', ([], {}), '()\n', (4362, 4364), True, 'import tinymath as tm\n'), ((5715, 5741), 'numpy.cos', 'np.cos', (['((i + 0.5) * dtheta)'], {}), '((i + 0.5) * dtheta)\n', (5721, 5741), True, 'import numpy as np\n'), ((5783, 5809), 'numpy.sin', 'np.sin', (['((i + 0.5) * dtheta)'], {}), '((i + 0.5) * dtheta)\n', (5789, 5809), True, 'import numpy as np\n'), ((6109, 6122), 'tinymath.Matrix3f', 'tm.Matrix3f', ([], {}), '()\n', (6120, 6122), True, 'import tinymath as tm\n')]
|