| code (stringlengths 22-1.05M) | apis (listlengths 1-3.31k) | extract_api (stringlengths 75-3.25M) |
|---|---|---|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from recognizers_date_time import recognize_datetime, Culture
class Ambiguity:
"""
    TIMEX expressions are designed to represent ambiguous rather than definite dates. For
    example: "Monday" could be any Monday ever. "May 5th" could be any one of the possible May
    5ths in the past or the future. TIMEX does not represent ambiguous times, so if the natural
    language mentions 4 o'clock it could be either 4 AM or 4 PM. For that, the recognizer (and by
    extension LUIS) would return two TIMEX expressions. A TIMEX expression can include date and
    time parts, so date ambiguity can be combined with multiple results. Code that deals with
    TIMEX expressions is therefore frequently dealing with sets of TIMEX expressions.
"""
@staticmethod
def date_ambiguity():
# Run the recognizer.
results = recognize_datetime(
"Either Saturday or Sunday would work.", Culture.English
)
# We should find two results in this example.
for result in results:
# The resolution includes two example values: going backwards and forwards from NOW in the calendar.
# Each result includes a TIMEX expression that captures the inherent date but not time ambiguity.
# We are interested in the distinct set of TIMEX expressions.
# There is also either a "value" property on each value or "start" and "end".
distinct_timex_expressions = {
value["timex"]
for value in result.resolution["values"]
if "timex" in value
}
print(f"{result.text} ({','.join(distinct_timex_expressions)})")
@staticmethod
def time_ambiguity():
# Run the recognizer.
results = recognize_datetime(
"We would like to arrive at 4 o'clock or 5 o'clock.", Culture.English
)
# We should find two results in this example.
for result in results:
# The resolution includes two example values: one for AM and one for PM.
# Each result includes a TIMEX expression that captures the inherent date but not time ambiguity.
# We are interested in the distinct set of TIMEX expressions.
distinct_timex_expressions = {
value["timex"]
for value in result.resolution["values"]
if "timex" in value
}
# TIMEX expressions don't capture time ambiguity so there will be two distinct expressions for each result.
print(f"{result.text} ({','.join(distinct_timex_expressions)})")
@staticmethod
def date_time_ambiguity():
# Run the recognizer.
results = recognize_datetime(
"It will be ready Wednesday at 5 o'clock.", Culture.English
)
# We should find a single result in this example.
for result in results:
# The resolution includes four example values: backwards and forward in the calendar and then AM and PM.
# Each result includes a TIMEX expression that captures the inherent date but not time ambiguity.
# We are interested in the distinct set of TIMEX expressions.
distinct_timex_expressions = {
value["timex"]
for value in result.resolution["values"]
if "timex" in value
}
# TIMEX expressions don't capture time ambiguity so there will be two distinct expressions for each result.
print(f"{result.text} ({','.join(distinct_timex_expressions)})")
|
[
"recognizers_date_time.recognize_datetime"
] |
[((932, 1008), 'recognizers_date_time.recognize_datetime', 'recognize_datetime', (['"""Either Saturday or Sunday would work."""', 'Culture.English'], {}), "('Either Saturday or Sunday would work.', Culture.English)\n", (950, 1008), False, 'from recognizers_date_time import recognize_datetime, Culture\n'), ((1855, 1948), 'recognizers_date_time.recognize_datetime', 'recognize_datetime', (['"""We would like to arrive at 4 o\'clock or 5 o\'clock."""', 'Culture.English'], {}), '("We would like to arrive at 4 o\'clock or 5 o\'clock.",\n Culture.English)\n', (1873, 1948), False, 'from recognizers_date_time import recognize_datetime, Culture\n'), ((2800, 2879), 'recognizers_date_time.recognize_datetime', 'recognize_datetime', (['"""It will be ready Wednesday at 5 o\'clock."""', 'Culture.English'], {}), '("It will be ready Wednesday at 5 o\'clock.", Culture.English)\n', (2818, 2879), False, 'from recognizers_date_time import recognize_datetime, Culture\n')]
|
import re
def check(data):
    """Return only the entries of `data` whose last digit matches the recomputed check digit.

    `data` may be a single value or a list; non-digit characters are stripped first.
    """
    if not isinstance(data, list):
        data = [data]
    # Keep digits only.
    maybe_rgs_parsed = [re.sub("[^0-9]", "", r) for r in data]
    true_rgs = []
    for r in maybe_rgs_parsed:
        digits = list(map(int, list(r)))
        base = digits[:8]
        # Weight the i-th digit by (2 + i) and derive the check digit modulo 11.
        weighted = [base[i] * (2 + i) for i in range(len(base))]
        check_digit = 11 - (sum(weighted) % 11)
        base.append(check_digit)
        if r == ''.join(map(str, base)):
            true_rgs.append(r)
    return true_rgs
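# Illustrative call (an addition, not from the original module); the identifier strings
# below are made-up inputs, shown only to demonstrate the list-in/list-out behaviour.
if __name__ == "__main__":
    print(check(["12.345.678-9", "00.000.000-0"]))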
|
[
"re.sub"
] |
[((102, 125), 're.sub', 're.sub', (['"""[^0-9]"""', '""""""', 'r'], {}), "('[^0-9]', '', r)\n", (108, 125), False, 'import re\n')]
|
import coinlib
class Wallet():
    def __init__(self, contents=None):
        # Avoid a mutable default argument; treat None as an empty wallet.
        contents = contents or {}
coins_by_symbol = coinlib.get_all_coins_by_symbol()
coins_by_name = coinlib.get_all_coins_by_name()
self.contents = {}
for coin in contents:
coin_lower = coin.lower().strip()
if (coin_lower not in coins_by_symbol
and coin_lower not in coins_by_name):
raise ValueError(coinlib.errors('103', [coin]))
else:
self.contents[coin_lower] = contents[coin]
def get_value(self, convert='USD'):
if len(self.contents) == 0:
return 0
convert = convert.upper().strip()
if convert not in coinlib.VALID_CONVERSION_CURRENCIES:
raise ValueError(coinlib.errors('101', [convert]))
contents_coins = coinlib.get_coins(list(self.contents.keys()), convert)
values = []
for coin in contents_coins:
values.append(contents_coins[coin]['price']*self.contents[coin])
return sum(values)
def add(self, coin, quantity):
coin = coin.lower().strip()
if coin in self.contents:
self.contents[coin] += quantity
else:
self.contents[coin] = quantity
return self.contents
def add_many(self, coins):
for coin in coins:
coin = coin.lower().strip()
if coin in self.contents:
self.contents[coin] += coins[coin]
else:
self.contents[coin] = coins[coin]
return self.contents
def subtract(self, coin, quantity):
self.contents[coin.lower().strip()] -= quantity
return self.contents
def subtract_many(self, coins):
for coin in coins:
coin = coin.lower().strip()
self.contents[coin] -= coins[coin]
return self.contents
def remove(self, coin):
del self.contents[coin.lower().strip()]
return self.contents
def remove_many(self, coins):
for coin in coins:
coin = coin.lower().strip()
            del self.contents[coin]
        return self.contents
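# Hedged usage sketch (not in the original file) exercising the Wallet API defined above.
# It requires the `coinlib` package and live price data; the coin names are illustrative
# assumptions and may not be valid symbols.
if __name__ == "__main__":
    wallet = Wallet({"btc": 0.5})
    wallet.add("eth", 2)
    print(wallet.get_value("USD"))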
|
[
"coinlib.errors",
"coinlib.get_all_coins_by_name",
"coinlib.get_all_coins_by_symbol"
] |
[((97, 130), 'coinlib.get_all_coins_by_symbol', 'coinlib.get_all_coins_by_symbol', ([], {}), '()\n', (128, 130), False, 'import coinlib\n'), ((155, 186), 'coinlib.get_all_coins_by_name', 'coinlib.get_all_coins_by_name', ([], {}), '()\n', (184, 186), False, 'import coinlib\n'), ((771, 803), 'coinlib.errors', 'coinlib.errors', (['"""101"""', '[convert]'], {}), "('101', [convert])\n", (785, 803), False, 'import coinlib\n'), ((431, 460), 'coinlib.errors', 'coinlib.errors', (['"""103"""', '[coin]'], {}), "('103', [coin])\n", (445, 460), False, 'import coinlib\n')]
|
from django.shortcuts import render, redirect, get_object_or_404
from .models import Image,Profile,Location,tags
from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from .forms import NewImageForm
from .email import send_welcome_email
from .forms import NewsLetterForm, NewCommentForm
# Views
@login_required(login_url='/accounts/login/')
def home_images(request):
# Display all images here:
# images = Image.objects.all()
locations = Location.objects.all()
if request.GET.get('location'):
pictures = Image.filter_by_location(request.GET.get('location'))
elif request.GET.get('tags'):
pictures = Image.filter_by_tag(request.GET.get('tags'))
elif request.GET.get('search_term'):
pictures = Image.search_image(request.GET.get('search_term'))
else:
pictures = Image.objects.all()
if request.method == 'POST':
form = NewsLetterForm(request.POST)
if form.is_valid():
name = form.cleaned_data['your_name']
email = form.cleaned_data['email']
            # A successful signup should return the redirect rather than discard it.
            return HttpResponseRedirect('home_images')
else:
form = NewsLetterForm()
return render(request, 'index.html', {'locations':locations,
'pictures':pictures, 'letterForm':form})
@login_required(login_url='/accounts/login/')
def image(request):
images = Image.objects.all()
# try:
# image = Image.objects.get(pk = id)
    # except DoesNotExist:
# raise Http404()
# current_user = request.user
return render(request,'registration/image_list.html', {"images":images})
@login_required(login_url='/accounts/login/')
def new_image(request):
current_user = request.user
if request.method == 'POST':
form = NewImageForm(request.POST, request.FILES)
if form.is_valid():
image = form.save(commit=False)
image.user = current_user
image.save()
return redirect('homePage')
else:
form = NewImageForm()
return render(request, 'registration/new_image.html', {"form": form})
def search_users(request):
# search for a user by their username
if 'user' in request.GET and request.GET["user"]:
search_term = request.GET.get("user")
searched_users = Profile.search_users(search_term)
message = f"{search_term}"
return render(request, 'search.html', {"message": message, "profiles": searched_users})
else:
message = "You haven't searched for any person"
return render(request, 'search.html', {"message": message})
def search_image(request):
# search for an image by the description of the image
if 'image' in request.GET and request.GET["image"]:
search_term = request.GET.get("image")
searched_images = Image.search_image(search_term)
message = f"{search_term}"
return render(request, 'search.html', {"message": message, "pictures": searched_images})
else:
message = "You haven't searched for any image"
return render(request, 'search.html', {"message": message})
@login_required(login_url='/accounts/login/')
def myprofile(request, username = None):
current_user = request.user
pictures = Image.objects.filter(user=current_user).all()
    # locals() already carries `pictures`; a second dict here would be passed as content_type.
    return render(request, 'profile.html', locals())
@login_required(login_url='/accounts/login/')
def individual_profile_page(request, username):
print(username)
if not username:
username = request.user.username
# images by user id
images = Image.objects.filter(user_id=username)
user = request.user
profile = Profile.objects.get(user=user)
userf = User.objects.get(pk=username)
if userf:
print('user found')
profile = Profile.objects.get(user=userf)
else:
        print('No such user')
return render (request, 'registration/profile.html', {'images':images,'profile':profile,'user':user, 'username': username})
def user_list(request):
user_list = User.objects.all()
context = {'user_list': user_list}
return render(request, 'image_details.html', context)
def image_detail(request, image_id):
image = Image.objects.get(id = image_id)
return render(request, 'image_details.html', {"image":image})
@login_required(login_url='/accounts/login/')
def new_comment(request, username):
current_user =request.user
username = current_user.username
if request.method =='POST':
form = NewCommentForm(request.POST, request.FILES)
if form.is_valid():
comment = form.save()
comment.user = request.user
comment.save()
return redirect('homePage')
else:
form = NewCommentForm()
return render(request, 'new_comment.html', {"form":form})
@login_required(login_url='/accounts/login/')
def single_image_like(request, image_id):
image = Image.objects.get(id=image_id)
image.likes = image.likes + 1
image.save()
return redirect('homePage')
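# Hedged sketch (not part of this views module): a urls.py that could wire up a few of the
# views above. The route patterns and the name 'homePage' (the target of the redirect()
# calls) are assumptions about the project's URL configuration, shown only for illustration.
#
# from django.conf.urls import url
# from . import views
#
# urlpatterns = [
#     url(r'^$', views.home_images, name='homePage'),
#     url(r'^search/user/$', views.search_users, name='search_users'),
#     url(r'^image/(\d+)$', views.image_detail, name='image_detail'),
# ]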
|
[
"django.contrib.auth.decorators.login_required",
"django.contrib.auth.models.User.objects.get",
"django.shortcuts.redirect",
"django.shortcuts.render",
"django.http.HttpResponseRedirect",
"django.contrib.auth.models.User.objects.all"
] |
[((461, 505), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (475, 505), False, 'from django.contrib.auth.decorators import login_required\n'), ((1466, 1510), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (1480, 1510), False, 'from django.contrib.auth.decorators import login_required\n'), ((1822, 1866), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (1836, 1866), False, 'from django.contrib.auth.decorators import login_required\n'), ((3351, 3395), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (3365, 3395), False, 'from django.contrib.auth.decorators import login_required\n'), ((3613, 3657), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (3627, 3657), False, 'from django.contrib.auth.decorators import login_required\n'), ((4546, 4590), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (4560, 4590), False, 'from django.contrib.auth.decorators import login_required\n'), ((5057, 5101), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (5071, 5101), False, 'from django.contrib.auth.decorators import login_required\n'), ((1327, 1428), 'django.shortcuts.render', 'render', (['request', '"""index.html"""', "{'locations': locations, 'pictures': pictures, 'letterForm': form}"], {}), "(request, 'index.html', {'locations': locations, 'pictures': pictures,\n 'letterForm': form})\n", (1333, 1428), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1749, 1816), 'django.shortcuts.render', 'render', (['request', '"""registration/image_list.html"""', "{'images': images}"], {}), "(request, 'registration/image_list.html', {'images': images})\n", (1755, 1816), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2236, 2298), 'django.shortcuts.render', 'render', (['request', '"""registration/new_image.html"""', "{'form': form}"], {}), "(request, 'registration/new_image.html', {'form': form})\n", (2242, 2298), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3945, 3974), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'pk': 'username'}), '(pk=username)\n', (3961, 3974), False, 'from django.contrib.auth.models import User\n'), ((4119, 4243), 'django.shortcuts.render', 'render', (['request', '"""registration/profile.html"""', "{'images': images, 'profile': profile, 'user': user, 'username': username}"], {}), "(request, 'registration/profile.html', {'images': images, 'profile':\n profile, 'user': user, 'username': username})\n", (4125, 4243), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((4279, 4297), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (4295, 4297), False, 'from django.contrib.auth.models import User\n'), ((4348, 4394), 'django.shortcuts.render', 'render', (['request', '"""image_details.html"""', 'context'], {}), 
"(request, 'image_details.html', context)\n", (4354, 4394), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((4489, 4544), 'django.shortcuts.render', 'render', (['request', '"""image_details.html"""', "{'image': image}"], {}), "(request, 'image_details.html', {'image': image})\n", (4495, 4544), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((5004, 5055), 'django.shortcuts.render', 'render', (['request', '"""new_comment.html"""', "{'form': form}"], {}), "(request, 'new_comment.html', {'form': form})\n", (5010, 5055), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((5249, 5269), 'django.shortcuts.redirect', 'redirect', (['"""homePage"""'], {}), "('homePage')\n", (5257, 5269), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2163, 2183), 'django.shortcuts.redirect', 'redirect', (['"""homePage"""'], {}), "('homePage')\n", (2171, 2183), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2580, 2665), 'django.shortcuts.render', 'render', (['request', '"""search.html"""', "{'message': message, 'profiles': searched_users}"], {}), "(request, 'search.html', {'message': message, 'profiles': searched_users}\n )\n", (2586, 2665), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2743, 2795), 'django.shortcuts.render', 'render', (['request', '"""search.html"""', "{'message': message}"], {}), "(request, 'search.html', {'message': message})\n", (2749, 2795), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3121, 3206), 'django.shortcuts.render', 'render', (['request', '"""search.html"""', "{'message': message, 'pictures': searched_images}"], {}), "(request, 'search.html', {'message': message, 'pictures':\n searched_images})\n", (3127, 3206), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3296, 3348), 'django.shortcuts.render', 'render', (['request', '"""search.html"""', "{'message': message}"], {}), "(request, 'search.html', {'message': message})\n", (3302, 3348), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((4930, 4950), 'django.shortcuts.redirect', 'redirect', (['"""homePage"""'], {}), "('homePage')\n", (4938, 4950), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1238, 1273), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['"""home_images"""'], {}), "('home_images')\n", (1258, 1273), False, 'from django.http import HttpResponse, Http404, HttpResponseRedirect\n')]
|
"""
Author: <NAME>
Licence: Apache 2.0
Version: 0.1
"""
import os
import sys
_colors = ["RED", "BLUE", "CYAN", "YELLOW", "GREEN", "MAGENTA", "WHITE", "BLACK"]
backgrounds = ["BG_RED", "BG_BLUE", "BG_CYAN", "BG_YELLOW", "BG_GREEN", "BG_MAGENTA", "BG_WHITE", "BG_BLACK"]
# Only styles with a matching escape code in the `colors` dict below are usable.
styles = ['BOLD', 'REVERSE', 'UNDERLINE', 'RESET', 'ITALIC', 'BLINK', 'INVISIBLE', 'RED_UNDERLINE',
          'GREEN_UNDERLINE', 'STRIKE', 'CURLY_UNDERLINE']
if sys.platform == "win32":
path = __file__
if not os.path.exists(path.replace(
"colors.py", "completed")):
print("Registering Colors In Windows Registry......")
cmd = os.popen(r'reg add HKEY_CURRENT_USER\Console /v VirtualTerminalLevel /t REG_DWORD /d 0x00000001 '
'/f').read()
assert cmd.rstrip() == "The operation completed successfully.", "Run As Administrator"
with open(path.replace("colors.py", "completed"), "x") as file: ...
print("Successfully Registered Colors In Windows Registry")
colors = {
'RED': "\033[1;31m", 'BLUE': "\033[1;34m", 'CYAN': "\033[1;36m", 'YELLOW': "\u001b[33m", 'GREEN': "\033[0;32m",
'MAGENTA': "\u001b[35m", 'WHITE': "\u001b[37m", 'BLACK': "\u001b[30m", 'BG_BLACK': "\u001b[40m",
'BG_RED': "\u001b[41m", 'BG_GREEN': "\u001b[42m", 'BG_YELLOW': "\u001b[43m", 'BG_BLUE': "\u001b[44m",
'BG_MAGENTA': "\u001b[45m", 'BG_CYAN': "\u001b[46m", 'BG_WHITE': "\u001b[47m", 'RESET': "\033[0;0m",
'BOLD': "\033[;1m", 'REVERSE': "\033[7m", 'UNDERLINE': "\u001b[4m", 'ITALIC': "\u001b[3m", 'BLINK': "\033[5m",
'INVISIBLE': "\033[8m", 'RED_UNDERLINE': "\u001b[4m\u001b[31m", 'GREEN_UNDERLINE': "\u001b[4m\u001b[32m",
'STRIKE': "\u001b[9m", "CURLY_UNDERLINE": '\u001b[4:3m'}
def is_string(string):
"""
:param string: string to check weather it is a string
:raise ValueError if string is not a string
"""
if not isinstance(string, str):
raise ValueError("Please Enter A String Not %s" % str(type(string))[8:].replace("'>", ""))
def color(string, fg=None, bg=None, style=None):
"""
:param string: String
:param fg: ForeGround Color
:param bg: BackGround Color
:param style: Style To use
:return: string Formatted with fg, bg and style
"""
is_string(string)
if fg is not None:
if fg.upper() not in _colors:
raise ValueError("%s Not Available" % fg)
string = colors[fg.upper()] + string
if bg is not None:
if "BG_" + bg.upper() not in backgrounds:
raise ValueError("%s Not Available" % bg)
string = colors["BG_" + bg.upper()] + string
if style is not None:
if style.upper() not in styles:
raise ValueError("%s Style Not Available" % style)
string = colors[style.upper()] + string
string += reset
return string
# Shortcut For Foreground Colors
blue = colors['BLUE']
green = colors['GREEN']
yellow = colors["YELLOW"]
white = colors["WHITE"]
cyan = colors["CYAN"]
magenta = colors["MAGENTA"]
red = colors["RED"]
black = colors["BLACK"]
# Shortcut For Background Colors
bg_black = colors["BG_BLACK"]
bg_red = colors["BG_RED"]
bg_blue = colors["BG_BLUE"]
bg_cyan = colors["BG_CYAN"]
bg_white = colors["BG_WHITE"]
bg_green = colors["BG_GREEN"]
bg_magenta = colors["BG_MAGENTA"]
bg_yellow = colors["BG_YELLOW"]
# Shortcut For Styles
bold = colors["BOLD"]
italic = colors["ITALIC"]
reverse = colors['REVERSE']
underline = colors['UNDERLINE']
red_underline = colors['RED_UNDERLINE']
green_underline = colors['GREEN_UNDERLINE']
invisible = colors['INVISIBLE']
blink = colors['BLINK']
strike = colors['STRIKE']
curly_underline = colors["CURLY_UNDERLINE"]
reset = colors["RESET"]
def print_all_colors():
for k, v in colors.items():
print(v, k, reset)
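# Small usage sketch (an addition, not from the original module) combining the helpers above.
if __name__ == "__main__":
    print(color("hello", fg="red", bg="white", style="bold"))
    print(green + "ok" + reset)
    print_all_colors()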
|
[
"os.popen"
] |
[((682, 792), 'os.popen', 'os.popen', (['"""reg add HKEY_CURRENT_USER\\\\Console /v VirtualTerminalLevel /t REG_DWORD /d 0x00000001 /f"""'], {}), "(\n 'reg add HKEY_CURRENT_USER\\\\Console /v VirtualTerminalLevel /t REG_DWORD /d 0x00000001 /f'\n )\n", (690, 792), False, 'import os\n')]
|
# google imports
# standard library imports
import sys
import copy
import pickle
import os
from collections import Counter
from io import BytesIO
from zipfile import ZipFile
from math import ceil
import importlib
import urllib.request
# math imports
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
from scipy import stats
import seaborn as sns
sns.set()
# Jupyter Imports
from IPython.display import display
# from google.colab import files
# ML imports
# models
from sklearn.naive_bayes import CategoricalNB
from sklearn.tree import ExtraTreeClassifier, DecisionTreeClassifier
from sklearn.neural_network import MLPClassifier
from xgboost import XGBClassifier
from sklearn.linear_model import RidgeCV, SGDRegressor
from sklearn.svm import LinearSVR
# preprocessing
from sklearn.impute import SimpleImputer
from sklearn.compose import ColumnTransformer
from sklearn.preprocessing import StandardScaler, OneHotEncoder, KBinsDiscretizer
from sklearn.linear_model import LogisticRegression, LogisticRegressionCV
from sklearn.model_selection import train_test_split
# sampling
from imblearn.over_sampling import RandomOverSampler
from imblearn.under_sampling import RandomUnderSampler, EditedNearestNeighbours, RepeatedEditedNearestNeighbours
from imblearn.combine import SMOTEENN, SMOTETomek
# metrics
from sklearn.metrics import confusion_matrix, classification_report
from sklearn.metrics import plot_precision_recall_curve, plot_confusion_matrix, plot_roc_curve
from sklearn.metrics import f1_score, roc_auc_score, roc_curve, accuracy_score
# other
from imblearn.pipeline import make_pipeline
from sklearn.model_selection import GridSearchCV
# custom imports
import feature_utils as feat_util
def response_boxplot(df, category, verbose=False):
print('\n'+category)
fig, axs = plt.subplots(1, 3, figsize=(20, 10))
qs = ['EFL_yes_no', 'skill_low_med_high', 'enjoy_high_med_low_none']
for i, f in enumerate(['R0_quiz_response', 'R1_quiz_response', 'R2_quiz_response', ]):
if verbose:
print(qs[i])
bp = df.boxplot(column=category, by=df[f].astype(
'category'), ax=axs[i])
bp.set_xlabel('')
for choice in range(df[f].min(), df[f].max()+1):
query = f"{f}=={choice}"
cat_df = df.query(query)[category]
num_chose = len(cat_df)
mean = cat_df.mean()
std = cat_df.std()
if verbose:
print(
f'{f} # chose {choice}: {num_chose} ({round(num_chose/len(df)*100)}%). Avg {mean}, std {std}.')
plt.suptitle(f'{category} Boxplot')
fig.show()
def group_by_func(df, func, title='', show=True):
r0_groups = {0: 'native', 1: 'nonnative'}
r1_groups = {0: 'not very good skill',
1: 'okay skill', 2: 'very good skill'}
r2_groups = {0: 'really enjoy', 1: 'enjoy', 2: 'okay', 3: 'not enjoy'}
def group_string(r0, r1, r2): return ', '.join(
[r0_groups[r0], r1_groups[r1], r2_groups[r2]])
result_dfs = [pd.DataFrame(index=r1_groups.values(), columns=r2_groups.values(
)), pd.DataFrame(index=r1_groups.values(), columns=r2_groups.values())]
if show:
print(f'{"-"*6} {title} {"-"*6}')
for r0 in [0, 1]:
subtitle = "Nonnatives" if r0 else "Natives"
if show:
print(f'\n{subtitle}:')
tdf0 = df.query(f"R0_quiz_response == {r0}")
for r1 in [0, 1, 2]:
tdf1 = tdf0.query(f"R1_quiz_response == {r1}")
for r2 in [0, 1, 2, 3]:
tdf2 = tdf1.query(f"R2_quiz_response == {r2}")
result_dfs[r0].loc[r1_groups[r1], r2_groups[r2]
] = func(df, tdf0, tdf1, tdf2)
if show:
display(result_dfs[r0])
return result_dfs
def standard_group_by_func(fulldf, per_category_stats_list=None):
    per_category_stats_list = per_category_stats_list or ['sess_count_clicks',
'sess_count_hovers',
'sess_meaningful_action_count',
'sess_EventCount',
'sess_count_notebook_uses',
'sess_avg_time_between_clicks',
'sess_first_enc_words_read',
'sess_first_enc_boxes_read',
'sess_num_enc',
'sess_first_enc_duration',
'sess_first_enc_avg_wps',
'sess_first_enc_var_wps',
'sess_first_enc_avg_tbps',
'sess_first_enc_var_tbps',
'sess_start_obj',
'sess_end_obj',
'start_level',
'max_level',
'sessDuration']
dfs_list = []
title_list = []
def df_func(df, tdf0, tdf1, tdf2): return len(tdf2)
title = 'count'
dfs = group_by_func(fulldf, df_func, title)
dfs_list.append(dfs)
title_list.append(title)
def df_func(df, tdf0, tdf1, tdf2): return round(len(tdf2)/len(df)*100, 2)
title = 'percent total pop'
dfs = group_by_func(fulldf, df_func, title)
dfs_list.append(dfs)
title_list.append(title)
def df_func(df, tdf0, tdf1, tdf2): return round(len(tdf2)/len(tdf0)*100, 2)
title = 'percent native class pop'
dfs = group_by_func(fulldf, df_func, title)
dfs_list.append(dfs)
title_list.append(title)
for category in per_category_stats_list:
df_func = get_avg_std_df_func(category)
title = f'(avg, std) {category}'
dfs = group_by_func(fulldf, df_func, title)
dfs_list.append(dfs)
title_list.append(title)
return title_list, dfs_list
def get_avg_std_df_func(category_name):
def inner(df, tdf0, tdf1, tdf2):
mean = tdf2[category_name].mean()
std = tdf2[category_name].std()
if not pd.isna(mean):
mean = round(mean, 2)
if not pd.isna(std):
std = round(std, 2)
return (mean, std)
return inner
def html_stats(df):
html_strs = ['<div class="container">', '<h3>{Stats}</h3>']
qs = ['EFL_yes_no', 'skill_low_med_high', 'enjoy_high_med_low_none']
html_strs.append(f'<p> Total pop {len(df)} </p>')
for i, f in enumerate(['R0_quiz_response', 'R1_quiz_response', 'R2_quiz_response', ]):
html_strs.append(f'<p> {qs[i]}</p>')
for choice in range(df[f].min(), df[f].max()+1):
query = f"{f}=={choice}"
cat_df = df.query(query)
num_chose = len(cat_df)
html_strs.append(
f'<p>{f} # chose {choice}: {num_chose} ({round(num_chose/len(df)*100)}%).</p>')
return '\n'.join(html_strs+['</div>'])
def full_html(base_df, title_list, dfs_list, suptitle=None):
HEADER = '''<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
<style>
.flex-container {
display: flex;
flex-wrap: wrap;
}
.container {
border: thick solid black;
padding: 10px;
margin: 5px;
}
.container table:nth-of-type(2) td {
background-color: rgb(161, 161, 230);
}
.container table:nth-of-type(2) th {
background-color: rgb(20, 20, 194);
color: white;
}
.container table:nth-of-type(2n-1) td {
background-color: rgb(235, 158, 158);
}
.container table:nth-of-type(2n-1) th {
background-color: rgb(160, 11, 11);
color: white;
}
.break {
flex-basis: 100%;
height: 0;
}
</style>
<div class="flex-container">'''
FOOTER = ''' </div>
</body>
</html>'''
def table_header(title): return f''' <div class="container">
<h3>{title}</h3>'''
table_footer = ''' </div>'''
def table_html(title, dfs): return '\n'.join([table_header(
title), "<p>Natives:</p>", dfs[0].to_html(), "<p>Nonnatives:</p>", dfs[1].to_html(), table_footer])
if suptitle is not None:
suptitle = f'<h2>{suptitle}</h2>\n<div class="break"></div> <!-- break -->'
else:
suptitle = ''
return '\n'.join([HEADER, suptitle, html_stats(base_df)] +
[table_html(t, dfs) for t, dfs in zip(title_list, dfs_list)] +
[FOOTER])
def download_full_html(base_df, title_list, dfs_list, filename, suptitle=None):
with open(filename, 'w+') as f:
f.write(full_html(base_df, title_list, dfs_list, suptitle=suptitle))
print("Wrote to", filename)
    files.download(filename)  # needs `from google.colab import files` (commented out in the imports above)
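# Hedged example (not in the original utilities): a typical reporting flow with the helpers
# above. `df` is assumed to be a jowilder feature dataframe containing the R0/R1/R2
# quiz-response columns; the output filename is a placeholder.
# title_list, dfs_list = standard_group_by_func(df)
# download_full_html(df, title_list, dfs_list, 'group_report.html', suptitle='Group stats')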
onext_int_feats = [f'obj{i}_onext_int' for i in range(80)]
onext_int_cats = [["nan", 1],
["nan", 11],
["nan", 12, 86, 111, 125],
["nan", 13, 14, 113, 116, 118],
["nan", 14, 15, 113, 114, 116, 118],
["nan", 13, 15, 113, 114, 116, 118],
["nan", 16, 86, 115, 118, 132, 161],
["nan", 17, 86, 115, 118, 128, 161],
["nan", 18, 86, 115, 118, 161],
["nan", 19, 86, 117, 118, 127, 133, 134, 161],
["nan", 20, 133, 134, 136],
["nan", 2, 80, 81, 82, 83],
["nan", 21, 86, 117, 127, 136, 137, 161],
["nan", 22, 137, 141],
["nan", 23, 24, 86, 117, 127, 136, 161],
["nan", 23, 24, 117, 127, 136, 161],
["nan", 25, 86, 117, 118, 127, 136, 140, 147, 151, 161],
["nan", 26, 142, 145],
["nan", 27, 143],
["nan", 28, 86, 117, 118, 136, 140, 150, 161],
["nan", 29, 119, 130],
["nan", 29, 30, 35, 86, 117, 118, 126, 136, 140, 149],
["nan", 3, 80, 82, 83, 86, 87, 88, 93],
["nan", 31, 38],
["nan", 32, 153],
["nan", 33, 154],
["nan", 34, 155],
["nan", 35, 156],
["nan", 36, 157],
["nan", 37, 158],
["nan", 30],
["nan", 39, 163],
["nan", 40, 160],
["nan", 3],
["nan", 41, 164, 166],
["nan", 42, 166],
["nan", 30],
["nan", 44, 85, 125],
["nan", 29, 45, 47, 84, 118, 125, 136, 140, 149, 168, 169, 184],
["nan", 45, 46, 169, 170],
["nan", 29, 45, 47, 92, 118, 136, 140, 149, 169, 184],
["nan", 29, 45, 48, 92, 118, 140, 149, 168, 184],
["nan", 46, 49, 168],
["nan", 46, 50, 168, 170],
["nan", 5, 80, 82, 83, 86, 89, 91, 95, 97, 125],
["nan", 29, 51, 92, 118, 136, 140, 149, 168, 184],
["nan", 52, 92, 118, 136, 149, 171, 184],
["nan", 53, 54, 92, 118, 136, 140, 149, 184],
["nan", 53, 54, 55, 59, 60, 90, 92, 94,
118, 136, 140, 149, 168, 184],
["nan", 53, 55, 59, 60, 90, 92, 94, 118, 136, 140, 149, 184],
["nan", 55, 56, 59, 60, 149, 174],
["nan", 57, 59, 60, 174],
["nan", 58, 59, 60, 136, 172, 174, 184],
["nan", 29, 59, 60, 61, 92, 118, 136, 149, 168, 172, 184],
["nan", 55, 56, 57, 58, 60, 61, 140, 172, 174, 184],
["nan", 6, 80, 82, 83, 86, 98, 100, 125],
["nan", 55, 56, 57, 58, 59, 61, 92, 118,
136, 140, 149, 172, 174, 184],
["nan", 59, 62, 136, 140, 149, 172, 173, 175, 184],
["nan", 63, 64, 176],
["nan", 64, 66, 149, 175, 184],
["nan", 29, 65, 66, 92, 118, 136, 140, 172, 175, 177, 184],
["nan", 66, 67, 68, 92, 118, 136, 140, 146, 175, 177, 184],
["nan", 67, 144],
["nan", 29, 64, 65, 68, 92, 118, 131, 136,
140, 148, 149, 172, 175, 177, 184],
["nan", 92, 118, 122, 123, 124, 131, 136, 140,
146, 148, 168, 172, 175, 177, 184],
["nan", 70],
["nan", 7],
["nan", 71, 178],
["nan", 72, 179],
["nan", 73, 180],
["nan", 74, 181],
["nan", 75, 182],
["nan", 69],
["nan", 77, 78, 185],
["nan", 78, 185],
["nan", 79],
[0],
["nan", 8],
["nan", 9, 103],
["nan", 104, 105, 108]]
QA_1_feats = [f'Q{i}_A1' for i in range(19)]
QA_1_cats = [['0', 'A', 'B', 'C', 'D'],
['0', 'A', 'B', 'C', 'D'],
['0', 'A', 'B', 'C', 'D'],
['0', 'A', 'B', 'C', 'D'],
['0', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q'],
['0', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
'I', 'J', 'K', 'L', 'M', 'O', 'P', 'Q'],
['0', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q'],
['0', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q'],
['0', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q'],
['0', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q'],
['0', '?', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q'],
['0', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q'],
['0', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q'],
['0', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q'],
['0', 'A', 'B', 'C', 'D', 'F', 'G', 'I', 'M', 'N', 'O', 'P', 'Q',
'R', 'S', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f'],
['0', 'Q', 'V', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f'],
['0', '?', 'X', 'Y', 'Z', 'b', 'c', 'd', 'e', 'f'],
['0', 'X', 'Y', 'b', 'c', 'd', 'e', 'f'],
['0', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f']]
def get_preprocessor(df, scaler=StandardScaler(), imputer=SimpleImputer(strategy='constant'), bool_dtype='int64'):
"""
By default has a number of steps:
1. drops columns from use in preprocessor if present:
- [f'Q{q}_answers' for q in range(19)]
- ["play_year", "play_month", "play_day", "play_hour", "play_minute", "play_second"]
- ["_continue", "continue", "save_code", "music", "hq", "fullscreen", "persistentSessionID"]
2. Creates a preprocessor for all non-y columns and non-boolean columns with the following steps:
a. Standard Scaler (0 mean, 1 std)
b. Simple Imputer(strategy='constant') (fill NaN with 0)
3. Fits the preprocessor to the given X
4. returns the unfitted preprocessor (sklearn pipeline), and the unprocessed X dataframe
:param df: jowilder dataframe
:param scaler: sklearn compatible scaler
:param imputer: sklearn compatible imputer
:return: the unfitted preprocessor (sklearn pipeline), and the unprocessed X dataframe
"""
df = df.drop(
[f'Q{q}_answers' for q in range(19)] + ["play_year", "play_month", "play_day", "play_hour", "play_minute",
"play_second",
"_continue", "continue", "save_code", "music", "hq", "fullscreen",
"persistentSessionID", ], axis=1, errors='ignore').copy()
y_cols, bool_cols, num_cols = separate_columns(df, bool_dtype=bool_dtype)
X = df.loc[:, num_cols+bool_cols]
# too complicated to allow for pipeline order
# pipeline_strings = [pipeline_order[i:i+2] for i in range(0,len(pipeline_order),2)]
# transformers = []
# num_sa, num_sc, num_im = 0,0,0
# for s in pipeline_strings:
# if s == 'Sa':
# transformer = make_pipeline(sampler)
# cols = num_cols + bool_cols
# name = f'{s}{num_sa}'
# num_sa += 1
# elif s == 'Sc':
# transformer = scaler
# name = f'{s}{num_sc}'
# cols = num_cols
# num_sc += 1
# elif s == 'Im':
# transformer = imputer
# name = f'{s}{num_im}'
# cols = num_cols
# num_im += 1
# else:
# raise ValueError("Pipeline substrings must be Sa Sc or Im")
# transformers.append((name, transformer, cols))
def col_str_to_int(col_strs): return [
X.columns.get_loc(s) for s in col_strs]
column_transformer = ColumnTransformer(
transformers=[
('num', make_pipeline(scaler, imputer), col_str_to_int(num_cols)),
('bool', 'passthrough', col_str_to_int(bool_cols))
],
remainder='drop')
return column_transformer, X
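# Usage sketch (an assumption about typical use, following the docstring above): the returned
# ColumnTransformer is unfitted, so fit it on X before transforming.
# preprocessor, X = get_preprocessor(df)
# X_processed = preprocessor.fit_transform(X)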
def get_ys(df):
"""
:rtype: dictionary of y columns (df series). keys: y0,y1,y2,y1_bin,y2_bin,y1_bin_x,y2_bin_x
"""
ys = {}
for key, y_col in [
('y0', 'R0_quiz_response'),
('y1', 'R1_quiz_response'),
('y2', 'R2_quiz_response'),
('y1_bin', 'R1_quiz_response_bin'),
('y1_bin_0v12', 'R1_quiz_response_0v12'),
('y1_bin_01v2', 'R1_quiz_response_01v2'),
('y1_bin_x', 'R1_quiz_response_bin_x'),
('y2_bin', 'R2_quiz_response_bin'),
('y2_bin_x', 'R2_quiz_response_bin_x'),
('y2_bin_0v123', 'R2_quiz_response_bin0v123'),
('y2_bin_01v23', 'R2_quiz_response_bin01v23'),
('y2_bin_012v3', 'R2_quiz_response_bin012v3'),
]:
if y_col in df.columns:
ys[key] = df.loc[:, y_col].astype('category').copy()
return ys
def separate_columns(df, bool_dtype='int64', expect_bool_cols = True) -> (list, list, list):
"""
:param df:
:param bool_dtype: Defaults to 'int64'. Should be int64 if coming from import csv otherwise could be 'uint8'
if coming from the pd dummies.
:return: tuple of lists of column names for y_columns, bool_columns, and integer_columns
"""
y_cols = [col for col in df.columns if 'quiz_response' in col]
bool_cols = [col for col in df.select_dtypes(include=[bool_dtype])
if np.isin(df[col].dropna().unique(), [0, 1]).all() and
col not in y_cols]
num_cols = [
col for col in df.columns if col not in bool_cols and col not in y_cols]
if not bool_cols and expect_bool_cols:
        print('Warning! No bool columns. Consider changing bool_dtype="int64" to "uint8"')
return y_cols, bool_cols, num_cols
end_obj_to_last_Q = {
9: 0,
10: 3,
11: 3,
12: 3,
13: 3,
14: 3,
15: 3,
16: 3,
17: 3,
18: 3,
19: 3,
20: 3,
21: 3,
22: 3,
23: 3,
24: 3,
25: 3,
26: 3,
27: 3,
28: 3,
29: 3,
30: 3,
31: 3,
32: 4,
33: 5,
34: 6,
35: 7,
36: 8,
37: 9,
38: 9,
39: 10,
40: 11,
41: 12,
42: 13,
43: 13,
44: 13,
45: 13,
46: 13,
47: 13,
48: 13,
49: 13,
50: 13,
51: 13,
52: 13,
53: 13,
54: 13,
55: 13,
56: 13,
57: 13,
58: 13,
59: 13,
60: 13,
61: 13,
62: 13,
63: 13,
64: 13,
65: 13,
66: 13,
67: 13,
68: 13,
69: 13,
70: 13,
71: 14,
72: 15,
73: 16,
74: 17,
75: 18,
76: 18,
77: 18,
78: 18,
79: 18,
}
end_obj_to_last_lvl = {
0: 0,
1: 0,
2: 0,
3: 1,
4: 2,
5: 2,
6: 3,
7: 3,
8: 4,
9: 4,
10: 4,
11: 4,
12: 5,
13: 6,
14: 6,
15: 6,
16: 6,
17: 6,
18: 6,
19: 7,
20: 7,
21: 8,
22: 8,
23: 9,
24: 9,
25: 9,
26: 10,
27: 10,
28: 11,
29: 11,
30: 12,
31: 12,
32: 12,
33: 12,
34: 12,
35: 12,
36: 12,
37: 12,
38: 12,
39: 12,
40: 12,
41: 12,
42: 12,
43: 13,
44: 13,
45: 14,
46: 15,
47: 15,
48: 16,
49: 16,
50: 17,
51: 17,
52: 18,
53: 18,
54: 18,
55: 18,
56: 18,
57: 18,
58: 18,
59: 18,
60: 18,
61: 18,
62: 19,
63: 19,
64: 19,
65: 20,
66: 20,
67: 21,
68: 21,
69: 22,
70: 22,
71: 22,
72: 22,
73: 22,
74: 22,
75: 22,
76: 22,
77: 23,
78: 23,
79: 23,
}
class GridSearcher():
def __init__(self, csv_fpath=None, df=None, preprocessor=None, fillna=0, meta=[], expect_bool_cols=True):
# either give csv_fpath or df.
        # Either a csv path or a non-empty dataframe must be provided.
        assert csv_fpath or (df is not None and not df.empty)
# load df
if df is None:
print(f'Loading from {csv_fpath}...')
self.df, self.meta = feat_util.open_csv_from_path_with_meta(
csv_fpath, index_col=0)
else:
self.df, self.meta = df, meta
# set X and ys, and preprocessor
if not preprocessor:
self.preprocessor, self.X = get_preprocessor(self.df)
self.X = self.X.fillna(fillna)
else:
_, bool_cols, num_cols = separate_columns(self.df, expect_bool_cols=expect_bool_cols)
            self.X = self.df[bool_cols + num_cols]
self.preprocessor = preprocessor
self.ys = get_ys(self.df)
# set object vars
self.model_dict = {}
self.cur_model = None
def split_data(self):
nonnull_X, nonnull_y = feat_util.remove_nan_labels(self.X, self.y)
X_train, X_test, y_train, y_test = train_test_split(
nonnull_X, nonnull_y, test_size=0.2, random_state=1)
self.X_train, self.X_test, self.y_train, self.y_test = X_train, X_test, y_train, y_test
def set_y(self, y_key=None, other_col=None):
if y_key:
print(f'Switching to {y_key}...')
self.y = self.ys[y_key]
elif other_col:
self.y = self.X[other_col]
self.X = self.X.drop(other_col, axis=1)
else:
print("Did not change y. Invalid inputs.")
self.split_data()
def run_fit(self, classifier, sampler=None, verbose=False, preprocess_twice=True, sampler_index=None, full_pipeline=False):
# fit self.cur_model as a pipeline of the given preprocessor, sampler, preprocessor, classifer
# if preprocess_twice is false, self.cur_model is sampler, preprocessor, classifier
# if full_pipeline and sampler index, self.cur_model is the classifier
# (must be a pipeline containing a sampler or a placeholder (None) for the sampler)
if full_pipeline:
assert sampler_index is not None
clf = classifier
elif preprocess_twice:
clf = make_pipeline(self.preprocessor, sampler,
copy.deepcopy(self.preprocessor), classifier)
sampler_index = 1
else:
clf = make_pipeline(sampler, self.preprocessor, classifier)
sampler_index = 0
self._sampling_pipeline = clf[:sampler_index+1]
self._classifying_pipeline = clf[sampler_index+1:]
if clf[sampler_index] is not None:
self.X_train_sampled, self.y_train_sampled = self._sampling_pipeline.fit_resample(
self.X_train, self.y_train)
else:
self.X_train_sampled, self.y_train_sampled = self.X_train, self.y_train
clf = self._classifying_pipeline
# model_name = f'{sampler} {classifier}'
# if verbose:
# print(f'Running {model_name}.')
self._classifying_pipeline.fit(
self.X_train_sampled, self.y_train_sampled)
self.cur_model = clf
# if verbose:
# print("model trained to: %.3f" %
# clf.score(self.X_train, self.y_train))
# print("model score: %.3f" % clf.score(self.X_test, self.y_test))
return clf
def metrics(self, graph_dir=None, graph_prefix=None, binary_classification=True):
# return list of (metric: float, metric_name: str) tuples of metrics of given classifier (default: self.cur_model)
# can only do metrics for binary classification as of right now
assert binary_classification
metric_list = []
clf = self.cur_model
# label metrics
if graph_prefix:
for flipped_labels in [False, True]:
flipped_labels_suffix = '' if not flipped_labels else '_flipped'
fig, axes = plt.subplots(3, 3, figsize=(20, 20))
for i, (yarray, Xarray, label) in enumerate([(self.y_test, self.X_test, 'test'),
(self.y_train_sampled,
self.X_train_sampled, 'train'),
(self.y_train,
self.X_train, 'train_raw'),
]):
for j, (graph_type, func) in enumerate([
('', plot_confusion_matrix),
('_PR', plot_precision_recall_curve),
('_ROC', plot_roc_curve),
]):
ax = axes[j, i]
graph_yarray = yarray.astype(bool)
if flipped_labels:
graph_yarray = ~graph_yarray
disp = func(clf, Xarray, graph_yarray, ax=ax)
title = f'{label}{graph_type}{flipped_labels_suffix}'
ax.set_title(title)
if graph_type in ['_PR', '_ROC']:
ax.set_xlim(-0.05, 1.05)
ax.set_ylim(-0.05, 1.05)
ax.set_aspect('equal', adjustable='box')
suptitle = f'{graph_prefix}{flipped_labels_suffix}'
plt.suptitle(suptitle)
savepath = os.path.join(graph_dir, f'{suptitle}.png')
fig.savefig(savepath, dpi=100)
plt.close()
for i, (yarray, Xarray, label) in enumerate([(self.y_test, self.X_test, 'test'),
(self.y_train_sampled,
self.X_train_sampled, 'train'),
(self.y_train,
self.X_train, 'train_raw'),
]):
y_pred = clf.predict(Xarray)
y_prob = clf.predict_proba(Xarray)[:, 1]
y_true = yarray
X_shape = Xarray.shape
metric_list.extend(feat_util.binary_metric_list(
y_true=y_true, y_pred=y_pred, y_prob=y_prob, X_shape=X_shape,
label_prefix=f'{label}_'
))
return metric_list
def model_stats(self, classifier=None, graph=True):
# counter, auc, and optional graph of given classifer (default: self.cur_model)
classifier = classifier or self.cur_model
y_prob = classifier.predict_proba(self.X_test)[:, 1]
print(f"dimension y_prob: {y_prob.shape}")
print(f"dimension y_test: {self.y_test.shape}")
print(f'Predicts:', Counter(list(classifier.predict(self.X_test))))
print(f'True Labels:', Counter(self.y_test))
if graph:
fpr, tpr, thres = roc_curve(self.y_test, y_prob)
plt.plot(fpr, tpr, color='green')
plt.plot([0, 1], [0, 1], color='red', linestyle='--')
plt.show()
roc_auc = roc_auc_score(self.y_test, y_prob)
print(f"ROC-AUC Score: {roc_auc}")
def classification_report(self):
# classification report on current model
y_true = self.y_test
y_pred = self.cur_model.predict(self.X_test)
print(classification_report(y_true, y_pred))
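# Hedged usage sketch of GridSearcher (not part of the original module). The csv path is a
# placeholder; LogisticRegression and RandomOverSampler are imported at the top of the file.
# searcher = GridSearcher(csv_fpath='features.csv')            # hypothetical path
# searcher.set_y('y1_bin')                                      # one of the keys from get_ys()
# searcher.run_fit(LogisticRegression(max_iter=1000), sampler=RandomOverSampler())
# searcher.model_stats()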
class JWWindowSelector:
ycols = ['R0_quiz_response','R1_quiz_response','R2_quiz_response','R1_quiz_response_bin',
'R1_quiz_response_0v12','R1_quiz_response_01v2','R1_quiz_response_bin_x',
'R2_quiz_response_bin','R2_quiz_response_bin_x','R2_quiz_response_bin0v123',
'R2_quiz_response_bin01v23','R2_quiz_response_bin012v3']
INTERACTION = 0
LEVEL = 1
QUIZ = 2
OBJECTIVE = 3
def __init__(self, csv_fpath=None, df=None, meta=None):
assert csv_fpath is not None or df is not None
# load df
if df is None:
print(f'Loading from {csv_fpath}...')
self.df, self.meta = feat_util.open_csv_from_path_with_meta(
csv_fpath, index_col=0)
else:
self.df = df
self.meta = meta or []
self.df_cols = list(df.columns)
@staticmethod
def get_abbrev(window_type):
if window_type == JWWindowSelector.INTERACTION:
return 'int'
if window_type == JWWindowSelector.LEVEL:
return 'lvl'
if window_type == JWWindowSelector.QUIZ:
return 'q'
if window_type == JWWindowSelector.OBJECTIVE:
return 'obj'
@staticmethod
def get_prefix(n, window_type):
if window_type == JWWindowSelector.INTERACTION:
return f'int{n}_i'
if window_type == JWWindowSelector.LEVEL:
return f'lvl{n}_'
if window_type == JWWindowSelector.QUIZ:
return f'Q{n}_'
if window_type == JWWindowSelector.OBJECTIVE:
return f'obj{n}_o'
@staticmethod
def get_window_range(window_type, skip_Q23=False):
if window_type == JWWindowSelector.INTERACTION:
return range(189)
if window_type == JWWindowSelector.LEVEL:
return range(24)
if window_type == JWWindowSelector.QUIZ:
if not skip_Q23:
return range(19)
else:
return [0,1,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18]
if window_type == JWWindowSelector.OBJECTIVE:
return range(80)
def cols_startwith(self, prefix):
return [c for c in self.df_cols if c.startswith(prefix)]
def get_feats(self, n, window_type):
prefix = self.get_prefix(n, window_type)
feats = self.cols_startwith(prefix)
return feats
def get_filter_queries(self, n, window_type, max_seconds_per_word=2):
prefix = JWWindowSelector.get_prefix(n, window_type)
queries = [f"R1_quiz_response == R1_quiz_response"]
if window_type in [JWWindowSelector.INTERACTION, JWWindowSelector.LEVEL]:
queries.extend([
f"{prefix}first_enc_duration == {prefix}first_enc_duration",
f"{prefix}first_enc_duration > 0",
])
if window_type == JWWindowSelector.QUIZ:
queries.extend([
f'{prefix}A1_nan!=1'
])
elif window_type == JWWindowSelector.INTERACTION:
num_words = self.df[f"int{n}_ifirst_enc_words_read"].max()
queries.extend([
f"{prefix}first_enc_words_read == {num_words}",
f"{prefix}time_to > 0",
f"{prefix}first_enc_duration < {prefix}first_enc_words_read*{max_seconds_per_word}",
])
elif window_type == JWWindowSelector.OBJECTIVE:
if n < 79:
queries.append(f'obj{n}_onext_int_nan==0')
queries.append(f"obj{n}_otime_to_next_obj < 600")
queries.append(f"obj{n}_otime_to_next_obj > 0 ")
elif window_type == JWWindowSelector.LEVEL:
queries.append(f"{prefix}time_in_level < 1200")
queries.append(f"{prefix}time_in_level > 0")
queries.extend([f"R{i}_quiz_response == R{i}_quiz_response" for i in [0,1,2]])
return queries
def get_base_meta(self):
return self.meta
@staticmethod
def join_XY(X,Y):
return X.join(Y)
def get_X_Y_meta(self, n, window_type, max_seconds_per_word=2,nbins=0, drop_first_next_int_col = True):
meta = []
prefix = JWWindowSelector.get_prefix(n, window_type)
Xfeats = self.get_feats(n, window_type)
meta.append(f'Using feats: {Xfeats}')
if window_type==JWWindowSelector.INTERACTION:
total_words = self.df[f"int{n}_ifirst_enc_words_read"].max()
if total_words is np.nan:
return None, None, meta
elif total_words < 10:
print('Total words < 10!')
queries = self.get_filter_queries(n, window_type, max_seconds_per_word=max_seconds_per_word)
filtered_df, filtered_df_meta = feat_util.filter_df(self.df[Xfeats+JWWindowSelector.ycols], query_list=queries, verbose=True, fillna=None)
meta.extend(filtered_df_meta)
X = filtered_df[Xfeats].fillna(0).copy()
meta.append(f'Filled X with 0')
Y = filtered_df[JWWindowSelector.ycols].copy()
drop_cols = []
if window_type in [JWWindowSelector.INTERACTION, JWWindowSelector.LEVEL]:
drop_cols = [
f"{prefix}first_enc_boxes_read",
f"{prefix}first_enc_words_read",
]
if window_type==JWWindowSelector.INTERACTION:
drop_cols.extend([
f"{prefix}time_to",
f"{prefix}total_duration"
])
if window_type==JWWindowSelector.OBJECTIVE:
drop_cols.append(f"{prefix}next_int_nan")
# if window_type==JWWindowSelector.QUIZ:
# drop_cols.append(f"{prefix}answers")
X = X.drop(columns=drop_cols)
meta.append(f"Dropped drop_cols: {drop_cols}")
constant_cols = X.columns[X.nunique()==1]
X = X.drop(columns=constant_cols)
meta.append(f'Dropped constant_cols: {constant_cols}')
if not len(X):
return None, None, meta
if window_type == JWWindowSelector.OBJECTIVE and drop_first_next_int_col:
next_int_cols = [c for c in X.columns if 'next_int' in c]
if next_int_cols:
X = X.drop(columns=next_int_cols[0])
meta.append(f'Dropped onehot column {next_int_cols[0]} from {next_int_cols}')
## does not bin by default
if nbins:
est = KBinsDiscretizer(n_bins=nbins, encode='onehot-dense', strategy='quantile')
bin_feats = [f'{prefix}first_enc_avg_tbps',
f'{prefix}first_enc_avg_wps',
# f'{prefix}first_enc_duration',
f'{prefix}first_enc_var_tbps',
f'{prefix}first_enc_var_wps']
bin_feats = [c for c in bin_feats if c in X.columns]
if bin_feats:
Xt = est.fit_transform(X[bin_feats])
new_feat_names = [f'{feat}>{x:.2f}' for bins,feat in zip(est.bin_edges_,bin_feats) for x in list(bins)[:-1]]
Xt_df = pd.DataFrame(Xt, index=X.index, columns=new_feat_names)
X = X.join(Xt_df)
X = X.drop(columns=bin_feats)
meta.append(f'Quantized n_bins={nbins} feats {bin_feats} to {new_feat_names}')
return (X, Y, meta)
def get_X_Y_meta_range(self, ns, window_type, max_seconds_per_word=2,nbins=0, drop_first_next_int_col = True, verbose=True):
X, Y, meta = None, None, []
for n in ns:
tX, tY, tmeta = self.get_X_Y_meta(n, window_type, max_seconds_per_word=max_seconds_per_word, nbins=nbins, drop_first_next_int_col=drop_first_next_int_col)
X, Y, meta = JWWindowSelector.join_X_Y_meta(X, Y, meta, tX, tY, tmeta, copy=False)
print('Join Size:', X.shape)
X, Y = X.copy(), Y.copy()
return X, Y, meta
@staticmethod
def join_X_Y_meta(X1, Y1, meta1, X2, Y2, meta2, copy=True):
meta = meta1+meta2
if X1 is None:
X = X2
Y = Y2
elif X2 is None:
X = X1
Y = Y1
else:
X = X1.join(X2, how='inner')
Y = Y1.loc[X.index, :]
meta = meta1+['--Inner Join--']+meta2+[f'Resultant Join Shape: {X.shape}']
if copy and X is not None:
X, Y = X.copy(), Y.copy()
return X, Y, meta
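# Hedged usage sketch (an illustration, not from the original file); the csv path is a placeholder.
# selector = JWWindowSelector(csv_fpath='features.csv')
# X, Y, meta = selector.get_X_Y_meta(5, JWWindowSelector.LEVEL)
# X_all, Y_all, meta_all = selector.get_X_Y_meta_range(range(3), JWWindowSelector.OBJECTIVE)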
|
[
"sklearn.preprocessing.StandardScaler",
"matplotlib.pyplot.suptitle",
"sklearn.model_selection.train_test_split",
"sklearn.metrics.classification_report",
"os.path.join",
"pandas.DataFrame",
"sklearn.impute.SimpleImputer",
"matplotlib.pyplot.close",
"IPython.display.display",
"feature_utils.binary_metric_list",
"collections.Counter",
"pandas.isna",
"matplotlib.pyplot.subplots",
"seaborn.set",
"feature_utils.remove_nan_labels",
"copy.deepcopy",
"matplotlib.pyplot.show",
"sklearn.metrics.roc_auc_score",
"imblearn.pipeline.make_pipeline",
"sklearn.preprocessing.KBinsDiscretizer",
"sklearn.metrics.roc_curve",
"matplotlib.pyplot.plot",
"feature_utils.open_csv_from_path_with_meta",
"feature_utils.filter_df"
] |
[((400, 409), 'seaborn.set', 'sns.set', ([], {}), '()\n', (407, 409), True, 'import seaborn as sns\n'), ((1850, 1886), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(3)'], {'figsize': '(20, 10)'}), '(1, 3, figsize=(20, 10))\n', (1862, 1886), True, 'from matplotlib import pyplot as plt\n'), ((2624, 2659), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (['f"""{category} Boxplot"""'], {}), "(f'{category} Boxplot')\n", (2636, 2659), True, 'from matplotlib import pyplot as plt\n'), ((14935, 14951), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (14949, 14951), False, 'from sklearn.preprocessing import StandardScaler, OneHotEncoder, KBinsDiscretizer\n'), ((14961, 14995), 'sklearn.impute.SimpleImputer', 'SimpleImputer', ([], {'strategy': '"""constant"""'}), "(strategy='constant')\n", (14974, 14995), False, 'from sklearn.impute import SimpleImputer\n'), ((22308, 22351), 'feature_utils.remove_nan_labels', 'feat_util.remove_nan_labels', (['self.X', 'self.y'], {}), '(self.X, self.y)\n', (22335, 22351), True, 'import feature_utils as feat_util\n'), ((22395, 22464), 'sklearn.model_selection.train_test_split', 'train_test_split', (['nonnull_X', 'nonnull_y'], {'test_size': '(0.2)', 'random_state': '(1)'}), '(nonnull_X, nonnull_y, test_size=0.2, random_state=1)\n', (22411, 22464), False, 'from sklearn.model_selection import train_test_split\n'), ((28571, 28605), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (['self.y_test', 'y_prob'], {}), '(self.y_test, y_prob)\n', (28584, 28605), False, 'from sklearn.metrics import f1_score, roc_auc_score, roc_curve, accuracy_score\n'), ((33639, 33752), 'feature_utils.filter_df', 'feat_util.filter_df', (['self.df[Xfeats + JWWindowSelector.ycols]'], {'query_list': 'queries', 'verbose': '(True)', 'fillna': 'None'}), '(self.df[Xfeats + JWWindowSelector.ycols], query_list=\n queries, verbose=True, fillna=None)\n', (33658, 33752), True, 'import feature_utils as feat_util\n'), ((3797, 3820), 'IPython.display.display', 'display', (['result_dfs[r0]'], {}), '(result_dfs[r0])\n', (3804, 3820), False, 'from IPython.display import display\n'), ((6199, 6212), 'pandas.isna', 'pd.isna', (['mean'], {}), '(mean)\n', (6206, 6212), True, 'import pandas as pd\n'), ((6263, 6275), 'pandas.isna', 'pd.isna', (['std'], {}), '(std)\n', (6270, 6275), True, 'import pandas as pd\n'), ((21613, 21675), 'feature_utils.open_csv_from_path_with_meta', 'feat_util.open_csv_from_path_with_meta', (['csv_fpath'], {'index_col': '(0)'}), '(csv_fpath, index_col=0)\n', (21651, 21675), True, 'import feature_utils as feat_util\n'), ((28317, 28337), 'collections.Counter', 'Counter', (['self.y_test'], {}), '(self.y_test)\n', (28324, 28337), False, 'from collections import Counter\n'), ((28387, 28417), 'sklearn.metrics.roc_curve', 'roc_curve', (['self.y_test', 'y_prob'], {}), '(self.y_test, y_prob)\n', (28396, 28417), False, 'from sklearn.metrics import f1_score, roc_auc_score, roc_curve, accuracy_score\n'), ((28430, 28463), 'matplotlib.pyplot.plot', 'plt.plot', (['fpr', 'tpr'], {'color': '"""green"""'}), "(fpr, tpr, color='green')\n", (28438, 28463), True, 'from matplotlib import pyplot as plt\n'), ((28476, 28529), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, 1]', '[0, 1]'], {'color': '"""red"""', 'linestyle': '"""--"""'}), "([0, 1], [0, 1], color='red', linestyle='--')\n", (28484, 28529), True, 'from matplotlib import pyplot as plt\n'), ((28542, 28552), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (28550, 28552), True, 'from matplotlib import pyplot as plt\n'), 
((28832, 28869), 'sklearn.metrics.classification_report', 'classification_report', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (28853, 28869), False, 'from sklearn.metrics import confusion_matrix, classification_report\n'), ((29541, 29603), 'feature_utils.open_csv_from_path_with_meta', 'feat_util.open_csv_from_path_with_meta', (['csv_fpath'], {'index_col': '(0)'}), '(csv_fpath, index_col=0)\n', (29579, 29603), True, 'import feature_utils as feat_util\n'), ((35258, 35332), 'sklearn.preprocessing.KBinsDiscretizer', 'KBinsDiscretizer', ([], {'n_bins': 'nbins', 'encode': '"""onehot-dense"""', 'strategy': '"""quantile"""'}), "(n_bins=nbins, encode='onehot-dense', strategy='quantile')\n", (35274, 35332), False, 'from sklearn.preprocessing import StandardScaler, OneHotEncoder, KBinsDiscretizer\n'), ((23761, 23814), 'imblearn.pipeline.make_pipeline', 'make_pipeline', (['sampler', 'self.preprocessor', 'classifier'], {}), '(sampler, self.preprocessor, classifier)\n', (23774, 23814), False, 'from imblearn.pipeline import make_pipeline\n'), ((25336, 25372), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(3)', '(3)'], {'figsize': '(20, 20)'}), '(3, 3, figsize=(20, 20))\n', (25348, 25372), True, 'from matplotlib import pyplot as plt\n'), ((26837, 26859), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (['suptitle'], {}), '(suptitle)\n', (26849, 26859), True, 'from matplotlib import pyplot as plt\n'), ((26887, 26929), 'os.path.join', 'os.path.join', (['graph_dir', 'f"""{suptitle}.png"""'], {}), "(graph_dir, f'{suptitle}.png')\n", (26899, 26929), False, 'import os\n'), ((26993, 27004), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (27002, 27004), True, 'from matplotlib import pyplot as plt\n'), ((27653, 27773), 'feature_utils.binary_metric_list', 'feat_util.binary_metric_list', ([], {'y_true': 'y_true', 'y_pred': 'y_pred', 'y_prob': 'y_prob', 'X_shape': 'X_shape', 'label_prefix': 'f"""{label}_"""'}), "(y_true=y_true, y_pred=y_pred, y_prob=y_prob,\n X_shape=X_shape, label_prefix=f'{label}_')\n", (27681, 27773), True, 'import feature_utils as feat_util\n'), ((35925, 35980), 'pandas.DataFrame', 'pd.DataFrame', (['Xt'], {'index': 'X.index', 'columns': 'new_feat_names'}), '(Xt, index=X.index, columns=new_feat_names)\n', (35937, 35980), True, 'import pandas as pd\n'), ((17509, 17539), 'imblearn.pipeline.make_pipeline', 'make_pipeline', (['scaler', 'imputer'], {}), '(scaler, imputer)\n', (17522, 17539), False, 'from imblearn.pipeline import make_pipeline\n'), ((23653, 23685), 'copy.deepcopy', 'copy.deepcopy', (['self.preprocessor'], {}), '(self.preprocessor)\n', (23666, 23685), False, 'import copy\n')]
|
from subprocess import Popen, PIPE
import sys
import os
from queue import Queue, Empty
import subprocess
import threading
import time
class LocalShell(object):
def __init__(self):
pass
def run(self, cmd):
env = os.environ.copy()
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=subprocess.STDOUT, shell=True, env=env)
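        # writeall: stream the child's stdout to our stdout one character at a time until EOF.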
def writeall(p):
while True:
# print("read data: ")
data = p.stdout.read(1).decode("utf-8")
if not data:
break
sys.stdout.write(data)
sys.stdout.flush()
writer = threading.Thread(target=writeall, args=(p,))
writer.start()
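        # reader: forward our stdin, one character at a time, onto a queue drained by the loop below.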
def reader(readq):
try:
while True:
d = sys.stdin.read(1)
if not d:
break
readq.put(d)
except EOFError:
pass
readq = Queue()
r = threading.Thread(target=reader, args=(readq,))
r.daemon=True
r.start()
while True:
            if not writer.is_alive():
break
try:
d = readq.get(block=False)
self._write(p, bytes(d, 'utf-8'))
time.sleep(0.01)
except Empty:
pass
def _write(self, process, message):
process.stdin.write(message)
process.stdin.flush()
cmd = ['cookiecutter', 'source-files']
cmd = ' '.join(cmd)
def main():
shell = LocalShell()
try:
shell.run(cmd)
except KeyboardInterrupt:
return
if __name__ == '__main__':
main()
|
[
"sys.stdout.write",
"threading.Thread",
"subprocess.Popen",
"sys.stdin.read",
"os.environ.copy",
"time.sleep",
"sys.stdout.flush",
"queue.Queue"
] |
[((238, 255), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (253, 255), False, 'import os\n'), ((268, 354), 'subprocess.Popen', 'Popen', (['cmd'], {'stdin': 'PIPE', 'stdout': 'PIPE', 'stderr': 'subprocess.STDOUT', 'shell': '(True)', 'env': 'env'}), '(cmd, stdin=PIPE, stdout=PIPE, stderr=subprocess.STDOUT, shell=True,\n env=env)\n', (273, 354), False, 'from subprocess import Popen, PIPE\n'), ((643, 687), 'threading.Thread', 'threading.Thread', ([], {'target': 'writeall', 'args': '(p,)'}), '(target=writeall, args=(p,))\n', (659, 687), False, 'import threading\n'), ((986, 993), 'queue.Queue', 'Queue', ([], {}), '()\n', (991, 993), False, 'from queue import Queue, Empty\n'), ((1006, 1052), 'threading.Thread', 'threading.Thread', ([], {'target': 'reader', 'args': '(readq,)'}), '(target=reader, args=(readq,))\n', (1022, 1052), False, 'import threading\n'), ((567, 589), 'sys.stdout.write', 'sys.stdout.write', (['data'], {}), '(data)\n', (583, 589), False, 'import sys\n'), ((606, 624), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (622, 624), False, 'import sys\n'), ((1299, 1315), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (1309, 1315), False, 'import time\n'), ((808, 825), 'sys.stdin.read', 'sys.stdin.read', (['(1)'], {}), '(1)\n', (822, 825), False, 'import sys\n')]
|
from typing import Optional, Callable
import torch
import numpy as np
from PIL.Image import Image
from ..transforms import TargetHandler
class NormalizeBothInputAndTarget:
transform: Callable[[Image], Image]
target_handler: TargetHandler
def __init__(
self,
transform: Callable[[Image], Image],
target_handler: Optional[TargetHandler] = None,
):
self.target_handler = target_handler
self.transform = transform
def forward(self, x: Image) -> torch.Tensor:
image_data = np.array(x)
std = image_data.std()
mean = image_data.mean()
erased_img = self.transform(x)
erased_img_data = torch.tensor(np.array(erased_img), dtype=torch.float32)
normed_img_data = (erased_img_data - mean) / std
target = self.target_handler.get()
if target is None:
raise RuntimeError("target has not generated.")
if not isinstance(target, Image):
raise TypeError("the generated target must be an PIL.Image")
target_data = torch.tensor(np.array(target), dtype=torch.float32)
self.target_handler.set((target_data - mean) / std)
return normed_img_data
|
[
"numpy.array"
] |
[((554, 565), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (562, 565), True, 'import numpy as np\n'), ((708, 728), 'numpy.array', 'np.array', (['erased_img'], {}), '(erased_img)\n', (716, 728), True, 'import numpy as np\n'), ((1088, 1104), 'numpy.array', 'np.array', (['target'], {}), '(target)\n', (1096, 1104), True, 'import numpy as np\n')]
|
import aiohttp
async def get_tinyurl(link: str):
url = f"http://tinyurl.com/api-create.php?url={link}"
async with aiohttp.ClientSession() as session:
async with session.get(url) as response:
response = (await response.content.read()).decode('utf-8')
return response
|
[
"aiohttp.ClientSession"
] |
[((124, 147), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (145, 147), False, 'import aiohttp\n')]
|
'''
Problem Name: Good Number Or Not
Problem Code: ISGOODNM
Problem Link: https://www.codechef.com/problems/ISGOODNM
Solution Link: https://www.codechef.com/viewsolution/47005382
'''
def divisors(m):
from math import sqrt
l = set()
for i in range(1, int(sqrt(m)+1)):
if m%i == 0:
l.add(i)
l.add(m//i)
l = list(l)
l.sort()
return l[:-1]
def solve(n):
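    # A number is "good" if it equals the sum of its proper divisors (i.e. it is a perfect number).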
l = sum(divisors(n))
if l == n:
return 'YES'
return 'NO'
if __name__ == '__main__':
n = int(input())
print(solve(n))
|
[
"math.sqrt"
] |
[((284, 291), 'math.sqrt', 'sqrt', (['m'], {}), '(m)\n', (288, 291), False, 'from math import sqrt\n')]
|
import sqlite3
import os
class DbUtil:
def __init__(self, db_file):
self.db_path = db_file
self.conn = self.create_connection()
self.c = self.create_cursor()
def create_connection(self):
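        # Start from a clean slate: remove any existing database file, then connect (sqlite creates a new one).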
try:
os.remove(self.db_path)
print("removing existing db file")
except OSError as e:
print(e)
print("Db file doesn't exist, creating it...")
conn = sqlite3.connect(self.db_path)
return conn
def create_cursor(self):
return self.conn.cursor()
def create_table(self, creation_string):
self.c.execute(creation_string)
self.conn.commit()
def execute_on_db(self, execution_string):
exec_st = ""
try:
# todo: check if encode/decode is really necessary?
exec_st = execution_string.encode("utf-8")
self.c.execute(exec_st.decode("utf-8"))
except sqlite3.OperationalError as e:
# todo: send mail notification
print(exec_st)
raise e
def close_db(self):
self.conn.close()
|
[
"os.remove",
"sqlite3.connect"
] |
[((443, 472), 'sqlite3.connect', 'sqlite3.connect', (['self.db_path'], {}), '(self.db_path)\n', (458, 472), False, 'import sqlite3\n'), ((248, 271), 'os.remove', 'os.remove', (['self.db_path'], {}), '(self.db_path)\n', (257, 271), False, 'import os\n')]
|
from typing import Iterator, List, Union, Tuple, Any
from datetime import datetime
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
from tensorflow import keras
import tensorflow_addons as tfa
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras import layers, models, Model
from tensorflow.python.keras.callbacks import TensorBoard, EarlyStopping, ModelCheckpoint
from tensorflow.keras.losses import MeanAbsoluteError, MeanAbsolutePercentageError
from tensorflow.keras.models import Sequential
from tensorflow.keras.applications import EfficientNetB0
from tensorflow.keras.utils import plot_model
from tensorflow.keras.callbacks import History
# the next 3 lines of code are for my machine and setup due to https://github.com/tensorflow/tensorflow/issues/43174
import tensorflow as tf
physical_devices = tf.config.list_physical_devices("GPU")
tf.config.experimental.set_memory_growth(physical_devices[0], True)
def visualize_augmentations(data_generator: ImageDataGenerator, df: pd.DataFrame):
"""Visualizes the keras augmentations with matplotlib in 3x3 grid. This function is part of create_generators() and
can be accessed from there.
Parameters
----------
data_generator : Iterator
The keras data generator of your training data.
df : pd.DataFrame
The Pandas DataFrame containing your training data.
"""
# super hacky way of creating a small dataframe with one image
series = df.iloc[2]
df_augmentation_visualization = pd.concat([series, series], axis=1).transpose()
iterator_visualizations = data_generator.flow_from_dataframe( # type: ignore
dataframe=df_augmentation_visualization,
x_col="image_location",
y_col="price",
class_mode="raw",
target_size=(224, 224), # size of the image
batch_size=1, # use only one image for visualization
)
for i in range(9):
ax = plt.subplot(3, 3, i + 1) # create a 3x3 grid
batch = next(iterator_visualizations) # get the next image of the generator (always the same image)
img = batch[0] # type: ignore
img = img[0, :, :, :] # remove one dimension for plotting without issues
plt.imshow(img)
plt.show()
plt.close()
def get_mean_baseline(train: pd.DataFrame, val: pd.DataFrame) -> float:
"""Calculates the mean MAE and MAPE baselines by taking the mean values of the training data as prediction for the
validation target feature.
Parameters
----------
train : pd.DataFrame
Pandas DataFrame containing your training data.
val : pd.DataFrame
Pandas DataFrame containing your validation data.
Returns
-------
float
MAPE value.
"""
y_hat = train["price"].mean()
val["y_hat"] = y_hat
mae = MeanAbsoluteError()
mae = mae(val["price"], val["y_hat"]).numpy() # type: ignore
mape = MeanAbsolutePercentageError()
mape = mape(val["price"], val["y_hat"]).numpy() # type: ignore
print(mae)
print("mean baseline MAPE: ", mape)
return mape
def split_data(df: pd.DataFrame) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:
"""Accepts a Pandas DataFrame and splits it into training, testing and validation data. Returns DataFrames.
Parameters
----------
df : pd.DataFrame
Your Pandas DataFrame containing all your data.
Returns
-------
    Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]
        The training, validation and test DataFrames.
"""
    train, val = train_test_split(df, test_size=0.2, random_state=1)  # split the data with a validation size of 20%
train, test = train_test_split(
train, test_size=0.125, random_state=1
) # split the data with an overall test size of 10%
print("shape train: ", train.shape) # type: ignore
print("shape val: ", val.shape) # type: ignore
print("shape test: ", test.shape) # type: ignore
print("Descriptive statistics of train:")
print(train.describe()) # type: ignore
return train, val, test # type: ignore
def create_generators(
df: pd.DataFrame, train: pd.DataFrame, val: pd.DataFrame, test: pd.DataFrame, plot_augmentations: Any
) -> Tuple[Iterator, Iterator, Iterator]:
"""Accepts four Pandas DataFrames: all your data, the training, validation and test DataFrames. Creates and returns
keras ImageDataGenerators. Within this function you can also visualize the augmentations of the ImageDataGenerators.
Parameters
----------
df : pd.DataFrame
Your Pandas DataFrame containing all your data.
train : pd.DataFrame
Your Pandas DataFrame containing your training data.
val : pd.DataFrame
Your Pandas DataFrame containing your validation data.
test : pd.DataFrame
Your Pandas DataFrame containing your testing data.
Returns
-------
Tuple[Iterator, Iterator, Iterator]
keras ImageDataGenerators used for training, validating and testing of your models.
"""
train_generator = ImageDataGenerator(
rescale=1.0 / 255,
rotation_range=5,
width_shift_range=0.1,
height_shift_range=0.1,
brightness_range=(0.75, 1),
shear_range=0.1,
zoom_range=[0.75, 1],
horizontal_flip=True,
validation_split=0.2,
) # create an ImageDataGenerator with multiple image augmentations
validation_generator = ImageDataGenerator(
rescale=1.0 / 255
) # except for rescaling, no augmentations are needed for validation and testing generators
test_generator = ImageDataGenerator(rescale=1.0 / 255)
# visualize image augmentations
if plot_augmentations == True:
visualize_augmentations(train_generator, df)
train_generator = train_generator.flow_from_dataframe(
dataframe=train,
x_col="image_location", # this is where your image data is stored
y_col="price", # this is your target feature
class_mode="raw", # use "raw" for regressions
target_size=(224, 224),
batch_size=32, # increase or decrease to fit your GPU
)
validation_generator = validation_generator.flow_from_dataframe(
dataframe=val, x_col="image_location", y_col="price", class_mode="raw", target_size=(224, 224), batch_size=128,
)
test_generator = test_generator.flow_from_dataframe(
dataframe=test, x_col="image_location", y_col="price", class_mode="raw", target_size=(224, 224), batch_size=128,
)
return train_generator, validation_generator, test_generator
def get_callbacks(model_name: str) -> List[Union[TensorBoard, EarlyStopping, ModelCheckpoint]]:
"""Accepts the model name as a string and returns multiple callbacks for training the keras model.
Parameters
----------
model_name : str
The name of the model as a string.
Returns
-------
List[Union[TensorBoard, EarlyStopping, ModelCheckpoint]]
A list of multiple keras callbacks.
"""
logdir = (
"logs/scalars/" + model_name + "_" + datetime.now().strftime("%Y%m%d-%H%M%S")
) # create a folder for each model.
tensorboard_callback = TensorBoard(log_dir=logdir)
# use tensorboard --logdir logs/scalars in your command line to startup tensorboard with the correct logs
early_stopping_callback = EarlyStopping(
monitor="val_mean_absolute_percentage_error",
min_delta=1, # model should improve by at least 1%
patience=10, # amount of epochs with improvements worse than 1% until the model stops
verbose=2,
mode="min",
restore_best_weights=True, # restore the best model with the lowest validation error
)
model_checkpoint_callback = ModelCheckpoint(
"./data/models/" + model_name + ".h5",
monitor="val_mean_absolute_percentage_error",
verbose=0,
save_best_only=True, # save the best model
mode="min",
save_freq="epoch", # save every epoch
) # saving eff_net takes quite a bit of time
return [tensorboard_callback, early_stopping_callback, model_checkpoint_callback]
def small_cnn() -> Sequential:
"""A very small custom convolutional neural network with image input dimensions of 224x224x3.
Returns
-------
Sequential
The keras Sequential model.
"""
model = models.Sequential()
model.add(layers.Conv2D(32, (3, 3), activation="relu", input_shape=(224, 224, 3)))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation="relu"))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation="relu"))
model.add(layers.Flatten())
model.add(layers.Dense(64, activation="relu"))
model.add(layers.Dense(1))
return model
def run_model(
model_name: str,
model_function: Model,
lr: float,
train_generator: Iterator,
validation_generator: Iterator,
test_generator: Iterator,
) -> History:
"""This function runs a keras model with the Ranger optimizer and multiple callbacks. The model is evaluated within
training through the validation generator and afterwards one final time on the test generator.
Parameters
----------
model_name : str
The name of the model as a string.
model_function : Model
Keras model function like small_cnn() or adapt_efficient_net().
lr : float
Learning rate.
train_generator : Iterator
keras ImageDataGenerators for the training data.
validation_generator : Iterator
keras ImageDataGenerators for the validation data.
test_generator : Iterator
keras ImageDataGenerators for the test data.
Returns
-------
History
The history of the keras model as a History object. To access it as a Dict, use history.history. For an example
see plot_results().
"""
callbacks = get_callbacks(model_name)
model = model_function
model.summary()
plot_model(model, to_file=model_name + ".png", show_shapes=True)
radam = tfa.optimizers.RectifiedAdam(learning_rate=lr)
ranger = tfa.optimizers.Lookahead(radam, sync_period=6, slow_step_size=0.5)
optimizer = ranger
model.compile(
optimizer=optimizer, loss="mean_absolute_error", metrics=[MeanAbsoluteError(), MeanAbsolutePercentageError()]
)
history = model.fit(
train_generator,
epochs=100,
validation_data=validation_generator,
callbacks=callbacks,
workers=6, # adjust this according to the number of CPU cores of your machine
)
model.evaluate(
test_generator, callbacks=callbacks,
)
return history # type: ignore
def adapt_efficient_net() -> Model:
"""This code uses adapts the most up-to-date version of EfficientNet with NoisyStudent weights to a regression
problem. Most of this code is adapted from the official keras documentation.
Returns
-------
Model
The keras model.
"""
inputs = layers.Input(
shape=(224, 224, 3)
) # input shapes of the images should always be 224x224x3 with EfficientNetB0
# use the downloaded and converted newest EfficientNet wheights
model = EfficientNetB0(include_top=False, input_tensor=inputs, weights="efficientnetb0_notop.h5")
# Freeze the pretrained weights
model.trainable = False
# Rebuild top
x = layers.GlobalAveragePooling2D(name="avg_pool")(model.output)
x = layers.BatchNormalization()(x)
top_dropout_rate = 0.4
x = layers.Dropout(top_dropout_rate, name="top_dropout")(x)
outputs = layers.Dense(1, name="pred")(x)
# Compile
model = keras.Model(inputs, outputs, name="EfficientNet")
return model
def plot_results(model_history_small_cnn: History, model_history_eff_net: History, mean_baseline: float):
"""This function uses seaborn with matplotlib to plot the trainig and validation losses of both input models in an
sns.relplot(). The mean baseline is plotted as a horizontal red dotted line.
Parameters
----------
model_history_small_cnn : History
keras History object of the model.fit() method.
model_history_eff_net : History
keras History object of the model.fit() method.
mean_baseline : float
Result of the get_mean_baseline() function.
"""
# create a dictionary for each model history and loss type
dict1 = {
"MAPE": model_history_small_cnn.history["mean_absolute_percentage_error"],
"type": "training",
"model": "small_cnn",
}
dict2 = {
"MAPE": model_history_small_cnn.history["val_mean_absolute_percentage_error"],
"type": "validation",
"model": "small_cnn",
}
dict3 = {
"MAPE": model_history_eff_net.history["mean_absolute_percentage_error"],
"type": "training",
"model": "eff_net",
}
dict4 = {
"MAPE": model_history_eff_net.history["val_mean_absolute_percentage_error"],
"type": "validation",
"model": "eff_net",
}
# convert the dicts to pd.Series and concat them to a pd.DataFrame in the long format
s1 = pd.DataFrame(dict1)
s2 = pd.DataFrame(dict2)
s3 = pd.DataFrame(dict3)
s4 = pd.DataFrame(dict4)
df = pd.concat([s1, s2, s3, s4], axis=0).reset_index()
grid = sns.relplot(data=df, x=df["index"], y="MAPE", hue="model", col="type", kind="line", legend=False)
grid.set(ylim=(20, 100)) # set the y-axis limit
for ax in grid.axes.flat:
ax.axhline(
y=mean_baseline, color="lightcoral", linestyle="dashed"
) # add a mean baseline horizontal bar to each plot
ax.set(xlabel="Epoch")
labels = ["small_cnn", "eff_net", "mean_baseline"] # custom labels for the plot
plt.legend(labels=labels)
plt.savefig("training_validation.png")
plt.show()
def run(small_sample=False):
"""Run all the code of this file.
Parameters
----------
small_sample : bool, optional
If you just want to check if the code is working, set small_sample to True, by default False
"""
df = pd.read_pickle("./data/df.pkl")
df["image_location"] = (
"./data/processed_images/" + df["zpid"] + ".png"
) # add the correct path for the image locations.
if small_sample == True:
        df = df.iloc[0:1000]  # set small_sample to True if you want to check that your code works without a long wait
train, val, test = split_data(df) # split your data
mean_baseline = get_mean_baseline(train, val)
train_generator, validation_generator, test_generator = create_generators(
df=df, train=train, val=val, test=test, plot_augmentations=True
)
small_cnn_history = run_model(
model_name="small_cnn",
model_function=small_cnn(),
lr=0.001,
train_generator=train_generator,
validation_generator=validation_generator,
test_generator=test_generator,
)
eff_net_history = run_model(
model_name="eff_net",
model_function=adapt_efficient_net(),
lr=0.5,
train_generator=train_generator,
validation_generator=validation_generator,
test_generator=test_generator,
)
plot_results(small_cnn_history, eff_net_history, mean_baseline)
if __name__ == "__main__":
run(small_sample=False)
|
[
"tensorflow.keras.preprocessing.image.ImageDataGenerator",
"tensorflow.keras.layers.MaxPooling2D",
"tensorflow.keras.layers.Dense",
"sklearn.model_selection.train_test_split",
"tensorflow.python.keras.callbacks.TensorBoard",
"tensorflow.keras.losses.MeanAbsolutePercentageError",
"tensorflow.keras.models.Sequential",
"seaborn.relplot",
"tensorflow_addons.optimizers.Lookahead",
"tensorflow_addons.optimizers.RectifiedAdam",
"pandas.DataFrame",
"tensorflow.keras.layers.Flatten",
"tensorflow.keras.layers.BatchNormalization",
"matplotlib.pyplot.close",
"matplotlib.pyplot.imshow",
"tensorflow.keras.losses.MeanAbsoluteError",
"tensorflow.keras.utils.plot_model",
"tensorflow.keras.layers.Input",
"tensorflow.keras.applications.EfficientNetB0",
"tensorflow.keras.layers.GlobalAveragePooling2D",
"datetime.datetime.now",
"pandas.concat",
"matplotlib.pyplot.show",
"tensorflow.keras.layers.Dropout",
"matplotlib.pyplot.legend",
"tensorflow.config.experimental.set_memory_growth",
"tensorflow.keras.Model",
"tensorflow.python.keras.callbacks.ModelCheckpoint",
"matplotlib.pyplot.subplot",
"tensorflow.keras.layers.Conv2D",
"tensorflow.config.list_physical_devices",
"tensorflow.python.keras.callbacks.EarlyStopping",
"pandas.read_pickle",
"matplotlib.pyplot.savefig"
] |
[((923, 961), 'tensorflow.config.list_physical_devices', 'tf.config.list_physical_devices', (['"""GPU"""'], {}), "('GPU')\n", (954, 961), True, 'import tensorflow as tf\n'), ((962, 1029), 'tensorflow.config.experimental.set_memory_growth', 'tf.config.experimental.set_memory_growth', (['physical_devices[0]', '(True)'], {}), '(physical_devices[0], True)\n', (1002, 1029), True, 'import tensorflow as tf\n'), ((2325, 2335), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2333, 2335), True, 'import matplotlib.pyplot as plt\n'), ((2340, 2351), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (2349, 2351), True, 'import matplotlib.pyplot as plt\n'), ((2901, 2920), 'tensorflow.keras.losses.MeanAbsoluteError', 'MeanAbsoluteError', ([], {}), '()\n', (2918, 2920), False, 'from tensorflow.keras.losses import MeanAbsoluteError, MeanAbsolutePercentageError\n'), ((2998, 3027), 'tensorflow.keras.losses.MeanAbsolutePercentageError', 'MeanAbsolutePercentageError', ([], {}), '()\n', (3025, 3027), False, 'from tensorflow.keras.losses import MeanAbsoluteError, MeanAbsolutePercentageError\n'), ((3601, 3652), 'sklearn.model_selection.train_test_split', 'train_test_split', (['df'], {'test_size': '(0.2)', 'random_state': '(1)'}), '(df, test_size=0.2, random_state=1)\n', (3617, 3652), False, 'from sklearn.model_selection import train_test_split\n'), ((3718, 3774), 'sklearn.model_selection.train_test_split', 'train_test_split', (['train'], {'test_size': '(0.125)', 'random_state': '(1)'}), '(train, test_size=0.125, random_state=1)\n', (3734, 3774), False, 'from sklearn.model_selection import train_test_split\n'), ((5105, 5328), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0 / 255)', 'rotation_range': '(5)', 'width_shift_range': '(0.1)', 'height_shift_range': '(0.1)', 'brightness_range': '(0.75, 1)', 'shear_range': '(0.1)', 'zoom_range': '[0.75, 1]', 'horizontal_flip': '(True)', 'validation_split': '(0.2)'}), '(rescale=1.0 / 255, rotation_range=5, width_shift_range=\n 0.1, height_shift_range=0.1, brightness_range=(0.75, 1), shear_range=\n 0.1, zoom_range=[0.75, 1], horizontal_flip=True, validation_split=0.2)\n', (5123, 5328), False, 'from tensorflow.keras.preprocessing.image import ImageDataGenerator\n'), ((5491, 5528), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0 / 255)'}), '(rescale=1.0 / 255)\n', (5509, 5528), False, 'from tensorflow.keras.preprocessing.image import ImageDataGenerator\n'), ((5655, 5692), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0 / 255)'}), '(rescale=1.0 / 255)\n', (5673, 5692), False, 'from tensorflow.keras.preprocessing.image import ImageDataGenerator\n'), ((7235, 7262), 'tensorflow.python.keras.callbacks.TensorBoard', 'TensorBoard', ([], {'log_dir': 'logdir'}), '(log_dir=logdir)\n', (7246, 7262), False, 'from tensorflow.python.keras.callbacks import TensorBoard, EarlyStopping, ModelCheckpoint\n'), ((7404, 7543), 'tensorflow.python.keras.callbacks.EarlyStopping', 'EarlyStopping', ([], {'monitor': '"""val_mean_absolute_percentage_error"""', 'min_delta': '(1)', 'patience': '(10)', 'verbose': '(2)', 'mode': '"""min"""', 'restore_best_weights': '(True)'}), "(monitor='val_mean_absolute_percentage_error', min_delta=1,\n patience=10, verbose=2, mode='min', restore_best_weights=True)\n", (7417, 7543), False, 'from tensorflow.python.keras.callbacks import TensorBoard, EarlyStopping, ModelCheckpoint\n'), ((7801, 
7973), 'tensorflow.python.keras.callbacks.ModelCheckpoint', 'ModelCheckpoint', (["('./data/models/' + model_name + '.h5')"], {'monitor': '"""val_mean_absolute_percentage_error"""', 'verbose': '(0)', 'save_best_only': '(True)', 'mode': '"""min"""', 'save_freq': '"""epoch"""'}), "('./data/models/' + model_name + '.h5', monitor=\n 'val_mean_absolute_percentage_error', verbose=0, save_best_only=True,\n mode='min', save_freq='epoch')\n", (7816, 7973), False, 'from tensorflow.python.keras.callbacks import TensorBoard, EarlyStopping, ModelCheckpoint\n'), ((8420, 8439), 'tensorflow.keras.models.Sequential', 'models.Sequential', ([], {}), '()\n', (8437, 8439), False, 'from tensorflow.keras import layers, models, Model\n'), ((10062, 10126), 'tensorflow.keras.utils.plot_model', 'plot_model', (['model'], {'to_file': "(model_name + '.png')", 'show_shapes': '(True)'}), "(model, to_file=model_name + '.png', show_shapes=True)\n", (10072, 10126), False, 'from tensorflow.keras.utils import plot_model\n'), ((10140, 10186), 'tensorflow_addons.optimizers.RectifiedAdam', 'tfa.optimizers.RectifiedAdam', ([], {'learning_rate': 'lr'}), '(learning_rate=lr)\n', (10168, 10186), True, 'import tensorflow_addons as tfa\n'), ((10200, 10266), 'tensorflow_addons.optimizers.Lookahead', 'tfa.optimizers.Lookahead', (['radam'], {'sync_period': '(6)', 'slow_step_size': '(0.5)'}), '(radam, sync_period=6, slow_step_size=0.5)\n', (10224, 10266), True, 'import tensorflow_addons as tfa\n'), ((11094, 11127), 'tensorflow.keras.layers.Input', 'layers.Input', ([], {'shape': '(224, 224, 3)'}), '(shape=(224, 224, 3))\n', (11106, 11127), False, 'from tensorflow.keras import layers, models, Model\n'), ((11299, 11393), 'tensorflow.keras.applications.EfficientNetB0', 'EfficientNetB0', ([], {'include_top': '(False)', 'input_tensor': 'inputs', 'weights': '"""efficientnetb0_notop.h5"""'}), "(include_top=False, input_tensor=inputs, weights=\n 'efficientnetb0_notop.h5')\n", (11313, 11393), False, 'from tensorflow.keras.applications import EfficientNetB0\n'), ((11744, 11793), 'tensorflow.keras.Model', 'keras.Model', (['inputs', 'outputs'], {'name': '"""EfficientNet"""'}), "(inputs, outputs, name='EfficientNet')\n", (11755, 11793), False, 'from tensorflow import keras\n'), ((13235, 13254), 'pandas.DataFrame', 'pd.DataFrame', (['dict1'], {}), '(dict1)\n', (13247, 13254), True, 'import pandas as pd\n'), ((13264, 13283), 'pandas.DataFrame', 'pd.DataFrame', (['dict2'], {}), '(dict2)\n', (13276, 13283), True, 'import pandas as pd\n'), ((13293, 13312), 'pandas.DataFrame', 'pd.DataFrame', (['dict3'], {}), '(dict3)\n', (13305, 13312), True, 'import pandas as pd\n'), ((13322, 13341), 'pandas.DataFrame', 'pd.DataFrame', (['dict4'], {}), '(dict4)\n', (13334, 13341), True, 'import pandas as pd\n'), ((13412, 13514), 'seaborn.relplot', 'sns.relplot', ([], {'data': 'df', 'x': "df['index']", 'y': '"""MAPE"""', 'hue': '"""model"""', 'col': '"""type"""', 'kind': '"""line"""', 'legend': '(False)'}), "(data=df, x=df['index'], y='MAPE', hue='model', col='type', kind\n ='line', legend=False)\n", (13423, 13514), True, 'import seaborn as sns\n'), ((13863, 13888), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'labels': 'labels'}), '(labels=labels)\n', (13873, 13888), True, 'import matplotlib.pyplot as plt\n'), ((13893, 13931), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""training_validation.png"""'], {}), "('training_validation.png')\n", (13904, 13931), True, 'import matplotlib.pyplot as plt\n'), ((13936, 13946), 'matplotlib.pyplot.show', 'plt.show', ([], {}), 
'()\n', (13944, 13946), True, 'import matplotlib.pyplot as plt\n'), ((14200, 14231), 'pandas.read_pickle', 'pd.read_pickle', (['"""./data/df.pkl"""'], {}), "('./data/df.pkl')\n", (14214, 14231), True, 'import pandas as pd\n'), ((2021, 2045), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(3)', '(3)', '(i + 1)'], {}), '(3, 3, i + 1)\n', (2032, 2045), True, 'import matplotlib.pyplot as plt\n'), ((2305, 2320), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (2315, 2320), True, 'import matplotlib.pyplot as plt\n'), ((8454, 8525), 'tensorflow.keras.layers.Conv2D', 'layers.Conv2D', (['(32)', '(3, 3)'], {'activation': '"""relu"""', 'input_shape': '(224, 224, 3)'}), "(32, (3, 3), activation='relu', input_shape=(224, 224, 3))\n", (8467, 8525), False, 'from tensorflow.keras import layers, models, Model\n'), ((8541, 8568), 'tensorflow.keras.layers.MaxPooling2D', 'layers.MaxPooling2D', (['(2, 2)'], {}), '((2, 2))\n', (8560, 8568), False, 'from tensorflow.keras import layers, models, Model\n'), ((8584, 8628), 'tensorflow.keras.layers.Conv2D', 'layers.Conv2D', (['(64)', '(3, 3)'], {'activation': '"""relu"""'}), "(64, (3, 3), activation='relu')\n", (8597, 8628), False, 'from tensorflow.keras import layers, models, Model\n'), ((8644, 8671), 'tensorflow.keras.layers.MaxPooling2D', 'layers.MaxPooling2D', (['(2, 2)'], {}), '((2, 2))\n', (8663, 8671), False, 'from tensorflow.keras import layers, models, Model\n'), ((8687, 8731), 'tensorflow.keras.layers.Conv2D', 'layers.Conv2D', (['(64)', '(3, 3)'], {'activation': '"""relu"""'}), "(64, (3, 3), activation='relu')\n", (8700, 8731), False, 'from tensorflow.keras import layers, models, Model\n'), ((8748, 8764), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (8762, 8764), False, 'from tensorflow.keras import layers, models, Model\n'), ((8780, 8815), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(64)'], {'activation': '"""relu"""'}), "(64, activation='relu')\n", (8792, 8815), False, 'from tensorflow.keras import layers, models, Model\n'), ((8831, 8846), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {}), '(1)\n', (8843, 8846), False, 'from tensorflow.keras import layers, models, Model\n'), ((11480, 11526), 'tensorflow.keras.layers.GlobalAveragePooling2D', 'layers.GlobalAveragePooling2D', ([], {'name': '"""avg_pool"""'}), "(name='avg_pool')\n", (11509, 11526), False, 'from tensorflow.keras import layers, models, Model\n'), ((11549, 11576), 'tensorflow.keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {}), '()\n', (11574, 11576), False, 'from tensorflow.keras import layers, models, Model\n'), ((11615, 11667), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['top_dropout_rate'], {'name': '"""top_dropout"""'}), "(top_dropout_rate, name='top_dropout')\n", (11629, 11667), False, 'from tensorflow.keras import layers, models, Model\n'), ((11685, 11713), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {'name': '"""pred"""'}), "(1, name='pred')\n", (11697, 11713), False, 'from tensorflow.keras import layers, models, Model\n'), ((1602, 1637), 'pandas.concat', 'pd.concat', (['[series, series]'], {'axis': '(1)'}), '([series, series], axis=1)\n', (1611, 1637), True, 'import pandas as pd\n'), ((13351, 13386), 'pandas.concat', 'pd.concat', (['[s1, s2, s3, s4]'], {'axis': '(0)'}), '([s1, s2, s3, s4], axis=0)\n', (13360, 13386), True, 'import pandas as pd\n'), ((7126, 7140), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7138, 7140), False, 'from datetime import 
datetime\n'), ((10376, 10395), 'tensorflow.keras.losses.MeanAbsoluteError', 'MeanAbsoluteError', ([], {}), '()\n', (10393, 10395), False, 'from tensorflow.keras.losses import MeanAbsoluteError, MeanAbsolutePercentageError\n'), ((10397, 10426), 'tensorflow.keras.losses.MeanAbsolutePercentageError', 'MeanAbsolutePercentageError', ([], {}), '()\n', (10424, 10426), False, 'from tensorflow.keras.losses import MeanAbsoluteError, MeanAbsolutePercentageError\n')]
|
#!/usr/bin/env python3
import sys, os, re
os.chdir(os.getenv('srcdir', os.path.dirname(__file__)))
HBHEADERS = [os.path.basename(x) for x in os.getenv('HBHEADERS', '').split()] or \
[x for x in os.listdir('.') if x.startswith('hb') and x.endswith('.h')]
HBSOURCES = [os.path.basename(x) for x in os.getenv('HBSOURCES', '').split()] or \
[x for x in os.listdir('.') if x.startswith('hb') and x.endswith(('.cc', '.hh'))]
stat = 0
print('Checking that public header files #include "hb-common.h" or "hb.h" first (or none)')
for x in HBHEADERS:
if x == 'hb.h' or x == 'hb-common.h': continue
with open(x, 'r', encoding='utf-8') as f:
content = f.read()
first = re.findall(r'#.*include.*', content)[0]
if first not in ['#include "hb.h"', '#include "hb-common.h"']:
print('failure on %s' % x)
stat = 1
print('Checking that source files #include a private header first (or none)')
for x in HBSOURCES:
with open(x, 'r', encoding='utf-8') as f:
content = f.read()
includes = re.findall(r'#.*include.*', content)
if includes:
if not len(re.findall(r'"hb.*\.hh"', includes[0])):
print('failure on %s' % x)
stat = 1
print('Checking that there is no #include <hb-*.h>')
for x in HBHEADERS + HBSOURCES:
with open(x, 'r', encoding='utf-8') as f:
content = f.read()
if re.findall('#.*include.*<.*hb', content):
print('failure on %s' % x)
stat = 1
sys.exit(stat)
|
[
"os.path.basename",
"os.path.dirname",
"re.findall",
"os.getenv",
"os.listdir",
"sys.exit"
] |
[((1486, 1500), 'sys.exit', 'sys.exit', (['stat'], {}), '(stat)\n', (1494, 1500), False, 'import sys, os, re\n'), ((1051, 1086), 're.findall', 're.findall', (['"""#.*include.*"""', 'content'], {}), "('#.*include.*', content)\n", (1061, 1086), False, 'import sys, os, re\n'), ((1391, 1431), 're.findall', 're.findall', (['"""#.*include.*<.*hb"""', 'content'], {}), "('#.*include.*<.*hb', content)\n", (1401, 1431), False, 'import sys, os, re\n'), ((73, 98), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (88, 98), False, 'import sys, os, re\n'), ((115, 134), 'os.path.basename', 'os.path.basename', (['x'], {}), '(x)\n', (131, 134), False, 'import sys, os, re\n'), ((282, 301), 'os.path.basename', 'os.path.basename', (['x'], {}), '(x)\n', (298, 301), False, 'import sys, os, re\n'), ((705, 740), 're.findall', 're.findall', (['"""#.*include.*"""', 'content'], {}), "('#.*include.*', content)\n", (715, 740), False, 'import sys, os, re\n'), ((209, 224), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (219, 224), False, 'import sys, os, re\n'), ((376, 391), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (386, 391), False, 'import sys, os, re\n'), ((1124, 1162), 're.findall', 're.findall', (['""""hb.*\\\\.hh\\""""', 'includes[0]'], {}), '(\'"hb.*\\\\.hh"\', includes[0])\n', (1134, 1162), False, 'import sys, os, re\n'), ((144, 170), 'os.getenv', 'os.getenv', (['"""HBHEADERS"""', '""""""'], {}), "('HBHEADERS', '')\n", (153, 170), False, 'import sys, os, re\n'), ((311, 337), 'os.getenv', 'os.getenv', (['"""HBSOURCES"""', '""""""'], {}), "('HBSOURCES', '')\n", (320, 337), False, 'import sys, os, re\n')]
|
import io
import os
import sys
import lmdb
import json
import torch
import pickle
import random
import msgpack
import numpy as np
import msgpack_numpy
# from transformers import AutoTokenizer
from collections import defaultdict
from lz4.frame import compress, decompress
from os.path import exists, abspath, dirname
from sklearn.metrics.pairwise import cosine_similarity
from PIL import Image, ImageFont, ImageDraw, ImageEnhance
import pprint
pp = pprint.PrettyPrinter()
msgpack_numpy.patch()
origin_img_dir = '/data/share/UNITER/origin_imgs/flickr30k/flickr30k-images/'
def load_txt_db(db_dir):
# db loading
env_in = lmdb.open(db_dir, readonly=True, create=False)
txn_in = env_in.begin()
db = {}
for key, value in txn_in.cursor():
db[key] = value
print('db length:', len(db)) # db length: 443757
env_in.close()
return db
def load_img_db(img_dir, conf_th=0.2, max_bb=100, min_bb=10, num_bb=36, compress=False):
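    # Open the lmdb of pre-extracted image features / normalized bounding boxes; the sub-db name encodes the
    # confidence and box-count settings. Returns the per-image box counts (if precomputed) and a read-only txn.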
if conf_th == -1:
db_name = f'feat_numbb{num_bb}'
name2nbb = defaultdict(lambda: num_bb)
else:
db_name = f'feat_th{conf_th}_max{max_bb}_min{min_bb}'
nbb = f'nbb_th{conf_th}_max{max_bb}_min{min_bb}.json'
if not os.path.exists(f'{img_dir}/{nbb}'):
# nbb is not pre-computed
name2nbb = None
else:
name2nbb = json.load(open(f'{img_dir}/{nbb}'))
# => {'coco_test2015_000000043222.npz': 57, ...}
if compress:
db_name += '_compressed'
if name2nbb is None:
if compress:
db_name = 'all_compressed'
else:
db_name = 'all'
# db loading
env = lmdb.open(f'{img_dir}/{db_name}', readonly=True, create=False)
txn = env.begin(buffers=True)
return name2nbb, txn
def load_single_img(txn, file_name, compress=False):
# load single image with its file_name
dump = txn.get(file_name.encode('utf-8'))
if compress:
with io.BytesIO(dump) as reader:
img_dump = np.load(reader, allow_pickle=True)
img_dump = {'features': img_dump['features'],
'norm_bb': img_dump['norm_bb']}
else:
img_dump = msgpack.loads(dump, raw=False)
return img_dump
def get_concat_h(im1, im2):
dst = Image.new('RGB', (im1.width + im2.width, im1.height))
dst.paste(im1, (0, 0))
dst.paste(im2, (im1.width, 0))
return dst
def draw_bounding_box(img_name, img_bb, outline=(0, 0, 0, 255)):
source_img = Image.open(origin_img_dir + img_name).convert("RGB")
width, height = source_img.size
draw = ImageDraw.Draw(source_img, 'RGBA')
p1 = (width*img_bb[0], height*img_bb[1])
p2 = (width*img_bb[2], height*img_bb[3])
draw.rectangle((p1, p2), outline=outline, width=2)
# draw.text((img_bb[0], img_bb[1]), "something123", font=ImageFont.truetype("font_path123"))
return source_img
# source_img.save('bb_' + img_name, "JPEG")
def crop_bb(img_name, img_bbs):
source_img = Image.open(origin_img_dir + img_name).convert("RGB")
width, height = source_img.size
for i in range(img_bbs.shape[0]):
p1 = (width*img_bbs[i][0], height*img_bbs[i][1])
p2 = (width*img_bbs[i][2], height*img_bbs[i][3])
crop = source_img.crop((p1[0], p1[1], p2[0], p2[1]))
crop.save('crop_%d.jpg'%(i), 'JPEG')
def main():
NUM_LABELS = 1600
# tokenizer = AutoTokenizer.from_pretrained('bert-base-cased')
id2tok = json.load(open('id2tok.json'))
labels_ids = json.load(open('object_labels_ids.json'))
def convert_ids_to_tokens(i):
if isinstance(i, int):
return id2tok[str(i)]
else:
i = list(i)
return [id2tok[str(ii)] for ii in i]
def get_label_str(i):
if isinstance(i, int):
return convert_ids_to_tokens(labels_ids[i])
else:
i = list(i)
return [convert_ids_to_tokens(labels_ids[ii]) if ii > 0 else '[BACKGROUND]' for ii in i]
def get_hard_labels(soft_labels, top_k=3):
if len(soft_labels.shape) < 2:
soft_labels = soft_labels.reshape(1, -1)
sorted_labels = soft_labels.argsort(axis=-1)[:, ::-1][:, :top_k]
sorted_labels = sorted_labels - 1
res = []
for l in sorted_labels:
res.append(get_label_str(l))
return res
checkpoint = torch.load(
"/data/private/cc/experiment/MMP/pretrained_ckpts/pretrained/uniter-base.pt")
emb_weight = checkpoint['uniter.embeddings.word_embeddings.weight']
txt_db_old = load_txt_db('/data/share/UNITER/ve/txt_db/ve_train.db')
txt_db_new = load_txt_db(
'/data/share/UNITER/ve/da/GloVe/seed2/txt_db/ve_train.db')
name2nbb, img_db_txn = load_img_db('/data/share/UNITER/ve/img_db/flickr30k')
def display(k):
d1 = msgpack.loads(decompress(txt_db_old[k.split(b'_')[1]]), raw=False)
d2 = msgpack.loads(decompress(txt_db_new[k]), raw=False)
        # input_1 = tokenizer.convert_ids_to_tokens(d1['input_ids'])
# input_2 = tokenizer.convert_ids_to_tokens(d2['input_ids'])
input_1 = convert_ids_to_tokens(d1['input_ids'])
input_2 = convert_ids_to_tokens(d2['input_ids'])
input_3 = convert_ids_to_tokens(d2['mix_input_ids'])
hard_labels = get_hard_labels(d2['mix_soft_labels'])
# img1 = load_single_img(img_db_txn, d1['img_fname'])
img = load_single_img(img_db_txn, d2['img_fname'])
origin_img_name = str(k).split('_')[1].split('#')[0]
im1 = draw_bounding_box(origin_img_name, img['norm_bb'][d2['mix_index']])
im2 = draw_bounding_box(d2['mix_img_flk_id'], d2['mix_bb'], (200, 0, 0, 255))
cat_im = get_concat_h(im1, im2)
cat_im.save('bb_' + origin_img_name + '_' + d2['mix_img_flk_id'], 'JPEG')
# crop_bb(origin_img_name, img['norm_bb'])
return input_1, input_2, input_3, hard_labels
# print(list(txt_db_new.keys())[:10])
pp.pprint(display(list(txt_db_new.keys())[3]))
# pp.pprint(display(list(txt_db_new.keys())[1]))
# pp.pprint(display(list(txt_db_new.keys())[2]))
# import ipdb
# ipdb.set_trace()
if __name__ == '__main__':
main()
|
[
"msgpack_numpy.patch",
"PIL.Image.new",
"msgpack.loads",
"numpy.load",
"torch.load",
"os.path.exists",
"PIL.Image.open",
"pprint.PrettyPrinter",
"lmdb.open",
"PIL.ImageDraw.Draw",
"lz4.frame.decompress"
] |
[((403, 425), 'pprint.PrettyPrinter', 'pprint.PrettyPrinter', ([], {}), '()\n', (423, 425), False, 'import pprint\n'), ((427, 448), 'msgpack_numpy.patch', 'msgpack_numpy.patch', ([], {}), '()\n', (446, 448), False, 'import msgpack_numpy\n'), ((583, 629), 'lmdb.open', 'lmdb.open', (['db_dir'], {'readonly': '(True)', 'create': '(False)'}), '(db_dir, readonly=True, create=False)\n', (592, 629), False, 'import lmdb\n'), ((1610, 1672), 'lmdb.open', 'lmdb.open', (['f"""{img_dir}/{db_name}"""'], {'readonly': '(True)', 'create': '(False)'}), "(f'{img_dir}/{db_name}', readonly=True, create=False)\n", (1619, 1672), False, 'import lmdb\n'), ((2226, 2279), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(im1.width + im2.width, im1.height)'], {}), "('RGB', (im1.width + im2.width, im1.height))\n", (2235, 2279), False, 'from PIL import Image, ImageFont, ImageDraw, ImageEnhance\n'), ((2541, 2575), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['source_img', '"""RGBA"""'], {}), "(source_img, 'RGBA')\n", (2555, 2575), False, 'from PIL import Image, ImageFont, ImageDraw, ImageEnhance\n'), ((4305, 4403), 'torch.load', 'torch.load', (['"""/data/private/cc/experiment/MMP/pretrained_ckpts/pretrained/uniter-base.pt"""'], {}), "(\n '/data/private/cc/experiment/MMP/pretrained_ckpts/pretrained/uniter-base.pt'\n )\n", (4315, 4403), False, 'import torch\n'), ((2136, 2166), 'msgpack.loads', 'msgpack.loads', (['dump'], {'raw': '(False)'}), '(dump, raw=False)\n', (2149, 2166), False, 'import msgpack\n'), ((1169, 1203), 'os.path.exists', 'os.path.exists', (['f"""{img_dir}/{nbb}"""'], {}), "(f'{img_dir}/{nbb}')\n", (1183, 1203), False, 'import os\n'), ((1958, 1992), 'numpy.load', 'np.load', (['reader'], {'allow_pickle': '(True)'}), '(reader, allow_pickle=True)\n', (1965, 1992), True, 'import numpy as np\n'), ((2440, 2477), 'PIL.Image.open', 'Image.open', (['(origin_img_dir + img_name)'], {}), '(origin_img_dir + img_name)\n', (2450, 2477), False, 'from PIL import Image, ImageFont, ImageDraw, ImageEnhance\n'), ((2938, 2975), 'PIL.Image.open', 'Image.open', (['(origin_img_dir + img_name)'], {}), '(origin_img_dir + img_name)\n', (2948, 2975), False, 'from PIL import Image, ImageFont, ImageDraw, ImageEnhance\n'), ((4855, 4880), 'lz4.frame.decompress', 'decompress', (['txt_db_new[k]'], {}), '(txt_db_new[k])\n', (4865, 4880), False, 'from lz4.frame import compress, decompress\n')]
|
import requests
from typing import Dict, List
class LIFX:
'''
docs: https://api.developer.lifx.com
selectors: https://api.developer.lifx.com/docs/selectors
'''
url = 'https://api.lifx.com'
def __init__(self, token):
self.headers = {
'Authorization': f'Bearer {token}',
'Content-Type': 'application/json'
}
def list_lights(self, selector: str = 'all'):
'''
Args:
selector = what lights to list
Returns:
response object
'''
response = requests.get(
url=f'{LIFX.url}/v1/lights/{selector}',
headers=self.headers
)
return response
def set_state(self, color: str, selector: str = 'all', power: str = 'on', brightness: float = 1, duration: float = 0, fast: bool = True):
'''
Args
selector = what lights to change
power = on|off
color = color to change state to
brightness = 0.0 - 1.0
duration = how long until state is full
fast = don't make checks and just change
Returns
response object
'''
response = requests.put(
url=f'{LIFX.url}/v1/lights/{selector}/state',
headers=self.headers,
json={
'power': power,
'color': color,
'brightness': brightness,
'duration': duration,
'fast': fast
}
)
return response
def set_states(self, states: List = [], defaults: Dict = {}, fast: bool = True):
'''
Args:
states = a list of state objects
defaults = default parameters for each state object
fast = don't make checks and just change
Returns:
response object
'''
response = requests.put(
url=f'{LIFX.url}/v1/lights/states',
headers=self.headers,
json={
'states': states,
'defaults': defaults,
'fast': fast
}
)
return response
def pulse_effect(self, color: str, selector: str = 'all', from_color: str = '', period: float = 2, cycles: float = 5, power_on: bool = True):
'''
Args:
color = the color for the effect
from_color = the color to start the effect from
period = time in seconds for one cycle
cycles = number of times to repeat
power_on = turn on the light if not already on
Returns:
response object
'''
response = requests.post(
url=f'{LIFX.url}/v1/lights/{selector}/effects/pulse',
headers=self.headers,
json={
'color': color,
'from_color': from_color,
'period': period,
'cycles': cycles,
'power_on': power_on
}
)
return response
def effects_off(self, selector: str = 'all', power_off: bool = False):
'''
Args:
power_off = also turn the lights off
Returns:
response object
'''
response = requests.post(
url=f'{LIFX.url}/v1/lights/{selector}/effects/off',
headers=self.headers,
json={'power_off': power_off}
)
return response
|
[
"requests.put",
"requests.post",
"requests.get"
] |
[((593, 667), 'requests.get', 'requests.get', ([], {'url': 'f"""{LIFX.url}/v1/lights/{selector}"""', 'headers': 'self.headers'}), "(url=f'{LIFX.url}/v1/lights/{selector}', headers=self.headers)\n", (605, 667), False, 'import requests\n'), ((1262, 1452), 'requests.put', 'requests.put', ([], {'url': 'f"""{LIFX.url}/v1/lights/{selector}/state"""', 'headers': 'self.headers', 'json': "{'power': power, 'color': color, 'brightness': brightness, 'duration':\n duration, 'fast': fast}"}), "(url=f'{LIFX.url}/v1/lights/{selector}/state', headers=self.\n headers, json={'power': power, 'color': color, 'brightness': brightness,\n 'duration': duration, 'fast': fast})\n", (1274, 1452), False, 'import requests\n'), ((1982, 2118), 'requests.put', 'requests.put', ([], {'url': 'f"""{LIFX.url}/v1/lights/states"""', 'headers': 'self.headers', 'json': "{'states': states, 'defaults': defaults, 'fast': fast}"}), "(url=f'{LIFX.url}/v1/lights/states', headers=self.headers, json\n ={'states': states, 'defaults': defaults, 'fast': fast})\n", (1994, 2118), False, 'import requests\n'), ((2789, 2994), 'requests.post', 'requests.post', ([], {'url': 'f"""{LIFX.url}/v1/lights/{selector}/effects/pulse"""', 'headers': 'self.headers', 'json': "{'color': color, 'from_color': from_color, 'period': period, 'cycles':\n cycles, 'power_on': power_on}"}), "(url=f'{LIFX.url}/v1/lights/{selector}/effects/pulse', headers\n =self.headers, json={'color': color, 'from_color': from_color, 'period':\n period, 'cycles': cycles, 'power_on': power_on})\n", (2802, 2994), False, 'import requests\n'), ((3393, 3516), 'requests.post', 'requests.post', ([], {'url': 'f"""{LIFX.url}/v1/lights/{selector}/effects/off"""', 'headers': 'self.headers', 'json': "{'power_off': power_off}"}), "(url=f'{LIFX.url}/v1/lights/{selector}/effects/off', headers=\n self.headers, json={'power_off': power_off})\n", (3406, 3516), False, 'import requests\n')]
|
#!/usr/bin/env python
"""Plot both the standard 'is_a' field and the optional 'part_of' relationship."""
from __future__ import print_function
__copyright__ = "Copyright (C) 2016-2018, <NAME>, <NAME>, All rights reserved."
import os
import sys
import timeit
import datetime
from goatools.base import download_go_basic_obo
from goatools.obo_parser import GODag
from goatools.gosubdag.gosubdag import GoSubDag
REPO = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../..")
def test_gosubdag_relationships(prt=sys.stdout):
"""Plot both the standard 'is_a' field and the 'part_of' relationship."""
goids = set([
"GO:0032501",
"GO:0044707", # alt_id: GO:0032501 # BP 1011 L01 D01 B multicellular organismal process
"GO:0050874",
"GO:0007608", # sensory perception of smell
"GO:0050911"]) # detection of chemical stimulus involved in sensory perception of smell
# Load GO-DAG: Load optional 'relationship'
fin_obo = os.path.join(REPO, "go-basic.obo")
download_go_basic_obo(fin_obo, prt, loading_bar=None)
go2obj_plain = GODag(fin_obo)
go2obj_relat = GODag(fin_obo, optional_attrs=['relationship'])
print("\nCreate GoSubDag with GO DAG containing no relationships.")
tic = timeit.default_timer()
# Create Plot object; Plot both 'is_a' and optional 'part_of' relationship
gosubdag = GoSubDag(goids, go2obj_plain, relationships=False, prt=prt)
# gosubdag.prt_goids(gosubdag.go2obj)
goids_plain = set(gosubdag.go2obj)
tic = _rpt_hms(tic, len(gosubdag.go2obj))
print("\nCreate GoSubDag while IGNORING relationships")
# Create Plot object; Plot both 'is_a' and optional 'part_of' relationship
gosubdag = GoSubDag(goids, go2obj_relat, relationships=False, prt=prt)
# gosubdag.prt_goids(gosubdag.go2obj)
goids_false = set(gosubdag.go2obj)
tic = _rpt_hms(tic, len(gosubdag.go2obj))
assert goids_plain == goids_false
print("\nCreate GoSubDag while loading only the 'part_of' relationship")
gosubdag = GoSubDag(goids, go2obj_relat, relationships=['part_of'], prt=prt)
# gosubdag.prt_goids(gosubdag.go2obj)
goids_part_of = set(gosubdag.go2obj)
tic = _rpt_hms(tic, len(gosubdag.go2obj))
assert goids_plain.intersection(goids_part_of) == goids_plain
assert len(goids_part_of) > len(goids_plain)
print("\nCreate GoSubDag while loading all relationships")
gosubdag = GoSubDag(goids, go2obj_relat, relationships=True, prt=prt)
# gosubdag.prt_goids(gosubdag.go2obj)
goids_true = set(gosubdag.go2obj)
tic = _rpt_hms(tic, len(gosubdag.go2obj))
assert goids_part_of.intersection(goids_true) == goids_part_of
assert len(goids_true) >= len(goids_part_of)
def _rpt_hms(tic, num_goids):
"""Report the elapsed time for particular events."""
elapsed_time = str(datetime.timedelta(seconds=(timeit.default_timer()-tic)))
print("Elapsed HMS: {HMS} {N} GO IDs".format(HMS=elapsed_time, N=num_goids))
return timeit.default_timer()
if __name__ == '__main__':
test_gosubdag_relationships()
# Copyright (C) 2016-2018, <NAME>, <NAME>, All rights reserved.
|
[
"goatools.gosubdag.gosubdag.GoSubDag",
"os.path.abspath",
"goatools.obo_parser.GODag",
"timeit.default_timer",
"goatools.base.download_go_basic_obo",
"os.path.join"
] |
[((990, 1024), 'os.path.join', 'os.path.join', (['REPO', '"""go-basic.obo"""'], {}), "(REPO, 'go-basic.obo')\n", (1002, 1024), False, 'import os\n'), ((1029, 1082), 'goatools.base.download_go_basic_obo', 'download_go_basic_obo', (['fin_obo', 'prt'], {'loading_bar': 'None'}), '(fin_obo, prt, loading_bar=None)\n', (1050, 1082), False, 'from goatools.base import download_go_basic_obo\n'), ((1102, 1116), 'goatools.obo_parser.GODag', 'GODag', (['fin_obo'], {}), '(fin_obo)\n', (1107, 1116), False, 'from goatools.obo_parser import GODag\n'), ((1136, 1183), 'goatools.obo_parser.GODag', 'GODag', (['fin_obo'], {'optional_attrs': "['relationship']"}), "(fin_obo, optional_attrs=['relationship'])\n", (1141, 1183), False, 'from goatools.obo_parser import GODag\n'), ((1267, 1289), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (1287, 1289), False, 'import timeit\n'), ((1384, 1443), 'goatools.gosubdag.gosubdag.GoSubDag', 'GoSubDag', (['goids', 'go2obj_plain'], {'relationships': '(False)', 'prt': 'prt'}), '(goids, go2obj_plain, relationships=False, prt=prt)\n', (1392, 1443), False, 'from goatools.gosubdag.gosubdag import GoSubDag\n'), ((1726, 1785), 'goatools.gosubdag.gosubdag.GoSubDag', 'GoSubDag', (['goids', 'go2obj_relat'], {'relationships': '(False)', 'prt': 'prt'}), '(goids, go2obj_relat, relationships=False, prt=prt)\n', (1734, 1785), False, 'from goatools.gosubdag.gosubdag import GoSubDag\n'), ((2044, 2109), 'goatools.gosubdag.gosubdag.GoSubDag', 'GoSubDag', (['goids', 'go2obj_relat'], {'relationships': "['part_of']", 'prt': 'prt'}), "(goids, go2obj_relat, relationships=['part_of'], prt=prt)\n", (2052, 2109), False, 'from goatools.gosubdag.gosubdag import GoSubDag\n'), ((2433, 2491), 'goatools.gosubdag.gosubdag.GoSubDag', 'GoSubDag', (['goids', 'go2obj_relat'], {'relationships': '(True)', 'prt': 'prt'}), '(goids, go2obj_relat, relationships=True, prt=prt)\n', (2441, 2491), False, 'from goatools.gosubdag.gosubdag import GoSubDag\n'), ((2995, 3017), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (3015, 3017), False, 'import timeit\n'), ((449, 474), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (464, 474), False, 'import os\n'), ((2873, 2895), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (2893, 2895), False, 'import timeit\n')]
|
import torch
import torchvision
from torch.utils.tensorboard import SummaryWriter
from torchvision import datasets, transforms
# Writer will output to ./runs/ directory by default
class TensorboardLogger(object):
def __init__(self, dir):
"""Create a summary writer logging to log_dir."""
self.writer = SummaryWriter(log_dir=dir)
self.time_s = 0
def scalar_summary(self, tag, value, t=-1):
if t == -1:
self.writer.add_scalar(tag, value, global_step=self.time_s)
else :
self.writer.add_scalar(tag, value, global_step=t)
def histogram_summary(self, tag, tensor, t=-1):
if t == -1:
self.writer.add_histogram(tag, tensor, global_step=self.time_s)
else :
self.writer.add_histogram(tag, tensor, global_step=t)
def logger_close(self):
self.writer.close()
def update(self):
self.time_s += 1
|
[
"torch.utils.tensorboard.SummaryWriter"
] |
[((323, 349), 'torch.utils.tensorboard.SummaryWriter', 'SummaryWriter', ([], {'log_dir': 'dir'}), '(log_dir=dir)\n', (336, 349), False, 'from torch.utils.tensorboard import SummaryWriter\n')]
|
import json
import os
from dotenv import load_dotenv
from utils.copier import generate_files_from_template
from utils.gitlab_service import GitlabService
from utils.study_data import fetch_new_study_data
from utils.supabase_service import SupabaseService
load_dotenv()
def generate_repo(sbs: SupabaseService, gs: GitlabService, study_id: str):
study = sbs.fetch_study(study_id)
generate_files_from_template(
study_title=study["title"], path=os.environ.get("PROJECT_PATH")
)
project = gs.create_project(study["title"])
print(f"Created project: {project.name}")
generate_initial_commit(gs, project)
print("Committed initial files")
fetch_new_study_data(sbs, gs, study_id, project, 'create')
print("Fetched newest study data")
return project.id
def generate_initial_commit(gs, project):
commit_actions = [
commit_action(file_path, os.environ.get("PROJECT_PATH"))
for file_path in walk_generated_project(os.environ.get("PROJECT_PATH"))
]
return gs.make_commit(
project=project,
message="Generated project from copier-studyu\n\nhttps://github.com/hpi-studyu/copier-studyu",
actions=commit_actions,
)
def commit_action(file_path: str, project_path: str, action: str = "create"):
return {
"action": action,
"file_path": file_path,
"content": open(os.path.join(project_path, file_path)).read(),
}
def walk_generated_project(project_path):
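    # Walk the generated project and yield each file path relative to the root, normalized to forward slashes.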
for root, dirs, files in os.walk(project_path):
for name in files:
yield os.path.relpath(os.path.join(root, name), project_path).replace(
os.sep, "/"
)
|
[
"os.walk",
"dotenv.load_dotenv",
"os.environ.get",
"utils.study_data.fetch_new_study_data",
"os.path.join"
] |
[((258, 271), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (269, 271), False, 'from dotenv import load_dotenv\n'), ((680, 738), 'utils.study_data.fetch_new_study_data', 'fetch_new_study_data', (['sbs', 'gs', 'study_id', 'project', '"""create"""'], {}), "(sbs, gs, study_id, project, 'create')\n", (700, 738), False, 'from utils.study_data import fetch_new_study_data\n'), ((1514, 1535), 'os.walk', 'os.walk', (['project_path'], {}), '(project_path)\n', (1521, 1535), False, 'import os\n'), ((464, 494), 'os.environ.get', 'os.environ.get', (['"""PROJECT_PATH"""'], {}), "('PROJECT_PATH')\n", (478, 494), False, 'import os\n'), ((901, 931), 'os.environ.get', 'os.environ.get', (['"""PROJECT_PATH"""'], {}), "('PROJECT_PATH')\n", (915, 931), False, 'import os\n'), ((981, 1011), 'os.environ.get', 'os.environ.get', (['"""PROJECT_PATH"""'], {}), "('PROJECT_PATH')\n", (995, 1011), False, 'import os\n'), ((1388, 1425), 'os.path.join', 'os.path.join', (['project_path', 'file_path'], {}), '(project_path, file_path)\n', (1400, 1425), False, 'import os\n'), ((1598, 1622), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (1610, 1622), False, 'import os\n')]
|
from deep_utils.vision.face_detection.main import FaceDetector
from deep_utils.utils.lib_utils.lib_decorators import get_from_config, expand_input, get_elapsed_time, rgb2bgr
from deep_utils.utils.lib_utils.download_utils import download_decorator
from deep_utils.utils.box_utils.boxes import Box, Point
from .config import Config
class HaarcascadeCV2FaceDetector(FaceDetector):
def __init__(self, **kwargs):
super().__init__(name=self.__class__.__name__,
file_path=__file__,
download_variables=("haarcascade_frontalface",
"haarcascade_eye",
"haarcascade_nose",
"landmarks"),
**kwargs)
self.config: Config
@download_decorator
def load_model(self):
import cv2
face_detector = cv2.CascadeClassifier(self.config.haarcascade_frontalface)
nose_detector = cv2.CascadeClassifier(self.config.haarcascade_nose)
eye_detector = cv2.CascadeClassifier(self.config.haarcascade_eye)
landmark_detector = cv2.face.createFacemarkLBF()
landmark_detector.loadModel(self.config.landmarks)
self.model = dict(face=face_detector, nose=nose_detector, eye=eye_detector, landmarks=landmark_detector)
@get_elapsed_time
@expand_input(3)
@get_from_config
@rgb2bgr('gray')
def detect_faces(self,
img,
is_rgb,
scaleFactor=None,
minNeighbors=None,
minSize=None,
maxSize=None,
flags=None,
get_landmarks=True,
get_nose=True,
get_eye=True,
get_time=False,
):
boxes, landmarks_, confidences, eye_poses, nose_poses = [], [], [], [], []
for image in img:
faces = self.model['face'].detectMultiScale(image,
scaleFactor=scaleFactor,
minNeighbors=minNeighbors,
minSize=minSize,
maxSize=maxSize,
flags=flags)
boxes.append(Box.box2box(faces, in_source='CV', to_source='Numpy', in_format='XYWH', to_format='XYXY'))
if get_nose:
nose_pos = self.model['nose'].detectMultiScale(image,
scaleFactor=scaleFactor,
minNeighbors=minNeighbors,
minSize=minSize,
maxSize=maxSize,
flags=flags)
nose_pos = Point.point2point(nose_pos, in_source='CV', to_source='Numpy')
nose_poses.append(nose_pos)
if get_eye:
eye_pos = self.model['eye'].detectMultiScale(image,
scaleFactor=scaleFactor,
minNeighbors=minNeighbors,
minSize=minSize,
maxSize=maxSize,
flags=flags)
eye_pos = Point.point2point(eye_pos, in_source='CV', to_source='Numpy')
eye_poses.append(eye_pos)
if len(faces) != 0 and get_landmarks:
_, landmarks = self.model['landmarks'].fit(image, faces)
landmarks = [Point.point2point(face_landmarks[0].tolist(),
in_source='CV', to_source='Numpy') for face_landmarks in landmarks]
landmarks_.append(landmarks)
return dict(boxes=boxes,
confidences=confidences,
landmarks=landmarks_,
eye_poses=eye_poses,
nose_poses=nose_poses)
|
[
"deep_utils.utils.box_utils.boxes.Box.box2box",
"deep_utils.utils.lib_utils.lib_decorators.rgb2bgr",
"cv2.face.createFacemarkLBF",
"deep_utils.utils.lib_utils.lib_decorators.expand_input",
"cv2.CascadeClassifier",
"deep_utils.utils.box_utils.boxes.Point.point2point"
] |
[((1398, 1413), 'deep_utils.utils.lib_utils.lib_decorators.expand_input', 'expand_input', (['(3)'], {}), '(3)\n', (1410, 1413), False, 'from deep_utils.utils.lib_utils.lib_decorators import get_from_config, expand_input, get_elapsed_time, rgb2bgr\n'), ((1440, 1455), 'deep_utils.utils.lib_utils.lib_decorators.rgb2bgr', 'rgb2bgr', (['"""gray"""'], {}), "('gray')\n", (1447, 1455), False, 'from deep_utils.utils.lib_utils.lib_decorators import get_from_config, expand_input, get_elapsed_time, rgb2bgr\n'), ((932, 990), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['self.config.haarcascade_frontalface'], {}), '(self.config.haarcascade_frontalface)\n', (953, 990), False, 'import cv2\n'), ((1015, 1066), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['self.config.haarcascade_nose'], {}), '(self.config.haarcascade_nose)\n', (1036, 1066), False, 'import cv2\n'), ((1090, 1140), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['self.config.haarcascade_eye'], {}), '(self.config.haarcascade_eye)\n', (1111, 1140), False, 'import cv2\n'), ((1169, 1197), 'cv2.face.createFacemarkLBF', 'cv2.face.createFacemarkLBF', ([], {}), '()\n', (1195, 1197), False, 'import cv2\n'), ((2469, 2562), 'deep_utils.utils.box_utils.boxes.Box.box2box', 'Box.box2box', (['faces'], {'in_source': '"""CV"""', 'to_source': '"""Numpy"""', 'in_format': '"""XYWH"""', 'to_format': '"""XYXY"""'}), "(faces, in_source='CV', to_source='Numpy', in_format='XYWH',\n to_format='XYXY')\n", (2480, 2562), False, 'from deep_utils.utils.box_utils.boxes import Box, Point\n'), ((3096, 3158), 'deep_utils.utils.box_utils.boxes.Point.point2point', 'Point.point2point', (['nose_pos'], {'in_source': '"""CV"""', 'to_source': '"""Numpy"""'}), "(nose_pos, in_source='CV', to_source='Numpy')\n", (3113, 3158), False, 'from deep_utils.utils.box_utils.boxes import Box, Point\n'), ((3725, 3786), 'deep_utils.utils.box_utils.boxes.Point.point2point', 'Point.point2point', (['eye_pos'], {'in_source': '"""CV"""', 'to_source': '"""Numpy"""'}), "(eye_pos, in_source='CV', to_source='Numpy')\n", (3742, 3786), False, 'from deep_utils.utils.box_utils.boxes import Box, Point\n')]
|
import random
import unittest
import pandas as pd
from tests.integration.feature_repos.test_repo_configuration import (
Environment,
parametrize_online_test,
)
@parametrize_online_test
def test_online_retrieval(environment: Environment):
fs = environment.feature_store
full_feature_names = environment.test_repo_config.full_feature_names
sample_drivers = random.sample(environment.driver_entities, 10)
drivers_df = environment.driver_df[
environment.driver_df["driver_id"].isin(sample_drivers)
]
sample_customers = random.sample(environment.customer_entities, 10)
customers_df = environment.customer_df[
environment.customer_df["customer_id"].isin(sample_customers)
]
entity_rows = [
{"driver": d, "customer_id": c}
for (d, c) in zip(sample_drivers, sample_customers)
]
feature_refs = [
"driver_stats:conv_rate",
"driver_stats:avg_daily_trips",
"customer_profile:current_balance",
"customer_profile:avg_passenger_count",
"customer_profile:lifetime_trip_count",
]
unprefixed_feature_refs = [f.rsplit(":", 1)[-1] for f in feature_refs]
online_features = fs.get_online_features(
features=feature_refs,
entity_rows=entity_rows,
full_feature_names=full_feature_names,
)
assert online_features is not None
keys = online_features.to_dict().keys()
assert (
len(keys) == len(feature_refs) + 2
) # Add two for the driver id and the customer id entity keys.
for feature in feature_refs:
if full_feature_names:
assert feature.replace(":", "__") in keys
else:
assert feature.rsplit(":", 1)[-1] in keys
assert "driver_stats" not in keys and "customer_profile" not in keys
online_features_dict = online_features.to_dict()
tc = unittest.TestCase()
for i, entity_row in enumerate(entity_rows):
df_features = get_latest_feature_values_from_dataframes(
drivers_df, customers_df, entity_row
)
assert df_features["customer_id"] == online_features_dict["customer_id"][i]
assert df_features["driver_id"] == online_features_dict["driver_id"][i]
for unprefixed_feature_ref in unprefixed_feature_refs:
tc.assertEqual(
df_features[unprefixed_feature_ref],
online_features_dict[
response_feature_name(unprefixed_feature_ref, full_feature_names)
][i],
)
# Check what happens for missing values
missing_responses_dict = fs.get_online_features(
features=feature_refs,
entity_rows=[{"driver": 0, "customer_id": 0}],
full_feature_names=full_feature_names,
).to_dict()
assert missing_responses_dict is not None
for unprefixed_feature_ref in unprefixed_feature_refs:
tc.assertIsNone(
missing_responses_dict[
response_feature_name(unprefixed_feature_ref, full_feature_names)
][0]
)
def response_feature_name(feature: str, full_feature_names: bool) -> str:
if (
feature in {"current_balance", "avg_passenger_count", "lifetime_trip_count"}
and full_feature_names
):
return f"customer_profile__{feature}"
if feature in {"conv_rate", "avg_daily_trips"} and full_feature_names:
return f"driver_stats__{feature}"
return feature
def get_latest_feature_values_from_dataframes(driver_df, customer_df, entity_row):
driver_rows = driver_df[driver_df["driver_id"] == entity_row["driver"]]
latest_driver_row: pd.DataFrame = driver_rows.loc[
driver_rows["event_timestamp"].idxmax()
].to_dict()
customer_rows = customer_df[customer_df["customer_id"] == entity_row["customer_id"]]
latest_customer_row = customer_rows.loc[
customer_rows["event_timestamp"].idxmax()
].to_dict()
latest_customer_row.update(latest_driver_row)
return latest_customer_row
|
[
"random.sample",
"unittest.TestCase"
] |
[((380, 426), 'random.sample', 'random.sample', (['environment.driver_entities', '(10)'], {}), '(environment.driver_entities, 10)\n', (393, 426), False, 'import random\n'), ((561, 609), 'random.sample', 'random.sample', (['environment.customer_entities', '(10)'], {}), '(environment.customer_entities, 10)\n', (574, 609), False, 'import random\n'), ((1876, 1895), 'unittest.TestCase', 'unittest.TestCase', ([], {}), '()\n', (1893, 1895), False, 'import unittest\n')]
|
# Solving reinforcement learning problems using pgpelib with parallelization
# and with observation normalization
# ==========================================================================
#
# This example demonstrates how to solve locomotion tasks.
# The following techniques are used:
#
# - dynamic population size
# - observation normalization
# - parallelization (using the ray library)
#
# Because we are using both parallelization and observation normalization,
# we will have to synchronize the observation stats between the remote
# workers and the main process.
# We demonstrate how to do this synchronization using ray,
# however the logic is applicable to other parallelization libraries.
from pgpelib import PGPE
from pgpelib.policies import Policy, LinearPolicy, MLPPolicy
from pgpelib.restore import to_torch_module
from pgpelib.runningstat import RunningStat
from typing import Tuple, Iterable
from numbers import Real
import numpy as np
import torch
import gym
import ray
import multiprocessing as mp
from time import sleep
import pickle
# Here is the gym environment to solve.
ENV_NAME = 'Walker2d-v2'
# The environment we are considering to solve is a locomotion problem.
# It defines an "alive bonus" to encourage the agent to stand on its
# feet without falling.
# However, such alive bonuses might drive the evolution process into
# generating agents which focus ONLY on standing on their feet (without
# progressing), just to collect these bonuses.
# We therefore remove this alive bonus by subtracting 1.0 at every
# simulator timestep.
DECREASE_REWARDS_BY = 1.0
# Ray supports stateful parallelization via remote actors.
# An actor is a class instance which lives on different process than
# the main process, and which stores its state.
# Here, we define a remote actor class (which will be instantiated
# multiple times, so that we will be able to use the instances for
# parallelized evaluation of our solutions)
@ray.remote
class Worker:
def __init__(self, policy, decrease_rewards_by):
policy: Policy
self.policy: Policy = policy
self.decrease_rewards_by = decrease_rewards_by
def set_main_obs_stats(self, rs):
# Set the main observation stats of the remote worker.
# The goal of this function is to receive the observation
# stats from the main process.
rs: RunningStat
self.policy.set_main_obs_stats(rs)
def pop_collected_obs_stats(self):
# Pop the observation stats collected by the worker.
# At the time of synchronization, the main process will call
# this method of each remote worker, and update its main
# observation stats with those collected data.
return self.policy.pop_collected_obs_stats()
def run(self, d):
        # Run each solution in the dictionary d.
# The dictionary d is expected in this format:
# { solution_index1: solution1,
# solution_index2: solution2,
# ... }
# and the result will be:
# { solution_index1: (cumulative_reward1, number_of_interactions1)
# solution_index2: (cumulative_reward2, number_of_interactions2)
# ... }
return self.policy.set_params_and_run_all(
d,
decrease_rewards_by=self.decrease_rewards_by
)
# Set the number of workers to be instantiated as the number of CPUs.
NUM_WORKERS = mp.cpu_count()
# List of workers.
# Initialized as a list containing `None`s in the beginning.
WORKERS = [None] * NUM_WORKERS
def prepare_workers(policy, decrease_rewards_by):
# Fill the WORKERS list.
# Initialize the ray library.
ray.init()
# For each index i of WORKERS list, fill the i-th element with a new
# worker instance.
for i in range(len(WORKERS)):
WORKERS[i] = Worker.remote(policy, decrease_rewards_by)
Reals = Iterable[Real]
def evaluate_solutions(solutions: Iterable[np.ndarray]) -> Tuple[Reals, Reals]:
# This function evaluates the given solutions in parallel.
# Get the number of solutions
nslns = len(solutions)
if len(WORKERS) > nslns:
# If the number of workers is greater than the number of solutions
# then the workers that we are going to actually use here
# is the first `nslns` amount of workers, not all of them.
workers = WORKERS[:nslns]
else:
# If the number of solutions is equal to or greater than the
# number of workers, then we will use all of the instantiated
# workers.
workers = WORKERS
# Number of workers that are going to be used now.
nworkers = len(workers)
# To each worker, we aim to send a dictionary, each dictionary being
# in this form:
# { solution_index1: solution1, solution_index2: solution2, ...}
# We keep those dictionaries in the `to_worker` variable.
# to_worker[i] stores the dictionary to be sent to the i-th worker.
to_worker = [dict() for _ in range(nworkers)]
# Iterate over the solutions and assign them one by one to the
# workers.
i_worker = 0
for i, solution in enumerate(solutions):
to_worker[i_worker][i] = solution
i_worker = (i_worker + 1) % nworkers
# Each worker executes the solution dictionary assigned to itself.
# The results are then collected to the list `worker_results`.
# The workers do their tasks in parallel.
worker_results = ray.get(
[
workers[i].run.remote(to_worker[i])
for i in range(nworkers)
]
)
# Allocate a list for storing the fitnesses, and another list for
# storing the number of interactions.
fitnesses = [None] * nslns
num_interactions = [None] * nslns
# For each worker:
for worker_result in worker_results:
# For each solution and its index mentioned in the worker's
# result dictionary:
for i, result in worker_result.items():
fitness, timesteps = result
# Store the i-th solution's fitness in the fitnesses list
fitnesses[i] = fitness
# Store the i-th solution's number of interactions in the
# num_interactions list.
num_interactions[i] = timesteps
# Return the fitnesses and the number of interactions lists.
return fitnesses, num_interactions
def sync_obs_stats(main_policy: Policy):
# This function synchronizes the observation stats of the
# main process and of the main workers.
# Collect observation stats from the remote workers
collected_stats = ray.get(
[
worker.pop_collected_obs_stats.remote()
for worker in WORKERS
]
)
# In the main process, update the main policy's
# observation stats with the stats collected from the remote workers.
for stats in collected_stats:
main_policy.update_main_obs_stats(stats)
# To each worker, send the main policy's up-to-date stats.
ray.get(
[
worker.set_main_obs_stats.remote(
main_policy.get_main_obs_stats()
)
for worker in WORKERS
]
)
def main():
# This is the main function.
# The main evolution procedure will be defined here.
# Make a linear policy.
policy = LinearPolicy(
env_name=ENV_NAME, # Name of the environment
observation_normalization=True
)
# Prepare the workers
prepare_workers(policy, DECREASE_REWARDS_BY)
# Initial solution
x0 = np.zeros(policy.get_parameters_count(), dtype='float32')
# The following are the Walker2d-v2 hyperparameters used in the paper:
# ClipUp: A Simple and Powerful Optimizer for Distribution-based
# Policy Evolution
N = policy.get_parameters_count()
max_speed = 0.015
center_learning_rate = max_speed / 2.0
radius = max_speed * 15
# Compute the stdev_init from the radius:
stdev_init = np.sqrt((radius ** 2) / N)
popsize = 100
popsize_max = 800
# Below we initialize our PGPE solver.
pgpe = PGPE(
solution_length=N,
popsize=popsize,
popsize_max=popsize_max,
num_interactions=int(popsize * 1000 * (3 / 4)),
center_init=x0,
center_learning_rate=center_learning_rate,
optimizer='clipup',
optimizer_config={'max_speed': max_speed},
stdev_init=stdev_init,
stdev_learning_rate=0.1,
stdev_max_change=0.2,
solution_ranking=True,
dtype='float32'
)
num_iterations = 500
# The main loop of the evolutionary computation
for i in range(1, 1 + num_iterations):
total_episodes = 0
while True:
# Get the solutions from the pgpe solver
solutions = pgpe.ask()
# Evaluate the solutions in parallel and get the fitnesses
fitnesses, num_interactions = evaluate_solutions(solutions)
sync_obs_stats(policy)
# Send the pgpe solver the received fitnesses
iteration_finished = pgpe.tell(fitnesses, num_interactions)
total_episodes += len(fitnesses)
if iteration_finished:
break
print(
"Iteration:", i,
" median score:", np.median(fitnesses),
" num.episodes:", total_episodes
)
print("Visualizing the center solution...")
# Get the center solution
center_solution = pgpe.center.copy()
# Make the gym environment for visualizing the center solution
env = gym.make(ENV_NAME)
# Load the center solution into the policy
policy.set_parameters(center_solution)
# Save the policy into a pickle file
with open(__file__ + '.pickle', 'wb') as f:
pickle.dump(policy, f)
# Convert the policy to a PyTorch module
net = to_torch_module(policy)
while True:
print("Please choose: 1> Visualize the agent 2> Quit")
response = input(">>")
if response == '1':
cumulative_reward = 0.0
# Reset the environment, and get the observation of the initial
# state into a variable.
observation = env.reset()
# Visualize the initial state
env.render()
# Main loop of the trajectory
while True:
with torch.no_grad():
action = net(
torch.as_tensor(observation, dtype=torch.float32)
).numpy()
if isinstance(env.action_space, gym.spaces.Box):
interaction = action
elif isinstance(env.action_space, gym.spaces.Discrete):
interaction = int(np.argmax(action))
else:
assert False, "Unknown action space"
observation, reward, done, info = env.step(interaction)
env.render()
cumulative_reward += reward
if done:
break
print("cumulative_reward", cumulative_reward)
elif response == '2':
break
else:
print('Unrecognized response:', repr(response))
if __name__ == "__main__":
main()
|
[
"ray.init",
"pickle.dump",
"gym.make",
"pgpelib.policies.LinearPolicy",
"numpy.argmax",
"numpy.median",
"multiprocessing.cpu_count",
"torch.as_tensor",
"pgpelib.restore.to_torch_module",
"torch.no_grad",
"numpy.sqrt"
] |
[((3508, 3522), 'multiprocessing.cpu_count', 'mp.cpu_count', ([], {}), '()\n', (3520, 3522), True, 'import multiprocessing as mp\n'), ((3754, 3764), 'ray.init', 'ray.init', ([], {}), '()\n', (3762, 3764), False, 'import ray\n'), ((7389, 7452), 'pgpelib.policies.LinearPolicy', 'LinearPolicy', ([], {'env_name': 'ENV_NAME', 'observation_normalization': '(True)'}), '(env_name=ENV_NAME, observation_normalization=True)\n', (7401, 7452), False, 'from pgpelib.policies import Policy, LinearPolicy, MLPPolicy\n'), ((8030, 8054), 'numpy.sqrt', 'np.sqrt', (['(radius ** 2 / N)'], {}), '(radius ** 2 / N)\n', (8037, 8054), True, 'import numpy as np\n'), ((9655, 9673), 'gym.make', 'gym.make', (['ENV_NAME'], {}), '(ENV_NAME)\n', (9663, 9673), False, 'import gym\n'), ((9942, 9965), 'pgpelib.restore.to_torch_module', 'to_torch_module', (['policy'], {}), '(policy)\n', (9957, 9965), False, 'from pgpelib.restore import to_torch_module\n'), ((9863, 9885), 'pickle.dump', 'pickle.dump', (['policy', 'f'], {}), '(policy, f)\n', (9874, 9885), False, 'import pickle\n'), ((9378, 9398), 'numpy.median', 'np.median', (['fitnesses'], {}), '(fitnesses)\n', (9387, 9398), True, 'import numpy as np\n'), ((10451, 10466), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (10464, 10466), False, 'import torch\n'), ((10823, 10840), 'numpy.argmax', 'np.argmax', (['action'], {}), '(action)\n', (10832, 10840), True, 'import numpy as np\n'), ((10526, 10575), 'torch.as_tensor', 'torch.as_tensor', (['observation'], {'dtype': 'torch.float32'}), '(observation, dtype=torch.float32)\n', (10541, 10575), False, 'import torch\n')]
|
import rospy
from std_msgs.msg import String
def callback(data):
rospy.loginfo(rospy.get_caller_id() + "I heard %s", data.data)
def listener():
rospy.init_node('throttled_listener')
rospy.Subscriber('chatter_message_throttled', String, callback)
rospy.Subscriber('chatter_bandwidth_throttled', String, callback)
rospy.spin()
if __name__ == '__main__':
listener()
|
[
"rospy.spin",
"rospy.Subscriber",
"rospy.init_node",
"rospy.get_caller_id"
] |
[((154, 191), 'rospy.init_node', 'rospy.init_node', (['"""throttled_listener"""'], {}), "('throttled_listener')\n", (169, 191), False, 'import rospy\n'), ((197, 260), 'rospy.Subscriber', 'rospy.Subscriber', (['"""chatter_message_throttled"""', 'String', 'callback'], {}), "('chatter_message_throttled', String, callback)\n", (213, 260), False, 'import rospy\n'), ((265, 330), 'rospy.Subscriber', 'rospy.Subscriber', (['"""chatter_bandwidth_throttled"""', 'String', 'callback'], {}), "('chatter_bandwidth_throttled', String, callback)\n", (281, 330), False, 'import rospy\n'), ((336, 348), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (346, 348), False, 'import rospy\n'), ((84, 105), 'rospy.get_caller_id', 'rospy.get_caller_id', ([], {}), '()\n', (103, 105), False, 'import rospy\n')]
|
# Copyright 2013-2016 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
from mock import patch, Mock
from cassandra import ConsistencyLevel, DriverException, Timeout, Unavailable, RequestExecutionException, ReadTimeout, WriteTimeout, CoordinationFailure, ReadFailure, WriteFailure, FunctionFailure, AlreadyExists,\
InvalidRequest, Unauthorized, AuthenticationFailed, OperationTimedOut, UnsupportedOperation, RequestValidationException, ConfigurationException
from cassandra.cluster import _Scheduler, Session, Cluster
from cassandra.policies import HostDistance
from cassandra.query import SimpleStatement
class ExceptionTypeTest(unittest.TestCase):
def test_exception_types(self):
"""
PYTHON-443
Sanity check to ensure we don't unintentionally change class hierarchy of exception types
"""
self.assertTrue(issubclass(Unavailable, DriverException))
self.assertTrue(issubclass(Unavailable, RequestExecutionException))
self.assertTrue(issubclass(ReadTimeout, DriverException))
self.assertTrue(issubclass(ReadTimeout, RequestExecutionException))
self.assertTrue(issubclass(ReadTimeout, Timeout))
self.assertTrue(issubclass(WriteTimeout, DriverException))
self.assertTrue(issubclass(WriteTimeout, RequestExecutionException))
self.assertTrue(issubclass(WriteTimeout, Timeout))
self.assertTrue(issubclass(CoordinationFailure, DriverException))
self.assertTrue(issubclass(CoordinationFailure, RequestExecutionException))
self.assertTrue(issubclass(ReadFailure, DriverException))
self.assertTrue(issubclass(ReadFailure, RequestExecutionException))
self.assertTrue(issubclass(ReadFailure, CoordinationFailure))
self.assertTrue(issubclass(WriteFailure, DriverException))
self.assertTrue(issubclass(WriteFailure, RequestExecutionException))
self.assertTrue(issubclass(WriteFailure, CoordinationFailure))
self.assertTrue(issubclass(FunctionFailure, DriverException))
self.assertTrue(issubclass(FunctionFailure, RequestExecutionException))
self.assertTrue(issubclass(RequestValidationException, DriverException))
self.assertTrue(issubclass(ConfigurationException, DriverException))
self.assertTrue(issubclass(ConfigurationException, RequestValidationException))
self.assertTrue(issubclass(AlreadyExists, DriverException))
self.assertTrue(issubclass(AlreadyExists, RequestValidationException))
self.assertTrue(issubclass(AlreadyExists, ConfigurationException))
self.assertTrue(issubclass(InvalidRequest, DriverException))
self.assertTrue(issubclass(InvalidRequest, RequestValidationException))
self.assertTrue(issubclass(Unauthorized, DriverException))
self.assertTrue(issubclass(Unauthorized, RequestValidationException))
self.assertTrue(issubclass(AuthenticationFailed, DriverException))
self.assertTrue(issubclass(OperationTimedOut, DriverException))
self.assertTrue(issubclass(UnsupportedOperation, DriverException))
class ClusterTest(unittest.TestCase):
def test_invalid_contact_point_types(self):
with self.assertRaises(ValueError):
Cluster(contact_points=[None], protocol_version=4, connect_timeout=1)
with self.assertRaises(TypeError):
Cluster(contact_points="not a sequence", protocol_version=4, connect_timeout=1)
def test_requests_in_flight_threshold(self):
d = HostDistance.LOCAL
mn = 3
mx = 5
c = Cluster(protocol_version=2)
c.set_min_requests_per_connection(d, mn)
c.set_max_requests_per_connection(d, mx)
# min underflow, max, overflow
for n in (-1, mx, 127):
self.assertRaises(ValueError, c.set_min_requests_per_connection, d, n)
# max underflow, under min, overflow
for n in (0, mn, 128):
self.assertRaises(ValueError, c.set_max_requests_per_connection, d, n)
class SchedulerTest(unittest.TestCase):
# TODO: this suite could be expanded; for now just adding a test covering a ticket
@patch('time.time', return_value=3) # always queue at same time
@patch('cassandra.cluster._Scheduler.run') # don't actually run the thread
def test_event_delay_timing(self, *_):
"""
Schedule something with a time collision to make sure the heap comparison works
PYTHON-473
"""
sched = _Scheduler(None)
sched.schedule(0, lambda: None)
        sched.schedule(0, lambda: None)  # pre-473: "TypeError: unorderable types: function() < function()"
class SessionTest(unittest.TestCase):
# TODO: this suite could be expanded; for now just adding a test covering a PR
@patch('cassandra.cluster.ResponseFuture._make_query_plan')
def test_default_serial_consistency_level(self, *_):
"""
Make sure default_serial_consistency_level passes through to a query message.
Also make sure Statement.serial_consistency_level overrides the default.
PR #510
"""
s = Session(Mock(protocol_version=4), [])
# default is None
self.assertIsNone(s.default_serial_consistency_level)
sentinel = 1001
for cl in (None, ConsistencyLevel.LOCAL_SERIAL, ConsistencyLevel.SERIAL, sentinel):
s.default_serial_consistency_level = cl
# default is passed through
f = s._create_response_future(query='', parameters=[], trace=False, custom_payload={}, timeout=100)
self.assertEqual(f.message.serial_consistency_level, cl)
# any non-None statement setting takes precedence
for cl_override in (ConsistencyLevel.LOCAL_SERIAL, ConsistencyLevel.SERIAL):
f = s._create_response_future(SimpleStatement(query_string='', serial_consistency_level=cl_override), parameters=[], trace=False, custom_payload={}, timeout=100)
self.assertEqual(s.default_serial_consistency_level, cl)
self.assertEqual(f.message.serial_consistency_level, cl_override)
|
[
"mock.patch",
"cassandra.cluster._Scheduler",
"cassandra.query.SimpleStatement",
"mock.Mock",
"cassandra.cluster.Cluster"
] |
[((4756, 4790), 'mock.patch', 'patch', (['"""time.time"""'], {'return_value': '(3)'}), "('time.time', return_value=3)\n", (4761, 4790), False, 'from mock import patch, Mock\n'), ((4825, 4866), 'mock.patch', 'patch', (['"""cassandra.cluster._Scheduler.run"""'], {}), "('cassandra.cluster._Scheduler.run')\n", (4830, 4866), False, 'from mock import patch, Mock\n'), ((5386, 5444), 'mock.patch', 'patch', (['"""cassandra.cluster.ResponseFuture._make_query_plan"""'], {}), "('cassandra.cluster.ResponseFuture._make_query_plan')\n", (5391, 5444), False, 'from mock import patch, Mock\n'), ((4182, 4209), 'cassandra.cluster.Cluster', 'Cluster', ([], {'protocol_version': '(2)'}), '(protocol_version=2)\n', (4189, 4209), False, 'from cassandra.cluster import _Scheduler, Session, Cluster\n'), ((5091, 5107), 'cassandra.cluster._Scheduler', '_Scheduler', (['None'], {}), '(None)\n', (5101, 5107), False, 'from cassandra.cluster import _Scheduler, Session, Cluster\n'), ((3854, 3923), 'cassandra.cluster.Cluster', 'Cluster', ([], {'contact_points': '[None]', 'protocol_version': '(4)', 'connect_timeout': '(1)'}), '(contact_points=[None], protocol_version=4, connect_timeout=1)\n', (3861, 3923), False, 'from cassandra.cluster import _Scheduler, Session, Cluster\n'), ((3979, 4058), 'cassandra.cluster.Cluster', 'Cluster', ([], {'contact_points': '"""not a sequence"""', 'protocol_version': '(4)', 'connect_timeout': '(1)'}), "(contact_points='not a sequence', protocol_version=4, connect_timeout=1)\n", (3986, 4058), False, 'from cassandra.cluster import _Scheduler, Session, Cluster\n'), ((5730, 5754), 'mock.Mock', 'Mock', ([], {'protocol_version': '(4)'}), '(protocol_version=4)\n', (5734, 5754), False, 'from mock import patch, Mock\n'), ((6438, 6508), 'cassandra.query.SimpleStatement', 'SimpleStatement', ([], {'query_string': '""""""', 'serial_consistency_level': 'cl_override'}), "(query_string='', serial_consistency_level=cl_override)\n", (6453, 6508), False, 'from cassandra.query import SimpleStatement\n')]
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass
from typing import Callable
import torch
from flash.core.data.io.input_transform import InputTransform
from flash.core.data.transforms import ApplyToKeys
from flash.core.utilities.imports import _KORNIA_AVAILABLE, _PYTORCHVIDEO_AVAILABLE, requires
if _KORNIA_AVAILABLE:
import kornia.augmentation as K
if _PYTORCHVIDEO_AVAILABLE:
from pytorchvideo.transforms import UniformTemporalSubsample
from torchvision.transforms import CenterCrop, Compose, RandomCrop
else:
ClipSampler, LabeledVideoDataset, EncodedVideo, ApplyTransformToKey = None, None, None, None
@requires("video")
@dataclass
class VideoClassificationInputTransform(InputTransform):
image_size: int = 244
temporal_sub_sample: int = 8
mean: torch.Tensor = torch.tensor([0.45, 0.45, 0.45])
std: torch.Tensor = torch.tensor([0.225, 0.225, 0.225])
data_format: str = "BCTHW"
same_on_frame: bool = False
def per_sample_transform(self) -> Callable:
if self.training:
per_sample_transform = [RandomCrop(self.image_size, pad_if_needed=True)]
else:
per_sample_transform = [CenterCrop(self.image_size)]
return ApplyToKeys(
"video", Compose([UniformTemporalSubsample(self.temporal_sub_sample)] + per_sample_transform)
)
def per_batch_transform_on_device(self) -> Callable:
return ApplyToKeys(
"video",
K.VideoSequential(
K.Normalize(self.mean, self.std),
data_format=self.data_format,
same_on_frame=self.same_on_frame,
),
)
|
[
"pytorchvideo.transforms.UniformTemporalSubsample",
"kornia.augmentation.Normalize",
"torchvision.transforms.CenterCrop",
"torch.tensor",
"flash.core.utilities.imports.requires",
"torchvision.transforms.RandomCrop"
] |
[((1200, 1217), 'flash.core.utilities.imports.requires', 'requires', (['"""video"""'], {}), "('video')\n", (1208, 1217), False, 'from flash.core.utilities.imports import _KORNIA_AVAILABLE, _PYTORCHVIDEO_AVAILABLE, requires\n'), ((1371, 1403), 'torch.tensor', 'torch.tensor', (['[0.45, 0.45, 0.45]'], {}), '([0.45, 0.45, 0.45])\n', (1383, 1403), False, 'import torch\n'), ((1428, 1463), 'torch.tensor', 'torch.tensor', (['[0.225, 0.225, 0.225]'], {}), '([0.225, 0.225, 0.225])\n', (1440, 1463), False, 'import torch\n'), ((1638, 1685), 'torchvision.transforms.RandomCrop', 'RandomCrop', (['self.image_size'], {'pad_if_needed': '(True)'}), '(self.image_size, pad_if_needed=True)\n', (1648, 1685), False, 'from torchvision.transforms import CenterCrop, Compose, RandomCrop\n'), ((1737, 1764), 'torchvision.transforms.CenterCrop', 'CenterCrop', (['self.image_size'], {}), '(self.image_size)\n', (1747, 1764), False, 'from torchvision.transforms import CenterCrop, Compose, RandomCrop\n'), ((2065, 2097), 'kornia.augmentation.Normalize', 'K.Normalize', (['self.mean', 'self.std'], {}), '(self.mean, self.std)\n', (2076, 2097), True, 'import kornia.augmentation as K\n'), ((1825, 1875), 'pytorchvideo.transforms.UniformTemporalSubsample', 'UniformTemporalSubsample', (['self.temporal_sub_sample'], {}), '(self.temporal_sub_sample)\n', (1849, 1875), False, 'from pytorchvideo.transforms import UniformTemporalSubsample\n')]
|
import argparse
import configparser
import copy
import datetime
import json
import os
import random
import re
import sys
import time
from collections import Counter, defaultdict
from contextlib import contextmanager
from io import StringIO
from threading import current_thread
import networkx as nx
import pandas as pd
import penman as pn
import requests
import streamlit as st
import streamlit.components.v1 as components
from potato.dataset.utils import default_pn_to_graph
from potato.graph_extractor.extract import FeatureEvaluator
from potato.models.trainer import GraphTrainer
from PIL import Image
from st_aggrid import AgGrid, DataReturnMode, GridOptionsBuilder, GridUpdateMode, JsCode
from streamlit.report_thread import REPORT_CONTEXT_ATTR_NAME
from tuw_nlp.grammar.text_to_4lang import TextTo4lang
from tuw_nlp.graph.fourlang import FourLang
from tuw_nlp.graph.utils import GraphFormulaMatcher, pn_to_graph, read_alto_output
if "false_graph_number" not in st.session_state:
st.session_state.false_graph_number = 0
if "true_graph_number" not in st.session_state:
st.session_state.true_graph_number = 0
if "false_neg_number" not in st.session_state:
st.session_state.false_neg_number = 0
if "predicted_num" not in st.session_state:
st.session_state.predicted_num = 0
if "whole_accuracy" not in st.session_state:
st.session_state.whole_accuracy = []
if "df_statistics" not in st.session_state:
st.session_state.df_statistics = pd.DataFrame
if "val_dataframe" not in st.session_state:
st.session_state.val_dataframe = pd.DataFrame
if "whole_accuracy_val" not in st.session_state:
st.session_state.whole_accuracy_val = []
if "feature_df" not in st.session_state:
st.session_state.feature_df = pd.DataFrame
if "clustered_words_path" not in st.session_state:
st.session_state.clustered_words_path = None
if "features" not in st.session_state:
st.session_state.features = {}
if "suggested_features" not in st.session_state:
st.session_state.suggested_features = {}
if "trained" not in st.session_state:
st.session_state.trained = False
if "ml_feature" not in st.session_state:
st.session_state.ml_feature = None
if "sens" not in st.session_state:
st.session_state.sens = []
if "min_edge" not in st.session_state:
st.session_state.min_edge = 0
if "rows_to_delete" not in st.session_state:
st.session_state.rows_to_delete = []
if "rls_after_delete" not in st.session_state:
st.session_state.rls_after_delete = []
def rerun():
raise st.experimental_rerun()
@contextmanager
def st_redirect(src, dst):
placeholder = st.empty()
output_func = getattr(placeholder, dst)
with StringIO() as buffer:
old_write = src.write
def new_write(b):
if getattr(current_thread(), REPORT_CONTEXT_ATTR_NAME, None):
buffer.write(b)
output_func(buffer.getvalue())
else:
old_write(b)
try:
src.write = new_write
yield
finally:
src.write = old_write
@contextmanager
def st_stdout(dst):
with st_redirect(sys.stdout, dst):
yield
def to_dot(graph, marked_nodes=set(), integ=False):
lines = ["digraph finite_state_machine {", "\tdpi=70;"]
# lines.append('\tordering=out;')
# sorting everything to make the process deterministic
node_lines = []
node_to_name = {}
for node, n_data in graph.nodes(data=True):
if integ:
d_node = d_clean(str(node))
else:
d_node = d_clean(n_data["name"]) if n_data["name"] else "None"
printname = d_node
node_to_name[node] = printname
if "expanded" in n_data and n_data["expanded"] and printname in marked_nodes:
node_line = '\t{0} [shape = circle, label = "{1}", \
style=filled, fillcolor=purple];'.format(
d_node, printname
).replace(
"-", "_"
)
elif "expanded" in n_data and n_data["expanded"]:
node_line = '\t{0} [shape = circle, label = "{1}", \
style="filled"];'.format(
d_node, printname
).replace(
"-", "_"
)
elif "fourlang" in n_data and n_data["fourlang"]:
node_line = '\t{0} [shape = circle, label = "{1}", \
style="filled", fillcolor=red];'.format(
d_node, printname
).replace(
"-", "_"
)
elif "substituted" in n_data and n_data["substituted"]:
node_line = '\t{0} [shape = circle, label = "{1}", \
style="filled"];'.format(
d_node, printname
).replace(
"-", "_"
)
elif printname in marked_nodes:
node_line = '\t{0} [shape = circle, label = "{1}", style=filled, fillcolor=lightblue];'.format(
d_node, printname
).replace(
"-", "_"
)
else:
node_line = '\t{0} [shape = circle, label = "{1}"];'.format(
d_node, printname
).replace("-", "_")
node_lines.append(node_line)
lines += sorted(node_lines)
edge_lines = []
for u, v, edata in graph.edges(data=True):
if "color" in edata:
d_node1 = node_to_name[u].replace("-", "_")
d_node2 = node_to_name[v].replace("-", "_")
edge_lines.append(
'\t{0} -> {1} [ label = "{2}" ];'.format(
d_node1, d_node2, edata["color"]
)
)
lines += sorted(edge_lines)
lines.append("}")
return "\n".join(lines)
def save_ruleset(path, features):
with open(path, "w+") as f:
json.dump(features, f)
def d_clean(string):
s = string
for c in "\\=@-,'\".!:;<>/{}[]()#^?":
s = s.replace(c, "_")
s = s.replace("$", "_dollars")
s = s.replace("%", "_percent")
s = s.replace("|", " ")
s = s.replace("*", " ")
if s == "#":
s = "_number"
keywords = ("graph", "node", "strict", "edge")
if re.match("^[0-9]", s) or s in keywords:
s = "X" + s
return s
def get_df_from_rules(rules, negated_rules):
data = {"rules": rules, "negated_rules": negated_rules}
# Create DataFrame.
df = pd.DataFrame(data)
return df
def save_after_modify(hand_made_rules, classes):
st.session_state.features[classes] = copy.deepcopy(
st.session_state.rls_after_delete
)
st.session_state.feature_df = get_df_from_rules(
[";".join(feat[0]) for feat in st.session_state.features[classes]],
[";".join(feat[1]) for feat in st.session_state.features[classes]],
)
save_rules = hand_made_rules or "saved_features.json"
save_ruleset(save_rules, st.session_state.features)
st.session_state.rows_to_delete = []
rerun()
@st.cache(allow_output_mutation=True)
def load_text_to_4lang():
tfl = TextTo4lang("en", "en_nlp_cache")
return tfl
@st.cache()
def init_evaluator():
return FeatureEvaluator()
@st.cache(allow_output_mutation=True)
def read_train(path):
return pd.read_pickle(path)
def save_dataframe(data, path):
data.to_pickle(path)
@st.cache(allow_output_mutation=True)
def read_val(path):
return pd.read_pickle(path)
def train_df(df, min_edge=0):
with st_stdout("code"):
trainer = GraphTrainer(df)
features = trainer.prepare_and_train(min_edge=min_edge)
return features
def rule_chooser():
option = st.selectbox("Choose from the rules", st.session_state.sens)
G, _ = default_pn_to_graph(option.split(";")[0])
text_G, _ = default_pn_to_graph(option.split(";")[0])
st.graphviz_chart(to_dot(text_G), use_container_width=True)
nodes = [d_clean(n[1]["name"].split("_")[0]) for n in text_G.nodes(data=True)]
return nodes, option
def annotate_df(predicted):
for i, pred in enumerate(predicted):
if pred:
st.session_state.df.at[i, "label"] = st.session_state.inverse_labels[1]
st.session_state.df.at[i, "applied_rules"] = pred
else:
st.session_state.df.at[i, "applied_rules"] = []
if st.session_state.df.loc[i, "annotated"] == False:
st.session_state.df.at[i, "label"] = ""
def show_ml_feature(classes, hand_made_rules):
st.markdown(
f"<span>Feature: {st.session_state.ml_feature[0]}, Precision: <b>{st.session_state.ml_feature[1]:.3f}</b>, \
Recall: <b>{st.session_state.ml_feature[2]:.3f}</b>, Fscore: <b>{st.session_state.ml_feature[3]:.3f}</b>, \
Support: <b>{st.session_state.ml_feature[4]}</b></span>",
unsafe_allow_html=True,
)
accept_rule = st.button("Accept")
decline_rule = st.button("Decline")
if accept_rule:
st.session_state.features[classes].append(st.session_state.ml_feature[0])
st.session_state.ml_feature = None
if st.session_state.features[classes]:
st.session_state.feature_df = get_df_from_rules(
[";".join(feat[0]) for feat in st.session_state.features[classes]],
[";".join(feat[1]) for feat in st.session_state.features[classes]],
)
save_rules = hand_made_rules or "saved_features.json"
save_ruleset(save_rules, st.session_state.features)
rerun()
elif decline_rule:
st.session_state.ml_feature = None
rerun()
def extract_data_from_dataframe(option):
fp_graphs = st.session_state.df_statistics.iloc[
st.session_state.sens.index(option)
].False_positive_graphs
fp_sentences = st.session_state.df_statistics.iloc[
st.session_state.sens.index(option)
].False_positive_sens
tp_graphs = st.session_state.df_statistics.iloc[
st.session_state.sens.index(option)
].True_positive_graphs
tp_sentences = st.session_state.df_statistics.iloc[
st.session_state.sens.index(option)
].True_positive_sens
fn_graphs = st.session_state.df_statistics.iloc[
st.session_state.sens.index(option)
].False_negative_graphs
fn_sentences = st.session_state.df_statistics.iloc[
st.session_state.sens.index(option)
].False_negative_sens
prec = st.session_state.df_statistics.iloc[
st.session_state.sens.index(option)
].Precision
recall = st.session_state.df_statistics.iloc[
st.session_state.sens.index(option)
].Recall
fscore = st.session_state.df_statistics.iloc[
st.session_state.sens.index(option)
].Fscore
support = st.session_state.df_statistics.iloc[
st.session_state.sens.index(option)
].Support
predicted = st.session_state.df_statistics.iloc[
st.session_state.sens.index(option)
].Predicted
return (
fn_graphs,
fn_sentences,
fp_graphs,
fp_sentences,
fscore,
prec,
predicted,
recall,
support,
tp_graphs,
tp_sentences,
)
def graph_viewer(type, graphs, sentences, nodes):
graph_type = {
"FP": st.session_state.false_graph_number,
"TP": st.session_state.true_graph_number,
"FN": st.session_state.false_neg_number,
}
if st.button(f"Previous {type}"):
graph_type[type] = max(0, graph_type[type] - 1)
if st.button(f"Next {type}"):
graph_type[type] = min(
graph_type[type] + 1,
len(graphs) - 1,
)
if graph_type[type] > len(graphs) - 1:
graph_type[type] = 0
st.markdown(
f"<span><b>Sentence:</b> {sentences[graph_type[type]][0]}</span>",
unsafe_allow_html=True,
)
st.markdown(
f"<span><b>Gold label:</b> {sentences[graph_type[type]][1]}</span>",
unsafe_allow_html=True,
)
st.text(f"{type}: {len(graphs)}")
current_graph = graphs[graph_type[type]]
st.graphviz_chart(
to_dot(
current_graph,
marked_nodes=set(nodes),
),
use_container_width=True,
)
if type == "FP":
st.session_state.false_graph_number = graph_type[type]
elif type == "TP":
st.session_state.true_graph_number = graph_type[type]
elif type == "FN":
st.session_state.false_neg_number = graph_type[type]
def add_rule_manually(classes, hand_made_rules):
text = st.text_area("You can add a new rule here manually")
negated_text = st.text_area("You can modify the negated features here")
agree = st.button("Add rule to the ruleset")
if agree:
if not negated_text.strip():
negated_features = []
else:
negated_features = negated_text.split(";")
st.session_state.features[classes].append([[text], negated_features, classes])
if st.session_state.features[classes]:
st.session_state.feature_df = get_df_from_rules(
[";".join(feat[0]) for feat in st.session_state.features[classes]],
[";".join(feat[1]) for feat in st.session_state.features[classes]],
)
save_rules = hand_made_rules or "saved_features.json"
save_ruleset(save_rules, st.session_state.features)
rerun()
st.markdown(
f"<span><b>Or get suggestions by our ML!</b></span>",
unsafe_allow_html=True,
)
def rank_and_suggest(classes, data, evaluator):
suggest_new_rule = st.button("suggest new rules")
if suggest_new_rule:
if (
not st.session_state.df_statistics.empty
and st.session_state.sens
and st.session_state.suggested_features[classes]
):
features_to_rank = st.session_state.suggested_features[classes][:5]
with st.spinner("Ranking rules..."):
features_ranked = evaluator.rank_features(
classes,
features_to_rank,
data,
st.session_state.df_statistics.iloc[0].False_negative_indices,
)
suggested_feature = features_ranked[0]
st.session_state.suggested_features[classes].remove(suggested_feature[0])
st.session_state.ml_feature = suggested_feature
def supervised_mode(
evaluator, data, val_data, graph_format, feature_path, hand_made_rules
):
if hand_made_rules:
with open(hand_made_rules) as f:
st.session_state.features = json.load(f)
if not feature_path and not st.session_state.trained:
st.sidebar.title("Train your dataset!")
show_app = st.sidebar.button("Train")
st.session_state.min_edge = st.sidebar.number_input(
"Min edge in features", min_value=0, max_value=3, value=0, step=1
)
if show_app:
st.session_state.suggested_features = train_df(
data, st.session_state.min_edge
)
st.session_state.trained = True
with st_stdout("success"):
print("Success, your dataset is trained, wait for the app to load..")
time.sleep(3)
rerun()
st.markdown(
"<h3 style='text-align: center; color: black;'>Your dataset is shown below, click the train button to train your dataset!</h3>",
unsafe_allow_html=True,
)
sample_df = AgGrid(data, width="100%", fit_columns_on_grid_load=True)
st.write("label distribution:")
st.bar_chart(data.groupby("label").size())
        st.write("sentence lengths:")
st.bar_chart(data.text.str.len())
st.write("common words:")
st.bar_chart(
pd.Series(" ".join(data["text"]).lower().split()).value_counts()[:100]
)
if st.session_state.trained or feature_path:
col1, col2 = st.columns(2)
if (
feature_path
and os.path.exists(feature_path)
and not st.session_state.suggested_features
):
with open(feature_path) as f:
st.session_state.suggested_features = json.load(f)
if not st.session_state.features:
for key in st.session_state.suggested_features:
pop_len = (
5
if len(st.session_state.suggested_features[key]) > 5
else len(st.session_state.suggested_features[key])
)
st.session_state.features[key] = [
st.session_state.suggested_features[key].pop(0)
for _ in range(pop_len)
]
col1.header("Rule to apply")
col2.header("Graphs and results")
# if graph_format == "fourlang":
# tfl = load_text_to_4lang()
with col1:
classes = st.selectbox(
"Choose class", list(st.session_state.features.keys())
)
st.session_state.feature_df = get_df_from_rules(
[";".join(feat[0]) for feat in st.session_state.features[classes]],
[";".join(feat[1]) for feat in st.session_state.features[classes]],
)
with st.form("example form") as f:
gb = GridOptionsBuilder.from_dataframe(st.session_state.feature_df)
# make all columns editable
gb.configure_columns(["rules", "negated_rules"], editable=True)
gb.configure_selection(
"multiple",
use_checkbox=True,
groupSelectsChildren=True,
groupSelectsFiltered=True,
                    # pre_selected_rows=[1,2]
)
go = gb.build()
ag = AgGrid(
st.session_state.feature_df,
gridOptions=go,
key="grid1",
allow_unsafe_jscode=True,
reload_data=True,
update_mode=GridUpdateMode.MODEL_CHANGED
| GridUpdateMode.VALUE_CHANGED,
width="100%",
theme="material",
fit_columns_on_grid_load=True,
)
delete_or_train = st.radio(
"Delete or Train selected rules", ("none", "delete", "train")
)
submit = st.form_submit_button(label="save updates")
evaluate = st.form_submit_button(label="evaluate selected")
if evaluate:
feature_list = []
selected_rules = (
ag["selected_rows"]
if ag["selected_rows"]
else ag["data"].to_dict(orient="records")
)
for rule in selected_rules:
positive_rules = (
rule["rules"].split(";")
if "rules" in rule and rule["rules"].strip()
else []
)
negated_rules = (
rule["negated_rules"].split(";")
if "negated_rules" in rule and rule["negated_rules"].strip()
else []
)
feature_list.append(
[
positive_rules,
negated_rules,
classes,
]
)
st.session_state.sens = [";".join(feat[0]) for feat in feature_list]
with st.spinner("Evaluating rules..."):
(
st.session_state.df_statistics,
st.session_state.whole_accuracy,
) = evaluator.evaluate_feature(
classes, feature_list, data, graph_format
)
(
st.session_state.val_dataframe,
st.session_state.whole_accuracy_val,
) = evaluator.evaluate_feature(
classes,
feature_list,
val_data,
graph_format,
)
st.success("Done!")
rerun()
if submit:
delete = delete_or_train == "delete"
train = delete_or_train == "train"
st.session_state.rows_to_delete = [
r["rules"] for r in ag["selected_rows"]
]
st.session_state.rls_after_delete = []
negated_list = ag["data"]["negated_rules"].tolist()
feature_list = []
for i, rule in enumerate(ag["data"]["rules"].tolist()):
if not negated_list[i].strip():
feature_list.append([rule.split(";"), [], classes])
else:
feature_list.append(
[
rule.split(";"),
negated_list[i].strip().split(";"),
classes,
]
)
if st.session_state.rows_to_delete and delete:
for r in feature_list:
if ";".join(r[0]) not in st.session_state.rows_to_delete:
st.session_state.rls_after_delete.append(r)
elif st.session_state.rows_to_delete and train:
st.session_state.rls_after_delete = copy.deepcopy(feature_list)
rule_to_train = st.session_state.rows_to_delete[0]
if ";" in rule_to_train or ".*" not in rule_to_train:
st.text("Only single and underspecified rules can be trained!")
else:
selected_words = evaluator.train_feature(
classes, rule_to_train, data, graph_format
)
for f in selected_words:
st.session_state.rls_after_delete.append([[f], [], classes])
else:
st.session_state.rls_after_delete = copy.deepcopy(feature_list)
if st.session_state.rls_after_delete and not delete:
save_after_modify(hand_made_rules, classes)
if st.session_state.rows_to_delete and delete_or_train == "delete":
with st.form("Delete form"):
st.write("The following rules will be deleted, do you accept it?")
st.write(st.session_state.rows_to_delete)
save_button = st.form_submit_button("Accept Delete")
if save_button:
save_after_modify(hand_made_rules, classes)
add_rule_manually(classes, hand_made_rules)
rank_and_suggest(classes, data, evaluator)
if st.session_state.ml_feature:
show_ml_feature(classes, hand_made_rules)
with col2:
if not st.session_state.df_statistics.empty and st.session_state.sens:
if st.session_state.sens:
nodes, option = rule_chooser()
st.markdown(
f"<span>Result of using all the rules: Precision: <b>{st.session_state.whole_accuracy[0]:.3f}</b>, \
Recall: <b>{st.session_state.whole_accuracy[1]:.3f}</b>, Fscore: <b>{st.session_state.whole_accuracy[2]:.3f}</b>, \
Support: <b>{st.session_state.whole_accuracy[3]}</b></span>",
unsafe_allow_html=True,
)
(
fn_graphs,
fn_sentences,
fp_graphs,
fp_sentences,
fscore,
prec,
predicted,
recall,
support,
tp_graphs,
tp_sentences,
) = extract_data_from_dataframe(option)
st.markdown(
f"<span>The rule's result: Precision: <b>{prec:.3f}</b>, Recall: <b>{recall:.3f}</b>, \
Fscore: <b>{fscore:.3f}</b>, Support: <b>{support}</b></span>",
unsafe_allow_html=True,
)
with st.expander("Show validation data", expanded=False):
val_prec = st.session_state.val_dataframe.iloc[
st.session_state.sens.index(option)
].Precision
val_recall = st.session_state.val_dataframe.iloc[
st.session_state.sens.index(option)
].Recall
val_fscore = st.session_state.val_dataframe.iloc[
st.session_state.sens.index(option)
].Fscore
val_support = st.session_state.val_dataframe.iloc[
st.session_state.sens.index(option)
].Support
st.markdown(
f"<span>Result of using all the rules on the validation data: Precision: <b>{st.session_state.whole_accuracy_val[0]:.3f}</b>, \
Recall: <b>{st.session_state.whole_accuracy_val[1]:.3f}</b>, Fscore: <b>{st.session_state.whole_accuracy_val[2]:.3f}</b>, \
Support: <b>{st.session_state.whole_accuracy_val[3]}</b></span>",
unsafe_allow_html=True,
)
st.markdown(
f"<span>The rule's result on the validation data: Precision: <b>{val_prec:.3f}</b>, \
Recall: <b>{val_recall:.3f}</b>, Fscore: <b>{val_fscore:.3f}</b>, \
Support: <b>{val_support}</b></span>",
unsafe_allow_html=True,
)
tp_fp_fn_choice = (
"True Positive graphs",
"False Positive graphs",
"False Negative graphs",
)
tp_fp_fn = st.selectbox(
"Select the graphs you want to view", tp_fp_fn_choice
)
if tp_fp_fn == "False Positive graphs":
if fp_graphs:
graph_viewer("FP", fp_graphs, fp_sentences, nodes)
elif tp_fp_fn == "True Positive graphs":
if tp_graphs:
graph_viewer("TP", tp_graphs, tp_sentences, nodes)
elif tp_fp_fn == "False Negative graphs":
if fn_graphs:
graph_viewer("FN", fn_graphs, fn_sentences, nodes)
def unsupervised_mode(
evaluator, train_data, graph_format, feature_path, hand_made_rules
):
data = read_train(train_data)
if hand_made_rules:
with open(hand_made_rules) as f:
st.session_state.features = json.load(f)
if "df" not in st.session_state:
st.session_state.df = data.copy()
if "annotated" not in st.session_state.df:
st.session_state.df["annotated"] = False
if "applied_rules" not in st.session_state.df:
st.session_state.df["applied_rules"] = [
[] for _ in range(len(st.session_state.df))
]
if "index" not in st.session_state.df:
st.session_state.df.reset_index(level=0, inplace=True)
if "df_to_train" not in st.session_state:
st.session_state.df_to_train = pd.DataFrame
if "applied_rules" not in st.session_state:
st.session_state.applied_rules = []
df_annotated = st.session_state.df[st.session_state.df.annotated == True][
["index", "text", "label", "applied_rules"]
]
df_unannotated = st.session_state.df[st.session_state.df.annotated == False][
["index", "text", "label", "applied_rules"]
]
if "labels" not in st.session_state:
st.text("Before we start, please provide labels you want to train")
user_input = st.text_input("label encoding", "NOT:0,OFF:1")
st.session_state.labels = {
label.split(":")[0]: int(label.split(":")[1])
for label in user_input.split(",")
}
st.write(st.session_state.labels)
st.session_state.inverse_labels = {
v: k for (k, v) in st.session_state.labels.items()
}
else:
st.markdown(
f"<span><b>Annotate samples here:</b></span>",
unsafe_allow_html=True,
)
if st.session_state.applied_rules:
st.markdown(
f"<span>Currently the following rules are applied:</span>",
unsafe_allow_html=True,
)
st.write(st.session_state.applied_rules)
with st.form("annotate form") as f:
gb = GridOptionsBuilder.from_dataframe(df_unannotated)
gb.configure_default_column(
editable=True,
resizable=True,
sorteable=True,
wrapText=True,
autoHeight=True,
)
# make all columns editable
gb.configure_selection(
"multiple",
use_checkbox=True,
groupSelectsChildren=True,
groupSelectsFiltered=True,
)
go = gb.build()
ag = AgGrid(
df_unannotated,
gridOptions=go,
key="grid2",
allow_unsafe_jscode=True,
reload_data=True,
update_mode=GridUpdateMode.MODEL_CHANGED | GridUpdateMode.VALUE_CHANGED,
width="100%",
theme="material",
fit_columns_on_grid_load=True,
)
annotate = st.form_submit_button("Annotate")
if annotate:
if ag["selected_rows"]:
for row in ag["selected_rows"]:
st.session_state.df.loc[
row["index"], "label"
] = st.session_state.inverse_labels[1]
st.session_state.df.loc[row["index"], "annotated"] = True
save_dataframe(st.session_state.df, train_data)
rerun()
st.markdown(
f"<span>Samples you have already annotated:</span>",
unsafe_allow_html=True,
)
with st.form("annotated form") as f:
gb = GridOptionsBuilder.from_dataframe(df_annotated)
gb.configure_default_column(
editable=True,
resizable=True,
sorteable=True,
wrapText=True,
)
# make all columns editable
gb.configure_selection(
"multiple",
use_checkbox=True,
groupSelectsChildren=True,
groupSelectsFiltered=True,
)
go = gb.build()
ag_ann = AgGrid(
df_annotated,
gridOptions=go,
key="grid3",
allow_unsafe_jscode=True,
reload_data=True,
update_mode=GridUpdateMode.MODEL_CHANGED | GridUpdateMode.VALUE_CHANGED,
width="100%",
theme="material",
fit_columns_on_grid_load=True,
)
clear_annotate = st.form_submit_button("Clear annotation")
if clear_annotate:
if ag_ann["selected_rows"]:
for row in ag_ann["selected_rows"]:
st.session_state.df.loc[
row["index"], "label"
] = st.session_state.inverse_labels[1]
st.session_state.df.loc[row["index"], "annotated"] = False
st.session_state.df.loc[row["index"], "label"] = ""
save_dataframe(st.session_state.df, train_data)
rerun()
train = st.button("Train!")
if train:
df_to_train = st.session_state.df.copy()
df_to_train = df_to_train[df_to_train.applied_rules.map(len) == 0]
if not df_to_train.empty:
st.session_state.trained = True
df_to_train["label"] = df_to_train["label"].apply(
lambda x: st.session_state.inverse_labels[0] if not x else x
)
df_to_train["label_id"] = df_to_train["label"].apply(
lambda x: st.session_state.labels[x]
)
positive_size = df_to_train.groupby("label").size()[
st.session_state.inverse_labels[1]
]
df_to_train = df_to_train.groupby("label").sample(
n=positive_size, random_state=1, replace=True
)
st.session_state.suggested_features = train_df(
df_to_train, st.session_state.min_edge
)
st.session_state.df_to_train = df_to_train
st.session_state.df_statistics = pd.DataFrame
for key in st.session_state.suggested_features:
if key not in st.session_state.features:
st.session_state.features[key] = [
st.session_state.suggested_features[key].pop(0)
]
else:
st.session_state.features[key].append(
st.session_state.suggested_features[key].pop(0)
)
else:
st.write("Empty dataframe!")
col1, col2 = st.columns(2)
if st.session_state.trained and st.session_state.suggested_features:
with col1:
if not st.session_state.features:
for key in st.session_state.suggested_features:
st.session_state.features[key] = [
st.session_state.suggested_features[key].pop(0)
]
classes = st.selectbox(
"Choose class", list(st.session_state.features.keys())
)
st.session_state.feature_df = get_df_from_rules(
[";".join(feat[0]) for feat in st.session_state.features[classes]],
[";".join(feat[1]) for feat in st.session_state.features[classes]],
)
with st.form("example form") as f:
gb = GridOptionsBuilder.from_dataframe(st.session_state.feature_df)
# make all columns editable
gb.configure_columns(["rules", "negated_rules"], editable=True)
gb.configure_selection(
"multiple",
use_checkbox=True,
groupSelectsChildren=True,
groupSelectsFiltered=True,
                    # pre_selected_rows=[1,2]
)
go = gb.build()
ag = AgGrid(
st.session_state.feature_df,
gridOptions=go,
key="grid1",
allow_unsafe_jscode=True,
reload_data=True,
update_mode=GridUpdateMode.MODEL_CHANGED
| GridUpdateMode.VALUE_CHANGED,
width="100%",
theme="material",
fit_columns_on_grid_load=True,
)
delete_or_train = st.radio(
"Delete or Train selected rules", ("none", "delete", "train")
)
submit = st.form_submit_button(label="save updates")
evaluate = st.form_submit_button(label="evaluate selected")
annotate = st.form_submit_button(label="annotate based on selected")
feature_list = []
selected_rules = (
ag["selected_rows"]
if ag["selected_rows"]
else ag["data"].to_dict(orient="records")
)
for rule in selected_rules:
positive_rules = (
rule["rules"].split(";")
if "rules" in rule and rule["rules"].strip()
else []
)
negated_rules = (
rule["negated_rules"].split(";")
if "negated_rules" in rule and rule["negated_rules"].strip()
else []
)
feature_list.append(
[
positive_rules,
negated_rules,
classes,
]
)
if evaluate or annotate:
st.session_state.sens = [";".join(feat[0]) for feat in feature_list]
with st.spinner("Evaluating rules..."):
(
st.session_state.df_statistics,
st.session_state.whole_accuracy,
) = evaluator.evaluate_feature(
classes,
feature_list,
st.session_state.df,
graph_format,
)
st.success("Done!")
if annotate:
predicted_rules = [[] for _ in range(len(st.session_state.df))]
st.session_state.applied_rules = st.session_state.sens
for j, opt in enumerate(st.session_state.sens):
predicted = st.session_state.df_statistics.iloc[j].Predicted
predicted_indices = [
i for i, pred in enumerate(predicted) if pred == 1
]
for ind in predicted_indices:
predicted_rules[ind].append(opt)
annotate_df(predicted_rules)
st.session_state.trained = False
rerun()
if submit:
delete = delete_or_train == "delete"
train = delete_or_train == "train"
st.session_state.rows_to_delete = [
r["rules"] for r in ag["selected_rows"]
]
st.session_state.rls_after_delete = []
negated_list = ag["data"]["negated_rules"].tolist()
feature_list = []
for i, rule in enumerate(ag["data"]["rules"].tolist()):
if not negated_list[i].strip():
feature_list.append([rule.split(";"), [], classes])
else:
feature_list.append(
[
rule.split(";"),
negated_list[i].strip().split(";"),
classes,
]
)
if st.session_state.rows_to_delete and delete:
for r in feature_list:
if ";".join(r[0]) not in st.session_state.rows_to_delete:
st.session_state.rls_after_delete.append(r)
elif st.session_state.rows_to_delete and train:
st.session_state.rls_after_delete = copy.deepcopy(feature_list)
rule_to_train = st.session_state.rows_to_delete[0]
if ";" in rule_to_train or ".*" not in rule_to_train:
st.text(
"Only single and underspecified rules can be trained!"
)
else:
selected_words = evaluator.train_feature(
classes,
rule_to_train,
st.session_state.df,
graph_format,
)
for f in selected_words:
st.session_state.rls_after_delete.append(
[[f], [], classes]
)
else:
st.session_state.rls_after_delete = copy.deepcopy(feature_list)
if st.session_state.rls_after_delete and not delete:
save_after_modify(hand_made_rules, classes)
if st.session_state.rows_to_delete and delete_or_train == "delete":
with st.form("Delete form"):
st.write(
"The following rules will be deleted, do you accept it?"
)
st.write(st.session_state.rows_to_delete)
save_button = st.form_submit_button("Accept Delete")
if save_button:
save_after_modify(hand_made_rules, classes)
add_rule_manually(classes, hand_made_rules)
rank_and_suggest(classes, st.session_state.df, evaluator)
if st.session_state.ml_feature:
show_ml_feature(classes, hand_made_rules)
with col2:
if not st.session_state.df_statistics.empty and st.session_state.sens:
if st.session_state.sens:
nodes, option = rule_chooser()
st.markdown(
f"<span>Result of using all the rules: Precision: <b>{st.session_state.whole_accuracy[0]:.3f}</b>, \
Recall: <b>{st.session_state.whole_accuracy[1]:.3f}</b>, Fscore: <b>{st.session_state.whole_accuracy[2]:.3f}</b>, \
Support: <b>{st.session_state.whole_accuracy[3]}</b></span>",
unsafe_allow_html=True,
)
(
fn_graphs,
fn_sentences,
fp_graphs,
fp_sentences,
fscore,
prec,
predicted,
recall,
support,
tp_graphs,
tp_sentences,
) = extract_data_from_dataframe(option)
st.markdown(
f"<span>The rule's result: Precision: <b>{prec:.3f}</b>, Recall: <b>{recall:.3f}</b>, \
Fscore: <b>{fscore:.3f}</b>, Support: <b>{support}</b></span>",
unsafe_allow_html=True,
)
tp_fp_fn_choice = (
"Predicted",
"True Positive graphs",
"False Positive graphs",
"False Negative graphs",
)
tp_fp_fn = st.selectbox(
"Select the option you want to view", tp_fp_fn_choice
)
current_graph = None
if tp_fp_fn == "Predicted":
predicted_inds = [
i for i, pred in enumerate(predicted) if pred == 1
]
if st.button("Previous Predicted"):
st.session_state.predicted_num = max(
0, st.session_state.predicted_num - 1
)
if st.button("Next Predicted"):
st.session_state.predicted_num = min(
st.session_state.predicted_num + 1,
len(predicted_inds) - 1,
)
if st.session_state.predicted_num > len(predicted_inds) - 1:
                        st.session_state.predicted_num = 0
st.markdown(
f"<span><b>Sentence:</b> {st.session_state.df.iloc[predicted_inds[st.session_state.predicted_num]].text}</span>",
unsafe_allow_html=True,
)
st.markdown(
f"<span><b>Gold label:</b> {st.session_state.df.iloc[predicted_inds[st.session_state.predicted_num]].label}</span>",
unsafe_allow_html=True,
)
st.text(f"Predicted: {len(predicted_inds)}")
current_graph = st.session_state.df.iloc[
predicted_inds[st.session_state.predicted_num]
].graph
st.graphviz_chart(
to_dot(
current_graph,
marked_nodes=set(nodes),
),
use_container_width=True,
)
elif tp_fp_fn == "False Positive graphs":
if fp_graphs:
graph_viewer("FP", fp_graphs, fp_sentences, nodes)
elif tp_fp_fn == "True Positive graphs":
if tp_graphs:
graph_viewer("TP", tp_graphs, tp_sentences, nodes)
elif tp_fp_fn == "False Negative graphs":
if fn_graphs:
graph_viewer("FN", fn_graphs, fn_sentences, nodes)
def get_args():
parser = argparse.ArgumentParser(description="")
parser.add_argument("-t", "--train-data", type=str, required=True)
parser.add_argument("-v", "--val-data", type=str)
parser.add_argument(
"-sr",
"--suggested-rules",
default=None,
type=str,
help="Rules extracted automatically from python. If not present, the UI will automatically train it.",
)
parser.add_argument(
"-hr",
"--hand-rules",
default=None,
type=str,
help="Rules extracted with the UI. If provided, the UI will load them.",
)
parser.add_argument("-m", "--mode", default="supervised", type=str)
parser.add_argument("-g", "--graph-format", default="fourlang", type=str)
return parser.parse_args()
def main(args):
st.set_page_config(layout="wide")
st.markdown(
"<h1 style='text-align: center; color: black;'>Rule extraction framework</h1>",
unsafe_allow_html=True,
)
evaluator = init_evaluator()
data = read_train(args.train_data)
if args.val_data:
val_data = read_val(args.val_data)
graph_format = args.graph_format
feature_path = args.suggested_rules
hand_made_rules = args.hand_rules
mode = args.mode
if mode == "supervised":
assert args.val_data
supervised_mode(
evaluator, data, val_data, graph_format, feature_path, hand_made_rules
)
elif mode == "unsupervised":
unsupervised_mode(
evaluator, args.train_data, graph_format, feature_path, hand_made_rules
)
if __name__ == "__main__":
args = get_args()
main(args)
|
[
"streamlit.session_state.sens.index",
"streamlit.experimental_rerun",
"streamlit.text_input",
"argparse.ArgumentParser",
"streamlit.session_state.df.reset_index",
"streamlit.radio",
"streamlit.expander",
"streamlit.sidebar.title",
"streamlit.sidebar.button",
"pandas.DataFrame",
"streamlit.set_page_config",
"streamlit.spinner",
"streamlit.cache",
"streamlit.session_state.rls_after_delete.append",
"os.path.exists",
"streamlit.text_area",
"streamlit.button",
"streamlit.text",
"st_aggrid.GridOptionsBuilder.from_dataframe",
"st_aggrid.AgGrid",
"streamlit.session_state.df.copy",
"threading.current_thread",
"streamlit.session_state.features.keys",
"json.dump",
"copy.deepcopy",
"io.StringIO",
"streamlit.session_state.labels.items",
"re.match",
"streamlit.form_submit_button",
"time.sleep",
"streamlit.empty",
"potato.graph_extractor.extract.FeatureEvaluator",
"streamlit.success",
"streamlit.form",
"streamlit.markdown",
"streamlit.sidebar.number_input",
"streamlit.columns",
"json.load",
"potato.models.trainer.GraphTrainer",
"streamlit.write",
"tuw_nlp.grammar.text_to_4lang.TextTo4lang",
"streamlit.selectbox",
"pandas.read_pickle"
] |
[((6940, 6976), 'streamlit.cache', 'st.cache', ([], {'allow_output_mutation': '(True)'}), '(allow_output_mutation=True)\n', (6948, 6976), True, 'import streamlit as st\n'), ((7065, 7075), 'streamlit.cache', 'st.cache', ([], {}), '()\n', (7073, 7075), True, 'import streamlit as st\n'), ((7131, 7167), 'streamlit.cache', 'st.cache', ([], {'allow_output_mutation': '(True)'}), '(allow_output_mutation=True)\n', (7139, 7167), True, 'import streamlit as st\n'), ((7284, 7320), 'streamlit.cache', 'st.cache', ([], {'allow_output_mutation': '(True)'}), '(allow_output_mutation=True)\n', (7292, 7320), True, 'import streamlit as st\n'), ((2515, 2538), 'streamlit.experimental_rerun', 'st.experimental_rerun', ([], {}), '()\n', (2536, 2538), True, 'import streamlit as st\n'), ((2602, 2612), 'streamlit.empty', 'st.empty', ([], {}), '()\n', (2610, 2612), True, 'import streamlit as st\n'), ((6370, 6388), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (6382, 6388), True, 'import pandas as pd\n'), ((6496, 6544), 'copy.deepcopy', 'copy.deepcopy', (['st.session_state.rls_after_delete'], {}), '(st.session_state.rls_after_delete)\n', (6509, 6544), False, 'import copy\n'), ((7013, 7046), 'tuw_nlp.grammar.text_to_4lang.TextTo4lang', 'TextTo4lang', (['"""en"""', '"""en_nlp_cache"""'], {}), "('en', 'en_nlp_cache')\n", (7024, 7046), False, 'from tuw_nlp.grammar.text_to_4lang import TextTo4lang\n'), ((7109, 7127), 'potato.graph_extractor.extract.FeatureEvaluator', 'FeatureEvaluator', ([], {}), '()\n', (7125, 7127), False, 'from potato.graph_extractor.extract import FeatureEvaluator\n'), ((7201, 7221), 'pandas.read_pickle', 'pd.read_pickle', (['path'], {}), '(path)\n', (7215, 7221), True, 'import pandas as pd\n'), ((7352, 7372), 'pandas.read_pickle', 'pd.read_pickle', (['path'], {}), '(path)\n', (7366, 7372), True, 'import pandas as pd\n'), ((7592, 7652), 'streamlit.selectbox', 'st.selectbox', (['"""Choose from the rules"""', 'st.session_state.sens'], {}), "('Choose from the rules', st.session_state.sens)\n", (7604, 7652), True, 'import streamlit as st\n'), ((8418, 8786), 'streamlit.markdown', 'st.markdown', (['f"""<span>Feature: {st.session_state.ml_feature[0]}, Precision: <b>{st.session_state.ml_feature[1]:.3f}</b>, Recall: <b>{st.session_state.ml_feature[2]:.3f}</b>, Fscore: <b>{st.session_state.ml_feature[3]:.3f}</b>, Support: <b>{st.session_state.ml_feature[4]}</b></span>"""'], {'unsafe_allow_html': '(True)'}), "(\n f'<span>Feature: {st.session_state.ml_feature[0]}, Precision: <b>{st.session_state.ml_feature[1]:.3f}</b>, Recall: <b>{st.session_state.ml_feature[2]:.3f}</b>, Fscore: <b>{st.session_state.ml_feature[3]:.3f}</b>, Support: <b>{st.session_state.ml_feature[4]}</b></span>'\n , unsafe_allow_html=True)\n", (8429, 8786), True, 'import streamlit as st\n'), ((8822, 8841), 'streamlit.button', 'st.button', (['"""Accept"""'], {}), "('Accept')\n", (8831, 8841), True, 'import streamlit as st\n'), ((8861, 8881), 'streamlit.button', 'st.button', (['"""Decline"""'], {}), "('Decline')\n", (8870, 8881), True, 'import streamlit as st\n'), ((11346, 11375), 'streamlit.button', 'st.button', (['f"""Previous {type}"""'], {}), "(f'Previous {type}')\n", (11355, 11375), True, 'import streamlit as st\n'), ((11440, 11465), 'streamlit.button', 'st.button', (['f"""Next {type}"""'], {}), "(f'Next {type}')\n", (11449, 11465), True, 'import streamlit as st\n'), ((11648, 11754), 'streamlit.markdown', 'st.markdown', (['f"""<span><b>Sentence:</b> {sentences[graph_type[type]][0]}</span>"""'], {'unsafe_allow_html': '(True)'}), 
"(f'<span><b>Sentence:</b> {sentences[graph_type[type]][0]}</span>',\n unsafe_allow_html=True)\n", (11659, 11754), True, 'import streamlit as st\n'), ((11778, 11887), 'streamlit.markdown', 'st.markdown', (['f"""<span><b>Gold label:</b> {sentences[graph_type[type]][1]}</span>"""'], {'unsafe_allow_html': '(True)'}), "(f'<span><b>Gold label:</b> {sentences[graph_type[type]][1]}</span>'\n , unsafe_allow_html=True)\n", (11789, 11887), True, 'import streamlit as st\n'), ((12459, 12511), 'streamlit.text_area', 'st.text_area', (['"""You can add a new rule here manually"""'], {}), "('You can add a new rule here manually')\n", (12471, 12511), True, 'import streamlit as st\n'), ((12531, 12587), 'streamlit.text_area', 'st.text_area', (['"""You can modify the negated features here"""'], {}), "('You can modify the negated features here')\n", (12543, 12587), True, 'import streamlit as st\n'), ((12600, 12636), 'streamlit.button', 'st.button', (['"""Add rule to the ruleset"""'], {}), "('Add rule to the ruleset')\n", (12609, 12636), True, 'import streamlit as st\n'), ((13322, 13415), 'streamlit.markdown', 'st.markdown', (['f"""<span><b>Or get suggestions by our ML!</b></span>"""'], {'unsafe_allow_html': '(True)'}), "(f'<span><b>Or get suggestions by our ML!</b></span>',\n unsafe_allow_html=True)\n", (13333, 13415), True, 'import streamlit as st\n'), ((13508, 13538), 'streamlit.button', 'st.button', (['"""suggest new rules"""'], {}), "('suggest new rules')\n", (13517, 13538), True, 'import streamlit as st\n'), ((46242, 46281), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '""""""'}), "(description='')\n", (46265, 46281), False, 'import argparse\n'), ((47027, 47060), 'streamlit.set_page_config', 'st.set_page_config', ([], {'layout': '"""wide"""'}), "(layout='wide')\n", (47045, 47060), True, 'import streamlit as st\n'), ((47065, 47190), 'streamlit.markdown', 'st.markdown', (['"""<h1 style=\'text-align: center; color: black;\'>Rule extraction framework</h1>"""'], {'unsafe_allow_html': '(True)'}), '(\n "<h1 style=\'text-align: center; color: black;\'>Rule extraction framework</h1>"\n , unsafe_allow_html=True)\n', (47076, 47190), True, 'import streamlit as st\n'), ((2667, 2677), 'io.StringIO', 'StringIO', ([], {}), '()\n', (2675, 2677), False, 'from io import StringIO\n'), ((5801, 5823), 'json.dump', 'json.dump', (['features', 'f'], {}), '(features, f)\n', (5810, 5823), False, 'import json\n'), ((6157, 6178), 're.match', 're.match', (['"""^[0-9]"""', 's'], {}), "('^[0-9]', s)\n", (6165, 6178), False, 'import re\n'), ((7451, 7467), 'potato.models.trainer.GraphTrainer', 'GraphTrainer', (['df'], {}), '(df)\n', (7463, 7467), False, 'from potato.models.trainer import GraphTrainer\n'), ((14606, 14645), 'streamlit.sidebar.title', 'st.sidebar.title', (['"""Train your dataset!"""'], {}), "('Train your dataset!')\n", (14622, 14645), True, 'import streamlit as st\n'), ((14665, 14691), 'streamlit.sidebar.button', 'st.sidebar.button', (['"""Train"""'], {}), "('Train')\n", (14682, 14691), True, 'import streamlit as st\n'), ((14728, 14822), 'streamlit.sidebar.number_input', 'st.sidebar.number_input', (['"""Min edge in features"""'], {'min_value': '(0)', 'max_value': '(3)', 'value': '(0)', 'step': '(1)'}), "('Min edge in features', min_value=0, max_value=3,\n value=0, step=1)\n", (14751, 14822), True, 'import streamlit as st\n'), ((15215, 15389), 'streamlit.markdown', 'st.markdown', (['"""<h3 style=\'text-align: center; color: black;\'>Your dataset is shown below, click the train button to train your 
dataset!</h3>"""'], {'unsafe_allow_html': '(True)'}), '(\n "<h3 style=\'text-align: center; color: black;\'>Your dataset is shown below, click the train button to train your dataset!</h3>"\n , unsafe_allow_html=True)\n', (15226, 15389), True, 'import streamlit as st\n'), ((15435, 15492), 'st_aggrid.AgGrid', 'AgGrid', (['data'], {'width': '"""100%"""', 'fit_columns_on_grid_load': '(True)'}), "(data, width='100%', fit_columns_on_grid_load=True)\n", (15441, 15492), False, 'from st_aggrid import AgGrid, DataReturnMode, GridOptionsBuilder, GridUpdateMode, JsCode\n'), ((15502, 15533), 'streamlit.write', 'st.write', (['"""label distribution:"""'], {}), "('label distribution:')\n", (15510, 15533), True, 'import streamlit as st\n'), ((15594, 15623), 'streamlit.write', 'st.write', (['"""sentence lenghts:"""'], {}), "('sentence lenghts:')\n", (15602, 15623), True, 'import streamlit as st\n'), ((15675, 15700), 'streamlit.write', 'st.write', (['"""common words:"""'], {}), "('common words:')\n", (15683, 15700), True, 'import streamlit as st\n'), ((15887, 15900), 'streamlit.columns', 'st.columns', (['(2)'], {}), '(2)\n', (15897, 15900), True, 'import streamlit as st\n'), ((28164, 28231), 'streamlit.text', 'st.text', (['"""Before we start, please provide labels you want to train"""'], {}), "('Before we start, please provide labels you want to train')\n", (28171, 28231), True, 'import streamlit as st\n'), ((28253, 28299), 'streamlit.text_input', 'st.text_input', (['"""label encoding"""', '"""NOT:0,OFF:1"""'], {}), "('label encoding', 'NOT:0,OFF:1')\n", (28266, 28299), True, 'import streamlit as st\n'), ((28461, 28494), 'streamlit.write', 'st.write', (['st.session_state.labels'], {}), '(st.session_state.labels)\n', (28469, 28494), True, 'import streamlit as st\n'), ((28630, 28716), 'streamlit.markdown', 'st.markdown', (['f"""<span><b>Annotate samples here:</b></span>"""'], {'unsafe_allow_html': '(True)'}), "(f'<span><b>Annotate samples here:</b></span>',\n unsafe_allow_html=True)\n", (28641, 28716), True, 'import streamlit as st\n'), ((30489, 30581), 'streamlit.markdown', 'st.markdown', (['f"""<span>Samples you have already annotated:</span>"""'], {'unsafe_allow_html': '(True)'}), "(f'<span>Samples you have already annotated:</span>',\n unsafe_allow_html=True)\n", (30500, 30581), True, 'import streamlit as st\n'), ((32179, 32198), 'streamlit.button', 'st.button', (['"""Train!"""'], {}), "('Train!')\n", (32188, 32198), True, 'import streamlit as st\n'), ((33866, 33879), 'streamlit.columns', 'st.columns', (['(2)'], {}), '(2)\n', (33876, 33879), True, 'import streamlit as st\n'), ((9653, 9688), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), '(option)\n', (9680, 9688), True, 'import streamlit as st\n'), ((9781, 9816), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), '(option)\n', (9808, 9816), True, 'import streamlit as st\n'), ((9904, 9939), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), '(option)\n', (9931, 9939), True, 'import streamlit as st\n'), ((10031, 10066), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), '(option)\n', (10058, 10066), True, 'import streamlit as st\n'), ((10153, 10188), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), '(option)\n', (10180, 10188), True, 'import streamlit as st\n'), ((10281, 10316), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), 
'(option)\n', (10308, 10316), True, 'import streamlit as st\n'), ((10399, 10434), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), '(option)\n', (10426, 10434), True, 'import streamlit as st\n'), ((10509, 10544), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), '(option)\n', (10536, 10544), True, 'import streamlit as st\n'), ((10616, 10651), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), '(option)\n', (10643, 10651), True, 'import streamlit as st\n'), ((10724, 10759), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), '(option)\n', (10751, 10759), True, 'import streamlit as st\n'), ((10835, 10870), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), '(option)\n', (10862, 10870), True, 'import streamlit as st\n'), ((14526, 14538), 'json.load', 'json.load', (['f'], {}), '(f)\n', (14535, 14538), False, 'import json\n'), ((15956, 15984), 'os.path.exists', 'os.path.exists', (['feature_path'], {}), '(feature_path)\n', (15970, 15984), False, 'import os\n'), ((27153, 27165), 'json.load', 'json.load', (['f'], {}), '(f)\n', (27162, 27165), False, 'import json\n'), ((27590, 27644), 'streamlit.session_state.df.reset_index', 'st.session_state.df.reset_index', ([], {'level': '(0)', 'inplace': '(True)'}), '(level=0, inplace=True)\n', (27621, 27644), True, 'import streamlit as st\n'), ((28804, 28903), 'streamlit.markdown', 'st.markdown', (['f"""<span>Currently the following rules are applied:</span>"""'], {'unsafe_allow_html': '(True)'}), "(f'<span>Currently the following rules are applied:</span>',\n unsafe_allow_html=True)\n", (28815, 28903), True, 'import streamlit as st\n'), ((28959, 28999), 'streamlit.write', 'st.write', (['st.session_state.applied_rules'], {}), '(st.session_state.applied_rules)\n', (28967, 28999), True, 'import streamlit as st\n'), ((29013, 29037), 'streamlit.form', 'st.form', (['"""annotate form"""'], {}), "('annotate form')\n", (29020, 29037), True, 'import streamlit as st\n'), ((29061, 29110), 'st_aggrid.GridOptionsBuilder.from_dataframe', 'GridOptionsBuilder.from_dataframe', (['df_unannotated'], {}), '(df_unannotated)\n', (29094, 29110), False, 'from st_aggrid import AgGrid, DataReturnMode, GridOptionsBuilder, GridUpdateMode, JsCode\n'), ((29609, 29853), 'st_aggrid.AgGrid', 'AgGrid', (['df_unannotated'], {'gridOptions': 'go', 'key': '"""grid2"""', 'allow_unsafe_jscode': '(True)', 'reload_data': '(True)', 'update_mode': '(GridUpdateMode.MODEL_CHANGED | GridUpdateMode.VALUE_CHANGED)', 'width': '"""100%"""', 'theme': '"""material"""', 'fit_columns_on_grid_load': '(True)'}), "(df_unannotated, gridOptions=go, key='grid2', allow_unsafe_jscode=\n True, reload_data=True, update_mode=GridUpdateMode.MODEL_CHANGED |\n GridUpdateMode.VALUE_CHANGED, width='100%', theme='material',\n fit_columns_on_grid_load=True)\n", (29615, 29853), False, 'from st_aggrid import AgGrid, DataReturnMode, GridOptionsBuilder, GridUpdateMode, JsCode\n'), ((30024, 30057), 'streamlit.form_submit_button', 'st.form_submit_button', (['"""Annotate"""'], {}), "('Annotate')\n", (30045, 30057), True, 'import streamlit as st\n'), ((30626, 30651), 'streamlit.form', 'st.form', (['"""annotated form"""'], {}), "('annotated form')\n", (30633, 30651), True, 'import streamlit as st\n'), ((30675, 30722), 'st_aggrid.GridOptionsBuilder.from_dataframe', 'GridOptionsBuilder.from_dataframe', (['df_annotated'], {}), '(df_annotated)\n', (30708, 30722), 
False, 'from st_aggrid import AgGrid, DataReturnMode, GridOptionsBuilder, GridUpdateMode, JsCode\n'), ((31192, 31433), 'st_aggrid.AgGrid', 'AgGrid', (['df_annotated'], {'gridOptions': 'go', 'key': '"""grid3"""', 'allow_unsafe_jscode': '(True)', 'reload_data': '(True)', 'update_mode': '(GridUpdateMode.MODEL_CHANGED | GridUpdateMode.VALUE_CHANGED)', 'width': '"""100%"""', 'theme': '"""material"""', 'fit_columns_on_grid_load': '(True)'}), "(df_annotated, gridOptions=go, key='grid3', allow_unsafe_jscode=True,\n reload_data=True, update_mode=GridUpdateMode.MODEL_CHANGED |\n GridUpdateMode.VALUE_CHANGED, width='100%', theme='material',\n fit_columns_on_grid_load=True)\n", (31198, 31433), False, 'from st_aggrid import AgGrid, DataReturnMode, GridOptionsBuilder, GridUpdateMode, JsCode\n'), ((31611, 31652), 'streamlit.form_submit_button', 'st.form_submit_button', (['"""Clear annotation"""'], {}), "('Clear annotation')\n", (31632, 31652), True, 'import streamlit as st\n'), ((32243, 32269), 'streamlit.session_state.df.copy', 'st.session_state.df.copy', ([], {}), '()\n', (32267, 32269), True, 'import streamlit as st\n'), ((2769, 2785), 'threading.current_thread', 'current_thread', ([], {}), '()\n', (2783, 2785), False, 'from threading import current_thread\n'), ((13837, 13867), 'streamlit.spinner', 'st.spinner', (['"""Ranking rules..."""'], {}), "('Ranking rules...')\n", (13847, 13867), True, 'import streamlit as st\n'), ((15169, 15182), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (15179, 15182), False, 'import time\n'), ((16148, 16160), 'json.load', 'json.load', (['f'], {}), '(f)\n', (16157, 16160), False, 'import json\n'), ((17223, 17246), 'streamlit.form', 'st.form', (['"""example form"""'], {}), "('example form')\n", (17230, 17246), True, 'import streamlit as st\n'), ((17274, 17336), 'st_aggrid.GridOptionsBuilder.from_dataframe', 'GridOptionsBuilder.from_dataframe', (['st.session_state.feature_df'], {}), '(st.session_state.feature_df)\n', (17307, 17336), False, 'from st_aggrid import AgGrid, DataReturnMode, GridOptionsBuilder, GridUpdateMode, JsCode\n'), ((17784, 18042), 'st_aggrid.AgGrid', 'AgGrid', (['st.session_state.feature_df'], {'gridOptions': 'go', 'key': '"""grid1"""', 'allow_unsafe_jscode': '(True)', 'reload_data': '(True)', 'update_mode': '(GridUpdateMode.MODEL_CHANGED | GridUpdateMode.VALUE_CHANGED)', 'width': '"""100%"""', 'theme': '"""material"""', 'fit_columns_on_grid_load': '(True)'}), "(st.session_state.feature_df, gridOptions=go, key='grid1',\n allow_unsafe_jscode=True, reload_data=True, update_mode=GridUpdateMode.\n MODEL_CHANGED | GridUpdateMode.VALUE_CHANGED, width='100%', theme=\n 'material', fit_columns_on_grid_load=True)\n", (17790, 18042), False, 'from st_aggrid import AgGrid, DataReturnMode, GridOptionsBuilder, GridUpdateMode, JsCode\n'), ((18283, 18354), 'streamlit.radio', 'st.radio', (['"""Delete or Train selected rules"""', "('none', 'delete', 'train')"], {}), "('Delete or Train selected rules', ('none', 'delete', 'train'))\n", (18291, 18354), True, 'import streamlit as st\n'), ((18418, 18461), 'streamlit.form_submit_button', 'st.form_submit_button', ([], {'label': '"""save updates"""'}), "(label='save updates')\n", (18439, 18461), True, 'import streamlit as st\n'), ((18489, 18537), 'streamlit.form_submit_button', 'st.form_submit_button', ([], {'label': '"""evaluate selected"""'}), "(label='evaluate selected')\n", (18510, 18537), True, 'import streamlit as st\n'), ((20312, 20331), 'streamlit.success', 'st.success', (['"""Done!"""'], {}), "('Done!')\n", (20322, 
20331), True, 'import streamlit as st\n'), ((23369, 23741), 'streamlit.markdown', 'st.markdown', (['f"""<span>Result of using all the rules: Precision: <b>{st.session_state.whole_accuracy[0]:.3f}</b>, Recall: <b>{st.session_state.whole_accuracy[1]:.3f}</b>, Fscore: <b>{st.session_state.whole_accuracy[2]:.3f}</b>, Support: <b>{st.session_state.whole_accuracy[3]}</b></span>"""'], {'unsafe_allow_html': '(True)'}), "(\n f'<span>Result of using all the rules: Precision: <b>{st.session_state.whole_accuracy[0]:.3f}</b>, Recall: <b>{st.session_state.whole_accuracy[1]:.3f}</b>, Fscore: <b>{st.session_state.whole_accuracy[2]:.3f}</b>, Support: <b>{st.session_state.whole_accuracy[3]}</b></span>'\n , unsafe_allow_html=True)\n", (23380, 23741), True, 'import streamlit as st\n'), ((24223, 24442), 'streamlit.markdown', 'st.markdown', (['f"""<span>The rule\'s result: Precision: <b>{prec:.3f}</b>, Recall: <b>{recall:.3f}</b>, Fscore: <b>{fscore:.3f}</b>, Support: <b>{support}</b></span>"""'], {'unsafe_allow_html': '(True)'}), '(\n f"<span>The rule\'s result: Precision: <b>{prec:.3f}</b>, Recall: <b>{recall:.3f}</b>, Fscore: <b>{fscore:.3f}</b>, Support: <b>{support}</b></span>"\n , unsafe_allow_html=True)\n', (24234, 24442), True, 'import streamlit as st\n'), ((26309, 26376), 'streamlit.selectbox', 'st.selectbox', (['"""Select the graphs you want to view"""', 'tp_fp_fn_choice'], {}), "('Select the graphs you want to view', tp_fp_fn_choice)\n", (26321, 26376), True, 'import streamlit as st\n'), ((28570, 28601), 'streamlit.session_state.labels.items', 'st.session_state.labels.items', ([], {}), '()\n', (28599, 28601), True, 'import streamlit as st\n'), ((33815, 33843), 'streamlit.write', 'st.write', (['"""Empty dataframe!"""'], {}), "('Empty dataframe!')\n", (33823, 33843), True, 'import streamlit as st\n'), ((16913, 16945), 'streamlit.session_state.features.keys', 'st.session_state.features.keys', ([], {}), '()\n', (16943, 16945), True, 'import streamlit as st\n'), ((19630, 19663), 'streamlit.spinner', 'st.spinner', (['"""Evaluating rules..."""'], {}), "('Evaluating rules...')\n", (19640, 19663), True, 'import streamlit as st\n'), ((22599, 22621), 'streamlit.form', 'st.form', (['"""Delete form"""'], {}), "('Delete form')\n", (22606, 22621), True, 'import streamlit as st\n'), ((22643, 22709), 'streamlit.write', 'st.write', (['"""The following rules will be deleted, do you accept it?"""'], {}), "('The following rules will be deleted, do you accept it?')\n", (22651, 22709), True, 'import streamlit as st\n'), ((22730, 22771), 'streamlit.write', 'st.write', (['st.session_state.rows_to_delete'], {}), '(st.session_state.rows_to_delete)\n', (22738, 22771), True, 'import streamlit as st\n'), ((22806, 22844), 'streamlit.form_submit_button', 'st.form_submit_button', (['"""Accept Delete"""'], {}), "('Accept Delete')\n", (22827, 22844), True, 'import streamlit as st\n'), ((24516, 24567), 'streamlit.expander', 'st.expander', (['"""Show validation data"""'], {'expanded': '(False)'}), "('Show validation data', expanded=False)\n", (24527, 24567), True, 'import streamlit as st\n'), ((25228, 25647), 'streamlit.markdown', 'st.markdown', (['f"""<span>Result of using all the rules on the validation data: Precision: <b>{st.session_state.whole_accuracy_val[0]:.3f}</b>, Recall: <b>{st.session_state.whole_accuracy_val[1]:.3f}</b>, Fscore: <b>{st.session_state.whole_accuracy_val[2]:.3f}</b>, Support: <b>{st.session_state.whole_accuracy_val[3]}</b></span>"""'], {'unsafe_allow_html': '(True)'}), "(\n f'<span>Result of using all the 
rules on the validation data: Precision: <b>{st.session_state.whole_accuracy_val[0]:.3f}</b>, Recall: <b>{st.session_state.whole_accuracy_val[1]:.3f}</b>, Fscore: <b>{st.session_state.whole_accuracy_val[2]:.3f}</b>, Support: <b>{st.session_state.whole_accuracy_val[3]}</b></span>'\n , unsafe_allow_html=True)\n", (25239, 25647), True, 'import streamlit as st\n'), ((25733, 26027), 'streamlit.markdown', 'st.markdown', (['f"""<span>The rule\'s result on the validation data: Precision: <b>{val_prec:.3f}</b>, Recall: <b>{val_recall:.3f}</b>, Fscore: <b>{val_fscore:.3f}</b>, Support: <b>{val_support}</b></span>"""'], {'unsafe_allow_html': '(True)'}), '(\n f"<span>The rule\'s result on the validation data: Precision: <b>{val_prec:.3f}</b>, Recall: <b>{val_recall:.3f}</b>, Fscore: <b>{val_fscore:.3f}</b>, Support: <b>{val_support}</b></span>"\n , unsafe_allow_html=True)\n', (25744, 26027), True, 'import streamlit as st\n'), ((34677, 34700), 'streamlit.form', 'st.form', (['"""example form"""'], {}), "('example form')\n", (34684, 34700), True, 'import streamlit as st\n'), ((34732, 34794), 'st_aggrid.GridOptionsBuilder.from_dataframe', 'GridOptionsBuilder.from_dataframe', (['st.session_state.feature_df'], {}), '(st.session_state.feature_df)\n', (34765, 34794), False, 'from st_aggrid import AgGrid, DataReturnMode, GridOptionsBuilder, GridUpdateMode, JsCode\n'), ((35286, 35544), 'st_aggrid.AgGrid', 'AgGrid', (['st.session_state.feature_df'], {'gridOptions': 'go', 'key': '"""grid1"""', 'allow_unsafe_jscode': '(True)', 'reload_data': '(True)', 'update_mode': '(GridUpdateMode.MODEL_CHANGED | GridUpdateMode.VALUE_CHANGED)', 'width': '"""100%"""', 'theme': '"""material"""', 'fit_columns_on_grid_load': '(True)'}), "(st.session_state.feature_df, gridOptions=go, key='grid1',\n allow_unsafe_jscode=True, reload_data=True, update_mode=GridUpdateMode.\n MODEL_CHANGED | GridUpdateMode.VALUE_CHANGED, width='100%', theme=\n 'material', fit_columns_on_grid_load=True)\n", (35292, 35544), False, 'from st_aggrid import AgGrid, DataReturnMode, GridOptionsBuilder, GridUpdateMode, JsCode\n'), ((35833, 35904), 'streamlit.radio', 'st.radio', (['"""Delete or Train selected rules"""', "('none', 'delete', 'train')"], {}), "('Delete or Train selected rules', ('none', 'delete', 'train'))\n", (35841, 35904), True, 'import streamlit as st\n'), ((35980, 36023), 'streamlit.form_submit_button', 'st.form_submit_button', ([], {'label': '"""save updates"""'}), "(label='save updates')\n", (36001, 36023), True, 'import streamlit as st\n'), ((36055, 36103), 'streamlit.form_submit_button', 'st.form_submit_button', ([], {'label': '"""evaluate selected"""'}), "(label='evaluate selected')\n", (36076, 36103), True, 'import streamlit as st\n'), ((36135, 36192), 'streamlit.form_submit_button', 'st.form_submit_button', ([], {'label': '"""annotate based on selected"""'}), "(label='annotate based on selected')\n", (36156, 36192), True, 'import streamlit as st\n'), ((37765, 37784), 'streamlit.success', 'st.success', (['"""Done!"""'], {}), "('Done!')\n", (37775, 37784), True, 'import streamlit as st\n'), ((42119, 42499), 'streamlit.markdown', 'st.markdown', (['f"""<span>Result of using all the rules: Precision: <b>{st.session_state.whole_accuracy[0]:.3f}</b>, Recall: <b>{st.session_state.whole_accuracy[1]:.3f}</b>, Fscore: <b>{st.session_state.whole_accuracy[2]:.3f}</b>, Support: <b>{st.session_state.whole_accuracy[3]}</b></span>"""'], {'unsafe_allow_html': '(True)'}), "(\n f'<span>Result of using all the rules: Precision: 
<b>{st.session_state.whole_accuracy[0]:.3f}</b>, Recall: <b>{st.session_state.whole_accuracy[1]:.3f}</b>, Fscore: <b>{st.session_state.whole_accuracy[2]:.3f}</b>, Support: <b>{st.session_state.whole_accuracy[3]}</b></span>'\n , unsafe_allow_html=True)\n", (42130, 42499), True, 'import streamlit as st\n'), ((43049, 43272), 'streamlit.markdown', 'st.markdown', (['f"""<span>The rule\'s result: Precision: <b>{prec:.3f}</b>, Recall: <b>{recall:.3f}</b>, Fscore: <b>{fscore:.3f}</b>, Support: <b>{support}</b></span>"""'], {'unsafe_allow_html': '(True)'}), '(\n f"<span>The rule\'s result: Precision: <b>{prec:.3f}</b>, Recall: <b>{recall:.3f}</b>, Fscore: <b>{fscore:.3f}</b>, Support: <b>{support}</b></span>"\n , unsafe_allow_html=True)\n', (43060, 43272), True, 'import streamlit as st\n'), ((43613, 43680), 'streamlit.selectbox', 'st.selectbox', (['"""Select the option you want to view"""', 'tp_fp_fn_choice'], {}), "('Select the option you want to view', tp_fp_fn_choice)\n", (43625, 43680), True, 'import streamlit as st\n'), ((21668, 21695), 'copy.deepcopy', 'copy.deepcopy', (['feature_list'], {}), '(feature_list)\n', (21681, 21695), False, 'import copy\n'), ((22335, 22362), 'copy.deepcopy', 'copy.deepcopy', (['feature_list'], {}), '(feature_list)\n', (22348, 22362), False, 'import copy\n'), ((34343, 34375), 'streamlit.session_state.features.keys', 'st.session_state.features.keys', ([], {}), '()\n', (34373, 34375), True, 'import streamlit as st\n'), ((37310, 37343), 'streamlit.spinner', 'st.spinner', (['"""Evaluating rules..."""'], {}), "('Evaluating rules...')\n", (37320, 37343), True, 'import streamlit as st\n'), ((41225, 41247), 'streamlit.form', 'st.form', (['"""Delete form"""'], {}), "('Delete form')\n", (41232, 41247), True, 'import streamlit as st\n'), ((41273, 41339), 'streamlit.write', 'st.write', (['"""The following rules will be deleted, do you accept it?"""'], {}), "('The following rules will be deleted, do you accept it?')\n", (41281, 41339), True, 'import streamlit as st\n'), ((41418, 41459), 'streamlit.write', 'st.write', (['st.session_state.rows_to_delete'], {}), '(st.session_state.rows_to_delete)\n', (41426, 41459), True, 'import streamlit as st\n'), ((41498, 41536), 'streamlit.form_submit_button', 'st.form_submit_button', (['"""Accept Delete"""'], {}), "('Accept Delete')\n", (41519, 41536), True, 'import streamlit as st\n'), ((43992, 44023), 'streamlit.button', 'st.button', (['"""Previous Predicted"""'], {}), "('Previous Predicted')\n", (44001, 44023), True, 'import streamlit as st\n'), ((44218, 44245), 'streamlit.button', 'st.button', (['"""Next Predicted"""'], {}), "('Next Predicted')\n", (44227, 44245), True, 'import streamlit as st\n'), ((44643, 44802), 'streamlit.markdown', 'st.markdown', (['f"""<span><b>Sentence:</b> {st.session_state.df.iloc[predicted_inds[st.session_state.predicted_num]].text}</span>"""'], {'unsafe_allow_html': '(True)'}), "(\n f'<span><b>Sentence:</b> {st.session_state.df.iloc[predicted_inds[st.session_state.predicted_num]].text}</span>'\n , unsafe_allow_html=True)\n", (44654, 44802), True, 'import streamlit as st\n'), ((44900, 45062), 'streamlit.markdown', 'st.markdown', (['f"""<span><b>Gold label:</b> {st.session_state.df.iloc[predicted_inds[st.session_state.predicted_num]].label}</span>"""'], {'unsafe_allow_html': '(True)'}), "(\n f'<span><b>Gold label:</b> {st.session_state.df.iloc[predicted_inds[st.session_state.predicted_num]].label}</span>'\n , unsafe_allow_html=True)\n", (44911, 45062), True, 'import streamlit as st\n'), ((21504, 21547), 
'streamlit.session_state.rls_after_delete.append', 'st.session_state.rls_after_delete.append', (['r'], {}), '(r)\n', (21544, 21547), True, 'import streamlit as st\n'), ((21865, 21928), 'streamlit.text', 'st.text', (['"""Only single and underspecified rules can be trained!"""'], {}), "('Only single and underspecified rules can be trained!')\n", (21872, 21928), True, 'import streamlit as st\n'), ((24661, 24696), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), '(option)\n', (24688, 24696), True, 'import streamlit as st\n'), ((24823, 24858), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), '(option)\n', (24850, 24858), True, 'import streamlit as st\n'), ((24982, 25017), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), '(option)\n', (25009, 25017), True, 'import streamlit as st\n'), ((25142, 25177), 'streamlit.session_state.sens.index', 'st.session_state.sens.index', (['option'], {}), '(option)\n', (25169, 25177), True, 'import streamlit as st\n'), ((39990, 40017), 'copy.deepcopy', 'copy.deepcopy', (['feature_list'], {}), '(feature_list)\n', (40003, 40017), False, 'import copy\n'), ((40945, 40972), 'copy.deepcopy', 'copy.deepcopy', (['feature_list'], {}), '(feature_list)\n', (40958, 40972), False, 'import copy\n'), ((22196, 22256), 'streamlit.session_state.rls_after_delete.append', 'st.session_state.rls_after_delete.append', (['[[f], [], classes]'], {}), '([[f], [], classes])\n', (22236, 22256), True, 'import streamlit as st\n'), ((39818, 39861), 'streamlit.session_state.rls_after_delete.append', 'st.session_state.rls_after_delete.append', (['r'], {}), '(r)\n', (39858, 39861), True, 'import streamlit as st\n'), ((40199, 40262), 'streamlit.text', 'st.text', (['"""Only single and underspecified rules can be trained!"""'], {}), "('Only single and underspecified rules can be trained!')\n", (40206, 40262), True, 'import streamlit as st\n'), ((40728, 40788), 'streamlit.session_state.rls_after_delete.append', 'st.session_state.rls_after_delete.append', (['[[f], [], classes]'], {}), '([[f], [], classes])\n', (40768, 40788), True, 'import streamlit as st\n')]
|
import lue.data_model as ldm
import numpy as np
import csv
def export_partition_shape_results(
lue_dataset,
csv_writer):
    # Assert that the number of array shapes for which experiments were
    # performed is 1
lue_array = lue_dataset.array.array
assert lue_array.shape.value.nr_arrays == 1
    # For each array shape for which experiments were performed
lue_measurement = lue_dataset.benchmark.measurement
array_shapes = lue_measurement.array_shape.value[:]
assert np.all(array_shapes == array_shapes[0])
count = lue_measurement.duration.value.array_shape[:][0]
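    # count: number of timed repetitions per measurement; 1 means raw durations,
    # otherwise mean/std statistics are exported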
lue_partition = lue_dataset.partition.partition
partition_shape = lue_measurement.partition_shape.value[:]
nr_partitions = lue_measurement.nr_partitions.value[:,-1]
assert len(partition_shape) == len(nr_partitions)
if count == 1:
assert False, "Implement!"
else:
# Write the following columns:
# - partition_shape
# - nr_partitions
# - {mean,std}_duration
csv_writer.writerow([
# "partition_shape",
"partition_size",
"nr_partitions",
"mean_duration",
"std_duration",
])
mean_duration = \
lue_partition.properties["mean_duration_{}".format(0)].value[:]
std_duration = \
lue_partition.properties["std_duration_{}".format(0)].value[:]
for n in range(len(partition_shape)):
csv_writer.writerow([
# "{},{}".format(*partition_shape[n]),
np.prod(partition_shape[n]),
nr_partitions[n],
mean_duration[n],
std_duration[n],
])
def export_strong_scaling_results(
lue_dataset,
csv_writer):
lue_measurement = lue_dataset.benchmark.measurement
count = lue_measurement.duration.value.array_shape[:][0]
nr_workers = lue_measurement.nr_workers.value[:]
sort_idxs = np.argsort(nr_workers)
nr_workers = nr_workers[sort_idxs]
if count == 1:
# Write the following columns:
# - nr_workers
# - relative_speed_up
# - relative_efficiency
# - lups
csv_writer.writerow([
"nr_workers",
"duration",
"relative_speed_up",
"relative_efficiency",
"lups",
])
lue_scaling = lue_dataset.benchmark.scaling
duration = lue_measurement.duration.value[:][sort_idxs]
relative_speed_up = lue_scaling.relative_speed_up.value[:][sort_idxs]
relative_efficiency = lue_scaling.relative_efficiency.value[:][sort_idxs]
lups = lue_scaling.lups.value[:][sort_idxs]
for n in range(len(nr_workers)):
csv_writer.writerow([
nr_workers[n],
duration[n][0],
relative_speed_up[n][0],
relative_efficiency[n][0],
lups[n][0],
])
else:
# Write the following columns:
# - nr_workers
# - {mean,std}_duration
# - {mean,std}_relative_efficiency
# - {mean,std}_lups
csv_writer.writerow([
"nr_workers",
"mean_duration",
"std_duration",
"mean_relative_efficiency",
"std_relative_efficiency",
"mean_lups",
"std_lups",
])
lue_scaling = lue_dataset.benchmark.scaling
mean_duration = lue_scaling.mean_duration.value[:][sort_idxs]
std_duration = lue_scaling.std_duration.value[:][sort_idxs]
mean_relative_efficiency = lue_scaling.mean_relative_efficiency.value[:][sort_idxs]
std_relative_efficiency = lue_scaling.std_relative_efficiency.value[:][sort_idxs]
mean_lups = lue_scaling.mean_lups.value[:][sort_idxs]
std_lups = lue_scaling.std_lups.value[:][sort_idxs]
for n in range(len(nr_workers)):
csv_writer.writerow([
nr_workers[n],
mean_duration[n],
std_duration[n],
mean_relative_efficiency[n],
std_relative_efficiency[n],
mean_lups[n],
std_lups[n],
])
def export_weak_scaling_results(
lue_dataset,
csv_writer):
lue_measurement = lue_dataset.benchmark.measurement
count = lue_measurement.duration.value.array_shape[:][0]
nr_workers = lue_measurement.nr_workers.value[:]
sort_idxs = np.argsort(nr_workers)
nr_workers = nr_workers[sort_idxs]
if count == 1:
# Write the following columns:
# - nr_workers
# - duration
# - relative_efficiency
# - lups
csv_writer.writerow([
"nr_workers",
"duration",
"relative_efficiency",
"lups",
])
lue_scaling = lue_dataset.benchmark.scaling
duration = lue_measurement.duration.value[:]
relative_efficiency = lue_scaling.relative_efficiency.value[:][sort_idxs]
lups = lue_scaling.lups.value[:][sort_idxs]
for n in range(len(nr_workers)):
csv_writer.writerow([
nr_workers[n],
duration[n][0],
relative_efficiency[n][0],
lups[n][0],
])
else:
# Write the following columns:
# - nr_workers
# - {mean,std}_duration
# - {mean,std}_relative_efficiency
# - {mean,std}_lups
csv_writer.writerow([
"nr_workers",
"mean_duration",
"std_duration",
"mean_relative_efficiency",
"std_relative_efficiency",
"mean_lups",
"std_lups",
])
lue_scaling = lue_dataset.benchmark.scaling
mean_duration = lue_scaling.mean_duration.value[:][sort_idxs]
std_duration = lue_scaling.std_duration.value[:][sort_idxs]
mean_relative_efficiency = lue_scaling.mean_relative_efficiency.value[:][sort_idxs]
std_relative_efficiency = lue_scaling.std_relative_efficiency.value[:][sort_idxs]
mean_lups = lue_scaling.mean_lups.value[:][sort_idxs]
std_lups = lue_scaling.std_lups.value[:][sort_idxs]
for n in range(len(nr_workers)):
csv_writer.writerow([
nr_workers[n],
mean_duration[n],
std_duration[n],
mean_relative_efficiency[n],
std_relative_efficiency[n],
mean_lups[n],
std_lups[n],
])
def export_results(
lue_dataset_pathname,
csv_file_pathname):
lue_dataset = ldm.open_dataset(lue_dataset_pathname, "r")
kind = lue_dataset.benchmark.meta_information.kind.value[:][0]
with open(csv_file_pathname, "w") as csv_file:
csv_writer = csv.writer(csv_file)
export_by_kind = {
"partition_shape": export_partition_shape_results,
"strong_scaling": export_strong_scaling_results,
"weak_scaling": export_weak_scaling_results,
}
export_by_kind[kind](lue_dataset, csv_writer)
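

# Minimal command-line sketch, assuming the module may be run directly; the
# positional argument order (LUE dataset path, CSV output path) is an assumption.
if __name__ == "__main__":
    import sys

    export_results(sys.argv[1], sys.argv[2])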
|
[
"csv.writer",
"numpy.prod",
"numpy.argsort",
"lue.data_model.open_dataset",
"numpy.all"
] |
[((512, 551), 'numpy.all', 'np.all', (['(array_shapes == array_shapes[0])'], {}), '(array_shapes == array_shapes[0])\n', (518, 551), True, 'import numpy as np\n'), ((2065, 2087), 'numpy.argsort', 'np.argsort', (['nr_workers'], {}), '(nr_workers)\n', (2075, 2087), True, 'import numpy as np\n'), ((4753, 4775), 'numpy.argsort', 'np.argsort', (['nr_workers'], {}), '(nr_workers)\n', (4763, 4775), True, 'import numpy as np\n'), ((7092, 7135), 'lue.data_model.open_dataset', 'ldm.open_dataset', (['lue_dataset_pathname', '"""r"""'], {}), "(lue_dataset_pathname, 'r')\n", (7108, 7135), True, 'import lue.data_model as ldm\n'), ((7276, 7296), 'csv.writer', 'csv.writer', (['csv_file'], {}), '(csv_file)\n', (7286, 7296), False, 'import csv\n'), ((1638, 1665), 'numpy.prod', 'np.prod', (['partition_shape[n]'], {}), '(partition_shape[n])\n', (1645, 1665), True, 'import numpy as np\n')]
|
import time
from dataclasses import dataclass, field
from threading import Event, Thread
from typing import Callable, NoReturn
@dataclass
class Clock:
callback: Callable[[], None] = field(repr=False)
thread: Thread = field(init=False, repr=False)
started: Event = field(default_factory=Event, init=False, repr=False)
lag: float = field(init=False)
previous_time: float = field(init=False, repr=False)
def __post_init__(self) -> None:
self.thread = Thread(daemon=True, target=self.run)
self.thread.start()
def start(self) -> None:
self.previous_time = time.monotonic()
self.started.set()
def pause(self) -> None:
self.started.clear()
def destroy(self) -> None:
self.thread.join(0)
def _sleep(self):
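        # Sleep until the next scheduled tick: the remaining time is
        # previous_time + lag minus now. Sleeping only 92.5% of it guards against
        # time.sleep overshooting the deadline; _tick advances previous_time by
        # lag, so the schedule does not drift.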
result = self.lag + self.previous_time - time.monotonic()
sleep_for = result * 0.925 if result > 0 else 0
time.sleep(sleep_for)
def _tick(self) -> None:
self._sleep()
self.previous_time += self.lag
self.callback()
def _run_once(self) -> None:
self.started.wait()
while self.started.is_set():
self._tick()
def run(self) -> NoReturn:
while True:
self._run_once()
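

# Minimal usage sketch, assuming a ~10 Hz tick is the goal; the print callback,
# the 0.1 s lag, and the one-second run time are illustrative values.
if __name__ == "__main__":
    clock = Clock(callback=lambda: print("tick"))
    clock.lag = 0.1
    clock.start()
    time.sleep(1.0)
    clock.pause()
    clock.destroy()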
|
[
"dataclasses.field",
"threading.Thread",
"time.monotonic",
"time.sleep"
] |
[((188, 205), 'dataclasses.field', 'field', ([], {'repr': '(False)'}), '(repr=False)\n', (193, 205), False, 'from dataclasses import dataclass, field\n'), ((227, 256), 'dataclasses.field', 'field', ([], {'init': '(False)', 'repr': '(False)'}), '(init=False, repr=False)\n', (232, 256), False, 'from dataclasses import dataclass, field\n'), ((278, 330), 'dataclasses.field', 'field', ([], {'default_factory': 'Event', 'init': '(False)', 'repr': '(False)'}), '(default_factory=Event, init=False, repr=False)\n', (283, 330), False, 'from dataclasses import dataclass, field\n'), ((348, 365), 'dataclasses.field', 'field', ([], {'init': '(False)'}), '(init=False)\n', (353, 365), False, 'from dataclasses import dataclass, field\n'), ((393, 422), 'dataclasses.field', 'field', ([], {'init': '(False)', 'repr': '(False)'}), '(init=False, repr=False)\n', (398, 422), False, 'from dataclasses import dataclass, field\n'), ((483, 519), 'threading.Thread', 'Thread', ([], {'daemon': '(True)', 'target': 'self.run'}), '(daemon=True, target=self.run)\n', (489, 519), False, 'from threading import Event, Thread\n'), ((607, 623), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (621, 623), False, 'import time\n'), ((923, 944), 'time.sleep', 'time.sleep', (['sleep_for'], {}), '(sleep_for)\n', (933, 944), False, 'import time\n'), ((842, 858), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (856, 858), False, 'import time\n')]
|
import os
from statistics import mean
import numpy as np
import matplotlib.pyplot as pyplot
from simtk import unit
from simtk.openmm.app.pdbfile import PDBFile
from foldamers.cg_model.cgmodel import CGModel
from foldamers.parameters.reweight import *
from foldamers.thermo.calc import *
from foldamers.ensembles.ens_build import *
from cg_openmm.simulation.rep_exch import *
from cg_openmm.simulation.tools import *
# Job settings
scan_sc_bb_bb_sc_torsions = True
calculate_dQ = True
calculate_free_energies = True
evaluate_heat_capacity = True
output_directory = "output"
if not os.path.exists(output_directory):
os.mkdir(output_directory)
# Number of grid points to scan (around initial angle definition)
grid_points = 3
# Configure Yank (replica exchange) simulation settings
print_frequency = 5 # Number of steps to skip when printing output
total_simulation_time = 500.0 * unit.picosecond
simulation_time_step = 5.0 * unit.femtosecond
number_replicas = 30
min_temp = 1.0 * unit.kelvin
max_temp = 400.0 * unit.kelvin
temperature_list = get_temperature_list(min_temp, max_temp, number_replicas)
# Model settings
polymer_length = 12
backbone_lengths = [1]
sidechain_lengths = [1]
sidechain_positions = [0]
include_bond_forces = False
include_bond_angle_forces = True
include_nonbonded_forces = True
include_torsion_forces = True
constrain_bonds = True
# Bond definitions
bond_length = 7.5 * unit.angstrom
bond_lengths = {
"bb_bb_bond_length": bond_length,
"bb_sc_bond_length": bond_length,
"sc_sc_bond_length": bond_length,
}
bond_force_constant = 0 * unit.kilocalorie_per_mole / unit.nanometer / unit.nanometer
bond_force_constants = {
"bb_bb_bond_k": bond_force_constant,
"bb_sc_bond_k": bond_force_constant,
"sc_sc_bond_k": bond_force_constant,
}
# Particle definitions
mass = 100.0 * unit.amu
masses = {"backbone_bead_masses": mass, "sidechain_bead_masses": mass}
r_min = 3.0 * bond_length # Lennard-Jones potential r_min
sigma = r_min / (2.0 ** (1 / 6)) # Factor of /(2.0**(1/6)) is applied to convert r_min to sigma
sigmas = {"bb_sigma": sigma, "sc_sigma": sigma}
epsilon = 0.05 * unit.kilocalorie_per_mole
epsilons = {"bb_eps": epsilon, "sc_eps": epsilon}
# Bond angle definitions
bond_angle_force_constant = 0.0001 * unit.kilocalorie_per_mole / unit.radian / unit.radian
bond_angle_force_constants = {
"bb_bb_bb_angle_k": bond_angle_force_constant,
"bb_bb_sc_angle_k": bond_angle_force_constant,
}
bb_bb_bb_equil_bond_angle = 120.0 * (
3.14 / 180.0
) # OpenMM expects angle definitions in units of radians
bb_bb_sc_equil_bond_angle = 120.0 * (3.14 / 180.0)
equil_bond_angles = {
"bb_bb_bb_angle_0": bb_bb_bb_equil_bond_angle,
"bb_bb_sc_angle_0": bb_bb_sc_equil_bond_angle,
}
# Torsion angle definitions (Used to establish a scanning range below)
torsion_force_constant = 0.01 * unit.kilocalorie_per_mole / unit.radian / unit.radian
if scan_sc_bb_bb_sc_torsions == True:
torsion_force_constants = {
"bb_bb_bb_bb_torsion_k": torsion_force_constant,
"sc_bb_bb_sc_torsion_k": torsion_force_constant,
}
bb_bb_bb_bb_equil_torsion_angle = 78.0 * (
3.14 / 180.0
) # OpenMM defaults to units of radians for angle definitions
sc_bb_bb_sc_equil_torsion_angle = 120.0 * (3.14 / 180.0)
equil_torsion_angles = {
"bb_bb_bb_bb_torsion_0": bb_bb_bb_bb_equil_torsion_angle,
"sc_bb_bb_sc_torsion_0": sc_bb_bb_sc_equil_torsion_angle,
}
torsion_periodicities = {"bb_bb_bb_bb_period": 1, "sc_bb_bb_sc_period": 1}
else:
torsion_force_constants = {"bb_bb_bb_bb_torsion_k": torsion_force_constant}
bb_bb_bb_bb_equil_torsion_angle = 78.0 * (
3.14 / 180.0
) # OpenMM defaults to units of radians for angle definitions
equil_torsion_angles = {"bb_bb_bb_bb_torsion_0": bb_bb_bb_bb_equil_torsion_angle}
torsion_periodicities = {"bb_bb_bb_bb_period": 1}
# Get initial positions from local file
positions = PDBFile("helix.pdb").getPositions()
# Build a coarse grained model
cgmodel = CGModel(
polymer_length=polymer_length,
backbone_lengths=backbone_lengths,
sidechain_lengths=sidechain_lengths,
sidechain_positions=sidechain_positions,
masses=masses,
sigmas=sigmas,
epsilons=epsilons,
bond_lengths=bond_lengths,
bond_force_constants=bond_force_constants,
bond_angle_force_constants=bond_angle_force_constants,
torsion_force_constants=torsion_force_constants,
equil_bond_angles=equil_bond_angles,
equil_torsion_angles=equil_torsion_angles,
torsion_periodicities=torsion_periodicities,
include_nonbonded_forces=include_nonbonded_forces,
include_bond_forces=include_bond_forces,
include_bond_angle_forces=include_bond_angle_forces,
include_torsion_forces=include_torsion_forces,
constrain_bonds=constrain_bonds,
positions=positions,
)
# Run test simulations (NVT) with this coarse-grained model at the minimum and maximum temperatures
# to make sure the parameters are reasonable before attempting replica exchange simulations
# (If high-T simulations fail then we need to modify the model parameters)
test_simulation_time = 50.0 * unit.picosecond
print_frequency = 5
temperature = temperature_list[0]
output_directory = str("test_" + str(round(temperature._value, 1)))
if not os.path.exists(output_directory):
os.mkdir(output_directory)
run_simulation(
cgmodel,
output_directory,
test_simulation_time,
simulation_time_step,
temperature,
print_frequency,
)
temperature = temperature_list[-1]
output_directory = str("test_" + str(round(temperature._value, 1)))
if not os.path.exists(output_directory):
os.mkdir(output_directory)
run_simulation(
cgmodel,
output_directory,
test_simulation_time,
simulation_time_step,
temperature,
print_frequency,
)
# Reset the output directory
output_directory = "output"
if not os.path.exists(output_directory):
os.mkdir(output_directory)
# Create a list of the torsion angles that we will investigate in our parameter scan
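# (2 * grid_points values in 0.05 rad steps, from grid_points steps below the
# initial angle to grid_points - 1 steps above it)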
bb_bb_bb_bb_equil_torsion_angles = [
float(bb_bb_bb_bb_equil_torsion_angle + i * 0.05) for i in range(-grid_points, grid_points, 1)
]
if scan_sc_bb_bb_sc_torsions == True:
sc_bb_bb_sc_equil_torsion_angles = [
float(sc_bb_bb_sc_equil_torsion_angle + i * 0.05)
for i in range(-grid_points, grid_points, 1)
]
else:
sc_bb_bb_sc_equil_torsion_angles = [0.0]
if calculate_dQ:
# Set parameters for evaluating native contacts
native_structure_contact_distance_cutoff = 1.00 * cgmodel.get_sigma(
0
) # This distance cutoff determines which nonbonded interactions are considered 'native' contacts
native_fraction_cutoff = (
0.95 # The threshold fraction of native contacts above which a pose is considered 'native'
)
nonnative_fraction_cutoff = 0.95 # The threshold fraction of native contacts below which a pose is considered 'nonnative'
native_ensemble_size = 10
nonnative_ensemble_size = 10
decorrelate = True
# Build arrays to store data for each model parameter scan/grid point
dQ_list = []
df_ij_list = []
ddf_ij_list = []
Delta_u_list = []
dDelta_u_list = []
Delta_s_list = []
dDelta_s_list = []
C_v_list = []
dC_v_list = []
# This is where we start evaluating the properties of models with different equilibrium torsion angles
for sc_bb_bb_sc_equil_torsion_angle in sc_bb_bb_sc_equil_torsion_angles:
for bb_bb_bb_bb_equil_torsion_angle in bb_bb_bb_bb_equil_torsion_angles:
if scan_sc_bb_bb_sc_torsions == True:
equil_torsion_angles = {
"bb_bb_bb_bb_torsion_0": bb_bb_bb_bb_equil_torsion_angle,
"sc_bb_bb_sc_torsion_0": sc_bb_bb_sc_equil_torsion_angle,
}
else:
equil_torsion_angles = {"bb_bb_bb_bb_torsion_0": bb_bb_bb_bb_equil_torsion_angle}
# Build a coarse grained model that has the torsion parameters for this grid point.
positions = PDBFile("helix.pdb").getPositions()
cgmodel = CGModel(
polymer_length=polymer_length,
backbone_lengths=backbone_lengths,
sidechain_lengths=sidechain_lengths,
sidechain_positions=sidechain_positions,
masses=masses,
sigmas=sigmas,
epsilons=epsilons,
bond_lengths=bond_lengths,
bond_force_constants=bond_force_constants,
bond_angle_force_constants=bond_angle_force_constants,
torsion_force_constants=torsion_force_constants,
equil_bond_angles=equil_bond_angles,
equil_torsion_angles=equil_torsion_angles,
torsion_periodicities=torsion_periodicities,
include_nonbonded_forces=include_nonbonded_forces,
include_bond_forces=include_bond_forces,
include_bond_angle_forces=include_bond_angle_forces,
include_torsion_forces=include_torsion_forces,
constrain_bonds=constrain_bonds,
positions=positions,
)
if scan_sc_bb_bb_sc_torsions == True:
output_data = str(
str(output_directory)
+ "/torsions_"
+ str(round(bb_bb_bb_bb_equil_torsion_angle * (180.0 / 3.14), 1))
+ "_"
+ str(round(sc_bb_bb_sc_equil_torsion_angle * (180.0 / 3.14), 1))
+ ".nc"
)
file_name = str(
str(output_directory)
+ "/re_min_"
+ str(round(bb_bb_bb_bb_equil_torsion_angle * (180.0 / 3.14), 1))
+ "_"
+ str(round(sc_bb_bb_sc_equil_torsion_angle * (180.0 / 3.14), 1))
+ ".pdb"
)
else:
output_data = str(
str(output_directory)
+ "/torsions_"
+ str(round(bb_bb_bb_bb_equil_torsion_angle * (180.0 / 3.14), 1))
+ ".nc"
)
file_name = str(
str(output_directory)
+ "/re_min_"
+ str(round(bb_bb_bb_bb_equil_torsion_angle * (180.0 / 3.14), 1))
+ ".pdb"
)
if os.path.exists(file_name):
print("\n")
print("Reading existing simulation data for a coarse grained model")
print(
"with bb_bb_bb_bb torsion angles of "
+ str(round(bb_bb_bb_bb_equil_torsion_angle * (180.0 / 3.14), 1))
+ " degrees."
)
if scan_sc_bb_bb_sc_torsions == True:
print(
"and sc_bb_bb_sc torsion angles of "
+ str(round(sc_bb_bb_sc_equil_torsion_angle * (180.0 / 3.14), 1))
+ " degrees."
)
print("\n")
                # Search for existing data and read it if possible
replica_energies, replica_positions, replica_states = read_replica_exchange_data(
system=cgmodel.system,
topology=cgmodel.topology,
temperature_list=temperature_list,
output_data=output_data,
print_frequency=print_frequency,
)
# Find the lowest energy pose for this model
native_structure = PDBFile(file_name).getPositions()
else:
print("\n")
print("Performing simulations for a coarse grained model")
print(
"with bb_bb_bb_bb torsion angles of "
+ str(round(bb_bb_bb_bb_equil_torsion_angle * (180.0 / 3.14), 1))
+ " degrees."
)
if scan_sc_bb_bb_sc_torsions == True:
print(
"and sc_bb_bb_sc torsion angles of "
+ str(round(sc_bb_bb_sc_equil_torsion_angle * (180.0 / 3.14), 1))
+ " degrees."
)
print("\n")
# Run a replica exchange simulation with this cgmodel
replica_energies, replica_positions, replica_states = run_replica_exchange(
cgmodel.topology,
cgmodel.system,
cgmodel.positions,
temperature_list=temperature_list,
simulation_time_step=simulation_time_step,
total_simulation_time=total_simulation_time,
print_frequency=print_frequency,
output_data=output_data,
)
native_structure = get_native_structure(
replica_positions, replica_energies, temperature_list
)
file = open(file_name, "w")
PDBFile.writeFile(cgmodel.topology, native_structure, file=file)
file.close()
if calculate_dQ:
native_structure_contact_distance_cutoff = 1.15 * cgmodel.get_sigma(
0
) # This distance cutoff determines which nonbonded interactions are considered 'native' contacts
native_fraction_cutoff = 0.95 # The threshold fraction of native contacts above which a pose is considered 'native'
nonnative_fraction_cutoff = 0.95 # The threshold fraction of native contacts below which a pose is considered 'nonnative'
native_ensemble_size = 10
nonnative_ensemble_size = 100
decorrelate = True
(
native_ensemble,
native_ensemble_energies,
nonnative_ensemble,
nonnative_ensemble_energies,
) = get_ensembles_from_replica_positions(
cgmodel,
replica_positions,
replica_energies,
temperature_list,
decorrelate=decorrelate,
native_fraction_cutoff=native_fraction_cutoff,
nonnative_fraction_cutoff=nonnative_fraction_cutoff,
native_structure_contact_distance_cutoff=native_structure_contact_distance_cutoff,
native_ensemble_size=native_ensemble_size,
nonnative_ensemble_size=nonnative_ensemble_size,
)
if (
len(native_ensemble_energies) != native_ensemble_size
or len(nonnative_ensemble_energies) != nonnative_ensemble_size
):
print(
"ERROR: attempt to generate native and nonnative ensembles was unsuccessful."
)
print(
str(len(native_ensemble_energies))
+ " native ensemble members were generated ("
+ str(native_ensemble_size)
+ " were requested),"
)
print(
"and "
+ str(len(nonnative_ensemble_energies))
+ " non-native ensemble members were generated ("
+ str(nonnative_ensemble_size)
+ " were requested)."
)
print(
"Try adjusting the 'native_structure_distance_cutoff' parameter (current value="
+ str(native_structure_contact_distance_cutoff.__div__(cgmodel.get_sigma(0)))
+ "*'bb_sigma'),"
)
print(
"and the 'nonnative_fraction_cutoff' parameter (current value="
+ str(nonnative_fraction_cutoff)
+ ")"
)
print("to see if either of these approaches fixes the problem.")
exit()
if scan_sc_bb_bb_sc_torsions == True:
nonnative_ensemble_directory = str(
str(output_directory)
+ "/ens_"
+ str(round(bb_bb_bb_bb_equil_torsion_angle * (180.0 / 3.14), 1))
+ "_"
+ str(round(sc_bb_bb_sc_equil_torsion_angle * (180.0 / 3.14), 1))
+ "_nonnative"
)
native_ensemble_directory = str(
str(output_directory)
+ "/ens_"
+ str(round(bb_bb_bb_bb_equil_torsion_angle * (180.0 / 3.14), 1))
+ "_"
+ str(round(sc_bb_bb_sc_equil_torsion_angle * (180.0 / 3.14), 1))
+ "_native"
)
else:
nonnative_ensemble_directory = str(
str(output_directory)
+ "/ens_"
+ str(round(bb_bb_bb_bb_equil_torsion_angle * (180.0 / 3.14), 1))
+ "_nonnative"
)
native_ensemble_directory = str(
str(output_directory)
+ "/ens_"
+ str(round(bb_bb_bb_bb_equil_torsion_angle * (180.0 / 3.14), 1))
+ "_native"
)
# We build an ensemble of nonnative poses for energetic comparison with the native pose.
if os.path.exists(nonnative_ensemble_directory):
nonnative_ensemble, nonnative_ensemble_energies = get_ensemble_data(
cgmodel, nonnative_ensemble_directory
)
if len(nonnative_ensemble) != nonnative_ensemble_size:
print(
"ERROR: "
+ str(len(nonnative_ensemble_energies))
+ " nonnative poses were found in existing output folders, but "
+ str(nonnative_ensemble_size)
+ " poses were requested."
)
print(
"This probably means that the requested ensemble size changed since the script was last run."
)
exit()
else:
os.mkdir(nonnative_ensemble_directory)
for pose in nonnative_ensemble:
cgmodel.positions = pose
write_ensemble_pdb(cgmodel, ensemble_directory=nonnative_ensemble_directory)
nonnative_ensemble_Q = []
for pose in nonnative_ensemble:
Q = fraction_native_contacts(cgmodel, pose, native_structure)
nonnative_ensemble_Q.append(Q)
nonnative_ensemble_Q = np.array([Q for Q in nonnative_ensemble_Q])
mean_nonnative_contacts = mean(nonnative_ensemble_Q)
print(
"The mean fraction of native contacts for this model is: "
+ str(mean_nonnative_contacts)
)
# We build an ensemble of native poses in order to understand the energy distribution around the folded state.
if os.path.exists(native_ensemble_directory):
native_ensemble, native_ensemble_energies = get_ensemble_data(
cgmodel, native_ensemble_directory
)
if len(native_ensemble_energies) != native_ensemble_size:
print(
"ERROR: "
+ str(len(native_ensemble_energies))
+ " native poses were found in existing output folders, but "
+ str(native_ensemble_size)
+ " poses were requested."
)
print(
"This probably means that the requested ensemble size changed since the script was last run."
)
exit()
else:
os.mkdir(native_ensemble_directory)
for pose in native_ensemble:
cgmodel.positions = pose
write_ensemble_pdb(cgmodel, ensemble_directory=native_ensemble_directory)
# Get the average change in the fraction of native contacts during folding (dQ),
                # calculated as the difference between the fraction of native contacts in the
                # native structure (1.0 by definition) and the average fraction of native
                # contacts in the nonnative ensemble.
# A large dQ means the model/structure has a stable folded state.
# A small dQ means the model/structure does not have a stable folded state.
dQ = 1.0 - mean_nonnative_contacts
dQ_list.append(dQ)
if calculate_free_energies:
num_intermediate_states = 1
mbar, E_kn, E_expect, dE_expect, new_temp_list = get_mbar_expectation(
replica_energies, temperature_list, num_intermediate_states
)
df_ij, ddf_ij = get_free_energy_differences(mbar)
df_ij_list.append(df_ij)
ddf_ij_list.append(ddf_ij)
Delta_s, dDelta_s = get_entropy_differences(mbar)
Delta_s_list.append(Delta_s)
dDelta_s_list.append(dDelta_s)
Delta_u, dDelta_u = get_enthalpy_differences(mbar)
Delta_u_list.append(Delta_u)
dDelta_u_list.append(dDelta_u)
if evaluate_heat_capacity:
C_v, dC_v, new_temperature_list = get_heat_capacity(
replica_energies, temperature_list, num_intermediate_states=1
)
C_v_list.append(C_v)
dC_v_list.append(dC_v)
if scan_sc_bb_bb_sc_torsions == True:
file_name = "dQ_for_variable_equil_torsion_angles.png"
figure = pyplot.figure(1)
bb_bb_bb_bb_equil_torsion_angles = np.array(
[float(equil_torsion_angle) for equil_torsion_angle in bb_bb_bb_bb_equil_torsion_angles]
)
sc_bb_bb_sc_equil_torsion_angles = np.array(
[float(equil_torsion_angle) for equil_torsion_angle in sc_bb_bb_sc_equil_torsion_angles]
)
x = np.unique(bb_bb_bb_bb_equil_torsion_angles * (180.0 / 3.14))
y = np.unique(sc_bb_bb_sc_equil_torsion_angles * (180.0 / 3.14))
X, Y = np.meshgrid(x, y)
    Z = np.array(dQ_list).reshape(len(x), len(y))
pyplot.xlabel(r"$ \alpha_{0}^{BB-BB-BB-BB} $ ( Degrees )")
pyplot.ylabel(r"$ \alpha_{0}^{SC-BB-BB-SC} $ ( Degrees )")
pyplot.title("dQ (Change in native contacts during folding)")
pyplot.pcolormesh(X, Y, Z)
pyplot.colorbar()
pyplot.savefig(file_name)
pyplot.show()
pyplot.close()
if calculate_dQ:
file_name = "dQ_for_variable_bb_bb_bb_bb_torsion_angle.png"
figure = pyplot.figure(1)
x = np.array([float(angle * (180.0 / 3.14)) for angle in bb_bb_bb_bb_equil_torsion_angles])
y = np.array([float(dQ) for dQ in dQ_list])
pyplot.xlabel(r"$ \alpha_{0}^{BB-BB-BB-BB} $ ( Degrees )")
pyplot.ylabel(r"$\Delta$Q")
pyplot.title(r"$\Delta$Q (Change in native contacts) during folding")
pyplot.plot(x, y)
pyplot.savefig(file_name)
pyplot.show()
pyplot.close()
if calculate_free_energies:
file_name = "free_energies_for_variable_bb_bb_bb_bb_torsion_angle.png"
figure = pyplot.figure(1)
legend_title = r"$ \alpha_{0}^{BB-BB-BB-BB} $ (Degrees)"
legend_labels = np.array(
[float(round(angle * (180.0 / 3.14), 1)) for angle in bb_bb_bb_bb_equil_torsion_angles]
)
temperatures = np.array([temperature for temperature in new_temp_list])
index = 0
for df_ij, ddf_ij in zip(df_ij_list, ddf_ij_list):
df_ij = np.array([df_ij[i][0] for i in range(len(df_ij))])
ddf_ij = np.array([ddf_ij[i][0] for i in range(len(ddf_ij))])
(line,) = pyplot.plot(temperatures, df_ij)
line.set_label(legend_labels[index])
index = index + 1
pyplot.xlabel("Temperature (Kelvin)")
pyplot.ylabel(r"Dimensionless free energy differences $\mathit{F}$")
pyplot.title(r"$\mathit{F}$ for variable $\alpha_{0}^{BB-BB-BB-BB}$")
pyplot.legend(legend_labels)
pyplot.savefig(file_name)
pyplot.show()
pyplot.close()
file_name = "entropies_for_variable_bb_bb_bb_bb_torsion_angle.png"
figure = pyplot.figure(1)
legend_title = r"$ \alpha_{0}^{BB-BB-BB-BB} $ (Degrees)"
legend_labels = np.array(
[float(round(angle * (180.0 / 3.14), 1)) for angle in bb_bb_bb_bb_equil_torsion_angles]
)
temperatures = np.array([temperature for temperature in new_temp_list])
index = 0
for Delta_s in Delta_s_list:
delta_s = np.array([Delta_s[i][0] for i in range(len(Delta_s))])
(line,) = pyplot.plot(temperatures, delta_s)
line.set_label(legend_labels[index])
index = index + 1
pyplot.xlabel("Temperature (Kelvin)")
    pyplot.ylabel(r"Entropy differences ($\Delta$S)")
pyplot.title(r"Entropy for variable $\alpha_{0}^{BB-BB-BB-BB}$")
pyplot.legend(legend_labels)
pyplot.savefig(file_name)
pyplot.show()
pyplot.close()
if evaluate_heat_capacity:
file_name = "heat_capacity_for_variable_bb_bb_bb_bb_torsion_angle.png"
figure = pyplot.figure(1)
legend_title = r"$ \alpha_{0}^{BB-BB-BB-BB} $ (Degrees)"
legend_labels = np.array(
[float(round(angle * (180.0 / 3.14), 1)) for angle in bb_bb_bb_bb_equil_torsion_angles]
)
temperatures = np.array([temperature for temperature in new_temp_list])
index = 0
for C_v, dC_v in zip(C_v_list, dC_v_list):
C_v = np.array([C_v[i] for i in range(len(C_v))])
dC_v = np.array([dC_v[i] for i in range(len(dC_v))])
pyplot.errorbar(temperatures, C_v, yerr=dC_v, figure=figure, label=legend_labels[index])
index = index + 1
pyplot.xlabel("Temperature ( Kelvin )")
pyplot.ylabel(r"C$_{v}$ ( kcal/mol * Kelvin )")
pyplot.title(r"Heat capacity for variable $\epsilon$")
pyplot.legend(legend_labels)
pyplot.savefig(file_name)
pyplot.show()
pyplot.close()
exit()
|
[
"matplotlib.pyplot.title",
"os.mkdir",
"simtk.openmm.app.pdbfile.PDBFile.writeFile",
"matplotlib.pyplot.figure",
"numpy.unique",
"numpy.meshgrid",
"matplotlib.pyplot.close",
"os.path.exists",
"matplotlib.pyplot.colorbar",
"foldamers.cg_model.cgmodel.CGModel",
"simtk.openmm.app.pdbfile.PDBFile",
"matplotlib.pyplot.errorbar",
"matplotlib.pyplot.show",
"matplotlib.pyplot.legend",
"statistics.mean",
"matplotlib.pyplot.pcolormesh",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.plot",
"numpy.array",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.savefig"
] |
[((4025, 4818), 'foldamers.cg_model.cgmodel.CGModel', 'CGModel', ([], {'polymer_length': 'polymer_length', 'backbone_lengths': 'backbone_lengths', 'sidechain_lengths': 'sidechain_lengths', 'sidechain_positions': 'sidechain_positions', 'masses': 'masses', 'sigmas': 'sigmas', 'epsilons': 'epsilons', 'bond_lengths': 'bond_lengths', 'bond_force_constants': 'bond_force_constants', 'bond_angle_force_constants': 'bond_angle_force_constants', 'torsion_force_constants': 'torsion_force_constants', 'equil_bond_angles': 'equil_bond_angles', 'equil_torsion_angles': 'equil_torsion_angles', 'torsion_periodicities': 'torsion_periodicities', 'include_nonbonded_forces': 'include_nonbonded_forces', 'include_bond_forces': 'include_bond_forces', 'include_bond_angle_forces': 'include_bond_angle_forces', 'include_torsion_forces': 'include_torsion_forces', 'constrain_bonds': 'constrain_bonds', 'positions': 'positions'}), '(polymer_length=polymer_length, backbone_lengths=backbone_lengths,\n sidechain_lengths=sidechain_lengths, sidechain_positions=\n sidechain_positions, masses=masses, sigmas=sigmas, epsilons=epsilons,\n bond_lengths=bond_lengths, bond_force_constants=bond_force_constants,\n bond_angle_force_constants=bond_angle_force_constants,\n torsion_force_constants=torsion_force_constants, equil_bond_angles=\n equil_bond_angles, equil_torsion_angles=equil_torsion_angles,\n torsion_periodicities=torsion_periodicities, include_nonbonded_forces=\n include_nonbonded_forces, include_bond_forces=include_bond_forces,\n include_bond_angle_forces=include_bond_angle_forces,\n include_torsion_forces=include_torsion_forces, constrain_bonds=\n constrain_bonds, positions=positions)\n', (4032, 4818), False, 'from foldamers.cg_model.cgmodel import CGModel\n'), ((581, 613), 'os.path.exists', 'os.path.exists', (['output_directory'], {}), '(output_directory)\n', (595, 613), False, 'import os\n'), ((619, 645), 'os.mkdir', 'os.mkdir', (['output_directory'], {}), '(output_directory)\n', (627, 645), False, 'import os\n'), ((5297, 5329), 'os.path.exists', 'os.path.exists', (['output_directory'], {}), '(output_directory)\n', (5311, 5329), False, 'import os\n'), ((5335, 5361), 'os.mkdir', 'os.mkdir', (['output_directory'], {}), '(output_directory)\n', (5343, 5361), False, 'import os\n'), ((5615, 5647), 'os.path.exists', 'os.path.exists', (['output_directory'], {}), '(output_directory)\n', (5629, 5647), False, 'import os\n'), ((5653, 5679), 'os.mkdir', 'os.mkdir', (['output_directory'], {}), '(output_directory)\n', (5661, 5679), False, 'import os\n'), ((5888, 5920), 'os.path.exists', 'os.path.exists', (['output_directory'], {}), '(output_directory)\n', (5902, 5920), False, 'import os\n'), ((5926, 5952), 'os.mkdir', 'os.mkdir', (['output_directory'], {}), '(output_directory)\n', (5934, 5952), False, 'import os\n'), ((21292, 21308), 'matplotlib.pyplot.figure', 'pyplot.figure', (['(1)'], {}), '(1)\n', (21305, 21308), True, 'import matplotlib.pyplot as pyplot\n'), ((21622, 21682), 'numpy.unique', 'np.unique', (['(bb_bb_bb_bb_equil_torsion_angles * (180.0 / 3.14))'], {}), '(bb_bb_bb_bb_equil_torsion_angles * (180.0 / 3.14))\n', (21631, 21682), True, 'import numpy as np\n'), ((21691, 21751), 'numpy.unique', 'np.unique', (['(sc_bb_bb_sc_equil_torsion_angles * (180.0 / 3.14))'], {}), '(sc_bb_bb_sc_equil_torsion_angles * (180.0 / 3.14))\n', (21700, 21751), True, 'import numpy as np\n'), ((21763, 21780), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (21774, 21780), True, 'import numpy as np\n'), ((21826, 21884), 
'matplotlib.pyplot.xlabel', 'pyplot.xlabel', (['"""$ \\\\alpha_{0}^{BB-BB-BB-BB} $ ( Degrees )"""'], {}), "('$ \\\\alpha_{0}^{BB-BB-BB-BB} $ ( Degrees )')\n", (21839, 21884), True, 'import matplotlib.pyplot as pyplot\n'), ((21889, 21947), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', (['"""$ \\\\alpha_{0}^{SC-BB-BB-SC} $ ( Degrees )"""'], {}), "('$ \\\\alpha_{0}^{SC-BB-BB-SC} $ ( Degrees )')\n", (21902, 21947), True, 'import matplotlib.pyplot as pyplot\n'), ((21952, 22013), 'matplotlib.pyplot.title', 'pyplot.title', (['"""dQ (Change in native contacts during folding)"""'], {}), "('dQ (Change in native contacts during folding)')\n", (21964, 22013), True, 'import matplotlib.pyplot as pyplot\n'), ((22018, 22044), 'matplotlib.pyplot.pcolormesh', 'pyplot.pcolormesh', (['X', 'Y', 'Z'], {}), '(X, Y, Z)\n', (22035, 22044), True, 'import matplotlib.pyplot as pyplot\n'), ((22049, 22066), 'matplotlib.pyplot.colorbar', 'pyplot.colorbar', ([], {}), '()\n', (22064, 22066), True, 'import matplotlib.pyplot as pyplot\n'), ((22071, 22096), 'matplotlib.pyplot.savefig', 'pyplot.savefig', (['file_name'], {}), '(file_name)\n', (22085, 22096), True, 'import matplotlib.pyplot as pyplot\n'), ((22101, 22114), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (22112, 22114), True, 'import matplotlib.pyplot as pyplot\n'), ((22119, 22133), 'matplotlib.pyplot.close', 'pyplot.close', ([], {}), '()\n', (22131, 22133), True, 'import matplotlib.pyplot as pyplot\n'), ((22229, 22245), 'matplotlib.pyplot.figure', 'pyplot.figure', (['(1)'], {}), '(1)\n', (22242, 22245), True, 'import matplotlib.pyplot as pyplot\n'), ((22396, 22454), 'matplotlib.pyplot.xlabel', 'pyplot.xlabel', (['"""$ \\\\alpha_{0}^{BB-BB-BB-BB} $ ( Degrees )"""'], {}), "('$ \\\\alpha_{0}^{BB-BB-BB-BB} $ ( Degrees )')\n", (22409, 22454), True, 'import matplotlib.pyplot as pyplot\n'), ((22459, 22486), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', (['"""$\\\\Delta$Q"""'], {}), "('$\\\\Delta$Q')\n", (22472, 22486), True, 'import matplotlib.pyplot as pyplot\n'), ((22491, 22560), 'matplotlib.pyplot.title', 'pyplot.title', (['"""$\\\\Delta$Q (Change in native contacts) during folding"""'], {}), "('$\\\\Delta$Q (Change in native contacts) during folding')\n", (22503, 22560), True, 'import matplotlib.pyplot as pyplot\n'), ((22565, 22582), 'matplotlib.pyplot.plot', 'pyplot.plot', (['x', 'y'], {}), '(x, y)\n', (22576, 22582), True, 'import matplotlib.pyplot as pyplot\n'), ((22587, 22612), 'matplotlib.pyplot.savefig', 'pyplot.savefig', (['file_name'], {}), '(file_name)\n', (22601, 22612), True, 'import matplotlib.pyplot as pyplot\n'), ((22617, 22630), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (22628, 22630), True, 'import matplotlib.pyplot as pyplot\n'), ((22635, 22649), 'matplotlib.pyplot.close', 'pyplot.close', ([], {}), '()\n', (22647, 22649), True, 'import matplotlib.pyplot as pyplot\n'), ((22767, 22783), 'matplotlib.pyplot.figure', 'pyplot.figure', (['(1)'], {}), '(1)\n', (22780, 22783), True, 'import matplotlib.pyplot as pyplot\n'), ((22996, 23052), 'numpy.array', 'np.array', (['[temperature for temperature in new_temp_list]'], {}), '([temperature for temperature in new_temp_list])\n', (23004, 23052), True, 'import numpy as np\n'), ((23386, 23423), 'matplotlib.pyplot.xlabel', 'pyplot.xlabel', (['"""Temperature (Kelvin)"""'], {}), "('Temperature (Kelvin)')\n", (23399, 23423), True, 'import matplotlib.pyplot as pyplot\n'), ((23428, 23496), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', (['"""Dimensionless free energy differences 
$\\\\mathit{F}$"""'], {}), "('Dimensionless free energy differences $\\\\mathit{F}$')\n", (23441, 23496), True, 'import matplotlib.pyplot as pyplot\n'), ((23501, 23571), 'matplotlib.pyplot.title', 'pyplot.title', (['"""$\\\\mathit{F}$ for variable $\\\\alpha_{0}^{BB-BB-BB-BB}$"""'], {}), "('$\\\\mathit{F}$ for variable $\\\\alpha_{0}^{BB-BB-BB-BB}$')\n", (23513, 23571), True, 'import matplotlib.pyplot as pyplot\n'), ((23575, 23603), 'matplotlib.pyplot.legend', 'pyplot.legend', (['legend_labels'], {}), '(legend_labels)\n', (23588, 23603), True, 'import matplotlib.pyplot as pyplot\n'), ((23608, 23633), 'matplotlib.pyplot.savefig', 'pyplot.savefig', (['file_name'], {}), '(file_name)\n', (23622, 23633), True, 'import matplotlib.pyplot as pyplot\n'), ((23638, 23651), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (23649, 23651), True, 'import matplotlib.pyplot as pyplot\n'), ((23656, 23670), 'matplotlib.pyplot.close', 'pyplot.close', ([], {}), '()\n', (23668, 23670), True, 'import matplotlib.pyplot as pyplot\n'), ((23756, 23772), 'matplotlib.pyplot.figure', 'pyplot.figure', (['(1)'], {}), '(1)\n', (23769, 23772), True, 'import matplotlib.pyplot as pyplot\n'), ((23985, 24041), 'numpy.array', 'np.array', (['[temperature for temperature in new_temp_list]'], {}), '([temperature for temperature in new_temp_list])\n', (23993, 24041), True, 'import numpy as np\n'), ((24291, 24328), 'matplotlib.pyplot.xlabel', 'pyplot.xlabel', (['"""Temperature (Kelvin)"""'], {}), "('Temperature (Kelvin)')\n", (24304, 24328), True, 'import matplotlib.pyplot as pyplot\n'), ((24333, 24382), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', (['"""Entropy differences ($\\\\Delta$S)"""'], {}), "('Entropy differences ($\\\\Delta$S)')\n", (24346, 24382), True, 'import matplotlib.pyplot as pyplot\n'), ((24386, 24450), 'matplotlib.pyplot.title', 'pyplot.title', (['"""Entropy for variable $\\\\alpha_{0}^{BB-BB-BB-BB}$"""'], {}), "('Entropy for variable $\\\\alpha_{0}^{BB-BB-BB-BB}$')\n", (24398, 24450), True, 'import matplotlib.pyplot as pyplot\n'), ((24455, 24483), 'matplotlib.pyplot.legend', 'pyplot.legend', (['legend_labels'], {}), '(legend_labels)\n', (24468, 24483), True, 'import matplotlib.pyplot as pyplot\n'), ((24488, 24513), 'matplotlib.pyplot.savefig', 'pyplot.savefig', (['file_name'], {}), '(file_name)\n', (24502, 24513), True, 'import matplotlib.pyplot as pyplot\n'), ((24518, 24531), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (24529, 24531), True, 'import matplotlib.pyplot as pyplot\n'), ((24536, 24550), 'matplotlib.pyplot.close', 'pyplot.close', ([], {}), '()\n', (24548, 24550), True, 'import matplotlib.pyplot as pyplot\n'), ((24667, 24683), 'matplotlib.pyplot.figure', 'pyplot.figure', (['(1)'], {}), '(1)\n', (24680, 24683), True, 'import matplotlib.pyplot as pyplot\n'), ((24896, 24952), 'numpy.array', 'np.array', (['[temperature for temperature in new_temp_list]'], {}), '([temperature for temperature in new_temp_list])\n', (24904, 24952), True, 'import numpy as np\n'), ((25261, 25300), 'matplotlib.pyplot.xlabel', 'pyplot.xlabel', (['"""Temperature ( Kelvin )"""'], {}), "('Temperature ( Kelvin )')\n", (25274, 25300), True, 'import matplotlib.pyplot as pyplot\n'), ((25305, 25351), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', (['"""C$_{v}$ ( kcal/mol * Kelvin )"""'], {}), "('C$_{v}$ ( kcal/mol * Kelvin )')\n", (25318, 25351), True, 'import matplotlib.pyplot as pyplot\n'), ((25357, 25411), 'matplotlib.pyplot.title', 'pyplot.title', (['"""Heat capacity for variable $\\\\epsilon$"""'], {}), "('Heat 
capacity for variable $\\\\epsilon$')\n", (25369, 25411), True, 'import matplotlib.pyplot as pyplot\n'), ((25416, 25444), 'matplotlib.pyplot.legend', 'pyplot.legend', (['legend_labels'], {}), '(legend_labels)\n', (25429, 25444), True, 'import matplotlib.pyplot as pyplot\n'), ((25449, 25474), 'matplotlib.pyplot.savefig', 'pyplot.savefig', (['file_name'], {}), '(file_name)\n', (25463, 25474), True, 'import matplotlib.pyplot as pyplot\n'), ((25479, 25492), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (25490, 25492), True, 'import matplotlib.pyplot as pyplot\n'), ((25497, 25511), 'matplotlib.pyplot.close', 'pyplot.close', ([], {}), '()\n', (25509, 25511), True, 'import matplotlib.pyplot as pyplot\n'), ((3947, 3967), 'simtk.openmm.app.pdbfile.PDBFile', 'PDBFile', (['"""helix.pdb"""'], {}), "('helix.pdb')\n", (3954, 3967), False, 'from simtk.openmm.app.pdbfile import PDBFile\n'), ((8023, 8816), 'foldamers.cg_model.cgmodel.CGModel', 'CGModel', ([], {'polymer_length': 'polymer_length', 'backbone_lengths': 'backbone_lengths', 'sidechain_lengths': 'sidechain_lengths', 'sidechain_positions': 'sidechain_positions', 'masses': 'masses', 'sigmas': 'sigmas', 'epsilons': 'epsilons', 'bond_lengths': 'bond_lengths', 'bond_force_constants': 'bond_force_constants', 'bond_angle_force_constants': 'bond_angle_force_constants', 'torsion_force_constants': 'torsion_force_constants', 'equil_bond_angles': 'equil_bond_angles', 'equil_torsion_angles': 'equil_torsion_angles', 'torsion_periodicities': 'torsion_periodicities', 'include_nonbonded_forces': 'include_nonbonded_forces', 'include_bond_forces': 'include_bond_forces', 'include_bond_angle_forces': 'include_bond_angle_forces', 'include_torsion_forces': 'include_torsion_forces', 'constrain_bonds': 'constrain_bonds', 'positions': 'positions'}), '(polymer_length=polymer_length, backbone_lengths=backbone_lengths,\n sidechain_lengths=sidechain_lengths, sidechain_positions=\n sidechain_positions, masses=masses, sigmas=sigmas, epsilons=epsilons,\n bond_lengths=bond_lengths, bond_force_constants=bond_force_constants,\n bond_angle_force_constants=bond_angle_force_constants,\n torsion_force_constants=torsion_force_constants, equil_bond_angles=\n equil_bond_angles, equil_torsion_angles=equil_torsion_angles,\n torsion_periodicities=torsion_periodicities, include_nonbonded_forces=\n include_nonbonded_forces, include_bond_forces=include_bond_forces,\n include_bond_angle_forces=include_bond_angle_forces,\n include_torsion_forces=include_torsion_forces, constrain_bonds=\n constrain_bonds, positions=positions)\n', (8030, 8816), False, 'from foldamers.cg_model.cgmodel import CGModel\n'), ((10174, 10199), 'os.path.exists', 'os.path.exists', (['file_name'], {}), '(file_name)\n', (10188, 10199), False, 'import os\n'), ((23277, 23309), 'matplotlib.pyplot.plot', 'pyplot.plot', (['temperatures', 'df_ij'], {}), '(temperatures, df_ij)\n', (23288, 23309), True, 'import matplotlib.pyplot as pyplot\n'), ((24180, 24214), 'matplotlib.pyplot.plot', 'pyplot.plot', (['temperatures', 'delta_s'], {}), '(temperatures, delta_s)\n', (24191, 24214), True, 'import matplotlib.pyplot as pyplot\n'), ((25141, 25234), 'matplotlib.pyplot.errorbar', 'pyplot.errorbar', (['temperatures', 'C_v'], {'yerr': 'dC_v', 'figure': 'figure', 'label': 'legend_labels[index]'}), '(temperatures, C_v, yerr=dC_v, figure=figure, label=\n legend_labels[index])\n', (25156, 25234), True, 'import matplotlib.pyplot as pyplot\n'), ((12636, 12700), 'simtk.openmm.app.pdbfile.PDBFile.writeFile', 'PDBFile.writeFile', 
(['cgmodel.topology', 'native_structure'], {'file': 'file'}), '(cgmodel.topology, native_structure, file=file)\n', (12653, 12700), False, 'from simtk.openmm.app.pdbfile import PDBFile\n'), ((16996, 17040), 'os.path.exists', 'os.path.exists', (['nonnative_ensemble_directory'], {}), '(nonnative_ensemble_directory)\n', (17010, 17040), False, 'import os\n'), ((18316, 18359), 'numpy.array', 'np.array', (['[Q for Q in nonnative_ensemble_Q]'], {}), '([Q for Q in nonnative_ensemble_Q])\n', (18324, 18359), True, 'import numpy as np\n'), ((18398, 18424), 'statistics.mean', 'mean', (['nonnative_ensemble_Q'], {}), '(nonnative_ensemble_Q)\n', (18402, 18424), False, 'from statistics import mean\n'), ((18719, 18760), 'os.path.exists', 'os.path.exists', (['native_ensemble_directory'], {}), '(native_ensemble_directory)\n', (18733, 18760), False, 'import os\n'), ((7969, 7989), 'simtk.openmm.app.pdbfile.PDBFile', 'PDBFile', (['"""helix.pdb"""'], {}), "('helix.pdb')\n", (7976, 7989), False, 'from simtk.openmm.app.pdbfile import PDBFile\n'), ((17844, 17882), 'os.mkdir', 'os.mkdir', (['nonnative_ensemble_directory'], {}), '(nonnative_ensemble_directory)\n', (17852, 17882), False, 'import os\n'), ((19549, 19584), 'os.mkdir', 'os.mkdir', (['native_ensemble_directory'], {}), '(native_ensemble_directory)\n', (19557, 19584), False, 'import os\n'), ((11283, 11301), 'simtk.openmm.app.pdbfile.PDBFile', 'PDBFile', (['file_name'], {}), '(file_name)\n', (11290, 11301), False, 'from simtk.openmm.app.pdbfile import PDBFile\n')]
|
'''
## PyPoll

* In this challenge, you are tasked with helping a small, rural town
modernize its vote-counting process. (Up until now, Uncle Cleetus had
been trustfully tallying them one-by-one, but unfortunately, his concentration isn't what it used to be.)
* You will be given a set of poll data called [election_data.csv](PyPoll/Resources/election_data.csv).
The dataset is composed of three columns: `Voter ID`, `County`, and `Candidate`.
Your task is to create a Python script that analyzes the votes and calculates each of the following:
* The total number of votes cast
* A complete list of candidates who received votes
* The percentage of votes each candidate won
* The total number of votes each candidate won
* The winner of the election based on popular vote.
* As an example, your analysis should look similar to the one below:
```text
Election Results
-------------------------
Total Votes: 3521001
-------------------------
Khan: 63.000% (2218231)
Correy: 20.000% (704200)
Li: 14.000% (492940)
O'Tooley: 3.000% (105630)
-------------------------
Winner: Khan
-------------------------
```
* In addition, your final script should both print the analysis to the terminal and export a text file with the results.
'''
# Imports
import os, csv
# CSV Path
data_file = os.path.join("election_data.csv")
# Store Objects
database_total_votes = []
candidates_with_votes = []
store_candidates_votes = []
winner = []
# Variables
total_votes = 0
vote_percents = 0
# Open csv with a reader, skip the header, and loop over the rows with a for statement
with open(data_file, newline="", encoding="UTF-8") as csv_file:
csv_reader = csv.reader(csv_file, delimiter=",")
csv_header = next(csv_file)
# Loop through the data to variables
for row in csv_reader:
total_votes = total_votes +1
database_total_votes = total_votes
print(database_total_votes)
# A complete list of candidates who received votes `Voter ID`, `County`, and `Candidate`
candidates_with_votes.append(row[2])
candidates_with_votes = candidates_with_votes
print(candidates_with_votes)
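# --- Illustrative completion (assumption, not part of the original submission) ---
# A minimal sketch of the remaining analysis described in the docstring: tally the
# votes per candidate, compute percentages, pick the winner, and export the summary.
# The names candidate_votes, summary_lines and results_path are placeholders.
candidate_votes = {}
for candidate in candidates_with_votes:
    candidate_votes[candidate] = candidate_votes.get(candidate, 0) + 1
winner = max(candidate_votes, key=candidate_votes.get)
summary_lines = [
    "Election Results",
    "-------------------------",
    f"Total Votes: {total_votes}",
    "-------------------------",
]
for candidate, votes in candidate_votes.items():
    summary_lines.append(f"{candidate}: {votes / total_votes:.3%} ({votes})")
summary_lines += ["-------------------------", f"Winner: {winner}", "-------------------------"]
summary = "\n".join(summary_lines)
print(summary)
results_path = "election_results.txt"
with open(results_path, "w") as results_file:
    results_file.write(summary)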
|
[
"csv.reader",
"os.path.join"
] |
[((1372, 1405), 'os.path.join', 'os.path.join', (['"""election_data.csv"""'], {}), "('election_data.csv')\n", (1384, 1405), False, 'import os, csv\n'), ((1696, 1731), 'csv.reader', 'csv.reader', (['csv_file'], {'delimiter': '""","""'}), "(csv_file, delimiter=',')\n", (1706, 1731), False, 'import os, csv\n')]
|
import tensorflow as tf
from utils import box_utils
from models import registry
from .loss import MultiBoxLoss
from .inference import PostProcessor
from models.head.box_predictor import make_box_predictor
from models.anchors.prior_box import PriorBox
@registry.BOX_HEADS.register('SSDBoxHead')
class SSDBoxHead(tf.keras.layers.Layer):
def __init__(self, cfg):
super(SSDBoxHead, self).__init__()
self.cfg = cfg
self.predictor = make_box_predictor(cfg)
self.loss_evaluator = MultiBoxLoss(neg_pos_ratio=cfg.MODEL.NEG_POS_RATIO)
self.post_processor = PostProcessor(cfg)
self.priors = None
def call(self, features, targets=None):
cls_logits, bbox_pred = self.predictor(features) # (batch_size, num_priors, num_C) | (batch_size, num_priors, 4)
if targets is not None:
return self._call_train(cls_logits, bbox_pred, targets)
return self._call_test(cls_logits, bbox_pred)
def _call_train(self, cls_logits, bbox_pred, targets):
gt_boxes, gt_labels = targets
reg_loss, cls_loss = self.loss_evaluator(cls_logits, bbox_pred, gt_labels, gt_boxes)
return reg_loss, cls_loss
def _call_test(self, cls_logits, bbox_pred):
if self.priors is None:
self.priors = PriorBox(self.cfg)()
scores = tf.keras.activations.softmax(cls_logits, axis=2)
boxes = box_utils.convert_locations_to_boxes(bbox_pred, self.priors , self.cfg.MODEL.CENTER_VARIANCE, self.cfg.MODEL.SIZE_VARIANCE)
boxes = box_utils.center_form_to_corner_form(boxes)
detections = (scores, boxes)
detections = self.post_processor(detections)
return detections
|
[
"models.head.box_predictor.make_box_predictor",
"models.anchors.prior_box.PriorBox",
"tensorflow.keras.activations.softmax",
"models.registry.BOX_HEADS.register",
"utils.box_utils.convert_locations_to_boxes",
"utils.box_utils.center_form_to_corner_form"
] |
[((263, 304), 'models.registry.BOX_HEADS.register', 'registry.BOX_HEADS.register', (['"""SSDBoxHead"""'], {}), "('SSDBoxHead')\n", (290, 304), False, 'from models import registry\n'), ((471, 494), 'models.head.box_predictor.make_box_predictor', 'make_box_predictor', (['cfg'], {}), '(cfg)\n', (489, 494), False, 'from models.head.box_predictor import make_box_predictor\n'), ((1381, 1429), 'tensorflow.keras.activations.softmax', 'tf.keras.activations.softmax', (['cls_logits'], {'axis': '(2)'}), '(cls_logits, axis=2)\n', (1409, 1429), True, 'import tensorflow as tf\n'), ((1447, 1574), 'utils.box_utils.convert_locations_to_boxes', 'box_utils.convert_locations_to_boxes', (['bbox_pred', 'self.priors', 'self.cfg.MODEL.CENTER_VARIANCE', 'self.cfg.MODEL.SIZE_VARIANCE'], {}), '(bbox_pred, self.priors, self.cfg.MODEL\n .CENTER_VARIANCE, self.cfg.MODEL.SIZE_VARIANCE)\n', (1483, 1574), False, 'from utils import box_utils\n'), ((1588, 1631), 'utils.box_utils.center_form_to_corner_form', 'box_utils.center_form_to_corner_form', (['boxes'], {}), '(boxes)\n', (1624, 1631), False, 'from utils import box_utils\n'), ((1342, 1360), 'models.anchors.prior_box.PriorBox', 'PriorBox', (['self.cfg'], {}), '(self.cfg)\n', (1350, 1360), False, 'from models.anchors.prior_box import PriorBox\n')]
|
import pymysql
import sys
from mahjong.ai.comb.perm_comb_mahjong import PermCombMahjongGenerator
Tiles = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17]
db = pymysql.connect(host='127.0.0.1', user='root',
password='<PASSWORD>', db='mahjong', port=3306, charset='utf8')
cursor = db.cursor()
raw_sql = """INSERT INTO comb_chain(hands_comb,
search_chain)
VALUES ('{0}', '')
ON DUPLICATE KEY UPDATE search_chain = ''"""
comb_gen = PermCombMahjongGenerator(Tiles, 13, end_point=1, start_comb=[0, 0, 0, 0, 1, 1, 1, 1, 2, 3, 4, 5, 6])
comb = comb_gen.next()
i = 1
while comb is not None:
i += 1
if i % 1000 == 0:
print(comb)
comb_str = ""
comb_str_list = []
for tile in comb:
comb_str_list.append(tile.__str__()+",")
comb_str = ''.join(comb_str_list)
comb_str = comb_str[:-1]
s = raw_sql.format(comb_str)
try:
        # Execute the SQL statement
cursor.execute(s)
        # Commit the transaction to the database
db.commit()
except Exception:
        # Roll back if an error occurs
db.rollback()
print("wrong")
print(sys.exc_info()[0], sys.exc_info()[1])
comb = comb_gen.next()
|
[
"mahjong.ai.comb.perm_comb_mahjong.PermCombMahjongGenerator",
"pymysql.connect",
"sys.exc_info"
] |
[((183, 298), 'pymysql.connect', 'pymysql.connect', ([], {'host': '"""127.0.0.1"""', 'user': '"""root"""', 'password': '"""<PASSWORD>"""', 'db': '"""mahjong"""', 'port': '(3306)', 'charset': '"""utf8"""'}), "(host='127.0.0.1', user='root', password='<PASSWORD>', db=\n 'mahjong', port=3306, charset='utf8')\n", (198, 298), False, 'import pymysql\n'), ((507, 611), 'mahjong.ai.comb.perm_comb_mahjong.PermCombMahjongGenerator', 'PermCombMahjongGenerator', (['Tiles', '(13)'], {'end_point': '(1)', 'start_comb': '[0, 0, 0, 0, 1, 1, 1, 1, 2, 3, 4, 5, 6]'}), '(Tiles, 13, end_point=1, start_comb=[0, 0, 0, 0, 1,\n 1, 1, 1, 2, 3, 4, 5, 6])\n', (531, 611), False, 'from mahjong.ai.comb.perm_comb_mahjong import PermCombMahjongGenerator\n'), ((1146, 1160), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1158, 1160), False, 'import sys\n'), ((1165, 1179), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1177, 1179), False, 'import sys\n')]
|
#!/usr/bin/env python
"""Unprivileged memory RPC client code."""
import abc
from typing import TypeVar, Generic
from grr_response_client.unprivileged import communication
from grr_response_client.unprivileged.proto import memory_pb2
class ConnectionWrapper:
"""Wraps a connection, adding protobuf serialization of messages."""
def __init__(self, connection: communication.Connection):
self._connection = connection
def Send(self, request: memory_pb2.Request) -> None:
self._connection.Send(
communication.Message(request.SerializeToString(), b""))
def Recv(self) -> memory_pb2.Response:
raw_response, _ = self._connection.Recv()
response = memory_pb2.Response()
response.ParseFromString(raw_response)
return response
class Error(Exception):
"""Base class for exceptions in this module."""
pass
class OperationError(Error):
"""Error while executing the operation."""
def __init__(self, message: str, formatted_exception: str):
"""Constructor.
Args:
message: the exception message
formatted_exception: the remote exception formatted using
traceback.format_exc()
"""
super().__init__(message)
self.formatted_exception = formatted_exception
RequestType = TypeVar("RequestType")
ResponseType = TypeVar("ResponseType")
class OperationHandler(abc.ABC, Generic[RequestType, ResponseType]):
"""Base class for RPC handlers."""
def __init__(self, connection: ConnectionWrapper):
self._connection = connection
def Run(self, request: RequestType) -> ResponseType:
self._connection.Send(self.PackRequest(request))
packed_response = self._connection.Recv()
if packed_response.HasField("exception"):
raise OperationError(packed_response.exception.message,
packed_response.exception.formatted_exception)
else:
response = self.UnpackResponse(packed_response)
return response
@abc.abstractmethod
def UnpackResponse(self, response: memory_pb2.Response) -> ResponseType:
"""Extracts an inner Response message from a response message."""
pass
@abc.abstractmethod
def PackRequest(self, request: RequestType) -> memory_pb2.Request:
"""Packs an inner Request message into a request message."""
pass
class UploadSignatureHandler(
OperationHandler[memory_pb2.UploadSignatureRequest,
memory_pb2.UploadSignatureResponse]):
"""Implements the UploadSignature RPC."""
def UnpackResponse(
self,
response: memory_pb2.Response) -> memory_pb2.UploadSignatureResponse:
return response.upload_signature_response
def PackRequest(
self, request: memory_pb2.UploadSignatureRequest) -> memory_pb2.Request:
return memory_pb2.Request(upload_signature_request=request)
class ProcessScanHandler(OperationHandler[memory_pb2.ProcessScanRequest,
memory_pb2.ProcessScanResponse]):
"""Implements the ProcessScan RPC."""
def UnpackResponse(
self, response: memory_pb2.Response) -> memory_pb2.ProcessScanResponse:
return response.process_scan_response
def PackRequest(self,
request: memory_pb2.ProcessScanRequest) -> memory_pb2.Request:
return memory_pb2.Request(process_scan_request=request)
class Client:
"""Client for the RPC memory service."""
def __init__(self, connection: communication.Connection):
self._connection = ConnectionWrapper(connection)
def UploadSignature(self, yara_signature: str):
"""Uploads a yara signature to be used for this connection."""
request = memory_pb2.UploadSignatureRequest(yara_signature=yara_signature)
UploadSignatureHandler(self._connection).Run(request)
def ProcessScan(self, serialized_file_descriptor: int, offset: int, size: int,
timeout_seconds: int) -> memory_pb2.ProcessScanResponse:
"""Scans process memory.
Args:
serialized_file_descriptor: Serialized file descriptor for the process
memory. The file descriptor must be accessible by the server process.
offset: Offset in memory.
size: Size of memory to scan.
timeout_seconds: Timeout in seconds.
Returns:
A `ScanResult` proto.
"""
request = memory_pb2.ProcessScanRequest(
serialized_file_descriptor=serialized_file_descriptor,
offset=offset,
size=size,
timeout_seconds=timeout_seconds)
response = ProcessScanHandler(self._connection).Run(request)
return response
def CreateMemoryClient(connection: communication.Connection) -> Client:
"""Creates a memory client."""
return Client(connection)
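# --- Illustrative usage (assumption, not part of the original module) ---
# A minimal sketch of how the client defined above might be driven. The YARA rule
# text, file descriptor, sizes and timeout are placeholder values; only the call
# signatures shown in this module are relied upon.
def _example_scan(
    connection: communication.Connection) -> memory_pb2.ProcessScanResponse:
  """Illustrative only: uploads a trivial signature and scans one memory region."""
  client = CreateMemoryClient(connection)
  client.UploadSignature("rule Example { condition: true }")
  return client.ProcessScan(
      serialized_file_descriptor=0,  # placeholder; must be valid for the server process
      offset=0,
      size=4096,
      timeout_seconds=30)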
|
[
"grr_response_client.unprivileged.proto.memory_pb2.Response",
"grr_response_client.unprivileged.proto.memory_pb2.ProcessScanRequest",
"grr_response_client.unprivileged.proto.memory_pb2.UploadSignatureRequest",
"typing.TypeVar",
"grr_response_client.unprivileged.proto.memory_pb2.Request"
] |
[((1258, 1280), 'typing.TypeVar', 'TypeVar', (['"""RequestType"""'], {}), "('RequestType')\n", (1265, 1280), False, 'from typing import TypeVar, Generic\n'), ((1296, 1319), 'typing.TypeVar', 'TypeVar', (['"""ResponseType"""'], {}), "('ResponseType')\n", (1303, 1319), False, 'from typing import TypeVar, Generic\n'), ((679, 700), 'grr_response_client.unprivileged.proto.memory_pb2.Response', 'memory_pb2.Response', ([], {}), '()\n', (698, 700), False, 'from grr_response_client.unprivileged.proto import memory_pb2\n'), ((2741, 2793), 'grr_response_client.unprivileged.proto.memory_pb2.Request', 'memory_pb2.Request', ([], {'upload_signature_request': 'request'}), '(upload_signature_request=request)\n', (2759, 2793), False, 'from grr_response_client.unprivileged.proto import memory_pb2\n'), ((3245, 3293), 'grr_response_client.unprivileged.proto.memory_pb2.Request', 'memory_pb2.Request', ([], {'process_scan_request': 'request'}), '(process_scan_request=request)\n', (3263, 3293), False, 'from grr_response_client.unprivileged.proto import memory_pb2\n'), ((3599, 3663), 'grr_response_client.unprivileged.proto.memory_pb2.UploadSignatureRequest', 'memory_pb2.UploadSignatureRequest', ([], {'yara_signature': 'yara_signature'}), '(yara_signature=yara_signature)\n', (3632, 3663), False, 'from grr_response_client.unprivileged.proto import memory_pb2\n'), ((4249, 4402), 'grr_response_client.unprivileged.proto.memory_pb2.ProcessScanRequest', 'memory_pb2.ProcessScanRequest', ([], {'serialized_file_descriptor': 'serialized_file_descriptor', 'offset': 'offset', 'size': 'size', 'timeout_seconds': 'timeout_seconds'}), '(serialized_file_descriptor=\n serialized_file_descriptor, offset=offset, size=size, timeout_seconds=\n timeout_seconds)\n', (4278, 4402), False, 'from grr_response_client.unprivileged.proto import memory_pb2\n')]
|
from django.db import models
# Create your models here.
class Uber(models.Model):
source = models.CharField(max_length=150)
destination = models.CharField(max_length=150)
time = models.TimeField()
email = models.EmailField()
|
[
"django.db.models.CharField",
"django.db.models.TimeField",
"django.db.models.EmailField"
] |
[((97, 129), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(150)'}), '(max_length=150)\n', (113, 129), False, 'from django.db import models\n'), ((148, 180), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(150)'}), '(max_length=150)\n', (164, 180), False, 'from django.db import models\n'), ((192, 210), 'django.db.models.TimeField', 'models.TimeField', ([], {}), '()\n', (208, 210), False, 'from django.db import models\n'), ((223, 242), 'django.db.models.EmailField', 'models.EmailField', ([], {}), '()\n', (240, 242), False, 'from django.db import models\n')]
|
import random
import torch
import torch.nn as nn
import torch.nn.functional as F
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from utils import normalizeFeaturesL2
class SJE_GMPool(nn.Module):
def __init__(self, img_feature_size, num_attributes, margin):
super(SJE_GMPool, self).__init__()
self.margin = margin
# copying initialization technique from original code
W = torch.rand(img_feature_size, num_attributes, requires_grad=True)
W = normalizeFeaturesL2(W.permute(1,0)).permute(1,0)
self.W = nn.Parameter(W, requires_grad=True)
power = torch.zeros(num_attributes, requires_grad=True)
self.power = nn.Parameter(power, requires_grad=True)
self.example_indices = random.choices(range(1000), k=2) # this is a hack
def get_power(self):
c = float(10)
p = self.power * 3
power = torch.zeros_like(p)
power[p>=2] = c
power = torch.where((0<=p)&(p<2), (c-1)/2*p+1, power)
power = torch.where((-1<=p)&(p<0), 1/((1-c)*p+1), power)
power = torch.where((-1.5<=p)&(p<-1), -1/(2*(c-1)*(p+1.5)+1), power)
power = torch.where((-2<=p)&(p<-1.5), 2*(c-1)*(p+2)-c, power)
power[p<-2] = -c
assert torch.all(power != 0)
return power
def apply_gmpool(self, projected_feats):
'''
projected_feats: torch.Tensor of shape [B, num_attributes, H, W]
returns pooled features of shape [B, num_attributes]
'''
m = projected_feats.min()
p = self.get_power().view(1,-1,1,1)
if m < 0:
pooled = (projected_feats-m+1e-3).pow(p).mean(2, keepdim=True).mean(3, keepdim=True).pow(1/p)+m+1e-3
else:
pooled = projected_feats.pow(p).mean(2, keepdim=True).mean(3, keepdim=True).pow(1/p)
return pooled.squeeze(2).squeeze(2)
def forward(self, *args, **kwargs):
if self.training:
return self.forward_train(*args, **kwargs)
else:
return self.forward_test(*args, **kwargs)
def forward_train(self, img_features, all_class_attributes, class_attributes, labels):
'''
img_features: torch.Tensor of shape [B, img_feature_size, H, W]
class_attributes: torch.Tensor of shape [B, num_attributes]
labels: torch.Tensor of shape [B]
all_class_attributes: torch.Tensor of shape [num_attributes, num_classes]
returns scalar loss
'''
XW = torch.tensordot(img_features, self.W, [[1],[0]]).permute(0,3,1,2) # shape [B, num_attributes, H, W]
XW = self.apply_gmpool(XW) # shape [B, num_attributes]
if torch.any(XW.isnan()):
print("YIKES")
XW = normalizeFeaturesL2(XW) # normalize each projected vector to have unit length
scores = torch.matmul(XW.unsqueeze(1), all_class_attributes).squeeze(1) # shape [B, num_classes]
gt_class_scores = scores[torch.arange(len(scores)), labels].unsqueeze(1) # shape [B, 1]
# add margin to scores
losses = self.margin + scores - gt_class_scores # shape [B, num_classes]
losses[torch.arange(len(losses)), labels] = 0.0
losses = losses.max(dim=1)[0] # shape [B]
return losses.clamp(0).mean()
def forward_test(self, img_features, all_class_attributes):
XW = torch.tensordot(img_features, self.W, [[1],[0]]).permute(0,3,1,2) # shape [B, num_attributes, H, W]
XW = self.apply_gmpool(XW) # shape [B, num_attributes]
if torch.any(XW.isnan()):
print("YIKES")
XW = normalizeFeaturesL2(XW) # normalize each projected vector to have unit length
scores = torch.matmul(XW.unsqueeze(1), all_class_attributes).squeeze(1) # shape [B, num_classes]
return scores.argmax(1) # shape [B]
def log_spatial_examples(self, dataloader, device, writer, split, epoch):
dataset = dataloader.dataset
self.eval()
classes = dataset.classes
for i, idx in enumerate(self.example_indices):
# unpack data
data = dataset[idx]
img_features = data['img'].to(device).unsqueeze(0)
gt_label = classes[data['label']]
all_class_attributes = dataset.class_attributes
gt_class_attributes = all_class_attributes[:,data['label']]
img = mpimg.imread(dataset.get_img_path(idx))
# forward pass
XW = torch.tensordot(img_features, self.W, [[1],[0]]).permute(0,3,1,2).squeeze() # shape [num_attributes, H, W]
for spatial_dist, gt_attribute_score, attribute_name in zip(XW, gt_class_attributes, dataset.attributes):
fig, (ax1,ax2) = plt.subplots(nrows=1,ncols=2)
ax1.set_title(f"Attribute: {attribute_name}\nGT Attribute Value: {gt_attribute_score:.4f}")
mappable = ax1.imshow(spatial_dist.cpu().detach().numpy(), vmin=-0.25, vmax=0.25)
fig.colorbar(mappable, ax=ax1)
ax2.set_title(f"Original Image({gt_label})")
ax2.imshow(img)
plt.tight_layout()
writer.add_figure(f"Spatial Examples ({split})/{attribute_name}-{i}", fig, epoch)
plt.close(fig)
|
[
"torch.nn.Parameter",
"matplotlib.pyplot.tight_layout",
"torch.zeros_like",
"torch.where",
"matplotlib.pyplot.close",
"matplotlib.pyplot.subplots",
"utils.normalizeFeaturesL2",
"torch.rand",
"torch.zeros",
"torch.tensordot",
"torch.all"
] |
[((430, 494), 'torch.rand', 'torch.rand', (['img_feature_size', 'num_attributes'], {'requires_grad': '(True)'}), '(img_feature_size, num_attributes, requires_grad=True)\n', (440, 494), False, 'import torch\n'), ((573, 608), 'torch.nn.Parameter', 'nn.Parameter', (['W'], {'requires_grad': '(True)'}), '(W, requires_grad=True)\n', (585, 608), True, 'import torch.nn as nn\n'), ((626, 673), 'torch.zeros', 'torch.zeros', (['num_attributes'], {'requires_grad': '(True)'}), '(num_attributes, requires_grad=True)\n', (637, 673), False, 'import torch\n'), ((695, 734), 'torch.nn.Parameter', 'nn.Parameter', (['power'], {'requires_grad': '(True)'}), '(power, requires_grad=True)\n', (707, 734), True, 'import torch.nn as nn\n'), ((907, 926), 'torch.zeros_like', 'torch.zeros_like', (['p'], {}), '(p)\n', (923, 926), False, 'import torch\n'), ((967, 1026), 'torch.where', 'torch.where', (['((0 <= p) & (p < 2))', '((c - 1) / 2 * p + 1)', 'power'], {}), '((0 <= p) & (p < 2), (c - 1) / 2 * p + 1, power)\n', (978, 1026), False, 'import torch\n'), ((1029, 1091), 'torch.where', 'torch.where', (['((-1 <= p) & (p < 0))', '(1 / ((1 - c) * p + 1))', 'power'], {}), '((-1 <= p) & (p < 0), 1 / ((1 - c) * p + 1), power)\n', (1040, 1091), False, 'import torch\n'), ((1094, 1172), 'torch.where', 'torch.where', (['((-1.5 <= p) & (p < -1))', '(-1 / (2 * (c - 1) * (p + 1.5) + 1))', 'power'], {}), '((-1.5 <= p) & (p < -1), -1 / (2 * (c - 1) * (p + 1.5) + 1), power)\n', (1105, 1172), False, 'import torch\n'), ((1171, 1240), 'torch.where', 'torch.where', (['((-2 <= p) & (p < -1.5))', '(2 * (c - 1) * (p + 2) - c)', 'power'], {}), '((-2 <= p) & (p < -1.5), 2 * (c - 1) * (p + 2) - c, power)\n', (1182, 1240), False, 'import torch\n'), ((1265, 1286), 'torch.all', 'torch.all', (['(power != 0)'], {}), '(power != 0)\n', (1274, 1286), False, 'import torch\n'), ((2724, 2747), 'utils.normalizeFeaturesL2', 'normalizeFeaturesL2', (['XW'], {}), '(XW)\n', (2743, 2747), False, 'from utils import normalizeFeaturesL2\n'), ((3574, 3597), 'utils.normalizeFeaturesL2', 'normalizeFeaturesL2', (['XW'], {}), '(XW)\n', (3593, 3597), False, 'from utils import normalizeFeaturesL2\n'), ((2487, 2536), 'torch.tensordot', 'torch.tensordot', (['img_features', 'self.W', '[[1], [0]]'], {}), '(img_features, self.W, [[1], [0]])\n', (2502, 2536), False, 'import torch\n'), ((3337, 3386), 'torch.tensordot', 'torch.tensordot', (['img_features', 'self.W', '[[1], [0]]'], {}), '(img_features, self.W, [[1], [0]])\n', (3352, 3386), False, 'import torch\n'), ((4687, 4717), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(2)'}), '(nrows=1, ncols=2)\n', (4699, 4717), True, 'import matplotlib.pyplot as plt\n'), ((5079, 5097), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (5095, 5097), True, 'import matplotlib.pyplot as plt\n'), ((5212, 5226), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (5221, 5226), True, 'import matplotlib.pyplot as plt\n'), ((4428, 4477), 'torch.tensordot', 'torch.tensordot', (['img_features', 'self.W', '[[1], [0]]'], {}), '(img_features, self.W, [[1], [0]])\n', (4443, 4477), False, 'import torch\n')]
|
"""UIModules for the error pages plugin"""
import oz
import base64
import pprint
import oz.error_pages
import tornado.web
import tornado.escape
TABLE_FORMAT = """
<table %s %s>
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
%s
</tbody>
</table>
"""
TABLE_ROW_FORMAT = """
<tr>
<td>%s %s</td>
<td class="code">%s</td>
</tr>
"""
@oz.uimodule
class DictTable(tornado.web.UIModule):
"""Renders an HTML table from a dict"""
def render(self, d, id=None, kls=None):
items = sorted(d.items())
if items:
rows = []
for k, v in items:
try:
escaped_val = tornado.escape.xhtml_escape(oz.error_pages.prettify_object(v))
rows.append(TABLE_ROW_FORMAT % (k, "", escaped_val))
except UnicodeDecodeError:
rows.append(TABLE_ROW_FORMAT % (k, "(in base64)", base64.b64encode(v)))
            return TABLE_FORMAT % ("id='%s'" % id if id else "", "class='%s'" % kls if kls else "", "\n".join(rows))
else:
return "<p>No data</p>"
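# --- Illustrative usage (assumption, not from the original project) ---
# In a Tornado template this UI module would typically be invoked as
#   {% module DictTable(my_dict, id="env-table", kls="wide") %}
# assuming the @oz.uimodule decorator registers the class in the application's
# ui_modules under its own name.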
|
[
"oz.error_pages.prettify_object",
"base64.b64encode"
] |
[((756, 789), 'oz.error_pages.prettify_object', 'oz.error_pages.prettify_object', (['v'], {}), '(v)\n', (786, 789), False, 'import oz\n'), ((977, 996), 'base64.b64encode', 'base64.b64encode', (['v'], {}), '(v)\n', (993, 996), False, 'import base64\n')]
|
from cryptography.fernet import Fernet
class Key_generate:
def write_key():
"""
Generates a key and save it into a file
"""
key = Fernet.generate_key()
with open("key.key", "wb") as key_file:
key_file.write(key)
key = open("key.key", "rb").read()
print(key)
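# --- Illustrative usage (assumption, not part of the original class) ---
# A minimal sketch of how the generated key could be used for symmetric
# encryption/decryption with Fernet; the helper name and message are placeholders.
def load_key():
    """Loads the previously written key from key.key."""
    return open("key.key", "rb").read()
# fernet = Fernet(load_key())
# token = fernet.encrypt(b"secret message")  # bytes in, opaque token out
# plain = fernet.decrypt(token)              # round-trips back to b"secret message"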
|
[
"cryptography.fernet.Fernet.generate_key"
] |
[((165, 186), 'cryptography.fernet.Fernet.generate_key', 'Fernet.generate_key', ([], {}), '()\n', (184, 186), False, 'from cryptography.fernet import Fernet\n')]
|
#!/usr/bin/env python
"""Tests the tdb data store - in memory implementation."""
import shutil
# pylint: disable=unused-import,g-bad-import-order
from grr.lib import server_plugins
# pylint: enable=unused-import,g-bad-import-order
from grr.lib import access_control
from grr.lib import config_lib
from grr.lib import data_store
from grr.lib import data_store_test
from grr.lib import flags
from grr.lib import test_lib
from grr.lib.data_stores import tdb_data_store
# pylint: mode=test
class TDBTestMixin(object):
def InitDatastore(self):
self.token = access_control.ACLToken(username="test",
reason="Running tests")
config_lib.CONFIG.Set("Datastore.location", "%s/tdb_test/" % self.temp_dir)
self.DestroyDatastore()
data_store.DB = tdb_data_store.TDBDataStore()
data_store.DB.security_manager = test_lib.MockSecurityManager()
def testCorrectDataStore(self):
self.assertTrue(isinstance(data_store.DB, tdb_data_store.TDBDataStore))
def DestroyDatastore(self):
try:
shutil.rmtree(config_lib.CONFIG.Get("Datastore.location"))
except (OSError, IOError):
pass
class TDBDataStoreTest(TDBTestMixin, data_store_test._DataStoreTest):
"""Test the tdb data store."""
class TDBDataStoreBenchmarks(TDBTestMixin,
data_store_test.DataStoreBenchmarks):
"""Benchmark the TDB data store abstraction."""
class TDBDataStoreCSVBenchmarks(TDBTestMixin,
data_store_test.DataStoreCSVBenchmarks):
"""Benchmark the TDB data store abstraction."""
def main(args):
test_lib.main(args)
if __name__ == "__main__":
flags.StartMain(main)
|
[
"grr.lib.flags.StartMain",
"grr.lib.config_lib.CONFIG.Set",
"grr.lib.config_lib.CONFIG.Get",
"grr.lib.test_lib.main",
"grr.lib.data_stores.tdb_data_store.TDBDataStore",
"grr.lib.access_control.ACLToken",
"grr.lib.test_lib.MockSecurityManager"
] |
[((1616, 1635), 'grr.lib.test_lib.main', 'test_lib.main', (['args'], {}), '(args)\n', (1629, 1635), False, 'from grr.lib import test_lib\n'), ((1666, 1687), 'grr.lib.flags.StartMain', 'flags.StartMain', (['main'], {}), '(main)\n', (1681, 1687), False, 'from grr.lib import flags\n'), ((566, 630), 'grr.lib.access_control.ACLToken', 'access_control.ACLToken', ([], {'username': '"""test"""', 'reason': '"""Running tests"""'}), "(username='test', reason='Running tests')\n", (589, 630), False, 'from grr.lib import access_control\n'), ((676, 751), 'grr.lib.config_lib.CONFIG.Set', 'config_lib.CONFIG.Set', (['"""Datastore.location"""', "('%s/tdb_test/' % self.temp_dir)"], {}), "('Datastore.location', '%s/tdb_test/' % self.temp_dir)\n", (697, 751), False, 'from grr.lib import config_lib\n'), ((802, 831), 'grr.lib.data_stores.tdb_data_store.TDBDataStore', 'tdb_data_store.TDBDataStore', ([], {}), '()\n', (829, 831), False, 'from grr.lib.data_stores import tdb_data_store\n'), ((869, 899), 'grr.lib.test_lib.MockSecurityManager', 'test_lib.MockSecurityManager', ([], {}), '()\n', (897, 899), False, 'from grr.lib import test_lib\n'), ((1071, 1114), 'grr.lib.config_lib.CONFIG.Get', 'config_lib.CONFIG.Get', (['"""Datastore.location"""'], {}), "('Datastore.location')\n", (1092, 1114), False, 'from grr.lib import config_lib\n')]
|
import subprocess
from ExperimentRunner.Script import Script
class MonkeyRunner(Script):
"""
Subclass of `Script` for running MonkeyRunner scripts directly.
As opposed to `MonkeyReplay`, it runs the scripts directly using MonkeyRunner.
Thanks to that it's not necessary to go through a layer of indirection in the
form of JSON files and a custom runner. This results in higher flexibility and
greater control.
Usage:
1. Create a script runnable by MonkeyRunner.
2. Add it to the config file with the type "monkeyrunner".
Important!
The script has to be directly runnable by MonkeyRunner. It means that:
- it has to create an instance of `MonkeyDevice` explicitly in the script,
- all operations are supposed to be invoked on this instance,
- there has to be module-level code running the operations,
- it has to follow any other restrictions specified in the docs.
Docs and examples: https://developer.android.com/studio/test/monkeyrunner/
"""
def __init__(self, path, timeout=0, logcat_regex=None, monkeyrunner_path='monkeyrunner'):
super(MonkeyRunner, self).__init__(path, timeout, logcat_regex)
self.monkeyrunner_path = monkeyrunner_path
def execute_script(self, device, *args, **kwargs):
"""
Run the MonkeyRunner script.
Returns the return value returned by MonkeyRunner.
"""
super(MonkeyRunner, self).execute_script(device, *args, **kwargs)
return subprocess.call([self.monkeyrunner_path, self.path])
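# Hedged usage sketch (the script path and timeout below are made-up examples,
# not taken from any config in this repo):
#   script = MonkeyRunner('scripts/login_flow.py', timeout=120,
#                         monkeyrunner_path='monkeyrunner')
#   return_code = script.execute_script(device)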
|
[
"subprocess.call"
] |
[((1503, 1555), 'subprocess.call', 'subprocess.call', (['[self.monkeyrunner_path, self.path]'], {}), '([self.monkeyrunner_path, self.path])\n', (1518, 1555), False, 'import subprocess\n')]
|
from django.contrib import admin
from .models import PhotoRecord, KeyValueRecord
admin.site.register(PhotoRecord)
admin.site.register(KeyValueRecord)
|
[
"django.contrib.admin.site.register"
] |
[((83, 115), 'django.contrib.admin.site.register', 'admin.site.register', (['PhotoRecord'], {}), '(PhotoRecord)\n', (102, 115), False, 'from django.contrib import admin\n'), ((116, 151), 'django.contrib.admin.site.register', 'admin.site.register', (['KeyValueRecord'], {}), '(KeyValueRecord)\n', (135, 151), False, 'from django.contrib import admin\n')]
|
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from orders.models import RaceOrder, LineFollowerRaceOrder
class Command(BaseCommand):
args = '<day>'
help = 'Deletes line follower race orders of the specified day.'
def handle(self, *args, **options):
try:
day = int(args[0])
except IndexError:
raise CommandError('Please specify a day.')
if day < 1 or day > 2:
raise CommandError('Day interval is 1 <= day <= 2.')
LineFollowerRaceOrder.objects.filter(stage__order=day).delete()
self.stdout.write(
"Line follower race orders day #{} deleted.".format(day))
|
[
"django.core.management.base.CommandError",
"orders.models.LineFollowerRaceOrder.objects.filter"
] |
[((494, 540), 'django.core.management.base.CommandError', 'CommandError', (['"""Day interval is 1 <= day <= 2."""'], {}), "('Day interval is 1 <= day <= 2.')\n", (506, 540), False, 'from django.core.management.base import BaseCommand, CommandError\n'), ((406, 443), 'django.core.management.base.CommandError', 'CommandError', (['"""Please specify a day."""'], {}), "('Please specify a day.')\n", (418, 443), False, 'from django.core.management.base import BaseCommand, CommandError\n'), ((550, 604), 'orders.models.LineFollowerRaceOrder.objects.filter', 'LineFollowerRaceOrder.objects.filter', ([], {'stage__order': 'day'}), '(stage__order=day)\n', (586, 604), False, 'from orders.models import RaceOrder, LineFollowerRaceOrder\n')]
|
# SPDX-License-Identifier: Apache-2.0
import bpy
import math
import os
import time
from xrs import automate as xra
## Deprecated - used to render asset submissions
# rename
xra.log_info('Rendering Asset Master Image from angle')
arguments = xra.get_command_line_arguments()
working_dir = arguments[0]
asset_name = arguments[1]
submission_id = arguments[2]
theta = float(arguments[3])
phi = float(arguments[4])
radius = float(arguments[5])
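# theta and phi are expected in radians (math.degrees is applied below when
# building the shot name); radius only appears in the log message that follows.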
asset_blend = working_dir + asset_name + '.blend'
# Save a copy for testing purposes. Can be removed in the future.
# TODO: Remove this when it is not needed anymore
xra.save_as(working_dir, 'master_image_from_angle')
xra.log_info('Linking asset from ' + asset_blend + ' with camera rotation ' + str(theta) + ', ' + str(phi) + ', ' + str(radius))
xra.append_collection(asset_blend, "master")
if ("master" not in bpy.data.collections):
# Exit if the collection couldn't be loaded
xra.quit_with_error("Unable to load master collection")
xra.log_info('Setting Render Engine to Cycles')
xra.set_renderer_to_cycles(64) #TODO: experiment with this number
xra.set_render_resolution(2048, 2048)
xra.log_info('Rendering device: ' + bpy.context.scene.cycles.device)
bpy.context.scene.render.film_transparent = True
# Don't render the backdrop
if "Backdrop" in bpy.data.objects:
bpy.data.objects["Backdrop"].cycles.is_holdout = True
# when camera is under, backface culling is used instead of holdout
if (phi > math.pi):
bpy.data.objects["Floor"].cycles.is_holdout = True
else:
bpy.data.objects["Floor"].cycles.is_holdout = False
class shotClass:
def __init__(self, name, x, z):
self.name = name
self.x = x
self.z = z
orbitX = str(math.floor(math.degrees(theta)))
orbitY = str(math.floor(math.degrees(phi)))
shot = shotClass(("-" + orbitX + "_" + orbitY), theta, phi)
# Camera
bpy.ops.object.camera_add()
bpy.context.scene.camera = bpy.context.active_object
# Join all objects in the master collection into a single object
# this is for camera scaling purposes
xra.join_collection_objects_into_one("master")
# Make sure that it is not hidden from the render (TODO: add to validation)
bpy.data.collections["master"].objects[0].hide_render = False
# Rotate the object and angle the camera (vertically only)
xra.rotate_object_and_angle_camera(
bpy.context.scene.camera,
bpy.data.collections["master"].objects[0],
shot.x,
shot.z
)
# Render Image
xra.log_info('Starting Render')
timer = time.time()
bpy.ops.render.render()
# Image Save Location
xra.log_info('Setting Image Save Location')
# TODO: pass the filename from rabbit
bpy.context.scene.render.filepath = working_dir + asset_name + shot.name + ".png"
bpy.context.scene.render.image_settings.file_format = "PNG"
xra.log_info(bpy.context.scene.render.filepath)
# Save Image
bpy.data.images["Render Result"].save_render(filepath=bpy.context.scene.render.filepath)
xra.log_info(shot.name + " Render Time: " + str(time.time() - timer) + " seconds")
if xra.record_asset_submission_render(submission_id, (asset_name + shot.name + ".png")) == False:
xra.log_error("Unable to record renders on 3xr.com")
# Save again with all of the changes
# TODO: remove this when no longer needed
xra.save_as(working_dir, 'master_image_from_angle')
|
[
"xrs.automate.rotate_object_and_angle_camera",
"xrs.automate.join_collection_objects_into_one",
"xrs.automate.set_render_resolution",
"bpy.ops.object.camera_add",
"math.degrees",
"xrs.automate.append_collection",
"time.time",
"xrs.automate.quit_with_error",
"xrs.automate.log_error",
"bpy.ops.render.render",
"xrs.automate.record_asset_submission_render",
"xrs.automate.set_renderer_to_cycles",
"xrs.automate.log_info",
"xrs.automate.save_as",
"xrs.automate.get_command_line_arguments"
] |
[((175, 230), 'xrs.automate.log_info', 'xra.log_info', (['"""Rendering Asset Master Image from angle"""'], {}), "('Rendering Asset Master Image from angle')\n", (187, 230), True, 'from xrs import automate as xra\n'), ((243, 275), 'xrs.automate.get_command_line_arguments', 'xra.get_command_line_arguments', ([], {}), '()\n', (273, 275), True, 'from xrs import automate as xra\n'), ((608, 659), 'xrs.automate.save_as', 'xra.save_as', (['working_dir', '"""master_image_from_angle"""'], {}), "(working_dir, 'master_image_from_angle')\n", (619, 659), True, 'from xrs import automate as xra\n'), ((790, 834), 'xrs.automate.append_collection', 'xra.append_collection', (['asset_blend', '"""master"""'], {}), "(asset_blend, 'master')\n", (811, 834), True, 'from xrs import automate as xra\n'), ((983, 1030), 'xrs.automate.log_info', 'xra.log_info', (['"""Setting Render Engine to Cycles"""'], {}), "('Setting Render Engine to Cycles')\n", (995, 1030), True, 'from xrs import automate as xra\n'), ((1031, 1061), 'xrs.automate.set_renderer_to_cycles', 'xra.set_renderer_to_cycles', (['(64)'], {}), '(64)\n', (1057, 1061), True, 'from xrs import automate as xra\n'), ((1097, 1134), 'xrs.automate.set_render_resolution', 'xra.set_render_resolution', (['(2048)', '(2048)'], {}), '(2048, 2048)\n', (1122, 1134), True, 'from xrs import automate as xra\n'), ((1135, 1203), 'xrs.automate.log_info', 'xra.log_info', (["('Rendering device: ' + bpy.context.scene.cycles.device)"], {}), "('Rendering device: ' + bpy.context.scene.cycles.device)\n", (1147, 1203), True, 'from xrs import automate as xra\n'), ((1848, 1875), 'bpy.ops.object.camera_add', 'bpy.ops.object.camera_add', ([], {}), '()\n', (1873, 1875), False, 'import bpy\n'), ((2033, 2079), 'xrs.automate.join_collection_objects_into_one', 'xra.join_collection_objects_into_one', (['"""master"""'], {}), "('master')\n", (2069, 2079), True, 'from xrs import automate as xra\n'), ((2279, 2403), 'xrs.automate.rotate_object_and_angle_camera', 'xra.rotate_object_and_angle_camera', (['bpy.context.scene.camera', "bpy.data.collections['master'].objects[0]", 'shot.x', 'shot.z'], {}), "(bpy.context.scene.camera, bpy.data.\n collections['master'].objects[0], shot.x, shot.z)\n", (2313, 2403), True, 'from xrs import automate as xra\n'), ((2425, 2456), 'xrs.automate.log_info', 'xra.log_info', (['"""Starting Render"""'], {}), "('Starting Render')\n", (2437, 2456), True, 'from xrs import automate as xra\n'), ((2465, 2476), 'time.time', 'time.time', ([], {}), '()\n', (2474, 2476), False, 'import time\n'), ((2477, 2500), 'bpy.ops.render.render', 'bpy.ops.render.render', ([], {}), '()\n', (2498, 2500), False, 'import bpy\n'), ((2524, 2567), 'xrs.automate.log_info', 'xra.log_info', (['"""Setting Image Save Location"""'], {}), "('Setting Image Save Location')\n", (2536, 2567), True, 'from xrs import automate as xra\n'), ((2748, 2795), 'xrs.automate.log_info', 'xra.log_info', (['bpy.context.scene.render.filepath'], {}), '(bpy.context.scene.render.filepath)\n', (2760, 2795), True, 'from xrs import automate as xra\n'), ((3216, 3267), 'xrs.automate.save_as', 'xra.save_as', (['working_dir', '"""master_image_from_angle"""'], {}), "(working_dir, 'master_image_from_angle')\n", (3227, 3267), True, 'from xrs import automate as xra\n'), ((926, 981), 'xrs.automate.quit_with_error', 'xra.quit_with_error', (['"""Unable to load master collection"""'], {}), "('Unable to load master collection')\n", (945, 981), True, 'from xrs import automate as xra\n'), ((2986, 3072), 'xrs.automate.record_asset_submission_render', 
'xra.record_asset_submission_render', (['submission_id', "(asset_name + shot.name + '.png')"], {}), "(submission_id, asset_name + shot.name +\n '.png')\n", (3020, 3072), True, 'from xrs import automate as xra\n'), ((3083, 3135), 'xrs.automate.log_error', 'xra.log_error', (['"""Unable to record renders on 3xr.com"""'], {}), "('Unable to record renders on 3xr.com')\n", (3096, 3135), True, 'from xrs import automate as xra\n'), ((1712, 1731), 'math.degrees', 'math.degrees', (['theta'], {}), '(theta)\n', (1724, 1731), False, 'import math\n'), ((1758, 1775), 'math.degrees', 'math.degrees', (['phi'], {}), '(phi)\n', (1770, 1775), False, 'import math\n'), ((2947, 2958), 'time.time', 'time.time', ([], {}), '()\n', (2956, 2958), False, 'import time\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import ovirtsdk4 as sdk
import ovirtsdk4.types as types
logging.basicConfig(level=logging.DEBUG, filename='example.log')
# This example will connect to the server and add a network interface
# card to an existing virtual machine.
# Create the connection to the server:
connection = sdk.Connection(
url='https://engine40.example.com/ovirt-engine/api',
username='admin@internal',
password='<PASSWORD>',
ca_file='ca.pem',
debug=True,
log=logging.getLogger(),
)
# Locate the virtual machines service and use it to find the virtual
# machine:
system_service = connection.system_service()
vms_service = system_service.vms_service()
vm = vms_service.list(search='name=myvm')[0]
# In order to specify the network that the new interface will be
# connected to we need to specify the identifier of the virtual network
# interface profile, so we need to find it. We can have duplicate names
# for vnic profiles in different clusters, so we must also find first the
# network by datacenter and cluster:
cluster = system_service.clusters_service().cluster_service(vm.cluster.id).get()
dcs_service = connection.system_service().data_centers_service()
dc = dcs_service.list(search='Clusters.name=%s' % cluster.name)[0]
networks_service = dcs_service.service(dc.id).networks_service()
network = next(
(n for n in networks_service.list()
if n.name == 'mynetwork'),
None
)
profiles_service = connection.system_service().vnic_profiles_service()
profile_id = None
for profile in profiles_service.list():
if profile.name == 'mynetwork':
profile_id = profile.id
break
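# Note: the network looked up above is not actually used when picking the
# profile; to fully disambiguate duplicate profile names one could also
# compare profile.network.id with network.id (assuming profile.network is a
# link object carrying the network id, as in other oVirt SDK examples).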
# Locate the service that manages the network interface cards of the
# virtual machine:
nics_service = vms_service.vm_service(vm.id).nics_service()
# Use the "add" method of the network interface cards service to add the
# new network interface card:
nics_service.add(
types.Nic(
name='mynic',
description='My network interface card',
vnic_profile=types.VnicProfile(
id=profile_id,
),
),
)
# Close the connection to the server:
connection.close()
|
[
"logging.getLogger",
"logging.basicConfig",
"ovirtsdk4.types.VnicProfile"
] |
[((704, 768), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'filename': '"""example.log"""'}), "(level=logging.DEBUG, filename='example.log')\n", (723, 768), False, 'import logging\n'), ((1109, 1128), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (1126, 1128), False, 'import logging\n'), ((2633, 2665), 'ovirtsdk4.types.VnicProfile', 'types.VnicProfile', ([], {'id': 'profile_id'}), '(id=profile_id)\n', (2650, 2665), True, 'import ovirtsdk4.types as types\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from rawdisk.util.filetimes import dt_to_filetime, filetime_to_dt, UTC, ZERO
from datetime import datetime
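# FILETIME values count 100-nanosecond intervals since 1601-01-01 00:00 UTC:
# 116444736000000000 (= 11644473600 s * 10**7) is the Unix epoch, and
# 128930364000000000 corresponds to 2009-07-25 23:00:00 UTC.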
class TestFiletimesModule(unittest.TestCase):
def test_dt_to_filetime(self):
value = datetime(2009, 7, 25, 23, 0)
self.assertEqual(128930364000000000, dt_to_filetime(value))
def test_filetime_to_dt(self):
value = 116444736000000000
self.assertEqual(datetime(1970, 1, 1, 0, 0), filetime_to_dt(value))
def test_utc(self):
utc = UTC()
self.assertEqual(utc.tzname(None), "UTC")
self.assertEqual(utc.utcoffset(None), ZERO)
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"rawdisk.util.filetimes.dt_to_filetime",
"datetime.datetime",
"rawdisk.util.filetimes.filetime_to_dt",
"rawdisk.util.filetimes.UTC"
] |
[((694, 709), 'unittest.main', 'unittest.main', ([], {}), '()\n', (707, 709), False, 'import unittest\n'), ((269, 297), 'datetime.datetime', 'datetime', (['(2009)', '(7)', '(25)', '(23)', '(0)'], {}), '(2009, 7, 25, 23, 0)\n', (277, 297), False, 'from datetime import datetime\n'), ((552, 557), 'rawdisk.util.filetimes.UTC', 'UTC', ([], {}), '()\n', (555, 557), False, 'from rawdisk.util.filetimes import dt_to_filetime, filetime_to_dt, UTC, ZERO\n'), ((343, 364), 'rawdisk.util.filetimes.dt_to_filetime', 'dt_to_filetime', (['value'], {}), '(value)\n', (357, 364), False, 'from rawdisk.util.filetimes import dt_to_filetime, filetime_to_dt, UTC, ZERO\n'), ((462, 488), 'datetime.datetime', 'datetime', (['(1970)', '(1)', '(1)', '(0)', '(0)'], {}), '(1970, 1, 1, 0, 0)\n', (470, 488), False, 'from datetime import datetime\n'), ((490, 511), 'rawdisk.util.filetimes.filetime_to_dt', 'filetime_to_dt', (['value'], {}), '(value)\n', (504, 511), False, 'from rawdisk.util.filetimes import dt_to_filetime, filetime_to_dt, UTC, ZERO\n')]
|
"""update, add constraints
Revision ID: ccc37f794db6
Revises: <PASSWORD>
Create Date: 2020-05-15 14:02:21.163220
"""
from datetime import datetime
from uuid import uuid4
from alembic import op
from geoalchemy2 import Geometry
from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String
from sqlalchemy.dialects.sqlite import REAL, TIMESTAMP
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm import ( # used to defer fetching attributes unless it's specifically called
declarative_base,
declared_attr,
deferred,
relationship,
)
from sqlalchemy.sql.schema import CheckConstraint
from pepys_import.core.store import constants
from pepys_import.core.store.common_db import (
ActivationMixin,
CommentMixin,
ContactMixin,
DatafileMixin,
ElevationPropertyMixin,
GeometryMixin,
HostedByMixin,
LocationPropertyMixin,
LogMixin,
LogsHoldingMixin,
MediaMixin,
PlatformMixin,
ReferenceDefaultFields,
ReferenceRepr,
SensorMixin,
StateMixin,
TaggedItemMixin,
)
from pepys_import.core.store.db_base import sqlite_naming_convention
from pepys_import.core.store.db_status import TableTypes
from pepys_import.utils.sqlalchemy_utils import UUIDType
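# The model classes below are redeclared locally (rather than imported from
# pepys_import.core.store), which lets this revision keep its own snapshot of
# the schema independent of the current models; downgrade() hands their
# __table__ objects to op.batch_alter_table(..., copy_from=...) when
# recreating the SQLite tables.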
Metadata = MetaData(naming_convention=sqlite_naming_convention)
BaseSpatiaLite = declarative_base(metadata=Metadata)
class ClassificationType(BaseSpatiaLite):
__tablename__ = constants.CLASSIFICATION_TYPE
table_type = TableTypes.REFERENCE
table_type_id = 19
class_type_id = Column(UUIDType, primary_key=True, default=uuid4)
class_type = Column(String(150), nullable=False, unique=True)
created_date = Column(DateTime, default=datetime.utcnow)
class ContactType(BaseSpatiaLite):
__tablename__ = constants.CONTACT_TYPE
table_type = TableTypes.REFERENCE
table_type_id = 20
contact_type_id = Column(UUIDType, primary_key=True, default=uuid4)
contact_type = Column(String(150), nullable=False, unique=True)
created_date = Column(DateTime, default=datetime.utcnow)
class ConfidenceLevel(BaseSpatiaLite):
__tablename__ = constants.CONFIDENCE_LEVEL
table_type = TableTypes.REFERENCE
table_type_id = 27
confidence_level_id = Column(UUIDType, primary_key=True, default=uuid4)
level = Column(String(150), nullable=False, unique=True)
created_date = Column(DateTime, default=datetime.utcnow)
class Task(BaseSpatiaLite):
__tablename__ = constants.TASK
table_type = TableTypes.METADATA
table_type_id = 4
task_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False)
parent_id = Column(UUIDType, ForeignKey("Tasks.task_id"), nullable=False)
start = Column(TIMESTAMP, nullable=False)
end = Column(TIMESTAMP, nullable=False)
environment = deferred(Column(String(150)))
location = deferred(Column(String(150)))
privacy_id = Column(UUIDType, ForeignKey("Privacies.privacy_id"), nullable=False)
created_date = Column(DateTime, default=datetime.utcnow)
class CommentType(BaseSpatiaLite):
__tablename__ = constants.COMMENT_TYPE
table_type = TableTypes.REFERENCE
table_type_id = 25
comment_type_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False, unique=True)
created_date = Column(DateTime, default=datetime.utcnow)
class MediaType(BaseSpatiaLite):
__tablename__ = constants.MEDIA_TYPE
table_type = TableTypes.REFERENCE
table_type_id = 24
media_type_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False, unique=True)
created_date = Column(DateTime, default=datetime.utcnow)
class Privacy(BaseSpatiaLite):
__tablename__ = constants.PRIVACY
table_type = TableTypes.REFERENCE
table_type_id = 22
privacy_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False, unique=True)
level = Column(Integer, nullable=False)
created_date = Column(DateTime, default=datetime.utcnow)
class State(BaseSpatiaLite, StateMixin, ElevationPropertyMixin, LocationPropertyMixin):
__tablename__ = constants.STATE
table_type = TableTypes.MEASUREMENT
table_type_id = 28
state_id = Column(UUIDType, primary_key=True, default=uuid4)
time = Column(TIMESTAMP, nullable=False)
sensor_id = Column(UUIDType, ForeignKey("Sensors.sensor_id"), nullable=False)
_location = deferred(
Column(
"location",
Geometry(geometry_type="POINT", srid=4326, management=True, spatial_index=False),
)
)
_elevation = deferred(Column("elevation", REAL))
_heading = deferred(Column("heading", REAL))
_course = deferred(Column("course", REAL))
_speed = deferred(Column("speed", REAL))
source_id = Column(UUIDType, ForeignKey("Datafiles.datafile_id"), nullable=False)
privacy_id = Column(UUIDType, ForeignKey("Privacies.privacy_id"))
created_date = Column(DateTime, default=datetime.utcnow)
@declared_attr
def platform(self):
return relationship(
"Platform",
secondary=constants.SENSOR,
primaryjoin="State.sensor_id == Sensor.sensor_id",
secondaryjoin="Platform.platform_id == Sensor.host",
lazy="joined",
join_depth=1,
uselist=False,
viewonly=True,
)
@declared_attr
def platform_name(self):
return association_proxy("platform", "name")
class Change(BaseSpatiaLite):
__tablename__ = constants.CHANGE
table_type = TableTypes.METADATA
table_type_id = 8
change_id = Column(UUIDType, primary_key=True, default=uuid4)
user = Column(String(150), nullable=False)
modified = Column(DATE, nullable=False)
reason = Column(String(500), nullable=False)
created_date = Column(DateTime, default=datetime.utcnow)
class Log(BaseSpatiaLite, LogMixin):
__tablename__ = constants.LOG
table_type = TableTypes.METADATA
table_type_id = 9
log_id = Column(UUIDType, primary_key=True, default=uuid4)
table = Column(String(150), nullable=False)
id = Column(UUIDType, nullable=False)
field = Column(String(150))
new_value = Column(String(150))
change_id = Column(UUIDType, ForeignKey("Changes.change_id"), nullable=False)
created_date = Column(DateTime, default=datetime.utcnow)
class Tag(BaseSpatiaLite):
__tablename__ = constants.TAG
table_type = TableTypes.METADATA
table_type_id = 11
tag_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False)
created_date = Column(DateTime, default=datetime.utcnow)
class HostedBy(BaseSpatiaLite, HostedByMixin):
__tablename__ = constants.HOSTED_BY
table_type = TableTypes.METADATA
table_type_id = 1
hosted_by_id = Column(UUIDType, primary_key=True, default=uuid4)
subject_id = Column(UUIDType, ForeignKey("Platforms.platform_id"), nullable=False)
host_id = Column(UUIDType, ForeignKey("Platforms.platform_id"), nullable=False)
hosted_from = Column(DATE, nullable=False)
host_to = Column(DATE, nullable=False)
privacy_id = Column(Integer, ForeignKey("Privacies.privacy_id"), nullable=False)
created_date = Column(DateTime, default=datetime.utcnow)
class Platform(BaseSpatiaLite, PlatformMixin):
__tablename__ = constants.PLATFORM
table_type = TableTypes.METADATA
table_type_id = 3
platform_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False)
pennant = deferred(Column(String(10), nullable=False))
trigraph = deferred(Column(String(3)))
quadgraph = deferred(Column(String(4)))
nationality_id = Column(UUIDType, ForeignKey("Nationalities.nationality_id"), nullable=False)
platform_type_id = Column(
UUIDType, ForeignKey("PlatformTypes.platform_type_id"), nullable=False
)
privacy_id = Column(UUIDType, ForeignKey("Privacies.privacy_id"), nullable=False)
created_date = Column(DateTime, default=datetime.utcnow)
class GeometrySubType(BaseSpatiaLite):
__tablename__ = constants.GEOMETRY_SUBTYPE
table_type = TableTypes.REFERENCE
table_type_id = 16
geo_sub_type_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False, unique=True)
parent = Column(
UUIDType, ForeignKey("GeometryTypes.geo_type_id", onupdate="cascade"), nullable=False
)
created_date = Column(DateTime, default=datetime.utcnow)
class Serial(BaseSpatiaLite):
__tablename__ = constants.SERIAL
table_type = TableTypes.METADATA
table_type_id = 37
serial_id = Column(UUIDType, primary_key=True, default=uuid4)
wargame_id = Column(
UUIDType,
ForeignKey("Wargames.wargame_id", onupdate="cascade", ondelete="cascade"),
nullable=False,
)
serial_number = Column(
String(150),
CheckConstraint("serial_number <> ''", name="ck_Serials_serial_number"),
nullable=False,
)
start = Column(TIMESTAMP, nullable=False)
end = Column(TIMESTAMP, nullable=False)
exercise = Column(String(150))
environment = deferred(Column(String(150)))
location = deferred(Column(String(150)))
privacy_id = Column(
UUIDType,
ForeignKey("Privacies.privacy_id", onupdate="cascade", ondelete="cascade"),
nullable=False,
)
created_date = Column(DateTime, default=datetime.utcnow)
class Wargame(BaseSpatiaLite):
__tablename__ = constants.WARGAME
table_type = TableTypes.METADATA
table_type_id = 37
wargame_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(
String(150), CheckConstraint("name <> ''", name="ck_Wargames_name"), nullable=False
)
series_id = Column(
UUIDType,
ForeignKey("Series.series_id", onupdate="cascade", ondelete="cascade"),
nullable=False,
)
start = Column(TIMESTAMP, nullable=False)
end = Column(TIMESTAMP, nullable=False)
privacy_id = Column(
UUIDType,
ForeignKey("Privacies.privacy_id", onupdate="cascade", ondelete="cascade"),
nullable=False,
)
created_date = Column(DateTime, default=datetime.utcnow)
class Series(BaseSpatiaLite):
__tablename__ = constants.SERIES
table_type = TableTypes.METADATA
table_type_id = 36
series_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), CheckConstraint("name <> ''", name="ck_Series_name"), nullable=False)
privacy_id = Column(
UUIDType,
ForeignKey("Privacies.privacy_id", onupdate="cascade", ondelete="cascade"),
nullable=False,
)
created_date = Column(DateTime, default=datetime.utcnow)
class WargameParticipant(BaseSpatiaLite):
__tablename__ = constants.WARGAME_PARTICIPANT
table_type = TableTypes.METADATA
table_type_id = 38
wargame_participant_id = Column(UUIDType, primary_key=True, default=uuid4)
wargame_id = Column(
UUIDType,
ForeignKey("Wargames.wargame_id", onupdate="cascade", ondelete="cascade"),
nullable=False,
)
platform_id = Column(
UUIDType,
ForeignKey("Platforms.platform_id", onupdate="cascade", ondelete="cascade"),
nullable=False,
)
privacy_id = Column(
UUIDType,
ForeignKey("Privacies.privacy_id", onupdate="cascade", ondelete="cascade"),
nullable=False,
)
created_date = Column(DateTime, default=datetime.utcnow)
class SerialParticipant(BaseSpatiaLite):
__tablename__ = constants.SERIAL_PARTICIPANT
table_type = TableTypes.METADATA
table_type_id = 39
serial_participant_id = Column(UUIDType, primary_key=True, default=uuid4)
wargame_participant_id = Column(
UUIDType,
ForeignKey(
"WargameParticipants.wargame_participant_id", onupdate="cascade", ondelete="cascade"
),
nullable=False,
)
serial_id = Column(
UUIDType,
ForeignKey("Serials.serial_id", onupdate="cascade", ondelete="cascade"),
nullable=False,
)
start = Column(TIMESTAMP)
end = Column(TIMESTAMP)
force_type_id = Column(
UUIDType,
ForeignKey("ForceTypes.force_type_id", onupdate="cascade", ondelete="cascade"),
nullable=False,
)
privacy_id = Column(
UUIDType,
ForeignKey("Privacies.privacy_id", onupdate="cascade", ondelete="cascade"),
nullable=False,
)
created_date = Column(DateTime, default=datetime.utcnow)
# Reference Tables
class ForceType(BaseSpatiaLite, ReferenceRepr, ReferenceDefaultFields):
__tablename__ = constants.FORCE_TYPE
table_type = TableTypes.REFERENCE
table_type_id = 40
force_type_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(
String(150),
CheckConstraint("name <> ''", name="ck_ForceTypes_name"),
nullable=False,
unique=True,
)
color = Column(String(10))
created_date = Column(DateTime, default=datetime.utcnow)
class Geometry1(BaseSpatiaLite, GeometryMixin):
__tablename__ = constants.GEOMETRY
table_type = TableTypes.MEASUREMENT
table_type_id = 33
geometry_id = Column(UUIDType, primary_key=True, default=uuid4)
geometry = deferred(
Column(
Geometry(geometry_type="GEOMETRY", management=True, spatial_index=False), nullable=False
)
)
name = Column(String(150), nullable=False)
geo_type_id = Column(UUIDType, ForeignKey("GeometryTypes.geo_type_id"), nullable=False)
geo_sub_type_id = Column(
UUIDType, ForeignKey("GeometrySubTypes.geo_sub_type_id"), nullable=False
)
start = Column(TIMESTAMP)
end = Column(TIMESTAMP)
serial_id = Column(UUIDType, ForeignKey("Serials.serial_id"))
subject_platform_id = Column(UUIDType, ForeignKey("Platforms.platform_id"))
sensor_platform_id = Column(UUIDType, ForeignKey("Platforms.platform_id"))
source_id = Column(UUIDType, ForeignKey("Datafiles.datafile_id"), nullable=False)
privacy_id = Column(UUIDType, ForeignKey("Privacies.privacy_id"))
created_date = Column(DateTime, default=datetime.utcnow)
class GeometryType(BaseSpatiaLite):
__tablename__ = constants.GEOMETRY_TYPE
table_type = TableTypes.REFERENCE
table_type_id = 15
geo_type_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False, unique=True)
created_date = Column(DateTime, default=datetime.utcnow)
class PlatformType(BaseSpatiaLite):
__tablename__ = constants.PLATFORM_TYPE
table_type = TableTypes.REFERENCE
table_type_id = 13
platform_type_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False, unique=True)
created_date = Column(DateTime, default=datetime.utcnow)
class Participant(BaseSpatiaLite):
__tablename__ = constants.PARTICIPANT
table_type = TableTypes.METADATA
table_type_id = 5
participant_id = Column(UUIDType, primary_key=True, default=uuid4)
platform_id = Column(UUIDType, ForeignKey("Platforms.platform_id"), nullable=False)
task_id = Column(UUIDType, ForeignKey("Tasks.task_id"), nullable=False)
start = Column(TIMESTAMP)
end = Column(TIMESTAMP)
force = Column(String(150))
privacy_id = Column(UUIDType, ForeignKey("Privacies.privacy_id"), nullable=False)
created_date = Column(DateTime, default=datetime.utcnow)
class TaggedItem(BaseSpatiaLite, TaggedItemMixin):
__tablename__ = constants.TAGGED_ITEM
table_type = TableTypes.METADATA
table_type_id = 12
tagged_item_id = Column(UUIDType, primary_key=True, default=uuid4)
tag_id = Column(UUIDType, ForeignKey("Tags.tag_id"), nullable=False)
item_id = Column(UUIDType, nullable=False)
tagged_by_id = Column(UUIDType, ForeignKey("Users.user_id"), nullable=False)
private = Column(Boolean, nullable=False)
tagged_on = Column(DATE, nullable=False)
created_date = Column(DateTime, default=datetime.utcnow)
class SensorType(BaseSpatiaLite):
__tablename__ = constants.SENSOR_TYPE
table_type = TableTypes.REFERENCE
table_type_id = 21
sensor_type_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False, unique=True)
created_date = Column(DateTime, default=datetime.utcnow)
class DatafileType(BaseSpatiaLite):
__tablename__ = constants.DATAFILE_TYPE
table_type = TableTypes.REFERENCE
table_type_id = 23
datafile_type_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False, unique=True)
created_date = Column(DateTime, default=datetime.utcnow)
class User(BaseSpatiaLite):
__tablename__ = constants.USER
table_type = TableTypes.REFERENCE
table_type_id = 17
user_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False, unique=True)
created_date = Column(DateTime, default=datetime.utcnow)
class Activation(BaseSpatiaLite, ActivationMixin):
__tablename__ = constants.ACTIVATION
table_type = TableTypes.MEASUREMENT
table_type_id = 30
activation_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False)
sensor_id = Column(UUIDType, ForeignKey("Sensors.sensor_id"), nullable=False)
start = deferred(Column(TIMESTAMP, nullable=False))
end = deferred(Column(TIMESTAMP, nullable=False))
_min_range = deferred(Column("min_range", REAL))
_max_range = deferred(Column("max_range", REAL))
_left_arc = deferred(Column("left_arc", REAL))
_right_arc = deferred(Column("right_arc", REAL))
source_id = Column(UUIDType, ForeignKey("Datafiles.datafile_id"), nullable=False)
privacy_id = Column(UUIDType, ForeignKey("Privacies.privacy_id"))
created_date = Column(DateTime, default=datetime.utcnow)
class Comment(BaseSpatiaLite, CommentMixin):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.sensor_name = "N/A"
__tablename__ = constants.COMMENT
table_type = TableTypes.MEASUREMENT
table_type_id = 32
comment_id = Column(UUIDType, primary_key=True, default=uuid4)
platform_id = Column(UUIDType, ForeignKey("Platforms.platform_id"))
time = Column(TIMESTAMP, nullable=False)
comment_type_id = Column(UUIDType, ForeignKey("CommentTypes.comment_type_id"), nullable=False)
content = Column(String(150), nullable=False)
source_id = Column(UUIDType, ForeignKey("Datafiles.datafile_id"), nullable=False)
privacy_id = Column(UUIDType, ForeignKey("Privacies.privacy_id"))
created_date = Column(DateTime, default=datetime.utcnow)
class Contact(BaseSpatiaLite, ContactMixin, LocationPropertyMixin, ElevationPropertyMixin):
__tablename__ = constants.CONTACT
table_type = TableTypes.MEASUREMENT
table_type_id = 29
contact_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150))
sensor_id = Column(UUIDType, ForeignKey("Sensors.sensor_id"), nullable=False)
time = Column(TIMESTAMP, nullable=False)
_bearing = deferred(Column("bearing", REAL))
_rel_bearing = deferred(Column("rel_bearing", REAL))
_ambig_bearing = deferred(Column("ambig_bearing", REAL))
_freq = deferred(Column("freq", REAL))
_range = deferred(Column("range", REAL))
_location = deferred(
Column(
"location",
Geometry(geometry_type="POINT", srid=4326, management=True, spatial_index=False),
)
)
_elevation = deferred(Column("elevation", REAL))
_major = deferred(Column("major", REAL))
_minor = deferred(Column("minor", REAL))
_orientation = deferred(Column("orientation", REAL))
classification = deferred(Column(UUIDType, ForeignKey("ClassificationTypes.class_type_id")))
confidence = deferred(Column(UUIDType, ForeignKey("ConfidenceLevels.confidence_level_id")))
contact_type = deferred(Column(UUIDType, ForeignKey("ContactTypes.contact_type_id")))
_mla = deferred(Column("mla", REAL))
_soa = deferred(Column("soa", REAL))
subject_id = Column(UUIDType, ForeignKey("Platforms.platform_id"))
source_id = Column(UUIDType, ForeignKey("Datafiles.datafile_id"), nullable=False)
privacy_id = Column(UUIDType, ForeignKey("Privacies.privacy_id"))
created_date = deferred(Column(DateTime, default=datetime.utcnow))
@declared_attr
def platform(self):
return relationship(
"Platform",
secondary=constants.SENSOR,
primaryjoin="Contact.sensor_id == Sensor.sensor_id",
secondaryjoin="Platform.platform_id == Sensor.host",
lazy="joined",
join_depth=1,
uselist=False,
viewonly=True,
)
@declared_attr
def platform_name(self):
return association_proxy("platform", "name")
class Sensor(BaseSpatiaLite, SensorMixin):
__tablename__ = constants.SENSOR
table_type = TableTypes.METADATA
table_type_id = 2
sensor_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False)
sensor_type_id = Column(UUIDType, ForeignKey("SensorTypes.sensor_type_id"), nullable=False)
host = Column(UUIDType, ForeignKey("Platforms.platform_id"), nullable=False)
privacy_id = Column(UUIDType, ForeignKey("Privacies.privacy_id"), nullable=False)
created_date = Column(DateTime, default=datetime.utcnow)
class Datafile(BaseSpatiaLite, DatafileMixin):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.measurements = dict()
__tablename__ = constants.DATAFILE
table_type = TableTypes.METADATA
table_type_id = 6
datafile_id = Column(UUIDType, primary_key=True, default=uuid4)
simulated = deferred(Column(Boolean, nullable=False))
privacy_id = Column(UUIDType, ForeignKey("Privacies.privacy_id"), nullable=False)
datafile_type_id = Column(
UUIDType, ForeignKey("DatafileTypes.datafile_type_id"), nullable=False
)
reference = Column(String(150))
url = Column(String(150))
size = deferred(Column(Integer, nullable=False))
hash = deferred(Column(String(32), nullable=False))
created_date = Column(DateTime, default=datetime.utcnow)
class UnitType(BaseSpatiaLite):
__tablename__ = constants.UNIT_TYPE
table_type = TableTypes.REFERENCE
table_type_id = 18
unit_type_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False, unique=True)
created_date = Column(DateTime, default=datetime.utcnow)
class CommodityType(BaseSpatiaLite):
__tablename__ = constants.COMMODITY_TYPE
table_type = TableTypes.REFERENCE
table_type_id = 26
commodity_type_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False, unique=True)
created_date = Column(DateTime, default=datetime.utcnow)
class Media(BaseSpatiaLite, MediaMixin, ElevationPropertyMixin, LocationPropertyMixin):
__tablename__ = constants.MEDIA
table_type = TableTypes.MEASUREMENT
table_type_id = 34
media_id = Column(UUIDType, primary_key=True, default=uuid4)
platform_id = Column(UUIDType, ForeignKey("Platforms.platform_id"))
subject_id = Column(UUIDType, ForeignKey("Platforms.platform_id"))
sensor_id = Column(UUIDType, ForeignKey("Sensors.sensor_id"))
_location = deferred(
Column(
"location",
Geometry(geometry_type="POINT", srid=4326, management=True, spatial_index=False),
)
)
_elevation = deferred(Column("elevation", REAL))
time = Column(TIMESTAMP)
media_type_id = Column(UUIDType, ForeignKey("MediaTypes.media_type_id"), nullable=False)
url = deferred(Column(String(150), nullable=False))
source_id = Column(UUIDType, ForeignKey("Datafiles.datafile_id"), nullable=False)
privacy_id = Column(UUIDType, ForeignKey("Privacies.privacy_id"))
created_date = Column(DateTime, default=datetime.utcnow)
class LogsHolding(BaseSpatiaLite, LogsHoldingMixin):
__tablename__ = constants.LOGS_HOLDING
table_type = TableTypes.MEASUREMENT
table_type_id = 31
logs_holding_id = Column(UUIDType, primary_key=True, default=uuid4)
time = Column(TIMESTAMP, nullable=False)
commodity_id = Column(UUIDType, ForeignKey("CommodityTypes.commodity_type_id"), nullable=False)
quantity = Column(REAL, nullable=False)
unit_type_id = Column(UUIDType, ForeignKey("UnitTypes.unit_type_id"), nullable=False)
platform_id = Column(UUIDType, ForeignKey("Platforms.platform_id"), nullable=False)
comment = Column(String(150), nullable=False)
source_id = Column(UUIDType, ForeignKey("Datafiles.datafile_id"), nullable=False)
privacy_id = Column(UUIDType, ForeignKey("Privacies.privacy_id"))
created_date = Column(DateTime, default=datetime.utcnow)
class Nationality(BaseSpatiaLite):
__tablename__ = constants.NATIONALITY
table_type = TableTypes.REFERENCE
table_type_id = 14
nationality_id = Column(UUIDType, primary_key=True, default=uuid4)
name = Column(String(150), nullable=False, unique=True)
created_date = Column(DateTime, default=datetime.utcnow)
# revision identifiers, used by Alembic.
revision = "<KEY>"
down_revision = "<PASSWORD>"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
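    # batch_alter_table is used because SQLite cannot add constraints to an
    # existing table with ALTER TABLE; Alembic's batch mode recreates each
    # table ("move and copy") with the new foreign keys in place.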
with op.batch_alter_table("Activations", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_Activations_source_id_Datafiles"),
"Datafiles",
["source_id"],
["datafile_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Activations_sensor_id_Sensors"), "Sensors", ["sensor_id"], ["sensor_id"]
)
batch_op.create_foreign_key(
batch_op.f("fk_Activations_privacy_id_Privacies"),
"Privacies",
["privacy_id"],
["privacy_id"],
)
with op.batch_alter_table("Comments", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_Comments_privacy_id_Privacies"),
"Privacies",
["privacy_id"],
["privacy_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Comments_platform_id_Platforms"),
"Platforms",
["platform_id"],
["platform_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Comments_source_id_Datafiles"),
"Datafiles",
["source_id"],
["datafile_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Comments_comment_type_id_CommentTypes"),
"CommentTypes",
["comment_type_id"],
["comment_type_id"],
)
with op.batch_alter_table("Contacts", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_Contacts_sensor_id_Sensors"), "Sensors", ["sensor_id"], ["sensor_id"]
)
batch_op.create_foreign_key(
batch_op.f("fk_Contacts_source_id_Datafiles"),
"Datafiles",
["source_id"],
["datafile_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Contacts_privacy_id_Privacies"),
"Privacies",
["privacy_id"],
["privacy_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Contacts_subject_id_Platforms"),
"Platforms",
["subject_id"],
["platform_id"],
)
with op.batch_alter_table("Datafiles", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_Datafiles_privacy_id_Privacies"),
"Privacies",
["privacy_id"],
["privacy_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Datafiles_datafile_type_id_DatafileTypes"),
"DatafileTypes",
["datafile_type_id"],
["datafile_type_id"],
)
with op.batch_alter_table("Geometries", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_Geometries_privacy_id_Privacies"),
"Privacies",
["privacy_id"],
["privacy_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Geometries_task_id_Tasks"), "Tasks", ["task_id"], ["task_id"]
)
batch_op.create_foreign_key(
batch_op.f("fk_Geometries_subject_platform_id_Platforms"),
"Platforms",
["subject_platform_id"],
["platform_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Geometries_geo_sub_type_id_GeometrySubTypes"),
"GeometrySubTypes",
["geo_sub_type_id"],
["geo_sub_type_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Geometries_source_id_Datafiles"),
"Datafiles",
["source_id"],
["datafile_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Geometries_geo_type_id_GeometryTypes"),
"GeometryTypes",
["geo_type_id"],
["geo_type_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Geometries_sensor_platform_id_Platforms"),
"Platforms",
["sensor_platform_id"],
["platform_id"],
)
with op.batch_alter_table("HostedBy", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_HostedBy_subject_id_Platforms"),
"Platforms",
["subject_id"],
["platform_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_HostedBy_privacy_id_Privacies"),
"Privacies",
["privacy_id"],
["privacy_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_HostedBy_host_id_Platforms"), "Platforms", ["host_id"], ["platform_id"]
)
with op.batch_alter_table("Logs", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_Logs_change_id_Changes"), "Changes", ["change_id"], ["change_id"]
)
with op.batch_alter_table("LogsHoldings", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_LogsHoldings_privacy_id_Privacies"),
"Privacies",
["privacy_id"],
["privacy_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_LogsHoldings_platform_id_Platforms"),
"Platforms",
["platform_id"],
["platform_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_LogsHoldings_source_id_Datafiles"),
"Datafiles",
["source_id"],
["datafile_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_LogsHoldings_unit_type_id_UnitTypes"),
"UnitTypes",
["unit_type_id"],
["unit_type_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_LogsHoldings_commodity_id_CommodityTypes"),
"CommodityTypes",
["commodity_id"],
["commodity_type_id"],
)
with op.batch_alter_table("Media", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_Media_source_id_Datafiles"), "Datafiles", ["source_id"], ["datafile_id"]
)
batch_op.create_foreign_key(
batch_op.f("fk_Media_sensor_id_Sensors"), "Sensors", ["sensor_id"], ["sensor_id"]
)
batch_op.create_foreign_key(
batch_op.f("fk_Media_privacy_id_Privacies"), "Privacies", ["privacy_id"], ["privacy_id"]
)
batch_op.create_foreign_key(
batch_op.f("fk_Media_media_type_id_MediaTypes"),
"MediaTypes",
["media_type_id"],
["media_type_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Media_subject_id_Platforms"),
"Platforms",
["subject_id"],
["platform_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Media_platform_id_Platforms"),
"Platforms",
["platform_id"],
["platform_id"],
)
with op.batch_alter_table("Participants", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_Participants_platform_id_Platforms"),
"Platforms",
["platform_id"],
["platform_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Participants_task_id_Tasks"), "Tasks", ["task_id"], ["task_id"]
)
batch_op.create_foreign_key(
batch_op.f("fk_Participants_privacy_id_Privacies"),
"Privacies",
["privacy_id"],
["privacy_id"],
)
with op.batch_alter_table("Platforms", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_Platforms_nationality_id_Nationalities"),
"Nationalities",
["nationality_id"],
["nationality_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Platforms_privacy_id_Privacies"),
"Privacies",
["privacy_id"],
["privacy_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Platforms_platform_type_id_PlatformTypes"),
"PlatformTypes",
["platform_type_id"],
["platform_type_id"],
)
with op.batch_alter_table("Sensors", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_Sensors_sensor_type_id_SensorTypes"),
"SensorTypes",
["sensor_type_id"],
["sensor_type_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Sensors_privacy_id_Privacies"),
"Privacies",
["privacy_id"],
["privacy_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_Sensors_host_Platforms"), "Platforms", ["host"], ["platform_id"]
)
with op.batch_alter_table("States", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_States_sensor_id_Sensors"), "Sensors", ["sensor_id"], ["sensor_id"]
)
batch_op.create_foreign_key(
batch_op.f("fk_States_privacy_id_Privacies"),
"Privacies",
["privacy_id"],
["privacy_id"],
)
batch_op.create_foreign_key(
batch_op.f("fk_States_source_id_Datafiles"), "Datafiles", ["source_id"], ["datafile_id"]
)
with op.batch_alter_table("TaggedItems", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_TaggedItems_tag_id_Tags"), "Tags", ["tag_id"], ["tag_id"]
)
batch_op.create_foreign_key(
batch_op.f("fk_TaggedItems_tagged_by_id_Users"), "Users", ["tagged_by_id"], ["user_id"]
)
with op.batch_alter_table("Tasks", schema=None) as batch_op:
batch_op.create_foreign_key(
batch_op.f("fk_Tasks_privacy_id_Privacies"), "Privacies", ["privacy_id"], ["privacy_id"]
)
batch_op.create_foreign_key(
batch_op.f("fk_Tasks_parent_id_Tasks"), "Tasks", ["parent_id"], ["task_id"]
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table("Tasks", schema=None, copy_from=Task.__table__) as batch_op:
batch_op.drop_constraint(batch_op.f("fk_Tasks_parent_id_Tasks"), type_="foreignkey")
batch_op.drop_constraint(batch_op.f("fk_Tasks_privacy_id_Privacies"), type_="foreignkey")
with op.batch_alter_table(
"TaggedItems", schema=None, copy_from=TaggedItem.__table__
) as batch_op:
batch_op.drop_constraint(
batch_op.f("fk_TaggedItems_tagged_by_id_Users"), type_="foreignkey"
)
batch_op.drop_constraint(batch_op.f("fk_TaggedItems_tag_id_Tags"), type_="foreignkey")
with op.batch_alter_table("States", schema=None, copy_from=State.__table__) as batch_op:
batch_op.drop_constraint(batch_op.f("fk_States_source_id_Datafiles"), type_="foreignkey")
batch_op.drop_constraint(batch_op.f("fk_States_privacy_id_Privacies"), type_="foreignkey")
batch_op.drop_constraint(batch_op.f("fk_States_sensor_id_Sensors"), type_="foreignkey")
with op.batch_alter_table("Sensors", schema=None, copy_from=Sensor.__table__) as batch_op:
batch_op.drop_constraint(batch_op.f("fk_Sensors_host_Platforms"), type_="foreignkey")
batch_op.drop_constraint(batch_op.f("fk_Sensors_privacy_id_Privacies"), type_="foreignkey")
batch_op.drop_constraint(
batch_op.f("fk_Sensors_sensor_type_id_SensorTypes"), type_="foreignkey"
)
with op.batch_alter_table("Platforms", schema=None, copy_from=Platform.__table__) as batch_op:
batch_op.drop_constraint(
batch_op.f("fk_Platforms_platform_type_id_PlatformTypes"), type_="foreignkey"
)
batch_op.drop_constraint(
batch_op.f("fk_Platforms_privacy_id_Privacies"), type_="foreignkey"
)
batch_op.drop_constraint(
batch_op.f("fk_Platforms_nationality_id_Nationalities"), type_="foreignkey"
)
with op.batch_alter_table(
"Participants", schema=None, copy_from=Participant.__table__
) as batch_op:
batch_op.drop_constraint(
batch_op.f("fk_Participants_privacy_id_Privacies"), type_="foreignkey"
)
batch_op.drop_constraint(batch_op.f("fk_Participants_task_id_Tasks"), type_="foreignkey")
batch_op.drop_constraint(
batch_op.f("fk_Participants_platform_id_Platforms"), type_="foreignkey"
)
with op.batch_alter_table("Media", schema=None, copy_from=Media.__table__) as batch_op:
batch_op.drop_constraint(batch_op.f("fk_Media_platform_id_Platforms"), type_="foreignkey")
batch_op.drop_constraint(batch_op.f("fk_Media_subject_id_Platforms"), type_="foreignkey")
batch_op.drop_constraint(
batch_op.f("fk_Media_media_type_id_MediaTypes"), type_="foreignkey"
)
batch_op.drop_constraint(batch_op.f("fk_Media_privacy_id_Privacies"), type_="foreignkey")
batch_op.drop_constraint(batch_op.f("fk_Media_sensor_id_Sensors"), type_="foreignkey")
batch_op.drop_constraint(batch_op.f("fk_Media_source_id_Datafiles"), type_="foreignkey")
with op.batch_alter_table(
"LogsHoldings", schema=None, copy_from=LogsHolding.__table__
) as batch_op:
batch_op.drop_constraint(
batch_op.f("fk_LogsHoldings_commodity_id_CommodityTypes"), type_="foreignkey"
)
batch_op.drop_constraint(
batch_op.f("fk_LogsHoldings_unit_type_id_UnitTypes"), type_="foreignkey"
)
batch_op.drop_constraint(
batch_op.f("fk_LogsHoldings_source_id_Datafiles"), type_="foreignkey"
)
batch_op.drop_constraint(
batch_op.f("fk_LogsHoldings_platform_id_Platforms"), type_="foreignkey"
)
batch_op.drop_constraint(
batch_op.f("fk_LogsHoldings_privacy_id_Privacies"), type_="foreignkey"
)
with op.batch_alter_table("Logs", schema=None, copy_from=Log.__table__) as batch_op:
batch_op.drop_constraint(batch_op.f("fk_Logs_change_id_Changes"), type_="foreignkey")
with op.batch_alter_table("HostedBy", schema=None, copy_from=HostedBy.__table__) as batch_op:
batch_op.drop_constraint(batch_op.f("fk_HostedBy_host_id_Platforms"), type_="foreignkey")
batch_op.drop_constraint(batch_op.f("fk_HostedBy_privacy_id_Privacies"), type_="foreignkey")
batch_op.drop_constraint(batch_op.f("fk_HostedBy_subject_id_Platforms"), type_="foreignkey")
with op.batch_alter_table("Geometries", schema=None, copy_from=Geometry1.__table__) as batch_op:
batch_op.drop_constraint(
batch_op.f("fk_Geometries_sensor_platform_id_Platforms"), type_="foreignkey"
)
batch_op.drop_constraint(
batch_op.f("fk_Geometries_geo_type_id_GeometryTypes"), type_="foreignkey"
)
batch_op.drop_constraint(
batch_op.f("fk_Geometries_source_id_Datafiles"), type_="foreignkey"
)
batch_op.drop_constraint(
batch_op.f("fk_Geometries_geo_sub_type_id_GeometrySubTypes"), type_="foreignkey"
)
batch_op.drop_constraint(
batch_op.f("fk_Geometries_subject_platform_id_Platforms"), type_="foreignkey"
)
batch_op.drop_constraint(batch_op.f("fk_Geometries_task_id_Tasks"), type_="foreignkey")
batch_op.drop_constraint(
batch_op.f("fk_Geometries_privacy_id_Privacies"), type_="foreignkey"
)
with op.batch_alter_table("Datafiles", schema=None, copy_from=Datafile.__table__) as batch_op:
batch_op.drop_constraint(
batch_op.f("fk_Datafiles_datafile_type_id_DatafileTypes"), type_="foreignkey"
)
batch_op.drop_constraint(
batch_op.f("fk_Datafiles_privacy_id_Privacies"), type_="foreignkey"
)
with op.batch_alter_table("Contacts", schema=None, copy_from=Contact.__table__) as batch_op:
batch_op.drop_constraint(batch_op.f("fk_Contacts_subject_id_Platforms"), type_="foreignkey")
batch_op.drop_constraint(batch_op.f("fk_Contacts_privacy_id_Privacies"), type_="foreignkey")
batch_op.drop_constraint(batch_op.f("fk_Contacts_source_id_Datafiles"), type_="foreignkey")
batch_op.drop_constraint(batch_op.f("fk_Contacts_sensor_id_Sensors"), type_="foreignkey")
with op.batch_alter_table("Comments", schema=None, copy_from=Comment.__table__) as batch_op:
batch_op.drop_constraint(
batch_op.f("fk_Comments_comment_type_id_CommentTypes"), type_="foreignkey"
)
batch_op.drop_constraint(batch_op.f("fk_Comments_source_id_Datafiles"), type_="foreignkey")
batch_op.drop_constraint(
batch_op.f("fk_Comments_platform_id_Platforms"), type_="foreignkey"
)
batch_op.drop_constraint(batch_op.f("fk_Comments_privacy_id_Privacies"), type_="foreignkey")
with op.batch_alter_table(
"Activations", schema=None, copy_from=Activation.__table__
) as batch_op:
batch_op.drop_constraint(
batch_op.f("fk_Activations_privacy_id_Privacies"), type_="foreignkey"
)
batch_op.drop_constraint(batch_op.f("fk_Activations_sensor_id_Sensors"), type_="foreignkey")
batch_op.drop_constraint(
batch_op.f("fk_Activations_source_id_Datafiles"), type_="foreignkey"
)
# ### end Alembic commands ###
|
[
"sqlalchemy.MetaData",
"sqlalchemy.orm.declarative_base",
"sqlalchemy.ext.associationproxy.association_proxy",
"sqlalchemy.ForeignKey",
"sqlalchemy.orm.relationship",
"geoalchemy2.Geometry",
"sqlalchemy.Column",
"sqlalchemy.String",
"sqlalchemy.sql.schema.CheckConstraint",
"alembic.op.batch_alter_table"
] |
[((1296, 1348), 'sqlalchemy.MetaData', 'MetaData', ([], {'naming_convention': 'sqlite_naming_convention'}), '(naming_convention=sqlite_naming_convention)\n', (1304, 1348), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((1366, 1401), 'sqlalchemy.orm.declarative_base', 'declarative_base', ([], {'metadata': 'Metadata'}), '(metadata=Metadata)\n', (1382, 1401), False, 'from sqlalchemy.orm import declarative_base, declared_attr, deferred, relationship\n'), ((1578, 1627), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (1584, 1627), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((1713, 1754), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (1719, 1754), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((1919, 1968), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (1925, 1968), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((2056, 2097), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (2062, 2097), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((2274, 2323), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (2280, 2323), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((2404, 2445), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (2410, 2445), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((2585, 2634), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (2591, 2634), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((2772, 2805), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {'nullable': '(False)'}), '(TIMESTAMP, nullable=False)\n', (2778, 2805), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((2816, 2849), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {'nullable': '(False)'}), '(TIMESTAMP, nullable=False)\n', (2822, 2849), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((3048, 3089), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (3054, 3089), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((3254, 3303), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (3260, 3303), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((3383, 3424), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', 
(3389, 3424), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((3583, 3632), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (3589, 3632), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((3712, 3753), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (3718, 3753), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((3904, 3953), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (3910, 3953), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((4026, 4057), 'sqlalchemy.Column', 'Column', (['Integer'], {'nullable': '(False)'}), '(Integer, nullable=False)\n', (4032, 4057), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((4077, 4118), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (4083, 4118), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((4324, 4373), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (4330, 4373), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((4385, 4418), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {'nullable': '(False)'}), '(TIMESTAMP, nullable=False)\n', (4391, 4418), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((5046, 5087), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (5052, 5087), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((5717, 5766), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (5723, 5766), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((5829, 5857), 'sqlalchemy.Column', 'Column', (['DATE'], {'nullable': '(False)'}), '(DATE, nullable=False)\n', (5835, 5857), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((5926, 5967), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (5932, 5967), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((6114, 6163), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (6120, 6163), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((6221, 6253), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'nullable': '(False)'}), '(UUIDType, nullable=False)\n', (6227, 6253), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((6423, 6464), 'sqlalchemy.Column', 'Column', (['DateTime'], 
{'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (6429, 6464), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((6602, 6651), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (6608, 6651), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((6718, 6759), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (6724, 6759), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((6928, 6977), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (6934, 6977), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((7167, 7195), 'sqlalchemy.Column', 'Column', (['DATE'], {'nullable': '(False)'}), '(DATE, nullable=False)\n', (7173, 7195), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((7210, 7238), 'sqlalchemy.Column', 'Column', (['DATE'], {'nullable': '(False)'}), '(DATE, nullable=False)\n', (7216, 7238), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((7343, 7384), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (7349, 7384), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((7551, 7600), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (7557, 7600), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((8113, 8154), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (8119, 8154), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((8327, 8376), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (8333, 8376), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((8577, 8618), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (8583, 8618), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((8765, 8814), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (8771, 8814), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((9143, 9176), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {'nullable': '(False)'}), '(TIMESTAMP, nullable=False)\n', (9149, 9176), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((9187, 9220), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {'nullable': '(False)'}), '(TIMESTAMP, nullable=False)\n', (9193, 9220), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, 
String\n'), ((9525, 9566), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (9531, 9566), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((9716, 9765), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (9722, 9765), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((10047, 10080), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {'nullable': '(False)'}), '(TIMESTAMP, nullable=False)\n', (10053, 10080), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((10091, 10124), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {'nullable': '(False)'}), '(TIMESTAMP, nullable=False)\n', (10097, 10124), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((10301, 10342), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (10307, 10342), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((10489, 10538), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (10495, 10538), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((10816, 10857), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (10822, 10857), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((11042, 11091), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (11048, 11091), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((11583, 11624), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (11589, 11624), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((11806, 11855), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (11812, 11855), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((12234, 12251), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {}), '(TIMESTAMP)\n', (12240, 12251), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((12262, 12279), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {}), '(TIMESTAMP)\n', (12268, 12279), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((12620, 12661), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (12626, 12661), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((12878, 12927), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (12884, 12927), False, 'from sqlalchemy import 
DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((13135, 13176), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (13141, 13176), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((13348, 13397), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (13354, 13397), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((13824, 13841), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {}), '(TIMESTAMP)\n', (13830, 13841), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((13852, 13869), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {}), '(TIMESTAMP)\n', (13858, 13869), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((14270, 14311), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (14276, 14311), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((14474, 14523), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (14480, 14523), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((14603, 14644), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (14609, 14644), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((14812, 14861), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (14818, 14861), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((14941, 14982), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (14947, 14982), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((15143, 15192), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (15149, 15192), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((15369, 15386), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {}), '(TIMESTAMP)\n', (15375, 15386), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((15397, 15414), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {}), '(TIMESTAMP)\n', (15403, 15414), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((15552, 15593), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (15558, 15593), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((15771, 15820), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (15777, 15820), False, 'from sqlalchemy import 
DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((15908, 15940), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'nullable': '(False)'}), '(UUIDType, nullable=False)\n', (15914, 15940), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((16036, 16067), 'sqlalchemy.Column', 'Column', (['Boolean'], {'nullable': '(False)'}), '(Boolean, nullable=False)\n', (16042, 16067), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((16084, 16112), 'sqlalchemy.Column', 'Column', (['DATE'], {'nullable': '(False)'}), '(DATE, nullable=False)\n', (16090, 16112), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((16132, 16173), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (16138, 16173), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((16335, 16384), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (16341, 16384), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((16464, 16505), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (16470, 16505), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((16673, 16722), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (16679, 16722), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((16802, 16843), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (16808, 16843), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((16985, 17034), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (16991, 17034), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((17114, 17155), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (17120, 17155), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((17334, 17383), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (17340, 17383), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((18008, 18049), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (18014, 18049), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((18333, 18382), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (18339, 18382), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((18466, 18499), 'sqlalchemy.Column', 
'Column', (['TIMESTAMP'], {'nullable': '(False)'}), '(TIMESTAMP, nullable=False)\n', (18472, 18499), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((18824, 18865), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (18830, 18865), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((19079, 19128), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (19085, 19128), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((19253, 19286), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {'nullable': '(False)'}), '(TIMESTAMP, nullable=False)\n', (19259, 19286), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((21225, 21274), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (21231, 21274), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((21604, 21645), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (21610, 21645), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((21931, 21980), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (21937, 21980), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((22435, 22476), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (22441, 22476), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((22632, 22681), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (22638, 22681), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((22761, 22802), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (22767, 22802), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((22973, 23022), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (22979, 23022), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((23102, 23143), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (23108, 23143), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((23349, 23398), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (23355, 23398), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((23848, 23865), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {}), '(TIMESTAMP)\n', (23854, 23865), 
False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((24190, 24231), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (24196, 24231), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((24416, 24465), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (24422, 24465), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((24477, 24510), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {'nullable': '(False)'}), '(TIMESTAMP, nullable=False)\n', (24483, 24510), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((24626, 24654), 'sqlalchemy.Column', 'Column', (['REAL'], {'nullable': '(False)'}), '(REAL, nullable=False)\n', (24632, 24654), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((25058, 25099), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (25064, 25099), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((25262, 25311), 'sqlalchemy.Column', 'Column', (['UUIDType'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUIDType, primary_key=True, default=uuid4)\n', (25268, 25311), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((25391, 25432), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (25397, 25432), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((1652, 1663), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (1658, 1663), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((1995, 2006), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (2001, 2006), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((2343, 2354), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (2349, 2354), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((2653, 2664), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (2659, 2664), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((2715, 2742), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Tasks.task_id"""'], {}), "('Tasks.task_id')\n", (2725, 2742), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((2977, 3011), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {}), "('Privacies.privacy_id')\n", (2987, 3011), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((3322, 3333), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (3328, 3333), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((3651, 3662), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (3657, 3662), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, 
MetaData, String\n'), ((3972, 3983), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (3978, 3983), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((4452, 4483), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Sensors.sensor_id"""'], {}), "('Sensors.sensor_id')\n", (4462, 4483), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((4703, 4728), 'sqlalchemy.Column', 'Column', (['"""elevation"""', 'REAL'], {}), "('elevation', REAL)\n", (4709, 4728), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((4754, 4777), 'sqlalchemy.Column', 'Column', (['"""heading"""', 'REAL'], {}), "('heading', REAL)\n", (4760, 4777), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((4802, 4824), 'sqlalchemy.Column', 'Column', (['"""course"""', 'REAL'], {}), "('course', REAL)\n", (4808, 4824), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((4848, 4869), 'sqlalchemy.Column', 'Column', (['"""speed"""', 'REAL'], {}), "('speed', REAL)\n", (4854, 4869), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((4904, 4939), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Datafiles.datafile_id"""'], {}), "('Datafiles.datafile_id')\n", (4914, 4939), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((4991, 5025), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {}), "('Privacies.privacy_id')\n", (5001, 5025), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((5147, 5376), 'sqlalchemy.orm.relationship', 'relationship', (['"""Platform"""'], {'secondary': 'constants.SENSOR', 'primaryjoin': '"""State.sensor_id == Sensor.sensor_id"""', 'secondaryjoin': '"""Platform.platform_id == Sensor.host"""', 'lazy': '"""joined"""', 'join_depth': '(1)', 'uselist': '(False)', 'viewonly': '(True)'}), "('Platform', secondary=constants.SENSOR, primaryjoin=\n 'State.sensor_id == Sensor.sensor_id', secondaryjoin=\n 'Platform.platform_id == Sensor.host', lazy='joined', join_depth=1,\n uselist=False, viewonly=True)\n", (5159, 5376), False, 'from sqlalchemy.orm import declarative_base, declared_attr, deferred, relationship\n'), ((5534, 5571), 'sqlalchemy.ext.associationproxy.association_proxy', 'association_proxy', (['"""platform"""', '"""name"""'], {}), "('platform', 'name')\n", (5551, 5571), False, 'from sqlalchemy.ext.associationproxy import association_proxy\n'), ((5785, 5796), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (5791, 5796), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((5878, 5889), 'sqlalchemy.String', 'String', (['(500)'], {}), '(500)\n', (5884, 5889), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((6183, 6194), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (6189, 6194), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((6273, 6284), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (6279, 6284), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((6309, 6320), 'sqlalchemy.String', 
'String', (['(150)'], {}), '(150)\n', (6315, 6320), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((6355, 6386), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Changes.change_id"""'], {}), "('Changes.change_id')\n", (6365, 6386), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((6670, 6681), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (6676, 6681), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((7012, 7047), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Platforms.platform_id"""'], {}), "('Platforms.platform_id')\n", (7022, 7047), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((7096, 7131), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Platforms.platform_id"""'], {}), "('Platforms.platform_id')\n", (7106, 7131), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((7272, 7306), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {}), "('Privacies.privacy_id')\n", (7282, 7306), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((7619, 7630), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (7625, 7630), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((7832, 7874), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Nationalities.nationality_id"""'], {}), "('Nationalities.nationality_id')\n", (7842, 7874), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((7941, 7985), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""PlatformTypes.platform_type_id"""'], {}), "('PlatformTypes.platform_type_id')\n", (7951, 7985), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((8042, 8076), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {}), "('Privacies.privacy_id')\n", (8052, 8076), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((8395, 8406), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (8401, 8406), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((8476, 8535), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""GeometryTypes.geo_type_id"""'], {'onupdate': '"""cascade"""'}), "('GeometryTypes.geo_type_id', onupdate='cascade')\n", (8486, 8535), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((8866, 8939), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Wargames.wargame_id"""'], {'onupdate': '"""cascade"""', 'ondelete': '"""cascade"""'}), "('Wargames.wargame_id', onupdate='cascade', ondelete='cascade')\n", (8876, 8939), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((9007, 9018), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (9013, 9018), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((9028, 9099), 'sqlalchemy.sql.schema.CheckConstraint', 'CheckConstraint', (['"""serial_number <> \'\'"""'], {'name': '"""ck_Serials_serial_number"""'}), '("serial_number <> \'\'", name=\'ck_Serials_serial_number\')\n', (9043, 9099), False, 
'from sqlalchemy.sql.schema import CheckConstraint\n'), ((9243, 9254), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (9249, 9254), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((9400, 9474), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {'onupdate': '"""cascade"""', 'ondelete': '"""cascade"""'}), "('Privacies.privacy_id', onupdate='cascade', ondelete='cascade')\n", (9410, 9474), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((9793, 9804), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (9799, 9804), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((9806, 9860), 'sqlalchemy.sql.schema.CheckConstraint', 'CheckConstraint', (['"""name <> \'\'"""'], {'name': '"""ck_Wargames_name"""'}), '("name <> \'\'", name=\'ck_Wargames_name\')\n', (9821, 9860), False, 'from sqlalchemy.sql.schema import CheckConstraint\n'), ((9933, 10003), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Series.series_id"""'], {'onupdate': '"""cascade"""', 'ondelete': '"""cascade"""'}), "('Series.series_id', onupdate='cascade', ondelete='cascade')\n", (9943, 10003), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((10176, 10250), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {'onupdate': '"""cascade"""', 'ondelete': '"""cascade"""'}), "('Privacies.privacy_id', onupdate='cascade', ondelete='cascade')\n", (10186, 10250), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((10557, 10568), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (10563, 10568), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((10570, 10622), 'sqlalchemy.sql.schema.CheckConstraint', 'CheckConstraint', (['"""name <> \'\'"""'], {'name': '"""ck_Series_name"""'}), '("name <> \'\'", name=\'ck_Series_name\')\n', (10585, 10622), False, 'from sqlalchemy.sql.schema import CheckConstraint\n'), ((10691, 10765), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {'onupdate': '"""cascade"""', 'ondelete': '"""cascade"""'}), "('Privacies.privacy_id', onupdate='cascade', ondelete='cascade')\n", (10701, 10765), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((11143, 11216), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Wargames.wargame_id"""'], {'onupdate': '"""cascade"""', 'ondelete': '"""cascade"""'}), "('Wargames.wargame_id', onupdate='cascade', ondelete='cascade')\n", (11153, 11216), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((11300, 11375), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Platforms.platform_id"""'], {'onupdate': '"""cascade"""', 'ondelete': '"""cascade"""'}), "('Platforms.platform_id', onupdate='cascade', ondelete='cascade')\n", (11310, 11375), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((11458, 11532), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {'onupdate': '"""cascade"""', 'ondelete': '"""cascade"""'}), "('Privacies.privacy_id', onupdate='cascade', ondelete='cascade')\n", (11468, 11532), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, 
String\n'), ((11919, 12019), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""WargameParticipants.wargame_participant_id"""'], {'onupdate': '"""cascade"""', 'ondelete': '"""cascade"""'}), "('WargameParticipants.wargame_participant_id', onupdate='cascade',\n ondelete='cascade')\n", (11929, 12019), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((12119, 12190), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Serials.serial_id"""'], {'onupdate': '"""cascade"""', 'ondelete': '"""cascade"""'}), "('Serials.serial_id', onupdate='cascade', ondelete='cascade')\n", (12129, 12190), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((12334, 12412), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""ForceTypes.force_type_id"""'], {'onupdate': '"""cascade"""', 'ondelete': '"""cascade"""'}), "('ForceTypes.force_type_id', onupdate='cascade', ondelete='cascade')\n", (12344, 12412), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((12495, 12569), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {'onupdate': '"""cascade"""', 'ondelete': '"""cascade"""'}), "('Privacies.privacy_id', onupdate='cascade', ondelete='cascade')\n", (12505, 12569), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((12955, 12966), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (12961, 12966), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((12976, 13032), 'sqlalchemy.sql.schema.CheckConstraint', 'CheckConstraint', (['"""name <> \'\'"""'], {'name': '"""ck_ForceTypes_name"""'}), '("name <> \'\'", name=\'ck_ForceTypes_name\')\n', (12991, 13032), False, 'from sqlalchemy.sql.schema import CheckConstraint\n'), ((13104, 13114), 'sqlalchemy.String', 'String', (['(10)'], {}), '(10)\n', (13110, 13114), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((13574, 13585), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (13580, 13585), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((13638, 13677), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""GeometryTypes.geo_type_id"""'], {}), "('GeometryTypes.geo_type_id')\n", (13648, 13677), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((13743, 13789), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""GeometrySubTypes.geo_sub_type_id"""'], {}), "('GeometrySubTypes.geo_sub_type_id')\n", (13753, 13789), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((13903, 13934), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Serials.serial_id"""'], {}), "('Serials.serial_id')\n", (13913, 13934), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((13979, 14014), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Platforms.platform_id"""'], {}), "('Platforms.platform_id')\n", (13989, 14014), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((14058, 14093), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Platforms.platform_id"""'], {}), "('Platforms.platform_id')\n", (14068, 14093), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, 
String\n'), ((14128, 14163), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Datafiles.datafile_id"""'], {}), "('Datafiles.datafile_id')\n", (14138, 14163), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((14215, 14249), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {}), "('Privacies.privacy_id')\n", (14225, 14249), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((14542, 14553), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (14548, 14553), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((14880, 14891), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (14886, 14891), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((15228, 15263), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Platforms.platform_id"""'], {}), "('Platforms.platform_id')\n", (15238, 15263), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((15312, 15339), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Tasks.task_id"""'], {}), "('Tasks.task_id')\n", (15322, 15339), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((15434, 15445), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (15440, 15445), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((15481, 15515), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {}), "('Privacies.privacy_id')\n", (15491, 15515), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((15851, 15876), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Tags.tag_id"""'], {}), "('Tags.tag_id')\n", (15861, 15876), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((15977, 16004), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Users.user_id"""'], {}), "('Users.user_id')\n", (15987, 16004), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((16403, 16414), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (16409, 16414), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((16741, 16752), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (16747, 16752), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((17053, 17064), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (17059, 17064), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((17402, 17413), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (17408, 17413), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((17464, 17495), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Sensors.sensor_id"""'], {}), "('Sensors.sensor_id')\n", (17474, 17495), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((17534, 17567), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {'nullable': '(False)'}), '(TIMESTAMP, nullable=False)\n', (17540, 17567), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, 
Integer, MetaData, String\n'), ((17588, 17621), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {'nullable': '(False)'}), '(TIMESTAMP, nullable=False)\n', (17594, 17621), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((17649, 17674), 'sqlalchemy.Column', 'Column', (['"""min_range"""', 'REAL'], {}), "('min_range', REAL)\n", (17655, 17674), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((17702, 17727), 'sqlalchemy.Column', 'Column', (['"""max_range"""', 'REAL'], {}), "('max_range', REAL)\n", (17708, 17727), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((17754, 17778), 'sqlalchemy.Column', 'Column', (['"""left_arc"""', 'REAL'], {}), "('left_arc', REAL)\n", (17760, 17778), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((17806, 17831), 'sqlalchemy.Column', 'Column', (['"""right_arc"""', 'REAL'], {}), "('right_arc', REAL)\n", (17812, 17831), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((17866, 17901), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Datafiles.datafile_id"""'], {}), "('Datafiles.datafile_id')\n", (17876, 17901), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((17953, 17987), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {}), "('Privacies.privacy_id')\n", (17963, 17987), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((18418, 18453), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Platforms.platform_id"""'], {}), "('Platforms.platform_id')\n", (18428, 18453), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((18539, 18581), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""CommentTypes.comment_type_id"""'], {}), "('CommentTypes.comment_type_id')\n", (18549, 18581), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((18620, 18631), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (18626, 18631), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((18682, 18717), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Datafiles.datafile_id"""'], {}), "('Datafiles.datafile_id')\n", (18692, 18717), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((18769, 18803), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {}), "('Privacies.privacy_id')\n", (18779, 18803), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((19147, 19158), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (19153, 19158), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((19193, 19224), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Sensors.sensor_id"""'], {}), "('Sensors.sensor_id')\n", (19203, 19224), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((19311, 19334), 'sqlalchemy.Column', 'Column', (['"""bearing"""', 'REAL'], {}), "('bearing', REAL)\n", (19317, 19334), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, 
MetaData, String\n'), ((19364, 19391), 'sqlalchemy.Column', 'Column', (['"""rel_bearing"""', 'REAL'], {}), "('rel_bearing', REAL)\n", (19370, 19391), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((19423, 19452), 'sqlalchemy.Column', 'Column', (['"""ambig_bearing"""', 'REAL'], {}), "('ambig_bearing', REAL)\n", (19429, 19452), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((19475, 19495), 'sqlalchemy.Column', 'Column', (['"""freq"""', 'REAL'], {}), "('freq', REAL)\n", (19481, 19495), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((19519, 19540), 'sqlalchemy.Column', 'Column', (['"""range"""', 'REAL'], {}), "('range', REAL)\n", (19525, 19540), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((19744, 19769), 'sqlalchemy.Column', 'Column', (['"""elevation"""', 'REAL'], {}), "('elevation', REAL)\n", (19750, 19769), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((19793, 19814), 'sqlalchemy.Column', 'Column', (['"""major"""', 'REAL'], {}), "('major', REAL)\n", (19799, 19814), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((19838, 19859), 'sqlalchemy.Column', 'Column', (['"""minor"""', 'REAL'], {}), "('minor', REAL)\n", (19844, 19859), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((19889, 19916), 'sqlalchemy.Column', 'Column', (['"""orientation"""', 'REAL'], {}), "('orientation', REAL)\n", (19895, 19916), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((20221, 20240), 'sqlalchemy.Column', 'Column', (['"""mla"""', 'REAL'], {}), "('mla', REAL)\n", (20227, 20240), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((20262, 20281), 'sqlalchemy.Column', 'Column', (['"""soa"""', 'REAL'], {}), "('soa', REAL)\n", (20268, 20281), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((20317, 20352), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Platforms.platform_id"""'], {}), "('Platforms.platform_id')\n", (20327, 20352), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((20387, 20422), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Datafiles.datafile_id"""'], {}), "('Datafiles.datafile_id')\n", (20397, 20422), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((20474, 20508), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {}), "('Privacies.privacy_id')\n", (20484, 20508), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((20538, 20579), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (20544, 20579), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((20640, 20871), 'sqlalchemy.orm.relationship', 'relationship', (['"""Platform"""'], {'secondary': 'constants.SENSOR', 'primaryjoin': '"""Contact.sensor_id == Sensor.sensor_id"""', 'secondaryjoin': '"""Platform.platform_id == Sensor.host"""', 'lazy': 
'"""joined"""', 'join_depth': '(1)', 'uselist': '(False)', 'viewonly': '(True)'}), "('Platform', secondary=constants.SENSOR, primaryjoin=\n 'Contact.sensor_id == Sensor.sensor_id', secondaryjoin=\n 'Platform.platform_id == Sensor.host', lazy='joined', join_depth=1,\n uselist=False, viewonly=True)\n", (20652, 20871), False, 'from sqlalchemy.orm import declarative_base, declared_attr, deferred, relationship\n'), ((21029, 21066), 'sqlalchemy.ext.associationproxy.association_proxy', 'association_proxy', (['"""platform"""', '"""name"""'], {}), "('platform', 'name')\n", (21046, 21066), False, 'from sqlalchemy.ext.associationproxy import association_proxy\n'), ((21293, 21304), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (21299, 21304), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((21360, 21400), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""SensorTypes.sensor_type_id"""'], {}), "('SensorTypes.sensor_type_id')\n", (21370, 21400), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((21446, 21481), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Platforms.platform_id"""'], {}), "('Platforms.platform_id')\n", (21456, 21481), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((21533, 21567), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {}), "('Privacies.privacy_id')\n", (21543, 21567), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((22006, 22037), 'sqlalchemy.Column', 'Column', (['Boolean'], {'nullable': '(False)'}), '(Boolean, nullable=False)\n', (22012, 22037), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((22073, 22107), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {}), "('Privacies.privacy_id')\n", (22083, 22107), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((22174, 22218), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""DatafileTypes.datafile_type_id"""'], {}), "('DatafileTypes.datafile_type_id')\n", (22184, 22218), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((22264, 22275), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (22270, 22275), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((22294, 22305), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (22300, 22305), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((22327, 22358), 'sqlalchemy.Column', 'Column', (['Integer'], {'nullable': '(False)'}), '(Integer, nullable=False)\n', (22333, 22358), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((22700, 22711), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (22706, 22711), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((23041, 23052), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (23047, 23052), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((23434, 23469), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Platforms.platform_id"""'], {}), "('Platforms.platform_id')\n", (23444, 23469), 
False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((23505, 23540), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Platforms.platform_id"""'], {}), "('Platforms.platform_id')\n", (23515, 23540), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((23575, 23606), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Sensors.sensor_id"""'], {}), "('Sensors.sensor_id')\n", (23585, 23606), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((23810, 23835), 'sqlalchemy.Column', 'Column', (['"""elevation"""', 'REAL'], {}), "('elevation', REAL)\n", (23816, 23835), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((23903, 23941), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""MediaTypes.media_type_id"""'], {}), "('MediaTypes.media_type_id')\n", (23913, 23941), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((24048, 24083), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Datafiles.datafile_id"""'], {}), "('Datafiles.datafile_id')\n", (24058, 24083), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((24135, 24169), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {}), "('Privacies.privacy_id')\n", (24145, 24169), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((24547, 24593), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""CommodityTypes.commodity_type_id"""'], {}), "('CommodityTypes.commodity_type_id')\n", (24557, 24593), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((24691, 24727), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""UnitTypes.unit_type_id"""'], {}), "('UnitTypes.unit_type_id')\n", (24701, 24727), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((24780, 24815), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Platforms.platform_id"""'], {}), "('Platforms.platform_id')\n", (24790, 24815), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((24854, 24865), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (24860, 24865), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((24916, 24951), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Datafiles.datafile_id"""'], {}), "('Datafiles.datafile_id')\n", (24926, 24951), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((25003, 25037), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Privacies.privacy_id"""'], {}), "('Privacies.privacy_id')\n", (25013, 25037), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((25330, 25341), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (25336, 25341), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((25655, 25703), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Activations"""'], {'schema': 'None'}), "('Activations', schema=None)\n", (25675, 25703), False, 'from alembic import op\n'), ((26255, 26300), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Comments"""'], {'schema': 
'None'}), "('Comments', schema=None)\n", (26275, 26300), False, 'from alembic import op\n'), ((27099, 27144), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Contacts"""'], {'schema': 'None'}), "('Contacts', schema=None)\n", (27119, 27144), False, 'from alembic import op\n'), ((27876, 27922), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Datafiles"""'], {'schema': 'None'}), "('Datafiles', schema=None)\n", (27896, 27922), False, 'from alembic import op\n'), ((28350, 28397), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Geometries"""'], {'schema': 'None'}), "('Geometries', schema=None)\n", (28370, 28397), False, 'from alembic import op\n'), ((29772, 29817), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""HostedBy"""'], {'schema': 'None'}), "('HostedBy', schema=None)\n", (29792, 29817), False, 'from alembic import op\n'), ((30364, 30405), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Logs"""'], {'schema': 'None'}), "('Logs', schema=None)\n", (30384, 30405), False, 'from alembic import op\n'), ((30569, 30618), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""LogsHoldings"""'], {'schema': 'None'}), "('LogsHoldings', schema=None)\n", (30589, 30618), False, 'from alembic import op\n'), ((31631, 31673), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Media"""'], {'schema': 'None'}), "('Media', schema=None)\n", (31651, 31673), False, 'from alembic import op\n'), ((32703, 32752), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Participants"""'], {'schema': 'None'}), "('Participants', schema=None)\n", (32723, 32752), False, 'from alembic import op\n'), ((33301, 33347), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Platforms"""'], {'schema': 'None'}), "('Platforms', schema=None)\n", (33321, 33347), False, 'from alembic import op\n'), ((33984, 34028), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Sensors"""'], {'schema': 'None'}), "('Sensors', schema=None)\n", (34004, 34028), False, 'from alembic import op\n'), ((34581, 34624), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""States"""'], {'schema': 'None'}), "('States', schema=None)\n", (34601, 34624), False, 'from alembic import op\n'), ((35124, 35172), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""TaggedItems"""'], {'schema': 'None'}), "('TaggedItems', schema=None)\n", (35144, 35172), False, 'from alembic import op\n'), ((35475, 35517), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Tasks"""'], {'schema': 'None'}), "('Tasks', schema=None)\n", (35495, 35517), False, 'from alembic import op\n'), ((35944, 36012), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Tasks"""'], {'schema': 'None', 'copy_from': 'Task.__table__'}), "('Tasks', schema=None, copy_from=Task.__table__)\n", (35964, 36012), False, 'from alembic import op\n'), ((36227, 36312), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""TaggedItems"""'], {'schema': 'None', 'copy_from': 'TaggedItem.__table__'}), "('TaggedItems', schema=None, copy_from=TaggedItem.__table__\n )\n", (36247, 36312), False, 'from alembic import op\n'), ((36564, 36634), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""States"""'], {'schema': 'None', 'copy_from': 'State.__table__'}), "('States', schema=None, copy_from=State.__table__)\n", (36584, 36634), False, 'from alembic import op\n'), ((36951, 37023), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Sensors"""'], {'schema': 
'None', 'copy_from': 'Sensor.__table__'}), "('Sensors', schema=None, copy_from=Sensor.__table__)\n", (36971, 37023), False, 'from alembic import op\n'), ((37369, 37445), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Platforms"""'], {'schema': 'None', 'copy_from': 'Platform.__table__'}), "('Platforms', schema=None, copy_from=Platform.__table__)\n", (37389, 37445), False, 'from alembic import op\n'), ((37859, 37946), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Participants"""'], {'schema': 'None', 'copy_from': 'Participant.__table__'}), "('Participants', schema=None, copy_from=Participant.\n __table__)\n", (37879, 37946), False, 'from alembic import op\n'), ((38332, 38401), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Media"""'], {'schema': 'None', 'copy_from': 'Media.__table__'}), "('Media', schema=None, copy_from=Media.__table__)\n", (38352, 38401), False, 'from alembic import op\n'), ((39036, 39123), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""LogsHoldings"""'], {'schema': 'None', 'copy_from': 'LogsHolding.__table__'}), "('LogsHoldings', schema=None, copy_from=LogsHolding.\n __table__)\n", (39056, 39123), False, 'from alembic import op\n'), ((39800, 39866), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Logs"""'], {'schema': 'None', 'copy_from': 'Log.__table__'}), "('Logs', schema=None, copy_from=Log.__table__)\n", (39820, 39866), False, 'from alembic import op\n'), ((39984, 40059), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""HostedBy"""'], {'schema': 'None', 'copy_from': 'HostedBy.__table__'}), "('HostedBy', schema=None, copy_from=HostedBy.__table__)\n", (40004, 40059), False, 'from alembic import op\n'), ((40383, 40461), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Geometries"""'], {'schema': 'None', 'copy_from': 'Geometry1.__table__'}), "('Geometries', schema=None, copy_from=Geometry1.__table__)\n", (40403, 40461), False, 'from alembic import op\n'), ((41364, 41440), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Datafiles"""'], {'schema': 'None', 'copy_from': 'Datafile.__table__'}), "('Datafiles', schema=None, copy_from=Datafile.__table__)\n", (41384, 41440), False, 'from alembic import op\n'), ((41722, 41796), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Contacts"""'], {'schema': 'None', 'copy_from': 'Contact.__table__'}), "('Contacts', schema=None, copy_from=Contact.__table__)\n", (41742, 41796), False, 'from alembic import op\n'), ((42220, 42294), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Comments"""'], {'schema': 'None', 'copy_from': 'Comment.__table__'}), "('Comments', schema=None, copy_from=Comment.__table__)\n", (42240, 42294), False, 'from alembic import op\n'), ((42774, 42859), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""Activations"""'], {'schema': 'None', 'copy_from': 'Activation.__table__'}), "('Activations', schema=None, copy_from=Activation.__table__\n )\n", (42794, 42859), False, 'from alembic import op\n'), ((2884, 2895), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (2890, 2895), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((2929, 2940), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (2935, 2940), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((4579, 4664), 'geoalchemy2.Geometry', 'Geometry', ([], {'geometry_type': '"""POINT"""', 
'srid': '(4326)', 'management': '(True)', 'spatial_index': '(False)'}), "(geometry_type='POINT', srid=4326, management=True, spatial_index=False\n )\n", (4587, 4664), False, 'from geoalchemy2 import Geometry\n'), ((7678, 7688), 'sqlalchemy.String', 'String', (['(10)'], {}), '(10)\n', (7684, 7688), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((7738, 7747), 'sqlalchemy.String', 'String', (['(3)'], {}), '(3)\n', (7744, 7747), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((7782, 7791), 'sqlalchemy.String', 'String', (['(4)'], {}), '(4)\n', (7788, 7791), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((9290, 9301), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (9296, 9301), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((9335, 9346), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (9341, 9346), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((13451, 13523), 'geoalchemy2.Geometry', 'Geometry', ([], {'geometry_type': '"""GEOMETRY"""', 'management': '(True)', 'spatial_index': '(False)'}), "(geometry_type='GEOMETRY', management=True, spatial_index=False)\n", (13459, 13523), False, 'from geoalchemy2 import Geometry\n'), ((19620, 19705), 'geoalchemy2.Geometry', 'Geometry', ([], {'geometry_type': '"""POINT"""', 'srid': '(4326)', 'management': '(True)', 'spatial_index': '(False)'}), "(geometry_type='POINT', srid=4326, management=True, spatial_index=False\n )\n", (19628, 19705), False, 'from geoalchemy2 import Geometry\n'), ((19965, 20012), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""ClassificationTypes.class_type_id"""'], {}), "('ClassificationTypes.class_type_id')\n", (19975, 20012), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((20058, 20108), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""ConfidenceLevels.confidence_level_id"""'], {}), "('ConfidenceLevels.confidence_level_id')\n", (20068, 20108), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((20156, 20198), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""ContactTypes.contact_type_id"""'], {}), "('ContactTypes.contact_type_id')\n", (20166, 20198), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((22387, 22397), 'sqlalchemy.String', 'String', (['(32)'], {}), '(32)\n', (22393, 22397), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n'), ((23686, 23771), 'geoalchemy2.Geometry', 'Geometry', ([], {'geometry_type': '"""POINT"""', 'srid': '(4326)', 'management': '(True)', 'spatial_index': '(False)'}), "(geometry_type='POINT', srid=4326, management=True, spatial_index=False\n )\n", (23694, 23771), False, 'from geoalchemy2 import Geometry\n'), ((23985, 23996), 'sqlalchemy.String', 'String', (['(150)'], {}), '(150)\n', (23991, 23996), False, 'from sqlalchemy import DATE, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String\n')]
|
# Copyright (c) 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List
try:
import pkg_resources
except ImportError:
# When running from pex use vendored library from pex.
from pex.vendor._vendored.setuptools import pkg_resources
from wca.runners import detection
from wca.runners import allocation
from wca.runners import measurement
from wca.extra import static_allocator
from wca import config
from wca import detectors
from wca import allocators
from wca import mesos
from wca import kubernetes
from wca import storage
from wca.extra import static_node
from wca.extra import numa_allocator
from wca import security
REGISTERED_COMPONENTS = [
measurement.MeasurementRunner,
allocation.AllocationRunner,
detection.DetectionRunner,
mesos.MesosNode,
kubernetes.KubernetesNode,
storage.LogStorage,
storage.KafkaStorage,
storage.FilterStorage,
detectors.NOPAnomalyDetector,
allocators.NOPAllocator,
allocators.AllocationConfiguration,
kubernetes.CgroupDriverType,
static_node.StaticNode,
numa_allocator.NUMAAllocator,
static_allocator.StaticAllocator,
security.SSL,
measurement.TaskLabelRegexGenerator,
]
def register_components(extra_components: List[str]):
for component in REGISTERED_COMPONENTS:
config.register(component)
for component in extra_components:
        # Load an external class, ignoring its requirements.
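        # 'component' is expected to be an entry-point style reference, e.g. 'my_module.submodule:MyClass' (path is illustrative).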
ep = pkg_resources.EntryPoint.parse('external_cls=%s' % component)
cls = ep.resolve()
config.register(cls)
|
[
"wca.config.register",
"pex.vendor._vendored.setuptools.pkg_resources.EntryPoint.parse"
] |
[((1832, 1858), 'wca.config.register', 'config.register', (['component'], {}), '(component)\n', (1847, 1858), False, 'from wca import config\n'), ((1968, 2029), 'pex.vendor._vendored.setuptools.pkg_resources.EntryPoint.parse', 'pkg_resources.EntryPoint.parse', (["('external_cls=%s' % component)"], {}), "('external_cls=%s' % component)\n", (1998, 2029), False, 'from pex.vendor._vendored.setuptools import pkg_resources\n'), ((2065, 2085), 'wca.config.register', 'config.register', (['cls'], {}), '(cls)\n', (2080, 2085), False, 'from wca import config\n')]
|
# Copyright 2020 The TensorFlow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests for tensorflow_graphics.datasets.features.camera_feature."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
import tensorflow_datasets as tfds
from tensorflow_graphics.datasets.features import camera_feature
class CameraFeatureTest(tfds.testing.FeatureExpectationsTestCase):
"""Test Cases for Camera FeatureConnector."""
def __get_camera_params(self):
pose = {'R': np.eye(3).astype(np.float32),
't': np.zeros(3).astype(np.float32)}
f = 35.
optical_center = (640 / 2, 480 / 2)
return pose, f, optical_center
def test_simple_camera(self):
"""Tests camera parameters with fixed focal length, no skew and no aspect ratio."""
expected_pose, expected_f, expected_center = self.__get_camera_params()
expected_intrinsics = np.asarray([[expected_f, 0, expected_center[0]],
[0, expected_f, expected_center[1]],
[0, 0, 1]], dtype=np.float32)
expected_camera = {'pose': expected_pose, 'intrinsics': expected_intrinsics}
inputs = {'f': expected_f, 'optical_center': expected_center,
'pose': expected_pose}
lookat_inputs = {
'f': expected_f,
'optical_center': expected_center,
'pose': {
'look_at': np.array([0, 0, -1], dtype=np.float32),
'up': np.array([0, 1, 0], dtype=np.float32),
'position': np.array([0, 0, 0], dtype=np.float32)
}
}
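    # the look_at/up/position pose above is expected to resolve to the same R = I, t = 0 as expected_pose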
raising_pose_entry = {
'f': expected_f,
'optical_center': expected_center,
'pose': np.eye(4)
}
raising_pose_inputs = {
'f': expected_f,
'optical_center': expected_center,
'pose': {'rot': np.eye(3), 'trans': np.zeros(3)}
}
raising_lookat_inputs = {
'f': expected_f,
'optical_center': expected_center,
'pose': {
'l': np.array([0, 0, -1], dtype=np.float32),
'up': np.array([0, 1, 0], dtype=np.float32),
'C': np.array([0, 0, 0], dtype=np.float32)
}
}
self.assertFeature(
feature=camera_feature.Camera(),
shape={
'pose': {
'R': (3, 3),
't': (3,)
},
'intrinsics': (3, 3)
},
dtype={
'pose': {
'R': tf.float32,
't': tf.float32
},
'intrinsics': tf.float32
},
tests=[
tfds.testing.FeatureExpectationItem(
value=inputs,
expected=expected_camera,
),
tfds.testing.FeatureExpectationItem(
value=lookat_inputs,
expected=expected_camera
),
tfds.testing.FeatureExpectationItem(
value=raising_pose_inputs,
raise_cls=ValueError,
raise_msg='Wrong keys for pose feature provided'
),
tfds.testing.FeatureExpectationItem(
value=raising_lookat_inputs,
raise_cls=ValueError,
raise_msg='Wrong keys for pose feature provided'
),
tfds.testing.FeatureExpectationItem(
value=raising_pose_entry,
raise_cls=ValueError,
raise_msg='Pose needs to be a dictionary'
),
],
)
def test_camera_with_aspect_ratio_and_skew(self):
"""Tests camera parameters with fixed focal length, aspect_ratio and skew."""
expected_pose, expected_f, expected_center = self.__get_camera_params()
expected_aspect_ratio = expected_center[0] / expected_center[1]
expected_skew = 0.6
expected_intrinsics = np.asarray(
[[expected_f, expected_skew, expected_center[0]],
[0, expected_aspect_ratio * expected_f, expected_center[1]],
[0, 0, 1]], dtype=np.float32)
expected_camera = {'pose': expected_pose, 'intrinsics': expected_intrinsics}
inputs = {'f': expected_f,
'optical_center': expected_center,
'skew': expected_skew,
'aspect_ratio': expected_aspect_ratio,
'pose': expected_pose}
self.assertFeature(
feature=camera_feature.Camera(),
shape={
'pose': {
'R': (3, 3),
't': (3,)
},
'intrinsics': (3, 3)
},
dtype={
'pose': {
'R': tf.float32,
't': tf.float32
},
'intrinsics': tf.float32
},
tests=[
tfds.testing.FeatureExpectationItem(
value=inputs,
expected=expected_camera,
),
],
)
def test_full_camera_calibration_matrix(self):
"""Tests camera parameters with different focal length per camera axis and skew."""
expected_pose, _, expected_optical_center = self.__get_camera_params()
expected_skew = 0.6
expected_f = (35., 40.)
expected_intrinsics = np.array(
[[expected_f[0], expected_skew, expected_optical_center[0]],
[0, expected_f[1], expected_optical_center[1]],
[0, 0, 1]], dtype=np.float32)
expected_camera = {'pose': expected_pose, 'intrinsics': expected_intrinsics}
inputs = {'f': expected_f,
'optical_center': expected_optical_center,
'skew': expected_skew, 'pose': expected_pose}
raising_inputs = {'f': expected_f,
'aspect_ratio': 1.5,
'optical_center': expected_optical_center,
'skew': expected_skew, 'pose': expected_pose}
self.assertFeature(
feature=camera_feature.Camera(),
shape={
'pose': {
'R': (3, 3),
't': (3,)
},
'intrinsics': (3, 3)
},
dtype={
'pose': {
'R': tf.float32,
't': tf.float32
},
'intrinsics': tf.float32
},
tests=[
tfds.testing.FeatureExpectationItem(
value=inputs,
expected=expected_camera,
),
tfds.testing.FeatureExpectationItem(
value=raising_inputs,
raise_cls=ValueError,
raise_msg='If aspect ratio is provided, f needs to '
'be a single float',
),
],
)
if __name__ == '__main__':
tfds.testing.test_main()
|
[
"numpy.asarray",
"numpy.zeros",
"tensorflow_graphics.datasets.features.camera_feature.Camera",
"tensorflow_datasets.testing.test_main",
"numpy.array",
"tensorflow_datasets.testing.FeatureExpectationItem",
"numpy.eye"
] |
[((7184, 7208), 'tensorflow_datasets.testing.test_main', 'tfds.testing.test_main', ([], {}), '()\n', (7206, 7208), True, 'import tensorflow_datasets as tfds\n'), ((1490, 1609), 'numpy.asarray', 'np.asarray', (['[[expected_f, 0, expected_center[0]], [0, expected_f, expected_center[1]],\n [0, 0, 1]]'], {'dtype': 'np.float32'}), '([[expected_f, 0, expected_center[0]], [0, expected_f,\n expected_center[1]], [0, 0, 1]], dtype=np.float32)\n', (1500, 1609), True, 'import numpy as np\n'), ((4411, 4571), 'numpy.asarray', 'np.asarray', (['[[expected_f, expected_skew, expected_center[0]], [0, expected_aspect_ratio *\n expected_f, expected_center[1]], [0, 0, 1]]'], {'dtype': 'np.float32'}), '([[expected_f, expected_skew, expected_center[0]], [0, \n expected_aspect_ratio * expected_f, expected_center[1]], [0, 0, 1]],\n dtype=np.float32)\n', (4421, 4571), True, 'import numpy as np\n'), ((5724, 5875), 'numpy.array', 'np.array', (['[[expected_f[0], expected_skew, expected_optical_center[0]], [0, expected_f\n [1], expected_optical_center[1]], [0, 0, 1]]'], {'dtype': 'np.float32'}), '([[expected_f[0], expected_skew, expected_optical_center[0]], [0,\n expected_f[1], expected_optical_center[1]], [0, 0, 1]], dtype=np.float32)\n', (5732, 5875), True, 'import numpy as np\n'), ((2285, 2294), 'numpy.eye', 'np.eye', (['(4)'], {}), '(4)\n', (2291, 2294), True, 'import numpy as np\n'), ((1998, 2036), 'numpy.array', 'np.array', (['[0, 0, -1]'], {'dtype': 'np.float32'}), '([0, 0, -1], dtype=np.float32)\n', (2006, 2036), True, 'import numpy as np\n'), ((2056, 2093), 'numpy.array', 'np.array', (['[0, 1, 0]'], {'dtype': 'np.float32'}), '([0, 1, 0], dtype=np.float32)\n', (2064, 2093), True, 'import numpy as np\n'), ((2119, 2156), 'numpy.array', 'np.array', (['[0, 0, 0]'], {'dtype': 'np.float32'}), '([0, 0, 0], dtype=np.float32)\n', (2127, 2156), True, 'import numpy as np\n'), ((2422, 2431), 'numpy.eye', 'np.eye', (['(3)'], {}), '(3)\n', (2428, 2431), True, 'import numpy as np\n'), ((2442, 2453), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (2450, 2453), True, 'import numpy as np\n'), ((2594, 2632), 'numpy.array', 'np.array', (['[0, 0, -1]'], {'dtype': 'np.float32'}), '([0, 0, -1], dtype=np.float32)\n', (2602, 2632), True, 'import numpy as np\n'), ((2652, 2689), 'numpy.array', 'np.array', (['[0, 1, 0]'], {'dtype': 'np.float32'}), '([0, 1, 0], dtype=np.float32)\n', (2660, 2689), True, 'import numpy as np\n'), ((2708, 2745), 'numpy.array', 'np.array', (['[0, 0, 0]'], {'dtype': 'np.float32'}), '([0, 0, 0], dtype=np.float32)\n', (2716, 2745), True, 'import numpy as np\n'), ((2803, 2826), 'tensorflow_graphics.datasets.features.camera_feature.Camera', 'camera_feature.Camera', ([], {}), '()\n', (2824, 2826), False, 'from tensorflow_graphics.datasets.features import camera_feature\n'), ((4920, 4943), 'tensorflow_graphics.datasets.features.camera_feature.Camera', 'camera_feature.Camera', ([], {}), '()\n', (4941, 4943), False, 'from tensorflow_graphics.datasets.features import camera_feature\n'), ((6385, 6408), 'tensorflow_graphics.datasets.features.camera_feature.Camera', 'camera_feature.Camera', ([], {}), '()\n', (6406, 6408), False, 'from tensorflow_graphics.datasets.features import camera_feature\n'), ((1098, 1107), 'numpy.eye', 'np.eye', (['(3)'], {}), '(3)\n', (1104, 1107), True, 'import numpy as np\n'), ((1145, 1156), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (1153, 1156), True, 'import numpy as np\n'), ((3174, 3249), 'tensorflow_datasets.testing.FeatureExpectationItem', 'tfds.testing.FeatureExpectationItem', 
([], {'value': 'inputs', 'expected': 'expected_camera'}), '(value=inputs, expected=expected_camera)\n', (3209, 3249), True, 'import tensorflow_datasets as tfds\n'), ((3310, 3397), 'tensorflow_datasets.testing.FeatureExpectationItem', 'tfds.testing.FeatureExpectationItem', ([], {'value': 'lookat_inputs', 'expected': 'expected_camera'}), '(value=lookat_inputs, expected=\n expected_camera)\n', (3345, 3397), True, 'import tensorflow_datasets as tfds\n'), ((3452, 3591), 'tensorflow_datasets.testing.FeatureExpectationItem', 'tfds.testing.FeatureExpectationItem', ([], {'value': 'raising_pose_inputs', 'raise_cls': 'ValueError', 'raise_msg': '"""Wrong keys for pose feature provided"""'}), "(value=raising_pose_inputs, raise_cls=\n ValueError, raise_msg='Wrong keys for pose feature provided')\n", (3487, 3591), True, 'import tensorflow_datasets as tfds\n'), ((3662, 3803), 'tensorflow_datasets.testing.FeatureExpectationItem', 'tfds.testing.FeatureExpectationItem', ([], {'value': 'raising_lookat_inputs', 'raise_cls': 'ValueError', 'raise_msg': '"""Wrong keys for pose feature provided"""'}), "(value=raising_lookat_inputs, raise_cls=\n ValueError, raise_msg='Wrong keys for pose feature provided')\n", (3697, 3803), True, 'import tensorflow_datasets as tfds\n'), ((3874, 4005), 'tensorflow_datasets.testing.FeatureExpectationItem', 'tfds.testing.FeatureExpectationItem', ([], {'value': 'raising_pose_entry', 'raise_cls': 'ValueError', 'raise_msg': '"""Pose needs to be a dictionary"""'}), "(value=raising_pose_entry, raise_cls=\n ValueError, raise_msg='Pose needs to be a dictionary')\n", (3909, 4005), True, 'import tensorflow_datasets as tfds\n'), ((5291, 5366), 'tensorflow_datasets.testing.FeatureExpectationItem', 'tfds.testing.FeatureExpectationItem', ([], {'value': 'inputs', 'expected': 'expected_camera'}), '(value=inputs, expected=expected_camera)\n', (5326, 5366), True, 'import tensorflow_datasets as tfds\n'), ((6756, 6831), 'tensorflow_datasets.testing.FeatureExpectationItem', 'tfds.testing.FeatureExpectationItem', ([], {'value': 'inputs', 'expected': 'expected_camera'}), '(value=inputs, expected=expected_camera)\n', (6791, 6831), True, 'import tensorflow_datasets as tfds\n'), ((6892, 7052), 'tensorflow_datasets.testing.FeatureExpectationItem', 'tfds.testing.FeatureExpectationItem', ([], {'value': 'raising_inputs', 'raise_cls': 'ValueError', 'raise_msg': '"""If aspect ratio is provided, f needs to be a single float"""'}), "(value=raising_inputs, raise_cls=\n ValueError, raise_msg=\n 'If aspect ratio is provided, f needs to be a single float')\n", (6927, 7052), True, 'import tensorflow_datasets as tfds\n')]
|
import os
import shutil
import pytest
from top_secret import FileSecretSource, DirectorySecretSource
from top_secret import SecretMissingError
SECRET_BASE_PATH = os.path.join("/tmp", ".top_secret_test")
@pytest.fixture(scope="module", autouse=True)
def setup_teardown_module():
# Setup
os.makedirs(SECRET_BASE_PATH, exist_ok=True)
yield
# Tear Down
if os.path.exists(SECRET_BASE_PATH):
shutil.rmtree(SECRET_BASE_PATH)
@pytest.fixture(autouse=True)
def setup_function():
for file in os.listdir(SECRET_BASE_PATH):
path = os.path.join(SECRET_BASE_PATH, file)
if os.path.isfile(path):
os.unlink(path)
def test_file_ss_raise_if_file_does_not_exist():
ss = DirectorySecretSource(SECRET_BASE_PATH)
with pytest.raises(SecretMissingError):
ss.get("missing.txt")
def test_file_ss_exists():
ss = DirectorySecretSource(SECRET_BASE_PATH)
with open(os.path.join(SECRET_BASE_PATH, "my_secret.txt"), "w") as fd:
fd.write("secret")
secret = ss.get("my_secret.txt")
assert secret == "secret"
def test_file_ss_stripes_whitespaces():
ss = DirectorySecretSource(SECRET_BASE_PATH)
with open(os.path.join(SECRET_BASE_PATH, "my_secret.txt"), "w") as fd:
fd.write("\t\n secret\t \n\n")
secret = ss.get("my_secret.txt")
assert secret == "secret"
def test_file_ss_without_whitespace_stripping():
ss = DirectorySecretSource(SECRET_BASE_PATH, stripe_whitespaces=False)
secret_in_file = "\t\n secret\t \n\n"
with open(os.path.join(SECRET_BASE_PATH, "my_secret.txt"), "w") as fd:
fd.write(secret_in_file)
secret = ss.get("my_secret.txt")
assert secret == secret_in_file
def test_file_ss_postfix():
ss = DirectorySecretSource(SECRET_BASE_PATH, postfix=".txt")
with open(os.path.join(SECRET_BASE_PATH, "my_secret.txt"), "w") as fd:
fd.write("secret")
secret = ss.get("my_secret")
assert secret == "secret"
def test_file_ss_get_secret_by_abs_path():
ss = DirectorySecretSource(SECRET_BASE_PATH)
path = os.path.join(SECRET_BASE_PATH, "my_secret.txt")
secret_in_file = "secret"
with open(path, "w") as fd:
fd.write(secret_in_file)
secret = ss.get(path)
assert secret == secret_in_file
|
[
"os.makedirs",
"os.unlink",
"top_secret.DirectorySecretSource",
"pytest.fixture",
"os.path.exists",
"os.path.isfile",
"pytest.raises",
"shutil.rmtree",
"os.path.join",
"os.listdir"
] |
[((165, 205), 'os.path.join', 'os.path.join', (['"""/tmp"""', '""".top_secret_test"""'], {}), "('/tmp', '.top_secret_test')\n", (177, 205), False, 'import os\n'), ((209, 253), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""', 'autouse': '(True)'}), "(scope='module', autouse=True)\n", (223, 253), False, 'import pytest\n'), ((456, 484), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (470, 484), False, 'import pytest\n'), ((299, 343), 'os.makedirs', 'os.makedirs', (['SECRET_BASE_PATH'], {'exist_ok': '(True)'}), '(SECRET_BASE_PATH, exist_ok=True)\n', (310, 343), False, 'import os\n'), ((379, 411), 'os.path.exists', 'os.path.exists', (['SECRET_BASE_PATH'], {}), '(SECRET_BASE_PATH)\n', (393, 411), False, 'import os\n'), ((523, 551), 'os.listdir', 'os.listdir', (['SECRET_BASE_PATH'], {}), '(SECRET_BASE_PATH)\n', (533, 551), False, 'import os\n'), ((726, 765), 'top_secret.DirectorySecretSource', 'DirectorySecretSource', (['SECRET_BASE_PATH'], {}), '(SECRET_BASE_PATH)\n', (747, 765), False, 'from top_secret import FileSecretSource, DirectorySecretSource\n'), ((878, 917), 'top_secret.DirectorySecretSource', 'DirectorySecretSource', (['SECRET_BASE_PATH'], {}), '(SECRET_BASE_PATH)\n', (899, 917), False, 'from top_secret import FileSecretSource, DirectorySecretSource\n'), ((1140, 1179), 'top_secret.DirectorySecretSource', 'DirectorySecretSource', (['SECRET_BASE_PATH'], {}), '(SECRET_BASE_PATH)\n', (1161, 1179), False, 'from top_secret import FileSecretSource, DirectorySecretSource\n'), ((1419, 1484), 'top_secret.DirectorySecretSource', 'DirectorySecretSource', (['SECRET_BASE_PATH'], {'stripe_whitespaces': '(False)'}), '(SECRET_BASE_PATH, stripe_whitespaces=False)\n', (1440, 1484), False, 'from top_secret import FileSecretSource, DirectorySecretSource\n'), ((1749, 1804), 'top_secret.DirectorySecretSource', 'DirectorySecretSource', (['SECRET_BASE_PATH'], {'postfix': '""".txt"""'}), "(SECRET_BASE_PATH, postfix='.txt')\n", (1770, 1804), False, 'from top_secret import FileSecretSource, DirectorySecretSource\n'), ((2025, 2064), 'top_secret.DirectorySecretSource', 'DirectorySecretSource', (['SECRET_BASE_PATH'], {}), '(SECRET_BASE_PATH)\n', (2046, 2064), False, 'from top_secret import FileSecretSource, DirectorySecretSource\n'), ((2077, 2124), 'os.path.join', 'os.path.join', (['SECRET_BASE_PATH', '"""my_secret.txt"""'], {}), "(SECRET_BASE_PATH, 'my_secret.txt')\n", (2089, 2124), False, 'import os\n'), ((421, 452), 'shutil.rmtree', 'shutil.rmtree', (['SECRET_BASE_PATH'], {}), '(SECRET_BASE_PATH)\n', (434, 452), False, 'import shutil\n'), ((568, 604), 'os.path.join', 'os.path.join', (['SECRET_BASE_PATH', 'file'], {}), '(SECRET_BASE_PATH, file)\n', (580, 604), False, 'import os\n'), ((616, 636), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (630, 636), False, 'import os\n'), ((775, 808), 'pytest.raises', 'pytest.raises', (['SecretMissingError'], {}), '(SecretMissingError)\n', (788, 808), False, 'import pytest\n'), ((650, 665), 'os.unlink', 'os.unlink', (['path'], {}), '(path)\n', (659, 665), False, 'import os\n'), ((933, 980), 'os.path.join', 'os.path.join', (['SECRET_BASE_PATH', '"""my_secret.txt"""'], {}), "(SECRET_BASE_PATH, 'my_secret.txt')\n", (945, 980), False, 'import os\n'), ((1195, 1242), 'os.path.join', 'os.path.join', (['SECRET_BASE_PATH', '"""my_secret.txt"""'], {}), "(SECRET_BASE_PATH, 'my_secret.txt')\n", (1207, 1242), False, 'import os\n'), ((1542, 1589), 'os.path.join', 'os.path.join', (['SECRET_BASE_PATH', 
'"""my_secret.txt"""'], {}), "(SECRET_BASE_PATH, 'my_secret.txt')\n", (1554, 1589), False, 'import os\n'), ((1820, 1867), 'os.path.join', 'os.path.join', (['SECRET_BASE_PATH', '"""my_secret.txt"""'], {}), "(SECRET_BASE_PATH, 'my_secret.txt')\n", (1832, 1867), False, 'import os\n')]
|
# Library Imports
from os import execl
import nextcord, json
from nextcord.ui import button, View, Select
# Custom Imports
from Functions.Embed import *
# Options from Json
with open('Config/Options.json') as RawOptions:
Options = json.load(RawOptions)
# Note: The roles are fetched by ID. These IDs are stored as the values of the options in the dropdowns. To change them, modify the "options" variable in each subclass of Select
# Age roles dropdown
class AgeMenu(Select):
def __init__(self, bot:commands.Bot):
self.bot = bot
options = [
nextcord.SelectOption(label = "- 13", description = "Click to get/remove this role", value = "886537316418609172"),
nextcord.SelectOption(label = "+ 13", description = "Click to get/remove this role", value = "886537379450589215"),
nextcord.SelectOption(label = "+ 16", description = "Click to get/remove this role", value = "886537464452366376"),
nextcord.SelectOption(label = "+ 18", description = "Click to get/remove this role", value = "886537714206392320"),
]
super().__init__(placeholder = 'Age Roles...', min_values = 1, max_values = 1, options = options, custom_id = "AgeRoleMenu2000", row = 3)
async def callback(self, interaction: nextcord.Interaction):
try:
Guild = self.bot.get_guild(Options['Guild']['ID'])
Role = Guild.get_role(int(self.values[0]))
if Role in interaction.user.roles:
await interaction.user.remove_roles(Role)
else:
await interaction.user.add_roles(Role)
await interaction.response.edit_message(embed = interaction.message.embeds[0])
except: pass
# Gender roles dropdown
class SexMenu(Select):
def __init__(self, bot:commands.Bot):
self.bot = bot
options = [
nextcord.SelectOption(label = "Male", description = "Click to get/remove this role", value = "886537847258112071"),
nextcord.SelectOption(label = "Female", description = "Click to get/remove this role", value = "886537907412815912"),
]
super().__init__(placeholder = 'Gender Roles...', min_values = 1, max_values = 1, options = options, custom_id = "SexRoleMenu2000", row = 2)
async def callback(self, interaction: nextcord.Interaction):
try:
Guild = self.bot.get_guild(Options['Guild']['ID'])
Role = Guild.get_role(int(self.values[0]))
if Role in interaction.user.roles:
await interaction.user.remove_roles(Role)
else:
await interaction.user.add_roles(Role)
await interaction.response.edit_message(embed = interaction.message.embeds[0])
except: pass
# Hobby roles dropdown
class InterestMenu(Select):
def __init__(self, bot:commands.Bot):
self.bot = bot
options = [
nextcord.SelectOption(label = "Arts", description = "Click to get/remove this role", value = "886538932018348032"),
nextcord.SelectOption(label = "Sports", description = "Click to get/remove this role", value = "886538985852248094"),
nextcord.SelectOption(label = "Music", description = "Click to get/remove this role", value = "886539050062864404"),
nextcord.SelectOption(label = "Reading", description = "Click to get/remove this role", value = "886539142740209714"),
nextcord.SelectOption(label = "Cooking", description = "Click to get/remove this role", value = "886539267998896128"),
nextcord.SelectOption(label = "Singing", description = "Click to get/remove this role", value = "886539873631211520"),
]
super().__init__(placeholder = 'Interest Roles...', min_values = 1, max_values = 1, options = options, custom_id = "InterestRoleMenu2000", row = 1)
async def callback(self, interaction: nextcord.Interaction):
try:
Guild = self.bot.get_guild(Options['Guild']['ID'])
Role = Guild.get_role(int(self.values[0]))
if Role in interaction.user.roles:
await interaction.user.remove_roles(Role)
else:
await interaction.user.add_roles(Role)
await interaction.response.edit_message(embed = interaction.message.embeds[0])
except: pass
# Mention roles dropdown
class PingMenu(Select):
def __init__(self, bot:commands.Bot):
self.bot = bot
options = [
nextcord.SelectOption(label = "Announcement", description = "Click to get/remove this role", value = "886540581004795904"),
nextcord.SelectOption(label = "Event", description = "Click to get/remove this role", value = "886540636168282132"),
nextcord.SelectOption(label = "Partner", description = "Click to get/remove this role", value = "886540681663873065"),
nextcord.SelectOption(label = "Chat Revive", description = "Click to get/remove this role", value = "886540760583901185")
]
super().__init__(placeholder = 'Mention Roles...', min_values = 1, max_values = 1, options = options, custom_id = "PingRoleMenu2000", row = 0)
async def callback(self, interaction: nextcord.Interaction):
try:
Guild = self.bot.get_guild(Options['Guild']['ID'])
Role = Guild.get_role(int(self.values[0]))
if Role in interaction.user.roles:
await interaction.user.remove_roles(Role)
else:
await interaction.user.add_roles(Role)
await interaction.response.edit_message(embed = interaction.message.embeds[0])
except: pass
# Button array
class RoleView(View):
def __init__(self, bot:commands.Bot):
super().__init__(timeout = None)
self.response = None
self.bot = bot
# Add all the views
self.add_item(AgeMenu(bot))
self.add_item(SexMenu(bot))
self.add_item(InterestMenu(bot))
self.add_item(PingMenu(bot))
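# Because timeout=None and every Select carries a fixed custom_id, this view qualifies as a persistent view;
# it can, for example, be re-registered after a restart with bot.add_view(RoleView(bot)) in a startup hook (illustrative usage).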
|
[
"json.load",
"nextcord.SelectOption"
] |
[((237, 258), 'json.load', 'json.load', (['RawOptions'], {}), '(RawOptions)\n', (246, 258), False, 'import nextcord, json\n'), ((584, 697), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""- 13"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886537316418609172"""'}), "(label='- 13', description=\n 'Click to get/remove this role', value='886537316418609172')\n", (605, 697), False, 'import nextcord, json\n'), ((712, 825), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""+ 13"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886537379450589215"""'}), "(label='+ 13', description=\n 'Click to get/remove this role', value='886537379450589215')\n", (733, 825), False, 'import nextcord, json\n'), ((840, 953), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""+ 16"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886537464452366376"""'}), "(label='+ 16', description=\n 'Click to get/remove this role', value='886537464452366376')\n", (861, 953), False, 'import nextcord, json\n'), ((968, 1081), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""+ 18"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886537714206392320"""'}), "(label='+ 18', description=\n 'Click to get/remove this role', value='886537714206392320')\n", (989, 1081), False, 'import nextcord, json\n'), ((1878, 1991), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""Male"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886537847258112071"""'}), "(label='Male', description=\n 'Click to get/remove this role', value='886537847258112071')\n", (1899, 1991), False, 'import nextcord, json\n'), ((2006, 2121), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""Female"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886537907412815912"""'}), "(label='Female', description=\n 'Click to get/remove this role', value='886537907412815912')\n", (2027, 2121), False, 'import nextcord, json\n'), ((2925, 3038), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""Arts"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886538932018348032"""'}), "(label='Arts', description=\n 'Click to get/remove this role', value='886538932018348032')\n", (2946, 3038), False, 'import nextcord, json\n'), ((3053, 3168), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""Sports"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886538985852248094"""'}), "(label='Sports', description=\n 'Click to get/remove this role', value='886538985852248094')\n", (3074, 3168), False, 'import nextcord, json\n'), ((3183, 3297), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""Music"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886539050062864404"""'}), "(label='Music', description=\n 'Click to get/remove this role', value='886539050062864404')\n", (3204, 3297), False, 'import nextcord, json\n'), ((3312, 3428), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""Reading"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886539142740209714"""'}), "(label='Reading', description=\n 'Click to get/remove this role', value='886539142740209714')\n", (3333, 3428), False, 'import nextcord, json\n'), ((3443, 3559), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""Cooking"""', 
'description': '"""Click to get/remove this role"""', 'value': '"""886539267998896128"""'}), "(label='Cooking', description=\n 'Click to get/remove this role', value='886539267998896128')\n", (3464, 3559), False, 'import nextcord, json\n'), ((3574, 3690), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""Singing"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886539873631211520"""'}), "(label='Singing', description=\n 'Click to get/remove this role', value='886539873631211520')\n", (3595, 3690), False, 'import nextcord, json\n'), ((4499, 4620), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""Announcement"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886540581004795904"""'}), "(label='Announcement', description=\n 'Click to get/remove this role', value='886540581004795904')\n", (4520, 4620), False, 'import nextcord, json\n'), ((4635, 4749), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""Event"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886540636168282132"""'}), "(label='Event', description=\n 'Click to get/remove this role', value='886540636168282132')\n", (4656, 4749), False, 'import nextcord, json\n'), ((4764, 4880), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""Partner"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886540681663873065"""'}), "(label='Partner', description=\n 'Click to get/remove this role', value='886540681663873065')\n", (4785, 4880), False, 'import nextcord, json\n'), ((4895, 5015), 'nextcord.SelectOption', 'nextcord.SelectOption', ([], {'label': '"""Chat Revive"""', 'description': '"""Click to get/remove this role"""', 'value': '"""886540760583901185"""'}), "(label='Chat Revive', description=\n 'Click to get/remove this role', value='886540760583901185')\n", (4916, 5015), False, 'import nextcord, json\n')]
|
from django.shortcuts import render, get_object_or_404, get_list_or_404
from .models import Article,Comment
from django.core.paginator import Paginator,EmptyPage,PageNotAnInteger
from .forms import ShareEmailForm,CommentForm,SearchForm,ArticleForm
# from django.core.mail import send_mail
from django.db.models import Count
from django.contrib.auth.decorators import login_required
from taggit.models import Tag
# for more aggregation functions, see the Django docs at /topics/db/aggregation/
# Create your views here.
# tag_slug comes with the URL of this request
@login_required
def article_list(request, tag_slug=None):
all_articles=Article.objects.all()
tag=None
if tag_slug:
tag=get_object_or_404(Tag,slug=tag_slug)
all_articles=all_articles.filter(tags__in=[tag])
    # each page only displays 3 articles
paginator=Paginator(all_articles,3)
page=request.GET.get('page')
try:
one_page_articles=paginator.page(page)
except PageNotAnInteger:
one_page_articles=paginator.page(1)
except EmptyPage:
#retrieve the last page content if page number beyond range
one_page_articles=paginator.page(paginator.num_pages)
new_article = None
if request.method == 'POST':
article_form = ArticleForm(data=request.POST)
if article_form.is_valid():
            # article_form.save creates an Article object, but does not save it to the database immediately
new_article = article_form.save(commit=False)
new_article.author = request.user
cd = article_form.cleaned_data
from django.utils import timezone
from django.contrib import messages
            # only allow one article with this slug per day
            if not Article.objects.filter(publish_time__date=timezone.now().date()).filter(label_in_url=cd.get('label_in_url')).exists():
new_article.save()
for each_tag in cd.get('tags'):
new_article.tags.add(each_tag)
messages.success(request, 'profile and user information updated successfully')
from django.http.response import HttpResponseRedirect
from django.urls import reverse
return HttpResponseRedirect(reverse('article:article_list'))
else:
messages.error(request, 'updated failed, may because duplicate slug today')
# if this view is called by GET method, then render a brand new form
else:
article_form = ArticleForm()
return render(request,
'article/articles/article_list.html',
{'articles':one_page_articles,
'tag':tag,
'article_form':article_form})
@login_required
def article_detail(request,year,month,day,label_in_url):
# query the Article table using filter as below
article=get_list_or_404(Article,label_in_url=label_in_url,
publish_time__year=year,
publish_time__month=month,
publish_time__day=day,
)[0]
# list active comments
comments=article.article_comments.all()
    # each page only displays 6 comments
paginator = Paginator(comments, 6)
page = request.GET.get('page')
try:
one_page_comments = paginator.page(page)
except PageNotAnInteger:
one_page_comments = paginator.page(1)
except EmptyPage:
# retrieve the last page content if page number beyond range
one_page_comments = paginator.page(paginator.num_pages)
new_comment=None
if request.method=='POST':
comment_form=CommentForm(data=request.POST)
if comment_form.is_valid():
            # comment_form.save creates a Comment object, but does not save it to the database immediately
new_comment=comment_form.save(commit=False)
new_comment.article=article
new_comment.user=request.user
new_comment.save()
            # prevent re-submitting the same form when the page is refreshed
from django.http.response import HttpResponseRedirect
from django.urls import reverse
return HttpResponseRedirect(reverse('article:article_detail',
args=[
article.publish_time.year,
article.publish_time.month,
article.publish_time.day,
article.label_in_url
]))
# if this view is called by GET method, then render a brand new form
else:
comment_form=CommentForm()
    # flat=True makes values_list() return a flat list of ids instead of a list of one-tuples
article_tags_list=article.tags.values_list('id',flat=True)
similar_articles=Article.published_set.filter(tags__in=article_tags_list).exclude(id=article.id)
    # use Count() to annotate each retrieved article with a new field named same_tags, then
    # order those articles by this new attribute - same_tags
similar_articles=similar_articles.annotate(same_tags=Count('tags')).order_by('-same_tags','-publish_time')[:3]
# use the object returned by above filter to render detail.html
return render(request,'article/articles/article_detail.html',
{'article':article,
'comments':one_page_comments,
'new_comment':new_comment,
'comment_form':comment_form,
'similar_articles':similar_articles,})
# @login_required
# def share_article(request,article_id):
# # retrieve article by its id
# article=get_object_or_404(Article,id=article_id)
# sent=False
# error=''
# sender_address='<EMAIL>'
#
# if request.method=='POST':
# # submitted data by user is stored in request.Post
# form=ShareEmailForm(request.POST)
# if form.is_valid():
# try:
# # .cleaned_data returns a dict containing only
# # valid form field data
# data_from_form=form.cleaned_data
# # use .build_absolute_uri to build a complete URL including
# # HTTP scheme and hostname with the post url
# article_url=request.build_absolute_uri(
# article.get_absolute_url()
# )
# subject="user {} whose email is {} recommends this article {}".format(data_from_form['name'],data_from_form['email'],article.title)
# message="read {} at {} \n\n {}'s email_content:{}".format(article.title,article_url,data_from_form['name'],data_from_form['email_content'])
# # here I must CC a copy to myself, otherwise the send will fail
# send_mail(subject,message,sender_address,[sender_address,data_from_form['to']])
# sent=True
# except Exception:
# form=ShareEmailForm()
# error='something went wrong, failed to send email, sorry'
# else:
# form=ShareEmailForm()
#
# return render(request,'article/articles/share.html',
# {'article':article,
# 'form':form,
# 'sent':sent,
# 'error':error})
|
[
"django.shortcuts.get_list_or_404",
"django.utils.timezone.now",
"django.contrib.messages.error",
"django.urls.reverse",
"django.shortcuts.get_object_or_404",
"django.core.paginator.Paginator",
"django.shortcuts.render",
"django.contrib.messages.success",
"django.db.models.Count"
] |
[((837, 863), 'django.core.paginator.Paginator', 'Paginator', (['all_articles', '(3)'], {}), '(all_articles, 3)\n', (846, 863), False, 'from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n'), ((2457, 2589), 'django.shortcuts.render', 'render', (['request', '"""article/articles/article_list.html"""', "{'articles': one_page_articles, 'tag': tag, 'article_form': article_form}"], {}), "(request, 'article/articles/article_list.html', {'articles':\n one_page_articles, 'tag': tag, 'article_form': article_form})\n", (2463, 2589), False, 'from django.shortcuts import render, get_object_or_404, get_list_or_404\n'), ((3160, 3182), 'django.core.paginator.Paginator', 'Paginator', (['comments', '(6)'], {}), '(comments, 6)\n', (3169, 3182), False, 'from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n'), ((5148, 5360), 'django.shortcuts.render', 'render', (['request', '"""article/articles/article_detail.html"""', "{'article': article, 'comments': one_page_comments, 'new_comment':\n new_comment, 'comment_form': comment_form, 'similar_articles':\n similar_articles}"], {}), "(request, 'article/articles/article_detail.html', {'article': article,\n 'comments': one_page_comments, 'new_comment': new_comment,\n 'comment_form': comment_form, 'similar_articles': similar_articles})\n", (5154, 5360), False, 'from django.shortcuts import render, get_object_or_404, get_list_or_404\n'), ((691, 728), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Tag'], {'slug': 'tag_slug'}), '(Tag, slug=tag_slug)\n', (708, 728), False, 'from django.shortcuts import render, get_object_or_404, get_list_or_404\n'), ((2795, 2925), 'django.shortcuts.get_list_or_404', 'get_list_or_404', (['Article'], {'label_in_url': 'label_in_url', 'publish_time__year': 'year', 'publish_time__month': 'month', 'publish_time__day': 'day'}), '(Article, label_in_url=label_in_url, publish_time__year=year,\n publish_time__month=month, publish_time__day=day)\n', (2810, 2925), False, 'from django.shortcuts import render, get_object_or_404, get_list_or_404\n'), ((1940, 2018), 'django.contrib.messages.success', 'messages.success', (['request', '"""profile and user information updated successfully"""'], {}), "(request, 'profile and user information updated successfully')\n", (1956, 2018), False, 'from django.contrib import messages\n'), ((2248, 2323), 'django.contrib.messages.error', 'messages.error', (['request', '"""updated failed, may because duplicate slug today"""'], {}), "(request, 'updated failed, may because duplicate slug today')\n", (2262, 2323), False, 'from django.contrib import messages\n'), ((4135, 4283), 'django.urls.reverse', 'reverse', (['"""article:article_detail"""'], {'args': '[article.publish_time.year, article.publish_time.month, article.\n publish_time.day, article.label_in_url]'}), "('article:article_detail', args=[article.publish_time.year, article.\n publish_time.month, article.publish_time.day, article.label_in_url])\n", (4142, 4283), False, 'from django.urls import reverse\n'), ((2181, 2212), 'django.urls.reverse', 'reverse', (['"""article:article_list"""'], {}), "('article:article_list')\n", (2188, 2212), False, 'from django.urls import reverse\n'), ((5009, 5022), 'django.db.models.Count', 'Count', (['"""tags"""'], {}), "('tags')\n", (5014, 5022), False, 'from django.db.models import Count\n'), ((1720, 1734), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1732, 1734), False, 'from django.utils import timezone\n')]
|
from django.contrib.auth.backends import ModelBackend
from 你个人的.models import UserProfile  # '你个人的' ("your own") is a placeholder for your own app's package name
from social_auth.models import UserSocialAuth
class OAuth2Backend(ModelBackend):
'''
oauth backend
'''
def authenticate(self, provider=None, uid=None):
try:
user_social = UserSocialAuth.objects.get(provider=provider, uid=uid)
return user_social.user
except UserSocialAuth.DoesNotExist:
return None
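# This backend only takes effect once it is listed in settings.AUTHENTICATION_BACKENDS, e.g.
#   AUTHENTICATION_BACKENDS = ('path.to.backends.OAuth2Backend', 'django.contrib.auth.backends.ModelBackend')
# (the dotted path is illustrative).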
|
[
"social_auth.models.UserSocialAuth.objects.get"
] |
[((299, 353), 'social_auth.models.UserSocialAuth.objects.get', 'UserSocialAuth.objects.get', ([], {'provider': 'provider', 'uid': 'uid'}), '(provider=provider, uid=uid)\n', (325, 353), False, 'from social_auth.models import UserSocialAuth\n')]
|
"""Generic way to build regex from examples."""
import logging
import regex as re
from typing import List, Dict
import pandas
from tabulate import tabulate
logger = logging.getLogger(__name__)
def merge_regex(regex_tokens: List[str]):
"""Merge a list of regex to one group."""
tokens = r'|'.join(sorted(regex_tokens, key=len, reverse=True))
return f'(?:{tokens})'
def harmonize_whitespaces(text):
"""Convert multiple whitespaces to one."""
single_whitespace_replaced = re.sub(r'(?<! ) (?! )', r'[ ]+', text)
suggestion = re.sub(r' {2,}', r'[ ]{2,}', single_whitespace_replaced)
return suggestion
def escape(string: str):
"""Escape a string, so that it can still be used to create a regex."""
escaped_original = (
string.replace('\\', "\\\\")
.replace('[', r'\[')
.replace(']', r'\]')
.replace('+', r'[\+]')
.replace('*', r'\*')
.replace('|', r'\|')
.replace('\n', '\n')
.replace('-', '[-]')
.replace('.', r'\.')
.replace('$', r'\$')
.replace('(', r'\(')
.replace(')', r'\)')
.replace('@', r'\@')
.replace('?', r'\?')
.replace('!', r'\!')
.replace(',', r'\,')
.replace('#', r'\#')
.replace('{', r'\{')
.replace('}', r'\}')
)
return escaped_original
def plausible_regex(suggestion, string):
"""
Test regex for plausibility.
    We keep these checks in production to collect edge cases; on failure we log and return an empty string instead of raising.
"""
try:
re.compile(suggestion)
plausibility_run = re.findall(suggestion, string)
if not plausibility_run:
logger.error(
f'Using "{repr(string)}" we found the regex {repr(suggestion)}, which does not match the input.'
)
logger.error(
                'We are not able to convert your string to a valid regex. Please help to make it happen.'
)
result = ''
else:
result = suggestion
except re.error as e:
logger.exception(f'The proposed regex >>{repr(suggestion)}<< is not a valid regex of string: >>{string}<<')
        logger.error('We are not able to convert your string to a valid regex. Please help to make it happen.')
logger.error(e)
result = ''
return result
def suggest_regex_for_string(string: str, replace_characters: bool = False, replace_numbers: bool = True):
"""Suggest regex for a given string."""
escaped_original = escape(string)
if replace_characters:
# strict replace capital letters
strict_escaped_capital_letters = re.sub(r'[A-Z\Ä\Ö\Ü]', r'[A-ZÄÖÜ]', escaped_original)
# combine multiple capital letters in sequence
combined_capital_letters = re.sub(r'(\[A-Z\Ä\Ö\Ü\]){2,}', r'[A-ZÄÖÜ]+', strict_escaped_capital_letters)
# escape all lower case letters
escaped_small_letters = re.sub(r'[a-zäöüß]', r'[a-zäöüß]', combined_capital_letters)
# combine multiple lower case letters in sequence
escaped_original = re.sub(r'(\[a-zäöüß\]){2,}', '[a-zäöüß]+', escaped_small_letters)
if replace_numbers:
escaped_original = re.sub('\\d', r'\\d', escaped_original)
# replace multiple whitespaces with r' +'
suggestion = harmonize_whitespaces(escaped_original)
suggestion = plausible_regex(suggestion, string)
return suggestion
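# Illustrative example: with the defaults above,
#   suggest_regex_for_string('Invoice 2023')
# returns 'Invoice[ ]+\d\d\d\d', because the single whitespace is generalized to '[ ]+' and each digit becomes '\d'.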
def get_best_regex(evaluations: List, log_stats: bool = True, allow_zero_f1score=False) -> List:
"""Optimize selection of one regex in scenarios were we are unsure if all correct Annotations are Labeled."""
df = pandas.DataFrame(evaluations)
if df.empty:
logger.error('We cannot find any regex!')
return []
if not allow_zero_f1score:
df = df.loc[df['f1_score'] > 0]
df = df.sort_values(
[
'total_correct_findings',
'f1_score',
'regex_quality',
'annotation_precision',
'runtime', # take the fastest regex
],
ascending=[0, 0, 0, 0, 1],
).reset_index(drop=True)
df['correct_findings_id'] = df['correct_findings'].apply(lambda x: set(y.id_ for y in x))
df['all_matches_id'] = [set.union(*df.loc[0:i, 'correct_findings_id']) for i in range(len(df.index))]
df['new_matches_id'] = df.all_matches_id - df.all_matches_id.shift(1)
null_mask = df['new_matches_id'].isnull()
df.loc[null_mask, 'new_matches_id'] = df.loc[null_mask]['correct_findings_id']
df.insert(0, 'new_matches_count', df['new_matches_id'].str.len())
df = df.drop(['correct_findings_id', 'correct_findings', 'all_matches_id', 'new_matches_id'], axis=1)
    # iterate over the sorted df and mark any row that adds no matching value compared to the regex above; we used the maximum window size
# matched_document = df.filter(regex=r'document_\d+').rolling(min_periods=1, window=100000000).max()
    # any regex which matches more Documents than the regex before is a good regex
# relevant_regex = matched_document.sum(axis=1).diff()
# df['matched_annotations_total'] = matched_document.sum(axis=1)
# df['matched_annotations_additional'] = relevant_regex
# get the index of all good regex
index_of_regex = df[df['new_matches_count'] > 0].index
if log_stats:
stats = df.loc[index_of_regex][
['regex', 'runtime', 'annotation_recall', 'annotation_precision', 'f1_score', 'new_matches_count']
]
logger.info(f'\n\n{tabulate(stats, floatfmt=".4f", headers="keys", tablefmt="pipe")}\n')
# best_regex = df.loc[index_of_regex, 'regex'].to_list()
best_regex = df.loc[df['new_matches_count'] > 0, 'regex'].to_list()
return best_regex
def regex_matches(
doctext: str, regex: str, start_chr: int = 0, flags=0, overlapped=False, keep_full_match=True, filtered_group=None
) -> List[Dict]:
"""
    Convert a text, with the help of one regex, into text offsets.
    Each regex match can yield several results: one per (named) group and, if keep_full_match is set, one for the
    full match (group '0'). The function regexinfo in konfuzio.wrapper standardizes the information we keep per match.
    :param filtered_group: Name of the regex group you want to return as results
    :param keep_full_match: Keep the information about the full match even if the regex contains groups
    :param overlapped: Allow regex matches to overlap, e.g. ' ([^ ]*) ' creates an overlap on ' my name '
    :param flags: Regex flags used to compile the regex
    :param doctext: A text you want to apply a regex to
    :param regex: The regex, either with groups, named groups or just a plain regex
    :param start_chr: The start character of the annotation_set, in case the text is an annotation_set within a text
"""
results = []
# compile regex pattern
# will throw an error if the name of the group, ?P<GROUP_NAME>, is not a valid Python variable name,
# e.g. GROUP_NAME starts with a numeric character.
# we catch this error and then add a leading underscore to the group name, making it a valid Python variable name
try:
pattern = re.compile(regex, flags=flags)
except re.error:
# throws error if group name is an invalid Python variable
match = re.search(r'\?P<.*?>', regex) # match the invalid group name
group_name = match.group(0) # get the string representation
group_name = group_name.replace('?P<', '?P<_') # add a leading underscore
regex = re.sub(r'\?P<.*?>', group_name, regex) # replace invalid group name with new one
pattern = re.compile(regex, flags=flags) # try the compile again
for match in pattern.finditer(doctext, overlapped=overlapped):
# hold results per match
_results = []
if match.groups():
# parse named groups, if available
for group_name, group_index in match.re.groupindex.items():
if match[group_index] is not None:
# if one regex group ( a annotation's token) does not match, it returns none
# https://stackoverflow.com/a/59120080
_results.append(
{
'regex_used': repr(regex),
'regex_group': group_name,
'value': match[group_index],
'start_offset': match.regs[group_index][0],
'end_offset': match.regs[group_index][1],
'start_text': start_chr,
}
)
# find unnamed groups if available
unnamed_groups = [x for x in range(1, match.re.groups + 1) if x not in match.re.groupindex.values()]
for group_index in unnamed_groups:
_results.append(
{
'regex_used': repr(regex),
'regex_group': str(group_index),
'value': match[group_index],
'start_offset': match.regs[group_index][0],
'end_offset': match.regs[group_index][1],
'start_text': start_chr,
}
)
if match.groups() and keep_full_match or not match.groups():
_results.append(
{
'regex_used': repr(regex),
'regex_group': '0',
'value': match.group(),
'start_offset': match.span()[0],
'end_offset': match.span()[1],
'start_text': start_chr,
}
)
# if bbox:
# # update each element in _results with bbox
# for res in _results:
# res['bounding_box'] = get_bbox(
# bbox, res['start_offset'] + res['start_text'], res['end_offset'] + res['start_text']
# )
# add results per match to all results
results.extend(_results)
if filtered_group:
# allow to use similar group names, you can use "Ort_" if the group name is "Ort_255_259"
return [result for result in results if filtered_group in result['regex_group']]
else:
return results
def generic_candidate_function(regex, flags=0, overlapped=False, filtered_group=None):
"""Regex approach tob build a candidate function by one regex.
:param filtered_group: If a regex contains multiple named groups, you can filter the respective group by name
    :param overlapped: Indicate if regex matches can overlap.
:param regex: Regex to create a candidate_function.
:param flags: Regex flag which should be considered.
:return: An initialized candidate function.
"""
# function to build candidates
def candidate_function(doctext):
"""
        Split the text into candidates and other text chunks.
:param doctext: Text of the candidate
:return: Tuple of list of candidates and other text chunks
"""
annotations = regex_matches(
doctext=doctext,
regex=regex,
flags=flags,
overlapped=overlapped,
keep_full_match=False,
filtered_group=filtered_group,
)
# reduce the available information to value, start_offset and end_offset:
# Due to historical aim of the candidate function to only find regex matches
matches_tuples = [(d['value'], (d['start_offset'], d['end_offset'])) for d in annotations]
candidates = [x for x, y in matches_tuples]
candidates_spans = [y for x, y in matches_tuples]
# Calculate other text bases on spans.
other_text = []
previous = 0
for span in candidates_spans:
other_text.append(doctext[previous : span[0]])
previous = span[1]
other_text.append(doctext[previous:])
return candidates, other_text, candidates_spans
candidate_function.__name__ = f"regex_{regex}"
return candidate_function
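# Illustrative usage sketch:
#   fn = generic_candidate_function(r'\d{4}')
#   candidates, other_text, spans = fn('Paid 1000 EUR in 2021')
#   # candidates == ['1000', '2021'], other_text == ['Paid ', ' EUR in ', ''], spans == [(5, 9), (17, 21)]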
|
[
"pandas.DataFrame",
"regex.findall",
"regex.compile",
"regex.search",
"regex.sub",
"tabulate.tabulate",
"logging.getLogger"
] |
[((167, 194), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (184, 194), False, 'import logging\n'), ((495, 531), 'regex.sub', 're.sub', (['"""(?<! ) (?! )"""', '"""[ ]+"""', 'text'], {}), "('(?<! ) (?! )', '[ ]+', text)\n", (501, 531), True, 'import regex as re\n'), ((551, 605), 'regex.sub', 're.sub', (['""" {2,}"""', '"""[ ]{2,}"""', 'single_whitespace_replaced'], {}), "(' {2,}', '[ ]{2,}', single_whitespace_replaced)\n", (557, 605), True, 'import regex as re\n'), ((3663, 3692), 'pandas.DataFrame', 'pandas.DataFrame', (['evaluations'], {}), '(evaluations)\n', (3679, 3692), False, 'import pandas\n'), ((1546, 1568), 'regex.compile', 're.compile', (['suggestion'], {}), '(suggestion)\n', (1556, 1568), True, 'import regex as re\n'), ((1596, 1626), 'regex.findall', 're.findall', (['suggestion', 'string'], {}), '(suggestion, string)\n', (1606, 1626), True, 'import regex as re\n'), ((2664, 2718), 'regex.sub', 're.sub', (['"""[A-Z\\\\Ä\\\\Ö\\\\Ü]"""', '"""[A-ZÄÖÜ]"""', 'escaped_original'], {}), "('[A-Z\\\\Ä\\\\Ö\\\\Ü]', '[A-ZÄÖÜ]', escaped_original)\n", (2670, 2718), True, 'import regex as re\n'), ((2808, 2887), 'regex.sub', 're.sub', (['"""(\\\\[A-Z\\\\Ä\\\\Ö\\\\Ü\\\\]){2,}"""', '"""[A-ZÄÖÜ]+"""', 'strict_escaped_capital_letters'], {}), "('(\\\\[A-Z\\\\Ä\\\\Ö\\\\Ü\\\\]){2,}', '[A-ZÄÖÜ]+', strict_escaped_capital_letters)\n", (2814, 2887), True, 'import regex as re\n'), ((2957, 3015), 'regex.sub', 're.sub', (['"""[a-zäöüß]"""', '"""[a-zäöüß]"""', 'combined_capital_letters'], {}), "('[a-zäöüß]', '[a-zäöüß]', combined_capital_letters)\n", (2963, 3015), True, 'import regex as re\n'), ((3103, 3169), 'regex.sub', 're.sub', (['"""(\\\\[a-zäöüß\\\\]){2,}"""', '"""[a-zäöüß]+"""', 'escaped_small_letters'], {}), "('(\\\\[a-zäöüß\\\\]){2,}', '[a-zäöüß]+', escaped_small_letters)\n", (3109, 3169), True, 'import regex as re\n'), ((3221, 3261), 'regex.sub', 're.sub', (['"""\\\\d"""', '"""\\\\\\\\d"""', 'escaped_original'], {}), "('\\\\d', '\\\\\\\\d', escaped_original)\n", (3227, 3261), True, 'import regex as re\n'), ((7113, 7143), 'regex.compile', 're.compile', (['regex'], {'flags': 'flags'}), '(regex, flags=flags)\n', (7123, 7143), True, 'import regex as re\n'), ((7248, 7277), 'regex.search', 're.search', (['"""\\\\?P<.*?>"""', 'regex'], {}), "('\\\\?P<.*?>', regex)\n", (7257, 7277), True, 'import regex as re\n'), ((7478, 7516), 'regex.sub', 're.sub', (['"""\\\\?P<.*?>"""', 'group_name', 'regex'], {}), "('\\\\?P<.*?>', group_name, regex)\n", (7484, 7516), True, 'import regex as re\n'), ((7578, 7608), 'regex.compile', 're.compile', (['regex'], {'flags': 'flags'}), '(regex, flags=flags)\n', (7588, 7608), True, 'import regex as re\n'), ((5519, 5583), 'tabulate.tabulate', 'tabulate', (['stats'], {'floatfmt': '""".4f"""', 'headers': '"""keys"""', 'tablefmt': '"""pipe"""'}), "(stats, floatfmt='.4f', headers='keys', tablefmt='pipe')\n", (5527, 5583), False, 'from tabulate import tabulate\n')]
|
from kurbopy import Point, CubicBez
import math
def test_cubicbez_deriv():
c = CubicBez(
Point(0.0, 0.0),
Point(1.0 / 3.0, 0.0),
Point(2.0 / 3.0, 1.0 / 3.0),
Point(1.0, 1.0),
);
deriv = c.deriv();
n = 10;
for i in range(1, n):
t = 1/(i*n)
delta = 1e-6
p = c.eval(t)
p1 = c.eval(t + delta)
d_approx = (p1.to_vec2() - p.to_vec2()) * (1/delta)
d = deriv.eval(t).to_vec2()
assert (d - d_approx).hypot() < delta * 2.0
def test_cubicbez_arclen():
# y = x^2
c = CubicBez(
Point(0.0, 0.0),
Point(1.0 / 3.0, 0.0),
Point(2.0 / 3.0, 1.0 / 3.0),
Point(1.0, 1.0),
);
true_arclen = 0.5 * math.sqrt(5.0) + 0.25 * math.log(2.0 + math.sqrt(5.0))
for i in range(0, 12):
accuracy = 0.1 ** i
error = c.arclen(accuracy) - true_arclen
assert abs(error) < accuracy
# def test_cubicbez_inv_arclen():
# // y = x^2 / 100
# c = CubicBez(
# Point(0.0, 0.0),
# Point(100.0 / 3.0, 0.0),
# Point(200.0 / 3.0, 100.0 / 3.0),
# Point(100.0, 100.0),
# );
# true_arclen = 100.0 * (0.5 * 5.0f64.sqrt() + 0.25 * (2.0 + 5.0f64.sqrt()).ln());
# for i in 0..12 {
# accuracy = 0.1f64.powi(i);
# n = 10;
# for j in 0..=n {
# arc = (j as f64) * ((n as f64).recip() * true_arclen);
# t = c.inv_arclen(arc, accuracy * 0.5);
# actual_arc = c.subsegment(0.0..t).arclen(accuracy * 0.5);
# assert!(
# (arc - actual_arc).abs() < accuracy,
# "at accuracy {:e, wanted { got {",
# accuracy,
# actual_arc,
# arc
# );
# // corner case: user passes accuracy larger than total arc length
# accuracy = true_arclen * 1.1;
# arc = true_arclen * 0.5;
# t = c.inv_arclen(arc, accuracy);
# actual_arc = c.subsegment(0.0..t).arclen(accuracy);
# assert!(
# (arc - actual_arc).abs() < 2.0 * accuracy,
# "at accuracy {:e, want { got {",
# accuracy,
# actual_arc,
# arc
# );
# def test_cubicbez_signed_area_linear():
# #
# c = CubicBez::new(
# (1.0, 0.0),
# (2.0 / 3.0, 1.0 / 3.0),
# (1.0 / 3.0, 2.0 / 3.0),
# (0.0, 1.0),
# );
# epsilon = 1e-12;
# assert_eq!((Affine::rotate(0.5) * c).signed_area(), 0.5);
# assert!(((Affine::rotate(0.5) * c).signed_area() - 0.5).abs() < epsilon);
# assert!(((Affine::translate((0.0, 1.0)) * c).signed_area() - 1.0).abs() < epsilon);
# assert!(((Affine::translate((1.0, 0.0)) * c).signed_area() - 1.0).abs() < epsilon);
# def test_cubicbez_signed_area():
# // y = 1 - x^3
# c = CubicBez::new((1.0, 0.0), (2.0 / 3.0, 1.0), (1.0 / 3.0, 1.0), (0.0, 1.0));
# epsilon = 1e-12;
# assert!((c.signed_area() - 0.75).abs() < epsilon);
# assert!(((Affine::rotate(0.5) * c).signed_area() - 0.75).abs() < epsilon);
# assert!(((Affine::translate((0.0, 1.0)) * c).signed_area() - 1.25).abs() < epsilon);
# assert!(((Affine::translate((1.0, 0.0)) * c).signed_area() - 1.25).abs() < epsilon);
# def test_cubicbez_nearest():
# fn verify(result: Nearest, expected: f64) {
# assert!(
# (result.t - expected).abs() < 1e-6,
# "got {:? expected {",
# result,
# expected
# );
# // y = x^3
# c = CubicBez::new((0.0, 0.0), (1.0 / 3.0, 0.0), (2.0 / 3.0, 0.0), (1.0, 1.0));
# verify(c.nearest((0.1, 0.001).into(), 1e-6), 0.1);
# verify(c.nearest((0.2, 0.008).into(), 1e-6), 0.2);
# verify(c.nearest((0.3, 0.027).into(), 1e-6), 0.3);
# verify(c.nearest((0.4, 0.064).into(), 1e-6), 0.4);
# verify(c.nearest((0.5, 0.125).into(), 1e-6), 0.5);
# verify(c.nearest((0.6, 0.216).into(), 1e-6), 0.6);
# verify(c.nearest((0.7, 0.343).into(), 1e-6), 0.7);
# verify(c.nearest((0.8, 0.512).into(), 1e-6), 0.8);
# verify(c.nearest((0.9, 0.729).into(), 1e-6), 0.9);
# verify(c.nearest((1.0, 1.0).into(), 1e-6), 1.0);
# verify(c.nearest((1.1, 1.1).into(), 1e-6), 1.0);
# verify(c.nearest((-0.1, 0.0).into(), 1e-6), 0.0);
# a = Affine::rotate(0.5);
# verify((a * c).nearest(a * Point::new(0.1, 0.001), 1e-6), 0.1);
# // ensure to_quads returns something given colinear points
# def test_degenerate_to_quads():
# c = CubicBez::new((0., 9.), (6., 6.), (12., 3.0), (18., 0.0));
# quads = c.to_quads(1e-6).collect::<Vec<_>>();
# assert_eq!(quads.len(), 1, "{:?", &quads);
def test_cubicbez_extrema():
q = CubicBez(Point(0.0, 0.0), Point(0.0, 1.0), Point(1.0, 1.0), Point(1.0, 0.0));
extrema = q.extrema()
assert len(extrema) == 1
assert abs(extrema[0] - 0.5) < 1e-6
q = CubicBez(Point(0.4, 0.5), Point(0.0, 1.0), Point(1.0, 0.0), Point(0.5, 0.4));
extrema = q.extrema();
assert len(extrema) == 4
# def test_cubicbez_toquads():
# // y = x^3
# c = CubicBez::new((0.0, 0.0), (1.0 / 3.0, 0.0), (2.0 / 3.0, 0.0), (1.0, 1.0));
# for i in 0..10 {
# accuracy = 0.1f64.powi(i);
# mut worst: f64 = 0.0;
# for (_count, (t0, t1, q)) in c.to_quads(accuracy).enumerate() {
# epsilon = 1e-12;
# assert!((q.start() - c.eval(t0)).hypot() < epsilon);
# assert!((q.end() - c.eval(t1)).hypot() < epsilon);
# n = 4;
# for j in 0..=n {
# t = (j as f64) * (n as f64).recip();
# p = q.eval(t);
# err = (p.y - p.x.powi(3)).abs();
# worst = worst.max(err);
# assert!(err < accuracy, "got { wanted {", err, accuracy);
|
[
"math.sqrt",
"kurbopy.Point"
] |
[((102, 117), 'kurbopy.Point', 'Point', (['(0.0)', '(0.0)'], {}), '(0.0, 0.0)\n', (107, 117), False, 'from kurbopy import Point, CubicBez\n'), ((127, 148), 'kurbopy.Point', 'Point', (['(1.0 / 3.0)', '(0.0)'], {}), '(1.0 / 3.0, 0.0)\n', (132, 148), False, 'from kurbopy import Point, CubicBez\n'), ((158, 185), 'kurbopy.Point', 'Point', (['(2.0 / 3.0)', '(1.0 / 3.0)'], {}), '(2.0 / 3.0, 1.0 / 3.0)\n', (163, 185), False, 'from kurbopy import Point, CubicBez\n'), ((195, 210), 'kurbopy.Point', 'Point', (['(1.0)', '(1.0)'], {}), '(1.0, 1.0)\n', (200, 210), False, 'from kurbopy import Point, CubicBez\n'), ((594, 609), 'kurbopy.Point', 'Point', (['(0.0)', '(0.0)'], {}), '(0.0, 0.0)\n', (599, 609), False, 'from kurbopy import Point, CubicBez\n'), ((619, 640), 'kurbopy.Point', 'Point', (['(1.0 / 3.0)', '(0.0)'], {}), '(1.0 / 3.0, 0.0)\n', (624, 640), False, 'from kurbopy import Point, CubicBez\n'), ((650, 677), 'kurbopy.Point', 'Point', (['(2.0 / 3.0)', '(1.0 / 3.0)'], {}), '(2.0 / 3.0, 1.0 / 3.0)\n', (655, 677), False, 'from kurbopy import Point, CubicBez\n'), ((687, 702), 'kurbopy.Point', 'Point', (['(1.0)', '(1.0)'], {}), '(1.0, 1.0)\n', (692, 702), False, 'from kurbopy import Point, CubicBez\n'), ((4680, 4695), 'kurbopy.Point', 'Point', (['(0.0)', '(0.0)'], {}), '(0.0, 0.0)\n', (4685, 4695), False, 'from kurbopy import Point, CubicBez\n'), ((4697, 4712), 'kurbopy.Point', 'Point', (['(0.0)', '(1.0)'], {}), '(0.0, 1.0)\n', (4702, 4712), False, 'from kurbopy import Point, CubicBez\n'), ((4714, 4729), 'kurbopy.Point', 'Point', (['(1.0)', '(1.0)'], {}), '(1.0, 1.0)\n', (4719, 4729), False, 'from kurbopy import Point, CubicBez\n'), ((4731, 4746), 'kurbopy.Point', 'Point', (['(1.0)', '(0.0)'], {}), '(1.0, 0.0)\n', (4736, 4746), False, 'from kurbopy import Point, CubicBez\n'), ((4862, 4877), 'kurbopy.Point', 'Point', (['(0.4)', '(0.5)'], {}), '(0.4, 0.5)\n', (4867, 4877), False, 'from kurbopy import Point, CubicBez\n'), ((4879, 4894), 'kurbopy.Point', 'Point', (['(0.0)', '(1.0)'], {}), '(0.0, 1.0)\n', (4884, 4894), False, 'from kurbopy import Point, CubicBez\n'), ((4896, 4911), 'kurbopy.Point', 'Point', (['(1.0)', '(0.0)'], {}), '(1.0, 0.0)\n', (4901, 4911), False, 'from kurbopy import Point, CubicBez\n'), ((4913, 4928), 'kurbopy.Point', 'Point', (['(0.5)', '(0.4)'], {}), '(0.5, 0.4)\n', (4918, 4928), False, 'from kurbopy import Point, CubicBez\n'), ((735, 749), 'math.sqrt', 'math.sqrt', (['(5.0)'], {}), '(5.0)\n', (744, 749), False, 'import math\n'), ((774, 788), 'math.sqrt', 'math.sqrt', (['(5.0)'], {}), '(5.0)\n', (783, 788), False, 'import math\n')]
|
"""
Main entrance to the application
"""
# Local application imports
from app import create_app
application = create_app()
if __name__ == '__main__':
application.run()
|
[
"app.create_app"
] |
[((113, 125), 'app.create_app', 'create_app', ([], {}), '()\n', (123, 125), False, 'from app import create_app\n')]
|
"""
Provides ready-made implementations for filters used in templates.
"""
from string import Template
import arrow
from dateutil import tz
from .constants import ThemeColor
__all__ = ('humanize', 'if_true', 'navbar_skin', 'sidebar_skin', 'replace_with_flag')
def humanize(dt, locale='en_us', time_zone=None):
"""The filter converts the date to human readable."""
dt = arrow.get(dt, tz.gettz(time_zone))
return dt.humanize(locale=locale, only_distance=True)
def if_true(value, replace_with=None):
"""Replaces the value with the passed if the value is true."""
if not value:
return ''
if replace_with is None:
return value
return Template(replace_with).safe_substitute(value=value)
def replace_with_flag(locale):
"""The filter replaces the locale with the CSS flag classes of the flag-icon-css library."""
locale = locale.replace('-', '_').lower().rsplit('_', maxsplit=1)
if len(locale) == 2:
return f'flag-icon flag-icon-{locale[-1]}'
return ''
def navbar_skin(color):
"""Returns a collection of classes to style the navigation bar."""
if color:
light = {ThemeColor.LIGHT, ThemeColor.WARNING, ThemeColor.WHITE, ThemeColor.ORANGE}
        style = 'light' if color in light else 'dark'
return f'navbar-{style} navbar-{color}'
return ''
def sidebar_skin(color, light=False):
"""Returns a collection of classes to style the main sidebar bar."""
if color:
        style = 'light' if light else 'dark'
return f'sidebar-{style}-{color}'
return ''
|
[
"dateutil.tz.gettz",
"string.Template"
] |
[((398, 417), 'dateutil.tz.gettz', 'tz.gettz', (['time_zone'], {}), '(time_zone)\n', (406, 417), False, 'from dateutil import tz\n'), ((684, 706), 'string.Template', 'Template', (['replace_with'], {}), '(replace_with)\n', (692, 706), False, 'from string import Template\n')]
|
"""Management of versions to help users update."""
import os
from configparser import ConfigParser
from datetime import datetime
from distutils.version import StrictVersion
import requests
from .utils.logging import print_warning # pylint: disable=cyclic-import
def get_version() -> str:
"""Get current installed version of the SDK."""
with open(
os.path.join(os.path.dirname(__file__), "VERSION"),
"r",
encoding="utf-8",
) as file_:
return file_.read().strip()
def version_check() -> None:
"""Check if current installed version of the SDK is up to date with latest pypi release."""
# Getting latest version on pypi
cache_file = os.path.join(os.path.expanduser("~"), ".redbrickai", "version")
os.makedirs(os.path.dirname(cache_file), exist_ok=True)
cache_config = ConfigParser()
cache_config.read(cache_file)
update_cache = False
if (
"version" not in cache_config
or "current_version" not in cache_config["version"]
or cache_config["version"]["current_version"] != __version__
):
cache_config["version"] = {"current_version": __version__}
update_cache = True
current_timestamp = int(datetime.now().timestamp())
if (
"latest_version" not in cache_config["version"]
or "last_checked" not in cache_config["version"]
or current_timestamp - int(cache_config["version"]["last_checked"]) > 86400
):
url = "https://pypi.org/pypi/redbrick-sdk/json"
data = requests.get(url).json()
versions = list(data["releases"].keys())
versions.sort(key=StrictVersion)
latest_version = versions[-1]
# Comparing with current installed version
if __version__ != latest_version:
warn = (
"You are using version '{}' of the SDK. However, version '{}' is available!\n"
+ "Please update as soon as possible to get the latest features and bug fixes.\n"
+ "You can use 'python -m pip install --upgrade redbrick-sdk'"
+ " to get the latest version."
)
print_warning(warn.format(__version__, latest_version))
cache_config["version"]["latest_version"] = latest_version
cache_config["version"]["last_checked"] = str(current_timestamp)
update_cache = True
if update_cache:
with open(cache_file, "w", encoding="utf-8") as file_:
cache_config.write(file_)
__version__ = get_version()
version_check()
|
[
"os.path.dirname",
"datetime.datetime.now",
"requests.get",
"configparser.ConfigParser",
"os.path.expanduser"
] |
[((836, 850), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (848, 850), False, 'from configparser import ConfigParser\n'), ((705, 728), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (723, 728), False, 'import os\n'), ((772, 799), 'os.path.dirname', 'os.path.dirname', (['cache_file'], {}), '(cache_file)\n', (787, 799), False, 'import os\n'), ((381, 406), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (396, 406), False, 'import os\n'), ((1219, 1233), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1231, 1233), False, 'from datetime import datetime\n'), ((1532, 1549), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1544, 1549), False, 'import requests\n')]
|
import ray
from ray import serve
import requests
import os
import pickle
import numpy as np
import asyncio
# Models locations
RANDOM_FOREST_MODEL_PATH = os.path.join("wine-quality_random_forest.pkl")
XGBOOST_MODEL_PATH = os.path.join("wine-quality_xgboost.pkl")
GRBOOST_MODEL_PATH = os.path.join("wine-quality_grboost.pkl")
# Start Ray
ray.init()
# Start Serve
serve.start()
#define deployments
@serve.deployment(route_prefix="/randomforest")
class RandomForestModel:
def __init__(self, path):
with open(path, "rb") as f:
self.model = pickle.load(f)
async def __call__(self, request):
payload = await request.json()
return self.serve(payload)
def serve(self, request):
input_vector = [
request["fixed acidity"],
request["volatile acidity"],
request["citric acid"],
request["residual sugar"],
request["chlorides"],
request["free sulfur dioxide"],
request["total sulfur dioxide"],
request["density"],
request["pH"],
request["sulphates"],
request["alcohol"],
]
prediction = self.model.predict([input_vector])[0]
return {"result": str(prediction)}
@serve.deployment(route_prefix="/grboost")
class GRBoostModel:
def __init__(self, path):
with open(path, "rb") as f:
self.model = pickle.load(f)
async def __call__(self, request):
payload = await request.json()
return self.serve(payload)
def serve(self, request):
input_vector = np.array([
request["fixed acidity"],
request["volatile acidity"],
request["citric acid"],
request["residual sugar"],
request["chlorides"],
request["free sulfur dioxide"],
request["total sulfur dioxide"],
request["density"],
request["pH"],
request["sulphates"],
request["alcohol"],
])
prediction = self.model.predict(input_vector.reshape(1,11))[0]
return {"result": str(prediction)}
@serve.deployment(route_prefix="/xgboost")
class XGBoostModel:
def __init__(self, path):
with open(path, "rb") as f:
self.model = pickle.load(f)
async def __call__(self, request):
payload = await request.json()
return self.serve(payload)
def serve(self, request):
input_vector = np.array([
request["fixed acidity"],
request["volatile acidity"],
request["citric acid"],
request["residual sugar"],
request["chlorides"],
request["free sulfur dioxide"],
request["total sulfur dioxide"],
request["density"],
request["pH"],
request["sulphates"],
request["alcohol"],
])
prediction = self.model.predict(input_vector.reshape(1,11))[0]
return {"result": str(prediction)}
RandomForestModel.deploy(RANDOM_FOREST_MODEL_PATH)
XGBoostModel.deploy(XGBOOST_MODEL_PATH)
GRBoostModel.deploy(GRBOOST_MODEL_PATH)
@serve.deployment(route_prefix="/speculative")
class Speculative:
def __init__(self):
self.rfhandle = RandomForestModel.get_handle(sync=False)
self.xgboosthandle = XGBoostModel.get_handle(sync=False)
self.grboosthandle = GRBoostModel.get_handle(sync=False)
async def __call__(self, request):
payload = await request.json()
f1, f2, f3 = await asyncio.gather(self.rfhandle.serve.remote(payload),
self.xgboosthandle.serve.remote(payload), self.grboosthandle.serve.remote(payload))
rfresurlt = ray.get(f1)['result']
xgresurlt = ray.get(f2)['result']
grresult = ray.get(f3)['result']
ones = []
zeros = []
if rfresurlt == "1":
ones.append("Random forest")
else:
zeros.append("Random forest")
if xgresurlt == "1":
ones.append("XGBoost")
else:
zeros.append("XGBoost")
if grresult == "1":
ones.append("Gradient boost")
else:
zeros.append("Gradient boost")
if len(ones) >= 2:
return {"result": "1", "methods": ones}
else:
return {"result": "0", "methods": zeros}
Speculative.deploy()
sample_request_input = {
"fixed acidity": -0.70071875,
"volatile acidity": 0.34736425,
"citric acid": -1.34012182,
"residual sugar": -0.16942723,
"chlorides": -0.1586918,
"free sulfur dioxide": 1.06389977,
"total sulfur dioxide": -0.10545198,
"density": -0.66075704,
"pH": 0.70550789,
"sulphates": -0.46118037,
"alcohol": 0.26002813,
}
print(requests.get("http://localhost:8000/randomforest", json=sample_request_input).text)
print(requests.get("http://localhost:8000/grboost", json=sample_request_input).text)
print(requests.get("http://localhost:8000/xgboost", json=sample_request_input).text)
print(requests.get("http://localhost:8000/speculative", json=sample_request_input).text)
|
[
"ray.init",
"ray.serve.deployment",
"ray.get",
"pickle.load",
"numpy.array",
"requests.get",
"ray.serve.start",
"os.path.join"
] |
[((155, 201), 'os.path.join', 'os.path.join', (['"""wine-quality_random_forest.pkl"""'], {}), "('wine-quality_random_forest.pkl')\n", (167, 201), False, 'import os\n'), ((223, 263), 'os.path.join', 'os.path.join', (['"""wine-quality_xgboost.pkl"""'], {}), "('wine-quality_xgboost.pkl')\n", (235, 263), False, 'import os\n'), ((285, 325), 'os.path.join', 'os.path.join', (['"""wine-quality_grboost.pkl"""'], {}), "('wine-quality_grboost.pkl')\n", (297, 325), False, 'import os\n'), ((339, 349), 'ray.init', 'ray.init', ([], {}), '()\n', (347, 349), False, 'import ray\n'), ((365, 378), 'ray.serve.start', 'serve.start', ([], {}), '()\n', (376, 378), False, 'from ray import serve\n'), ((400, 446), 'ray.serve.deployment', 'serve.deployment', ([], {'route_prefix': '"""/randomforest"""'}), "(route_prefix='/randomforest')\n", (416, 446), False, 'from ray import serve\n'), ((1264, 1305), 'ray.serve.deployment', 'serve.deployment', ([], {'route_prefix': '"""/grboost"""'}), "(route_prefix='/grboost')\n", (1280, 1305), False, 'from ray import serve\n'), ((2140, 2181), 'ray.serve.deployment', 'serve.deployment', ([], {'route_prefix': '"""/xgboost"""'}), "(route_prefix='/xgboost')\n", (2156, 2181), False, 'from ray import serve\n'), ((3148, 3193), 'ray.serve.deployment', 'serve.deployment', ([], {'route_prefix': '"""/speculative"""'}), "(route_prefix='/speculative')\n", (3164, 3193), False, 'from ray import serve\n'), ((1600, 1898), 'numpy.array', 'np.array', (["[request['fixed acidity'], request['volatile acidity'], request[\n 'citric acid'], request['residual sugar'], request['chlorides'],\n request['free sulfur dioxide'], request['total sulfur dioxide'],\n request['density'], request['pH'], request['sulphates'], request['alcohol']\n ]"], {}), "([request['fixed acidity'], request['volatile acidity'], request[\n 'citric acid'], request['residual sugar'], request['chlorides'],\n request['free sulfur dioxide'], request['total sulfur dioxide'],\n request['density'], request['pH'], request['sulphates'], request[\n 'alcohol']])\n", (1608, 1898), True, 'import numpy as np\n'), ((2476, 2774), 'numpy.array', 'np.array', (["[request['fixed acidity'], request['volatile acidity'], request[\n 'citric acid'], request['residual sugar'], request['chlorides'],\n request['free sulfur dioxide'], request['total sulfur dioxide'],\n request['density'], request['pH'], request['sulphates'], request['alcohol']\n ]"], {}), "([request['fixed acidity'], request['volatile acidity'], request[\n 'citric acid'], request['residual sugar'], request['chlorides'],\n request['free sulfur dioxide'], request['total sulfur dioxide'],\n request['density'], request['pH'], request['sulphates'], request[\n 'alcohol']])\n", (2484, 2774), True, 'import numpy as np\n'), ((4777, 4854), 'requests.get', 'requests.get', (['"""http://localhost:8000/randomforest"""'], {'json': 'sample_request_input'}), "('http://localhost:8000/randomforest', json=sample_request_input)\n", (4789, 4854), False, 'import requests\n'), ((4867, 4939), 'requests.get', 'requests.get', (['"""http://localhost:8000/grboost"""'], {'json': 'sample_request_input'}), "('http://localhost:8000/grboost', json=sample_request_input)\n", (4879, 4939), False, 'import requests\n'), ((4952, 5024), 'requests.get', 'requests.get', (['"""http://localhost:8000/xgboost"""'], {'json': 'sample_request_input'}), "('http://localhost:8000/xgboost', json=sample_request_input)\n", (4964, 5024), False, 'import requests\n'), ((5037, 5113), 'requests.get', 'requests.get', 
(['"""http://localhost:8000/speculative"""'], {'json': 'sample_request_input'}), "('http://localhost:8000/speculative', json=sample_request_input)\n", (5049, 5113), False, 'import requests\n'), ((563, 577), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (574, 577), False, 'import pickle\n'), ((1417, 1431), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1428, 1431), False, 'import pickle\n'), ((2293, 2307), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2304, 2307), False, 'import pickle\n'), ((3710, 3721), 'ray.get', 'ray.get', (['f1'], {}), '(f1)\n', (3717, 3721), False, 'import ray\n'), ((3752, 3763), 'ray.get', 'ray.get', (['f2'], {}), '(f2)\n', (3759, 3763), False, 'import ray\n'), ((3793, 3804), 'ray.get', 'ray.get', (['f3'], {}), '(f3)\n', (3800, 3804), False, 'import ray\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
import math
from contextlib import contextmanager
from timeit import default_timer
from redis import StrictRedis
import six
from django.conf import settings
# noinspection PyUnresolvedReferences
from six.moves import xrange
from easy_cache import caches
from easy_cache.contrib.redis_cache import RedisCacheInstance
from easy_cache.decorators import ecached
from tests.conf import REDIS_HOST, MEMCACHED_HOST
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:'
}
},
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'locmem',
'KEY_PREFIX': 'custom_prefix',
},
'memcached': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': MEMCACHED_HOST,
'KEY_PREFIX': 'memcached',
},
'redis': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': 'redis://{}/1'.format(REDIS_HOST),
'OPTIONS': {
'CLIENT_CLASS': 'django_redis.client.DefaultClient',
}
}
},
ROOT_URLCONF='',
INSTALLED_APPS=()
)
# adds custom redis client
redis_host, redis_port = REDIS_HOST.split(':')
caches['redis_client'] = RedisCacheInstance(
StrictRedis(host=redis_host, port=redis_port),
prefix='bench'
)
def ratio(a, b):
if a > b:
return a / b, 1
elif a < b:
return 1, b / a
else:
return 1, 1
class Stopwatch(object):
def __init__(self, name):
self.name = name
self.t0 = default_timer()
self.laps = []
def __unicode__(self):
m = self.mean()
d = self.stddev()
a = self.median()
fmt = u'%-37s: mean=%0.5f, median=%0.5f, stddev=%0.5f, n=%3d, snr=%8.5f:%8.5f'
return fmt % ((self.name, m, a, d, len(self.laps)) + ratio(m, d))
def __str__(self):
if six.PY2:
return six.binary_type(self.__unicode__())
else:
return self.__unicode__()
def mean(self):
return sum(self.laps) / len(self.laps)
def median(self):
return sorted(self.laps)[int(len(self.laps) / 2)]
def stddev(self):
mean = self.mean()
return math.sqrt(sum((lap - mean) ** 2 for lap in self.laps) / len(self.laps))
def total(self):
return default_timer() - self.t0
def reset(self):
self.t0 = default_timer()
self.laps = []
@contextmanager
def timing(self):
t0 = default_timer()
try:
yield
finally:
te = default_timer()
self.laps.append(te - t0)
c = 0
def time_consuming_operation():
global c
c += 1
a = sum(xrange(1000000))
return str(a)
def test_no_cache():
return time_consuming_operation()
@ecached(cache_alias='default')
def test_locmem_cache():
return time_consuming_operation()
@ecached(cache_alias='memcached')
def test_memcached_cache():
return time_consuming_operation()
@ecached(cache_alias='redis')
def test_redis_cache():
return time_consuming_operation()
@ecached(cache_alias='redis_client')
def test_redis_client_cache():
return time_consuming_operation()
@ecached(cache_alias='default', tags=['tag1', 'tag2'])
def test_locmem_cache_tags():
return time_consuming_operation()
@ecached(cache_alias='memcached', tags=['tag1', 'tag2'])
def test_memcached_cache_tags():
return time_consuming_operation()
@ecached(cache_alias='redis', tags=['tag1', 'tag2'])
def test_redis_cache_tags():
return time_consuming_operation()
@ecached(cache_alias='redis_client', tags=['tag1', 'tag2'])
def test_redis_client_cache_tags():
return time_consuming_operation()
def main():
from django import get_version
import sys
print('=======', 'Python:', sys.version.replace('\n', ''), 'Django:', get_version(), '=======')
global c
n = 100
benchmarks = (
(test_no_cache, n),
(test_locmem_cache, 1),
(test_locmem_cache_tags, 1),
(test_memcached_cache, 1),
(test_memcached_cache_tags, 1),
(test_redis_cache, 1),
(test_redis_cache_tags, 1),
(test_redis_client_cache, 1),
(test_redis_client_cache_tags, 1),
)
def cleanup(function):
if hasattr(function, 'invalidate_cache_by_key'):
function.invalidate_cache_by_key()
if hasattr(function, 'invalidate_cache_by_tags'):
function.invalidate_cache_by_tags()
for method, count in benchmarks:
sw1 = Stopwatch('[cleanup] ' + method.__name__)
cleanup(method)
c = 0
for _ in xrange(n):
with sw1.timing():
method()
cleanup(method)
assert c == n, c
print(sw1)
sw2 = Stopwatch('[ normal] ' + method.__name__)
cleanup(method)
c = 0
for _ in xrange(n):
# skip first time
if _ == 0:
method()
continue
with sw2.timing():
method()
assert c == count, c
print(sw2)
print('mean diff: {:.3} %, median diff: {:.3} %'.format(
float(sw2.mean()) / sw1.mean() * 100,
float(sw2.median()) / sw1.median() * 100,
))
if __name__ == '__main__':
main()
|
[
"timeit.default_timer",
"sys.version.replace",
"easy_cache.decorators.ecached",
"six.moves.xrange",
"tests.conf.REDIS_HOST.split",
"redis.StrictRedis",
"django.get_version"
] |
[((1410, 1431), 'tests.conf.REDIS_HOST.split', 'REDIS_HOST.split', (['""":"""'], {}), "(':')\n", (1426, 1431), False, 'from tests.conf import REDIS_HOST, MEMCACHED_HOST\n'), ((3027, 3057), 'easy_cache.decorators.ecached', 'ecached', ([], {'cache_alias': '"""default"""'}), "(cache_alias='default')\n", (3034, 3057), False, 'from easy_cache.decorators import ecached\n'), ((3124, 3156), 'easy_cache.decorators.ecached', 'ecached', ([], {'cache_alias': '"""memcached"""'}), "(cache_alias='memcached')\n", (3131, 3156), False, 'from easy_cache.decorators import ecached\n'), ((3226, 3254), 'easy_cache.decorators.ecached', 'ecached', ([], {'cache_alias': '"""redis"""'}), "(cache_alias='redis')\n", (3233, 3254), False, 'from easy_cache.decorators import ecached\n'), ((3320, 3355), 'easy_cache.decorators.ecached', 'ecached', ([], {'cache_alias': '"""redis_client"""'}), "(cache_alias='redis_client')\n", (3327, 3355), False, 'from easy_cache.decorators import ecached\n'), ((3428, 3481), 'easy_cache.decorators.ecached', 'ecached', ([], {'cache_alias': '"""default"""', 'tags': "['tag1', 'tag2']"}), "(cache_alias='default', tags=['tag1', 'tag2'])\n", (3435, 3481), False, 'from easy_cache.decorators import ecached\n'), ((3553, 3608), 'easy_cache.decorators.ecached', 'ecached', ([], {'cache_alias': '"""memcached"""', 'tags': "['tag1', 'tag2']"}), "(cache_alias='memcached', tags=['tag1', 'tag2'])\n", (3560, 3608), False, 'from easy_cache.decorators import ecached\n'), ((3683, 3734), 'easy_cache.decorators.ecached', 'ecached', ([], {'cache_alias': '"""redis"""', 'tags': "['tag1', 'tag2']"}), "(cache_alias='redis', tags=['tag1', 'tag2'])\n", (3690, 3734), False, 'from easy_cache.decorators import ecached\n'), ((3805, 3863), 'easy_cache.decorators.ecached', 'ecached', ([], {'cache_alias': '"""redis_client"""', 'tags': "['tag1', 'tag2']"}), "(cache_alias='redis_client', tags=['tag1', 'tag2'])\n", (3812, 3863), False, 'from easy_cache.decorators import ecached\n'), ((1481, 1526), 'redis.StrictRedis', 'StrictRedis', ([], {'host': 'redis_host', 'port': 'redis_port'}), '(host=redis_host, port=redis_port)\n', (1492, 1526), False, 'from redis import StrictRedis\n'), ((1777, 1792), 'timeit.default_timer', 'default_timer', ([], {}), '()\n', (1790, 1792), False, 'from timeit import default_timer\n'), ((2621, 2636), 'timeit.default_timer', 'default_timer', ([], {}), '()\n', (2634, 2636), False, 'from timeit import default_timer\n'), ((2716, 2731), 'timeit.default_timer', 'default_timer', ([], {}), '()\n', (2729, 2731), False, 'from timeit import default_timer\n'), ((2928, 2943), 'six.moves.xrange', 'xrange', (['(1000000)'], {}), '(1000000)\n', (2934, 2943), False, 'from six.moves import xrange\n'), ((4035, 4064), 'sys.version.replace', 'sys.version.replace', (['"""\n"""', '""""""'], {}), "('\\n', '')\n", (4054, 4064), False, 'import sys\n'), ((4077, 4090), 'django.get_version', 'get_version', ([], {}), '()\n', (4088, 4090), False, 'from django import get_version\n'), ((4863, 4872), 'six.moves.xrange', 'xrange', (['n'], {}), '(n)\n', (4869, 4872), False, 'from six.moves import xrange\n'), ((5116, 5125), 'six.moves.xrange', 'xrange', (['n'], {}), '(n)\n', (5122, 5125), False, 'from six.moves import xrange\n'), ((2555, 2570), 'timeit.default_timer', 'default_timer', ([], {}), '()\n', (2568, 2570), False, 'from timeit import default_timer\n'), ((2797, 2812), 'timeit.default_timer', 'default_timer', ([], {}), '()\n', (2810, 2812), False, 'from timeit import default_timer\n')]
|
from __future__ import absolute_import, division, print_function, unicode_literals
import wx
import logging
log = logging.getLogger(__name__)
MENU_FILE = wx.NewId()
MENU_VIEW = wx.NewId()
MENU_INTERACT = wx.NewId()
MENU_PROC = wx.NewId()
MENU_DB = wx.NewId()
MENU_SERVER = wx.NewId()
MENU_TOOLS = wx.NewId()
MENU_HELP = wx.NewId()
MENU_FILE_IMP = wx.NewId()
MENU_FILE_IMP_DIGI_S = wx.NewId()
MENU_FILE_IMP_UNB = wx.NewId()
MENU_FILE_IMP_SIPPICAN = wx.NewId()
MENU_FILE_IMP_SEABIRD = wx.NewId()
MENU_FILE_IMP_VALEPORT = wx.NewId()
MENU_FILE_IMP_VALE_MIDAS = wx.NewId()
MENU_FILE_IMP_VALE_MON = wx.NewId()
MENU_FILE_IMP_VALE_MINIS = wx.NewId()
MENU_FILE_IMP_TURO = wx.NewId()
MENU_FILE_IMP_DIGIBAR = wx.NewId()
MENU_FILE_IMP_DIGI_PRO = wx.NewId()
MENU_FILE_IMP_CASTAWAY = wx.NewId()
MENU_FILE_IMP_IDRONAUT = wx.NewId()
MENU_FILE_IMP_SAIV = wx.NewId()
MENU_FILE_QUERY = wx.NewId()
MENU_FILE_QUERY_WOA = wx.NewId()
MENU_FILE_QUERY_RTOFS = wx.NewId()
MENU_FILE_QUERY_SIS = wx.NewId()
MENU_FILE_EXPORT = wx.NewId()
MENU_FILE_EXPORT_ASVP = wx.NewId()
MENU_FILE_EXPORT_VEL = wx.NewId()
MENU_FILE_EXPORT_HIPS = wx.NewId()
MENU_FILE_EXPORT_PRO = wx.NewId()
MENU_FILE_EXPORT_IXBLUE = wx.NewId()
MENU_FILE_EXPORT_UNB = wx.NewId()
MENU_FILE_EXPORT_ELAC = wx.NewId()
MENU_FILE_EXPORT_CSV = wx.NewId()
MENU_FILE_EXPORT_CAST = wx.NewId()
MENU_FILE_CLEAR = wx.NewId()
MENU_FILE_EXIT = wx.NewId()
MENU_PROC_INS_ZOOM = wx.NewId()
MENU_PROC_INS_FLAG = wx.NewId()
MENU_PROC_INS_UNFLAG = wx.NewId()
MENU_PROC_INS_INSERT = wx.NewId()
MENU_VIEW_RESET = wx.NewId()
MENU_VIEW_HIDE_WOA = wx.NewId()
MENU_VIEW_HIDE_FLAGGED = wx.NewId()
MENU_VIEW_HIDE_DEPTH = wx.NewId()
MENU_PROC_LOAD_SAL = wx.NewId()
MENU_PROC_LOAD_TEMP_SAL = wx.NewId()
MENU_PROC_LOAD_SURFSP = wx.NewId()
MENU_PROC_EXTEND_CAST = wx.NewId()
MENU_PROC_INSPECTION = wx.NewId()
MENU_PROC_PREVIEW_THINNING = wx.NewId()
MENU_PROC_SEND_PROFILE = wx.NewId()
MENU_PROC_STORE_SSP = wx.NewId()
MENU_PROC_REDO_SSP = wx.NewId()
MENU_PROC_LOG_METADATA = wx.NewId()
# MENU_PROC_EXPRESS = wx.NewId()
MENU_DB_QUERY = wx.NewId()
MENU_DB_QUERY_INTERNAL_DB = wx.NewId()
MENU_DB_QUERY_EXTERNAL_DB = wx.NewId()
MENU_DB_DELETE = wx.NewId()
MENU_DB_DELETE_INTERNAL_DB = wx.NewId()
MENU_DB_DELETE_EXTERNAL_DB = wx.NewId()
MENU_DB_EXPORT = wx.NewId()
MENU_DB_EXPORT_SHP = wx.NewId()
MENU_DB_EXPORT_KML = wx.NewId()
MENU_DB_EXPORT_CSV = wx.NewId()
MENU_DB_PLOT = wx.NewId()
MENU_DB_PLOT_MAP_SSP = wx.NewId()
MENU_DB_PLOT_DAILY_SSP = wx.NewId()
MENU_DB_SAVE_DAILY_SSP = wx.NewId()
MENU_TOOLS_SERVER = wx.NewId()
MENU_TOOLS_SET_REFERENCE_CAST = wx.NewId()
MENU_TOOLS_CLEAR_REFERENCE_CAST = wx.NewId()
MENU_TOOLS_EDIT_REFERENCE_CAST = wx.NewId()
MENU_TOOLS_REFERENCE = wx.NewId()
MENU_TOOLS_MODIFY_SETTINGS = wx.NewId()
MENU_TOOLS_VIEW_SETTINGS = wx.NewId()
MENU_TOOLS_RELOAD_SETTINGS = wx.NewId()
MENU_TOOLS_USER_INPUTS = wx.NewId()
MENU_TOOLS_REF_MON = wx.NewId()
MENU_TOOLS_GEO_MONITOR = wx.NewId()
MENU_SERVER_START = wx.NewId()
MENU_SERVER_SEND = wx.NewId()
MENU_SERVER_STOP = wx.NewId()
MENU_SERVER_LOG_METADATA = wx.NewId()
MENU_HELP_MANUAL = wx.NewId()
MENU_HELP_ABOUT = wx.NewId()
MENUS_ALL = (MENU_FILE_IMP, MENU_FILE_IMP_CASTAWAY, MENU_FILE_IMP_DIGIBAR, MENU_FILE_IMP_DIGI_PRO, MENU_FILE_IMP_DIGI_S,
MENU_FILE_IMP_IDRONAUT, MENU_FILE_IMP_SAIV, MENU_FILE_IMP_SEABIRD, MENU_FILE_IMP_SIPPICAN,
MENU_FILE_IMP_TURO, MENU_FILE_IMP_UNB, MENU_FILE_IMP_VALEPORT,
MENU_FILE_IMP_VALE_MIDAS, MENU_FILE_IMP_VALE_MON, MENU_FILE_IMP_VALE_MINIS,
MENU_FILE_QUERY,
MENU_FILE_EXPORT, MENU_FILE_EXPORT_CAST,
MENU_FILE_EXPORT_ASVP, MENU_FILE_EXPORT_PRO, MENU_FILE_EXPORT_HIPS, MENU_FILE_EXPORT_IXBLUE,
MENU_FILE_EXPORT_VEL, MENU_FILE_EXPORT_UNB, MENU_FILE_EXPORT_ELAC, MENU_FILE_EXPORT_CSV,
MENU_FILE_CLEAR,
MENU_VIEW_RESET, MENU_VIEW_HIDE_WOA, MENU_VIEW_HIDE_FLAGGED, MENU_VIEW_HIDE_DEPTH,
MENU_PROC_LOAD_SAL, MENU_PROC_LOAD_TEMP_SAL, MENU_PROC_LOAD_SURFSP, MENU_PROC_EXTEND_CAST,
MENU_PROC_INSPECTION, MENU_PROC_INS_ZOOM, MENU_PROC_INS_FLAG, MENU_PROC_INS_UNFLAG, MENU_PROC_INS_INSERT,
# MENU_PROC_EXPRESS,
MENU_PROC_PREVIEW_THINNING, MENU_PROC_SEND_PROFILE,
MENU_PROC_STORE_SSP, MENU_PROC_REDO_SSP, MENU_PROC_LOG_METADATA,
MENU_DB_QUERY,
MENU_DB_DELETE,
MENU_DB_EXPORT,
MENU_DB_PLOT,
MENU_SERVER_START, MENU_SERVER_SEND, MENU_SERVER_STOP, MENU_SERVER_LOG_METADATA,
MENU_TOOLS_GEO_MONITOR, MENU_TOOLS_REF_MON,
MENU_TOOLS_SET_REFERENCE_CAST, MENU_TOOLS_EDIT_REFERENCE_CAST, MENU_TOOLS_CLEAR_REFERENCE_CAST,
MENU_TOOLS_MODIFY_SETTINGS, MENU_TOOLS_VIEW_SETTINGS, MENU_TOOLS_RELOAD_SETTINGS,
MENU_TOOLS_USER_INPUTS)
MENUS_DISABLED_ON_CLOSED = (
MENU_FILE_EXPORT_CAST, MENU_FILE_CLEAR,
MENU_VIEW_RESET, MENU_VIEW_HIDE_WOA, MENU_VIEW_HIDE_FLAGGED, MENU_VIEW_HIDE_DEPTH,
MENU_PROC_LOAD_SAL, MENU_PROC_LOAD_TEMP_SAL, MENU_PROC_LOAD_SURFSP,
MENU_PROC_EXTEND_CAST, MENU_PROC_INSPECTION,
MENU_PROC_INS_ZOOM, MENU_PROC_INS_FLAG, MENU_PROC_INS_INSERT, MENU_PROC_INS_UNFLAG,
MENU_PROC_PREVIEW_THINNING, MENU_PROC_SEND_PROFILE,
MENU_PROC_STORE_SSP, MENU_PROC_REDO_SSP,
# MENU_PROC_EXPRESS,
MENU_TOOLS_SET_REFERENCE_CAST,
MENU_SERVER_SEND, MENU_SERVER_STOP)
MENUS_DISABLED_ON_OPEN = (MENU_SERVER_SEND, MENU_SERVER_STOP)
MENUS_DISABLED_ON_SERVER = (
MENU_FILE_IMP, # all import
MENU_FILE_QUERY, # all query
MENU_FILE_EXPORT, # all export
MENU_FILE_CLEAR,
MENU_PROC_LOG_METADATA, MENU_TOOLS_SET_REFERENCE_CAST, MENU_TOOLS_EDIT_REFERENCE_CAST,
MENU_TOOLS_CLEAR_REFERENCE_CAST, MENU_FILE_IMP_DIGI_S, MENU_FILE_IMP_SEABIRD,
# MENU_PROC_EXPRESS,
MENU_PROC_LOAD_SAL, MENU_PROC_LOAD_TEMP_SAL, MENU_PROC_LOAD_SURFSP, MENU_PROC_EXTEND_CAST,
MENU_PROC_INSPECTION, MENU_PROC_PREVIEW_THINNING, MENU_PROC_SEND_PROFILE, MENU_PROC_REDO_SSP,
MENU_DB_QUERY,
MENU_DB_DELETE,
MENU_DB_EXPORT,
MENU_DB_PLOT,
MENU_SERVER_START)
class SSPManagerBase(wx.Frame):
def __init__(self, *args, **kwds):
kwds["style"] = wx.DEFAULT_FRAME_STYLE
wx.Frame.__init__(self, *args, **kwds)
# Menu Bar
self.SVPEditorFrame_menubar = wx.MenuBar()
# ### FILE ###
self.FileMenu = wx.Menu()
# File/Import
FileImp = wx.Menu()
self.FileImpCastaway = wx.MenuItem(FileImp, MENU_FILE_IMP_CASTAWAY, "Castaway (.csv)",
"Import a Castaway cast", wx.ITEM_NORMAL)
FileImp.AppendItem(self.FileImpCastaway)
FileImpDigi = wx.Menu()
self.FileImpDigibarPro = wx.MenuItem(FileImpDigi, MENU_FILE_IMP_DIGI_PRO, "Digibar Pro (.txt)",
"Import a Digibar Pro cast", wx.ITEM_NORMAL)
FileImpDigi.AppendItem(self.FileImpDigibarPro)
self.FileImpDigibarS = wx.MenuItem(FileImpDigi, MENU_FILE_IMP_DIGI_S, "Digibar S (.csv)",
"Import a Digibar S cast", wx.ITEM_NORMAL)
FileImpDigi.AppendItem(self.FileImpDigibarS)
FileImp.AppendMenu(MENU_FILE_IMP_DIGIBAR, "Digibar", FileImpDigi, "Import Digibar formats")
self.FileImpIdronaut = wx.MenuItem(FileImp, MENU_FILE_IMP_IDRONAUT, "Idronaut (*.txt)",
"Import an Idronaut cast", wx.ITEM_NORMAL)
FileImp.AppendItem(self.FileImpIdronaut)
self.FileImpSaiv = wx.MenuItem(FileImp, MENU_FILE_IMP_SAIV, "Saiv (*.txt)",
"Import a Saiv cast", wx.ITEM_NORMAL)
FileImp.AppendItem(self.FileImpSaiv)
self.FileImpSeabird = wx.MenuItem(FileImp, MENU_FILE_IMP_SEABIRD, "Seabird (.cnv)",
"Import a Seabird cast", wx.ITEM_NORMAL)
FileImp.AppendItem(self.FileImpSeabird)
self.FileImpSippican = wx.MenuItem(FileImp, MENU_FILE_IMP_SIPPICAN, "Sippican (.edf)",
"Import a Sippican cast", wx.ITEM_NORMAL)
FileImp.AppendItem(self.FileImpSippican)
self.FileImpTuro = wx.MenuItem(FileImp, MENU_FILE_IMP_TURO, "Turo (.nc)",
"Import a Turo cast", wx.ITEM_NORMAL)
FileImp.AppendItem(self.FileImpTuro)
self.FileImpUNB = wx.MenuItem(FileImp, MENU_FILE_IMP_UNB, "UNB (.unb)",
"Import a UNB cast", wx.ITEM_NORMAL)
FileImp.AppendItem(self.FileImpUNB)
FileImpVale = wx.Menu()
self.FileImpValeMidas = wx.MenuItem(FileImpVale, MENU_FILE_IMP_VALE_MIDAS, "Midas (.000)",
"Import a Valeport Midas cast", wx.ITEM_NORMAL)
FileImpVale.AppendItem(self.FileImpValeMidas)
self.FileImpValeMonitor = wx.MenuItem(FileImpVale, MENU_FILE_IMP_VALE_MON, "Monitor (.000)",
"Import a Valeport Monitor cast", wx.ITEM_NORMAL)
FileImpVale.AppendItem(self.FileImpValeMonitor)
self.FileImpValeMiniS = wx.MenuItem(FileImpVale, MENU_FILE_IMP_VALE_MINIS, "MiniSVP (.txt)",
"Import a Valeport MiniSVP cast", wx.ITEM_NORMAL)
FileImpVale.AppendItem(self.FileImpValeMiniS)
FileImp.AppendMenu(MENU_FILE_IMP_VALEPORT, "Valeport", FileImpVale, "Import Valeport formats")
self.FileMenu.AppendMenu(MENU_FILE_IMP, "Import cast", FileImp, "Import an SSP cast")
# File/Query
FileQuery = wx.Menu()
self.FileQuerySis = wx.MenuItem(FileQuery, MENU_FILE_QUERY_SIS, "Kongsberg SIS",
"Retrieve the SSP cast in use by SIS", wx.ITEM_NORMAL)
FileQuery.AppendItem(self.FileQuerySis)
self.FileQueryRtofs = wx.MenuItem(FileQuery, MENU_FILE_QUERY_RTOFS, "RTOFS atlas",
"Retrieve a predicted RTOFS-based SSP", wx.ITEM_NORMAL)
FileQuery.AppendItem(self.FileQueryRtofs)
self.FileQueryWoa = wx.MenuItem(FileQuery, MENU_FILE_QUERY_WOA, "WOA09 atlas",
"Retrieve statistical info about the SSP in the area", wx.ITEM_NORMAL)
FileQuery.AppendItem(self.FileQueryWoa)
self.FileMenu.AppendMenu(MENU_FILE_QUERY, "Query from", FileQuery,
"Retrieve SSP info from external sources")
# File / Export
FileExp = wx.Menu()
self.FileExpCast = wx.MenuItem(FileExp, MENU_FILE_EXPORT_CAST, "Export selected formats",
"Export the current SSP in the selected formats", wx.ITEM_NORMAL)
FileExp.AppendItem(self.FileExpCast)
FileExp.AppendSeparator()
self.FileExpHips = wx.MenuItem(FileExp, MENU_FILE_EXPORT_HIPS, "Caris HIPS (.svp)",
"Export the current SSP as Caris HIPS format", wx.ITEM_CHECK)
FileExp.AppendItem(self.FileExpHips)
self.FileExpCsv = wx.MenuItem(FileExp, MENU_FILE_EXPORT_CSV, "Comma-separated (.csv)",
"Export the current SSP as comma-separated format", wx.ITEM_CHECK)
FileExp.AppendItem(self.FileExpCsv)
self.FileExpElac = wx.MenuItem(FileExp, MENU_FILE_EXPORT_ELAC, "Elac (.sva)",
"Export the current SSP as Elac format", wx.ITEM_CHECK)
FileExp.AppendItem(self.FileExpElac)
self.FileExpVel = wx.MenuItem(FileExp, MENU_FILE_EXPORT_VEL, "Hypack (.vel)",
"Export the current SSP as Hypack format", wx.ITEM_CHECK)
FileExp.AppendItem(self.FileExpVel)
self.FileExpIxblue = wx.MenuItem(FileExp, MENU_FILE_EXPORT_IXBLUE, "IXBLUE (.txt)",
"Export the current SSP as IXBLUE format", wx.ITEM_CHECK)
FileExp.AppendItem(self.FileExpIxblue)
self.FileExpAsvp = wx.MenuItem(FileExp, MENU_FILE_EXPORT_ASVP, "Kongsberg (.asvp)",
"Export the current SSP as Kongsberg format", wx.ITEM_CHECK)
FileExp.AppendItem(self.FileExpAsvp)
self.FileExpPro = wx.MenuItem(FileExp, MENU_FILE_EXPORT_PRO, "Sonardyne (.pro)",
"Export the current SSP as Sonardyne format", wx.ITEM_CHECK)
FileExp.AppendItem(self.FileExpPro)
self.FileExpUnb = wx.MenuItem(FileExp, MENU_FILE_EXPORT_UNB, "UNB (.unb)",
"Export the current SSP as UNB format", wx.ITEM_CHECK)
FileExp.AppendItem(self.FileExpUnb)
self.FileMenu.AppendMenu(MENU_FILE_EXPORT, "Export SSP", FileExp,
"Export the current SSP")
self.FileClear = wx.MenuItem(self.FileMenu, MENU_FILE_CLEAR, "Clear",
"Clear the loaded cast", wx.ITEM_NORMAL)
self.FileMenu.AppendItem(self.FileClear)
self.FileMenu.AppendSeparator()
self.FileExit = wx.MenuItem(self.FileMenu, MENU_FILE_EXIT, "Exit",
"Quit SSP Manager", wx.ITEM_NORMAL)
self.FileMenu.AppendItem(self.FileExit)
self.SVPEditorFrame_menubar.Append(self.FileMenu, "File")
# ### VIEW ###
self.ViewMenu = wx.Menu()
self.ResetView = wx.MenuItem(self.ViewMenu, MENU_VIEW_RESET, "Reset plot view",
"Reset the plot view", wx.ITEM_NORMAL)
self.ViewMenu.AppendItem(self.ResetView)
self.ViewMenu.AppendSeparator()
self.ViewHideWOA = wx.MenuItem(self.ViewMenu, MENU_VIEW_HIDE_WOA, "Hide WOA info",
"Hide the visualization of WOA info", wx.ITEM_CHECK)
self.ViewMenu.AppendItem(self.ViewHideWOA)
self.HideFlagged = wx.MenuItem(self.ViewMenu, MENU_VIEW_HIDE_FLAGGED, "Hide flagged data",
"Hide all the flagged data", wx.ITEM_CHECK)
self.ViewMenu.AppendItem(self.HideFlagged)
self.HideDepth = wx.MenuItem(self.ViewMenu, MENU_VIEW_HIDE_DEPTH, "Hide depth",
"Hide the depth visualization on the plot", wx.ITEM_CHECK)
self.ViewMenu.AppendItem(self.HideDepth)
self.SVPEditorFrame_menubar.Append(self.ViewMenu, "View")
# ### Process ###
self.ProcessMenu = wx.Menu()
self.ProcessLoadSal = wx.MenuItem(self.ProcessMenu, MENU_PROC_LOAD_SAL, "Load salinity",
"Load salinity from reference cast [XBT only]", wx.ITEM_NORMAL)
self.ProcessMenu.AppendItem(self.ProcessLoadSal)
self.ProcessLoadTempSal = wx.MenuItem(self.ProcessMenu, MENU_PROC_LOAD_TEMP_SAL,
"Load temperature/salinity",
"Load temperature and salinity from reference cast [SVP and XBT only]",
wx.ITEM_NORMAL)
self.ProcessMenu.AppendItem(self.ProcessLoadTempSal)
self.ProcessLoadSurfSpeed = wx.MenuItem(self.ProcessMenu, MENU_PROC_LOAD_SURFSP, "Get surface sound speed",
"Get the surface sound speed value from SIS", wx.ITEM_NORMAL)
self.ProcessMenu.AppendItem(self.ProcessLoadSurfSpeed)
self.ProcessMenu.AppendSeparator()
self.ProcessExtend = wx.MenuItem(self.ProcessMenu, MENU_PROC_EXTEND_CAST, "Extend cast",
"Extend the cast using the reference cast", wx.ITEM_NORMAL)
self.ProcessMenu.AppendItem(self.ProcessExtend)
self.ProcessInspection = wx.Menu()
self.PlotZoom = wx.MenuItem(self.ProcessInspection, MENU_PROC_INS_ZOOM, "Zoom",
"Zoom on plot by mouse selection", wx.ITEM_RADIO)
self.ProcessInspection.AppendItem(self.PlotZoom)
self.PlotFlag = wx.MenuItem(self.ProcessInspection, MENU_PROC_INS_FLAG, "Flag",
"Flag samples on plot by mouse selection", wx.ITEM_RADIO)
self.ProcessInspection.AppendItem(self.PlotFlag)
self.PlotUnflag = wx.MenuItem(self.ProcessInspection, MENU_PROC_INS_UNFLAG, "Unflag",
"Unflag samples on plot by mouse selection", wx.ITEM_RADIO)
self.ProcessInspection.AppendItem(self.PlotUnflag)
self.PlotInsert = wx.MenuItem(self.ProcessInspection, MENU_PROC_INS_INSERT, "Insert",
"Insert a sample by mouse clicking", wx.ITEM_RADIO)
self.ProcessInspection.AppendItem(self.PlotInsert)
self.ProcessMenu.AppendMenu(MENU_PROC_INSPECTION, "Visual inspection", self.ProcessInspection,
"Visual inspection of the resulting profile")
self.ProcessMenu.AppendSeparator()
self.ProcessPreviewThinning = wx.MenuItem(self.ProcessMenu, MENU_PROC_PREVIEW_THINNING, "Preview thinning",
"Preview the thinning required by some client types", wx.ITEM_NORMAL)
self.ProcessMenu.AppendItem(self.ProcessPreviewThinning)
self.ProcessSendProfile = wx.MenuItem(self.ProcessMenu, MENU_PROC_SEND_PROFILE, "Send SSP",
"Send the current SSP to the clients", wx.ITEM_NORMAL)
self.ProcessMenu.AppendItem(self.ProcessSendProfile)
self.ProcessMenu.AppendSeparator()
self.ProcessStoreDb = wx.MenuItem(self.ProcessMenu, MENU_PROC_STORE_SSP, "Store SSP",
"Locally store the current SSP data", wx.ITEM_NORMAL)
self.ProcessMenu.AppendItem(self.ProcessStoreDb)
self.ProcessRedoSsp = wx.MenuItem(self.ProcessMenu, MENU_PROC_REDO_SSP, "Redo processing",
"Redo the processing by reloading the stored raw data", wx.ITEM_NORMAL)
self.ProcessMenu.AppendItem(self.ProcessRedoSsp)
self.ProcessLogMetadata = wx.MenuItem(self.ProcessMenu, MENU_PROC_LOG_METADATA, "Log processing metadata",
"Store the processing metadata in the log DB", wx.ITEM_CHECK)
self.ProcessMenu.AppendItem(self.ProcessLogMetadata)
# self.ProcessMenu.AppendSeparator()
# self.ProcessExpressMode = wx.MenuItem(self.ProcessMenu, MENU_PROC_EXPRESS, "Express mode",
# "Activate the express mode (be careful!)", wx.ITEM_NORMAL)
# self.ProcessMenu.AppendItem(self.ProcessExpressMode)
self.SVPEditorFrame_menubar.Append(self.ProcessMenu, "Process")
# ### DATABASE ###
self.DbMenu = wx.Menu()
# Query
DbQuery = wx.Menu()
self.DbQueryInternalDb = wx.MenuItem(DbQuery, MENU_DB_QUERY_INTERNAL_DB, "Internal DB",
"Retrieve the locally stored SSP", wx.ITEM_NORMAL)
DbQuery.AppendItem(self.DbQueryInternalDb)
self.DbQueryExternalDb = wx.MenuItem(DbQuery, MENU_DB_QUERY_EXTERNAL_DB, "External DB",
"Retrieve a SSP stored in the select DB", wx.ITEM_NORMAL)
DbQuery.AppendItem(self.DbQueryExternalDb)
self.DbMenu.AppendMenu(MENU_DB_QUERY, "Query from", DbQuery,
"Retrieve SSP info from databases")
# Db/Delete
DbDelete = wx.Menu()
self.DbDeleteInternalDb = wx.MenuItem(DbDelete, MENU_DB_DELETE_INTERNAL_DB, "Internal DB",
"Delete a locally stored SSP", wx.ITEM_NORMAL)
DbDelete.AppendItem(self.DbDeleteInternalDb)
self.DbDeleteExternalDb = wx.MenuItem(DbDelete, MENU_DB_DELETE_EXTERNAL_DB, "External DB",
"Delete a SSP stored in the select DB", wx.ITEM_NORMAL)
DbDelete.AppendItem(self.DbDeleteExternalDb)
self.DbMenu.AppendMenu(MENU_DB_DELETE, "Delete SSP", DbDelete, "")
# Db/Export
DbExport = wx.Menu()
self.DbExportShp = wx.MenuItem(DbExport, MENU_DB_EXPORT_SHP, "Shapefile",
"Export all the stored SSPs as a Shapefile", wx.ITEM_NORMAL)
DbExport.AppendItem(self.DbExportShp)
self.DbExportKml = wx.MenuItem(DbExport, MENU_DB_EXPORT_KML, "KML",
"Export all the stored SSPs as a KML file", wx.ITEM_NORMAL)
DbExport.AppendItem(self.DbExportKml)
self.DbExportCsv = wx.MenuItem(DbExport, MENU_DB_EXPORT_CSV, "CSV",
"Export all the stored SSPs as a Comma-Separated file", wx.ITEM_NORMAL)
DbExport.AppendItem(self.DbExportCsv)
self.DbMenu.AppendMenu(MENU_DB_EXPORT, "Export", DbExport, "")
# Db/Plot
DbPlot = wx.Menu()
self.DbPlotMapSsp = wx.MenuItem(DbPlot, MENU_DB_PLOT_MAP_SSP, "Map all SSPs",
"Create a map with all the stored SSPs", wx.ITEM_NORMAL)
DbPlot.AppendItem(self.DbPlotMapSsp)
self.DbPlotDailySsp = wx.MenuItem(DbPlot, MENU_DB_PLOT_DAILY_SSP, "Create daily plot",
"Create a SSP plot for each day", wx.ITEM_NORMAL)
DbPlot.AppendItem(self.DbPlotDailySsp)
self.DbSaveDailySsp = wx.MenuItem(DbPlot, MENU_DB_SAVE_DAILY_SSP, "Save daily plot",
"Save a SSP plot for each day", wx.ITEM_NORMAL)
DbPlot.AppendItem(self.DbSaveDailySsp)
self.DbMenu.AppendMenu(MENU_DB_PLOT, "Plot", DbPlot, "")
self.SVPEditorFrame_menubar.Append(self.DbMenu, "Database")
# ### Tools ###
self.ToolsMenu = wx.Menu()
ServerMenu = wx.Menu()
self.ToolsServerStart = wx.MenuItem(ServerMenu, MENU_SERVER_START, "Start server",
"Start SIS server mode", wx.ITEM_NORMAL)
ServerMenu.AppendItem(self.ToolsServerStart)
self.ToolsServerSend = wx.MenuItem(ServerMenu, MENU_SERVER_SEND, "Force send",
"Force to send a SSP", wx.ITEM_NORMAL)
ServerMenu.AppendItem(self.ToolsServerSend)
self.ToolsServerStop = wx.MenuItem(ServerMenu, MENU_SERVER_STOP, "Stop server",
"Stop SIS server mode", wx.ITEM_NORMAL)
ServerMenu.AppendItem(self.ToolsServerStop)
ServerMenu.AppendSeparator()
self.ServerLogMetadata = wx.MenuItem(ServerMenu, MENU_SERVER_LOG_METADATA, "Log server metadata",
"Store the server metadata in the log DB", wx.ITEM_CHECK)
ServerMenu.AppendItem(self.ServerLogMetadata)
self.ToolsMenu.AppendMenu(MENU_TOOLS_SERVER, "Server", ServerMenu, "")
self.ToolsMenu.AppendSeparator()
self.ToolsRefMon = wx.MenuItem(self.ToolsMenu, MENU_TOOLS_REF_MON, "Refraction Monitor",
"Open the refraction monitor", wx.ITEM_NORMAL)
self.ToolsMenu.AppendItem(self.ToolsRefMon)
self.ToolsGeoMap = wx.MenuItem(self.ToolsMenu, MENU_TOOLS_GEO_MONITOR, "Geo Monitor",
"Open the Geo Monitor", wx.ITEM_NORMAL)
self.ToolsMenu.AppendItem(self.ToolsGeoMap)
self.ToolsMenu.AppendSeparator()
ReferenceMenu = wx.Menu()
self.ToolsSetReferenceCast = wx.MenuItem(self.ToolsMenu, MENU_TOOLS_SET_REFERENCE_CAST,
"Set as reference cast",
"Set the current SSP as reference cast", wx.ITEM_NORMAL)
ReferenceMenu.AppendItem(self.ToolsSetReferenceCast)
self.ToolsEditReferenceCast = wx.MenuItem(self.ToolsMenu, MENU_TOOLS_EDIT_REFERENCE_CAST,
"Edit the reference cast",
"Edit the current reference cast", wx.ITEM_NORMAL)
ReferenceMenu.AppendItem(self.ToolsEditReferenceCast)
self.ToolsClearReferenceCast = wx.MenuItem(self.ToolsMenu, MENU_TOOLS_CLEAR_REFERENCE_CAST,
"Clear the reference cast",
"Clear the current reference cast", wx.ITEM_NORMAL)
ReferenceMenu.AppendItem(self.ToolsClearReferenceCast)
self.ToolsMenu.AppendMenu(MENU_TOOLS_REFERENCE, "Reference cast", ReferenceMenu,
"Actions about a reference cast")
self.ToolsMenu.AppendSeparator()
self.ToolsUserInputs = wx.MenuItem(self.ToolsMenu, MENU_TOOLS_USER_INPUTS, "Monitor user inputs",
"Provide information about user inputs", wx.ITEM_NORMAL)
self.ToolsMenu.AppendItem(self.ToolsUserInputs)
self.ToolsModifySettings = wx.MenuItem(self.ToolsMenu, MENU_TOOLS_MODIFY_SETTINGS, "Modify SSP settings",
"Open tool to modify SSP settings", wx.ITEM_NORMAL)
self.ToolsMenu.AppendItem(self.ToolsModifySettings)
self.ToolsViewSettings = wx.MenuItem(self.ToolsMenu, MENU_TOOLS_VIEW_SETTINGS, "View SSP settings",
"Show SSP settings information", wx.ITEM_NORMAL)
self.ToolsMenu.AppendItem(self.ToolsViewSettings)
self.ToolsReloadSettings = wx.MenuItem(self.ToolsMenu, MENU_TOOLS_RELOAD_SETTINGS, "Reload SSP settings",
"Reload SSP settings information", wx.ITEM_NORMAL)
self.ToolsMenu.AppendItem(self.ToolsReloadSettings)
self.SVPEditorFrame_menubar.Append(self.ToolsMenu, "Tools")
self.HelpMenu = wx.Menu()
self.HelpManual = wx.MenuItem(self.HelpMenu, MENU_HELP_MANUAL, "Manual",
"Open the manual", wx.ITEM_NORMAL)
self.HelpMenu.AppendItem(self.HelpManual)
self.HelpMenu.AppendSeparator()
self.HelpAbout = wx.MenuItem(self.HelpMenu, MENU_HELP_ABOUT, "About",
"Info about the application", wx.ITEM_NORMAL)
self.HelpMenu.AppendItem(self.HelpAbout)
self.SVPEditorFrame_menubar.Append(self.HelpMenu, "Help")
self.SetMenuBar(self.SVPEditorFrame_menubar)
self.frame_statusbar = self.CreateStatusBar(2)
self.__set_properties()
self.__do_layout()
self.Bind(wx.EVT_MENU, self.on_file_import_castaway, self.FileImpCastaway)
self.Bind(wx.EVT_MENU, self.on_file_import_digibar_pro, self.FileImpDigibarPro)
self.Bind(wx.EVT_MENU, self.on_file_import_digibar_s, self.FileImpDigibarS)
self.Bind(wx.EVT_MENU, self.on_file_import_sippican, self.FileImpSippican)
self.Bind(wx.EVT_MENU, self.on_file_import_seabird, self.FileImpSeabird)
self.Bind(wx.EVT_MENU, self.on_file_import_turo, self.FileImpTuro)
self.Bind(wx.EVT_MENU, self.on_file_import_unb, self.FileImpUNB)
self.Bind(wx.EVT_MENU, self.on_file_import_valeport_midas, self.FileImpValeMidas)
self.Bind(wx.EVT_MENU, self.on_file_import_valeport_monitor, self.FileImpValeMonitor)
self.Bind(wx.EVT_MENU, self.on_file_import_valeport_minisvp, self.FileImpValeMiniS)
self.Bind(wx.EVT_MENU, self.on_file_import_idronaut, self.FileImpIdronaut)
self.Bind(wx.EVT_MENU, self.on_file_import_saiv, self.FileImpSaiv)
self.Bind(wx.EVT_MENU, self.on_file_query_woa09, self.FileQueryWoa)
self.Bind(wx.EVT_MENU, self.on_file_query_rtofs, self.FileQueryRtofs)
self.Bind(wx.EVT_MENU, self.on_file_query_sis, self.FileQuerySis)
self.Bind(wx.EVT_MENU, self.on_file_export_cast, self.FileExpCast)
self.Bind(wx.EVT_MENU, self.on_file_export_asvp, self.FileExpAsvp)
self.Bind(wx.EVT_MENU, self.on_file_export_pro, self.FileExpPro)
self.Bind(wx.EVT_MENU, self.on_file_export_vel, self.FileExpVel)
self.Bind(wx.EVT_MENU, self.on_file_export_ixblue, self.FileExpIxblue)
self.Bind(wx.EVT_MENU, self.on_file_export_hips, self.FileExpHips)
self.Bind(wx.EVT_MENU, self.on_file_export_unb, self.FileExpUnb)
self.Bind(wx.EVT_MENU, self.on_file_export_elac, self.FileExpElac)
self.Bind(wx.EVT_MENU, self.on_file_export_csv, self.FileExpCsv)
self.Bind(wx.EVT_MENU, self.on_file_clear, self.FileClear)
self.Bind(wx.EVT_MENU, self.on_file_exit, self.FileExit)
self.Bind(wx.EVT_MENU, self.on_plot_zoom, self.PlotZoom)
self.Bind(wx.EVT_MENU, self.on_plot_flag, self.PlotFlag)
self.Bind(wx.EVT_MENU, self.on_plot_unflag, self.PlotUnflag)
self.Bind(wx.EVT_MENU, self.on_plot_insert, self.PlotInsert)
self.Bind(wx.EVT_MENU, self.on_reset_view, self.ResetView)
self.Bind(wx.EVT_MENU, self.on_view_hide_woa, self.ViewHideWOA)
self.Bind(wx.EVT_MENU, self.on_view_hide_flagged, self.HideFlagged)
self.Bind(wx.EVT_MENU, self.on_view_hide_depth, self.HideDepth)
self.Bind(wx.EVT_MENU, self.on_process_load_salinity, self.ProcessLoadSal)
self.Bind(wx.EVT_MENU, self.on_process_load_temp_and_sal, self.ProcessLoadTempSal)
self.Bind(wx.EVT_MENU, self.on_process_load_surface_ssp, self.ProcessLoadSurfSpeed)
self.Bind(wx.EVT_MENU, self.on_process_extend, self.ProcessExtend)
self.Bind(wx.EVT_MENU, self.on_process_preview_thinning, self.ProcessPreviewThinning)
self.Bind(wx.EVT_MENU, self.on_process_send_profile, self.ProcessSendProfile)
self.Bind(wx.EVT_MENU, self.on_process_store_db, self.ProcessStoreDb)
self.Bind(wx.EVT_MENU, self.on_process_redo_processing, self.ProcessRedoSsp)
self.Bind(wx.EVT_MENU, self.on_process_log_metadata, self.ProcessLogMetadata)
# self.Bind(wx.EVT_MENU, self.on_process_express_mode, self.ProcessExpressMode)
self.Bind(wx.EVT_MENU, self.on_db_query_internal_db, self.DbQueryInternalDb)
self.Bind(wx.EVT_MENU, self.on_db_query_external_db, self.DbQueryExternalDb)
self.Bind(wx.EVT_MENU, self.on_db_delete_internal, self.DbDeleteInternalDb)
self.Bind(wx.EVT_MENU, self.on_db_delete_external, self.DbDeleteExternalDb)
self.Bind(wx.EVT_MENU, self.on_db_export_shp, self.DbExportShp)
self.Bind(wx.EVT_MENU, self.on_db_export_kml, self.DbExportKml)
self.Bind(wx.EVT_MENU, self.on_db_export_csv, self.DbExportCsv)
self.Bind(wx.EVT_MENU, self.on_db_plot_map_ssp, self.DbPlotMapSsp)
self.Bind(wx.EVT_MENU, self.on_db_plot_daily_ssp, self.DbPlotDailySsp)
self.Bind(wx.EVT_MENU, self.on_db_save_daily_ssp, self.DbSaveDailySsp)
self.Bind(wx.EVT_MENU, self.on_tools_refraction_monitor, self.ToolsRefMon)
self.Bind(wx.EVT_MENU, self.on_tools_geo_monitor, self.ToolsGeoMap)
self.Bind(wx.EVT_MENU, self.on_tools_server_start, self.ToolsServerStart)
self.Bind(wx.EVT_MENU, self.on_tools_server_send, self.ToolsServerSend)
self.Bind(wx.EVT_MENU, self.on_tools_server_stop, self.ToolsServerStop)
self.Bind(wx.EVT_MENU, self.on_tools_server_log_metadata, self.ServerLogMetadata)
self.Bind(wx.EVT_MENU, self.on_tools_set_reference_cast, self.ToolsSetReferenceCast)
self.Bind(wx.EVT_MENU, self.on_tools_edit_reference_cast, self.ToolsEditReferenceCast)
self.Bind(wx.EVT_MENU, self.on_tools_clear_reference_cast, self.ToolsClearReferenceCast)
self.Bind(wx.EVT_MENU, self.on_tools_user_inputs, self.ToolsUserInputs)
self.Bind(wx.EVT_MENU, self.on_tools_modify_settings, self.ToolsModifySettings)
self.Bind(wx.EVT_MENU, self.on_tools_view_settings, self.ToolsViewSettings)
self.Bind(wx.EVT_MENU, self.on_tools_reload_settings, self.ToolsReloadSettings)
self.Bind(wx.EVT_MENU, self.on_help_manual, self.HelpManual)
self.Bind(wx.EVT_MENU, self.on_help_about, self.HelpAbout)
def __set_properties(self):
self.SetTitle("SSP Manager")
# self.SetSize((1100, 700))
self.frame_statusbar.SetStatusWidths([-1, 400])
SSPManFrame_statusbar_fields = ["", ""]
for i in range(len(SSPManFrame_statusbar_fields)):
self.frame_statusbar.SetStatusText(SSPManFrame_statusbar_fields[i], i)
def __do_layout(self):
sizer_1 = wx.BoxSizer(wx.VERTICAL)
self.SetSizer(sizer_1)
self.Layout()
def on_file_query_woa09(self, event):
log.info("Event handler 'on_file_query_woa09' not implemented!")
event.Skip()
def on_file_query_rtofs(self, event):
log.info("Event handler 'on_file_query_rtofs' not implemented!")
event.Skip()
def on_file_query_sis(self, event):
log.info("Event handler 'on_file_query_sis' not implemented!")
event.Skip()
def on_process_store_db(self, event):
log.info("Event handler 'on_process_store_db' not implemented!")
event.Skip()
def on_process_log_metadata(self, event):
log.info("Event handler 'on_process_log_metadata' not implemented!")
event.Skip()
def on_file_import_castaway(self, event):
log.info("Event handler 'on_file_import_castaway' not implemented!")
event.Skip()
def on_file_import_digibar_pro(self, event):
log.info("Event handler 'on_file_import_digibar_pro' not implemented!")
event.Skip()
def on_file_import_digibar_s(self, event):
log.info("Event handler 'on_file_import_digibar_s' not implemented!")
event.Skip()
def on_file_import_sippican(self, event):
log.info("Event handler 'on_file_import_sippican' not implemented!")
event.Skip()
def on_file_import_seabird(self, event):
log.info("Event handler 'on_file_import_seabird' not implemented!")
event.Skip()
def on_file_import_turo(self, event):
log.info("Event handler 'on_file_import_turo' not implemented!")
event.Skip()
def on_file_import_unb(self, event):
log.info("Event handler 'on_file_import_unb' not implemented!")
event.Skip()
def on_file_import_valeport_midas(self, event):
log.info("Event handler 'on_file_import_valeport_midas' not implemented!")
event.Skip()
def on_file_import_valeport_monitor(self, event):
log.info("Event handler 'on_file_import_valeport_monitor' not implemented!")
event.Skip()
def on_file_import_valeport_minisvp(self, event):
log.info("Event handler 'on_file_import_valeport_minisvp' not implemented!")
event.Skip()
def on_file_import_idronaut(self, event):
log.info("Event handler 'on_file_import_idronaut' not implemented!")
event.Skip()
def on_file_import_saiv(self, event):
log.info("Event handler 'on_file_import_saiv' not implemented!")
event.Skip()
def on_file_export_cast(self, event):
log.info("Event handler 'on_file_export_cast' not implemented!")
event.Skip()
def on_file_export_asvp(self, event):
log.info("Event handler 'on_file_export_asvp' not implemented!")
event.Skip()
def on_file_export_pro(self, event):
log.info("Event handler 'on_file_export_pro' not implemented!")
event.Skip()
def on_file_export_vel(self, event):
log.info("Event handler 'on_file_export_vel' not implemented!")
event.Skip()
def on_file_export_ixblue(self, event):
log.info("Event handler 'on_file_export_ixblue' not implemented!")
event.Skip()
def on_file_export_hips(self, event):
log.info("Event handler 'on_file_export_hips' not implemented!")
event.Skip()
def on_file_export_unb(self, event):
log.info("Event handler 'on_file_export_unb' not implemented!")
event.Skip()
def on_file_export_elac(self, event):
log.info("Event handler 'on_file_export_elac' not implemented!")
event.Skip()
def on_file_export_csv(self, event):
log.info("Event handler 'on_file_export_csv' not implemented!")
event.Skip()
def on_file_clear(self, event):
log.info("Event handler 'on_file_clear' not implemented!")
event.Skip()
def on_file_exit(self, event):
log.info("Event handler 'on_file_exit' not implemented!")
event.Skip()
def on_plot_zoom(self, event):
log.info("Event handler 'on_plot_zoom' not implemented!")
event.Skip()
def on_plot_flag(self, event):
log.info("Event handler 'on_plot_flag' not implemented!")
event.Skip()
def on_plot_unflag(self, event):
log.info("Event handler 'on_plot_unflag' not implemented!")
event.Skip()
def on_plot_insert(self, event):
log.info("Event handler 'on_plot_insert' not implemented!")
event.Skip()
def on_reset_view(self, event):
log.info("Event handler 'on_reset_view' not implemented!")
event.Skip()
def on_view_hide_woa(self, event):
log.info("Event handler 'on_view_hide_woa' not implemented!")
event.Skip()
def on_view_hide_flagged(self, event):
log.info("Event handler 'on_view_hide_flagged' not implemented!")
event.Skip()
def on_view_hide_depth(self, event):
log.info("Event handler 'on_view_hide_depth' not implemented!")
event.Skip()
def on_tools_refraction_monitor(self, event):
log.info("Event handler 'on_tools_refraction_monitor' not implemented!")
event.Skip()
def on_tools_geo_monitor(self, event):
log.info("Event handler 'on_tools_geo_monitor' not implemented!")
event.Skip()
# def on_process_express_mode(self, event):
# log.info("Event handler `OnToolsExpress' not implemented!")
# event.Skip()
def on_process_load_salinity(self, event):
log.info("Event handler 'on_process_load_salinity' not implemented!")
event.Skip()
def on_process_load_temp_and_sal(self, event):
log.info("Event handler 'on_process_load_temp_and_sal' not implemented!")
event.Skip()
def on_process_load_surface_ssp(self, event):
log.info("Event handler 'on_process_load_surface_ssp' not implemented!")
event.Skip()
def on_process_extend(self, event):
log.info("Event handler 'on_process_extend' not implemented!")
event.Skip()
def on_process_preview_thinning(self, event):
log.info("Event handler 'on_process_preview_thinning' not implemented!")
event.Skip()
def on_process_send_profile(self, event):
log.info("Event handler 'on_process_send_profile' not implemented!")
event.Skip()
def on_process_redo_processing(self, event):
log.info("Event handler 'on_process_redo_processing' not implemented!")
event.Skip()
def on_db_query_internal_db(self, event):
log.info("Event handler 'on_db_query_internal' not implemented!")
event.Skip()
def on_db_query_external_db(self, event):
log.info("Event handler 'on_db_query_external' not implemented!")
event.Skip()
def on_db_delete_internal(self, event):
log.info("Event handler 'on_db_delete_internal' not implemented!")
event.Skip()
def on_db_delete_external(self, event):
log.info("Event handler 'on_db_delete_external' not implemented!")
event.Skip()
def on_db_export_shp(self, event):
log.info("Event handler 'on_db_export_shp' not implemented!")
event.Skip()
def on_db_export_kml(self, event):
log.info("Event handler 'on_db_export_kml' not implemented!")
event.Skip()
def on_db_export_csv(self, event):
log.info("Event handler 'on_db_export_csv' not implemented!")
event.Skip()
def on_db_plot_map_ssp(self, event):
log.info("Event handler 'on_db_plot_map_ssp' not implemented!")
event.Skip()
def on_db_plot_daily_ssp(self, event):
log.info("Event handler 'on_db_plot_daily_ssp' not implemented!")
event.Skip()
def on_db_save_daily_ssp(self, event):
log.info("Event handler 'on_db_save_daily_ssp' not implemented!")
event.Skip()
def on_tools_user_inputs(self, event):
log.info("Event handler 'on_tools_user_inputs' not implemented!")
event.Skip()
def on_tools_set_reference_cast(self, event):
log.info("Event handler 'on_tools_set_reference_cast' not implemented!")
event.Skip()
def on_tools_edit_reference_cast(self, event):
log.info("Event handler 'on_tools_edit_reference_cast' not implemented!")
event.Skip()
def on_tools_clear_reference_cast(self, event):
log.info("Event handler 'on_tools_clear_reference_cast' not implemented!")
event.Skip()
def on_tools_server_start(self, event):
log.info("Event handler 'on_tools_server_start' not implemented!")
event.Skip()
def on_tools_server_send(self, event):
log.info("Event handler 'on_tools_server_send' not implemented!")
event.Skip()
def on_tools_server_stop(self, event):
log.info("Event handler 'on_tools_server_stop' not implemented!")
event.Skip()
def on_tools_server_log_metadata(self, event):
log.info("Event handler 'on_tools_server_log_metadata' not implemented!")
event.Skip()
def on_tools_modify_settings(self, event):
log.info("Event handler 'on_tools_modify_settings' not implemented!")
event.Skip()
def on_tools_view_settings(self, event):
log.info("Event handler 'on_tools_view_settings' not implemented!")
event.Skip()
def on_tools_reload_settings(self, event):
log.info("Event handler 'on_tools_reload_settings' not implemented!")
event.Skip()
def on_help_manual(self, event):
log.info("Event handler 'on_help_manual' not implemented!")
event.Skip()
def on_help_about(self, event):
log.info("Event handler 'on_help_about' not implemented!")
event.Skip()
|
[
"wx.Menu",
"wx.BoxSizer",
"wx.MenuItem",
"wx.Frame.__init__",
"wx.NewId",
"logging.getLogger",
"wx.MenuBar"
] |
[((117, 144), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (134, 144), False, 'import logging\n'), ((158, 168), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (166, 168), False, 'import wx\n'), ((181, 191), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (189, 191), False, 'import wx\n'), ((208, 218), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (216, 218), False, 'import wx\n'), ((231, 241), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (239, 241), False, 'import wx\n'), ((252, 262), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (260, 262), False, 'import wx\n'), ((277, 287), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (285, 287), False, 'import wx\n'), ((301, 311), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (309, 311), False, 'import wx\n'), ((324, 334), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (332, 334), False, 'import wx\n'), ((352, 362), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (360, 362), False, 'import wx\n'), ((386, 396), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (394, 396), False, 'import wx\n'), ((417, 427), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (425, 427), False, 'import wx\n'), ((453, 463), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (461, 463), False, 'import wx\n'), ((488, 498), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (496, 498), False, 'import wx\n'), ((524, 534), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (532, 534), False, 'import wx\n'), ((562, 572), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (570, 572), False, 'import wx\n'), ((598, 608), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (606, 608), False, 'import wx\n'), ((636, 646), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (644, 646), False, 'import wx\n'), ((668, 678), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (676, 678), False, 'import wx\n'), ((703, 713), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (711, 713), False, 'import wx\n'), ((739, 749), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (747, 749), False, 'import wx\n'), ((775, 785), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (783, 785), False, 'import wx\n'), ((811, 821), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (819, 821), False, 'import wx\n'), ((843, 853), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (851, 853), False, 'import wx\n'), ((872, 882), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (880, 882), False, 'import wx\n'), ((905, 915), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (913, 915), False, 'import wx\n'), ((940, 950), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (948, 950), False, 'import wx\n'), ((973, 983), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (981, 983), False, 'import wx\n'), ((1003, 1013), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1011, 1013), False, 'import wx\n'), ((1038, 1048), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1046, 1048), False, 'import wx\n'), ((1072, 1082), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1080, 1082), False, 'import wx\n'), ((1107, 1117), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1115, 1117), False, 'import wx\n'), ((1141, 1151), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1149, 1151), False, 'import wx\n'), ((1178, 1188), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1186, 1188), False, 'import wx\n'), ((1212, 1222), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1220, 1222), False, 'import wx\n'), ((1247, 1257), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1255, 1257), False, 'import wx\n'), ((1281, 1291), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1289, 1291), False, 'import wx\n'), ((1316, 1326), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1324, 1326), False, 'import wx\n'), ((1345, 1355), 'wx.NewId', 'wx.NewId', ([], {}), 
'()\n', (1353, 1355), False, 'import wx\n'), ((1373, 1383), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1381, 1383), False, 'import wx\n'), ((1406, 1416), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1414, 1416), False, 'import wx\n'), ((1438, 1448), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1446, 1448), False, 'import wx\n'), ((1472, 1482), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1480, 1482), False, 'import wx\n'), ((1506, 1516), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1514, 1516), False, 'import wx\n'), ((1536, 1546), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1544, 1546), False, 'import wx\n'), ((1568, 1578), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1576, 1578), False, 'import wx\n'), ((1604, 1614), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1612, 1614), False, 'import wx\n'), ((1638, 1648), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1646, 1648), False, 'import wx\n'), ((1671, 1681), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1679, 1681), False, 'import wx\n'), ((1708, 1718), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1716, 1718), False, 'import wx\n'), ((1743, 1753), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1751, 1753), False, 'import wx\n'), ((1778, 1788), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1786, 1788), False, 'import wx\n'), ((1812, 1822), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1820, 1822), False, 'import wx\n'), ((1852, 1862), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1860, 1862), False, 'import wx\n'), ((1888, 1898), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1896, 1898), False, 'import wx\n'), ((1921, 1931), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1929, 1931), False, 'import wx\n'), ((1953, 1963), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1961, 1963), False, 'import wx\n'), ((1989, 1999), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (1997, 1999), False, 'import wx\n'), ((2050, 2060), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2058, 2060), False, 'import wx\n'), ((2089, 2099), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2097, 2099), False, 'import wx\n'), ((2128, 2138), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2136, 2138), False, 'import wx\n'), ((2156, 2166), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2164, 2166), False, 'import wx\n'), ((2196, 2206), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2204, 2206), False, 'import wx\n'), ((2236, 2246), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2244, 2246), False, 'import wx\n'), ((2264, 2274), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2272, 2274), False, 'import wx\n'), ((2296, 2306), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2304, 2306), False, 'import wx\n'), ((2328, 2338), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2336, 2338), False, 'import wx\n'), ((2360, 2370), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2368, 2370), False, 'import wx\n'), ((2386, 2396), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2394, 2396), False, 'import wx\n'), ((2420, 2430), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2428, 2430), False, 'import wx\n'), ((2456, 2466), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2464, 2466), False, 'import wx\n'), ((2492, 2502), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2500, 2502), False, 'import wx\n'), ((2524, 2534), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2532, 2534), False, 'import wx\n'), ((2567, 2577), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2575, 2577), False, 'import wx\n'), ((2612, 2622), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2620, 2622), False, 'import wx\n'), ((2656, 2666), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2664, 2666), False, 'import wx\n'), ((2690, 2700), 'wx.NewId', 
'wx.NewId', ([], {}), '()\n', (2698, 2700), False, 'import wx\n'), ((2730, 2740), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2738, 2740), False, 'import wx\n'), ((2768, 2778), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2776, 2778), False, 'import wx\n'), ((2808, 2818), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2816, 2818), False, 'import wx\n'), ((2844, 2854), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2852, 2854), False, 'import wx\n'), ((2876, 2886), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2884, 2886), False, 'import wx\n'), ((2912, 2922), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2920, 2922), False, 'import wx\n'), ((2944, 2954), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2952, 2954), False, 'import wx\n'), ((2974, 2984), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (2982, 2984), False, 'import wx\n'), ((3004, 3014), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (3012, 3014), False, 'import wx\n'), ((3042, 3052), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (3050, 3052), False, 'import wx\n'), ((3072, 3082), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (3080, 3082), False, 'import wx\n'), ((3101, 3111), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (3109, 3111), False, 'import wx\n'), ((6233, 6271), 'wx.Frame.__init__', 'wx.Frame.__init__', (['self', '*args'], {}), '(self, *args, **kwds)\n', (6250, 6271), False, 'import wx\n'), ((6330, 6342), 'wx.MenuBar', 'wx.MenuBar', ([], {}), '()\n', (6340, 6342), False, 'import wx\n'), ((6391, 6400), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (6398, 6400), False, 'import wx\n'), ((6442, 6451), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (6449, 6451), False, 'import wx\n'), ((6483, 6592), 'wx.MenuItem', 'wx.MenuItem', (['FileImp', 'MENU_FILE_IMP_CASTAWAY', '"""Castaway (.csv)"""', '"""Import a Castaway cast"""', 'wx.ITEM_NORMAL'], {}), "(FileImp, MENU_FILE_IMP_CASTAWAY, 'Castaway (.csv)',\n 'Import a Castaway cast', wx.ITEM_NORMAL)\n", (6494, 6592), False, 'import wx\n'), ((6703, 6712), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (6710, 6712), False, 'import wx\n'), ((6746, 6865), 'wx.MenuItem', 'wx.MenuItem', (['FileImpDigi', 'MENU_FILE_IMP_DIGI_PRO', '"""Digibar Pro (.txt)"""', '"""Import a Digibar Pro cast"""', 'wx.ITEM_NORMAL'], {}), "(FileImpDigi, MENU_FILE_IMP_DIGI_PRO, 'Digibar Pro (.txt)',\n 'Import a Digibar Pro cast', wx.ITEM_NORMAL)\n", (6757, 6865), False, 'import wx\n'), ((6993, 7106), 'wx.MenuItem', 'wx.MenuItem', (['FileImpDigi', 'MENU_FILE_IMP_DIGI_S', '"""Digibar S (.csv)"""', '"""Import a Digibar S cast"""', 'wx.ITEM_NORMAL'], {}), "(FileImpDigi, MENU_FILE_IMP_DIGI_S, 'Digibar S (.csv)',\n 'Import a Digibar S cast', wx.ITEM_NORMAL)\n", (7004, 7106), False, 'import wx\n'), ((7330, 7441), 'wx.MenuItem', 'wx.MenuItem', (['FileImp', 'MENU_FILE_IMP_IDRONAUT', '"""Idronaut (*.txt)"""', '"""Import an Idronaut cast"""', 'wx.ITEM_NORMAL'], {}), "(FileImp, MENU_FILE_IMP_IDRONAUT, 'Idronaut (*.txt)',\n 'Import an Idronaut cast', wx.ITEM_NORMAL)\n", (7341, 7441), False, 'import wx\n'), ((7557, 7655), 'wx.MenuItem', 'wx.MenuItem', (['FileImp', 'MENU_FILE_IMP_SAIV', '"""Saiv (*.txt)"""', '"""Import a Saiv cast"""', 'wx.ITEM_NORMAL'], {}), "(FileImp, MENU_FILE_IMP_SAIV, 'Saiv (*.txt)',\n 'Import a Saiv cast', wx.ITEM_NORMAL)\n", (7568, 7655), False, 'import wx\n'), ((7766, 7872), 'wx.MenuItem', 'wx.MenuItem', (['FileImp', 'MENU_FILE_IMP_SEABIRD', '"""Seabird (.cnv)"""', '"""Import a Seabird cast"""', 'wx.ITEM_NORMAL'], {}), "(FileImp, MENU_FILE_IMP_SEABIRD, 'Seabird (.cnv)',\n 'Import a Seabird cast', wx.ITEM_NORMAL)\n", (7777, 7872), False, 'import wx\n'), ((7990, 
8099), 'wx.MenuItem', 'wx.MenuItem', (['FileImp', 'MENU_FILE_IMP_SIPPICAN', '"""Sippican (.edf)"""', '"""Import a Sippican cast"""', 'wx.ITEM_NORMAL'], {}), "(FileImp, MENU_FILE_IMP_SIPPICAN, 'Sippican (.edf)',\n 'Import a Sippican cast', wx.ITEM_NORMAL)\n", (8001, 8099), False, 'import wx\n'), ((8215, 8311), 'wx.MenuItem', 'wx.MenuItem', (['FileImp', 'MENU_FILE_IMP_TURO', '"""Turo (.nc)"""', '"""Import a Turo cast"""', 'wx.ITEM_NORMAL'], {}), "(FileImp, MENU_FILE_IMP_TURO, 'Turo (.nc)', 'Import a Turo cast',\n wx.ITEM_NORMAL)\n", (8226, 8311), False, 'import wx\n'), ((8418, 8512), 'wx.MenuItem', 'wx.MenuItem', (['FileImp', 'MENU_FILE_IMP_UNB', '"""UNB (.unb)"""', '"""Import a UNB cast"""', 'wx.ITEM_NORMAL'], {}), "(FileImp, MENU_FILE_IMP_UNB, 'UNB (.unb)', 'Import a UNB cast',\n wx.ITEM_NORMAL)\n", (8429, 8512), False, 'import wx\n'), ((8613, 8622), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (8620, 8622), False, 'import wx\n'), ((8655, 8773), 'wx.MenuItem', 'wx.MenuItem', (['FileImpVale', 'MENU_FILE_IMP_VALE_MIDAS', '"""Midas (.000)"""', '"""Import a Valeport Midas cast"""', 'wx.ITEM_NORMAL'], {}), "(FileImpVale, MENU_FILE_IMP_VALE_MIDAS, 'Midas (.000)',\n 'Import a Valeport Midas cast', wx.ITEM_NORMAL)\n", (8666, 8773), False, 'import wx\n'), ((8902, 9022), 'wx.MenuItem', 'wx.MenuItem', (['FileImpVale', 'MENU_FILE_IMP_VALE_MON', '"""Monitor (.000)"""', '"""Import a Valeport Monitor cast"""', 'wx.ITEM_NORMAL'], {}), "(FileImpVale, MENU_FILE_IMP_VALE_MON, 'Monitor (.000)',\n 'Import a Valeport Monitor cast', wx.ITEM_NORMAL)\n", (8913, 9022), False, 'import wx\n'), ((9153, 9275), 'wx.MenuItem', 'wx.MenuItem', (['FileImpVale', 'MENU_FILE_IMP_VALE_MINIS', '"""MiniSVP (.txt)"""', '"""Import a Valeport MiniSVP cast"""', 'wx.ITEM_NORMAL'], {}), "(FileImpVale, MENU_FILE_IMP_VALE_MINIS, 'MiniSVP (.txt)',\n 'Import a Valeport MiniSVP cast', wx.ITEM_NORMAL)\n", (9164, 9275), False, 'import wx\n'), ((9609, 9618), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (9616, 9618), False, 'import wx\n'), ((9647, 9766), 'wx.MenuItem', 'wx.MenuItem', (['FileQuery', 'MENU_FILE_QUERY_SIS', '"""Kongsberg SIS"""', '"""Retrieve the SSP cast in use by SIS"""', 'wx.ITEM_NORMAL'], {}), "(FileQuery, MENU_FILE_QUERY_SIS, 'Kongsberg SIS',\n 'Retrieve the SSP cast in use by SIS', wx.ITEM_NORMAL)\n", (9658, 9766), False, 'import wx\n'), ((9881, 10001), 'wx.MenuItem', 'wx.MenuItem', (['FileQuery', 'MENU_FILE_QUERY_RTOFS', '"""RTOFS atlas"""', '"""Retrieve a predicted RTOFS-based SSP"""', 'wx.ITEM_NORMAL'], {}), "(FileQuery, MENU_FILE_QUERY_RTOFS, 'RTOFS atlas',\n 'Retrieve a predicted RTOFS-based SSP', wx.ITEM_NORMAL)\n", (9892, 10001), False, 'import wx\n'), ((10118, 10251), 'wx.MenuItem', 'wx.MenuItem', (['FileQuery', 'MENU_FILE_QUERY_WOA', '"""WOA09 atlas"""', '"""Retrieve statistical info about the SSP in the area"""', 'wx.ITEM_NORMAL'], {}), "(FileQuery, MENU_FILE_QUERY_WOA, 'WOA09 atlas',\n 'Retrieve statistical info about the SSP in the area', wx.ITEM_NORMAL)\n", (10129, 10251), False, 'import wx\n'), ((10530, 10539), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (10537, 10539), False, 'import wx\n'), ((10567, 10707), 'wx.MenuItem', 'wx.MenuItem', (['FileExp', 'MENU_FILE_EXPORT_CAST', '"""Export selected formats"""', '"""Export the current SSP in the selected formats"""', 'wx.ITEM_NORMAL'], {}), "(FileExp, MENU_FILE_EXPORT_CAST, 'Export selected formats',\n 'Export the current SSP in the selected formats', wx.ITEM_NORMAL)\n", (10578, 10707), False, 'import wx\n'), ((10849, 10979), 'wx.MenuItem', 'wx.MenuItem', (['FileExp', 
'MENU_FILE_EXPORT_HIPS', '"""Caris HIPS (.svp)"""', '"""Export the current SSP as Caris HIPS format"""', 'wx.ITEM_CHECK'], {}), "(FileExp, MENU_FILE_EXPORT_HIPS, 'Caris HIPS (.svp)',\n 'Export the current SSP as Caris HIPS format', wx.ITEM_CHECK)\n", (10860, 10979), False, 'import wx\n'), ((11086, 11225), 'wx.MenuItem', 'wx.MenuItem', (['FileExp', 'MENU_FILE_EXPORT_CSV', '"""Comma-separated (.csv)"""', '"""Export the current SSP as comma-separated format"""', 'wx.ITEM_CHECK'], {}), "(FileExp, MENU_FILE_EXPORT_CSV, 'Comma-separated (.csv)',\n 'Export the current SSP as comma-separated format', wx.ITEM_CHECK)\n", (11097, 11225), False, 'import wx\n'), ((11331, 11449), 'wx.MenuItem', 'wx.MenuItem', (['FileExp', 'MENU_FILE_EXPORT_ELAC', '"""Elac (.sva)"""', '"""Export the current SSP as Elac format"""', 'wx.ITEM_CHECK'], {}), "(FileExp, MENU_FILE_EXPORT_ELAC, 'Elac (.sva)',\n 'Export the current SSP as Elac format', wx.ITEM_CHECK)\n", (11342, 11449), False, 'import wx\n'), ((11556, 11677), 'wx.MenuItem', 'wx.MenuItem', (['FileExp', 'MENU_FILE_EXPORT_VEL', '"""Hypack (.vel)"""', '"""Export the current SSP as Hypack format"""', 'wx.ITEM_CHECK'], {}), "(FileExp, MENU_FILE_EXPORT_VEL, 'Hypack (.vel)',\n 'Export the current SSP as Hypack format', wx.ITEM_CHECK)\n", (11567, 11677), False, 'import wx\n'), ((11785, 11909), 'wx.MenuItem', 'wx.MenuItem', (['FileExp', 'MENU_FILE_EXPORT_IXBLUE', '"""IXBLUE (.txt)"""', '"""Export the current SSP as IXBLUE format"""', 'wx.ITEM_CHECK'], {}), "(FileExp, MENU_FILE_EXPORT_IXBLUE, 'IXBLUE (.txt)',\n 'Export the current SSP as IXBLUE format', wx.ITEM_CHECK)\n", (11796, 11909), False, 'import wx\n'), ((12021, 12150), 'wx.MenuItem', 'wx.MenuItem', (['FileExp', 'MENU_FILE_EXPORT_ASVP', '"""Kongsberg (.asvp)"""', '"""Export the current SSP as Kongsberg format"""', 'wx.ITEM_CHECK'], {}), "(FileExp, MENU_FILE_EXPORT_ASVP, 'Kongsberg (.asvp)',\n 'Export the current SSP as Kongsberg format', wx.ITEM_CHECK)\n", (12032, 12150), False, 'import wx\n'), ((12257, 12384), 'wx.MenuItem', 'wx.MenuItem', (['FileExp', 'MENU_FILE_EXPORT_PRO', '"""Sonardyne (.pro)"""', '"""Export the current SSP as Sonardyne format"""', 'wx.ITEM_CHECK'], {}), "(FileExp, MENU_FILE_EXPORT_PRO, 'Sonardyne (.pro)',\n 'Export the current SSP as Sonardyne format', wx.ITEM_CHECK)\n", (12268, 12384), False, 'import wx\n'), ((12489, 12604), 'wx.MenuItem', 'wx.MenuItem', (['FileExp', 'MENU_FILE_EXPORT_UNB', '"""UNB (.unb)"""', '"""Export the current SSP as UNB format"""', 'wx.ITEM_CHECK'], {}), "(FileExp, MENU_FILE_EXPORT_UNB, 'UNB (.unb)',\n 'Export the current SSP as UNB format', wx.ITEM_CHECK)\n", (12500, 12604), False, 'import wx\n'), ((12842, 12939), 'wx.MenuItem', 'wx.MenuItem', (['self.FileMenu', 'MENU_FILE_CLEAR', '"""Clear"""', '"""Clear the loaded cast"""', 'wx.ITEM_NORMAL'], {}), "(self.FileMenu, MENU_FILE_CLEAR, 'Clear',\n 'Clear the loaded cast', wx.ITEM_NORMAL)\n", (12853, 12939), False, 'import wx\n'), ((13087, 13178), 'wx.MenuItem', 'wx.MenuItem', (['self.FileMenu', 'MENU_FILE_EXIT', '"""Exit"""', '"""Quit SSP Manager"""', 'wx.ITEM_NORMAL'], {}), "(self.FileMenu, MENU_FILE_EXIT, 'Exit', 'Quit SSP Manager', wx.\n ITEM_NORMAL)\n", (13098, 13178), False, 'import wx\n'), ((13372, 13381), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (13379, 13381), False, 'import wx\n'), ((13407, 13512), 'wx.MenuItem', 'wx.MenuItem', (['self.ViewMenu', 'MENU_VIEW_RESET', '"""Reset plot view"""', '"""Reset the plot view"""', 'wx.ITEM_NORMAL'], {}), "(self.ViewMenu, MENU_VIEW_RESET, 'Reset plot view',\n 'Reset the plot 
view', wx.ITEM_NORMAL)\n", (13418, 13512), False, 'import wx\n'), ((13662, 13782), 'wx.MenuItem', 'wx.MenuItem', (['self.ViewMenu', 'MENU_VIEW_HIDE_WOA', '"""Hide WOA info"""', '"""Hide the visualization of WOA info"""', 'wx.ITEM_CHECK'], {}), "(self.ViewMenu, MENU_VIEW_HIDE_WOA, 'Hide WOA info',\n 'Hide the visualization of WOA info', wx.ITEM_CHECK)\n", (13673, 13782), False, 'import wx\n'), ((13896, 14015), 'wx.MenuItem', 'wx.MenuItem', (['self.ViewMenu', 'MENU_VIEW_HIDE_FLAGGED', '"""Hide flagged data"""', '"""Hide all the flagged data"""', 'wx.ITEM_CHECK'], {}), "(self.ViewMenu, MENU_VIEW_HIDE_FLAGGED, 'Hide flagged data',\n 'Hide all the flagged data', wx.ITEM_CHECK)\n", (13907, 14015), False, 'import wx\n'), ((14127, 14252), 'wx.MenuItem', 'wx.MenuItem', (['self.ViewMenu', 'MENU_VIEW_HIDE_DEPTH', '"""Hide depth"""', '"""Hide the depth visualization on the plot"""', 'wx.ITEM_CHECK'], {}), "(self.ViewMenu, MENU_VIEW_HIDE_DEPTH, 'Hide depth',\n 'Hide the depth visualization on the plot', wx.ITEM_CHECK)\n", (14138, 14252), False, 'import wx\n'), ((14455, 14464), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (14462, 14464), False, 'import wx\n'), ((14495, 14629), 'wx.MenuItem', 'wx.MenuItem', (['self.ProcessMenu', 'MENU_PROC_LOAD_SAL', '"""Load salinity"""', '"""Load salinity from reference cast [XBT only]"""', 'wx.ITEM_NORMAL'], {}), "(self.ProcessMenu, MENU_PROC_LOAD_SAL, 'Load salinity',\n 'Load salinity from reference cast [XBT only]', wx.ITEM_NORMAL)\n", (14506, 14629), False, 'import wx\n'), ((14759, 14942), 'wx.MenuItem', 'wx.MenuItem', (['self.ProcessMenu', 'MENU_PROC_LOAD_TEMP_SAL', '"""Load temperature/salinity"""', '"""Load temperature and salinity from reference cast [SVP and XBT only]"""', 'wx.ITEM_NORMAL'], {}), "(self.ProcessMenu, MENU_PROC_LOAD_TEMP_SAL,\n 'Load temperature/salinity',\n 'Load temperature and salinity from reference cast [SVP and XBT only]',\n wx.ITEM_NORMAL)\n", (14770, 14942), False, 'import wx\n'), ((15166, 15315), 'wx.MenuItem', 'wx.MenuItem', (['self.ProcessMenu', 'MENU_PROC_LOAD_SURFSP', '"""Get surface sound speed"""', '"""Get the surface sound speed value from SIS"""', 'wx.ITEM_NORMAL'], {}), "(self.ProcessMenu, MENU_PROC_LOAD_SURFSP,\n 'Get surface sound speed', 'Get the surface sound speed value from SIS',\n wx.ITEM_NORMAL)\n", (15177, 15315), False, 'import wx\n'), ((15491, 15622), 'wx.MenuItem', 'wx.MenuItem', (['self.ProcessMenu', 'MENU_PROC_EXTEND_CAST', '"""Extend cast"""', '"""Extend the cast using the reference cast"""', 'wx.ITEM_NORMAL'], {}), "(self.ProcessMenu, MENU_PROC_EXTEND_CAST, 'Extend cast',\n 'Extend the cast using the reference cast', wx.ITEM_NORMAL)\n", (15502, 15622), False, 'import wx\n'), ((15749, 15758), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (15756, 15758), False, 'import wx\n'), ((15783, 15900), 'wx.MenuItem', 'wx.MenuItem', (['self.ProcessInspection', 'MENU_PROC_INS_ZOOM', '"""Zoom"""', '"""Zoom on plot by mouse selection"""', 'wx.ITEM_RADIO'], {}), "(self.ProcessInspection, MENU_PROC_INS_ZOOM, 'Zoom',\n 'Zoom on plot by mouse selection', wx.ITEM_RADIO)\n", (15794, 15900), False, 'import wx\n'), ((16014, 16139), 'wx.MenuItem', 'wx.MenuItem', (['self.ProcessInspection', 'MENU_PROC_INS_FLAG', '"""Flag"""', '"""Flag samples on plot by mouse selection"""', 'wx.ITEM_RADIO'], {}), "(self.ProcessInspection, MENU_PROC_INS_FLAG, 'Flag',\n 'Flag samples on plot by mouse selection', wx.ITEM_RADIO)\n", (16025, 16139), False, 'import wx\n'), ((16255, 16386), 'wx.MenuItem', 'wx.MenuItem', (['self.ProcessInspection', 'MENU_PROC_INS_UNFLAG', 
'"""Unflag"""', '"""Unflag samples on plot by mouse selection"""', 'wx.ITEM_RADIO'], {}), "(self.ProcessInspection, MENU_PROC_INS_UNFLAG, 'Unflag',\n 'Unflag samples on plot by mouse selection', wx.ITEM_RADIO)\n", (16266, 16386), False, 'import wx\n'), ((16506, 16629), 'wx.MenuItem', 'wx.MenuItem', (['self.ProcessInspection', 'MENU_PROC_INS_INSERT', '"""Insert"""', '"""Insert a sample by mouse clicking"""', 'wx.ITEM_RADIO'], {}), "(self.ProcessInspection, MENU_PROC_INS_INSERT, 'Insert',\n 'Insert a sample by mouse clicking', wx.ITEM_RADIO)\n", (16517, 16629), False, 'import wx\n'), ((16989, 17144), 'wx.MenuItem', 'wx.MenuItem', (['self.ProcessMenu', 'MENU_PROC_PREVIEW_THINNING', '"""Preview thinning"""', '"""Preview the thinning required by some client types"""', 'wx.ITEM_NORMAL'], {}), "(self.ProcessMenu, MENU_PROC_PREVIEW_THINNING,\n 'Preview thinning',\n 'Preview the thinning required by some client types', wx.ITEM_NORMAL)\n", (17000, 17144), False, 'import wx\n'), ((17286, 17410), 'wx.MenuItem', 'wx.MenuItem', (['self.ProcessMenu', 'MENU_PROC_SEND_PROFILE', '"""Send SSP"""', '"""Send the current SSP to the clients"""', 'wx.ITEM_NORMAL'], {}), "(self.ProcessMenu, MENU_PROC_SEND_PROFILE, 'Send SSP',\n 'Send the current SSP to the clients', wx.ITEM_NORMAL)\n", (17297, 17410), False, 'import wx\n'), ((17587, 17708), 'wx.MenuItem', 'wx.MenuItem', (['self.ProcessMenu', 'MENU_PROC_STORE_SSP', '"""Store SSP"""', '"""Locally store the current SSP data"""', 'wx.ITEM_NORMAL'], {}), "(self.ProcessMenu, MENU_PROC_STORE_SSP, 'Store SSP',\n 'Locally store the current SSP data', wx.ITEM_NORMAL)\n", (17598, 17708), False, 'import wx\n'), ((17834, 17978), 'wx.MenuItem', 'wx.MenuItem', (['self.ProcessMenu', 'MENU_PROC_REDO_SSP', '"""Redo processing"""', '"""Redo the processing by reloading the stored raw data"""', 'wx.ITEM_NORMAL'], {}), "(self.ProcessMenu, MENU_PROC_REDO_SSP, 'Redo processing',\n 'Redo the processing by reloading the stored raw data', wx.ITEM_NORMAL)\n", (17845, 17978), False, 'import wx\n'), ((18108, 18258), 'wx.MenuItem', 'wx.MenuItem', (['self.ProcessMenu', 'MENU_PROC_LOG_METADATA', '"""Log processing metadata"""', '"""Store the processing metadata in the log DB"""', 'wx.ITEM_CHECK'], {}), "(self.ProcessMenu, MENU_PROC_LOG_METADATA,\n 'Log processing metadata',\n 'Store the processing metadata in the log DB', wx.ITEM_CHECK)\n", (18119, 18258), False, 'import wx\n'), ((18796, 18805), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (18803, 18805), False, 'import wx\n'), ((18840, 18849), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (18847, 18849), False, 'import wx\n'), ((18883, 19000), 'wx.MenuItem', 'wx.MenuItem', (['DbQuery', 'MENU_DB_QUERY_INTERNAL_DB', '"""Internal DB"""', '"""Retrieve the locally stored SSP"""', 'wx.ITEM_NORMAL'], {}), "(DbQuery, MENU_DB_QUERY_INTERNAL_DB, 'Internal DB',\n 'Retrieve the locally stored SSP', wx.ITEM_NORMAL)\n", (18894, 19000), False, 'import wx\n'), ((19126, 19250), 'wx.MenuItem', 'wx.MenuItem', (['DbQuery', 'MENU_DB_QUERY_EXTERNAL_DB', '"""External DB"""', '"""Retrieve a SSP stored in the select DB"""', 'wx.ITEM_NORMAL'], {}), "(DbQuery, MENU_DB_QUERY_EXTERNAL_DB, 'External DB',\n 'Retrieve a SSP stored in the select DB', wx.ITEM_NORMAL)\n", (19137, 19250), False, 'import wx\n'), ((19518, 19527), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (19525, 19527), False, 'import wx\n'), ((19562, 19677), 'wx.MenuItem', 'wx.MenuItem', (['DbDelete', 'MENU_DB_DELETE_INTERNAL_DB', '"""Internal DB"""', '"""Delete a locally stored SSP"""', 'wx.ITEM_NORMAL'], {}), "(DbDelete, 
MENU_DB_DELETE_INTERNAL_DB, 'Internal DB',\n 'Delete a locally stored SSP', wx.ITEM_NORMAL)\n", (19573, 19677), False, 'import wx\n'), ((19807, 19931), 'wx.MenuItem', 'wx.MenuItem', (['DbDelete', 'MENU_DB_DELETE_EXTERNAL_DB', '"""External DB"""', '"""Delete a SSP stored in the select DB"""', 'wx.ITEM_NORMAL'], {}), "(DbDelete, MENU_DB_DELETE_EXTERNAL_DB, 'External DB',\n 'Delete a SSP stored in the select DB', wx.ITEM_NORMAL)\n", (19818, 19931), False, 'import wx\n'), ((20142, 20151), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (20149, 20151), False, 'import wx\n'), ((20179, 20298), 'wx.MenuItem', 'wx.MenuItem', (['DbExport', 'MENU_DB_EXPORT_SHP', '"""Shapefile"""', '"""Export all the stored SSPs as a Shapefile"""', 'wx.ITEM_NORMAL'], {}), "(DbExport, MENU_DB_EXPORT_SHP, 'Shapefile',\n 'Export all the stored SSPs as a Shapefile', wx.ITEM_NORMAL)\n", (20190, 20298), False, 'import wx\n'), ((20407, 20519), 'wx.MenuItem', 'wx.MenuItem', (['DbExport', 'MENU_DB_EXPORT_KML', '"""KML"""', '"""Export all the stored SSPs as a KML file"""', 'wx.ITEM_NORMAL'], {}), "(DbExport, MENU_DB_EXPORT_KML, 'KML',\n 'Export all the stored SSPs as a KML file', wx.ITEM_NORMAL)\n", (20418, 20519), False, 'import wx\n'), ((20628, 20752), 'wx.MenuItem', 'wx.MenuItem', (['DbExport', 'MENU_DB_EXPORT_CSV', '"""CSV"""', '"""Export all the stored SSPs as a Comma-Separated file"""', 'wx.ITEM_NORMAL'], {}), "(DbExport, MENU_DB_EXPORT_CSV, 'CSV',\n 'Export all the stored SSPs as a Comma-Separated file', wx.ITEM_NORMAL)\n", (20639, 20752), False, 'import wx\n'), ((20941, 20950), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (20948, 20950), False, 'import wx\n'), ((20979, 21097), 'wx.MenuItem', 'wx.MenuItem', (['DbPlot', 'MENU_DB_PLOT_MAP_SSP', '"""Map all SSPs"""', '"""Create a map with all the stored SSPs"""', 'wx.ITEM_NORMAL'], {}), "(DbPlot, MENU_DB_PLOT_MAP_SSP, 'Map all SSPs',\n 'Create a map with all the stored SSPs', wx.ITEM_NORMAL)\n", (20990, 21097), False, 'import wx\n'), ((21209, 21327), 'wx.MenuItem', 'wx.MenuItem', (['DbPlot', 'MENU_DB_PLOT_DAILY_SSP', '"""Create daily plot"""', '"""Create a SSP plot for each day"""', 'wx.ITEM_NORMAL'], {}), "(DbPlot, MENU_DB_PLOT_DAILY_SSP, 'Create daily plot',\n 'Create a SSP plot for each day', wx.ITEM_NORMAL)\n", (21220, 21327), False, 'import wx\n'), ((21443, 21557), 'wx.MenuItem', 'wx.MenuItem', (['DbPlot', 'MENU_DB_SAVE_DAILY_SSP', '"""Save daily plot"""', '"""Save a SSP plot for each day"""', 'wx.ITEM_NORMAL'], {}), "(DbPlot, MENU_DB_SAVE_DAILY_SSP, 'Save daily plot',\n 'Save a SSP plot for each day', wx.ITEM_NORMAL)\n", (21454, 21557), False, 'import wx\n'), ((21827, 21836), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (21834, 21836), False, 'import wx\n'), ((21858, 21867), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (21865, 21867), False, 'import wx\n'), ((21900, 22003), 'wx.MenuItem', 'wx.MenuItem', (['ServerMenu', 'MENU_SERVER_START', '"""Start server"""', '"""Start SIS server mode"""', 'wx.ITEM_NORMAL'], {}), "(ServerMenu, MENU_SERVER_START, 'Start server',\n 'Start SIS server mode', wx.ITEM_NORMAL)\n", (21911, 22003), False, 'import wx\n'), ((22128, 22226), 'wx.MenuItem', 'wx.MenuItem', (['ServerMenu', 'MENU_SERVER_SEND', '"""Force send"""', '"""Force to send a SSP"""', 'wx.ITEM_NORMAL'], {}), "(ServerMenu, MENU_SERVER_SEND, 'Force send',\n 'Force to send a SSP', wx.ITEM_NORMAL)\n", (22139, 22226), False, 'import wx\n'), ((22349, 22449), 'wx.MenuItem', 'wx.MenuItem', (['ServerMenu', 'MENU_SERVER_STOP', '"""Stop server"""', '"""Stop SIS server mode"""', 'wx.ITEM_NORMAL'], {}), 
"(ServerMenu, MENU_SERVER_STOP, 'Stop server',\n 'Stop SIS server mode', wx.ITEM_NORMAL)\n", (22360, 22449), False, 'import wx\n'), ((22611, 22745), 'wx.MenuItem', 'wx.MenuItem', (['ServerMenu', 'MENU_SERVER_LOG_METADATA', '"""Log server metadata"""', '"""Store the server metadata in the log DB"""', 'wx.ITEM_CHECK'], {}), "(ServerMenu, MENU_SERVER_LOG_METADATA, 'Log server metadata',\n 'Store the server metadata in the log DB', wx.ITEM_CHECK)\n", (22622, 22745), False, 'import wx\n'), ((22989, 23109), 'wx.MenuItem', 'wx.MenuItem', (['self.ToolsMenu', 'MENU_TOOLS_REF_MON', '"""Refraction Monitor"""', '"""Open the refraction monitor"""', 'wx.ITEM_NORMAL'], {}), "(self.ToolsMenu, MENU_TOOLS_REF_MON, 'Refraction Monitor',\n 'Open the refraction monitor', wx.ITEM_NORMAL)\n", (23000, 23109), False, 'import wx\n'), ((23224, 23334), 'wx.MenuItem', 'wx.MenuItem', (['self.ToolsMenu', 'MENU_TOOLS_GEO_MONITOR', '"""Geo Monitor"""', '"""Open the Geo Monitor"""', 'wx.ITEM_NORMAL'], {}), "(self.ToolsMenu, MENU_TOOLS_GEO_MONITOR, 'Geo Monitor',\n 'Open the Geo Monitor', wx.ITEM_NORMAL)\n", (23235, 23334), False, 'import wx\n'), ((23488, 23497), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (23495, 23497), False, 'import wx\n'), ((23535, 23684), 'wx.MenuItem', 'wx.MenuItem', (['self.ToolsMenu', 'MENU_TOOLS_SET_REFERENCE_CAST', '"""Set as reference cast"""', '"""Set the current SSP as reference cast"""', 'wx.ITEM_NORMAL'], {}), "(self.ToolsMenu, MENU_TOOLS_SET_REFERENCE_CAST,\n 'Set as reference cast', 'Set the current SSP as reference cast', wx.\n ITEM_NORMAL)\n", (23546, 23684), False, 'import wx\n'), ((23873, 24019), 'wx.MenuItem', 'wx.MenuItem', (['self.ToolsMenu', 'MENU_TOOLS_EDIT_REFERENCE_CAST', '"""Edit the reference cast"""', '"""Edit the current reference cast"""', 'wx.ITEM_NORMAL'], {}), "(self.ToolsMenu, MENU_TOOLS_EDIT_REFERENCE_CAST,\n 'Edit the reference cast', 'Edit the current reference cast', wx.\n ITEM_NORMAL)\n", (23884, 24019), False, 'import wx\n'), ((24212, 24361), 'wx.MenuItem', 'wx.MenuItem', (['self.ToolsMenu', 'MENU_TOOLS_CLEAR_REFERENCE_CAST', '"""Clear the reference cast"""', '"""Clear the current reference cast"""', 'wx.ITEM_NORMAL'], {}), "(self.ToolsMenu, MENU_TOOLS_CLEAR_REFERENCE_CAST,\n 'Clear the reference cast', 'Clear the current reference cast', wx.\n ITEM_NORMAL)\n", (24223, 24361), False, 'import wx\n'), ((24747, 24882), 'wx.MenuItem', 'wx.MenuItem', (['self.ToolsMenu', 'MENU_TOOLS_USER_INPUTS', '"""Monitor user inputs"""', '"""Provide information about user inputs"""', 'wx.ITEM_NORMAL'], {}), "(self.ToolsMenu, MENU_TOOLS_USER_INPUTS, 'Monitor user inputs',\n 'Provide information about user inputs', wx.ITEM_NORMAL)\n", (24758, 24882), False, 'import wx\n'), ((25013, 25147), 'wx.MenuItem', 'wx.MenuItem', (['self.ToolsMenu', 'MENU_TOOLS_MODIFY_SETTINGS', '"""Modify SSP settings"""', '"""Open tool to modify SSP settings"""', 'wx.ITEM_NORMAL'], {}), "(self.ToolsMenu, MENU_TOOLS_MODIFY_SETTINGS,\n 'Modify SSP settings', 'Open tool to modify SSP settings', wx.ITEM_NORMAL)\n", (25024, 25147), False, 'import wx\n'), ((25282, 25409), 'wx.MenuItem', 'wx.MenuItem', (['self.ToolsMenu', 'MENU_TOOLS_VIEW_SETTINGS', '"""View SSP settings"""', '"""Show SSP settings information"""', 'wx.ITEM_NORMAL'], {}), "(self.ToolsMenu, MENU_TOOLS_VIEW_SETTINGS, 'View SSP settings',\n 'Show SSP settings information', wx.ITEM_NORMAL)\n", (25293, 25409), False, 'import wx\n'), ((25544, 25677), 'wx.MenuItem', 'wx.MenuItem', (['self.ToolsMenu', 'MENU_TOOLS_RELOAD_SETTINGS', '"""Reload SSP settings"""', 
'"""Reload SSP settings information"""', 'wx.ITEM_NORMAL'], {}), "(self.ToolsMenu, MENU_TOOLS_RELOAD_SETTINGS,\n 'Reload SSP settings', 'Reload SSP settings information', wx.ITEM_NORMAL)\n", (25555, 25677), False, 'import wx\n'), ((25874, 25883), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (25881, 25883), False, 'import wx\n'), ((25910, 26003), 'wx.MenuItem', 'wx.MenuItem', (['self.HelpMenu', 'MENU_HELP_MANUAL', '"""Manual"""', '"""Open the manual"""', 'wx.ITEM_NORMAL'], {}), "(self.HelpMenu, MENU_HELP_MANUAL, 'Manual', 'Open the manual',\n wx.ITEM_NORMAL)\n", (25921, 26003), False, 'import wx\n'), ((26153, 26255), 'wx.MenuItem', 'wx.MenuItem', (['self.HelpMenu', 'MENU_HELP_ABOUT', '"""About"""', '"""Info about the application"""', 'wx.ITEM_NORMAL'], {}), "(self.HelpMenu, MENU_HELP_ABOUT, 'About',\n 'Info about the application', wx.ITEM_NORMAL)\n", (26164, 26255), False, 'import wx\n'), ((32463, 32487), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (32474, 32487), False, 'import wx\n')]
|
from torch.utils.data import DataLoader
from torch.utils.data.distributed import DistributedSampler
from .s3dis import S3DISDataset
from .scannetv2 import ScanNetDataset
__all__ = ['S3DISDataset', 'ScanNetDataset', 'build_dataset']
def build_dataset(data_cfg, logger):
assert 'type' in data_cfg
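    # work on a copy so that popping 'type' below does not mutate the caller's config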
_data_cfg = data_cfg.copy()
_data_cfg['logger'] = logger
data_type = _data_cfg.pop('type')
if data_type == 's3dis':
return S3DISDataset(**_data_cfg)
elif data_type == 'scannetv2':
return ScanNetDataset(**_data_cfg)
else:
        raise ValueError(f'Unknown dataset type: {data_type}')
def build_dataloader(dataset, batch_size=1, num_workers=1, training=True, dist=False):
shuffle = training
sampler = DistributedSampler(dataset, shuffle=shuffle) if dist else None
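    # DataLoader rejects shuffle=True when an explicit sampler is given,
    # so shuffling is delegated to the DistributedSampler instead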
if sampler is not None:
shuffle = False
if training:
return DataLoader(
dataset,
batch_size=batch_size,
num_workers=num_workers,
collate_fn=dataset.collate_fn,
shuffle=shuffle,
sampler=sampler,
drop_last=True,
pin_memory=True)
else:
assert batch_size == 1
return DataLoader(
dataset,
batch_size=batch_size,
num_workers=num_workers,
collate_fn=dataset.collate_fn,
shuffle=False,
sampler=sampler,
drop_last=False,
pin_memory=True)
|
[
"torch.utils.data.distributed.DistributedSampler",
"torch.utils.data.DataLoader"
] |
[((739, 783), 'torch.utils.data.distributed.DistributedSampler', 'DistributedSampler', (['dataset'], {'shuffle': 'shuffle'}), '(dataset, shuffle=shuffle)\n', (757, 783), False, 'from torch.utils.data.distributed import DistributedSampler\n'), ((886, 1059), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'batch_size', 'num_workers': 'num_workers', 'collate_fn': 'dataset.collate_fn', 'shuffle': 'shuffle', 'sampler': 'sampler', 'drop_last': '(True)', 'pin_memory': '(True)'}), '(dataset, batch_size=batch_size, num_workers=num_workers,\n collate_fn=dataset.collate_fn, shuffle=shuffle, sampler=sampler,\n drop_last=True, pin_memory=True)\n', (896, 1059), False, 'from torch.utils.data import DataLoader\n'), ((1205, 1377), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'batch_size', 'num_workers': 'num_workers', 'collate_fn': 'dataset.collate_fn', 'shuffle': '(False)', 'sampler': 'sampler', 'drop_last': '(False)', 'pin_memory': '(True)'}), '(dataset, batch_size=batch_size, num_workers=num_workers,\n collate_fn=dataset.collate_fn, shuffle=False, sampler=sampler,\n drop_last=False, pin_memory=True)\n', (1215, 1377), False, 'from torch.utils.data import DataLoader\n')]
|
import keras
from keras.layers import Activation
from keras.models import load_model
from keras import backend as K
from keras.utils.generic_utils import get_custom_objects
import tensorflow as tf
import numpy as np
import pandas as pd
import timeit
import sys
import argparse
# Constants
#window_size = 1024
def windowNoOverlay(data, window_size): # Without overlay
windowed_data = []
i = 0
while(i + window_size-1 < len(data)):
windowed_data.append(data[i:(i+window_size)])
i += window_size
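    # cover the tail of the series: take one last window aligned to the end,
    # which may overlap the previous window, so no samples are dropped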
if (i != len(data)):
i = len(data) - window_size
windowed_data.append(data[i:len(data)]) # add the rest
return windowed_data
def parser_args(cmd_args):
parser = argparse.ArgumentParser(sys.argv[0], description="", formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-e", "--exp", type=str, action="store", default="pairwise_distances", help="Experiment")
parser.add_argument("-d", "--dataset", type=str, action="store", default="PigArtPressure", help="Dataset name")
return parser.parse_args(cmd_args)
# obtaining arguments from command line
args = parser_args(sys.argv[1:])
dataset = args.dataset
exp = args.exp
def swish(x, beta = 1):
return (x * K.sigmoid(beta * x))
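# register the custom activation so load_model can resolve layers that reference it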
get_custom_objects().update({'Swish': Activation(swish)})
# Swish Activation
#class Swish(Activation):
# def __init__(self, activation, **kwargs):
# super(Swish, self).__init__(activation, **kwargs)
# self.__name__ = 'swish'
#def swish(x):
# return (K.sigmoid(x) * x)
#get_custom_objects().update({'swish': Swish(swish)})
encoder = load_model('../models/' + exp + '/new_train/' + 'encoder_' + dataset + ".h5", compile = False)
if (exp == "pairwise_distances"):
data = np.genfromtxt('../data/' + exp + '/' + dataset + '.txt', delimiter=' ',)
print("Data shape:", data.shape)
elif (exp == "similarity_search"):
data = np.genfromtxt('../data/' + exp + '/' + dataset + '/' + 'Data.txt', delimiter=' ',)
print("Data shape:", data.shape)
print("Encoding the queries as well")
for i in range(1, 6):
query = np.genfromtxt('../data/' + exp + '/' + dataset + '/' + 'Query' + str(i) + '.txt', delimiter=' ',)
query.shape = 1, query.shape[0], 1
query = encoder.predict(query)
query.shape = query.shape[1]
        np.savetxt('../data/' + exp + '/' + dataset + '/coded_data/Query' + str(i) + '.txt', query)
del query
else:
data = np.genfromtxt('../data/' + exp + '/' + dataset + '/' + dataset + '_test.txt', delimiter=' ',)
print("Data shape:", data.shape)
# Getting rid of the NaNs and infs with interpolation
if (len(data.shape) == 1):
data = np.array(pd.Series(data).interpolate())
serie_length = 1024
# 'Windowing'
data = np.array(windowNoOverlay(data, serie_length))
print("Window Data shape:", data.shape)
else:
serie_length = data.shape[1]
print("Serie length:", serie_length)
data.shape = data.shape[0], serie_length, 1
# Workaround to load the libraries so it doesn't count in the timer,
# in production these libraries would be already loaded
coded_data = encoder.predict(data)
start = timeit.default_timer()
coded_data = encoder.predict(data)
print("Coded Data shape:", coded_data.shape)
stop = timeit.default_timer()
print("Time to code the serie:", stop - start)
coded_data.shape = coded_data.shape[0], coded_data.shape[1]
if (exp == "similarity_search"):
np.savetxt('../data/' + exp + '/' + dataset + '/coded_data/' + 'Data.txt', coded_data)
elif(exp == "pairwise_distances"):
np.savetxt('../data/' + exp + '/coded_data/' + dataset + '_coded.txt', coded_data)
else:
np.savetxt('../data/' + exp + '/' + dataset + '/' + dataset + '_coded.txt', coded_data)
|
[
"keras.models.load_model",
"argparse.ArgumentParser",
"keras.layers.Activation",
"timeit.default_timer",
"numpy.savetxt",
"numpy.genfromtxt",
"keras.utils.generic_utils.get_custom_objects",
"pandas.Series",
"keras.backend.sigmoid"
] |
[((1566, 1662), 'keras.models.load_model', 'load_model', (["('../models/' + exp + '/new_train/' + 'encoder_' + dataset + '.h5')"], {'compile': '(False)'}), "('../models/' + exp + '/new_train/' + 'encoder_' + dataset +\n '.h5', compile=False)\n", (1576, 1662), False, 'from keras.models import load_model\n'), ((3046, 3068), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (3066, 3068), False, 'import timeit\n'), ((3158, 3180), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (3178, 3180), False, 'import timeit\n'), ((681, 794), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['sys.argv[0]'], {'description': '""""""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(sys.argv[0], description='', formatter_class=\n argparse.ArgumentDefaultsHelpFormatter)\n", (704, 794), False, 'import argparse\n'), ((1704, 1775), 'numpy.genfromtxt', 'np.genfromtxt', (["('../data/' + exp + '/' + dataset + '.txt')"], {'delimiter': '""" """'}), "('../data/' + exp + '/' + dataset + '.txt', delimiter=' ')\n", (1717, 1775), True, 'import numpy as np\n'), ((3324, 3414), 'numpy.savetxt', 'np.savetxt', (["('../data/' + exp + '/' + dataset + '/coded_data/' + 'Data.txt')", 'coded_data'], {}), "('../data/' + exp + '/' + dataset + '/coded_data/' + 'Data.txt',\n coded_data)\n", (3334, 3414), True, 'import numpy as np\n'), ((1206, 1225), 'keras.backend.sigmoid', 'K.sigmoid', (['(beta * x)'], {}), '(beta * x)\n', (1215, 1225), True, 'from keras import backend as K\n'), ((1228, 1248), 'keras.utils.generic_utils.get_custom_objects', 'get_custom_objects', ([], {}), '()\n', (1246, 1248), False, 'from keras.utils.generic_utils import get_custom_objects\n'), ((1266, 1283), 'keras.layers.Activation', 'Activation', (['swish'], {}), '(swish)\n', (1276, 1283), False, 'from keras.layers import Activation\n'), ((1854, 1939), 'numpy.genfromtxt', 'np.genfromtxt', (["('../data/' + exp + '/' + dataset + '/' + 'Data.txt')"], {'delimiter': '""" """'}), "('../data/' + exp + '/' + dataset + '/' + 'Data.txt',\n delimiter=' ')\n", (1867, 1939), True, 'import numpy as np\n'), ((2365, 2461), 'numpy.genfromtxt', 'np.genfromtxt', (["('../data/' + exp + '/' + dataset + '/' + dataset + '_test.txt')"], {'delimiter': '""" """'}), "('../data/' + exp + '/' + dataset + '/' + dataset +\n '_test.txt', delimiter=' ')\n", (2378, 2461), True, 'import numpy as np\n'), ((3447, 3533), 'numpy.savetxt', 'np.savetxt', (["('../data/' + exp + '/coded_data/' + dataset + '_coded.txt')", 'coded_data'], {}), "('../data/' + exp + '/coded_data/' + dataset + '_coded.txt',\n coded_data)\n", (3457, 3533), True, 'import numpy as np\n'), ((3537, 3628), 'numpy.savetxt', 'np.savetxt', (["('../data/' + exp + '/' + dataset + '/' + dataset + '_coded.txt')", 'coded_data'], {}), "('../data/' + exp + '/' + dataset + '/' + dataset + '_coded.txt',\n coded_data)\n", (3547, 3628), True, 'import numpy as np\n'), ((2592, 2607), 'pandas.Series', 'pd.Series', (['data'], {}), '(data)\n', (2601, 2607), True, 'import pandas as pd\n')]
|
from rest_framework import mixins, response, viewsets
from dateflix_api.models import User
from dateflix_api.serializers import ProfileSerializer
class MeViewSet(mixins.ListModelMixin, viewsets.GenericViewSet):
"""
API endpoint that allows current profile to be viewed.
"""
serializer_class = ProfileSerializer
queryset = User.objects.all()
def list(self, request, *args, **kwargs):
# assumes the user is authenticated, handle this according your needs
return response.Response(self.serializer_class(request.user).data)
|
[
"dateflix_api.models.User.objects.all"
] |
[((346, 364), 'dateflix_api.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (362, 364), False, 'from dateflix_api.models import User\n')]
|
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM)
GPIO.setup(18, GPIO.OUT)
p = GPIO.PWM(18, 50)
p.start(2.5)
while True:
p.ChangeDutyCycle(2.5) # 0 degree
time.sleep(1)
    p.ChangeDutyCycle(6.75) # roughly mid position (exact angle depends on the servo's pulse range)
time.sleep(1)
    p.ChangeDutyCycle(10.5) # near the upper end of this servo's travel
time.sleep(1)
|
[
"RPi.GPIO.setup",
"RPi.GPIO.setmode",
"RPi.GPIO.PWM",
"time.sleep"
] |
[((37, 59), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BCM'], {}), '(GPIO.BCM)\n', (49, 59), True, 'import RPi.GPIO as GPIO\n'), ((60, 84), 'RPi.GPIO.setup', 'GPIO.setup', (['(18)', 'GPIO.OUT'], {}), '(18, GPIO.OUT)\n', (70, 84), True, 'import RPi.GPIO as GPIO\n'), ((90, 106), 'RPi.GPIO.PWM', 'GPIO.PWM', (['(18)', '(50)'], {}), '(18, 50)\n', (98, 106), True, 'import RPi.GPIO as GPIO\n'), ((176, 189), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (186, 189), False, 'import time\n'), ((222, 235), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (232, 235), False, 'import time\n'), ((268, 281), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (278, 281), False, 'import time\n')]
|
# date: 2020.09.11
# author: Bartłomiej "furas" Burek (https://blog.furas.pl)
# https://stackoverflow.com/questions/63840415/how-to-scrape-website-tables-where-the-value-can-be-different-as-we-chose-but-th
import requests
from bs4 import BeautifulSoup
import csv
url = 'https://id.investing.com/instruments/HistoricalDataAjax'
payload = {
"curr_id": "8830",
"smlID": "300004",
"header": "Data+Historis+Emas+Berjangka",
"st_date": "01/30/2020",
"end_date": "12/31/2020",
"interval_sec": "Daily",
"sort_col": "date",
"sort_ord": "DESC",
"action":"historical_data"
}
headers = {
#"Referer": "https://id.investing.com/commodities/gold-historical-data",
"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:80.0) Gecko/20100101 Firefox/80.0",
"X-Requested-With": "XMLHttpRequest"
}
fh = open('output.csv', 'w', newline='')  # newline='' avoids blank rows from csv.writer on Windows
csv_writer = csv.writer(fh)
for year in range(2010, 2021):
print('year:', year)
payload["st_date"] = f"01/01/{year}"
payload["end_date"] = f"12/31/{year}"
r = requests.post(url, data=payload, headers=headers)
#print(r.text)
soup = BeautifulSoup(r.text, 'lxml')
table = soup.find('table')
for row in table.find_all('tr')[1:]: # [1:] to skip header
row_data = [item.text for item in row.find_all('td')]
print(row_data)
csv_writer.writerow(row_data)
fh.close()
|
[
"bs4.BeautifulSoup",
"requests.post",
"csv.writer"
] |
[((878, 892), 'csv.writer', 'csv.writer', (['fh'], {}), '(fh)\n', (888, 892), False, 'import csv\n'), ((1051, 1100), 'requests.post', 'requests.post', (['url'], {'data': 'payload', 'headers': 'headers'}), '(url, data=payload, headers=headers)\n', (1064, 1100), False, 'import requests\n'), ((1136, 1165), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text', '"""lxml"""'], {}), "(r.text, 'lxml')\n", (1149, 1165), False, 'from bs4 import BeautifulSoup\n')]
|
import numpy as np
from igp2 import AgentState, plot_map
from igp2.data import ScenarioConfig, InDScenario
from igp2.opendrive.map import Map
import matplotlib.pyplot as plt
from shapely.ops import unary_union
from grit.core.data_processing import get_episode_frames
from grit.core.feature_extraction import FeatureExtractor
from grit.occlusion_detection.occlusion_detection_geometry import OcclusionDetector2D
from grit.core.base import get_base_dir
def get_feature_extractor(episode_idx=1, scenario_name="bendplatz"):
scenario_map = Map.parse_from_opendrive(get_base_dir() + f"/scenarios/maps/{scenario_name}.xodr")
return FeatureExtractor(scenario_map, scenario_name, episode_idx)
def plot_occlusion(frame_id=153, episode_idx=1, *frame, plot_occlusions=True, all_vehicles=False,
scenario_name="bendplatz"):
feature_extractor = get_feature_extractor(episode_idx=episode_idx, scenario_name=scenario_name)
occlusions = feature_extractor.occlusions[frame_id]
scenario_config = ScenarioConfig.load(get_base_dir() + f"/scenarios/configs/{scenario_name}.json")
scenario = InDScenario(scenario_config)
episode = scenario.load_episode(feature_extractor.episode_idx)
# Take a step every 25 recorded frames (1s)
    # episode_frames contains, for each second, the states of all vehicles alive at that moment
episode_frames = get_episode_frames(episode, exclude_parked_cars=False, exclude_bicycles=True, step=25)
ego_id = list(occlusions.keys())[0]
ego_occlusions = occlusions[ego_id]
ego = episode_frames[frame_id][ego_id]
plot_map(feature_extractor.scenario_map, scenario_config=scenario_config, plot_buildings=True)
if plot_occlusions:
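        # collect every occluded lane polygon computed for the ego vehicle
        # and draw them all at once as semi-transparent red areas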
lane_occlusions_all = []
for road_occlusions in ego_occlusions:
for lane_occlusions in ego_occlusions[road_occlusions]:
lane_occlusion = ego_occlusions[road_occlusions][lane_occlusions]
if lane_occlusion is not None:
lane_occlusions_all.append(lane_occlusion)
OcclusionDetector2D.plot_area_from_list(lane_occlusions_all, color="r", alpha=0.5)
if all_vehicles:
for aid, state in episode_frames[frame_id].items():
plt.text(*state.position, aid)
plt.plot(*list(zip(*OcclusionDetector2D.get_box(state).boundary)), color="black")
if frame:
for aid, state in frame[0].items():
plt.text(*state.position, aid)
plt.plot(*list(zip(*OcclusionDetector2D.get_box(state).boundary)))
plt.plot(*list(zip(*OcclusionDetector2D.get_box(ego).boundary)))
def find_lane_at(point, scenario_name="bendplatz"):
scenario_map = Map.parse_from_opendrive(get_base_dir() + f"/scenarios/maps/{scenario_name}.xodr")
lanes = scenario_map.lanes_at(point)
for lane in lanes:
plot_map(scenario_map)
lane = scenario_map.get_lane(lane.parent_road.id, lane.id)
plt.plot(*list(zip(*[x for x in lane.midline.coords])))
plt.show()
def get_occlusions_and_ego(frame=153, episode_idx=1):
feature_extractor = get_feature_extractor(episode_idx)
occlusions = feature_extractor.occlusions[frame]
ego_id = list(occlusions.keys())[0]
ego_occlusions = occlusions[ego_id]
occlusions = []
for road_occlusions in ego_occlusions:
for lane_occlusions in ego_occlusions[road_occlusions]:
lane_occlusion = ego_occlusions[road_occlusions][lane_occlusions]
if lane_occlusion is not None:
occlusions.append(lane_occlusion)
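    # merge the per-lane occlusion polygons into a single geometry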
occlusions = unary_union(occlusions)
return ego_id, occlusions
def test_occluded_area_no_vehicle_in_oncoming_lanes():
mfe = get_feature_extractor()
lane_path = [mfe.scenario_map.get_lane(8, -1, 0)]
ego_id, occlusions = get_occlusions_and_ego()
state0 = AgentState(time=0,
position=np.array((45.67, -46.72)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=lane_path[0].get_heading_at(45.67, -46.72)
)
state1 = AgentState(time=0,
position=np.array((62.88, -20.96)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=np.deg2rad(-120)
)
state_ego = AgentState(time=0,
position=np.array((43.88, -44.25)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=np.deg2rad(45)
)
frame = {ego_id: state_ego, 0: state0, 1: state1}
# plot_occlusion(153, 1, frame)
oncoming_vehicle_id, oncoming_vehicle_dist = mfe.oncoming_vehicle(0, lane_path, frame)
missing = mfe.is_oncoming_vehicle_missing(oncoming_vehicle_dist, lane_path, occlusions)
plt.show()
assert missing
def set_up_frame_ep3_frame100(third_agent_position, third_agent_heading):
"""
    The third agent (state1) is the possible oncoming vehicle.
    State0 is the target vehicle.
"""
episode_idx = 3
frame_id = 100
mfe = get_feature_extractor(episode_idx=episode_idx)
lane_path = [mfe.scenario_map.get_lane(1, 1, 0),
mfe.scenario_map.get_lane(9, -1, 0)]
ego_id, occlusions = get_occlusions_and_ego(frame=frame_id, episode_idx=episode_idx)
state0 = AgentState(time=0,
position=np.array((45.67, -46.72)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=lane_path[0].get_heading_at(45.67, -46.72)
)
state1 = AgentState(time=0,
position=np.array(third_agent_position),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=np.deg2rad(third_agent_heading)
)
state_ego = AgentState(time=0,
position=np.array((43.88, -44.25)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=np.deg2rad(45)
)
target_id = 0
frame = {target_id: state0, 1: state1, ego_id: state_ego}
# plot_occlusion(frame_id, episode_idx, frame)
oncoming_vehicle_id, oncoming_vehicle_dist = mfe.oncoming_vehicle(target_id, lane_path, frame)
missing = mfe.is_oncoming_vehicle_missing(oncoming_vehicle_dist, lane_path, occlusions)
plt.show()
return missing
def test_occluded_area_vehicle_in_oncoming_lanes():
missing = set_up_frame_ep3_frame100((62.88, -20.96), -110)
assert missing
def test_occluded_area_vehicle_in_oncoming_lanes_2():
missing = set_up_frame_ep3_frame100((60.12, -33.10), 140)
assert missing
def test_occluded_area_vehicle_in_oncoming_lanes_3():
missing = set_up_frame_ep3_frame100((49.12, -30.13), -45)
assert missing
def test_occluded_area_vehicle_in_oncoming_lanes_4():
missing = set_up_frame_ep3_frame100((53.81, -38.10), 170)
assert not missing
def test_occluded_area_vehicle_in_oncoming_lanes_5():
missing = set_up_frame_ep3_frame100((56.46, -38.11), -45)
assert missing
def test_occluded_area_vehicle_in_oncoming_lanes_6():
missing = set_up_frame_ep3_frame100((55.75, -37.73), 180)
assert not missing
# Tests for missing vehicle ahead.
def test_the_vehicle_in_front_is_hidden():
"""
State1 is the possible vehicle in front.
"""
episode_idx = 6
frame_id = 50
mfe = get_feature_extractor(episode_idx=episode_idx)
lane_path = [mfe.scenario_map.get_lane(1, 1, 0)]
ego_id, occlusions = get_occlusions_and_ego(frame=frame_id, episode_idx=episode_idx)
state_target = AgentState(time=0,
position=np.array((34.58, -56.93)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=lane_path[0].get_heading_at(45.67, -46.72)
)
state1 = AgentState(time=0,
position=np.array((39.90, -52.22)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=np.deg2rad(45)
)
state_ego = AgentState(time=0,
position=np.array((34.62, -11.01)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=np.deg2rad(-45)
)
target_id = 0
frame = {target_id: state_target, 1: state1, ego_id: state_ego}
# plot_occlusion(frame_id, episode_idx, frame)
vehicle_in_front_id, vehicle_in_front_dist = mfe.vehicle_in_front(target_id, lane_path, frame)
missing = mfe.is_vehicle_in_front_missing(vehicle_in_front_dist, target_id, lane_path, frame, occlusions)
plt.show()
assert missing
def test_vehicle_is_behind():
"""
State1 is the possible vehicle in front.
"""
episode_idx = 6
frame_id = 50
mfe = get_feature_extractor(episode_idx=episode_idx)
lane_path = [mfe.scenario_map.get_lane(3, -1, 0)]
ego_id, occlusions = get_occlusions_and_ego(frame=frame_id, episode_idx=episode_idx)
state_target = AgentState(time=0,
position=np.array((76.54, -11.56)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=lane_path[0].get_heading_at(76.54, -11.56)
)
state1 = AgentState(time=0,
position=np.array((68.24, -20.61)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=np.deg2rad(45)
)
state_ego = AgentState(time=0,
position=np.array((34.62, -11.01)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=np.deg2rad(-45)
)
target_id = 0
frame = {target_id: state_target, 1: state1, ego_id: state_ego}
# plot_occlusion(frame_id, episode_idx, frame)
vehicle_in_front_id, vehicle_in_front_dist = mfe.vehicle_in_front(target_id, lane_path, frame)
missing = mfe.is_vehicle_in_front_missing(vehicle_in_front_dist, target_id, lane_path, frame, occlusions)
plt.show()
assert missing
def test_no_vehicle_in_front_2():
"""
State1 is the possible vehicle in front.
"""
episode_idx = 6
frame_id = 50
mfe = get_feature_extractor(episode_idx=episode_idx)
lane_path = [mfe.scenario_map.get_lane(3, -1, 0)]
ego_id, occlusions = get_occlusions_and_ego(frame=frame_id, episode_idx=episode_idx)
state_target = AgentState(time=0,
position=np.array((72.77, -9.44)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=lane_path[0].get_heading_at(72.77, -9.44)
)
state1 = AgentState(time=0,
position=np.array((66.29, -16.77)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=np.deg2rad(45)
)
state_ego = AgentState(time=0,
position=np.array((34.62, -11.01)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=np.deg2rad(-45)
)
target_id = 0
frame = {target_id: state_target, 1: state1, ego_id: state_ego}
# plot_occlusion(frame_id, episode_idx, frame)
vehicle_in_front_id, vehicle_in_front_dist = mfe.vehicle_in_front(target_id, lane_path, frame)
missing = mfe.is_vehicle_in_front_missing(vehicle_in_front_dist, target_id, lane_path, frame, occlusions)
plt.show()
assert not missing
def test_occlusion_far_away():
episode_idx = 7
frame_id = 200
mfe = get_feature_extractor(episode_idx=episode_idx)
lane_path = [mfe.scenario_map.get_lane(2, 2, 0),
mfe.scenario_map.get_lane(10, -1, 0)]
ego_id, occlusions = get_occlusions_and_ego(frame=frame_id, episode_idx=episode_idx)
state_target = AgentState(time=0,
position=np.array((84.70, -60.43)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=lane_path[0].get_heading_at(84.70, -60.43)
)
state_ego = AgentState(time=0,
position=np.array((73.39, -56.32)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=np.deg2rad(-45)
)
target_id = 0
frame = {target_id: state_target, ego_id: state_ego}
# plot_occlusion(frame_id, episode_idx, frame)
vehicle_in_front_id, vehicle_in_front_dist = mfe.vehicle_in_front(target_id, lane_path, frame)
missing = mfe.is_vehicle_in_front_missing(vehicle_in_front_dist, target_id, lane_path, frame, occlusions)
plt.show()
assert not missing
def test_occlusion_close_enough():
episode_idx = 7
frame_id = 200
mfe = get_feature_extractor(episode_idx=episode_idx)
lane_path = [mfe.scenario_map.get_lane(10, -1, 0)]
ego_id, occlusions = get_occlusions_and_ego(frame=frame_id, episode_idx=episode_idx)
state_target = AgentState(time=0,
position=np.array((61.59, -34.41)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=lane_path[0].get_heading_at(61.59, -34.41)
)
state_ego = AgentState(time=0,
position=np.array((73.39, -56.32)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=np.deg2rad(-45)
)
target_id = 0
frame = {target_id: state_target, ego_id: state_ego}
# plot_occlusion(frame_id, episode_idx, frame)
vehicle_in_front_id, vehicle_in_front_dist = mfe.vehicle_in_front(target_id, lane_path, frame)
missing = mfe.is_vehicle_in_front_missing(vehicle_in_front_dist, target_id, lane_path, frame, occlusions)
plt.show()
assert missing
def test_occlusion_between_vehicle_in_front():
"""
State1 is the possible vehicle in front.
"""
episode_idx = 6
frame_id = 42
mfe = get_feature_extractor(episode_idx=episode_idx)
lane_path = [mfe.scenario_map.get_lane(1, 1, 0),
mfe.scenario_map.get_lane(7, -1, 0)]
ego_id, occlusions = get_occlusions_and_ego(frame=frame_id, episode_idx=episode_idx)
state_target = AgentState(time=0,
position=np.array((33.07, -58.33)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=lane_path[0].get_heading_at(33.07, -58.33)
)
state1 = AgentState(time=0,
position=np.array((43.62, -48.47)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=np.deg2rad(45)
)
state_ego = AgentState(time=0,
position=np.array((73.39, -56.32)),
velocity=np.array((0, 0)),
acceleration=np.array((0, 0)),
heading=np.deg2rad(-45)
)
target_id = 0
frame = {target_id: state_target, ego_id: state_ego, 1: state1}
# plot_occlusion(frame_id, episode_idx, frame)
vehicle_in_front_id, vehicle_in_front_dist = mfe.vehicle_in_front(target_id, lane_path, frame)
missing = mfe.is_vehicle_in_front_missing(vehicle_in_front_dist, target_id, lane_path, frame, occlusions)
plt.show()
assert missing
# find_lane_at((32.7, -59.4))
# plot_occlusion(42, 5, scenario_name="bendplatz")
# plt.show()
|
[
"grit.core.feature_extraction.FeatureExtractor",
"grit.core.data_processing.get_episode_frames",
"shapely.ops.unary_union",
"matplotlib.pyplot.show",
"grit.occlusion_detection.occlusion_detection_geometry.OcclusionDetector2D.plot_area_from_list",
"numpy.deg2rad",
"igp2.plot_map",
"grit.core.base.get_base_dir",
"matplotlib.pyplot.text",
"igp2.data.InDScenario",
"numpy.array",
"grit.occlusion_detection.occlusion_detection_geometry.OcclusionDetector2D.get_box"
] |
[((637, 695), 'grit.core.feature_extraction.FeatureExtractor', 'FeatureExtractor', (['scenario_map', 'scenario_name', 'episode_idx'], {}), '(scenario_map, scenario_name, episode_idx)\n', (653, 695), False, 'from grit.core.feature_extraction import FeatureExtractor\n'), ((1118, 1146), 'igp2.data.InDScenario', 'InDScenario', (['scenario_config'], {}), '(scenario_config)\n', (1129, 1146), False, 'from igp2.data import ScenarioConfig, InDScenario\n'), ((1383, 1474), 'grit.core.data_processing.get_episode_frames', 'get_episode_frames', (['episode'], {'exclude_parked_cars': '(False)', 'exclude_bicycles': '(True)', 'step': '(25)'}), '(episode, exclude_parked_cars=False, exclude_bicycles=\n True, step=25)\n', (1401, 1474), False, 'from grit.core.data_processing import get_episode_frames\n'), ((1600, 1698), 'igp2.plot_map', 'plot_map', (['feature_extractor.scenario_map'], {'scenario_config': 'scenario_config', 'plot_buildings': '(True)'}), '(feature_extractor.scenario_map, scenario_config=scenario_config,\n plot_buildings=True)\n', (1608, 1698), False, 'from igp2 import AgentState, plot_map\n'), ((3591, 3614), 'shapely.ops.unary_union', 'unary_union', (['occlusions'], {}), '(occlusions)\n', (3602, 3614), False, 'from shapely.ops import unary_union\n'), ((4984, 4994), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4992, 4994), True, 'import matplotlib.pyplot as plt\n'), ((6701, 6711), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6709, 6711), True, 'import matplotlib.pyplot as plt\n'), ((9196, 9206), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (9204, 9206), True, 'import matplotlib.pyplot as plt\n'), ((10809, 10819), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (10817, 10819), True, 'import matplotlib.pyplot as plt\n'), ((12424, 12434), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (12432, 12434), True, 'import matplotlib.pyplot as plt\n'), ((13752, 13762), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (13760, 13762), True, 'import matplotlib.pyplot as plt\n'), ((15033, 15043), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (15041, 15043), True, 'import matplotlib.pyplot as plt\n'), ((16715, 16725), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (16723, 16725), True, 'import matplotlib.pyplot as plt\n'), ((2070, 2156), 'grit.occlusion_detection.occlusion_detection_geometry.OcclusionDetector2D.plot_area_from_list', 'OcclusionDetector2D.plot_area_from_list', (['lane_occlusions_all'], {'color': '"""r"""', 'alpha': '(0.5)'}), "(lane_occlusions_all, color='r',\n alpha=0.5)\n", (2109, 2156), False, 'from grit.occlusion_detection.occlusion_detection_geometry import OcclusionDetector2D\n'), ((2852, 2874), 'igp2.plot_map', 'plot_map', (['scenario_map'], {}), '(scenario_map)\n', (2860, 2874), False, 'from igp2 import AgentState, plot_map\n'), ((3014, 3024), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3022, 3024), True, 'import matplotlib.pyplot as plt\n'), ((568, 582), 'grit.core.base.get_base_dir', 'get_base_dir', ([], {}), '()\n', (580, 582), False, 'from grit.core.base import get_base_dir\n'), ((1042, 1056), 'grit.core.base.get_base_dir', 'get_base_dir', ([], {}), '()\n', (1054, 1056), False, 'from grit.core.base import get_base_dir\n'), ((2247, 2277), 'matplotlib.pyplot.text', 'plt.text', (['*state.position', 'aid'], {}), '(*state.position, aid)\n', (2255, 2277), True, 'import matplotlib.pyplot as plt\n'), ((2443, 2473), 'matplotlib.pyplot.text', 'plt.text', (['*state.position', 'aid'], {}), '(*state.position, 
aid)\n', (2451, 2473), True, 'import matplotlib.pyplot as plt\n'), ((2721, 2735), 'grit.core.base.get_base_dir', 'get_base_dir', ([], {}), '()\n', (2733, 2735), False, 'from grit.core.base import get_base_dir\n'), ((3908, 3933), 'numpy.array', 'np.array', (['(45.67, -46.72)'], {}), '((45.67, -46.72))\n', (3916, 3933), True, 'import numpy as np\n'), ((3968, 3984), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (3976, 3984), True, 'import numpy as np\n'), ((4023, 4039), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (4031, 4039), True, 'import numpy as np\n'), ((4208, 4233), 'numpy.array', 'np.array', (['(62.88, -20.96)'], {}), '((62.88, -20.96))\n', (4216, 4233), True, 'import numpy as np\n'), ((4268, 4284), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (4276, 4284), True, 'import numpy as np\n'), ((4323, 4339), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (4331, 4339), True, 'import numpy as np\n'), ((4373, 4389), 'numpy.deg2rad', 'np.deg2rad', (['(-120)'], {}), '(-120)\n', (4383, 4389), True, 'import numpy as np\n'), ((4488, 4513), 'numpy.array', 'np.array', (['(43.88, -44.25)'], {}), '((43.88, -44.25))\n', (4496, 4513), True, 'import numpy as np\n'), ((4551, 4567), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (4559, 4567), True, 'import numpy as np\n'), ((4609, 4625), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (4617, 4625), True, 'import numpy as np\n'), ((4662, 4676), 'numpy.deg2rad', 'np.deg2rad', (['(45)'], {}), '(45)\n', (4672, 4676), True, 'import numpy as np\n'), ((5556, 5581), 'numpy.array', 'np.array', (['(45.67, -46.72)'], {}), '((45.67, -46.72))\n', (5564, 5581), True, 'import numpy as np\n'), ((5616, 5632), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (5624, 5632), True, 'import numpy as np\n'), ((5671, 5687), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (5679, 5687), True, 'import numpy as np\n'), ((5856, 5886), 'numpy.array', 'np.array', (['third_agent_position'], {}), '(third_agent_position)\n', (5864, 5886), True, 'import numpy as np\n'), ((5921, 5937), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (5929, 5937), True, 'import numpy as np\n'), ((5976, 5992), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (5984, 5992), True, 'import numpy as np\n'), ((6026, 6057), 'numpy.deg2rad', 'np.deg2rad', (['third_agent_heading'], {}), '(third_agent_heading)\n', (6036, 6057), True, 'import numpy as np\n'), ((6156, 6181), 'numpy.array', 'np.array', (['(43.88, -44.25)'], {}), '((43.88, -44.25))\n', (6164, 6181), True, 'import numpy as np\n'), ((6219, 6235), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (6227, 6235), True, 'import numpy as np\n'), ((6277, 6293), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (6285, 6293), True, 'import numpy as np\n'), ((6330, 6344), 'numpy.deg2rad', 'np.deg2rad', (['(45)'], {}), '(45)\n', (6340, 6344), True, 'import numpy as np\n'), ((8025, 8050), 'numpy.array', 'np.array', (['(34.58, -56.93)'], {}), '((34.58, -56.93))\n', (8033, 8050), True, 'import numpy as np\n'), ((8091, 8107), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (8099, 8107), True, 'import numpy as np\n'), ((8152, 8168), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (8160, 8168), True, 'import numpy as np\n'), ((8349, 8373), 'numpy.array', 'np.array', (['(39.9, -52.22)'], {}), '((39.9, -52.22))\n', (8357, 8373), True, 'import numpy as np\n'), ((8409, 8425), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 
0))\n', (8417, 8425), True, 'import numpy as np\n'), ((8464, 8480), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (8472, 8480), True, 'import numpy as np\n'), ((8514, 8528), 'numpy.deg2rad', 'np.deg2rad', (['(45)'], {}), '(45)\n', (8524, 8528), True, 'import numpy as np\n'), ((8627, 8652), 'numpy.array', 'np.array', (['(34.62, -11.01)'], {}), '((34.62, -11.01))\n', (8635, 8652), True, 'import numpy as np\n'), ((8690, 8706), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (8698, 8706), True, 'import numpy as np\n'), ((8748, 8764), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (8756, 8764), True, 'import numpy as np\n'), ((8801, 8816), 'numpy.deg2rad', 'np.deg2rad', (['(-45)'], {}), '(-45)\n', (8811, 8816), True, 'import numpy as np\n'), ((9638, 9663), 'numpy.array', 'np.array', (['(76.54, -11.56)'], {}), '((76.54, -11.56))\n', (9646, 9663), True, 'import numpy as np\n'), ((9704, 9720), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (9712, 9720), True, 'import numpy as np\n'), ((9765, 9781), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (9773, 9781), True, 'import numpy as np\n'), ((9962, 9987), 'numpy.array', 'np.array', (['(68.24, -20.61)'], {}), '((68.24, -20.61))\n', (9970, 9987), True, 'import numpy as np\n'), ((10022, 10038), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (10030, 10038), True, 'import numpy as np\n'), ((10077, 10093), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (10085, 10093), True, 'import numpy as np\n'), ((10127, 10141), 'numpy.deg2rad', 'np.deg2rad', (['(45)'], {}), '(45)\n', (10137, 10141), True, 'import numpy as np\n'), ((10240, 10265), 'numpy.array', 'np.array', (['(34.62, -11.01)'], {}), '((34.62, -11.01))\n', (10248, 10265), True, 'import numpy as np\n'), ((10303, 10319), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (10311, 10319), True, 'import numpy as np\n'), ((10361, 10377), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (10369, 10377), True, 'import numpy as np\n'), ((10414, 10429), 'numpy.deg2rad', 'np.deg2rad', (['(-45)'], {}), '(-45)\n', (10424, 10429), True, 'import numpy as np\n'), ((11255, 11279), 'numpy.array', 'np.array', (['(72.77, -9.44)'], {}), '((72.77, -9.44))\n', (11263, 11279), True, 'import numpy as np\n'), ((11320, 11336), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (11328, 11336), True, 'import numpy as np\n'), ((11381, 11397), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (11389, 11397), True, 'import numpy as np\n'), ((11577, 11602), 'numpy.array', 'np.array', (['(66.29, -16.77)'], {}), '((66.29, -16.77))\n', (11585, 11602), True, 'import numpy as np\n'), ((11637, 11653), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (11645, 11653), True, 'import numpy as np\n'), ((11692, 11708), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (11700, 11708), True, 'import numpy as np\n'), ((11742, 11756), 'numpy.deg2rad', 'np.deg2rad', (['(45)'], {}), '(45)\n', (11752, 11756), True, 'import numpy as np\n'), ((11855, 11880), 'numpy.array', 'np.array', (['(34.62, -11.01)'], {}), '((34.62, -11.01))\n', (11863, 11880), True, 'import numpy as np\n'), ((11918, 11934), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (11926, 11934), True, 'import numpy as np\n'), ((11976, 11992), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (11984, 11992), True, 'import numpy as np\n'), ((12029, 12044), 'numpy.deg2rad', 'np.deg2rad', (['(-45)'], {}), '(-45)\n', (12039, 12044), True, 
'import numpy as np\n'), ((12864, 12888), 'numpy.array', 'np.array', (['(84.7, -60.43)'], {}), '((84.7, -60.43))\n', (12872, 12888), True, 'import numpy as np\n'), ((12930, 12946), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (12938, 12946), True, 'import numpy as np\n'), ((12991, 13007), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (12999, 13007), True, 'import numpy as np\n'), ((13194, 13219), 'numpy.array', 'np.array', (['(73.39, -56.32)'], {}), '((73.39, -56.32))\n', (13202, 13219), True, 'import numpy as np\n'), ((13257, 13273), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (13265, 13273), True, 'import numpy as np\n'), ((13315, 13331), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (13323, 13331), True, 'import numpy as np\n'), ((13368, 13383), 'numpy.deg2rad', 'np.deg2rad', (['(-45)'], {}), '(-45)\n', (13378, 13383), True, 'import numpy as np\n'), ((14145, 14170), 'numpy.array', 'np.array', (['(61.59, -34.41)'], {}), '((61.59, -34.41))\n', (14153, 14170), True, 'import numpy as np\n'), ((14211, 14227), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (14219, 14227), True, 'import numpy as np\n'), ((14272, 14288), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (14280, 14288), True, 'import numpy as np\n'), ((14475, 14500), 'numpy.array', 'np.array', (['(73.39, -56.32)'], {}), '((73.39, -56.32))\n', (14483, 14500), True, 'import numpy as np\n'), ((14538, 14554), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (14546, 14554), True, 'import numpy as np\n'), ((14596, 14612), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (14604, 14612), True, 'import numpy as np\n'), ((14649, 14664), 'numpy.deg2rad', 'np.deg2rad', (['(-45)'], {}), '(-45)\n', (14659, 14664), True, 'import numpy as np\n'), ((15544, 15569), 'numpy.array', 'np.array', (['(33.07, -58.33)'], {}), '((33.07, -58.33))\n', (15552, 15569), True, 'import numpy as np\n'), ((15610, 15626), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (15618, 15626), True, 'import numpy as np\n'), ((15671, 15687), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (15679, 15687), True, 'import numpy as np\n'), ((15868, 15893), 'numpy.array', 'np.array', (['(43.62, -48.47)'], {}), '((43.62, -48.47))\n', (15876, 15893), True, 'import numpy as np\n'), ((15928, 15944), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (15936, 15944), True, 'import numpy as np\n'), ((15983, 15999), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (15991, 15999), True, 'import numpy as np\n'), ((16033, 16047), 'numpy.deg2rad', 'np.deg2rad', (['(45)'], {}), '(45)\n', (16043, 16047), True, 'import numpy as np\n'), ((16146, 16171), 'numpy.array', 'np.array', (['(73.39, -56.32)'], {}), '((73.39, -56.32))\n', (16154, 16171), True, 'import numpy as np\n'), ((16209, 16225), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (16217, 16225), True, 'import numpy as np\n'), ((16267, 16283), 'numpy.array', 'np.array', (['(0, 0)'], {}), '((0, 0))\n', (16275, 16283), True, 'import numpy as np\n'), ((16320, 16335), 'numpy.deg2rad', 'np.deg2rad', (['(-45)'], {}), '(-45)\n', (16330, 16335), True, 'import numpy as np\n'), ((2578, 2610), 'grit.occlusion_detection.occlusion_detection_geometry.OcclusionDetector2D.get_box', 'OcclusionDetector2D.get_box', (['ego'], {}), '(ego)\n', (2605, 2610), False, 'from grit.occlusion_detection.occlusion_detection_geometry import OcclusionDetector2D\n'), ((2310, 2344), 
'grit.occlusion_detection.occlusion_detection_geometry.OcclusionDetector2D.get_box', 'OcclusionDetector2D.get_box', (['state'], {}), '(state)\n', (2337, 2344), False, 'from grit.occlusion_detection.occlusion_detection_geometry import OcclusionDetector2D\n'), ((2506, 2540), 'grit.occlusion_detection.occlusion_detection_geometry.OcclusionDetector2D.get_box', 'OcclusionDetector2D.get_box', (['state'], {}), '(state)\n', (2533, 2540), False, 'from grit.occlusion_detection.occlusion_detection_geometry import OcclusionDetector2D\n')]
|
import glob
import json
import argparse
import os
import os.path as path
from functools import partial
from tqdm import tqdm
import pandas as pd
import numpy as np
import scipy
import plotnine as p9
from scipy.stats import bootstrap
from nlproar.dataset import SNLIDataset, SSTDataset, IMDBDataset, BabiDataset, MimicDataset
def ratio_confint(partial_df):
"""Implementes a ratio-confidence interval
The idea is to project to logits space, then assume a normal distribution,
    and then project back to the initial space.
Method proposed here: https://stats.stackexchange.com/questions/263516
"""
column_name = partial_df.loc[:, 'test_metric'].iat[0]
x = partial_df.loc[:, column_name].to_numpy()
mean = np.mean(x)
if np.all(x[0] == x):
lower = mean
upper = mean
else:
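        # Non-degenerate sample: bootstrap the mean with a 95% CI, seeding the RNG so results are reproducible.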
res = bootstrap((x, ), np.mean, confidence_level=0.95, random_state=np.random.default_rng(0))
lower = res.confidence_interval.low
upper = res.confidence_interval.high
return pd.Series({
'lower': lower,
'mean': mean,
'upper': upper,
'format': f'${mean:.0%}^{{+{upper-mean:.1%}}}_{{-{mean-lower:.1%}}}$'.replace('%', '\\%'),
'n': len(x)
})
def dataset_stats(Loader, cachedir):
dataset = Loader(cachedir=cachedir, model_type='rnn', num_workers=0)
dataset.prepare_data()
dataset.setup('fit')
dataset.setup('test')
summaries = {}
dataloaders = [
('train', dataset.train_dataloader()),
('val', dataset.val_dataloader()),
('test', dataset.test_dataloader())
]
for split_name, split_iter in dataloaders:
lengths = []
for batch in tqdm(split_iter, desc=f'Summarizing {split_name} split', leave=False):
lengths += batch.length.tolist()
summaries[split_name] = {
'length': np.mean(lengths),
'count': len(lengths),
}
return pd.Series({
'dataset': dataset.name,
'vocab_size': len(dataset.vocabulary),
'train_size': summaries['train']['count'],
'valid_size': summaries['val']['count'],
'test_size': summaries['test']['count'],
'avg_length': np.average(
[summary['length'] for summary in summaries.values()],
weights=[summary['count'] for summary in summaries.values()]
)
})
thisdir = path.dirname(path.realpath(__file__))
parser = argparse.ArgumentParser()
parser.add_argument('--persistent-dir',
action='store',
default=path.realpath(path.join(thisdir, '..')),
type=str,
help='Directory where all persistent data will be stored')
parser.add_argument('--stage',
action='store',
default='both',
type=str,
choices=['preprocess', 'plot', 'both'],
help='Which export stage should be performed. Mostly just useful for debugging.')
if __name__ == "__main__":
pd.set_option('display.max_rows', None)
args, unknown = parser.parse_known_args()
dataset_mapping = pd.DataFrame([
{'dataset': 'sst', 'dataset_pretty': 'SST', 'test_metric': 'f1_test', 'reference': '$81\\%$'},
{'dataset': 'snli', 'dataset_pretty': 'SNLI', 'test_metric': 'f1_test', 'reference': '$88\\%$'},
{'dataset': 'imdb', 'dataset_pretty': 'IMDB', 'test_metric': 'f1_test', 'reference': '$78\\%$'},
{'dataset': 'mimic-a', 'dataset_pretty': 'Anemia', 'test_metric': 'f1_test', 'reference': '$92\\%$'},
{'dataset': 'mimic-d', 'dataset_pretty': 'Diabetes', 'test_metric': 'f1_test', 'reference': '$79\\%$'},
{'dataset': 'babi-1', 'dataset_pretty': 'bAbI-1', 'test_metric': 'acc_test', 'reference': '$100\\%$'},
{'dataset': 'babi-2', 'dataset_pretty': 'bAbI-2', 'test_metric': 'acc_test', 'reference': '$48\\%$'},
{'dataset': 'babi-3', 'dataset_pretty': 'bAbI-3', 'test_metric': 'acc_test', 'reference': '$62\\%$'}
])
model_mapping = pd.DataFrame([
{'model_type': 'rnn', 'model_type_pretty': 'BiLSTM-Attention'},
{'model_type': 'roberta', 'model_type_pretty': 'RoBERTa'}
])
datasets = {
'sst': SSTDataset,
'snli': SNLIDataset,
'imdb': IMDBDataset,
'babi-1': partial(BabiDataset, task=1),
'babi-2': partial(BabiDataset, task=2),
'babi-3': partial(BabiDataset, task=3),
'mimic-d': partial(MimicDataset, subset='diabetes', mimicdir=f'{args.persistent_dir}/mimic'),
'mimic-a': partial(MimicDataset, subset='anemia', mimicdir=f'{args.persistent_dir}/mimic'),
}
if args.stage in ['both', 'preprocess']:
# Read JSON files into dataframe
results = []
for file in tqdm(glob.glob(f'{args.persistent_dir}/results/roar/*_s-[0-9].json'),
desc='Loading .json files'):
with open(file, 'r') as fp:
try:
results.append(json.load(fp))
except json.decoder.JSONDecodeError:
print(f'{file} has a format error')
results_df = pd.DataFrame(results)
# Summarize each dataset
summaries = []
for dataset_loader in tqdm(datasets.values(), desc='Summarizing datasets'):
summaries.append(dataset_stats(dataset_loader, cachedir=args.persistent_dir + '/cache'))
summaries_df = pd.DataFrame(summaries)
df = (results_df
.merge(dataset_mapping, on='dataset')
.groupby(['dataset', 'dataset_pretty', 'reference', 'model_type'])
.apply(ratio_confint)
.reset_index()
.merge(summaries_df, on='dataset')
.merge(model_mapping, on='model_type')
.drop(['lower', 'upper', 'n', 'mean', 'dataset', 'model_type'], axis=1)
)
if args.stage in ['preprocess']:
os.makedirs(f'{args.persistent_dir}/pandas', exist_ok=True)
df.to_pickle(f'{args.persistent_dir}/pandas/dataset.pd.pkl.xz')
if args.stage in ['plot']:
df = pd.read_pickle(f'{args.persistent_dir}/pandas/dataset.pd.pkl.xz')
if args.stage in ['both', 'plot']:
print(df)
print(df
.reset_index()
.rename(columns={
'dataset_pretty': 'Dataset',
'format': 'Faithfulness'
})
.pivot(
index=['Dataset'],
columns='model_type_pretty',
values='Faithfulness'
)
.style.to_latex()
)
print(df
.reset_index()
.rename(columns={
'dataset_pretty': 'Dataset',
'format': 'Faithfulness'
})
.pivot(
index=['Dataset', 'train_size', 'valid_size', 'test_size', 'reference'],
columns='model_type_pretty',
values='Faithfulness'
)
.style.to_latex()
)
|
[
"pandas.DataFrame",
"functools.partial",
"tqdm.tqdm",
"json.load",
"argparse.ArgumentParser",
"os.makedirs",
"os.path.join",
"os.path.realpath",
"numpy.random.default_rng",
"numpy.mean",
"glob.glob",
"pandas.read_pickle",
"pandas.set_option",
"numpy.all"
] |
[((2434, 2459), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2457, 2459), False, 'import argparse\n'), ((737, 747), 'numpy.mean', 'np.mean', (['x'], {}), '(x)\n', (744, 747), True, 'import numpy as np\n'), ((756, 773), 'numpy.all', 'np.all', (['(x[0] == x)'], {}), '(x[0] == x)\n', (762, 773), True, 'import numpy as np\n'), ((2400, 2423), 'os.path.realpath', 'path.realpath', (['__file__'], {}), '(__file__)\n', (2413, 2423), True, 'import os.path as path\n'), ((3041, 3080), 'pandas.set_option', 'pd.set_option', (['"""display.max_rows"""', 'None'], {}), "('display.max_rows', None)\n", (3054, 3080), True, 'import pandas as pd\n'), ((3150, 4014), 'pandas.DataFrame', 'pd.DataFrame', (["[{'dataset': 'sst', 'dataset_pretty': 'SST', 'test_metric': 'f1_test',\n 'reference': '$81\\\\%$'}, {'dataset': 'snli', 'dataset_pretty': 'SNLI',\n 'test_metric': 'f1_test', 'reference': '$88\\\\%$'}, {'dataset': 'imdb',\n 'dataset_pretty': 'IMDB', 'test_metric': 'f1_test', 'reference':\n '$78\\\\%$'}, {'dataset': 'mimic-a', 'dataset_pretty': 'Anemia',\n 'test_metric': 'f1_test', 'reference': '$92\\\\%$'}, {'dataset':\n 'mimic-d', 'dataset_pretty': 'Diabetes', 'test_metric': 'f1_test',\n 'reference': '$79\\\\%$'}, {'dataset': 'babi-1', 'dataset_pretty':\n 'bAbI-1', 'test_metric': 'acc_test', 'reference': '$100\\\\%$'}, {\n 'dataset': 'babi-2', 'dataset_pretty': 'bAbI-2', 'test_metric':\n 'acc_test', 'reference': '$48\\\\%$'}, {'dataset': 'babi-3',\n 'dataset_pretty': 'bAbI-3', 'test_metric': 'acc_test', 'reference':\n '$62\\\\%$'}]"], {}), "([{'dataset': 'sst', 'dataset_pretty': 'SST', 'test_metric':\n 'f1_test', 'reference': '$81\\\\%$'}, {'dataset': 'snli',\n 'dataset_pretty': 'SNLI', 'test_metric': 'f1_test', 'reference':\n '$88\\\\%$'}, {'dataset': 'imdb', 'dataset_pretty': 'IMDB', 'test_metric':\n 'f1_test', 'reference': '$78\\\\%$'}, {'dataset': 'mimic-a',\n 'dataset_pretty': 'Anemia', 'test_metric': 'f1_test', 'reference':\n '$92\\\\%$'}, {'dataset': 'mimic-d', 'dataset_pretty': 'Diabetes',\n 'test_metric': 'f1_test', 'reference': '$79\\\\%$'}, {'dataset': 'babi-1',\n 'dataset_pretty': 'bAbI-1', 'test_metric': 'acc_test', 'reference':\n '$100\\\\%$'}, {'dataset': 'babi-2', 'dataset_pretty': 'bAbI-2',\n 'test_metric': 'acc_test', 'reference': '$48\\\\%$'}, {'dataset':\n 'babi-3', 'dataset_pretty': 'bAbI-3', 'test_metric': 'acc_test',\n 'reference': '$62\\\\%$'}])\n", (3162, 4014), True, 'import pandas as pd\n'), ((4058, 4200), 'pandas.DataFrame', 'pd.DataFrame', (["[{'model_type': 'rnn', 'model_type_pretty': 'BiLSTM-Attention'}, {\n 'model_type': 'roberta', 'model_type_pretty': 'RoBERTa'}]"], {}), "([{'model_type': 'rnn', 'model_type_pretty': 'BiLSTM-Attention'\n }, {'model_type': 'roberta', 'model_type_pretty': 'RoBERTa'}])\n", (4070, 4200), True, 'import pandas as pd\n'), ((1696, 1765), 'tqdm.tqdm', 'tqdm', (['split_iter'], {'desc': 'f"""Summarizing {split_name} split"""', 'leave': '(False)'}), "(split_iter, desc=f'Summarizing {split_name} split', leave=False)\n", (1700, 1765), False, 'from tqdm import tqdm\n'), ((4339, 4367), 'functools.partial', 'partial', (['BabiDataset'], {'task': '(1)'}), '(BabiDataset, task=1)\n', (4346, 4367), False, 'from functools import partial\n'), ((4387, 4415), 'functools.partial', 'partial', (['BabiDataset'], {'task': '(2)'}), '(BabiDataset, task=2)\n', (4394, 4415), False, 'from functools import partial\n'), ((4435, 4463), 'functools.partial', 'partial', (['BabiDataset'], {'task': '(3)'}), '(BabiDataset, task=3)\n', (4442, 4463), False, 'from 
functools import partial\n'), ((4484, 4570), 'functools.partial', 'partial', (['MimicDataset'], {'subset': '"""diabetes"""', 'mimicdir': 'f"""{args.persistent_dir}/mimic"""'}), "(MimicDataset, subset='diabetes', mimicdir=\n f'{args.persistent_dir}/mimic')\n", (4491, 4570), False, 'from functools import partial\n'), ((4586, 4665), 'functools.partial', 'partial', (['MimicDataset'], {'subset': '"""anemia"""', 'mimicdir': 'f"""{args.persistent_dir}/mimic"""'}), "(MimicDataset, subset='anemia', mimicdir=f'{args.persistent_dir}/mimic')\n", (4593, 4665), False, 'from functools import partial\n'), ((5165, 5186), 'pandas.DataFrame', 'pd.DataFrame', (['results'], {}), '(results)\n', (5177, 5186), True, 'import pandas as pd\n'), ((5452, 5475), 'pandas.DataFrame', 'pd.DataFrame', (['summaries'], {}), '(summaries)\n', (5464, 5475), True, 'import pandas as pd\n'), ((5930, 5989), 'os.makedirs', 'os.makedirs', (['f"""{args.persistent_dir}/pandas"""'], {'exist_ok': '(True)'}), "(f'{args.persistent_dir}/pandas', exist_ok=True)\n", (5941, 5989), False, 'import os\n'), ((6106, 6171), 'pandas.read_pickle', 'pd.read_pickle', (['f"""{args.persistent_dir}/pandas/dataset.pd.pkl.xz"""'], {}), "(f'{args.persistent_dir}/pandas/dataset.pd.pkl.xz')\n", (6120, 6171), True, 'import pandas as pd\n'), ((1869, 1885), 'numpy.mean', 'np.mean', (['lengths'], {}), '(lengths)\n', (1876, 1885), True, 'import numpy as np\n'), ((2578, 2602), 'os.path.join', 'path.join', (['thisdir', '""".."""'], {}), "(thisdir, '..')\n", (2587, 2602), True, 'import os.path as path\n'), ((4806, 4869), 'glob.glob', 'glob.glob', (['f"""{args.persistent_dir}/results/roar/*_s-[0-9].json"""'], {}), "(f'{args.persistent_dir}/results/roar/*_s-[0-9].json')\n", (4815, 4869), False, 'import glob\n'), ((903, 927), 'numpy.random.default_rng', 'np.random.default_rng', (['(0)'], {}), '(0)\n', (924, 927), True, 'import numpy as np\n'), ((5020, 5033), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (5029, 5033), False, 'import json\n')]
|
from django.urls import path
from . import views
urlpatterns = [
path('generalchatroom/', views.index, name='index'),
path('generalchatroom/<str:room_name>/', views.room, name='room'),
path('show_Message/', views.show_Message, name='show_Message'),
]
|
[
"django.urls.path"
] |
[((71, 122), 'django.urls.path', 'path', (['"""generalchatroom/"""', 'views.index'], {'name': '"""index"""'}), "('generalchatroom/', views.index, name='index')\n", (75, 122), False, 'from django.urls import path\n'), ((128, 193), 'django.urls.path', 'path', (['"""generalchatroom/<str:room_name>/"""', 'views.room'], {'name': '"""room"""'}), "('generalchatroom/<str:room_name>/', views.room, name='room')\n", (132, 193), False, 'from django.urls import path\n'), ((199, 261), 'django.urls.path', 'path', (['"""show_Message/"""', 'views.show_Message'], {'name': '"""show_Message"""'}), "('show_Message/', views.show_Message, name='show_Message')\n", (203, 261), False, 'from django.urls import path\n')]
|
"""
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import s1ap_types
import s1ap_wrapper
class TestSendErrorIndForDlNasWithAuthReq(unittest.TestCase):
"""Test sending of error indication for DL NAS message
carrying authentication request
"""
def setUp(self):
"""Initialize"""
self._s1ap_wrapper = s1ap_wrapper.TestWrapper()
def tearDown(self):
"""Cleanup"""
self._s1ap_wrapper.cleanup()
def test_send_error_ind_for_dl_nas_with_auth_req(self):
"""Send error indication after receiving authentication request"""
self._s1ap_wrapper.configIpBlock()
self._s1ap_wrapper.configUEDevice(1)
req = self._s1ap_wrapper.ue_req
attach_req = s1ap_types.ueAttachRequest_t()
attach_req.ue_Id = req.ue_id
sec_ctxt = s1ap_types.TFW_CREATE_NEW_SECURITY_CONTEXT
id_type = s1ap_types.TFW_MID_TYPE_IMSI
eps_type = s1ap_types.TFW_EPS_ATTACH_TYPE_EPS_ATTACH
attach_req.mIdType = id_type
attach_req.epsAttachType = eps_type
attach_req.useOldSecCtxt = sec_ctxt
self._s1ap_wrapper._s1_util.issue_cmd(
s1ap_types.tfwCmd.UE_ATTACH_REQUEST, attach_req,
)
print("************************* Sent attach request")
response = self._s1ap_wrapper.s1_util.get_response()
self.assertEqual(
response.msg_type, s1ap_types.tfwCmd.UE_AUTH_REQ_IND.value,
)
print("************************* Received authentication request")
# Send error indication
error_ind = s1ap_types.fwNbErrIndMsg_t()
# isUeAssoc flag to include optional MME_UE_S1AP_ID and eNB_UE_S1AP_ID
error_ind.isUeAssoc = True
error_ind.ue_Id = req.ue_id
error_ind.cause.pres = True
# Radio network causeType = 0
error_ind.cause.causeType = 0
# causeVal - Unknown-pair-ue-s1ap-id
error_ind.cause.causeVal = 15
print("*** Sending error indication ***")
self._s1ap_wrapper._s1_util.issue_cmd(
s1ap_types.tfwCmd.ENB_ERR_IND_MSG, error_ind,
)
# Context release
response = self._s1ap_wrapper.s1_util.get_response()
self.assertEqual(
response.msg_type, s1ap_types.tfwCmd.UE_CTX_REL_IND.value,
)
print("************************* Received UE_CTX_REL_IND")
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"s1ap_types.fwNbErrIndMsg_t",
"s1ap_types.ueAttachRequest_t",
"s1ap_wrapper.TestWrapper"
] |
[((2849, 2864), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2862, 2864), False, 'import unittest\n'), ((775, 801), 's1ap_wrapper.TestWrapper', 's1ap_wrapper.TestWrapper', ([], {}), '()\n', (799, 801), False, 'import s1ap_wrapper\n'), ((1173, 1203), 's1ap_types.ueAttachRequest_t', 's1ap_types.ueAttachRequest_t', ([], {}), '()\n', (1201, 1203), False, 'import s1ap_types\n'), ((2015, 2043), 's1ap_types.fwNbErrIndMsg_t', 's1ap_types.fwNbErrIndMsg_t', ([], {}), '()\n', (2041, 2043), False, 'import s1ap_types\n')]
|
from django.core.exceptions import ValidationError
from constants.account_strings import AccountStrings
from django.db import models
from django.conf import settings
from country.models import City
from django.db.models.signals import post_delete
from django.dispatch import receiver
class ParentProfile(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
primary_key=True,
verbose_name=AccountStrings.ParentProfileStrings.user_verbose_name,
related_name="user_parent")
city = models.ForeignKey(
"country.City",
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name=AccountStrings.ParentProfileStrings.city_verbose_name,
related_name="city_parent_profiles")
profession = models.CharField(max_length=500,
null=True,
blank=True,
verbose_name=AccountStrings.
ParentProfileStrings.profession_verbose_name)
class Meta:
verbose_name = AccountStrings.ParentProfileStrings.meta_verbose_name
verbose_name_plural = AccountStrings.ParentProfileStrings.meta_verbose_name_plural
@property
def get_full_name(self):
return f"{self.user.first_name} {self.user.last_name}"
def __str__(self):
return self.get_full_name
def clean(self) -> None:
"""
This method will check if the user type is a parent during creation.
"""
if self.user.user_type != 3:
raise ValidationError(AccountStrings.ParentProfileStrings.user_type_error)
|
[
"django.db.models.ForeignKey",
"django.db.models.OneToOneField",
"django.db.models.CharField",
"django.core.exceptions.ValidationError"
] |
[((331, 526), 'django.db.models.OneToOneField', 'models.OneToOneField', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.CASCADE', 'primary_key': '(True)', 'verbose_name': 'AccountStrings.ParentProfileStrings.user_verbose_name', 'related_name': '"""user_parent"""'}), "(settings.AUTH_USER_MODEL, on_delete=models.CASCADE,\n primary_key=True, verbose_name=AccountStrings.ParentProfileStrings.\n user_verbose_name, related_name='user_parent')\n", (351, 526), False, 'from django.db import models\n'), ((570, 767), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""country.City"""'], {'on_delete': 'models.SET_NULL', 'null': '(True)', 'blank': '(True)', 'verbose_name': 'AccountStrings.ParentProfileStrings.city_verbose_name', 'related_name': '"""city_parent_profiles"""'}), "('country.City', on_delete=models.SET_NULL, null=True,\n blank=True, verbose_name=AccountStrings.ParentProfileStrings.\n city_verbose_name, related_name='city_parent_profiles')\n", (587, 767), False, 'from django.db import models\n'), ((825, 959), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)', 'null': '(True)', 'blank': '(True)', 'verbose_name': 'AccountStrings.ParentProfileStrings.profession_verbose_name'}), '(max_length=500, null=True, blank=True, verbose_name=\n AccountStrings.ParentProfileStrings.profession_verbose_name)\n', (841, 959), False, 'from django.db import models\n'), ((1628, 1696), 'django.core.exceptions.ValidationError', 'ValidationError', (['AccountStrings.ParentProfileStrings.user_type_error'], {}), '(AccountStrings.ParentProfileStrings.user_type_error)\n', (1643, 1696), False, 'from django.core.exceptions import ValidationError\n')]
|
import setuptools
from os.path import dirname, join
here = dirname(__file__)
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="algo-trader",
version="2.0.4",
author="<NAME>",
author_email="<EMAIL>",
description="Trade execution engine to process API data and transmit"
" orders to Bitmex and other brokers.",
long_description=open(join(here, 'README.md')).read(),
long_description_content_type='text/markdown',
url="https://github.com/dignitas123/algo_trader",
install_requires=['bitmex'],
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
entry_points={
'console_scripts': [
'algotrader=algo_trader.startbot:run',
],
}
)
|
[
"os.path.dirname",
"os.path.join",
"setuptools.find_packages"
] |
[((60, 77), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (67, 77), False, 'from os.path import dirname, join\n'), ((589, 615), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (613, 615), False, 'import setuptools\n'), ((405, 428), 'os.path.join', 'join', (['here', '"""README.md"""'], {}), "(here, 'README.md')\n", (409, 428), False, 'from os.path import dirname, join\n')]
|
import pybullet as p
import time
import math
p.connect(p.GUI)
useMaximalCoordinates = False
p.setGravity(0, 0, -10)
plane = p.loadURDF("plane.urdf", [0, 0, -1], useMaximalCoordinates=useMaximalCoordinates)
p.setRealTimeSimulation(0)
velocity = 1
num = 40
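# Each of the num spheres below is launched horizontally in a different direction around a circle, at the given velocity.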
p.configureDebugVisualizer(p.COV_ENABLE_GUI, 0)
p.configureDebugVisualizer(p.COV_ENABLE_RENDERING, 1) #disable this to make it faster
p.configureDebugVisualizer(p.COV_ENABLE_TINY_RENDERER, 0)
p.setPhysicsEngineParameter(enableConeFriction=1)
for i in range(num):
print("progress:", i, num)
x = velocity * math.sin(2. * 3.1415 * float(i) / num)
y = velocity * math.cos(2. * 3.1415 * float(i) / num)
print("velocity=", x, y)
sphere = p.loadURDF("sphere_small_zeroinertia.urdf",
flags=p.URDF_USE_INERTIA_FROM_FILE,
useMaximalCoordinates=useMaximalCoordinates)
p.changeDynamics(sphere, -1, lateralFriction=0.02)
#p.changeDynamics(sphere,-1,rollingFriction=10)
p.changeDynamics(sphere, -1, linearDamping=0)
p.changeDynamics(sphere, -1, angularDamping=0)
p.resetBaseVelocity(sphere, linearVelocity=[x, y, 0])
prevPos = [0, 0, 0]
for i in range(2048):
p.stepSimulation()
pos = p.getBasePositionAndOrientation(sphere)[0]
if (i & 64):
p.addUserDebugLine(prevPos, pos, [1, 0, 0], 1)
prevPos = pos
p.configureDebugVisualizer(p.COV_ENABLE_RENDERING, 1)
while (1):
time.sleep(0.01)
|
[
"pybullet.setRealTimeSimulation",
"pybullet.addUserDebugLine",
"pybullet.stepSimulation",
"pybullet.setGravity",
"pybullet.changeDynamics",
"pybullet.configureDebugVisualizer",
"pybullet.getBasePositionAndOrientation",
"pybullet.resetBaseVelocity",
"time.sleep",
"pybullet.setPhysicsEngineParameter",
"pybullet.connect",
"pybullet.loadURDF"
] |
[((46, 62), 'pybullet.connect', 'p.connect', (['p.GUI'], {}), '(p.GUI)\n', (55, 62), True, 'import pybullet as p\n'), ((94, 117), 'pybullet.setGravity', 'p.setGravity', (['(0)', '(0)', '(-10)'], {}), '(0, 0, -10)\n', (106, 117), True, 'import pybullet as p\n'), ((126, 212), 'pybullet.loadURDF', 'p.loadURDF', (['"""plane.urdf"""', '[0, 0, -1]'], {'useMaximalCoordinates': 'useMaximalCoordinates'}), "('plane.urdf', [0, 0, -1], useMaximalCoordinates=\n useMaximalCoordinates)\n", (136, 212), True, 'import pybullet as p\n'), ((209, 235), 'pybullet.setRealTimeSimulation', 'p.setRealTimeSimulation', (['(0)'], {}), '(0)\n', (232, 235), True, 'import pybullet as p\n'), ((259, 306), 'pybullet.configureDebugVisualizer', 'p.configureDebugVisualizer', (['p.COV_ENABLE_GUI', '(0)'], {}), '(p.COV_ENABLE_GUI, 0)\n', (285, 306), True, 'import pybullet as p\n'), ((307, 360), 'pybullet.configureDebugVisualizer', 'p.configureDebugVisualizer', (['p.COV_ENABLE_RENDERING', '(1)'], {}), '(p.COV_ENABLE_RENDERING, 1)\n', (333, 360), True, 'import pybullet as p\n'), ((394, 451), 'pybullet.configureDebugVisualizer', 'p.configureDebugVisualizer', (['p.COV_ENABLE_TINY_RENDERER', '(0)'], {}), '(p.COV_ENABLE_TINY_RENDERER, 0)\n', (420, 451), True, 'import pybullet as p\n'), ((452, 501), 'pybullet.setPhysicsEngineParameter', 'p.setPhysicsEngineParameter', ([], {'enableConeFriction': '(1)'}), '(enableConeFriction=1)\n', (479, 501), True, 'import pybullet as p\n'), ((1343, 1396), 'pybullet.configureDebugVisualizer', 'p.configureDebugVisualizer', (['p.COV_ENABLE_RENDERING', '(1)'], {}), '(p.COV_ENABLE_RENDERING, 1)\n', (1369, 1396), True, 'import pybullet as p\n'), ((704, 833), 'pybullet.loadURDF', 'p.loadURDF', (['"""sphere_small_zeroinertia.urdf"""'], {'flags': 'p.URDF_USE_INERTIA_FROM_FILE', 'useMaximalCoordinates': 'useMaximalCoordinates'}), "('sphere_small_zeroinertia.urdf', flags=p.\n URDF_USE_INERTIA_FROM_FILE, useMaximalCoordinates=useMaximalCoordinates)\n", (714, 833), True, 'import pybullet as p\n'), ((875, 925), 'pybullet.changeDynamics', 'p.changeDynamics', (['sphere', '(-1)'], {'lateralFriction': '(0.02)'}), '(sphere, -1, lateralFriction=0.02)\n', (891, 925), True, 'import pybullet as p\n'), ((978, 1023), 'pybullet.changeDynamics', 'p.changeDynamics', (['sphere', '(-1)'], {'linearDamping': '(0)'}), '(sphere, -1, linearDamping=0)\n', (994, 1023), True, 'import pybullet as p\n'), ((1026, 1072), 'pybullet.changeDynamics', 'p.changeDynamics', (['sphere', '(-1)'], {'angularDamping': '(0)'}), '(sphere, -1, angularDamping=0)\n', (1042, 1072), True, 'import pybullet as p\n'), ((1075, 1128), 'pybullet.resetBaseVelocity', 'p.resetBaseVelocity', (['sphere'], {'linearVelocity': '[x, y, 0]'}), '(sphere, linearVelocity=[x, y, 0])\n', (1094, 1128), True, 'import pybullet as p\n'), ((1411, 1427), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (1421, 1427), False, 'import time\n'), ((1180, 1198), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (1196, 1198), True, 'import pybullet as p\n'), ((1209, 1248), 'pybullet.getBasePositionAndOrientation', 'p.getBasePositionAndOrientation', (['sphere'], {}), '(sphere)\n', (1240, 1248), True, 'import pybullet as p\n'), ((1275, 1321), 'pybullet.addUserDebugLine', 'p.addUserDebugLine', (['prevPos', 'pos', '[1, 0, 0]', '(1)'], {}), '(prevPos, pos, [1, 0, 0], 1)\n', (1293, 1321), True, 'import pybullet as p\n')]
|
import torch
import torch_xla
import torch_xla.core.xla_model as xm
def detect_cuda_device_number():
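    # Returns the index of the current CUDA device, or -1 when CUDA is unavailable.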
return torch.cuda.current_device() if torch.cuda.is_available() else -1
def detect_tpu_device_number():
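    # Intended to return the XLA (TPU) device index, falling back to -1 when no device is reported.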
return xm.xla_device().index if xm.xla_device() else -1
|
[
"torch.cuda.current_device",
"torch.cuda.is_available",
"torch_xla.core.xla_model.xla_device"
] |
[((144, 169), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (167, 169), False, 'import torch\n'), ((113, 140), 'torch.cuda.current_device', 'torch.cuda.current_device', ([], {}), '()\n', (138, 140), False, 'import torch\n'), ((247, 262), 'torch_xla.core.xla_model.xla_device', 'xm.xla_device', ([], {}), '()\n', (260, 262), True, 'import torch_xla.core.xla_model as xm\n'), ((222, 237), 'torch_xla.core.xla_model.xla_device', 'xm.xla_device', ([], {}), '()\n', (235, 237), True, 'import torch_xla.core.xla_model as xm\n')]
|
"""Run validation test for CharacterPredictor."""
import os
from pathlib import Path
from time import time
import unittest
from text_recognizer.datasets import EmnistDataset
from text_recognizer.character_predictor import CharacterPredictor
os.environ["CUDA_VISIBLE_DEVICES"] = ""
SUPPORT_DIRNAME = Path(__file__).parents[0].resolve() / 'support' / 'emnist'
class TestEvaluateCharacterPredictor(unittest.TestCase):
def test_evaluate(self):
predictor = CharacterPredictor()
dataset = EmnistDataset()
dataset.load_or_generate_data()
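        # Time the evaluation; both accuracy and wall-clock time are asserted below.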
t = time()
metric = predictor.evaluate(dataset)
time_taken = time() - t
print(f'acc: {metric}, time_taken: {time_taken}')
self.assertGreater(metric, 0.6)
self.assertLess(time_taken, 10)
|
[
"text_recognizer.character_predictor.CharacterPredictor",
"text_recognizer.datasets.EmnistDataset",
"pathlib.Path",
"time.time"
] |
[((469, 489), 'text_recognizer.character_predictor.CharacterPredictor', 'CharacterPredictor', ([], {}), '()\n', (487, 489), False, 'from text_recognizer.character_predictor import CharacterPredictor\n'), ((508, 523), 'text_recognizer.datasets.EmnistDataset', 'EmnistDataset', ([], {}), '()\n', (521, 523), False, 'from text_recognizer.datasets import EmnistDataset\n'), ((576, 582), 'time.time', 'time', ([], {}), '()\n', (580, 582), False, 'from time import time\n'), ((649, 655), 'time.time', 'time', ([], {}), '()\n', (653, 655), False, 'from time import time\n'), ((302, 316), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (306, 316), False, 'from pathlib import Path\n')]
|
from slack import WebClient
import server.config as config
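# Shared Slack Web API client, authenticated with the token from the server configuration.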
slack_service = WebClient(token=config.SLACK_TOKEN)
|
[
"slack.WebClient"
] |
[((77, 112), 'slack.WebClient', 'WebClient', ([], {'token': 'config.SLACK_TOKEN'}), '(token=config.SLACK_TOKEN)\n', (86, 112), False, 'from slack import WebClient\n')]
|
#!/home/adam/Documents/revkit-1.3/python
#!/usr/bin/python
# RevKit: A Toolkit for Reversible Circuit Design (www.revkit.org)
# Copyright (C) 2009-2011 The RevKit Developers <<EMAIL>>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os, sys
sys.path.append(os.path.dirname(sys.path[0]))
from revkit import *
from revkitui import *
from PyQt4.QtCore import *
from PyQt4.QtGui import *
class InfoDialog( QDialog ):
def __init__( self, circ, parent ):
QDialog.__init__( self, parent, Qt.Dialog )
self.setWindowTitle( 'Circuit details' )
self.resize( 20, 20 )
self.setLayout( QVBoxLayout() )
self.layout().setMargin( 0 )
widget = QWidget()
layout = QFormLayout()
widget.setLayout( layout )
layout.addRow( QLabel( 'Gate count:', widget ), QLabel( str( costs( circ, gate_costs() ) ), widget ) )
layout.addRow( QLabel( 'Line count:', widget ), QLabel( str( costs( circ, line_costs() ) ), widget ) )
layout.addRow( QLabel( 'Quantum cost:', widget ), QLabel( str( costs( circ, quantum_costs() ) ), widget ) )
layout.addRow( QLabel( 'Transistor cost:', widget ), QLabel( str( costs( circ, transistor_costs() ) ), widget ) )
widget2 = QWidget()
widget2.setLayout( QHBoxLayout() )
widget2.layout().addStretch()
button = QPushButton( 'Close' )
button.setAutoDefault( True )
widget2.layout().addWidget( button )
self.connect( button, SIGNAL( 'clicked()' ), self.close )
self.layout().addWidget( widget )
self.layout().addWidget( widget2 )
class AboutDialog( QDialog ):
def __init__( self, parent ):
QDialog.__init__( self, parent, Qt.Dialog )
self.setWindowTitle( 'About RevKit Viewer' )
self.resize( 20, 20 )
self.setLayout( QVBoxLayout() )
self.layout().setMargin( 0 )
widget2 = QWidget()
widget2.setLayout( QHBoxLayout() )
widget2.layout().addStretch()
button = QPushButton( 'Close' )
button.setAutoDefault( True )
widget2.layout().addWidget( button )
self.connect( button, SIGNAL( 'clicked()' ), self.close )
self.layout().addWidget( QLabel( '(c) 2009-2011 by the RevKit Developers' ) )
self.layout().addWidget( widget2 )
class SmallerTreeView( QTreeView ):
def __init__( self, parent = None ):
QTreeView.__init__( self, parent )
def sizeHint( self ):
return QSize( 200, 200 )
class Viewer( QMainWindow ):
def __init__( self ):
QMainWindow.__init__( self )
self.setWindowTitle( "RevKit Viewer" )
self.setupDockWidgets()
self.setupActions()
self.setupMenuBar()
self.setupToolBar()
# CircuitView
self.setCentralWidget( QStackedWidget( self ) )
self.connect( self.centralWidget(), SIGNAL( 'currentChanged(int)' ), self.updateStatusBar )
self.setupStatusBar()
self.resize( 600, 300 )
def setupDockWidgets( self ):
self.hierarchyDock = QDockWidget( "Hierarchy", self )
self.hierarchyView = SmallerTreeView( self )
self.hierarchyView.setExpandsOnDoubleClick( False )
self.hierarchyDock.setWidget( self.hierarchyView )
self.hierarchyDock.setVisible( False )
self.hierarchyDock.setFeatures( QDockWidget.DockWidgetClosable )
self.addDockWidget( Qt.LeftDockWidgetArea, self.hierarchyDock )
# Actions
self.connect( self.hierarchyView, SIGNAL( 'doubleClicked(QModelIndex)' ), self.loadCircuitFromHierarchy )
def setupActions( self ):
path = os.path.realpath( os.path.abspath( __file__ ) )
path = path.replace( os.path.basename( __file__ ), 'icons/' )
self.openAction = QAction( QIcon( path + 'document-open.png' ), '&Open...', self )
self.openAction.setStatusTip( 'Open a circuit realization in RevLib format' )
self.imageAction = QAction( QIcon( path + 'image-x-generic.png' ), 'Save as &Image...', self )
self.imageAction.setStatusTip( 'Saves the circuit as an image file (PNG or JPG)' )
self.latexAction = QAction( QIcon( path + 'text-x-tex.png' ), 'Save as &LaTeX...', self )
self.latexAction.setStatusTip( 'Saves the LaTeX code to generate this circuit' )
self.exitAction = QAction( QIcon( path + 'application-exit.png' ), '&Quit', self )
self.exitAction.setStatusTip( 'Quits the program' )
self.infoAction = QAction( QIcon( path + 'dialog-information.png' ), '&Circuit details', self )
self.infoAction.setStatusTip( 'Opens a dialog with circuit information' )
self.specAction = QAction( QIcon( path + 'view-form-table.png' ), '&View truth table', self )
self.specAction.setStatusTip( 'Displays the full truth table of the circuit, obtained by simulation' )
self.partialAction = QAction( QIcon( path + 'view-form-table.png' ), '&View partial truth table', self )
self.partialAction.setStatusTip( 'Displays a truth table only for non-constant and non-garbage signals' )
self.aboutAction = QAction( QIcon( path + 'help-about.png' ), '&About', self )
self.aboutAction.setStatusTip( 'Displays information about the RevKit Viewer' )
# Dock Widgets
self.hierarchyDock.toggleViewAction().setIcon( QIcon( path + 'view-sidetree.png' ) )
self.connect( self.openAction, SIGNAL( 'triggered()' ), self.open )
self.connect( self.imageAction, SIGNAL( 'triggered()' ), self.saveImage )
self.connect( self.latexAction, SIGNAL( 'triggered()' ), self.saveLatex )
self.connect( self.exitAction, SIGNAL( 'triggered()' ), SLOT( 'close()' ) )
self.connect( self.infoAction, SIGNAL( 'triggered()' ), self.info )
self.connect( self.specAction, SIGNAL( 'triggered()' ), self.truthTable )
self.connect( self.partialAction, SIGNAL( 'triggered()' ), self.partialTable )
self.connect( self.aboutAction, SIGNAL( 'triggered()' ), self.about )
def setupMenuBar( self ):
menubar = self.menuBar()
file = menubar.addMenu( '&File' )
file.addAction( self.openAction )
file.addAction( self.imageAction )
file.addAction( self.latexAction )
file.addSeparator()
file.addAction( self.exitAction )
view = menubar.addMenu( '&View' )
view.addAction( self.infoAction )
view.addSeparator()
view.addAction( self.specAction )
view.addAction( self.partialAction )
help = menubar.addMenu( '&Help' )
help.addAction( self.aboutAction )
def setupToolBar( self ):
toolbar = self.addToolBar( 'Main' )
toolbar.setIconSize( QSize( 32, 32 ) )
toolbar.addAction( self.openAction )
toolbar.addAction( self.imageAction )
toolbar.addSeparator()
toolbar.addAction( self.infoAction )
toolbar.addAction( self.partialAction )
toolbarDock = QToolBar( self )
self.addToolBar( Qt.LeftToolBarArea, toolbarDock )
toolbarDock.setOrientation( Qt.Vertical )
toolbarDock.setMovable( False )
toolbarDock.addAction( self.hierarchyDock.toggleViewAction() )
def setupStatusBar( self ):
self.statusBar()
self.updateStatusBar()
zoom_widget = None # Pointer to the current zoom widget
def updateStatusBar( self ):
if self.zoom_widget is not None:
self.statusBar().removeWidget( self.zoom_widget )
self.zoom_widget = None
if self.centralWidget().currentWidget():
self.zoom_widget = self.centralWidget().currentWidget().zoomWidget()
self.statusBar().addPermanentWidget( self.zoom_widget )
self.zoom_widget.show()
def open( self ):
filename = str( QFileDialog.getOpenFileName( self, 'Open Realization', '', 'RevLib Realization (*.real)' ) )
if len( filename ):
self.openCircuitFromFilename( filename )
def openCircuitFromFilename( self, filename, load_hierarchy = True ):
circ = circuit()
read_realization( circ, filename )
self.openCircuit( circ )
def openCircuit( self, circ ):
# remove all views TODO make more efficient (also memory)
while self.centralWidget().count():
self.centralWidget().removeWidget( self.centralWidget().widget( 0 ) )
# Save this, since all the other circuits are references
self.circ = circ
# hierarchy
tree = hierarchy_tree()
circuit_hierarchy( circ, tree )
self.hierarchyView.setModel( HierarchyModel( tree ) )
self.hierarchyView.setColumnWidth( 0, 150 )
self.hierarchyView.resizeColumnToContents( 1 )
self.hierarchyCurrentIndex = self.hierarchyView.model().index( 0, 0 )
self.circuits = [ tree.node_circuit( i ) for i in range( tree.size() ) ]
for c in self.circuits:
view = CircuitView( c, self )
view.gateDoubleClicked.connect( self.slotGateDoubleClicked )
self.centralWidget().addWidget( view )
def saveImage( self ):
filename = QFileDialog.getSaveFileName( self, 'Save as Image', '', 'PNG image (*.png);;JPG image (*.jpg)' )
if not filename.isEmpty():
scene = self.centralWidget().currentWidget().scene()
pixmap = QPixmap( scene.width(), scene.height() )
painter = QPainter( pixmap )
scene.render( painter )
            painter.end()
            pixmap.save( filename )
def saveLatex( self ):
filename = QFileDialog.getSaveFileName( self, 'Save as LaTeX', '', 'LaTeX file (*.tex)' )
if not filename.isEmpty():
with open( str( filename ), 'w' ) as f:
f.write( create_image( self.circ ) )
def info( self ):
dialog = InfoDialog( self.circ, self )
dialog.exec_()
def truthTable( self ):
dialog = QDialog( self, Qt.Dialog )
dialog.setWindowTitle( 'Truth Table' )
spec = binary_truth_table()
flattened = circuit()
flatten_circuit( self.circ, flattened )
circuit_to_truth_table( flattened, spec )
n = self.circ.lines
table = QTableWidget( 2 ** n, 2 * n, dialog )
table.setHorizontalHeaderLabels( self.circ.inputs + self.circ.outputs )
table.setVerticalHeaderLabels( map( str, range( 0, 2 ** n ) ) )
table.setAlternatingRowColors( True )
table.setShowGrid( False )
row = 0
for entry in spec.entries:
valid = True
for c in range( 0, n ):
if not self.circ.constants[c] is None and entry[0][c] != self.circ.constants[c]:
valid = False
break
for col in range( 0, 2 * n ):
item = QTableWidgetItem( '1' if ( entry[0] + entry[1] )[col] else '0' )
flags = Qt.ItemIsSelectable
if valid and not ( col >= n and self.circ.garbage[col % n] ) and not ( col < n and not self.circ.constants[col] is None ):
flags |= Qt.ItemIsEnabled
item.setFlags( flags )
if col >= n and not self.circ.garbage[col % n]:
item.setBackground( Qt.lightGray )
table.setItem( row, col, item )
row += 1
table.resizeColumnsToContents()
dialog.setLayout( QVBoxLayout() )
dialog.layout().setMargin( 0 )
dialog.layout().addWidget( table )
dialog.exec_()
def partialTable( self ):
dialog = QDialog( self, Qt.Dialog )
dialog.setWindowTitle( 'Partial Truth Table' )
spec = binary_truth_table()
settings = properties()
settings.set_bool( "partial", True )
flattened = circuit()
flatten_circuit( self.circ, flattened )
circuit_to_truth_table( flattened, spec, py_partial_simulation_func( settings ) )
n = len( filter( lambda x: x is None, self.circ.constants ) )
m = len( filter( lambda x: not x, self.circ.garbage ) )
table = QTableWidget( 2 ** n, n + m, dialog )
input_labels = map( lambda x: x[0], filter( lambda x: x[1] is None, map( lambda x, y: [x,y], self.circ.inputs, self.circ.constants ) ) )
output_labels = map( lambda x: x[0], filter( lambda x: not x[1], map( lambda x, y: [x,y], self.circ.outputs, self.circ.garbage ) ) )
table.setHorizontalHeaderLabels( input_labels + output_labels )
table.setVerticalHeaderLabels( map( lambda x: "", range( 0, 2 ** n ) ) )
table.setAlternatingRowColors( True )
table.setShowGrid( False )
row = 0
for entry in spec.entries:
for col in range( 0, n + m ):
item = QTableWidgetItem( '1' if ( entry[0] + entry[1] )[col] else '0' )
item.setFlags( Qt.ItemIsSelectable | Qt.ItemIsEnabled )
if col >= n:
item.setBackground( Qt.lightGray )
table.setItem( row, col, item )
row += 1
table.resizeColumnsToContents()
dialog.setLayout( QVBoxLayout() )
dialog.layout().setMargin( 0 )
dialog.layout().addWidget( table )
dialog.exec_()
def about( self ):
dialog = AboutDialog( self )
dialog.exec_()
def loadCircuitFromHierarchy( self, index ):
self.hierarchyCurrentIndex = index
self.centralWidget().setCurrentIndex( index.internalId() )
def slotGateDoubleClicked( self, gate ):
if gate.type == gate_type.module:
rows = self.hierarchyView.model().rowCount( self.hierarchyCurrentIndex )
for r in range( rows ):
if str( self.hierarchyCurrentIndex.child( r, 0 ).data().toString() ) == gate.module_name:
self.centralWidget().setCurrentIndex( self.hierarchyCurrentIndex.child( r, 0 ).internalId() )
self.hierarchyCurrentIndex = self.hierarchyCurrentIndex.child( r, 0 )
return
if __name__ == '__main__':
a = QApplication([])
w = Viewer()
w.show()
if len( sys.argv ) == 2:
w.openCircuitFromFilename( sys.argv[1] )
sys.exit( a.exec_() )
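# Usage note (not in the original script): the viewer is typically launched as
#   python viewer.py [circuit.real]
# where the optional argument is a RevLib realization file that is opened at startup.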
|
[
"os.path.dirname",
"os.path.abspath",
"os.path.basename"
] |
[((854, 882), 'os.path.dirname', 'os.path.dirname', (['sys.path[0]'], {}), '(sys.path[0])\n', (869, 882), False, 'import os, sys\n'), ((4252, 4277), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (4267, 4277), False, 'import os, sys\n'), ((4311, 4337), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (4327, 4337), False, 'import os, sys\n')]
|
from django.contrib import admin
from src.jobs.models import Job
admin.site.register(Job)
|
[
"django.contrib.admin.site.register"
] |
[((67, 91), 'django.contrib.admin.site.register', 'admin.site.register', (['Job'], {}), '(Job)\n', (86, 91), False, 'from django.contrib import admin\n')]
|
import numpy as np
import json
from graphviz import Digraph
import pickle
import compare_functions
def remove_item(item_list, item):
if item in item_list:
item_list.remove(item)
return list(item_list)
def create_ngrams(trace, n):
#A function that returns a list of n-grams of a trace
return [trace[i:i+n] for i in range(len(trace)-n+1)]
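# For illustration: create_ngrams('abcd', 2) returns ['ab', 'bc', 'cd'];
# with n = 1 it simply yields the individual characters of the trace.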
class Build_Tree_Data(object):
def __init__(self, all_concepts, concept_problems, all_basic_components, problem_components, n = 1, data = {'traces': {},'traces_ngrams': {}}):
self.all_concepts = all_concepts
self.concept_problems = concept_problems
self.all_basic_components = all_basic_components
self.problem_components = problem_components
self.data = data
self.data['traces']['Root'] = ''
self.data['traces_ngrams']['Root'] = []
self.n = n
class Base_Tree(object):
def return_parents(self):
# a method to return a dictionary where the keys are node names and the values are a list of the names of the immediate parents of the node
pass
def return_all_ancestors(self):
#a method to return a dictionary where the keys are node names and the values are a list of the names of all the ancestors of the node
pass
def return_children(self):
        #a method to return a dictionary where the keys are node names and the values are a list of the names of the immediate children of the node
pass
def return_all_descendants(self):
        #a method to return a dictionary where the keys are node names and the values are a list of the names of all the descendants of the node
pass
def return_all_concepts(self):
#a method to return a list of all the possible concepts
pass
def return_all_basic_components(self):
#a method to return a list of all possible components
pass
def return_concept_problems(self):
#a method to return a dictionary where the keys are concept names and the values are a list of all problems corresponding to that concept
pass
def return_problem_components(self):
#a method to return a dictionary where the keys are problems and the values are a list of all components corresponding to that problem
pass
def save_tree(self):
#a method for saving the tree to file
pass
class Static_Tree(Base_Tree): #can either load in using a json string representation or rebuild from a dictionary of children
def __init__(self, tree_filename = None, children = None, all_concepts = None, concept_problems = None,
all_basic_components = None, problem_components = None):
if tree_filename is not None:
with open (tree_filename, "r") as text_file:
tree_json_str = text_file.readlines()[0]
self.children, self.all_descendants, self.parents, self.all_ancestors, \
self.all_concepts, self.concept_problems, self.all_basic_components, self.problem_components = json.loads(tree_json_str)
else:
self.children = children #dict - keys: concept, values: list of immediate children of the concept
self.all_concepts = all_concepts #list: list of all concepts, each item in the list must be hashable
self.concept_problems = concept_problems #dict keys: concept, values: list of problems corresponding to the concept
self.all_basic_components = all_basic_components #list: All basic components that make up each problem (if no shared components between problems, they can be the same as the list of all problems)
            self.problem_components = problem_components #dict: keys: problem, values: list of basic components the problem consists of
self.all_descendants = {}
self.parents = {}
self.all_ancestors = {}
self._calculate_all_descendants('Root')
self._calculate_parents()
for concept in self.all_concepts:
if len(self.children[concept]) == 0:
self._calculate_all_ancestors(concept)
def _calculate_all_descendants(self, concept):
if concept not in self.all_descendants:
all_descendants = set()
for child_concept in self.children[concept]:
all_descendants.update(self._calculate_all_descendants(child_concept))
all_descendants.add(child_concept)
self.all_descendants[concept] = list(all_descendants)
return self.all_descendants[concept]
def _calculate_parents(self):
for concept in self.all_concepts:
self.parents[concept] = []
for concept in self.all_concepts:
for child_concept in self.children[concept]:
if concept not in self.parents[child_concept]:
self.parents[child_concept].append(concept)
def _calculate_all_ancestors(self, concept):
if concept not in self.all_ancestors:
all_ancestors = set()
for parent_concept in self.parents[concept]:
all_ancestors.update(self._calculate_all_ancestors(parent_concept))
all_ancestors.add(parent_concept)
self.all_ancestors[concept] = list(all_ancestors)
return self.all_ancestors[concept]
def string_tree(self):
return json.dumps((
self.children,
self.all_descendants,
self.parents,
self.all_ancestors,
self.all_concepts,
self.concept_problems,
self.all_basic_components,
self.problem_components
))
def save_tree(self, tree_filename):
with open(tree_filename, "w") as text_file:
text_file.write(self.string_tree())
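    # Illustrative round-trip (not part of the original source): a tree written with
    # save_tree() can be reloaded later through the constructor's file path, e.g.
    #   tree.save_tree('tree.json')
    #   restored = Static_Tree(tree_filename = 'tree.json')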
def return_parents(self): #return the parents dict (a dictionary where the keys are node names and the values are a list of the names of the immediate parents of the node)
return self.parents
def return_all_ancestors(self): #return the all_ancestors dict (a dictionary where the keys are node names and the values are a list of the names of all the ancestors of the node)
return self.all_ancestors
def return_children(self): #return the children_names dict (a dictionary where the keys are node names and the values are a list of the names of the immediate childre of the node)
return self.children
def return_all_descendants(self): #return the all_descendants_names dict (a dictionary where the keys are node names and the values are a list of all the descendants parents of the node)
return self.all_descendants
def return_all_concepts(self):
return self.all_concepts
def return_all_basic_components(self):
#a method to return a list of all possible components
return self.all_basic_components
def return_concept_problems(self):
#a method to return a dictionary where the keys are concept names and the values are a list of all problems corresponding to that concept
return self.concept_problems
def return_problem_components(self):
#a method to return a dictionary where the keys are problems and the values are a list of all components corresponding to that problem
return self.problem_components
def add_edges_to_progression(self, progression_graph):
        #Add directed edges between parents and children to a graphviz graph for visualization purposes
        for node_name, node_children in self.children.items():
            for child_name in node_children:
                progression_graph.edge(node_name, child_name, constraint = 'true')
#Tree object for sorting concepts that adds items recursively
class Build_Tree(Base_Tree):
#a tree node, each node is the head of a subtree of its descendants
def __init__(self, tree_filename = None, name = None, data = None, comp_func = None, children = None, children_names = None, all_descendants_names = None, parent=None, verbose = False):
if tree_filename is not None:
alternate_tree = pickle.load(open(tree_filename, "rb" ))
self.name = alternate_tree.name
self.data = alternate_tree.data
self.parent = alternate_tree.parent
            self.children = alternate_tree.children
self.children_names = alternate_tree.children_names
self.all_descendants_names = alternate_tree.all_descendants_names
self.parents = alternate_tree.parents
self.all_ancestors = alternate_tree.all_ancestors
self.comp_func = alternate_tree.comp_func
self.verbose = alternate_tree.verbose
del alternate_tree
else:
self.name = name #String - name of the node
self.data = data #Build_Tree_Data object
self.parent = parent #Tree object - immediate parent node object
self.children = children #Dictionary - keys are the node names and the values are an array of node objects that are the immediate children of the key node
self.children_names = children_names #Dictionary - keys are the node names and values are an array of names of the immediate children of the key node
self.all_descendants_names = all_descendants_names #Dictionary - keys are the node names and values are an array of names of all the descendants of the key node
self.parents = None #Dictionary - the keys are the node names and values are an array of names of the immediate parents of the key node
self.all_ancestors = None #Dictionary - keys are the node names and values are an array of names of all the ancestors of the key node
self.comp_func = comp_func #Function - function for comparing the data of two concepts and determine which one is harder
#comp_func(A, B) Returns:
#1 if B is harder than A
#0 if neither is harder than the other
#-1 if A is harder than B
self.verbose = verbose #Boolean: Whether or not to print
if children == None:
self.children = {}
self.children_names = {}
self.all_descendants_names = {}
self.children['Root'] = []
self.children_names['Root'] = []
self.all_descendants_names['Root'] = set()
for concept_name in data.all_concepts:
self.children[concept_name] = []
self.children_names[concept_name] = []
self.all_descendants_names[concept_name] = set()
def _add_child(self, node):
#inserting a child into the subtree
if self.verbose:
print("entering add child")
if not(node.name in self.all_descendants_names[self.name]) and node.name != self.name: #check it is not already a descendant of the subtree it is being inserted into
if self.verbose:
print('add child - self_name: ' + self.name + ' child_name: '+ node.name)
self.children[self.name].append(node)
self.children_names[self.name].append(node.name)
self.all_descendants_names[self.name].add(node.name)
def _remove_child(self, node):
#remove a child from the subtree
if self.verbose:
print('remove child - child_name: ' + node.name + ' self_name: ' + self.name)
for index, child in enumerate(self.children[self.name]):
if child.name == node.name:
del self.children[self.name][index]
del self.children_names[self.name][index]
break
def _check_tree(self, node):
#check your sibling's children to see if they are also your children, if they are then add them to the list of your children too
for child in self.children[self.name]:
node.insert_node(child.name)
child._check_tree(node)
def insert_node(self, node_name):
concept_name = node_name
concept_trace = concept_name
if concept_name not in self.data.data['traces']:
self.data.data['traces'][concept_name] = concept_trace
prim_traces = create_ngrams(concept_trace, self.data.n)
self.data.data['traces_ngrams'][concept_name] = prim_traces
#insert a new node into your subtree recursively
if self.name != node_name:
difficulty = self.comp_func(self.name, node_name, self.data.data)
if self.verbose:
print('node_name: ' + node_name + ' self_name: ' + self.name + " difficulty: " + str(difficulty))
if difficulty == 1: #If the node is harder than you then it belongs somewhere in your subtree
if len(self.children[self.name]) == 0:
#If you have no children, then the child is your child
if self.verbose:
print('no children and harder so insert')
node = Build_Tree(name = node_name, data = self.data, children = self.children, children_names = self.children_names, all_descendants_names = self.all_descendants_names, parent = self, comp_func = self.comp_func, verbose = self.verbose)
self._add_child(node)
return 1 #return 1 for inserted
else:
#If you have children, check if the node is your children's child and try to insert it into your children's subtrees
temp_children = list(self.children[self.name])
total_harder = 0
for child in temp_children:
total_harder = total_harder + child.insert_node(node_name)
if total_harder == 0: # if child was not inserted, then it is your child
if self.verbose:
print('not inserted, so insert')
node = Build_Tree(name = node_name, data = self.data, children = self.children, children_names = self.children_names, all_descendants_names = self.all_descendants_names, parent = self, comp_func = self.comp_func, verbose = self.verbose)
for child in temp_children:
child._check_tree(node)
self._add_child(node)
self.all_descendants_names[self.name].add(node_name)
return 1 #return 1 for inserted
elif difficulty == 0: #Cannot say one is more difficult than the other
return 0 #return 0 for not inserted
else: #difficulty == -1, means you are harder than the node so it is your parent
if self.verbose:
print('child is harder so add as parent')
node = Build_Tree(name = node_name, data = self.data, children = self.children, children_names = self.children_names, all_descendants_names = self.all_descendants_names, parent = self.parent, comp_func = self.comp_func, verbose = self.verbose)
#remove yourself from your parent
self.parent._remove_child(self)
#add the new node under your parent
for child in self.children[self.parent.name]:
child._check_tree(node)
self.parent._add_child(node)
self.parent = node
#reinsert yourself starting from your new parent
node.insert_node(self.name)
return 1 #return 1 for inserted
else:
return 1 #1 because the node was already inserted
def _add_parents(self, parents, all_ancestors):
        #Recursively record each node's immediate parents and full set of ancestors
if self.parent != None:
parents[self.name].add(self.parent.name)
all_ancestors[self.name].update(all_ancestors[self.parent.name])
all_ancestors[self.name].add(self.parent.name)
for child in self.children[self.name]:
child.parents = parents
child.all_ancestors = all_ancestors
child._add_parents(parents, all_ancestors)
def add_edges_to_progression(self, progression_graph):
        #Add directed edges between parents and children to a graphviz graph for visualization purposes
        for child_name, child_children in self.children.items():
            for child in child_children:
                progression_graph.edge(child_name, child.name, constraint = 'true')
def calculate_parents(self):
#calculate the parents of the nodes
if self.parents == None:
parents = {}
all_ancestors = {}
self.parents = parents
self.all_ancestors = all_ancestors
parents[self.name] = set()
all_ancestors[self.name] = set()
for child in self.all_descendants_names[self.name]:
parents[child] = set()
all_ancestors[child] = set()
self._add_parents(parents, all_ancestors)
def return_parents(self): #return the parents dict (a dictionary where the keys are node names and the values are a list of the names of the immediate parents of the node)
if self.parents == None:
self.calculate_parents()
return {key:remove_item(items_list, 'Root') for key, items_list in self.parents.items() if key != 'Root'}
def return_all_ancestors(self): #return the all_ancestors dict (a dictionary where the keys are node names and the values are a list of the names of all the ancestors of the node)
if self.parents == None:
self.calculate_parents()
return {key:remove_item(items_list, 'Root') for key, items_list in self.all_ancestors.items() if key != 'Root'}
def return_children(self): #return the children_names dict (a dictionary where the keys are node names and the values are a list of the names of the immediate childre of the node)
return self.children_names
def return_all_descendants(self): #return the all_descendants_names dict (a dictionary where the keys are node names and the values are a list of all the descendants parents of the node)
return {key:remove_item(items_list, 'Root') for key, items_list in self.parents.items() if key != 'Root'}
def print_tree(self, prepend_string=""):
print(prepend_string + self.name)
prepend_string=prepend_string+" "
for child in self.children[self.name]:
child.print_tree(prepend_string = prepend_string)
return
def return_all_concepts(self):
return self.data.all_concepts
def return_all_basic_components(self):
#a method to return a list of all possible components
return self.data.all_basic_components
def return_concept_problems(self):
#a method to return a dictionary where the keys are concept names and the values are a list of all problems corresponding to that concept
return self.data.concept_problems
def return_problem_components(self):
#a method to return a dictionary where the keys are problems and the values are a list of all components corresponding to that problem
return self.data.problem_components
def save_tree(self, tree_filename):
pickle.dump(self, open(tree_filename, "wb" ))
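# Illustrative usage sketch (not part of the original module). The comparison
# function, concept names and empty problem/component maps below are assumptions
# made up purely to show how Build_Tree_Data, Build_Tree and insert_node fit together.
if __name__ == '__main__':
    def example_comp_func(name_a, name_b, data):
        # B counts as harder than A if A's trace is a proper substring of B's trace
        trace_a, trace_b = data['traces'][name_a], data['traces'][name_b]
        if trace_a != trace_b and trace_a in trace_b:
            return 1
        if trace_a != trace_b and trace_b in trace_a:
            return -1
        return 0

    concepts = ['a', 'ab', 'abc']
    example_data = Build_Tree_Data(all_concepts = concepts,
                                    concept_problems = {c: [] for c in concepts},
                                    all_basic_components = [],
                                    problem_components = {})
    root = Build_Tree(name = 'Root', data = example_data, comp_func = example_comp_func)
    for concept in concepts:
        root.insert_node(concept)
    root.print_tree()  # expected nesting: Root > a > ab > abc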
|
[
"json.loads",
"json.dumps"
] |
[((5407, 5594), 'json.dumps', 'json.dumps', (['(self.children, self.all_descendants, self.parents, self.all_ancestors,\n self.all_concepts, self.concept_problems, self.all_basic_components,\n self.problem_components)'], {}), '((self.children, self.all_descendants, self.parents, self.\n all_ancestors, self.all_concepts, self.concept_problems, self.\n all_basic_components, self.problem_components))\n', (5417, 5594), False, 'import json\n'), ((3033, 3058), 'json.loads', 'json.loads', (['tree_json_str'], {}), '(tree_json_str)\n', (3043, 3058), False, 'import json\n')]
|
"""Initial Migration
Revision ID: 2d549589ee65
Revises:
Create Date: 2019-10-02 16:59:16.744510
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2d549589ee65'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('users',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=255), nullable=True),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('bio', sa.String(length=255), nullable=True),
sa.Column('profile_pic_path', sa.String(), nullable=True),
sa.Column('tel', sa.Integer(), nullable=True),
sa.Column('password_secure', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=False)
op.create_table('book',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=True),
sa.Column('summary', sa.String(), nullable=True),
sa.Column('category', sa.String(length=255), nullable=False),
sa.Column('location', sa.String(), nullable=True),
sa.Column('poster', sa.String(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_book_summary'), 'book', ['summary'], unique=False)
op.create_table('comments',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('book_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['book_id'], ['book.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('upvotes',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('upvote', sa.Integer(), nullable=True),
sa.Column('book_id', sa.Integer(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['book_id'], ['book.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('upvotes')
op.drop_table('comments')
op.drop_index(op.f('ix_book_summary'), table_name='book')
op.drop_table('book')
op.drop_index(op.f('ix_users_username'), table_name='users')
op.drop_index(op.f('ix_users_email'), table_name='users')
op.drop_table('users')
# ### end Alembic commands ###
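# Note (not in the original file): this migration is normally applied with
# `alembic upgrade head` and reverted with `alembic downgrade -1`.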
|
[
"alembic.op.drop_table",
"alembic.op.f",
"sqlalchemy.PrimaryKeyConstraint",
"sqlalchemy.Text",
"sqlalchemy.ForeignKeyConstraint",
"sqlalchemy.String",
"sqlalchemy.Integer"
] |
[((2545, 2569), 'alembic.op.drop_table', 'op.drop_table', (['"""upvotes"""'], {}), "('upvotes')\n", (2558, 2569), False, 'from alembic import op\n'), ((2574, 2599), 'alembic.op.drop_table', 'op.drop_table', (['"""comments"""'], {}), "('comments')\n", (2587, 2599), False, 'from alembic import op\n'), ((2666, 2687), 'alembic.op.drop_table', 'op.drop_table', (['"""book"""'], {}), "('book')\n", (2679, 2687), False, 'from alembic import op\n'), ((2819, 2841), 'alembic.op.drop_table', 'op.drop_table', (['"""users"""'], {}), "('users')\n", (2832, 2841), False, 'from alembic import op\n'), ((824, 853), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (847, 853), True, 'import sqlalchemy as sa\n'), ((880, 902), 'alembic.op.f', 'op.f', (['"""ix_users_email"""'], {}), "('ix_users_email')\n", (884, 902), False, 'from alembic import op\n'), ((957, 982), 'alembic.op.f', 'op.f', (['"""ix_users_username"""'], {}), "('ix_users_username')\n", (961, 982), False, 'from alembic import op\n'), ((1440, 1490), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['user_id']", "['users.id']"], {}), "(['user_id'], ['users.id'])\n", (1463, 1490), True, 'import sqlalchemy as sa\n'), ((1498, 1527), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1521, 1527), True, 'import sqlalchemy as sa\n'), ((1554, 1577), 'alembic.op.f', 'op.f', (['"""ix_book_summary"""'], {}), "('ix_book_summary')\n", (1558, 1577), False, 'from alembic import op\n'), ((1869, 1918), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['book_id']", "['book.id']"], {}), "(['book_id'], ['book.id'])\n", (1892, 1918), True, 'import sqlalchemy as sa\n'), ((1926, 1976), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['user_id']", "['users.id']"], {}), "(['user_id'], ['users.id'])\n", (1949, 1976), True, 'import sqlalchemy as sa\n'), ((1984, 2013), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2007, 2013), True, 'import sqlalchemy as sa\n'), ((2270, 2319), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['book_id']", "['book.id']"], {}), "(['book_id'], ['book.id'])\n", (2293, 2319), True, 'import sqlalchemy as sa\n'), ((2327, 2377), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['user_id']", "['users.id']"], {}), "(['user_id'], ['users.id'])\n", (2350, 2377), True, 'import sqlalchemy as sa\n'), ((2385, 2414), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2408, 2414), True, 'import sqlalchemy as sa\n'), ((2618, 2641), 'alembic.op.f', 'op.f', (['"""ix_book_summary"""'], {}), "('ix_book_summary')\n", (2622, 2641), False, 'from alembic import op\n'), ((2706, 2731), 'alembic.op.f', 'op.f', (['"""ix_users_username"""'], {}), "('ix_users_username')\n", (2710, 2731), False, 'from alembic import op\n'), ((2771, 2793), 'alembic.op.f', 'op.f', (['"""ix_users_email"""'], {}), "('ix_users_email')\n", (2775, 2793), False, 'from alembic import op\n'), ((416, 428), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (426, 428), True, 'import sqlalchemy as sa\n'), ((473, 494), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (482, 494), True, 'import sqlalchemy as sa\n'), ((535, 556), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (544, 556), True, 'import sqlalchemy as sa\n'), ((595, 616), 'sqlalchemy.String', 'sa.String', ([], {'length': 
'(255)'}), '(length=255)\n', (604, 616), True, 'import sqlalchemy as sa\n'), ((668, 679), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (677, 679), True, 'import sqlalchemy as sa\n'), ((718, 730), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (728, 730), True, 'import sqlalchemy as sa\n'), ((781, 802), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (790, 802), True, 'import sqlalchemy as sa\n'), ((1069, 1081), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1079, 1081), True, 'import sqlalchemy as sa\n'), ((1125, 1137), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1135, 1137), True, 'import sqlalchemy as sa\n'), ((1179, 1190), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (1188, 1190), True, 'import sqlalchemy as sa\n'), ((1233, 1244), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (1242, 1244), True, 'import sqlalchemy as sa\n'), ((1288, 1309), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (1297, 1309), True, 'import sqlalchemy as sa\n'), ((1354, 1365), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (1363, 1365), True, 'import sqlalchemy as sa\n'), ((1407, 1418), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (1416, 1418), True, 'import sqlalchemy as sa\n'), ((1666, 1678), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1676, 1678), True, 'import sqlalchemy as sa\n'), ((1722, 1734), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1732, 1734), True, 'import sqlalchemy as sa\n'), ((1778, 1790), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1788, 1790), True, 'import sqlalchemy as sa\n'), ((1838, 1847), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (1845, 1847), True, 'import sqlalchemy as sa\n'), ((2071, 2083), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2081, 2083), True, 'import sqlalchemy as sa\n'), ((2126, 2138), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2136, 2138), True, 'import sqlalchemy as sa\n'), ((2181, 2193), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2191, 2193), True, 'import sqlalchemy as sa\n'), ((2236, 2248), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2246, 2248), True, 'import sqlalchemy as sa\n')]
|
import numpy as np
import matplotlib.pyplot as plt
# close all figures
plt.close('all')
years = np.array([1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014])
usaGDP = np.array([543300000000.,563300000000.,605100000000.,638600000000.,685800000000.,743700000000.,815000000000.,861700000000.,942500000000.,1019900000000.,1075884000000.,1167770000000.,1282449000000.,1428549000000.,1548825000000.,1688923000000.,1877587000000.,2085951000000.,2356571000000.,2632143000000.,2862505000000.,3210956000000.,3344991000000.,3638137000000.,4040693000000.,4346734000000.,4590155000000.,4870217000000.,5252629000000.,5657693000000.,5979589000000.,6174043000000.,6539299000000.,6878718000000.,7308755000000.,7664060000000.,8100201000000.,8608515000000.,9089168000000.,9660624000000.,10284779000000.,10621824000000.,10977514000000.,11510670000000.,12274928000000.,13093726000000.,13855888000000.,14477635000000.,14718582000000.,14418739000000.,14964372000000.,15517926000000.,16163158000000.,16768053000000.,17419000000000.])
# GDP data from the worldbank http://data.worldbank.org/indicator/NY.GDP.MKTP.CD/countries/US?display=graph
# CPI data from bureau of labor statistics http://data.bls.gov/pdq/SurveyOutputServlet
usaCPI = np.array([29.6, 29.9, 30.2, 30.6, 31.0, 31.5, 32.4, 33.4, 34.8, 36.7, 38.8, 40.5, 41.8, 44.4, 49.3, 53.8, 56.9, 60.6, 65.2, 72.6, 82.4, 90.9, 96.5, 99.6, 103.9, 107.6, 109.6, 113.6, 118.3, 124.0, 130.7, 136.2, 140.3, 144.5, 148.2, 152.4, 156.9, 160.5, 163.0, 166.6, 172.2, 177.1, 179.9, 184.0, 188.9, 195.3, 201.6, 207.342, 215.303, 214.537, 218.056, 224.939, 229.594, 232.957, 236.736])
plt.figure()
plt.plot(years, usaGDP)
plt.xlabel('Year')
plt.ylabel('GDP in Current USD')
plt.grid(True)
plt.show()
# Adjust GDP for 1960 USD
usaGDP1960 = usaGDP / (usaCPI / usaCPI[0])
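# e.g. the 2014 nominal GDP is divided by 236.736 / 29.6, expressing it in 1960 dollars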
plt.figure()
plt.plot(years, usaGDP1960)
plt.xlabel('Year')
plt.ylabel('GDP adjusted for inflation in 1960 USD')
plt.grid(True)
plt.show()
# Adjust GDP for 2014 USD
usaGDP2014 = usaGDP / (usaCPI / usaCPI[-1])
plt.figure()
plt.plot(years, usaGDP2014)
plt.xlabel('Year')
plt.ylabel('GDP adjusted for inflation in 2014 USD')
plt.grid(True)
plt.show()
# population from world bank
usaPop = np.array([180671000,183691000,186538000,189242000,191889000,194303000,196560000,198712000,200706000,202677000,205052000,207661000,209896000,211909000,213854000,215973000,218035000,220239000,222585000,225055000,227225000,229466000,231664000,233792000,235825000,237924000,240133000,242289000,244499000,246819000,249623000,252981000,256514000,259919000,263126000,266278000,269394000,272657000,275854000,279040000,282162411,284968955,287625193,290107933,292805298,295516599,298379912,301231207,304093966,306771529,309347057,311721632,314112078,316497531,318857056])
usaGDPpercapita = usaGDP / usaPop
plt.figure()
plt.plot(years, usaGDPpercapita)
plt.xlabel('Year')
plt.ylabel('GDP per capita in Current USD')
plt.grid(True)
plt.show()
# adjust GDP per Capita to 1960s numbers
usaGDPpercapita1960 = usaGDPpercapita / (usaCPI / usaCPI[0])
plt.figure()
plt.plot(years, usaGDPpercapita1960)
plt.xlabel('Year')
plt.ylabel('GDP per capita adjusted for inflation in 1960 USD')
plt.grid(True)
plt.show()
# adjust GDP per Capita to 2014s numbers
usaGDPpercapita2014 = usaGDPpercapita / (usaCPI / usaCPI[-1])
plt.figure()
plt.plot(years, usaGDPpercapita2014)
plt.xlabel('Year')
plt.ylabel('GDP per capita adjusted for inflation to 2014 USD')
plt.grid(True)
plt.show()
# define a function to adjust the CPI based on an over- or under-estimation of
# the inflation rate, where rate is the assumed yearly percentage increase or decrease;
# a percentage overestimate of 5% would be input as 1.05
def adjustCPI(cpi, rate):
demo = []
for i, j in enumerate(cpi):
demo.append(j * (rate**i))
    return np.array(demo)  # return an array so it behaves like the other CPI series in later arithmetic
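# For illustration: adjustCPI(np.array([100., 100.]), 1.05) gives [100., 105.],
# i.e. each successive year's index is scaled by the compounded rate.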
# what if we underestimated inflation?
cpiOverFive = adjustCPI(usaCPI, 1.005)
# what if we overestimated inflation?
cpiUnderFive = adjustCPI(usaCPI, 0.995)
# adjust GDP per Capita to 2014s numbers
usaGDPpercapita2014OverFive = usaGDPpercapita / (cpiOverFive / cpiOverFive[-1])
usaGDPpercapita2014UnderFive = usaGDPpercapita / (cpiUnderFive / cpiUnderFive[-1])
plt.figure()
plt.plot(years, usaGDPpercapita2014, label='normal')
plt.plot(years, usaGDPpercapita2014OverFive, label='under')
plt.plot(years, usaGDPpercapita2014UnderFive, label='over')
plt.legend()
plt.xlabel('Year')
plt.ylabel('GDP per capita adjusted for inflation to 2014 USD')
plt.grid(True)
plt.show()
years2 = np.array([1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014])
usaGNI = np.array([612178550047.646,646233886826.65,692328219512.945,753294530375.941,824183577234.192,868295290971.962,952033980993.251,1027990251284.03,1098553055567.61,1183038457083.86,1320921418184.74,1548458249174.67,1711839855738.22,1842214711486.27,1958767403397.59,2117456144199.84,2401109359261.26,2751769589536.9,3048093901726.34,3303883972259.98,3297652203866.24,3411202239818.87,3828479505092.12,4164905103485.73,4601500378186.56,5200354088055.45,5765196251790.1,5888830786924.1,6029529322891.06,6164277951121.71,6612706041742.15,6883086506452.91,7302781827892.38,7760854970064.45,8184808773787.28,8558708987900.82,8869581532268.98,9425292191447.05,10178500697503.7,10498594829042.2,10776200783181,11589035965657.3,12790914724399.8,13693955258225.3,14345564947204.5,14651211130474,15002428215985,14740580035992.9,15143137264678.1,15727290871234.6,16501015978642.4,17001290051112.6,17611490812741.3])
# GNI data atlas method from the worldbank http://databank.worldbank.org/data/reports.aspx?source=2&country=USA&series=&period=#
# CPI data from bureau of labor statistics http://data.bls.gov/pdq/SurveyOutputServlet
usaCPI2 = np.array([30.2, 30.6, 31.0, 31.5, 32.4, 33.4, 34.8, 36.7, 38.8, 40.5, 41.8, 44.4, 49.3, 53.8, 56.9, 60.6, 65.2, 72.6, 82.4, 90.9, 96.5, 99.6, 103.9, 107.6, 109.6, 113.6, 118.3, 124.0, 130.7, 136.2, 140.3, 144.5, 148.2, 152.4, 156.9, 160.5, 163.0, 166.6, 172.2, 177.1, 179.9, 184.0, 188.9, 195.3, 201.6, 207.342, 215.303, 214.537, 218.056, 224.939, 229.594, 232.957, 236.736])
plt.figure()
plt.plot(years2, usaGNI)
plt.xlabel('Year')
plt.ylabel('GNI in Current USD')
plt.grid(True)
plt.show()
# Adjust GNI for 1962 USD
usaGNI1962 = usaGNI / (usaCPI2 / usaCPI2[0])
plt.figure()
plt.plot(years2, usaGNI1962)
plt.xlabel('Year')
plt.ylabel('GNI adjusted for inflation to 1962 USD')
plt.grid(True)
plt.show()
# Adjust GNI for 2014 USD
usaGNI2014 = usaGNI / (usaCPI2 / usaCPI2[-1])
plt.figure()
plt.plot(years2, usaGNI2014)
plt.xlabel('Year')
plt.ylabel('GNI adjusted for inflation to 2014 USD')
plt.grid(True)
plt.show()
# population from world bank
usaPop = np.array([186538000,189242000,191889000,194303000,196560000,198712000,200706000,202677000,205052000,207661000,209896000,211909000,213854000,215973000,218035000,220239000,222585000,225055000,227225000,229466000,231664000,233792000,235825000,237924000,240133000,242289000,244499000,246819000,249623000,252981000,256514000,259919000,263126000,266278000,269394000,272657000,275854000,279040000,282162411,284968955,287625193,290107933,292805298,295516599,298379912,301231207,304093966,306771529,309347057,311721632,314112078,316497531,318857056])
usaGNIpercapita = usaGNI / usaPop
plt.figure()
plt.plot(years2, usaGNIpercapita)
plt.xlabel('Year')
plt.ylabel('GNI per capita in Current USD')
plt.grid(True)
plt.show()
# adjust GNI per Capita to 1962s numbers
usaGNIpercapita1962 = usaGNIpercapita / (usaCPI2 / usaCPI2[0])
plt.figure()
plt.plot(years2, usaGNIpercapita1962)
plt.xlabel('Year')
plt.ylabel('GNI per capita adjusted for inflation to 1962 USD')
plt.grid(True)
plt.show()
# adjust GNI per Capita to 2014s numbers
usaGNIpercapita2014 = usaGNIpercapita / (usaCPI2 / usaCPI2[-1])
plt.figure()
plt.plot(years2, usaGNIpercapita2014)
plt.xlabel('Year')
plt.ylabel('GNI per capita adjusted for inflation to 2014 USD')
plt.grid(True)
plt.show()
# close all figs
plt.close('all')
# save the final plots
# plot of the GDP and GNI in current USD
plt.figure()
plt.plot(years, usaGDP / 1.e12, '-k', label='GDP')
plt.plot(years2, usaGNI / 1.e12, '--b', label='GNI')
plt.xlabel('Year')
plt.ylabel('Trillion USD')
plt.legend(loc=4)
plt.grid(True)
plt.show()
plt.savefig('images/usaGDPandGNI.png')
# plot of GDP and GNI per capita in current USD
plt.figure()
plt.plot(years, usaGDPpercapita, '-k', label='GDP')
plt.plot(years2, usaGNIpercapita, '--b', label='GNI')
plt.xlabel('Year')
plt.ylabel('USD per capita')
plt.legend(loc=4)
plt.grid(True)
plt.show()
plt.savefig('images/usaGDPandGNI_perCapita.png')
# plot of GDP and GNI per capita in 2014 USD
plt.figure()
plt.plot(years, usaGDPpercapita2014, '-k', label='GDP')
plt.plot(years2, usaGNIpercapita2014, '--b', label='GNI')
plt.xlabel('Year')
plt.ylabel('USD per capita adjusted for inflation to 2014 levels')
plt.legend(loc=4)
plt.grid(True)
plt.show()
plt.savefig('images/usaGDPandGNI_perCapita_2014.png')
# plot of GDP at 0.5, 1, and 2 perecent estimations
# what if CPI has underestimated inflation?
cpiUnderHalf = adjustCPI(usaCPI, 1.005)
cpiUnderOne = adjustCPI(usaCPI, 1.01)
cpiUnderTwo = adjustCPI(usaCPI, 1.02)
# what if CPI has overestimated inflation?
cpiOverHalf = adjustCPI(usaCPI, 0.995)
cpiOverOne = adjustCPI(usaCPI, 0.99)
cpiOverTwo = adjustCPI(usaCPI, 0.98)
# recalculate GDP based on the adjusted CPI values
usaGDPpercapita2014UnderHalf = usaGDPpercapita / (cpiUnderHalf / cpiUnderHalf[-1])
usaGDPpercapita2014UnderOne = usaGDPpercapita / (cpiUnderOne / cpiUnderOne[-1])
usaGDPpercapita2014UnderTwo = usaGDPpercapita / (cpiUnderTwo / cpiUnderTwo[-1])
usaGDPpercapita2014OverHalf = usaGDPpercapita / (cpiOverHalf / cpiOverHalf[-1])
usaGDPpercapita2014OverOne = usaGDPpercapita / (cpiOverOne / cpiOverOne[-1])
usaGDPpercapita2014OverTwo = usaGDPpercapita / (cpiOverTwo / cpiOverTwo[-1])
plt.figure()
plt.plot(years, usaGDPpercapita2014, '-k', label='Adjusted to 2014 CPI')
plt.plot(years, usaGDPpercapita2014UnderHalf, '--k', label='CPI each year adjusted +0.5%')
plt.plot(years, usaGDPpercapita2014OverHalf, '-.k', label='CPI each year adjusted -0.5%')
plt.xlabel('Year')
plt.ylabel('GDP per capita adjusted for inflation (USD)')
plt.legend(loc=4)
plt.grid(True)
plt.show()
plt.savefig('images/usaGDPandGNI_perCapita_2014_half.png')
plt.figure()
plt.plot(years, usaGDPpercapita2014, '-k', label='Adjusted to 2014 CPI')
plt.plot(years, usaGDPpercapita2014UnderOne, '--k', label='CPI each year adjusted +1.0%')
plt.plot(years, usaGDPpercapita2014OverOne, '-.k', label='CPI each year adjusted -1.0%')
plt.xlabel('Year')
plt.ylabel('GDP per capita adjusted for inflation (USD)')
plt.legend(loc=4)
plt.grid(True)
plt.show()
plt.savefig('images/usaGDPandGNI_perCapita_2014_one.png')
plt.figure()
plt.plot(years, usaGDPpercapita2014, '-k', label='Adjusted to 2014 CPI')
plt.plot(years, usaGDPpercapita2014UnderTwo, '--k', label='CPI each year adjusted +2.0%')
plt.plot(years, usaGDPpercapita2014OverTwo, '-.k', label='CPI each year adjusted -2.0%')
plt.xlabel('Year')
plt.ylabel('GDP per capita adjusted for inflation (USD)')
plt.legend(loc=4)
plt.grid(True)
plt.show()
plt.savefig('images/usaGDPandGNI_perCapita_2014_two.png')
|
[
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.close",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.figure",
"numpy.array",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.savefig"
] |
[((74, 90), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (83, 90), True, 'import matplotlib.pyplot as plt\n'), ((100, 456), 'numpy.array', 'np.array', (['[1960, 1961, 1962, 1963, 1964, 1965, 1966, 1967, 1968, 1969, 1970, 1971, \n 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1983,\n 1984, 1985, 1986, 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995,\n 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,\n 2008, 2009, 2010, 2011, 2012, 2013, 2014]'], {}), '([1960, 1961, 1962, 1963, 1964, 1965, 1966, 1967, 1968, 1969, 1970,\n 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982,\n 1983, 1984, 1985, 1986, 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994,\n 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,\n 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014])\n', (108, 456), True, 'import numpy as np\n'), ((396, 1409), 'numpy.array', 'np.array', (['[543300000000.0, 563300000000.0, 605100000000.0, 638600000000.0, \n 685800000000.0, 743700000000.0, 815000000000.0, 861700000000.0, \n 942500000000.0, 1019900000000.0, 1075884000000.0, 1167770000000.0, \n 1282449000000.0, 1428549000000.0, 1548825000000.0, 1688923000000.0, \n 1877587000000.0, 2085951000000.0, 2356571000000.0, 2632143000000.0, \n 2862505000000.0, 3210956000000.0, 3344991000000.0, 3638137000000.0, \n 4040693000000.0, 4346734000000.0, 4590155000000.0, 4870217000000.0, \n 5252629000000.0, 5657693000000.0, 5979589000000.0, 6174043000000.0, \n 6539299000000.0, 6878718000000.0, 7308755000000.0, 7664060000000.0, \n 8100201000000.0, 8608515000000.0, 9089168000000.0, 9660624000000.0, \n 10284779000000.0, 10621824000000.0, 10977514000000.0, 11510670000000.0,\n 12274928000000.0, 13093726000000.0, 13855888000000.0, 14477635000000.0,\n 14718582000000.0, 14418739000000.0, 14964372000000.0, 15517926000000.0,\n 16163158000000.0, 16768053000000.0, 17419000000000.0]'], {}), '([543300000000.0, 563300000000.0, 605100000000.0, 638600000000.0, \n 685800000000.0, 743700000000.0, 815000000000.0, 861700000000.0, \n 942500000000.0, 1019900000000.0, 1075884000000.0, 1167770000000.0, \n 1282449000000.0, 1428549000000.0, 1548825000000.0, 1688923000000.0, \n 1877587000000.0, 2085951000000.0, 2356571000000.0, 2632143000000.0, \n 2862505000000.0, 3210956000000.0, 3344991000000.0, 3638137000000.0, \n 4040693000000.0, 4346734000000.0, 4590155000000.0, 4870217000000.0, \n 5252629000000.0, 5657693000000.0, 5979589000000.0, 6174043000000.0, \n 6539299000000.0, 6878718000000.0, 7308755000000.0, 7664060000000.0, \n 8100201000000.0, 8608515000000.0, 9089168000000.0, 9660624000000.0, \n 10284779000000.0, 10621824000000.0, 10977514000000.0, 11510670000000.0,\n 12274928000000.0, 13093726000000.0, 13855888000000.0, 14477635000000.0,\n 14718582000000.0, 14418739000000.0, 14964372000000.0, 15517926000000.0,\n 16163158000000.0, 16768053000000.0, 17419000000000.0])\n', (404, 1409), True, 'import numpy as np\n'), ((1448, 1857), 'numpy.array', 'np.array', (['[29.6, 29.9, 30.2, 30.6, 31.0, 31.5, 32.4, 33.4, 34.8, 36.7, 38.8, 40.5, \n 41.8, 44.4, 49.3, 53.8, 56.9, 60.6, 65.2, 72.6, 82.4, 90.9, 96.5, 99.6,\n 103.9, 107.6, 109.6, 113.6, 118.3, 124.0, 130.7, 136.2, 140.3, 144.5, \n 148.2, 152.4, 156.9, 160.5, 163.0, 166.6, 172.2, 177.1, 179.9, 184.0, \n 188.9, 195.3, 201.6, 207.342, 215.303, 214.537, 218.056, 224.939, \n 229.594, 232.957, 236.736]'], {}), '([29.6, 29.9, 30.2, 30.6, 31.0, 31.5, 32.4, 33.4, 34.8, 36.7, 38.8,\n 40.5, 41.8, 44.4, 49.3, 53.8, 56.9, 60.6, 65.2, 72.6, 82.4, 
90.9, 96.5,\n 99.6, 103.9, 107.6, 109.6, 113.6, 118.3, 124.0, 130.7, 136.2, 140.3, \n 144.5, 148.2, 152.4, 156.9, 160.5, 163.0, 166.6, 172.2, 177.1, 179.9, \n 184.0, 188.9, 195.3, 201.6, 207.342, 215.303, 214.537, 218.056, 224.939,\n 229.594, 232.957, 236.736])\n', (1456, 1857), True, 'import numpy as np\n'), ((1837, 1849), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1847, 1849), True, 'import matplotlib.pyplot as plt\n'), ((1850, 1873), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDP'], {}), '(years, usaGDP)\n', (1858, 1873), True, 'import matplotlib.pyplot as plt\n'), ((1874, 1892), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (1884, 1892), True, 'import matplotlib.pyplot as plt\n'), ((1893, 1925), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GDP in Current USD"""'], {}), "('GDP in Current USD')\n", (1903, 1925), True, 'import matplotlib.pyplot as plt\n'), ((1926, 1940), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (1934, 1940), True, 'import matplotlib.pyplot as plt\n'), ((1941, 1951), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1949, 1951), True, 'import matplotlib.pyplot as plt\n'), ((2025, 2037), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2035, 2037), True, 'import matplotlib.pyplot as plt\n'), ((2038, 2065), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDP1960'], {}), '(years, usaGDP1960)\n', (2046, 2065), True, 'import matplotlib.pyplot as plt\n'), ((2066, 2084), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (2076, 2084), True, 'import matplotlib.pyplot as plt\n'), ((2085, 2137), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GDP adjusted for inflation in 1960 USD"""'], {}), "('GDP adjusted for inflation in 1960 USD')\n", (2095, 2137), True, 'import matplotlib.pyplot as plt\n'), ((2138, 2152), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (2146, 2152), True, 'import matplotlib.pyplot as plt\n'), ((2153, 2163), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2161, 2163), True, 'import matplotlib.pyplot as plt\n'), ((2238, 2250), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2248, 2250), True, 'import matplotlib.pyplot as plt\n'), ((2251, 2278), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDP2014'], {}), '(years, usaGDP2014)\n', (2259, 2278), True, 'import matplotlib.pyplot as plt\n'), ((2279, 2297), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (2289, 2297), True, 'import matplotlib.pyplot as plt\n'), ((2298, 2350), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GDP adjusted for inflation in 2014 USD"""'], {}), "('GDP adjusted for inflation in 2014 USD')\n", (2308, 2350), True, 'import matplotlib.pyplot as plt\n'), ((2351, 2365), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (2359, 2365), True, 'import matplotlib.pyplot as plt\n'), ((2366, 2376), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2374, 2376), True, 'import matplotlib.pyplot as plt\n'), ((2418, 3077), 'numpy.array', 'np.array', (['[180671000, 183691000, 186538000, 189242000, 191889000, 194303000, \n 196560000, 198712000, 200706000, 202677000, 205052000, 207661000, \n 209896000, 211909000, 213854000, 215973000, 218035000, 220239000, \n 222585000, 225055000, 227225000, 229466000, 231664000, 233792000, \n 235825000, 237924000, 240133000, 242289000, 244499000, 246819000, \n 249623000, 252981000, 256514000, 259919000, 263126000, 266278000, \n 269394000, 
272657000, 275854000, 279040000, 282162411, 284968955, \n 287625193, 290107933, 292805298, 295516599, 298379912, 301231207, \n 304093966, 306771529, 309347057, 311721632, 314112078, 316497531, 318857056\n ]'], {}), '([180671000, 183691000, 186538000, 189242000, 191889000, 194303000,\n 196560000, 198712000, 200706000, 202677000, 205052000, 207661000, \n 209896000, 211909000, 213854000, 215973000, 218035000, 220239000, \n 222585000, 225055000, 227225000, 229466000, 231664000, 233792000, \n 235825000, 237924000, 240133000, 242289000, 244499000, 246819000, \n 249623000, 252981000, 256514000, 259919000, 263126000, 266278000, \n 269394000, 272657000, 275854000, 279040000, 282162411, 284968955, \n 287625193, 290107933, 292805298, 295516599, 298379912, 301231207, \n 304093966, 306771529, 309347057, 311721632, 314112078, 316497531, \n 318857056])\n', (2426, 3077), True, 'import numpy as np\n'), ((3015, 3027), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3025, 3027), True, 'import matplotlib.pyplot as plt\n'), ((3028, 3060), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita'], {}), '(years, usaGDPpercapita)\n', (3036, 3060), True, 'import matplotlib.pyplot as plt\n'), ((3061, 3079), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (3071, 3079), True, 'import matplotlib.pyplot as plt\n'), ((3080, 3123), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GDP per capita in Current USD"""'], {}), "('GDP per capita in Current USD')\n", (3090, 3123), True, 'import matplotlib.pyplot as plt\n'), ((3124, 3138), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (3132, 3138), True, 'import matplotlib.pyplot as plt\n'), ((3139, 3149), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3147, 3149), True, 'import matplotlib.pyplot as plt\n'), ((3255, 3267), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3265, 3267), True, 'import matplotlib.pyplot as plt\n'), ((3268, 3304), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita1960'], {}), '(years, usaGDPpercapita1960)\n', (3276, 3304), True, 'import matplotlib.pyplot as plt\n'), ((3305, 3323), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (3315, 3323), True, 'import matplotlib.pyplot as plt\n'), ((3324, 3387), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GDP per capita adjusted for inflation in 1960 USD"""'], {}), "('GDP per capita adjusted for inflation in 1960 USD')\n", (3334, 3387), True, 'import matplotlib.pyplot as plt\n'), ((3388, 3402), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (3396, 3402), True, 'import matplotlib.pyplot as plt\n'), ((3403, 3413), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3411, 3413), True, 'import matplotlib.pyplot as plt\n'), ((3520, 3532), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3530, 3532), True, 'import matplotlib.pyplot as plt\n'), ((3533, 3569), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita2014'], {}), '(years, usaGDPpercapita2014)\n', (3541, 3569), True, 'import matplotlib.pyplot as plt\n'), ((3570, 3588), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (3580, 3588), True, 'import matplotlib.pyplot as plt\n'), ((3589, 3652), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GDP per capita adjusted for inflation to 2014 USD"""'], {}), "('GDP per capita adjusted for inflation to 2014 USD')\n", (3599, 3652), True, 'import matplotlib.pyplot as plt\n'), ((3653, 3667), 
'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (3661, 3667), True, 'import matplotlib.pyplot as plt\n'), ((3668, 3678), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3676, 3678), True, 'import matplotlib.pyplot as plt\n'), ((4399, 4411), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4409, 4411), True, 'import matplotlib.pyplot as plt\n'), ((4412, 4464), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita2014'], {'label': '"""normal"""'}), "(years, usaGDPpercapita2014, label='normal')\n", (4420, 4464), True, 'import matplotlib.pyplot as plt\n'), ((4465, 4524), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita2014OverFive'], {'label': '"""under"""'}), "(years, usaGDPpercapita2014OverFive, label='under')\n", (4473, 4524), True, 'import matplotlib.pyplot as plt\n'), ((4525, 4584), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita2014UnderFive'], {'label': '"""over"""'}), "(years, usaGDPpercapita2014UnderFive, label='over')\n", (4533, 4584), True, 'import matplotlib.pyplot as plt\n'), ((4585, 4597), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4595, 4597), True, 'import matplotlib.pyplot as plt\n'), ((4598, 4616), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (4608, 4616), True, 'import matplotlib.pyplot as plt\n'), ((4617, 4680), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GDP per capita adjusted for inflation to 2014 USD"""'], {}), "('GDP per capita adjusted for inflation to 2014 USD')\n", (4627, 4680), True, 'import matplotlib.pyplot as plt\n'), ((4681, 4695), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (4689, 4695), True, 'import matplotlib.pyplot as plt\n'), ((4696, 4706), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4704, 4706), True, 'import matplotlib.pyplot as plt\n'), ((4718, 5062), 'numpy.array', 'np.array', (['[1962, 1963, 1964, 1965, 1966, 1967, 1968, 1969, 1970, 1971, 1972, 1973, \n 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1983, 1984, 1985,\n 1986, 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997,\n 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,\n 2010, 2011, 2012, 2013, 2014]'], {}), '([1962, 1963, 1964, 1965, 1966, 1967, 1968, 1969, 1970, 1971, 1972,\n 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1983, 1984,\n 1985, 1986, 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996,\n 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008,\n 2009, 2010, 2011, 2012, 2013, 2014])\n', (4726, 5062), True, 'import numpy as np\n'), ((5004, 6016), 'numpy.array', 'np.array', (['[612178550047.646, 646233886826.65, 692328219512.945, 753294530375.941, \n 824183577234.192, 868295290971.962, 952033980993.251, 1027990251284.03,\n 1098553055567.61, 1183038457083.86, 1320921418184.74, 1548458249174.67,\n 1711839855738.22, 1842214711486.27, 1958767403397.59, 2117456144199.84,\n 2401109359261.26, 2751769589536.9, 3048093901726.34, 3303883972259.98, \n 3297652203866.24, 3411202239818.87, 3828479505092.12, 4164905103485.73,\n 4601500378186.56, 5200354088055.45, 5765196251790.1, 5888830786924.1, \n 6029529322891.06, 6164277951121.71, 6612706041742.15, 6883086506452.91,\n 7302781827892.38, 7760854970064.45, 8184808773787.28, 8558708987900.82,\n 8869581532268.98, 9425292191447.05, 10178500697503.7, 10498594829042.2,\n 10776200783181, 11589035965657.3, 12790914724399.8, 13693955258225.3, \n 14345564947204.5, 14651211130474, 15002428215985, 
14740580035992.9, \n 15143137264678.1, 15727290871234.6, 16501015978642.4, 17001290051112.6,\n 17611490812741.3]'], {}), '([612178550047.646, 646233886826.65, 692328219512.945, \n 753294530375.941, 824183577234.192, 868295290971.962, 952033980993.251,\n 1027990251284.03, 1098553055567.61, 1183038457083.86, 1320921418184.74,\n 1548458249174.67, 1711839855738.22, 1842214711486.27, 1958767403397.59,\n 2117456144199.84, 2401109359261.26, 2751769589536.9, 3048093901726.34, \n 3303883972259.98, 3297652203866.24, 3411202239818.87, 3828479505092.12,\n 4164905103485.73, 4601500378186.56, 5200354088055.45, 5765196251790.1, \n 5888830786924.1, 6029529322891.06, 6164277951121.71, 6612706041742.15, \n 6883086506452.91, 7302781827892.38, 7760854970064.45, 8184808773787.28,\n 8558708987900.82, 8869581532268.98, 9425292191447.05, 10178500697503.7,\n 10498594829042.2, 10776200783181, 11589035965657.3, 12790914724399.8, \n 13693955258225.3, 14345564947204.5, 14651211130474, 15002428215985, \n 14740580035992.9, 15143137264678.1, 15727290871234.6, 16501015978642.4,\n 17001290051112.6, 17611490812741.3])\n', (5012, 6016), True, 'import numpy as np\n'), ((6137, 6535), 'numpy.array', 'np.array', (['[30.2, 30.6, 31.0, 31.5, 32.4, 33.4, 34.8, 36.7, 38.8, 40.5, 41.8, 44.4, \n 49.3, 53.8, 56.9, 60.6, 65.2, 72.6, 82.4, 90.9, 96.5, 99.6, 103.9, \n 107.6, 109.6, 113.6, 118.3, 124.0, 130.7, 136.2, 140.3, 144.5, 148.2, \n 152.4, 156.9, 160.5, 163.0, 166.6, 172.2, 177.1, 179.9, 184.0, 188.9, \n 195.3, 201.6, 207.342, 215.303, 214.537, 218.056, 224.939, 229.594, \n 232.957, 236.736]'], {}), '([30.2, 30.6, 31.0, 31.5, 32.4, 33.4, 34.8, 36.7, 38.8, 40.5, 41.8,\n 44.4, 49.3, 53.8, 56.9, 60.6, 65.2, 72.6, 82.4, 90.9, 96.5, 99.6, 103.9,\n 107.6, 109.6, 113.6, 118.3, 124.0, 130.7, 136.2, 140.3, 144.5, 148.2, \n 152.4, 156.9, 160.5, 163.0, 166.6, 172.2, 177.1, 179.9, 184.0, 188.9, \n 195.3, 201.6, 207.342, 215.303, 214.537, 218.056, 224.939, 229.594, \n 232.957, 236.736])\n', (6145, 6535), True, 'import numpy as np\n'), ((6514, 6526), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (6524, 6526), True, 'import matplotlib.pyplot as plt\n'), ((6527, 6551), 'matplotlib.pyplot.plot', 'plt.plot', (['years2', 'usaGNI'], {}), '(years2, usaGNI)\n', (6535, 6551), True, 'import matplotlib.pyplot as plt\n'), ((6552, 6570), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (6562, 6570), True, 'import matplotlib.pyplot as plt\n'), ((6571, 6603), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GNI in Current USD"""'], {}), "('GNI in Current USD')\n", (6581, 6603), True, 'import matplotlib.pyplot as plt\n'), ((6604, 6618), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (6612, 6618), True, 'import matplotlib.pyplot as plt\n'), ((6619, 6629), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6627, 6629), True, 'import matplotlib.pyplot as plt\n'), ((6705, 6717), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (6715, 6717), True, 'import matplotlib.pyplot as plt\n'), ((6718, 6746), 'matplotlib.pyplot.plot', 'plt.plot', (['years2', 'usaGNI1962'], {}), '(years2, usaGNI1962)\n', (6726, 6746), True, 'import matplotlib.pyplot as plt\n'), ((6747, 6765), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (6757, 6765), True, 'import matplotlib.pyplot as plt\n'), ((6766, 6818), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GNI adjusted for inflation to 1962 USD"""'], {}), "('GNI adjusted for inflation to 1962 USD')\n", (6776, 6818), True, 'import 
matplotlib.pyplot as plt\n'), ((6819, 6833), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (6827, 6833), True, 'import matplotlib.pyplot as plt\n'), ((6834, 6844), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6842, 6844), True, 'import matplotlib.pyplot as plt\n'), ((6921, 6933), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (6931, 6933), True, 'import matplotlib.pyplot as plt\n'), ((6934, 6962), 'matplotlib.pyplot.plot', 'plt.plot', (['years2', 'usaGNI2014'], {}), '(years2, usaGNI2014)\n', (6942, 6962), True, 'import matplotlib.pyplot as plt\n'), ((6963, 6981), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (6973, 6981), True, 'import matplotlib.pyplot as plt\n'), ((6982, 7034), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GNI adjusted for inflation to 2014 USD"""'], {}), "('GNI adjusted for inflation to 2014 USD')\n", (6992, 7034), True, 'import matplotlib.pyplot as plt\n'), ((7035, 7049), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (7043, 7049), True, 'import matplotlib.pyplot as plt\n'), ((7050, 7060), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7058, 7060), True, 'import matplotlib.pyplot as plt\n'), ((7102, 7734), 'numpy.array', 'np.array', (['[186538000, 189242000, 191889000, 194303000, 196560000, 198712000, \n 200706000, 202677000, 205052000, 207661000, 209896000, 211909000, \n 213854000, 215973000, 218035000, 220239000, 222585000, 225055000, \n 227225000, 229466000, 231664000, 233792000, 235825000, 237924000, \n 240133000, 242289000, 244499000, 246819000, 249623000, 252981000, \n 256514000, 259919000, 263126000, 266278000, 269394000, 272657000, \n 275854000, 279040000, 282162411, 284968955, 287625193, 290107933, \n 292805298, 295516599, 298379912, 301231207, 304093966, 306771529, \n 309347057, 311721632, 314112078, 316497531, 318857056]'], {}), '([186538000, 189242000, 191889000, 194303000, 196560000, 198712000,\n 200706000, 202677000, 205052000, 207661000, 209896000, 211909000, \n 213854000, 215973000, 218035000, 220239000, 222585000, 225055000, \n 227225000, 229466000, 231664000, 233792000, 235825000, 237924000, \n 240133000, 242289000, 244499000, 246819000, 249623000, 252981000, \n 256514000, 259919000, 263126000, 266278000, 269394000, 272657000, \n 275854000, 279040000, 282162411, 284968955, 287625193, 290107933, \n 292805298, 295516599, 298379912, 301231207, 304093966, 306771529, \n 309347057, 311721632, 314112078, 316497531, 318857056])\n', (7110, 7734), True, 'import numpy as np\n'), ((7679, 7691), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (7689, 7691), True, 'import matplotlib.pyplot as plt\n'), ((7692, 7725), 'matplotlib.pyplot.plot', 'plt.plot', (['years2', 'usaGNIpercapita'], {}), '(years2, usaGNIpercapita)\n', (7700, 7725), True, 'import matplotlib.pyplot as plt\n'), ((7726, 7744), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (7736, 7744), True, 'import matplotlib.pyplot as plt\n'), ((7745, 7788), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GNI per capita in Current USD"""'], {}), "('GNI per capita in Current USD')\n", (7755, 7788), True, 'import matplotlib.pyplot as plt\n'), ((7789, 7803), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (7797, 7803), True, 'import matplotlib.pyplot as plt\n'), ((7804, 7814), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7812, 7814), True, 'import matplotlib.pyplot as plt\n'), ((7922, 7934), 'matplotlib.pyplot.figure', 'plt.figure', 
([], {}), '()\n', (7932, 7934), True, 'import matplotlib.pyplot as plt\n'), ((7935, 7972), 'matplotlib.pyplot.plot', 'plt.plot', (['years2', 'usaGNIpercapita1962'], {}), '(years2, usaGNIpercapita1962)\n', (7943, 7972), True, 'import matplotlib.pyplot as plt\n'), ((7973, 7991), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (7983, 7991), True, 'import matplotlib.pyplot as plt\n'), ((7992, 8055), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GNI per capita adjusted for inflation to 1962 USD"""'], {}), "('GNI per capita adjusted for inflation to 1962 USD')\n", (8002, 8055), True, 'import matplotlib.pyplot as plt\n'), ((8056, 8070), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (8064, 8070), True, 'import matplotlib.pyplot as plt\n'), ((8071, 8081), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (8079, 8081), True, 'import matplotlib.pyplot as plt\n'), ((8190, 8202), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (8200, 8202), True, 'import matplotlib.pyplot as plt\n'), ((8203, 8240), 'matplotlib.pyplot.plot', 'plt.plot', (['years2', 'usaGNIpercapita2014'], {}), '(years2, usaGNIpercapita2014)\n', (8211, 8240), True, 'import matplotlib.pyplot as plt\n'), ((8241, 8259), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (8251, 8259), True, 'import matplotlib.pyplot as plt\n'), ((8260, 8323), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GNI per capita adjusted for inflation to 2014 USD"""'], {}), "('GNI per capita adjusted for inflation to 2014 USD')\n", (8270, 8323), True, 'import matplotlib.pyplot as plt\n'), ((8324, 8338), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (8332, 8338), True, 'import matplotlib.pyplot as plt\n'), ((8339, 8349), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (8347, 8349), True, 'import matplotlib.pyplot as plt\n'), ((8370, 8386), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (8379, 8386), True, 'import matplotlib.pyplot as plt\n'), ((8456, 8468), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (8466, 8468), True, 'import matplotlib.pyplot as plt\n'), ((8469, 8529), 'matplotlib.pyplot.plot', 'plt.plot', (['years', '(usaGDP / 1000000000000.0)', '"""-k"""'], {'label': '"""GDP"""'}), "(years, usaGDP / 1000000000000.0, '-k', label='GDP')\n", (8477, 8529), True, 'import matplotlib.pyplot as plt\n'), ((8520, 8582), 'matplotlib.pyplot.plot', 'plt.plot', (['years2', '(usaGNI / 1000000000000.0)', '"""--b"""'], {'label': '"""GNI"""'}), "(years2, usaGNI / 1000000000000.0, '--b', label='GNI')\n", (8528, 8582), True, 'import matplotlib.pyplot as plt\n'), ((8573, 8591), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (8583, 8591), True, 'import matplotlib.pyplot as plt\n'), ((8592, 8618), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Trillion USD"""'], {}), "('Trillion USD')\n", (8602, 8618), True, 'import matplotlib.pyplot as plt\n'), ((8619, 8636), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(4)'}), '(loc=4)\n', (8629, 8636), True, 'import matplotlib.pyplot as plt\n'), ((8637, 8651), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (8645, 8651), True, 'import matplotlib.pyplot as plt\n'), ((8652, 8662), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (8660, 8662), True, 'import matplotlib.pyplot as plt\n'), ((8663, 8701), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""images/usaGDPandGNI.png"""'], {}), 
"('images/usaGDPandGNI.png')\n", (8674, 8701), True, 'import matplotlib.pyplot as plt\n'), ((8752, 8764), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (8762, 8764), True, 'import matplotlib.pyplot as plt\n'), ((8765, 8816), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita', '"""-k"""'], {'label': '"""GDP"""'}), "(years, usaGDPpercapita, '-k', label='GDP')\n", (8773, 8816), True, 'import matplotlib.pyplot as plt\n'), ((8817, 8870), 'matplotlib.pyplot.plot', 'plt.plot', (['years2', 'usaGNIpercapita', '"""--b"""'], {'label': '"""GNI"""'}), "(years2, usaGNIpercapita, '--b', label='GNI')\n", (8825, 8870), True, 'import matplotlib.pyplot as plt\n'), ((8871, 8889), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (8881, 8889), True, 'import matplotlib.pyplot as plt\n'), ((8890, 8918), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""USD per capita"""'], {}), "('USD per capita')\n", (8900, 8918), True, 'import matplotlib.pyplot as plt\n'), ((8919, 8936), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(4)'}), '(loc=4)\n', (8929, 8936), True, 'import matplotlib.pyplot as plt\n'), ((8937, 8951), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (8945, 8951), True, 'import matplotlib.pyplot as plt\n'), ((8952, 8962), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (8960, 8962), True, 'import matplotlib.pyplot as plt\n'), ((8963, 9011), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""images/usaGDPandGNI_perCapita.png"""'], {}), "('images/usaGDPandGNI_perCapita.png')\n", (8974, 9011), True, 'import matplotlib.pyplot as plt\n'), ((9059, 9071), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (9069, 9071), True, 'import matplotlib.pyplot as plt\n'), ((9072, 9127), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita2014', '"""-k"""'], {'label': '"""GDP"""'}), "(years, usaGDPpercapita2014, '-k', label='GDP')\n", (9080, 9127), True, 'import matplotlib.pyplot as plt\n'), ((9128, 9185), 'matplotlib.pyplot.plot', 'plt.plot', (['years2', 'usaGNIpercapita2014', '"""--b"""'], {'label': '"""GNI"""'}), "(years2, usaGNIpercapita2014, '--b', label='GNI')\n", (9136, 9185), True, 'import matplotlib.pyplot as plt\n'), ((9186, 9204), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (9196, 9204), True, 'import matplotlib.pyplot as plt\n'), ((9205, 9271), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""USD per capita adjusted for inflation to 2014 levels"""'], {}), "('USD per capita adjusted for inflation to 2014 levels')\n", (9215, 9271), True, 'import matplotlib.pyplot as plt\n'), ((9272, 9289), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(4)'}), '(loc=4)\n', (9282, 9289), True, 'import matplotlib.pyplot as plt\n'), ((9290, 9304), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (9298, 9304), True, 'import matplotlib.pyplot as plt\n'), ((9305, 9315), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (9313, 9315), True, 'import matplotlib.pyplot as plt\n'), ((9316, 9369), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""images/usaGDPandGNI_perCapita_2014.png"""'], {}), "('images/usaGDPandGNI_perCapita_2014.png')\n", (9327, 9369), True, 'import matplotlib.pyplot as plt\n'), ((10269, 10281), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (10279, 10281), True, 'import matplotlib.pyplot as plt\n'), ((10282, 10354), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita2014', '"""-k"""'], {'label': '"""Adjusted to 2014 
CPI"""'}), "(years, usaGDPpercapita2014, '-k', label='Adjusted to 2014 CPI')\n", (10290, 10354), True, 'import matplotlib.pyplot as plt\n'), ((10355, 10450), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita2014UnderHalf', '"""--k"""'], {'label': '"""CPI each year adjusted +0.5%"""'}), "(years, usaGDPpercapita2014UnderHalf, '--k', label=\n 'CPI each year adjusted +0.5%')\n", (10363, 10450), True, 'import matplotlib.pyplot as plt\n'), ((10446, 10540), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita2014OverHalf', '"""-.k"""'], {'label': '"""CPI each year adjusted -0.5%"""'}), "(years, usaGDPpercapita2014OverHalf, '-.k', label=\n 'CPI each year adjusted -0.5%')\n", (10454, 10540), True, 'import matplotlib.pyplot as plt\n'), ((10536, 10554), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (10546, 10554), True, 'import matplotlib.pyplot as plt\n'), ((10555, 10612), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GDP per capita adjusted for inflation (USD)"""'], {}), "('GDP per capita adjusted for inflation (USD)')\n", (10565, 10612), True, 'import matplotlib.pyplot as plt\n'), ((10613, 10630), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(4)'}), '(loc=4)\n', (10623, 10630), True, 'import matplotlib.pyplot as plt\n'), ((10631, 10645), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (10639, 10645), True, 'import matplotlib.pyplot as plt\n'), ((10646, 10656), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (10654, 10656), True, 'import matplotlib.pyplot as plt\n'), ((10657, 10715), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""images/usaGDPandGNI_perCapita_2014_half.png"""'], {}), "('images/usaGDPandGNI_perCapita_2014_half.png')\n", (10668, 10715), True, 'import matplotlib.pyplot as plt\n'), ((10716, 10728), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (10726, 10728), True, 'import matplotlib.pyplot as plt\n'), ((10729, 10801), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita2014', '"""-k"""'], {'label': '"""Adjusted to 2014 CPI"""'}), "(years, usaGDPpercapita2014, '-k', label='Adjusted to 2014 CPI')\n", (10737, 10801), True, 'import matplotlib.pyplot as plt\n'), ((10802, 10896), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita2014UnderOne', '"""--k"""'], {'label': '"""CPI each year adjusted +1.0%"""'}), "(years, usaGDPpercapita2014UnderOne, '--k', label=\n 'CPI each year adjusted +1.0%')\n", (10810, 10896), True, 'import matplotlib.pyplot as plt\n'), ((10892, 10985), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita2014OverOne', '"""-.k"""'], {'label': '"""CPI each year adjusted -1.0%"""'}), "(years, usaGDPpercapita2014OverOne, '-.k', label=\n 'CPI each year adjusted -1.0%')\n", (10900, 10985), True, 'import matplotlib.pyplot as plt\n'), ((10981, 10999), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (10991, 10999), True, 'import matplotlib.pyplot as plt\n'), ((11000, 11057), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GDP per capita adjusted for inflation (USD)"""'], {}), "('GDP per capita adjusted for inflation (USD)')\n", (11010, 11057), True, 'import matplotlib.pyplot as plt\n'), ((11058, 11075), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(4)'}), '(loc=4)\n', (11068, 11075), True, 'import matplotlib.pyplot as plt\n'), ((11076, 11090), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (11084, 11090), True, 'import matplotlib.pyplot as plt\n'), ((11091, 11101), 
'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (11099, 11101), True, 'import matplotlib.pyplot as plt\n'), ((11102, 11159), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""images/usaGDPandGNI_perCapita_2014_one.png"""'], {}), "('images/usaGDPandGNI_perCapita_2014_one.png')\n", (11113, 11159), True, 'import matplotlib.pyplot as plt\n'), ((11160, 11172), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (11170, 11172), True, 'import matplotlib.pyplot as plt\n'), ((11173, 11245), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita2014', '"""-k"""'], {'label': '"""Adjusted to 2014 CPI"""'}), "(years, usaGDPpercapita2014, '-k', label='Adjusted to 2014 CPI')\n", (11181, 11245), True, 'import matplotlib.pyplot as plt\n'), ((11246, 11340), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita2014UnderTwo', '"""--k"""'], {'label': '"""CPI each year adjusted +2.0%"""'}), "(years, usaGDPpercapita2014UnderTwo, '--k', label=\n 'CPI each year adjusted +2.0%')\n", (11254, 11340), True, 'import matplotlib.pyplot as plt\n'), ((11336, 11429), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'usaGDPpercapita2014OverTwo', '"""-.k"""'], {'label': '"""CPI each year adjusted -2.0%"""'}), "(years, usaGDPpercapita2014OverTwo, '-.k', label=\n 'CPI each year adjusted -2.0%')\n", (11344, 11429), True, 'import matplotlib.pyplot as plt\n'), ((11425, 11443), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (11435, 11443), True, 'import matplotlib.pyplot as plt\n'), ((11444, 11501), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GDP per capita adjusted for inflation (USD)"""'], {}), "('GDP per capita adjusted for inflation (USD)')\n", (11454, 11501), True, 'import matplotlib.pyplot as plt\n'), ((11502, 11519), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(4)'}), '(loc=4)\n', (11512, 11519), True, 'import matplotlib.pyplot as plt\n'), ((11520, 11534), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (11528, 11534), True, 'import matplotlib.pyplot as plt\n'), ((11535, 11545), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (11543, 11545), True, 'import matplotlib.pyplot as plt\n'), ((11546, 11603), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""images/usaGDPandGNI_perCapita_2014_two.png"""'], {}), "('images/usaGDPandGNI_perCapita_2014_two.png')\n", (11557, 11603), True, 'import matplotlib.pyplot as plt\n')]
|
import discord
from discord.ext import commands
class Snipe(commands.Cog):
"""Gets the last message sent."""
def __init__(self, bot):
self.bot = bot
self.cache = {}
@commands.Cog.listener()
async def on_raw_message_delete(self, payload):
message = payload.cached_message
if message is None:
return
if payload.guild_id:
guild_id = payload.guild_id
self.add_cache(message, payload.channel_id, guild_id)
else:
self.add_cache(message, payload.channel_id, None)
    def add_cache(self, message, channel, guild):
        # Guild messages are cached per guild and then per channel; direct
        # messages are cached by channel id only.
        entry = {
            "message": message.content,
            "author": message.author,
            "time": message.created_at}
        if guild is not None:
            if guild not in self.cache:
                self.cache[guild] = {}
            self.cache[guild][channel] = entry
        else:
            self.cache[channel] = entry
@commands.command(description="Gets last deleted message from guild / DM and sends it.")
async def snipe(self, ctx):
"""Gets last deleted message from guild / DM and sends it."""
        if ctx.message.guild:
            # Default to an empty dict so a guild with no cached deletions
            # falls through to the "No snipe available!" branch below.
            guild_cache = self.cache.get(ctx.guild.id, {})
            channel_cache = guild_cache.get(ctx.channel.id, None)
        else:
            channel_cache = self.cache.get(ctx.channel.id, None)
if channel_cache is None:
await ctx.send("No snipe available!")
return
if not channel_cache["message"]:
embed = discord.Embed(
description="No message content, message might have been a file.",
timestamp=channel_cache["time"],
color=0xff0000)
else:
embed = discord.Embed(
description=channel_cache["message"],
timestamp=channel_cache["time"],
color=0xff0000)
author = channel_cache["author"]
embed.set_author(name=f"{author}", icon_url=author.avatar_url)
embed.set_footer(text=f"Sniped by {str(self.bot.user)}")
await ctx.send(embed=embed)
def setup(bot):
bot.add_cog(Snipe(bot))
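# Usage note (added): with a discord.py 1.x bot this cog would typically be
# loaded via bot.load_extension("cogs.snipe"), assuming the file is saved as
# cogs/snipe.py; the module path here is illustrative only.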
|
[
"discord.ext.commands.command",
"discord.Embed",
"discord.ext.commands.Cog.listener"
] |
[((197, 220), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (218, 220), False, 'from discord.ext import commands\n'), ((1174, 1266), 'discord.ext.commands.command', 'commands.command', ([], {'description': '"""Gets last deleted message from guild / DM and sends it."""'}), "(description=\n 'Gets last deleted message from guild / DM and sends it.')\n", (1190, 1266), False, 'from discord.ext import commands\n'), ((1764, 1903), 'discord.Embed', 'discord.Embed', ([], {'description': '"""No message content, message might have been a file."""', 'timestamp': "channel_cache['time']", 'color': '(16711680)'}), "(description=\n 'No message content, message might have been a file.', timestamp=\n channel_cache['time'], color=16711680)\n", (1777, 1903), False, 'import discord\n'), ((1977, 2082), 'discord.Embed', 'discord.Embed', ([], {'description': "channel_cache['message']", 'timestamp': "channel_cache['time']", 'color': '(16711680)'}), "(description=channel_cache['message'], timestamp=channel_cache\n ['time'], color=16711680)\n", (1990, 2082), False, 'import discord\n')]
|
"""Derivation of variable `chlora`."""
from iris import Constraint
from ._baseclass import DerivedVariableBase
class DerivedVariable(DerivedVariableBase):
"""Derivation of variable `chlora`."""
@staticmethod
def required(project):
"""Declare the variables needed for derivation."""
required = [
{
'short_name': 'chldiatos'
},
{
'short_name': 'chlmiscos'
},
]
return required
@staticmethod
def calculate(cubes):
"""Compute surface chlorophyll concentration."""
chldiatos_cube = cubes.extract_cube(
Constraint(name='mass_concentration_of_diatoms_expressed_as' +
'_chlorophyll_in_sea_water'))
chlmiscos_cube = cubes.extract_cube(
Constraint(name='mass_concentration_of_miscellaneous' +
'_phytoplankton_expressed_as_chlorophyll' +
'_in_sea_water'))
chlora_cube = chldiatos_cube + chlmiscos_cube
return chlora_cube
|
[
"iris.Constraint"
] |
[((663, 758), 'iris.Constraint', 'Constraint', ([], {'name': "('mass_concentration_of_diatoms_expressed_as' + '_chlorophyll_in_sea_water')"}), "(name='mass_concentration_of_diatoms_expressed_as' +\n '_chlorophyll_in_sea_water')\n", (673, 758), False, 'from iris import Constraint\n'), ((836, 956), 'iris.Constraint', 'Constraint', ([], {'name': "('mass_concentration_of_miscellaneous' +\n '_phytoplankton_expressed_as_chlorophyll' + '_in_sea_water')"}), "(name='mass_concentration_of_miscellaneous' +\n '_phytoplankton_expressed_as_chlorophyll' + '_in_sea_water')\n", (846, 956), False, 'from iris import Constraint\n')]
|
"""
# Name Identifier-name, Poly Reverse Init-value XOR-out Check
[ 'crc-8', 'Crc8', 0x107, NON_REVERSE, 0x00, 0x00, 0xF4, ],
"""
from io import StringIO
from crcmod import Crc
c8 = 0x107
code = StringIO()
Crc(c8, rev=False).generateCode('crc8',code)
out = open('opsis_eeprom_crc.c', 'w')
out.write(code.getvalue().replace('UINT8', '__u8'))
out.close()
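# Added sanity check (not in the original script): with poly=0x107, init=0x00
# and xorOut=0x00, the CRC of the standard test string "123456789" should be
# the 0xF4 check value quoted in the docstring above.
crc_check = Crc(c8, initCrc=0x00, rev=False, xorOut=0x00)
crc_check.update(b'123456789')
assert crc_check.crcValue == 0xF4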
|
[
"io.StringIO",
"crcmod.Crc"
] |
[((302, 312), 'io.StringIO', 'StringIO', ([], {}), '()\n', (310, 312), False, 'from io import StringIO\n'), ((313, 331), 'crcmod.Crc', 'Crc', (['c8'], {'rev': '(False)'}), '(c8, rev=False)\n', (316, 331), False, 'from crcmod import Crc\n')]
|
from typing import List, Union
import numpy as np
def slice_to_list(
start: Union[int, None],
stop: Union[int, None],
step: Union[int, None],
size: int = None,
) -> List[int]:
    if stop is None:
        # An open-ended slice needs the container size to know where it ends.
        if size is None:
            raise ValueError("size required when stop is None")
        stop = size
    elif stop < 0:
        # A negative stop counts from the end, so the size is needed to
        # translate it into an absolute position.
        if size is None:
            raise ValueError(
                "size required when using negative stop index")
        stop = size + stop
        if stop < 0:
            raise ValueError("negative stop index out of range")
l = list(
range(
start if start is not None else 0,
stop if stop is not None else size,
step if step is not None else 1,
))
    # An empty result is legitimate (e.g. start >= stop); only reject ranges
    # that would include negative start indices.
    if l and np.min(l) < 0:
raise ValueError("negative start index not allowed")
return l
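# Example usage (added for illustration, not part of the original module):
# an open-ended slice over a container of size 5, and a negative stop index.
if __name__ == "__main__":
    print(slice_to_list(None, None, None, size=5))  # [0, 1, 2, 3, 4]
    print(slice_to_list(1, -1, 2, size=6))          # [1, 3]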
|
[
"numpy.min"
] |
[((862, 871), 'numpy.min', 'np.min', (['l'], {}), '(l)\n', (868, 871), True, 'import numpy as np\n')]
|
import torchfile
import h5py
dataset_types = ('train', 'valid', 'test')
dataset_path = 'data/cluttered_{}.t7'
outpath = 'data/cluttered_mnist.h5'
with h5py.File(outpath, 'w') as hf:
for dataset_type in dataset_types:
inpath = dataset_path.format(dataset_type)
print('... load {}'.format(inpath))
o = torchfile.load(inpath)
print('... save {}, shape: {}'.format('X_{}'.format(dataset_type), o[b'data'].shape))
hf.create_dataset('X_{}'.format(dataset_type), data=o[b'data'])
print('... save {}, shape: {}'.format('Y_{}'.format(dataset_type), o[b'labels'].shape))
hf.create_dataset('Y_{}'.format(dataset_type), data=o[b'labels'])
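# Added read-back check (not in the original script): confirm the datasets
# were written with the expected shapes.
with h5py.File(outpath, 'r') as hf:
    for dataset_type in dataset_types:
        print(dataset_type,
              hf['X_{}'.format(dataset_type)].shape,
              hf['Y_{}'.format(dataset_type)].shape)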
|
[
"h5py.File",
"torchfile.load"
] |
[((153, 176), 'h5py.File', 'h5py.File', (['outpath', '"""w"""'], {}), "(outpath, 'w')\n", (162, 176), False, 'import h5py\n'), ((317, 339), 'torchfile.load', 'torchfile.load', (['inpath'], {}), '(inpath)\n', (331, 339), False, 'import torchfile\n')]
|
from __future__ import absolute_import
import numpy
import sklearn.neighbors
import sklearn.preprocessing
from ann_benchmarks.algorithms.base import BaseANN
from datasketch import MinHash
class LSHF(BaseANN):
def __init__(self, metric, n_estimators=10, n_candidates=50):
self.name = 'LSHF(n_est=%d, n_cand=%d)' % (n_estimators, n_candidates)
self._metric = metric
self._n_estimators = n_estimators
self._n_candidates = n_candidates
    def fit(self, X):
        self.index = numpy.empty([0, 128])
        # Keep the MinHash objects alongside the stacked digest matrix.
        self._index_minhash = []
for i, x in enumerate(X):
m = MinHash(num_perm=128)
for e in x:
m.update(str(e).encode('utf-8'))
self.index = numpy.vstack((self.index, m.digest()))
self._index_minhash.append(m)
self._lshf = sklearn.neighbors.LSHForest(
n_estimators=self._n_estimators, n_candidates=self._n_candidates)
if self._metric == 'angular':
X = sklearn.preprocessing.normalize(X, axis=1, norm='l2')
self._lshf.fit(self.index)
def query(self, v, n):
if self._metric == 'angular':
v = sklearn.preprocessing.normalize([v], axis=1, norm='l2')[0]
m = MinHash(num_perm=128)
for e in v:
m.update(str(e).encode('utf-8'))
return self._lshf.kneighbors(
[m.digest()], return_distance=False, n_neighbors=n)[0]
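# Minimal usage sketch (added for illustration; sklearn.neighbors.LSHForest
# only exists in older scikit-learn releases, so this is left as a comment):
# algo = LSHF('angular')
# algo.fit(numpy.random.rand(100, 16))
# print(algo.query(numpy.random.rand(16), n=5))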
|
[
"datasketch.MinHash"
] |
[((1198, 1219), 'datasketch.MinHash', 'MinHash', ([], {'num_perm': '(128)'}), '(num_perm=128)\n', (1205, 1219), False, 'from datasketch import MinHash\n'), ((573, 594), 'datasketch.MinHash', 'MinHash', ([], {'num_perm': '(128)'}), '(num_perm=128)\n', (580, 594), False, 'from datasketch import MinHash\n')]
|
# Load pickled data
import cv2
import pickle
import numpy as np
import matplotlib.pyplot as plt
from sklearn.utils import shuffle
import tensorflow as tf
import MyAlexNet
import DataAugmentation as func
import glob
import csv
# TODO: Fill this in based on where you saved the training and testing data
training_file = "train.p"
validation_file = "valid.p"
testing_file = "test.p"
with open(training_file, mode='rb') as f:
train = pickle.load(f)
with open(validation_file, mode='rb') as f:
valid = pickle.load(f)
with open(testing_file, mode='rb') as f:
test = pickle.load(f)
X_train, y_train, X_train_size, X_train_bbox = train['features'], train['labels'], train['sizes'], train['coords']
X_valid, y_valid, X_valid_size, X_valid_bbox = valid['features'], valid['labels'], valid['sizes'], valid['coords']
X_test, y_test, X_test_size, X_test_bbox = test['features'], test['labels'], test['sizes'], test['coords']
# TODO: Number of training examples
n_train = len(X_train_size)
# TODO: Number of validation examples
print(len(X_valid_size))
n_validation = len(X_valid_size)
# TODO: Number of testing examples.
n_test = len(X_test_size)
# TODO: What's the shape of a traffic sign image?
print(X_train.shape)
# TODO: How many unique classes/labels there are in the dataset.
n_classes = len(np.unique(y_train))
# TODO: Number of training examples
n_train = len(X_train_size)
# TODO: Number of testing examples.
n_test = len(X_test_size)
# TODO: What's the shape of a traffic sign image?
image_shape = X_train.shape
# TODO: How many unique classes/labels there are in the dataset.
n_classes = len(np.unique(y_train))
img_size = X_train.shape[1] # Size of input images
print(img_size)
print("Number of training examples =", n_train)
print("Number of testing examples =", n_test)
print("Image data shape =", image_shape)
print("Number of classes =", n_classes)
### Data exploration visualization goes here.
# Visualizations will be shown in the notebook.
num_of_samples = []
plt.figure(figsize=(12, 16.5))
for i in range(0, n_classes):
plt.subplot(11, 4, i + 1)
x_selected = X_train[y_train == i]
plt.imshow(x_selected[0, :, :, :]) # draw the first image of each class
plt.title(i)
plt.axis('off')
num_of_samples.append(len(x_selected))
plt.show()
# Plot number of images per class
plt.figure(figsize=(12, 4))
plt.bar(range(0, n_classes), num_of_samples)
plt.title("Distribution of the training dataset")
plt.xlabel("Class number")
plt.ylabel("Number of images")
plt.show()
print("Min number of images in training data per class =", min(num_of_samples))
print("Max number of images in training data per class =", max(num_of_samples))
### Data exploration visualization goes here.
# Visualizations will be shown in the notebook.
num_of_samples = []
plt.figure(figsize=(12, 16.5))
for i in range(0, n_classes):
plt.subplot(11, 4, i + 1)
x_selected = X_valid[y_valid == i]
plt.imshow(x_selected[0, :, :, :]) # draw the first image of each class
plt.title(i)
plt.axis('off')
num_of_samples.append(len(x_selected))
plt.show()
# Plot number of images per class
plt.figure(figsize=(12, 4))
plt.bar(range(0, n_classes), num_of_samples)
plt.title("Distribution of the validation dataset")
plt.xlabel("Class number")
plt.ylabel("Number of images")
plt.show()
print("Min number of images in vlidation data per class =", min(num_of_samples))
print("Max number of images in validation data per class =", max(num_of_samples))
### Data exploration visualization goes here.
# Visualizations will be shown in the notebook.
num_of_samples = []
plt.figure(figsize=(12, 16.5))
for i in range(0, n_classes):
plt.subplot(11, 4, i + 1)
x_selected = X_test[y_test == i]
plt.imshow(x_selected[0, :, :, :]) # draw the first image of each class
plt.title(i)
plt.axis('off')
num_of_samples.append(len(x_selected))
plt.show()
# Plot number of images per class
plt.figure(figsize=(12, 4))
plt.bar(range(0, n_classes), num_of_samples)
plt.title("Distribution of the test dataset")
plt.xlabel("Class number")
plt.ylabel("Number of images")
plt.show()
print("Min number of images in test data per class =", min(num_of_samples))
print("Max number of images in test data per class =", max(num_of_samples))
### For Data Augmentation
# X_train_aug = []
# y_train_aug = []
# def create_data(n):
# for i in range(100):
# img=X_train[i]
# X_train_aug.append(img)
# y_train_aug.append(y_train[i])
# #Generate n new images out of each input image
# for j in range(n):
# X_train_aug.append(augment_img(img))
# y_train_aug.append(y_train[i])
# X_train_crop = np.ndarray(shape=[X_train.shape[0],IMAGE_SIZE,IMAGE_SIZE,
# 3],dtype = np.uint8)
# for i in range(n_train):
# X_train_crop[i] = crop_img(X_train[i])
# print(i)
print(X_train.shape)
print(X_train.dtype)
print(y_train.shape)
print(y_train.dtype)
print(X_valid.shape)
print(X_valid.dtype)
print(y_valid.shape)
print(y_train.dtype)
print(X_test.shape)
print(X_test.dtype)
print(y_test.shape)
print(y_test.dtype)
filename = "updated_test.p"
file = open(filename, 'rb')
X_test = pickle.load(file)
filename = "updated_train.p"
file = open(filename, 'rb')
X_train = pickle.load(file)
filename = "updated_valid.p"
file = open(filename, 'rb')
X_valid = pickle.load(file)
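# Note (added, assumption): the "updated_*.p" pickles are taken to hold the
# images already cropped to 51x51x3 (matching the placeholder shape defined
# below), replacing the raw-resolution arrays loaded from train.p/valid.p/test.p.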
test = X_train[10000]
transformation = func.transform_img(test)
augmentation = func.augment_img(test)
func.show_imgs(test, transformation, augmentation)
print(X_train.shape)
print(X_train.dtype)
print(y_train.shape)
print(y_train.dtype)
print(X_valid.shape)
print(X_valid.dtype)
print(y_valid.shape)
print(y_train.dtype)
print(X_test.shape)
print(X_test.dtype)
print(y_test.shape)
print(y_test.dtype)
# Data Normalization
print(np.mean(X_train))
X_train = (X_train - np.mean(X_train)) / 255.0
print(np.mean(X_train))
print(np.mean(X_valid))
X_valid = (X_valid - np.mean(X_valid)) / 255.0
print(np.mean(X_valid))
print(np.mean(X_test))
X_test = (X_test - np.mean(X_test)) / 255.0
print(np.mean(X_test))
## Shuffle the training dataset
print(X_train.shape)
print(y_train.shape)
X_train, y_train = shuffle(X_train, y_train)
print(X_train.shape)
print(y_train.shape)
print('done')
EPOCHS = 90
BATCH_SIZE = 128
print('done')
tf.reset_default_graph()
x = tf.placeholder(tf.float32, (None, 51, 51, 3))
y = tf.placeholder(tf.int32, (None))
keep_prob = tf.placeholder(tf.float32) # probability to keep units
one_hot_y = tf.one_hot(y, 43)
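# Note (added): this is TensorFlow 1.x graph-mode code; `x` takes batches of
# 51x51x3 preprocessed sign images, `y` the integer class ids (43 classes),
# and `keep_prob` the dropout keep probability fed in at train/eval time.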
print('done')
rate = 0.0005
save_file = './new_model.ckpt'
logits = MyAlexNet.AlexNet(x)
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=one_hot_y)
loss_operation = tf.reduce_mean(cross_entropy)
optimizer = tf.train.AdamOptimizer(learning_rate = rate)
training_operation = optimizer.minimize(loss_operation)
correct_prediction = tf.equal(tf.argmax(logits, 1), tf.argmax(one_hot_y, 1))
accuracy_operation = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
Saver = tf.train.Saver()
def evaluate(X_data, y_data):
num_examples = len(X_data)
total_accuracy = 0
sess = tf.get_default_session()
for offset in range(0, num_examples, BATCH_SIZE):
batch_x, batch_y = X_data[offset:offset+BATCH_SIZE], y_data[offset:offset+BATCH_SIZE]
accuracy = sess.run(accuracy_operation, feed_dict={x: batch_x, y: batch_y, keep_prob: 1.0})
total_accuracy += (accuracy * len(batch_x))
return total_accuracy / num_examples
print('done')
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
num_examples = len(X_train)
print("Training...")
print()
for i in range(EPOCHS):
X_train, y_train = shuffle(X_train, y_train)
print("Epoch: ", i)
for offset in range(0, num_examples, BATCH_SIZE):
end = offset + BATCH_SIZE
batch_x, batch_y = X_train[offset:end], y_train[offset:end]
sess.run(training_operation, feed_dict={x: batch_x, y: batch_y, keep_prob: 0.75})
validation_accuracy = evaluate(X_valid, y_valid)
print("EPOCH {} ...".format(i + 1))
print("Validation Accuracy = {:.3f}".format(validation_accuracy))
print()
Saver.save(sess,save_file)
print("Model saved")
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
saver2 = tf.train.import_meta_graph('./Trained Model/final_model.ckpt.meta')
saver2.restore(sess, "./Trained Model/final_model.ckpt")
test_accuracy = evaluate(X_test, y_test)
print("Test Set Accuracy = {:.3f}".format(test_accuracy))
graph = tf.get_default_graph()
signs_class=[]
with open('signnames.csv', 'rt') as csvfile:
reader = csv.DictReader(csvfile, delimiter=',')
for row in reader:
signs_class.append((row['SignName']))
my_labels = [37,38,17,15,12,13,1,0,35,20,3,5]
test = func.load_images("./new_images1/")
test_images=X_test_data=np.uint8(np.zeros((len(test),51,51,3)))
test_images_labels=np.ndarray(shape=[len(test)],dtype=np.uint8)
test_images[0:12]=test[0:12]
test_images_labels[0:12]=my_labels[0:12]
plt.figure(figsize=(12, 8))
for i in range(len(test)):
plt.subplot(3, 4, i+1)
plt.imshow(test[i])
plt.title(signs_class[my_labels[i]])
plt.axis('off')
plt.show()
test_images=(test_images-np.mean(test_images))/255.0
### Visualize the softmax probabilities here.
with tf.Session(graph=graph) as sess:
sess.run(tf.global_variables_initializer())
saver2 = tf.train.import_meta_graph('./Trained Model/final_model.ckpt.meta')
saver2.restore(sess, "./Trained Model/final_model.ckpt")
new_test_accuracy = evaluate(test_images, test_images_labels)
print("New Test Set Accuracy = {:.3f}".format(new_test_accuracy))
softmax_logits = tf.nn.softmax(logits)
top_k = tf.nn.top_k(softmax_logits, k=5)
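# Note (added): softmax converts the restored logits into class probabilities,
# and tf.nn.top_k keeps the five most likely classes per image so they can be
# plotted as horizontal bars below.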
with tf.Session(graph=graph) as sess:
sess.run(tf.global_variables_initializer())
saver2 = tf.train.import_meta_graph('./Trained Model/final_model.ckpt.meta')
saver2.restore(sess, "./Trained Model/final_model.ckpt")
my_softmax_logits = sess.run(softmax_logits, feed_dict={x: test_images, keep_prob: 1.0})
my_top_k = sess.run(top_k, feed_dict={x: test_images, keep_prob: 1.0})
print(len(test))
plt.figure(figsize=(16, 21))
for i in range(12):
plt.subplot(12, 2, 2*i+1)
plt.imshow(test[i])
plt.title(i)
plt.axis('off')
plt.subplot(12, 2, 2*i+2)
plt.barh(np.arange(1, 6, 1), my_top_k.values[i, :])
labs=[signs_class[j] for j in my_top_k.indices[i]]
plt.yticks(np.arange(1, 6, 1), labs)
plt.show()
my_labels = [3, 11, 1, 12, 38, 34, 18, 25]
test = []
for i, img in enumerate(glob.glob('./new_images2/*x.png')):
image = func.crop_img(cv2.imread(img))
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
test.append(image)
test_images=X_test_data=np.uint8(np.zeros((len(test),51,51,3)))
test_images_labels=np.ndarray(shape=[len(test)],dtype=np.uint8)
test_images[0:len(test)]=test[0:len(test)]
test_images_labels[0:len(test)]=my_labels[0:len(test)]
plt.figure(figsize=(12, 8))
for i in range(len(test)):
plt.subplot(3, 4, i+1)
plt.imshow(test[i])
plt.title(signs_class[my_labels[i]])
plt.axis('off')
plt.show()
test_images=(test_images-np.mean(test_images))/255.0
### Visualize the softmax probabilities here.
with tf.Session(graph=graph) as sess:
sess.run(tf.global_variables_initializer())
saver2 = tf.train.import_meta_graph('./Trained Model/final_model.ckpt.meta')
saver2.restore(sess, "./Trained Model/final_model.ckpt")
new_test_accuracy = evaluate(test_images, test_images_labels)
print("New Test Set Accuracy = {:.3f}".format(new_test_accuracy))
softmax_logits = tf.nn.softmax(logits)
top_k = tf.nn.top_k(softmax_logits, k=5)
with tf.Session(graph=graph) as sess:
sess.run(tf.global_variables_initializer())
saver2 = tf.train.import_meta_graph('./Trained Model/final_model.ckpt.meta')
saver2.restore(sess, "./Trained Model/final_model.ckpt")
my_softmax_logits = sess.run(softmax_logits, feed_dict={x: test_images, keep_prob: 1.0})
my_top_k = sess.run(top_k, feed_dict={x: test_images, keep_prob: 1.0})
print(len(test))
plt.figure(figsize=(16, 21))
for i in range(len(test)):
plt.subplot(12, 2, 2*i+1)
plt.imshow(test[i])
plt.title(i)
plt.axis('off')
plt.subplot(12, 2, 2*i+2)
plt.barh(np.arange(1, 6, 1), my_top_k.values[i, :])
labs=[signs_class[j] for j in my_top_k.indices[i]]
plt.yticks(np.arange(1, 6, 1), labs)
plt.show()
### Visualize your network's feature maps here.
### Feel free to use as many code cells as needed.
# image_input: the test image being fed into the network to produce the feature maps
# tf_activation: should be a tf variable name used during your training procedure that represents the calculated state of a specific weight layer
# activation_min/max: can be used to view the activation contrast in more detail, by default matplot sets min and max to the actual min and max values of the output
# plt_num: used to plot out multiple different weight feature map sets on the same block, just extend the plt number for each new feature map entry
#
#def outputFeatureMap(image_input, tf_activation, activation_min=-1, activation_max=-1 ,plt_num=1):
# # Here make sure to preprocess your image_input in a way your network expects
# # with size, normalization, ect if needed
# # image_input =
# # Note: x should be the same name as your network's tensorflow data placeholder variable
# # If you get an error tf_activation is not defined it may be having trouble
# #accessing the variable from inside a function
# activation = tf_activation.eval(session=sess,feed_dict={x : image_input})
# featuremaps = activation.shape[3]
# plt.figure(plt_num, figsize=(15,15))
# for featuremap in range(featuremaps):
# plt.subplot(6,8, featuremap+1) # sets the number of feature maps to show on each row and column
# plt.title('FeatureMap ' + str(featuremap)) # displays the feature map number
# if activation_min != -1 & activation_max != -1:
# plt.imshow(activation[0,:,:, featuremap], interpolation="nearest", vmin =activation_min, vmax=activation_max, cmap="gray")
# elif activation_max != -1:
# plt.imshow(activation[0,:,:, featuremap], interpolation="nearest", vmax=activation_max, cmap="gray")
# elif activation_min !=-1:
# plt.imshow(activation[0,:,:, featuremap], interpolation="nearest", vmin=activation_min, cmap="gray")
# else:
# plt.imshow(activation[0,:,:, featuremap], interpolation="nearest", cmap="gray")
#
#
#
#
#test1=X_train[6500]
#plt.imshow(test1)
#test1= (test1- np.mean(test1)) / 255.0
#outputFeatureMap(test1)
|
[
"matplotlib.pyplot.title",
"DataAugmentation.transform_img",
"tensorflow.reset_default_graph",
"matplotlib.pyplot.figure",
"pickle.load",
"numpy.mean",
"numpy.arange",
"glob.glob",
"tensorflow.get_default_graph",
"numpy.unique",
"matplotlib.pyplot.xlabel",
"tensorflow.nn.softmax",
"tensorflow.one_hot",
"tensorflow.nn.softmax_cross_entropy_with_logits",
"cv2.cvtColor",
"tensorflow.nn.top_k",
"matplotlib.pyplot.imshow",
"tensorflow.placeholder",
"tensorflow.cast",
"matplotlib.pyplot.show",
"tensorflow.train.Saver",
"tensorflow.global_variables_initializer",
"csv.DictReader",
"tensorflow.reduce_mean",
"tensorflow.Session",
"DataAugmentation.load_images",
"DataAugmentation.augment_img",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.subplot",
"tensorflow.train.import_meta_graph",
"tensorflow.argmax",
"matplotlib.pyplot.axis",
"cv2.imread",
"DataAugmentation.show_imgs",
"MyAlexNet.AlexNet",
"sklearn.utils.shuffle",
"tensorflow.train.AdamOptimizer",
"tensorflow.get_default_session"
] |
[((2052, 2082), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 16.5)'}), '(figsize=(12, 16.5))\n', (2062, 2082), True, 'import matplotlib.pyplot as plt\n'), ((2347, 2357), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2355, 2357), True, 'import matplotlib.pyplot as plt\n'), ((2396, 2423), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 4)'}), '(figsize=(12, 4))\n', (2406, 2423), True, 'import matplotlib.pyplot as plt\n'), ((2471, 2520), 'matplotlib.pyplot.title', 'plt.title', (['"""Distribution of the training dataset"""'], {}), "('Distribution of the training dataset')\n", (2480, 2520), True, 'import matplotlib.pyplot as plt\n'), ((2522, 2548), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Class number"""'], {}), "('Class number')\n", (2532, 2548), True, 'import matplotlib.pyplot as plt\n'), ((2550, 2580), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Number of images"""'], {}), "('Number of images')\n", (2560, 2580), True, 'import matplotlib.pyplot as plt\n'), ((2582, 2592), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2590, 2592), True, 'import matplotlib.pyplot as plt\n'), ((2879, 2909), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 16.5)'}), '(figsize=(12, 16.5))\n', (2889, 2909), True, 'import matplotlib.pyplot as plt\n'), ((3174, 3184), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3182, 3184), True, 'import matplotlib.pyplot as plt\n'), ((3223, 3250), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 4)'}), '(figsize=(12, 4))\n', (3233, 3250), True, 'import matplotlib.pyplot as plt\n'), ((3298, 3349), 'matplotlib.pyplot.title', 'plt.title', (['"""Distribution of the validation dataset"""'], {}), "('Distribution of the validation dataset')\n", (3307, 3349), True, 'import matplotlib.pyplot as plt\n'), ((3351, 3377), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Class number"""'], {}), "('Class number')\n", (3361, 3377), True, 'import matplotlib.pyplot as plt\n'), ((3379, 3409), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Number of images"""'], {}), "('Number of images')\n", (3389, 3409), True, 'import matplotlib.pyplot as plt\n'), ((3411, 3421), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3419, 3421), True, 'import matplotlib.pyplot as plt\n'), ((3711, 3741), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 16.5)'}), '(figsize=(12, 16.5))\n', (3721, 3741), True, 'import matplotlib.pyplot as plt\n'), ((4004, 4014), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4012, 4014), True, 'import matplotlib.pyplot as plt\n'), ((4053, 4080), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 4)'}), '(figsize=(12, 4))\n', (4063, 4080), True, 'import matplotlib.pyplot as plt\n'), ((4128, 4173), 'matplotlib.pyplot.title', 'plt.title', (['"""Distribution of the test dataset"""'], {}), "('Distribution of the test dataset')\n", (4137, 4173), True, 'import matplotlib.pyplot as plt\n'), ((4175, 4201), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Class number"""'], {}), "('Class number')\n", (4185, 4201), True, 'import matplotlib.pyplot as plt\n'), ((4203, 4233), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Number of images"""'], {}), "('Number of images')\n", (4213, 4233), True, 'import matplotlib.pyplot as plt\n'), ((4235, 4245), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4243, 4245), True, 'import matplotlib.pyplot as plt\n'), ((5367, 5384), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (5378, 5384), 
False, 'import pickle\n'), ((5457, 5474), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (5468, 5474), False, 'import pickle\n'), ((5547, 5564), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (5558, 5564), False, 'import pickle\n'), ((5608, 5632), 'DataAugmentation.transform_img', 'func.transform_img', (['test'], {}), '(test)\n', (5626, 5632), True, 'import DataAugmentation as func\n'), ((5649, 5671), 'DataAugmentation.augment_img', 'func.augment_img', (['test'], {}), '(test)\n', (5665, 5671), True, 'import DataAugmentation as func\n'), ((5673, 5723), 'DataAugmentation.show_imgs', 'func.show_imgs', (['test', 'transformation', 'augmentation'], {}), '(test, transformation, augmentation)\n', (5687, 5723), True, 'import DataAugmentation as func\n'), ((6412, 6437), 'sklearn.utils.shuffle', 'shuffle', (['X_train', 'y_train'], {}), '(X_train, y_train)\n', (6419, 6437), False, 'from sklearn.utils import shuffle\n'), ((6550, 6574), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (6572, 6574), True, 'import tensorflow as tf\n'), ((6582, 6627), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '(None, 51, 51, 3)'], {}), '(tf.float32, (None, 51, 51, 3))\n', (6596, 6627), True, 'import tensorflow as tf\n'), ((6633, 6663), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32', 'None'], {}), '(tf.int32, None)\n', (6647, 6663), True, 'import tensorflow as tf\n'), ((6679, 6705), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {}), '(tf.float32)\n', (6693, 6705), True, 'import tensorflow as tf\n'), ((6748, 6765), 'tensorflow.one_hot', 'tf.one_hot', (['y', '(43)'], {}), '(y, 43)\n', (6758, 6765), True, 'import tensorflow as tf\n'), ((6844, 6864), 'MyAlexNet.AlexNet', 'MyAlexNet.AlexNet', (['x'], {}), '(x)\n', (6861, 6864), False, 'import MyAlexNet\n'), ((6882, 6954), 'tensorflow.nn.softmax_cross_entropy_with_logits', 'tf.nn.softmax_cross_entropy_with_logits', ([], {'logits': 'logits', 'labels': 'one_hot_y'}), '(logits=logits, labels=one_hot_y)\n', (6921, 6954), True, 'import tensorflow as tf\n'), ((6973, 7002), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['cross_entropy'], {}), '(cross_entropy)\n', (6987, 7002), True, 'import tensorflow as tf\n'), ((7016, 7058), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': 'rate'}), '(learning_rate=rate)\n', (7038, 7058), True, 'import tensorflow as tf\n'), ((7289, 7305), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (7303, 7305), True, 'import tensorflow as tf\n'), ((9190, 9224), 'DataAugmentation.load_images', 'func.load_images', (['"""./new_images1/"""'], {}), "('./new_images1/')\n", (9206, 9224), True, 'import DataAugmentation as func\n'), ((9428, 9455), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 8)'}), '(figsize=(12, 8))\n', (9438, 9455), True, 'import matplotlib.pyplot as plt\n'), ((9602, 9612), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (9610, 9612), True, 'import matplotlib.pyplot as plt\n'), ((10104, 10125), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['logits'], {}), '(logits)\n', (10117, 10125), True, 'import tensorflow as tf\n'), ((10135, 10167), 'tensorflow.nn.top_k', 'tf.nn.top_k', (['softmax_logits'], {'k': '(5)'}), '(softmax_logits, k=5)\n', (10146, 10167), True, 'import tensorflow as tf\n'), ((10589, 10617), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(16, 21)'}), '(figsize=(16, 21))\n', (10599, 10617), True, 'import matplotlib.pyplot as plt\n'), ((10922, 10932), 
'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (10930, 10932), True, 'import matplotlib.pyplot as plt\n'), ((11408, 11435), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 8)'}), '(figsize=(12, 8))\n', (11418, 11435), True, 'import matplotlib.pyplot as plt\n'), ((11582, 11592), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (11590, 11592), True, 'import matplotlib.pyplot as plt\n'), ((12084, 12105), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['logits'], {}), '(logits)\n', (12097, 12105), True, 'import tensorflow as tf\n'), ((12115, 12147), 'tensorflow.nn.top_k', 'tf.nn.top_k', (['softmax_logits'], {'k': '(5)'}), '(softmax_logits, k=5)\n', (12126, 12147), True, 'import tensorflow as tf\n'), ((12569, 12597), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(16, 21)'}), '(figsize=(16, 21))\n', (12579, 12597), True, 'import matplotlib.pyplot as plt\n'), ((12909, 12919), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (12917, 12919), True, 'import matplotlib.pyplot as plt\n'), ((458, 472), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (469, 472), False, 'import pickle\n'), ((531, 545), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (542, 545), False, 'import pickle\n'), ((600, 614), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (611, 614), False, 'import pickle\n'), ((1345, 1363), 'numpy.unique', 'np.unique', (['y_train'], {}), '(y_train)\n', (1354, 1363), True, 'import numpy as np\n'), ((1661, 1679), 'numpy.unique', 'np.unique', (['y_train'], {}), '(y_train)\n', (1670, 1679), True, 'import numpy as np\n'), ((2119, 2144), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(11)', '(4)', '(i + 1)'], {}), '(11, 4, i + 1)\n', (2130, 2144), True, 'import matplotlib.pyplot as plt\n'), ((2190, 2224), 'matplotlib.pyplot.imshow', 'plt.imshow', (['x_selected[0, :, :, :]'], {}), '(x_selected[0, :, :, :])\n', (2200, 2224), True, 'import matplotlib.pyplot as plt\n'), ((2268, 2280), 'matplotlib.pyplot.title', 'plt.title', (['i'], {}), '(i)\n', (2277, 2280), True, 'import matplotlib.pyplot as plt\n'), ((2286, 2301), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (2294, 2301), True, 'import matplotlib.pyplot as plt\n'), ((2946, 2971), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(11)', '(4)', '(i + 1)'], {}), '(11, 4, i + 1)\n', (2957, 2971), True, 'import matplotlib.pyplot as plt\n'), ((3017, 3051), 'matplotlib.pyplot.imshow', 'plt.imshow', (['x_selected[0, :, :, :]'], {}), '(x_selected[0, :, :, :])\n', (3027, 3051), True, 'import matplotlib.pyplot as plt\n'), ((3095, 3107), 'matplotlib.pyplot.title', 'plt.title', (['i'], {}), '(i)\n', (3104, 3107), True, 'import matplotlib.pyplot as plt\n'), ((3113, 3128), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (3121, 3128), True, 'import matplotlib.pyplot as plt\n'), ((3778, 3803), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(11)', '(4)', '(i + 1)'], {}), '(11, 4, i + 1)\n', (3789, 3803), True, 'import matplotlib.pyplot as plt\n'), ((3847, 3881), 'matplotlib.pyplot.imshow', 'plt.imshow', (['x_selected[0, :, :, :]'], {}), '(x_selected[0, :, :, :])\n', (3857, 3881), True, 'import matplotlib.pyplot as plt\n'), ((3925, 3937), 'matplotlib.pyplot.title', 'plt.title', (['i'], {}), '(i)\n', (3934, 3937), True, 'import matplotlib.pyplot as plt\n'), ((3943, 3958), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (3951, 3958), True, 'import matplotlib.pyplot as plt\n'), ((6025, 6041), 'numpy.mean', 'np.mean', (['X_train'], {}), 
'(X_train)\n', (6032, 6041), True, 'import numpy as np\n'), ((6098, 6114), 'numpy.mean', 'np.mean', (['X_train'], {}), '(X_train)\n', (6105, 6114), True, 'import numpy as np\n'), ((6125, 6141), 'numpy.mean', 'np.mean', (['X_valid'], {}), '(X_valid)\n', (6132, 6141), True, 'import numpy as np\n'), ((6198, 6214), 'numpy.mean', 'np.mean', (['X_valid'], {}), '(X_valid)\n', (6205, 6214), True, 'import numpy as np\n'), ((6225, 6240), 'numpy.mean', 'np.mean', (['X_test'], {}), '(X_test)\n', (6232, 6240), True, 'import numpy as np\n'), ((6294, 6309), 'numpy.mean', 'np.mean', (['X_test'], {}), '(X_test)\n', (6301, 6309), True, 'import numpy as np\n'), ((7153, 7173), 'tensorflow.argmax', 'tf.argmax', (['logits', '(1)'], {}), '(logits, 1)\n', (7162, 7173), True, 'import tensorflow as tf\n'), ((7175, 7198), 'tensorflow.argmax', 'tf.argmax', (['one_hot_y', '(1)'], {}), '(one_hot_y, 1)\n', (7184, 7198), True, 'import tensorflow as tf\n'), ((7237, 7276), 'tensorflow.cast', 'tf.cast', (['correct_prediction', 'tf.float32'], {}), '(correct_prediction, tf.float32)\n', (7244, 7276), True, 'import tensorflow as tf\n'), ((7407, 7431), 'tensorflow.get_default_session', 'tf.get_default_session', ([], {}), '()\n', (7429, 7431), True, 'import tensorflow as tf\n'), ((7805, 7817), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (7815, 7817), True, 'import tensorflow as tf\n'), ((8587, 8599), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (8597, 8599), True, 'import tensorflow as tf\n'), ((8672, 8739), 'tensorflow.train.import_meta_graph', 'tf.train.import_meta_graph', (['"""./Trained Model/final_model.ckpt.meta"""'], {}), "('./Trained Model/final_model.ckpt.meta')\n", (8698, 8739), True, 'import tensorflow as tf\n'), ((8924, 8946), 'tensorflow.get_default_graph', 'tf.get_default_graph', ([], {}), '()\n', (8944, 8946), True, 'import tensorflow as tf\n'), ((9025, 9063), 'csv.DictReader', 'csv.DictReader', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (9039, 9063), False, 'import csv\n'), ((9489, 9513), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(3)', '(4)', '(i + 1)'], {}), '(3, 4, i + 1)\n', (9500, 9513), True, 'import matplotlib.pyplot as plt\n'), ((9517, 9536), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test[i]'], {}), '(test[i])\n', (9527, 9536), True, 'import matplotlib.pyplot as plt\n'), ((9543, 9579), 'matplotlib.pyplot.title', 'plt.title', (['signs_class[my_labels[i]]'], {}), '(signs_class[my_labels[i]])\n', (9552, 9579), True, 'import matplotlib.pyplot as plt\n'), ((9585, 9600), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (9593, 9600), True, 'import matplotlib.pyplot as plt\n'), ((9720, 9743), 'tensorflow.Session', 'tf.Session', ([], {'graph': 'graph'}), '(graph=graph)\n', (9730, 9743), True, 'import tensorflow as tf\n'), ((9816, 9883), 'tensorflow.train.import_meta_graph', 'tf.train.import_meta_graph', (['"""./Trained Model/final_model.ckpt.meta"""'], {}), "('./Trained Model/final_model.ckpt.meta')\n", (9842, 9883), True, 'import tensorflow as tf\n'), ((10174, 10197), 'tensorflow.Session', 'tf.Session', ([], {'graph': 'graph'}), '(graph=graph)\n', (10184, 10197), True, 'import tensorflow as tf\n'), ((10270, 10337), 'tensorflow.train.import_meta_graph', 'tf.train.import_meta_graph', (['"""./Trained Model/final_model.ckpt.meta"""'], {}), "('./Trained Model/final_model.ckpt.meta')\n", (10296, 10337), True, 'import tensorflow as tf\n'), ((10644, 10673), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(12)', '(2)', '(2 * i + 1)'], {}), '(12, 2, 
2 * i + 1)\n', (10655, 10673), True, 'import matplotlib.pyplot as plt\n'), ((10675, 10694), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test[i]'], {}), '(test[i])\n', (10685, 10694), True, 'import matplotlib.pyplot as plt\n'), ((10701, 10713), 'matplotlib.pyplot.title', 'plt.title', (['i'], {}), '(i)\n', (10710, 10713), True, 'import matplotlib.pyplot as plt\n'), ((10719, 10734), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (10727, 10734), True, 'import matplotlib.pyplot as plt\n'), ((10740, 10769), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(12)', '(2)', '(2 * i + 2)'], {}), '(12, 2, 2 * i + 2)\n', (10751, 10769), True, 'import matplotlib.pyplot as plt\n'), ((11015, 11048), 'glob.glob', 'glob.glob', (['"""./new_images2/*x.png"""'], {}), "('./new_images2/*x.png')\n", (11024, 11048), False, 'import glob\n'), ((11108, 11146), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2RGB'], {}), '(image, cv2.COLOR_BGR2RGB)\n', (11120, 11146), False, 'import cv2\n'), ((11469, 11493), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(3)', '(4)', '(i + 1)'], {}), '(3, 4, i + 1)\n', (11480, 11493), True, 'import matplotlib.pyplot as plt\n'), ((11497, 11516), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test[i]'], {}), '(test[i])\n', (11507, 11516), True, 'import matplotlib.pyplot as plt\n'), ((11523, 11559), 'matplotlib.pyplot.title', 'plt.title', (['signs_class[my_labels[i]]'], {}), '(signs_class[my_labels[i]])\n', (11532, 11559), True, 'import matplotlib.pyplot as plt\n'), ((11565, 11580), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (11573, 11580), True, 'import matplotlib.pyplot as plt\n'), ((11700, 11723), 'tensorflow.Session', 'tf.Session', ([], {'graph': 'graph'}), '(graph=graph)\n', (11710, 11723), True, 'import tensorflow as tf\n'), ((11796, 11863), 'tensorflow.train.import_meta_graph', 'tf.train.import_meta_graph', (['"""./Trained Model/final_model.ckpt.meta"""'], {}), "('./Trained Model/final_model.ckpt.meta')\n", (11822, 11863), True, 'import tensorflow as tf\n'), ((12154, 12177), 'tensorflow.Session', 'tf.Session', ([], {'graph': 'graph'}), '(graph=graph)\n', (12164, 12177), True, 'import tensorflow as tf\n'), ((12250, 12317), 'tensorflow.train.import_meta_graph', 'tf.train.import_meta_graph', (['"""./Trained Model/final_model.ckpt.meta"""'], {}), "('./Trained Model/final_model.ckpt.meta')\n", (12276, 12317), True, 'import tensorflow as tf\n'), ((12631, 12660), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(12)', '(2)', '(2 * i + 1)'], {}), '(12, 2, 2 * i + 1)\n', (12642, 12660), True, 'import matplotlib.pyplot as plt\n'), ((12662, 12681), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test[i]'], {}), '(test[i])\n', (12672, 12681), True, 'import matplotlib.pyplot as plt\n'), ((12688, 12700), 'matplotlib.pyplot.title', 'plt.title', (['i'], {}), '(i)\n', (12697, 12700), True, 'import matplotlib.pyplot as plt\n'), ((12706, 12721), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (12714, 12721), True, 'import matplotlib.pyplot as plt\n'), ((12727, 12756), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(12)', '(2)', '(2 * i + 2)'], {}), '(12, 2, 2 * i + 2)\n', (12738, 12756), True, 'import matplotlib.pyplot as plt\n'), ((6065, 6081), 'numpy.mean', 'np.mean', (['X_train'], {}), '(X_train)\n', (6072, 6081), True, 'import numpy as np\n'), ((6165, 6181), 'numpy.mean', 'np.mean', (['X_valid'], {}), '(X_valid)\n', (6172, 6181), True, 'import numpy as np\n'), ((6262, 6277), 'numpy.mean', 'np.mean', (['X_test'], {}), 
'(X_test)\n', (6269, 6277), True, 'import numpy as np\n'), ((7841, 7874), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (7872, 7874), True, 'import tensorflow as tf\n'), ((8005, 8030), 'sklearn.utils.shuffle', 'shuffle', (['X_train', 'y_train'], {}), '(X_train, y_train)\n', (8012, 8030), False, 'from sklearn.utils import shuffle\n'), ((8623, 8656), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (8654, 8656), True, 'import tensorflow as tf\n'), ((9639, 9659), 'numpy.mean', 'np.mean', (['test_images'], {}), '(test_images)\n', (9646, 9659), True, 'import numpy as np\n'), ((9767, 9800), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (9798, 9800), True, 'import tensorflow as tf\n'), ((10221, 10254), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (10252, 10254), True, 'import tensorflow as tf\n'), ((10780, 10798), 'numpy.arange', 'np.arange', (['(1)', '(6)', '(1)'], {}), '(1, 6, 1)\n', (10789, 10798), True, 'import numpy as np\n'), ((10895, 10913), 'numpy.arange', 'np.arange', (['(1)', '(6)', '(1)'], {}), '(1, 6, 1)\n', (10904, 10913), True, 'import numpy as np\n'), ((11078, 11093), 'cv2.imread', 'cv2.imread', (['img'], {}), '(img)\n', (11088, 11093), False, 'import cv2\n'), ((11619, 11639), 'numpy.mean', 'np.mean', (['test_images'], {}), '(test_images)\n', (11626, 11639), True, 'import numpy as np\n'), ((11747, 11780), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (11778, 11780), True, 'import tensorflow as tf\n'), ((12201, 12234), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (12232, 12234), True, 'import tensorflow as tf\n'), ((12767, 12785), 'numpy.arange', 'np.arange', (['(1)', '(6)', '(1)'], {}), '(1, 6, 1)\n', (12776, 12785), True, 'import numpy as np\n'), ((12882, 12900), 'numpy.arange', 'np.arange', (['(1)', '(6)', '(1)'], {}), '(1, 6, 1)\n', (12891, 12900), True, 'import numpy as np\n')]
|
import datetime
import random
from config import *
from lib import app, cords, button, led
from utils import time
class Direction:
def __init__(self, x, y, left=None, right=None):
self.x = x
self.y = y
self.left = left
self.right = right
SPEED = 1000
DIR_UP = Direction(0, -1)
DIR_DOWN = Direction(0, 1)
DIR_LEFT = Direction(-1, 0, DIR_DOWN, DIR_UP)
DIR_RIGHT = Direction(1, 0, DIR_UP, DIR_DOWN)
DIR_UP.left = DIR_LEFT
DIR_UP.right = DIR_RIGHT
DIR_DOWN.left = DIR_LEFT
DIR_DOWN.right = DIR_RIGHT
class App(app.BaseApp):
def __init__(self):
super().__init__("Snake")
middle = DISPLAY_ROWS // 2
self.snake = [
cords.Cords(3, middle),
cords.Cords(2, middle),
cords.Cords(1, middle)
]
self.food = None
self.gen_food()
self.direction = DIR_RIGHT
def run(self):
self.render()
while True:
begin = datetime.datetime.now()
final_press = None
try:
while time.to_ms(datetime.datetime.now() - begin) < SPEED:
press = button.any_button_once()
if press:
final_press = press
except KeyboardInterrupt:
break
if final_press is not None:
if final_press[0] == 0:
self.direction = self.direction.left
elif final_press[0] == 1:
self.direction = self.direction.right
old_head = self.snake[0]
head = cords.Cords(old_head.x + self.direction.x, old_head.y + self.direction.y)
if 0 > head.x or DISPLAY_COLUMNS <= head.x or 0 > head.y or DISPLAY_ROWS <= head.y:
break
self.snake.insert(0, head)
if head != self.food:
self.snake.pop()
else:
self.gen_food()
self.render()
time.sleep(3)
def gen_food(self):
food = cords.Cords(random.randint(0, DISPLAY_COLUMNS - 1), random.randint(0, DISPLAY_ROWS - 1))
if food in self.snake:
self.gen_food()
else:
self.food = food
def render(self):
led.fill_func(self.render_func)
def render_func(self, cord):
if self.food == cord:
return led.COLOR_RED
elif cord in self.snake:
return led.COLOR_GREEN
return led.COLOR_BLACK
|
[
"random.randint",
"lib.cords.Cords",
"utils.time.sleep",
"lib.button.any_button_once",
"datetime.datetime.now",
"lib.led.fill_func"
] |
[((1975, 1988), 'utils.time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1985, 1988), False, 'from utils import time\n'), ((2251, 2282), 'lib.led.fill_func', 'led.fill_func', (['self.render_func'], {}), '(self.render_func)\n', (2264, 2282), False, 'from lib import app, cords, button, led\n'), ((695, 717), 'lib.cords.Cords', 'cords.Cords', (['(3)', 'middle'], {}), '(3, middle)\n', (706, 717), False, 'from lib import app, cords, button, led\n'), ((731, 753), 'lib.cords.Cords', 'cords.Cords', (['(2)', 'middle'], {}), '(2, middle)\n', (742, 753), False, 'from lib import app, cords, button, led\n'), ((767, 789), 'lib.cords.Cords', 'cords.Cords', (['(1)', 'middle'], {}), '(1, middle)\n', (778, 789), False, 'from lib import app, cords, button, led\n'), ((966, 989), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (987, 989), False, 'import datetime\n'), ((1593, 1666), 'lib.cords.Cords', 'cords.Cords', (['(old_head.x + self.direction.x)', '(old_head.y + self.direction.y)'], {}), '(old_head.x + self.direction.x, old_head.y + self.direction.y)\n', (1604, 1666), False, 'from lib import app, cords, button, led\n'), ((2041, 2079), 'random.randint', 'random.randint', (['(0)', '(DISPLAY_COLUMNS - 1)'], {}), '(0, DISPLAY_COLUMNS - 1)\n', (2055, 2079), False, 'import random\n'), ((2081, 2116), 'random.randint', 'random.randint', (['(0)', '(DISPLAY_ROWS - 1)'], {}), '(0, DISPLAY_ROWS - 1)\n', (2095, 2116), False, 'import random\n'), ((1141, 1165), 'lib.button.any_button_once', 'button.any_button_once', ([], {}), '()\n', (1163, 1165), False, 'from lib import app, cords, button, led\n'), ((1071, 1094), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1092, 1094), False, 'import datetime\n')]
|
from django.contrib import admin
from physionet import models
# Register your models here.
admin.site.register(models.StaticPage)
admin.site.register(models.Section)
|
[
"django.contrib.admin.site.register"
] |
[((92, 130), 'django.contrib.admin.site.register', 'admin.site.register', (['models.StaticPage'], {}), '(models.StaticPage)\n', (111, 130), False, 'from django.contrib import admin\n'), ((131, 166), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Section'], {}), '(models.Section)\n', (150, 166), False, 'from django.contrib import admin\n')]
|
from datetime import datetime
from common.forms import DocumentForm
from common.functions import create_thumbnail, delete_thumbnail
from django import forms
from django.conf import settings
from django.utils.dateformat import format
from .models import Article
class ArticleForm(DocumentForm):
UPLOAD_TO = 'blog/%Y/%m/%d/'
title = forms.CharField()
content = forms.CharField(required=False, widget=forms.Textarea)
is_comments = forms.BooleanField(required=False)
is_published = forms.BooleanField(required=False)
type = forms.TypedChoiceField(choices=Article.Type.choices, coerce=int)
cover = forms.CharField(required=False)
code = forms.CharField(required=False, widget=forms.Textarea)
get_youtube_image = forms.BooleanField(required=False)
status = forms.CharField(required=False)
class Meta:
fields = [
'title',
'content',
'is_comments',
'is_published',
'type',
'cover',
'code',
'get_youtube_image',
'status',
]
model = Article
def clean(self):
cd = super().clean()
cd['date_modified'] = format(datetime.today(), settings.DATETIME_FORMAT)
return cd
def delete_image(self, i):
delete_thumbnail(self.cleaned_data['images'][i])
super().delete_image(i)
def upload_image(self, image):
name = super().upload_image(image)
create_thumbnail(name)
return name
|
[
"common.functions.delete_thumbnail",
"datetime.datetime.today",
"django.forms.BooleanField",
"common.functions.create_thumbnail",
"django.forms.TypedChoiceField",
"django.forms.CharField"
] |
[((343, 360), 'django.forms.CharField', 'forms.CharField', ([], {}), '()\n', (358, 360), False, 'from django import forms\n'), ((375, 429), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(False)', 'widget': 'forms.Textarea'}), '(required=False, widget=forms.Textarea)\n', (390, 429), False, 'from django import forms\n'), ((448, 482), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'required': '(False)'}), '(required=False)\n', (466, 482), False, 'from django import forms\n'), ((502, 536), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'required': '(False)'}), '(required=False)\n', (520, 536), False, 'from django import forms\n'), ((548, 612), 'django.forms.TypedChoiceField', 'forms.TypedChoiceField', ([], {'choices': 'Article.Type.choices', 'coerce': 'int'}), '(choices=Article.Type.choices, coerce=int)\n', (570, 612), False, 'from django import forms\n'), ((625, 656), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(False)'}), '(required=False)\n', (640, 656), False, 'from django import forms\n'), ((668, 722), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(False)', 'widget': 'forms.Textarea'}), '(required=False, widget=forms.Textarea)\n', (683, 722), False, 'from django import forms\n'), ((747, 781), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'required': '(False)'}), '(required=False)\n', (765, 781), False, 'from django import forms\n'), ((795, 826), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(False)'}), '(required=False)\n', (810, 826), False, 'from django import forms\n'), ((1302, 1350), 'common.functions.delete_thumbnail', 'delete_thumbnail', (["self.cleaned_data['images'][i]"], {}), "(self.cleaned_data['images'][i])\n", (1318, 1350), False, 'from common.functions import create_thumbnail, delete_thumbnail\n'), ((1470, 1492), 'common.functions.create_thumbnail', 'create_thumbnail', (['name'], {}), '(name)\n', (1486, 1492), False, 'from common.functions import create_thumbnail, delete_thumbnail\n'), ((1200, 1216), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1214, 1216), False, 'from datetime import datetime\n')]
|
# Third party imports
import tensorflow as tf
import numpy as np
# batch_size x height x width x depth x chan
def diceLoss(y_true, y_pred):
top = 2*tf.reduce_sum(y_true * y_pred, [1, 2, 3])
bottom = tf.maximum(tf.reduce_sum(y_true+y_pred, [1, 2, 3]), 1e-5)
dice = tf.reduce_mean(top/bottom)
return -dice
def gradientLoss(penalty='l1'):
def loss(y_true, y_pred):
dy = tf.abs(y_pred[:, 1:, :, :, :] - y_pred[:, :-1, :, :, :])
dx = tf.abs(y_pred[:, :, 1:, :, :] - y_pred[:, :, :-1, :, :])
dz = tf.abs(y_pred[:, :, :, 1:, :] - y_pred[:, :, :, :-1, :])
if (penalty == 'l2'):
dy = dy * dy
dx = dx * dx
dz = dz * dz
d = tf.reduce_mean(dx)+tf.reduce_mean(dy)+tf.reduce_mean(dz)
return d/3.0
return loss
def gradientLoss2D():
def loss(y_true, y_pred):
dy = tf.abs(y_pred[:, 1:, :, :] - y_pred[:, :-1, :, :])
dx = tf.abs(y_pred[:, :, 1:, :] - y_pred[:, :, :-1, :])
dy = dy * dy
dx = dx * dx
d = tf.reduce_mean(dx)+tf.reduce_mean(dy)
return d/2.0
return loss
def cc3D(win=[9, 9, 9], voxel_weights=None):
def loss(I, J):
I2 = I*I
J2 = J*J
IJ = I*J
filt = tf.ones([win[0], win[1], win[2], 1, 1])
I_sum = tf.nn.conv3d(I, filt, [1, 1, 1, 1, 1], "SAME")
J_sum = tf.nn.conv3d(J, filt, [1, 1, 1, 1, 1], "SAME")
I2_sum = tf.nn.conv3d(I2, filt, [1, 1, 1, 1, 1], "SAME")
J2_sum = tf.nn.conv3d(J2, filt, [1, 1, 1, 1, 1], "SAME")
IJ_sum = tf.nn.conv3d(IJ, filt, [1, 1, 1, 1, 1], "SAME")
win_size = win[0]*win[1]*win[2]
u_I = I_sum/win_size
u_J = J_sum/win_size
cross = IJ_sum - u_J*I_sum - u_I*J_sum + u_I*u_J*win_size
I_var = I2_sum - 2 * u_I * I_sum + u_I*u_I*win_size
J_var = J2_sum - 2 * u_J * J_sum + u_J*u_J*win_size
cc = cross*cross / (I_var*J_var+1e-5)
# if(voxel_weights is not None):
# cc = cc * voxel_weights
return -1.0*tf.reduce_mean(cc)
return loss
def cc2D(win=[9, 9]):
def loss(I, J):
I2 = tf.multiply(I, I)
J2 = tf.multiply(J, J)
IJ = tf.multiply(I, J)
sum_filter = tf.ones([win[0], win[1], 1, 1])
I_sum = tf.nn.conv2d(I, sum_filter, [1, 1, 1, 1], "SAME")
J_sum = tf.nn.conv2d(J, sum_filter, [1, 1, 1, 1], "SAME")
I2_sum = tf.nn.conv2d(I2, sum_filter, [1, 1, 1, 1], "SAME")
J2_sum = tf.nn.conv2d(J2, sum_filter, [1, 1, 1, 1], "SAME")
IJ_sum = tf.nn.conv2d(IJ, sum_filter, [1, 1, 1, 1], "SAME")
win_size = win[0]*win[1]
u_I = I_sum/win_size
u_J = J_sum/win_size
cross = IJ_sum - u_J*I_sum - u_I*J_sum + u_I*u_J*win_size
I_var = I2_sum - 2 * u_I * I_sum + u_I*u_I*win_size
J_var = J2_sum - 2 * u_J * J_sum + u_J*u_J*win_size
cc = cross*cross / (I_var*J_var + np.finfo(float).eps)
return -1.0*tf.reduce_mean(cc)
return loss
|
[
"tensorflow.ones",
"tensorflow.abs",
"tensorflow.reduce_sum",
"tensorflow.reduce_mean",
"tensorflow.nn.conv3d",
"tensorflow.multiply",
"numpy.finfo",
"tensorflow.nn.conv2d"
] |
[((272, 300), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['(top / bottom)'], {}), '(top / bottom)\n', (286, 300), True, 'import tensorflow as tf\n'), ((148, 189), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(y_true * y_pred)', '[1, 2, 3]'], {}), '(y_true * y_pred, [1, 2, 3])\n', (161, 189), True, 'import tensorflow as tf\n'), ((214, 255), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(y_true + y_pred)', '[1, 2, 3]'], {}), '(y_true + y_pred, [1, 2, 3])\n', (227, 255), True, 'import tensorflow as tf\n'), ((393, 449), 'tensorflow.abs', 'tf.abs', (['(y_pred[:, 1:, :, :, :] - y_pred[:, :-1, :, :, :])'], {}), '(y_pred[:, 1:, :, :, :] - y_pred[:, :-1, :, :, :])\n', (399, 449), True, 'import tensorflow as tf\n'), ((463, 519), 'tensorflow.abs', 'tf.abs', (['(y_pred[:, :, 1:, :, :] - y_pred[:, :, :-1, :, :])'], {}), '(y_pred[:, :, 1:, :, :] - y_pred[:, :, :-1, :, :])\n', (469, 519), True, 'import tensorflow as tf\n'), ((533, 589), 'tensorflow.abs', 'tf.abs', (['(y_pred[:, :, :, 1:, :] - y_pred[:, :, :, :-1, :])'], {}), '(y_pred[:, :, :, 1:, :] - y_pred[:, :, :, :-1, :])\n', (539, 589), True, 'import tensorflow as tf\n'), ((870, 920), 'tensorflow.abs', 'tf.abs', (['(y_pred[:, 1:, :, :] - y_pred[:, :-1, :, :])'], {}), '(y_pred[:, 1:, :, :] - y_pred[:, :-1, :, :])\n', (876, 920), True, 'import tensorflow as tf\n'), ((934, 984), 'tensorflow.abs', 'tf.abs', (['(y_pred[:, :, 1:, :] - y_pred[:, :, :-1, :])'], {}), '(y_pred[:, :, 1:, :] - y_pred[:, :, :-1, :])\n', (940, 984), True, 'import tensorflow as tf\n'), ((1251, 1290), 'tensorflow.ones', 'tf.ones', (['[win[0], win[1], win[2], 1, 1]'], {}), '([win[0], win[1], win[2], 1, 1])\n', (1258, 1290), True, 'import tensorflow as tf\n'), ((1308, 1354), 'tensorflow.nn.conv3d', 'tf.nn.conv3d', (['I', 'filt', '[1, 1, 1, 1, 1]', '"""SAME"""'], {}), "(I, filt, [1, 1, 1, 1, 1], 'SAME')\n", (1320, 1354), True, 'import tensorflow as tf\n'), ((1371, 1417), 'tensorflow.nn.conv3d', 'tf.nn.conv3d', (['J', 'filt', '[1, 1, 1, 1, 1]', '"""SAME"""'], {}), "(J, filt, [1, 1, 1, 1, 1], 'SAME')\n", (1383, 1417), True, 'import tensorflow as tf\n'), ((1435, 1482), 'tensorflow.nn.conv3d', 'tf.nn.conv3d', (['I2', 'filt', '[1, 1, 1, 1, 1]', '"""SAME"""'], {}), "(I2, filt, [1, 1, 1, 1, 1], 'SAME')\n", (1447, 1482), True, 'import tensorflow as tf\n'), ((1500, 1547), 'tensorflow.nn.conv3d', 'tf.nn.conv3d', (['J2', 'filt', '[1, 1, 1, 1, 1]', '"""SAME"""'], {}), "(J2, filt, [1, 1, 1, 1, 1], 'SAME')\n", (1512, 1547), True, 'import tensorflow as tf\n'), ((1565, 1612), 'tensorflow.nn.conv3d', 'tf.nn.conv3d', (['IJ', 'filt', '[1, 1, 1, 1, 1]', '"""SAME"""'], {}), "(IJ, filt, [1, 1, 1, 1, 1], 'SAME')\n", (1577, 1612), True, 'import tensorflow as tf\n'), ((2136, 2153), 'tensorflow.multiply', 'tf.multiply', (['I', 'I'], {}), '(I, I)\n', (2147, 2153), True, 'import tensorflow as tf\n'), ((2167, 2184), 'tensorflow.multiply', 'tf.multiply', (['J', 'J'], {}), '(J, J)\n', (2178, 2184), True, 'import tensorflow as tf\n'), ((2198, 2215), 'tensorflow.multiply', 'tf.multiply', (['I', 'J'], {}), '(I, J)\n', (2209, 2215), True, 'import tensorflow as tf\n'), ((2238, 2269), 'tensorflow.ones', 'tf.ones', (['[win[0], win[1], 1, 1]'], {}), '([win[0], win[1], 1, 1])\n', (2245, 2269), True, 'import tensorflow as tf\n'), ((2287, 2336), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['I', 'sum_filter', '[1, 1, 1, 1]', '"""SAME"""'], {}), "(I, sum_filter, [1, 1, 1, 1], 'SAME')\n", (2299, 2336), True, 'import tensorflow as tf\n'), ((2353, 2402), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['J', 'sum_filter', '[1, 1, 1, 1]', 
'"""SAME"""'], {}), "(J, sum_filter, [1, 1, 1, 1], 'SAME')\n", (2365, 2402), True, 'import tensorflow as tf\n'), ((2420, 2470), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['I2', 'sum_filter', '[1, 1, 1, 1]', '"""SAME"""'], {}), "(I2, sum_filter, [1, 1, 1, 1], 'SAME')\n", (2432, 2470), True, 'import tensorflow as tf\n'), ((2488, 2538), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['J2', 'sum_filter', '[1, 1, 1, 1]', '"""SAME"""'], {}), "(J2, sum_filter, [1, 1, 1, 1], 'SAME')\n", (2500, 2538), True, 'import tensorflow as tf\n'), ((2556, 2606), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['IJ', 'sum_filter', '[1, 1, 1, 1]', '"""SAME"""'], {}), "(IJ, sum_filter, [1, 1, 1, 1], 'SAME')\n", (2568, 2606), True, 'import tensorflow as tf\n'), ((746, 764), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['dz'], {}), '(dz)\n', (760, 764), True, 'import tensorflow as tf\n'), ((1041, 1059), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['dx'], {}), '(dx)\n', (1055, 1059), True, 'import tensorflow as tf\n'), ((1060, 1078), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['dy'], {}), '(dy)\n', (1074, 1078), True, 'import tensorflow as tf\n'), ((2043, 2061), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['cc'], {}), '(cc)\n', (2057, 2061), True, 'import tensorflow as tf\n'), ((2971, 2989), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['cc'], {}), '(cc)\n', (2985, 2989), True, 'import tensorflow as tf\n'), ((708, 726), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['dx'], {}), '(dx)\n', (722, 726), True, 'import tensorflow as tf\n'), ((727, 745), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['dy'], {}), '(dy)\n', (741, 745), True, 'import tensorflow as tf\n'), ((2930, 2945), 'numpy.finfo', 'np.finfo', (['float'], {}), '(float)\n', (2938, 2945), True, 'import numpy as np\n')]
|
from afqueue.common.encoding_utilities import cast_bytes
from afqueue.messages.base_message import BaseMessage #@UnresolvedImport
from afqueue.common.exception_formatter import ExceptionFormatter #@UnresolvedImport
from afqueue.common.client_queue_lock import ClientQueueLock #@UnresolvedImport
from afqueue.messages import message_types #@UnresolvedImport
from afqueue.common.client_exchange import ClientExchange #@UnresolvedImport
from afqueue.common.client_queue import ClientQueue #@UnresolvedImport
from afqueue.data_objects.exchange_wrapper import ExchangeWrapper #@UnresolvedImport
from afqueue.data_objects.data_queue_wrapper import DataQueueWrapper #@UnresolvedImport
import simplejson as json #@UnresolvedImport
import bson #@UnresolvedImport
def build_settings_dictionary(id_string, start_time, redis_connection_string, shared_memory_max_size,
ordered_ownership_stop_threshold, ordered_ownership_start_threshold):
"""
Builds the settings dictionary which peers use to pass settings information back and forth.
"""
# Build.
settings_dict = dict()
settings_dict["id"] = id_string
settings_dict["start_time"] = start_time
settings_dict["sm_connection"] = redis_connection_string
settings_dict["sm_max"] = shared_memory_max_size
settings_dict["oq_stop"] = ordered_ownership_stop_threshold
settings_dict["oq_start"] = ordered_ownership_start_threshold
# Return.
return settings_dict
class PeerForwardedCommandMessage(BaseMessage):
def __init__(self, destination_dealer_id_tag, command_message_as_dict, sender_id_string = None):
# Build base.
super(PeerForwardedCommandMessage, self).__init__(message_types.PEER_FORWARDED_COMMAND_MESSAGE)
# Store data.
self.destination_dealer_id_tag = destination_dealer_id_tag
self.command_message_as_dict = command_message_as_dict
# Internal data.
self.sender_id_string = sender_id_string
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
BaseMessage._send_with_destination(self, socket, self.destination_dealer_id_tag, bson.dumps(self.command_message_as_dict))
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message, sender_id_string):
"""
Returns a new message of this type from the raw message data.
"""
try:
return PeerForwardedCommandMessage(None, bson.loads(raw_message[1]), sender_id_string)
except:
raise ExceptionFormatter.get_full_exception()
class PeerOrderedQueuesExhaustedOwnersMessage(BaseMessage):
def __init__(self, destination_dealer_id_tag, ordered_queues_owners_exhausted_dictionary, sender_id_string = None):
# Build base.
super(PeerOrderedQueuesExhaustedOwnersMessage, self).__init__(message_types.PEER_ORDERED_QUEUES_OWNERS_EXHAUSTED)
# Store data.
self.destination_dealer_id_tag = destination_dealer_id_tag
self.ordered_queues_owners_exhausted_dictionary = ordered_queues_owners_exhausted_dictionary
# Internal data.
self.sender_id_string = sender_id_string
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
BaseMessage._send_with_destination(self, socket, self.destination_dealer_id_tag, bson.dumps(self.ordered_queues_owners_exhausted_dictionary))
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message, sender_id_string):
"""
Returns a new message of this type from the raw message data.
"""
try:
raw_message[1] = cast_bytes(raw_message[1])
return PeerOrderedQueuesExhaustedOwnersMessage(None, bson.loads(raw_message[1]), sender_id_string)
except:
raise ExceptionFormatter.get_full_exception()
class PeerClientDeclareExchangesRequestMessage(BaseMessage):
def __init__(self, destination_dealer_id_tag, client_exchange_list, sender_id_string = None):
# Build base.
super(PeerClientDeclareExchangesRequestMessage, self).__init__(message_types.PEER_CLIENT_DECLARE_EXCHANGES_REQUEST)
# Store data.
self.destination_dealer_id_tag = destination_dealer_id_tag
self.client_exchange_list = client_exchange_list
# Internal data.
self.sender_id_string = sender_id_string
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
BaseMessage._send_with_destination(self, socket, self.destination_dealer_id_tag, json.dumps(ClientExchange.create_network_tuple_list(self.client_exchange_list)))
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message, sender_id_string):
"""
Returns a new message of this type from the raw message data.
"""
try:
return PeerClientDeclareExchangesRequestMessage(None, ClientExchange.create_client_exchange_list(json.loads(raw_message[1])), sender_id_string)
except:
raise ExceptionFormatter.get_full_exception()
class PeerClientDeclareQueuesRequestMessage(BaseMessage):
def __init__(self, destination_dealer_id_tag, client_queue_list, sender_id_string = None):
# Build base.
super(PeerClientDeclareQueuesRequestMessage, self).__init__(message_types.PEER_CLIENT_DECLARE_QUEUES_REQUEST)
# Store data.
self.destination_dealer_id_tag = destination_dealer_id_tag
self.client_queue_list = client_queue_list
# Internal data.
self.sender_id_string = sender_id_string
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
BaseMessage._send_with_destination(self, socket, self.destination_dealer_id_tag, json.dumps(ClientQueue.create_network_tuple_list(self.client_queue_list)))
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message, sender_id_string):
"""
Returns a new message of this type from the raw message data.
"""
try:
return PeerClientDeclareQueuesRequestMessage(None, ClientQueue.create_client_queue_list(json.loads(raw_message[1])), sender_id_string)
except:
raise ExceptionFormatter.get_full_exception()
class PeerClientDeleteQueuesRequestMessage(BaseMessage):
def __init__(self, destination_dealer_id_tag, queue_name_list, sender_id_string = None):
# Build base.
super(PeerClientDeleteQueuesRequestMessage, self).__init__(message_types.PEER_CLIENT_DELETE_QUEUES_REQUEST)
# Store data.
self.destination_dealer_id_tag = destination_dealer_id_tag
self.queue_name_list = queue_name_list
# Internal data.
self.sender_id_string = sender_id_string
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
BaseMessage._send_with_destination(self, socket, self.destination_dealer_id_tag, json.dumps(self.queue_name_list))
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message, sender_id_string):
"""
Returns a new message of this type from the raw message data.
"""
try:
return PeerClientDeleteQueuesRequestMessage(None, json.loads(raw_message[1]), sender_id_string)
except:
raise ExceptionFormatter.get_full_exception()
class PeerClientLockQueuesRequestMessage(BaseMessage):
def __init__(self, destination_dealer_id_tag, client_queue_lock_list, owner_id_string, sender_id_string = None):
# Build base.
super(PeerClientLockQueuesRequestMessage, self).__init__(message_types.PEER_CLIENT_LOCK_QUEUES_REQUEST)
# Store data.
self.destination_dealer_id_tag = destination_dealer_id_tag
self.client_queue_lock_list = client_queue_lock_list
self.owner_id_string = owner_id_string
# Internal data.
self.sender_id_string = sender_id_string
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
BaseMessage._send_with_destination(self, socket, self.destination_dealer_id_tag, json.dumps(ClientQueueLock.create_network_tuple_list(self.client_queue_lock_list)), self.owner_id_string)
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message, sender_id_string):
"""
Returns a new message of this type from the raw message data.
"""
try:
return PeerClientLockQueuesRequestMessage(None, ClientQueueLock.create_client_queue_lock_list(json.loads(raw_message[1])), raw_message[2], sender_id_string)
except:
raise ExceptionFormatter.get_full_exception()
class PeerClientUnlockQueuesRequestMessage(BaseMessage):
def __init__(self, destination_dealer_id_tag, queue_name_list, sender_id_string = None):
# Build base.
super(PeerClientUnlockQueuesRequestMessage, self).__init__(message_types.PEER_CLIENT_UNLOCK_QUEUES_REQUEST)
# Store data.
self.destination_dealer_id_tag = destination_dealer_id_tag
self.queue_name_list = queue_name_list
# Internal data.
self.sender_id_string = sender_id_string
self.owner_id_string = None
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
BaseMessage._send_with_destination(self, socket, self.destination_dealer_id_tag, json.dumps(self.queue_name_list))
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message, sender_id_string):
"""
Returns a new message of this type from the raw message data.
"""
try:
return PeerClientUnlockQueuesRequestMessage(None, json.loads(raw_message[1]), sender_id_string)
except:
raise ExceptionFormatter.get_full_exception()
class PeerHeartBeatFailureMessage(BaseMessage):
def __init__(self, destination_dealer_id_tag, disconnecting_flag, sender_id_string = None):
# Build base.
super(PeerHeartBeatFailureMessage, self).__init__(message_types.PEER_HEART_BEAT_FAILURE)
# Transmitted data.
self.destination_dealer_id_tag = destination_dealer_id_tag
self.disconnecting_flag = disconnecting_flag
# Internal data.
self.sender_id_string = sender_id_string
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
BaseMessage._send_with_destination(self, socket, self.destination_dealer_id_tag, str(self.disconnecting_flag))
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message, sender_id_string):
"""
Returns a new message of this type from the raw message data.
"""
try:
            disconnecting_flag = True if raw_message[1] == "True" else False
            return PeerHeartBeatFailureMessage(None, disconnecting_flag, sender_id_string)
except:
raise ExceptionFormatter.get_full_exception()
class PeerHeartBeatMessage(BaseMessage):
def __init__(self, destination_dealer_id_tag, sender_time_stamp, sender_queue_size_snapshot_dict, sender_id_string = None):
# Build base.
super(PeerHeartBeatMessage, self).__init__(message_types.PEER_HEART_BEAT)
# Transmitted data.
self.destination_dealer_id_tag = destination_dealer_id_tag
self.sender_time_stamp = sender_time_stamp
self.sender_queue_size_snapshot_dict = sender_queue_size_snapshot_dict
# Internal data.
self.sender_id_string = sender_id_string
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
BaseMessage._send_with_destination(self, socket, self.destination_dealer_id_tag, self.sender_time_stamp, bson.dumps(self.sender_queue_size_snapshot_dict))
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message, sender_id_string):
"""
Returns a new message of this type from the raw message data.
"""
try:
raw_message[2] = cast_bytes(raw_message[2])
return PeerHeartBeatMessage(None, raw_message[1], bson.loads(raw_message[2]), sender_id_string)
except:
raise ExceptionFormatter.get_full_exception()
class PeerMasterControlDataMessage(BaseMessage):
def __init__(self, destination_dealer_id_tag, pecking_order_list, queue_lock_owner_dict,
ordered_queue_owners_dict, push_rejection_queue_name_set, accepting_data_owner_id_list,
frozen_push_queue_list, frozen_pull_queue_list, sender_id_string = None):
# Build base.
super(PeerMasterControlDataMessage, self).__init__(message_types.PEER_MASTER_CONTROL_DATA)
# Transmitted data.
self.destination_dealer_id_tag = destination_dealer_id_tag
self.pecking_order_list = pecking_order_list
self.queue_lock_owner_dict = queue_lock_owner_dict
self.ordered_queue_owners_dict = ordered_queue_owners_dict
self.push_rejection_queue_name_set = push_rejection_queue_name_set
self.accepting_data_owner_id_list = accepting_data_owner_id_list
self.frozen_push_queue_list = frozen_push_queue_list
self.frozen_pull_queue_list = frozen_pull_queue_list
# Internal data.
self.sender_id_string = sender_id_string
def dump(self, include_destination_tag = True):
"""
Dumps the message into a format in which it can be recreated via the "load" method.
"""
try:
dump_dict = dict()
if include_destination_tag == True:
dump_dict["ddit"] = self.destination_dealer_id_tag
dump_dict["pol"] = self.pecking_order_list
dump_dict["qlod"] = self.queue_lock_owner_dict
dump_dict["oqod"] = self.ordered_queue_owners_dict
dump_dict["prqns"] = list(self.push_rejection_queue_name_set)
dump_dict["adol"] = self.accepting_data_owner_id_list
dump_dict["fpush"] = self.frozen_push_queue_list
dump_dict["fpull"] = self.frozen_pull_queue_list
return bson.dumps(dump_dict)
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def load(dumped_string):
"""
Returns an instance object of this class built from data which was created in the "dump" method.
"""
dumped_string = cast_bytes(dumped_string)
dump_dict = bson.loads(dumped_string)
destination_dealer_id_tag = dump_dict.get("ddit", None)
pecking_order_list = dump_dict["pol"]
queue_lock_owner_dict = dump_dict["qlod"]
ordered_queue_owners_dict = dump_dict["oqod"]
push_rejection_queue_name_set = set(dump_dict["prqns"])
accepting_data_owner_id_list = dump_dict["adol"]
frozen_push_queue_list = dump_dict["fpush"]
frozen_pull_queue_list = dump_dict["fpull"]
return PeerMasterControlDataMessage(destination_dealer_id_tag, pecking_order_list, queue_lock_owner_dict, ordered_queue_owners_dict,
push_rejection_queue_name_set, accepting_data_owner_id_list,
frozen_push_queue_list, frozen_pull_queue_list)
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
BaseMessage._send_with_destination(self, socket, self.destination_dealer_id_tag, self.dump(False))
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message, sender_id_string):
"""
Returns a new message of this type from the raw message data.
"""
try:
message = PeerMasterControlDataMessage.load(raw_message[1])
message.sender_id_string = sender_id_string
return message
except:
raise ExceptionFormatter.get_full_exception()
class PeerMasterSetupDataMessage(BaseMessage):
def __init__(self, destination_dealer_id_tag, exchange_wrapper_list, queue_wrapper_list, sender_id_string = None):
# Build base.
super(PeerMasterSetupDataMessage, self).__init__(message_types.PEER_MASTER_SETUP_DATA)
# Transmitted data.
self.destination_dealer_id_tag = destination_dealer_id_tag
self.exchange_wrapper_list = exchange_wrapper_list
self.queue_wrapper_list = queue_wrapper_list
# Internal data.
self.sender_id_string = sender_id_string
def dump(self):
"""
Dumps the message into a format in which it can be recreated via the "load" method.
"""
try:
dump_dict = dict()
dump_dict["ddit"] = self.destination_dealer_id_tag
dump_dict["ewl"] = [ew.dump() for ew in self.exchange_wrapper_list]
dump_dict["qwl"] = [qw.dump() for qw in self.queue_wrapper_list]
return bson.dumps(dump_dict)
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def load(dumped_string):
"""
Returns an instance object of this class built from data which was created in the "dump" method.
"""
dumped_string = cast_bytes(dumped_string)
dump_dict = bson.loads(dumped_string)
destination_dealer_id_tag = dump_dict["ddit"]
exchange_wrapper_list = [ExchangeWrapper.load(dew) for dew in dump_dict["ewl"]]
queue_wrapper_list = [DataQueueWrapper.load(dqw) for dqw in dump_dict["qwl"]]
return PeerMasterSetupDataMessage(destination_dealer_id_tag, exchange_wrapper_list, queue_wrapper_list)
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
BaseMessage._send_with_destination(self, socket, self.destination_dealer_id_tag, self.dump())
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message, sender_id_string):
"""
Returns a new message of this type from the raw message data.
"""
try:
message = PeerMasterSetupDataMessage.load(raw_message[1])
message.sender_id_string = sender_id_string
return message
except:
raise ExceptionFormatter.get_full_exception()
class PeerOnlineHandshakeReplyMessage(BaseMessage):
def __init__(self, reply_id_tag, settings_dict, sender_dealer_id_tag,
sender_master_flag, master_setup_data_message, master_control_data_message, master_synchronization_failure_flag,
ping_back_success_flag):
# Build base.
super(PeerOnlineHandshakeReplyMessage, self).__init__(message_types.PEER_ONLINE_HANDSHAKE_REPLY)
# Transmitted data.
self.reply_id_tag = reply_id_tag
self.settings_dict = settings_dict
self.sender_dealer_id_tag = sender_dealer_id_tag
self.sender_master_flag = sender_master_flag
self.master_setup_data_message = master_setup_data_message
self.master_control_data_message = master_control_data_message
self.master_synchronization_failure_flag = master_synchronization_failure_flag
self.ping_back_success_flag = ping_back_success_flag
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
if self.master_setup_data_message != None:
master_setup_data_message = self.master_setup_data_message.dump()
else:
master_setup_data_message = ""
if self.master_control_data_message != None:
master_control_data_message = self.master_control_data_message.dump()
else:
master_control_data_message = ""
BaseMessage._send_with_destination_and_delimiter(self, socket, self.reply_id_tag,
bson.dumps(self.settings_dict),
self.sender_dealer_id_tag,
str(self.sender_master_flag),
master_setup_data_message, master_control_data_message,
str(self.master_synchronization_failure_flag),
str(self.ping_back_success_flag))
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message):
"""
Returns a new message of this type from the raw message data.
"""
try:
if raw_message[4] != "":
master_setup_data_message = PeerMasterSetupDataMessage.load(raw_message[4])
else:
master_setup_data_message = None
if raw_message[5] != "":
master_control_data_message = PeerMasterControlDataMessage.load(raw_message[5])
else:
master_control_data_message = None
return PeerOnlineHandshakeReplyMessage(None, bson.loads(raw_message[1]), raw_message[2],
BaseMessage.bool_from_string(raw_message[3]),
master_setup_data_message, master_control_data_message,
BaseMessage.bool_from_string(raw_message[6]),
BaseMessage.bool_from_string(raw_message[7]))
except:
raise ExceptionFormatter.get_full_exception()
class PeerOnlineHandshakeRequestMessage(BaseMessage):
def __init__(self, settings_dict, sender_dealer_id_tag, receiver_dealer_id_tag = None):
# Build base.
super(PeerOnlineHandshakeRequestMessage, self).__init__(message_types.PEER_ONLINE_HANDSHAKE_REQUEST)
# Transmitted data.
self.settings_dict = settings_dict
self.sender_dealer_id_tag = sender_dealer_id_tag
# Internal data.
self.receiver_dealer_id_tag = receiver_dealer_id_tag
self.sending_thread_name = None
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
BaseMessage._send(self, socket, bson.dumps(self.settings_dict), self.sender_dealer_id_tag)
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message, qm_dealer_id_tag):
"""
Returns a new message of this type from the raw message data.
"""
try:
return PeerOnlineHandshakeRequestMessage(bson.loads(raw_message[1]), raw_message[2], qm_dealer_id_tag)
except:
raise ExceptionFormatter.get_full_exception()
class PeerOfflineMessage(BaseMessage):
def __init__(self, destination_dealer_id_tag, sender_id_string = None):
# Build base.
super(PeerOfflineMessage, self).__init__(message_types.PEER_OFFLINE)
# Transmitted data.
self.destination_dealer_id_tag = destination_dealer_id_tag
# Internal data.
self.sender_id_string = sender_id_string
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
BaseMessage._send_with_destination(self, socket, self.destination_dealer_id_tag)
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message, sender_id_string):
"""
Returns a new message of this type from the raw message data.
"""
try:
return PeerOfflineMessage(None, sender_id_string)
except:
raise ExceptionFormatter.get_full_exception()
class PeerRequestMasterDataMessage(BaseMessage):
def __init__(self, destination_dealer_id_tag, sender_id_string = None):
# Build base.
super(PeerRequestMasterDataMessage, self).__init__(message_types.PEER_REQUEST_MASTER_DATA)
# Store data.
self.destination_dealer_id_tag = destination_dealer_id_tag
# Internal data.
self.sender_id_string = sender_id_string
def send(self, socket):
"""
Sends the message over the socket.
"""
try:
BaseMessage._send_with_destination(self, socket, self.destination_dealer_id_tag)
except:
raise ExceptionFormatter.get_full_exception()
@staticmethod
def create_from_received(raw_message, sender_id_string):
"""
Returns a new message of this type from the raw message data.
"""
try:
return PeerRequestMasterDataMessage(None, sender_id_string)
except:
raise ExceptionFormatter.get_full_exception()
|
[
"afqueue.messages.base_message.BaseMessage._send_with_destination",
"afqueue.common.client_exchange.ClientExchange.create_network_tuple_list",
"afqueue.messages.base_message.BaseMessage.bool_from_string",
"bson.loads",
"afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception",
"simplejson.dumps",
"afqueue.common.encoding_utilities.cast_bytes",
"bson.dumps",
"afqueue.common.client_queue.ClientQueue.create_network_tuple_list",
"afqueue.data_objects.exchange_wrapper.ExchangeWrapper.load",
"simplejson.loads",
"afqueue.common.client_queue_lock.ClientQueueLock.create_network_tuple_list",
"afqueue.data_objects.data_queue_wrapper.DataQueueWrapper.load"
] |
[((16381, 16406), 'afqueue.common.encoding_utilities.cast_bytes', 'cast_bytes', (['dumped_string'], {}), '(dumped_string)\n', (16391, 16406), False, 'from afqueue.common.encoding_utilities import cast_bytes\n'), ((16427, 16452), 'bson.loads', 'bson.loads', (['dumped_string'], {}), '(dumped_string)\n', (16437, 16452), False, 'import bson\n'), ((19441, 19466), 'afqueue.common.encoding_utilities.cast_bytes', 'cast_bytes', (['dumped_string'], {}), '(dumped_string)\n', (19451, 19466), False, 'from afqueue.common.encoding_utilities import cast_bytes\n'), ((19487, 19512), 'bson.loads', 'bson.loads', (['dumped_string'], {}), '(dumped_string)\n', (19497, 19512), False, 'import bson\n'), ((4038, 4064), 'afqueue.common.encoding_utilities.cast_bytes', 'cast_bytes', (['raw_message[1]'], {}), '(raw_message[1])\n', (4048, 4064), False, 'from afqueue.common.encoding_utilities import cast_bytes\n'), ((13854, 13880), 'afqueue.common.encoding_utilities.cast_bytes', 'cast_bytes', (['raw_message[2]'], {}), '(raw_message[2])\n', (13864, 13880), False, 'from afqueue.common.encoding_utilities import cast_bytes\n'), ((16019, 16040), 'bson.dumps', 'bson.dumps', (['dump_dict'], {}), '(dump_dict)\n', (16029, 16040), False, 'import bson\n'), ((19079, 19100), 'bson.dumps', 'bson.dumps', (['dump_dict'], {}), '(dump_dict)\n', (19089, 19100), False, 'import bson\n'), ((19600, 19625), 'afqueue.data_objects.exchange_wrapper.ExchangeWrapper.load', 'ExchangeWrapper.load', (['dew'], {}), '(dew)\n', (19620, 19625), False, 'from afqueue.data_objects.exchange_wrapper import ExchangeWrapper\n'), ((19685, 19711), 'afqueue.data_objects.data_queue_wrapper.DataQueueWrapper.load', 'DataQueueWrapper.load', (['dqw'], {}), '(dqw)\n', (19706, 19711), False, 'from afqueue.data_objects.data_queue_wrapper import DataQueueWrapper\n'), ((26064, 26149), 'afqueue.messages.base_message.BaseMessage._send_with_destination', 'BaseMessage._send_with_destination', (['self', 'socket', 'self.destination_dealer_id_tag'], {}), '(self, socket, self.destination_dealer_id_tag\n )\n', (26098, 26149), False, 'from afqueue.messages.base_message import BaseMessage\n'), ((27198, 27283), 'afqueue.messages.base_message.BaseMessage._send_with_destination', 'BaseMessage._send_with_destination', (['self', 'socket', 'self.destination_dealer_id_tag'], {}), '(self, socket, self.destination_dealer_id_tag\n )\n', (27232, 27283), False, 'from afqueue.messages.base_message import BaseMessage\n'), ((2269, 2309), 'bson.dumps', 'bson.dumps', (['self.command_message_as_dict'], {}), '(self.command_message_as_dict)\n', (2279, 2309), False, 'import bson\n'), ((2355, 2394), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (2392, 2394), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((2665, 2691), 'bson.loads', 'bson.loads', (['raw_message[1]'], {}), '(raw_message[1])\n', (2675, 2691), False, 'import bson\n'), ((2745, 2784), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (2782, 2784), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((3647, 3706), 'bson.dumps', 'bson.dumps', (['self.ordered_queues_owners_exhausted_dictionary'], {}), '(self.ordered_queues_owners_exhausted_dictionary)\n', (3657, 3706), False, 'import bson\n'), ((3752, 3791), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 
'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (3789, 3791), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((4130, 4156), 'bson.loads', 'bson.loads', (['raw_message[1]'], {}), '(raw_message[1])\n', (4140, 4156), False, 'import bson\n'), ((4210, 4249), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (4247, 4249), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((5182, 5221), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (5219, 5221), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((5629, 5668), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (5666, 5668), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((6574, 6613), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (6611, 6613), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((7024, 7063), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (7061, 7063), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((7844, 7876), 'simplejson.dumps', 'json.dumps', (['self.queue_name_list'], {}), '(self.queue_name_list)\n', (7854, 7876), True, 'import simplejson as json\n'), ((7921, 7960), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (7958, 7960), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((8240, 8266), 'simplejson.loads', 'json.loads', (['raw_message[1]'], {}), '(raw_message[1])\n', (8250, 8266), True, 'import simplejson as json\n'), ((8332, 8371), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (8369, 8371), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((9376, 9415), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (9413, 9415), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((9848, 9887), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (9885, 9887), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((10704, 10736), 'simplejson.dumps', 'json.dumps', (['self.queue_name_list'], {}), '(self.queue_name_list)\n', (10714, 10736), True, 'import simplejson as json\n'), ((10781, 10820), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (10818, 10820), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((11100, 11126), 'simplejson.loads', 'json.loads', (['raw_message[1]'], {}), '(raw_message[1])\n', (11110, 11126), True, 'import simplejson as json\n'), ((11192, 11231), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (11229, 11231), False, 'from 
afqueue.common.exception_formatter import ExceptionFormatter\n'), ((12096, 12135), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (12133, 12135), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((12549, 12588), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (12586, 12588), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((13468, 13516), 'bson.dumps', 'bson.dumps', (['self.sender_queue_size_snapshot_dict'], {}), '(self.sender_queue_size_snapshot_dict)\n', (13478, 13516), False, 'import bson\n'), ((13572, 13611), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (13609, 13611), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((13943, 13969), 'bson.loads', 'bson.loads', (['raw_message[2]'], {}), '(raw_message[2])\n', (13953, 13969), False, 'import bson\n'), ((14023, 14062), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (14060, 14062), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((16114, 16153), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (16151, 16153), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((17515, 17554), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (17552, 17554), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((17965, 18004), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (18002, 18004), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((19174, 19213), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (19211, 19213), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((20132, 20171), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (20169, 20171), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((20572, 20611), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (20609, 20611), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((22352, 22382), 'bson.dumps', 'bson.dumps', (['self.settings_dict'], {}), '(self.settings_dict)\n', (22362, 22382), False, 'import bson\n'), ((22922, 22961), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (22959, 22961), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((23629, 23655), 'bson.loads', 'bson.loads', (['raw_message[1]'], {}), '(raw_message[1])\n', (23639, 23655), False, 'import bson\n'), ((23725, 23769), 'afqueue.messages.base_message.BaseMessage.bool_from_string', 'BaseMessage.bool_from_string', (['raw_message[3]'], {}), '(raw_message[3])\n', (23753, 23769), False, 
'from afqueue.messages.base_message import BaseMessage\n'), ((23930, 23974), 'afqueue.messages.base_message.BaseMessage.bool_from_string', 'BaseMessage.bool_from_string', (['raw_message[6]'], {}), '(raw_message[6])\n', (23958, 23974), False, 'from afqueue.messages.base_message import BaseMessage\n'), ((24027, 24071), 'afqueue.messages.base_message.BaseMessage.bool_from_string', 'BaseMessage.bool_from_string', (['raw_message[7]'], {}), '(raw_message[7])\n', (24055, 24071), False, 'from afqueue.messages.base_message import BaseMessage\n'), ((24107, 24146), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (24144, 24146), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((24912, 24942), 'bson.dumps', 'bson.dumps', (['self.settings_dict'], {}), '(self.settings_dict)\n', (24922, 24942), False, 'import bson\n'), ((25025, 25064), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (25062, 25064), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((25331, 25357), 'bson.loads', 'bson.loads', (['raw_message[1]'], {}), '(raw_message[1])\n', (25341, 25357), False, 'import bson\n'), ((25427, 25466), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (25464, 25466), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((26199, 26238), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (26236, 26238), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((26548, 26587), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (26585, 26587), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((27325, 27364), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (27362, 27364), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((27679, 27718), 'afqueue.common.exception_formatter.ExceptionFormatter.get_full_exception', 'ExceptionFormatter.get_full_exception', ([], {}), '()\n', (27716, 27718), False, 'from afqueue.common.exception_formatter import ExceptionFormatter\n'), ((5068, 5135), 'afqueue.common.client_exchange.ClientExchange.create_network_tuple_list', 'ClientExchange.create_network_tuple_list', (['self.client_exchange_list'], {}), '(self.client_exchange_list)\n', (5108, 5135), False, 'from afqueue.common.client_exchange import ClientExchange\n'), ((5548, 5574), 'simplejson.loads', 'json.loads', (['raw_message[1]'], {}), '(raw_message[1])\n', (5558, 5574), True, 'import simplejson as json\n'), ((6469, 6530), 'afqueue.common.client_queue.ClientQueue.create_network_tuple_list', 'ClientQueue.create_network_tuple_list', (['self.client_queue_list'], {}), '(self.client_queue_list)\n', (6506, 6530), False, 'from afqueue.common.client_queue import ClientQueue\n'), ((6931, 6957), 'simplejson.loads', 'json.loads', (['raw_message[1]'], {}), '(raw_message[1])\n', (6941, 6957), True, 'import simplejson as json\n'), ((9238, 9308), 'afqueue.common.client_queue_lock.ClientQueueLock.create_network_tuple_list', 'ClientQueueLock.create_network_tuple_list', 
(['self.client_queue_lock_list'], {}), '(self.client_queue_lock_list)\n', (9279, 9308), False, 'from afqueue.common.client_queue_lock import ClientQueueLock\n'), ((9739, 9765), 'simplejson.loads', 'json.loads', (['raw_message[1]'], {}), '(raw_message[1])\n', (9749, 9765), True, 'import simplejson as json\n')]
|
# Copyright (C) 2019 Cancer Care Associates
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import namedtuple
from pymedphys._utilities.transforms import convert_IEC_angle_to_bipolar
Point = namedtuple("Point", ("x", "y", "z"))
class DICOMEntryMissing(ValueError):
pass
def require_gantries_be_zero(plan):
gantry_angles = set(get_gantry_angles_from_dicom(plan))
if gantry_angles != set([0.0]):
raise ValueError("Only Gantry angles equal to 0.0 are currently supported")
def get_surface_entry_point_with_fallback(plan) -> Point:
try:
return get_surface_entry_point(plan)
except DICOMEntryMissing:
pass
require_gantries_be_zero(plan)
iso_raw = get_single_value_from_control_points(plan, "IsocenterPosition")
iso = Point(*[float(item) for item in iso_raw])
source_to_surface = get_single_value_from_control_points(
plan, "SourceToSurfaceDistance"
)
source_to_axis = get_single_value_from_beams(plan, "SourceAxisDistance")
new_y_value = iso.y + source_to_surface - source_to_axis
source_entry_point = Point(iso.x, new_y_value, iso.z)
return source_entry_point
def get_single_value_from_control_points(plan, keyword):
"""Get a named keyword from all control points.
Raises an error if all values are not the same as each other. Raises an
error if no value is found.
"""
values = set()
for beam in plan.BeamSequence:
for control_point in beam.ControlPointSequence:
try:
value = getattr(control_point, keyword)
except AttributeError:
continue
try:
values.add(value)
except TypeError:
values.add(tuple(value))
if not values:
raise DICOMEntryMissing(f"{keyword} was not found within the plan")
if len(values) > 1:
raise ValueError(f"More than one disagreeing {keyword} found")
return values.pop()
def get_single_value_from_beams(plan, keyword):
"""Get a named keyword from all beams.
Raises an error if all values are not the same as each other. Raises an
error if no value is found.
"""
values = set()
for beam in plan.BeamSequence:
try:
value = getattr(beam, keyword)
except AttributeError:
continue
try:
values.add(value)
except TypeError:
values.add(tuple(value))
if not values:
raise DICOMEntryMissing(f"{keyword} was not found within the plan")
if len(values) > 1:
raise ValueError(f"More than one disagreeing {keyword} found")
return values.pop()
def get_surface_entry_point(plan) -> Point:
"""
Parameters
----------
plan : pydicom.Dataset
Returns
-------
surface_entry_point : Point("x", "y", "z")
Patient surface entry point coordinates (x,y,z) in the
Patient-Based Coordinate System described in
Section C.7.6.2.1.1 [1]_ (mm).
References
----------
.. [1] https://dicom.innolitics.com/ciods/rt-plan/rt-beams/300a00b0/300a0111/300a012e
"""
# Once we have DicomCollection sorted out, it will likely be worthwhile
# having this function take a beam sequence parameter, and get the entry
# point for a given beam sequence
surface_entry_point_raw = get_single_value_from_control_points(
plan, "SurfaceEntryPoint"
)
surface_entry_point = Point(*[float(item) for item in surface_entry_point_raw])
return surface_entry_point
def get_metersets_from_dicom(dicom_dataset, fraction_group):
fraction_group_sequence = dicom_dataset.FractionGroupSequence
fraction_group_numbers = [
fraction_group.FractionGroupNumber for fraction_group in fraction_group_sequence
]
fraction_group_index = fraction_group_numbers.index(fraction_group)
fraction_group = fraction_group_sequence[fraction_group_index]
beam_metersets = tuple(
float(referenced_beam.BeamMeterset)
for referenced_beam in fraction_group.ReferencedBeamSequence
)
return beam_metersets
def get_cp_attribute_leaning_on_prior(control_point_sequence, attribute):
current_result = None
results = []
for control_point in control_point_sequence:
try:
current_result = getattr(control_point, attribute)
# If a subsequent control point doesn't record an
# angle then leave current_angle as what it was in the
# previous iteration of the loop
except AttributeError:
if current_result is None:
raise
results.append(current_result)
return results
def get_gantry_angles_from_dicom(dicom_dataset):
beam_gantry_angles = []
for beam_sequence in dicom_dataset.BeamSequence:
cp_gantry_angles_IEC = get_cp_attribute_leaning_on_prior(
beam_sequence.ControlPointSequence, "GantryAngle"
)
cp_gantry_angles_bipolar = convert_IEC_angle_to_bipolar(cp_gantry_angles_IEC)
cp_unique_gantry_angles = set(cp_gantry_angles_bipolar)
beam_gantry_angles.append(cp_unique_gantry_angles)
for cp_unique_gantry_angles in beam_gantry_angles:
if len(cp_unique_gantry_angles) != 1:
raise ValueError(
"Only a single gantry angle per beam is currently supported"
)
result = tuple(list(item)[0] for item in beam_gantry_angles)
return result
def get_fraction_group_index(dicom_dataset, fraction_group_number):
fraction_group_numbers = [
fraction_group.FractionGroupNumber
for fraction_group in dicom_dataset.FractionGroupSequence
]
return fraction_group_numbers.index(fraction_group_number)
def get_referenced_beam_sequence(dicom_dataset, fraction_group_number):
fraction_group_index = get_fraction_group_index(
dicom_dataset, fraction_group_number
)
fraction_group = dicom_dataset.FractionGroupSequence[fraction_group_index]
referenced_beam_sequence = fraction_group.ReferencedBeamSequence
beam_numbers = [
referenced_beam.ReferencedBeamNumber
for referenced_beam in referenced_beam_sequence
]
return beam_numbers, referenced_beam_sequence
def get_beam_indices_of_fraction_group(dicom_dataset, fraction_group_number):
beam_numbers, _ = get_referenced_beam_sequence(dicom_dataset, fraction_group_number)
beam_sequence_numbers = [
beam_sequence.BeamNumber for beam_sequence in dicom_dataset.BeamSequence
]
beam_indexes = [
beam_sequence_numbers.index(beam_number) for beam_number in beam_numbers
]
return beam_indexes
def get_fraction_group_beam_sequence_and_meterset(dicom_dataset, fraction_group_number):
beam_numbers, referenced_beam_sequence = get_referenced_beam_sequence(
dicom_dataset, fraction_group_number
)
metersets = [
referenced_beam.BeamMeterset for referenced_beam in referenced_beam_sequence
]
beam_sequence_number_mapping = {
beam.BeamNumber: beam for beam in dicom_dataset.BeamSequence
}
beam_sequence = [
beam_sequence_number_mapping[beam_number] for beam_number in beam_numbers
]
return beam_sequence, metersets
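# Illustrative usage (a hedged sketch, not part of the original module; the
# file name "rtplan.dcm" and fraction group number 1 are assumptions):
#
#   import pydicom
#   plan = pydicom.dcmread("rtplan.dcm")
#   entry_point = get_surface_entry_point_with_fallback(plan)
#   beams, metersets = get_fraction_group_beam_sequence_and_meterset(plan, 1)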
|
[
"collections.namedtuple",
"pymedphys._utilities.transforms.convert_IEC_angle_to_bipolar"
] |
[((707, 743), 'collections.namedtuple', 'namedtuple', (['"""Point"""', "('x', 'y', 'z')"], {}), "('Point', ('x', 'y', 'z'))\n", (717, 743), False, 'from collections import namedtuple\n'), ((5504, 5554), 'pymedphys._utilities.transforms.convert_IEC_angle_to_bipolar', 'convert_IEC_angle_to_bipolar', (['cp_gantry_angles_IEC'], {}), '(cp_gantry_angles_IEC)\n', (5532, 5554), False, 'from pymedphys._utilities.transforms import convert_IEC_angle_to_bipolar\n')]
|
from flask import Flask
from flask.ext.mail import Mail
from peewee import *
import os
cwd = os.getcwd()
frontend_dest = os.path.join( cwd, 'frontend/' )
app = Flask( __name__, static_url_path = '', static_folder = frontend_dest )
app.config.from_object( 'config' )
mail = Mail( app )
db = SqliteDatabase( app.config[ 'DATABASE' ], threadlocals = True )
from app.views import *
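# Illustrative development run (a sketch; assumes config.py provides DATABASE
# and mail settings, and that the real entry point lives elsewhere):
#
#   if __name__ == '__main__':
#       app.run(debug=True)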
|
[
"os.getcwd",
"flask.Flask",
"os.path.join",
"flask.ext.mail.Mail"
] |
[((95, 106), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (104, 106), False, 'import os\n'), ((123, 153), 'os.path.join', 'os.path.join', (['cwd', '"""frontend/"""'], {}), "(cwd, 'frontend/')\n", (135, 153), False, 'import os\n'), ((163, 227), 'flask.Flask', 'Flask', (['__name__'], {'static_url_path': '""""""', 'static_folder': 'frontend_dest'}), "(__name__, static_url_path='', static_folder=frontend_dest)\n", (168, 227), False, 'from flask import Flask\n'), ((276, 285), 'flask.ext.mail.Mail', 'Mail', (['app'], {}), '(app)\n', (280, 285), False, 'from flask.ext.mail import Mail\n')]
|
from __future__ import annotations
import re
from pathlib import Path
from logging import getLogger, Logger
from fileinput import hook_compressed
from dataclasses import dataclass, field, fields
from typing import Iterator, get_type_hints, Generator
import numpy as np
import numpy.typing as npt
from pysam import TabixFile
from .data import Data
from .genotypes import GenotypesRefAlt
@dataclass
class Extra:
"""
An extra field on a line in the .hap file
Attributes
----------
name: str
The name of the extra field
fmt: str = "s"
The python fmt string of the field value; indicates how to format the value
description: str = ""
A description of the extra field
"""
name: str
fmt: str = "s"
description: str = ""
_type: type = field(init=False, repr=False)
    def __post_init__(self):
if self.fmt.endswith("s"):
self._type = str
elif self.fmt.endswith("d"):
self._type = int
elif self.fmt.endswith("f"):
self._type = float
else:
raise ValueError("Unsupported extra type '{}'!".format(self.fmt[-1]))
@classmethod
def from_hap_spec(cls, line: str) -> Extra:
"""
Convert an "extra" line in the header of a .hap file into an Extra object
Parameters
----------
line: str
An "extra" field, as it appears declared in the header
Returns
-------
Extra
An Extra object
"""
line = line[3:].split("\t")
return cls(name=line[0], fmt=line[1], description=line[2])
def to_hap_spec(self, line_type_symbol: str) -> str:
"""
Convert an Extra object into a header line in the .hap format spec
Parameters
----------
        line_type_symbol: str
            The symbol of the line type in whose header this extra field is declared
        Returns
        -------
        str
            A valid extra field declaration for the header in the .hap format spec
"""
return (
"#"
+ line_type_symbol
+ "\t"
+ "\t".join((self.name, self.fmt, self.description))
)
@property
def fmt_str(self) -> str:
"""
Convert an Extra into a fmt string
        Returns
-------
str
A python format string (ex: "{beta:.3f}")
"""
return "{" + self.name + ":" + self.fmt + "}"
# We declare this class to be a dataclass to automatically define __init__ and a few
# other methods.
@dataclass
class Variant:
"""
A variant within the .hap format spec
In order to use this class with the Haplotypes class, you should
1) add properties to the class for each of extra fields
2) override the _extras property to describe the header declaration
Attributes
----------
start: int
The chromosomal start position of the variant
end: int
The chromosomal end position of the variant
In most cases this will be the same as the start position
id: str
The variant's unique ID
allele: str
The allele of this variant within the Haplotype
_extras: tuple[Extra]
Extra fields for the haplotype
Examples
--------
Let's extend this class and add an extra field called "score"
>>> from dataclasses import dataclass, field
>>> @dataclass
>>> class CustomVariant(Variant):
... score: float
... _extras: tuple = (
... Extra("score", ".3f", "Importance of inclusion"),
... )
"""
start: int
end: int
id: str
allele: str
_extras: tuple = field(default=tuple(), init=False, repr=False)
@property
def ID(self):
"""
Create an alias for the id property
"""
return self.id
@property
# TODO: use @cached_property in py3.8
def _fmt(self):
extras = ""
if len(self._extras):
extras = "\t" + "\t".join(extra.fmt_str for extra in self._extras)
return "V\t{hap:s}\t{start:d}\t{end:d}\t{id:s}\t{allele:s}" + extras
@classmethod
def from_hap_spec(cls: Variant, line: str) -> tuple[str, Variant]:
"""
Convert a variant line into a Variant object in the .hap format spec
Note that this implementation does NOT support having more extra fields than
appear in the header
Parameters
----------
line: str
A variant (V) line from the .hap file
Returns
-------
tuple[str, Variant]
The haplotype ID and Variant object for the variant
"""
assert line[0] == "V", "Attempting to init a Variant with a non-V line"
line = line[2:].split("\t")
hap_id = line[0]
var_fields = {}
idx = 1
for name, val in get_type_hints(cls).items():
if not name.startswith("_"):
var_fields[name] = val(line[idx])
idx += 1
return hap_id, cls(**var_fields)
def to_hap_spec(self, hap_id: str) -> str:
"""
Convert a Variant object into a variant line in the .hap format spec
Parameters
----------
hap_id: str
The ID of the haplotype associated with this variant
Returns
-------
str
A valid variant line (V) in the .hap format spec
"""
return self._fmt.format(**self.__dict__, hap=hap_id)
@classmethod
def extras_head(cls) -> tuple:
"""
Return the header lines of the extra fields that are supported
Returns
-------
tuple
The header lines of the extra fields
"""
return tuple(extra.to_hap_spec("V") for extra in cls._extras)
# We declare this class to be a dataclass to automatically define __init__ and a few
# other methods.
@dataclass
class Haplotype:
"""
A haplotype within the .hap format spec
In order to use this class with the Haplotypes class, you should
1) add properties to the class for each of extra fields
2) override the _extras property to describe the header declaration
Attributes
----------
chrom: str
The contig to which this haplotype belongs
start: int
The chromosomal start position of the haplotype
end: int
The chromosomal end position of the haplotype
id: str
The haplotype's unique ID
variants: list[Variant]
A list of the variants in this haplotype
_extras: tuple[Extra]
Extra fields for the haplotype
Examples
--------
Let's extend this class and add an extra field called "ancestry"
>>> from dataclasses import dataclass, field
>>> @dataclass
>>> class CustomHaplotype(Haplotype):
... ancestry: str
... _extras: tuple = (
... Extra("ancestry", "s", "Local ancestry"),
... )
"""
chrom: str
start: int
end: int
id: str
variants: tuple = field(default_factory=tuple, init=False)
_extras: tuple = field(default=tuple(), init=False, repr=False)
@property
def ID(self):
"""
Create an alias for the id property
"""
return self.id
@property
# TODO: use @cached_property in py3.8
def _fmt(self):
extras = ""
if len(self._extras):
extras = "\t" + "\t".join(extra.fmt_str for extra in self._extras)
return "H\t{chrom:s}\t{start:d}\t{end:d}\t{id:s}" + extras
@property
# TODO: use @cached_property in py3.8
def varIDs(self):
return {var.id for var in self.variants}
@classmethod
def from_hap_spec(
cls: Haplotype, line: str, variants: tuple = tuple()
) -> Haplotype:
"""
Convert a variant line into a Haplotype object in the .hap format spec
Note that this implementation does NOT support having more extra fields than
appear in the header
Parameters
----------
line: str
            A haplotype (H) line from the .hap file
        Returns
        -------
        Haplotype
            The Haplotype object for the line
"""
assert line[0] == "H", "Attempting to init a Haplotype with a non-H line"
line = line[2:].split("\t")
hap_fields = {}
idx = 0
for name, val in get_type_hints(cls).items():
if name != "variants" and not name.startswith("_"):
hap_fields[name] = val(line[idx])
idx += 1
hap = cls(**hap_fields)
hap.variants = variants
return hap
def to_hap_spec(self) -> str:
"""
Convert a Haplotype object into a haplotype line in the .hap format spec
Returns
-------
str
A valid haplotype line (H) in the .hap format spec
"""
return self._fmt.format(**self.__dict__)
@classmethod
def extras_head(cls) -> tuple:
"""
Return the header lines of the extra fields that are supported
Returns
-------
tuple
The header lines of the extra fields
"""
return tuple(extra.to_hap_spec("H") for extra in cls._extras)
def transform(
self, genotypes: GenotypesRefAlt, samples: list[str] = None
) -> npt.NDArray[bool]:
"""
Transform a genotypes matrix via the current haplotype
Each entry in the returned matrix denotes the presence of the current haplotype
in each chromosome of each sample in the Genotypes object
Parameters
----------
genotypes : GenotypesRefAlt
The genotypes which to transform using the current haplotype
If the genotypes have not been loaded into the Genotypes object yet, this
method will call Genotypes.read(), while loading only the needed variants
samples : list[str], optional
See documentation for :py:attr:`~.Genotypes.read`
Returns
-------
npt.NDArray[bool]
A 2D matrix of shape (num_samples, 2) where each entry in the matrix
denotes the presence of the haplotype in one chromosome of a sample
"""
var_IDs = self.varIDs
# check: have the genotypes been loaded yet?
# if not, we can load just the variants we need
if genotypes.unset():
start = min(var.start for var in self.variants)
end = max(var.end for var in self.variants)
region = f"{self.chrom}:{start}-{end}"
genotypes.read(region=region, samples=samples, variants=var_IDs)
genotypes.check_biallelic(discard_also=True)
genotypes.check_phase()
# create a dict where the variants are keyed by ID
var_dict = {
var["id"]: var["ref"] for var in genotypes.variants if var["id"] in var_IDs
}
var_idxs = [
idx for idx, var in enumerate(genotypes.variants) if var["id"] in var_IDs
]
missing_IDs = var_IDs - var_dict.keys()
if len(missing_IDs):
raise ValueError(
f"Variants {missing_IDs} are present in haplotype '{self.id}' but "
"absent in the provided genotypes"
)
# create a np array denoting the alleles that we want
alleles = [int(var.allele != var_dict[var.id]) for var in self.variants]
allele_arr = np.array([[[al] for al in alleles]]) # shape: (1, n, 1)
# look for the presence of each allele in each chromosomal strand
# and then just AND them together
return np.all(allele_arr == genotypes.data[:, var_idxs], axis=1)
class Haplotypes(Data):
"""
A class for processing haplotypes from a file
Attributes
----------
fname: Path
The path to the file containing the data
data: dict[str, Haplotype]
A dict of Haplotype objects keyed by their IDs
types: dict
A dict of class names keyed by the symbol denoting their line type
Ex: {'H': Haplotype, 'V': Variant}
version: str
A string denoting the current file format version
log: Logger
A logging instance for recording debug statements.
Examples
--------
Parsing a basic .hap file without any extra fields is simple:
>>> haplotypes = Haplotypes.load('tests/data/basic.hap')
>>> haps = haplotypes.data # a dictionary of Haplotype objects
If the .hap file contains extra fields, you'll need to call the read() method
manually. You'll also need to create Haplotype and Variant subclasses that support
the extra fields and then specify the names of the classes when you initialize the
Haplotypes object:
>>> haplotypes = Haplotypes('tests/data/simphenotype.hap', HaptoolsHaplotype)
>>> haplotypes.read()
>>> haps = haplotypes.data # a dictionary of Haplotype objects
"""
def __init__(
self,
fname: Path,
haplotype: type[Haplotype] = Haplotype,
variant: type[Variant] = Variant,
log: Logger = None,
):
super().__init__(fname, log)
self.data = None
self.types = {"H": haplotype, "V": variant}
self.version = "0.0.1"
@classmethod
def load(
cls: Haplotypes, fname: Path, region: str = None, haplotypes: set[str] = None
) -> Haplotypes:
"""
Load haplotypes from a .hap file
Read the file contents
Parameters
----------
fname: Path
See documentation for :py:attr:`~.Data.fname`
region: str, optional
See documentation for :py:meth:`~.Haplotypes.read`
haplotypes: list[str], optional
See documentation for :py:meth:`~.Haplotypes.read`
Returns
-------
Haplotypes
A Haplotypes object with the data loaded into its properties
"""
haps = cls(fname)
haps.read(region, haplotypes)
return haps
def check_header(self, lines: list[str], check_version=False):
"""
        Check 1) that the version number matches and 2) that extra fields declared in
        the .hap file can be handled by the Variant and Haplotype classes
        provided in __init__()
Parameters
----------
lines: list[str]
Header lines from the .hap file
check_version: bool = False
Whether to also check the version of the file
Raises
------
ValueError
If any of the header lines are not supported
"""
self.log.info("Checking header")
if check_version:
version_line = lines[0].split("\t")
assert version_line[1] == "version", (
"The version of the format spec must be declared as the first line of"
" the header."
)
if version_line[2] != self.version:
self.log.warning(
f"The version of the provided .hap file is {version_line} but this"
f" tool expected {self.version}"
)
expected_lines = [
line
for line_type in self.types.values()
for line in line_type.extras_head()
]
for line in lines:
if line[1] in self.types.keys():
try:
expected_lines.remove(line)
except ValueError:
# extract the name of the extra field
name = line.split("\t", maxsplit=1)[1]
raise ValueError(
f"The extra field '{name}' is declared in the header of the"
" .hap file but is not accepted by this tool."
)
# if there are any fields left...
if expected_lines:
names = [line.split("\t", maxsplit=2)[1] for line in expected_lines]
raise ValueError(
"Expected the input .hap file to have these extra fields, but they "
f"don't seem to be declared in the header: {*names,}"
)
def _line_type(self, line: str) -> type:
"""
Return the type of line that this line matches
Parameters
----------
line: str
A line of the .hap file
Returns
-------
type
The name of the class corresponding with the type of this line
"""
line_types = self.types.keys()
if line[0] in line_types:
return line[0]
else:
# if none of the lines matched, return None
return None
def read(self, region: str = None, haplotypes: set[str] = None):
"""
Read haplotypes from a .hap file into a list stored in :py:attr:`~.Haplotypes.data`
Parameters
----------
region: str, optional
The region from which to extract haplotypes; ex: 'chr1:1234-34566' or 'chr7'
For this to work, the .hap file must be indexed and the seqname must match!
Defaults to loading all haplotypes
haplotypes: list[str], optional
A list of haplotype IDs corresponding to a subset of the haplotypes to
extract
Defaults to loading haplotypes from all samples
For this to work, the .hap file must be indexed
"""
super().read()
self.data = {}
var_haps = {}
for line in self.__iter__(region, haplotypes):
if isinstance(line, Haplotype):
self.data[line.id] = line
elif isinstance(line, Variant):
hap_id = line.hap
del line.hap
# store the variant for later
var_haps.setdefault(hap_id, []).append(line)
for hap in var_haps:
self.data[hap].variants = tuple(var_haps[hap])
def __iter__(
self, region: str = None, haplotypes: set[str] = None
) -> Iterator[Variant | Haplotype]:
"""
Read haplotypes from a .hap file line by line without storing anything
Parameters
----------
region: str, optional
The region from which to extract haplotypes; ex: 'chr1:1234-34566' or 'chr7'
For this to work, the .hap file must be indexed and the seqname must match!
Defaults to loading all haplotypes
haplotypes: list[str], optional
A list of haplotype IDs corresponding to a subset of the haplotypes to
extract
Defaults to loading haplotypes from all samples
For this to work, the .hap file must be indexed
Yields
------
Iterator[Variant|Haplotype]
An iterator over each line in the file, where each line is encoded as a
Variant or Haplotype containing each of the class properties
Examples
--------
If you're worried that the contents of the .hap file will be large, you may
opt to parse the file line-by-line instead of loading it all into memory at
once. In cases like these, you can use the __iter__() method in a for-loop:
>>> haplotypes = Haplotypes('tests/data/basic.hap')
>>> for line in haplotypes:
... print(line)
Call the function manually to pass it the region or haplotypes params:
>>> haplotypes = Haplotypes('tests/data/basic.hap.gz')
>>> for line in haplotypes.__iter__(
... region='21:26928472-26941960', haplotypes={"chr21.q.3365*1"}
... ):
... print(line)
"""
# if the user requested a specific region or set of haplotypes, then we should
# handle it using tabix
# else, we use a regular text opener
if region or haplotypes:
haps_file = TabixFile(str(self.fname))
self.check_header(list(haps_file.header))
if region:
region_positions = region.split(":", maxsplit=1)[1]
# fetch region
# we already know that each line will start with an H, so we don't
# need to check that
for line in haps_file.fetch(region):
hap = self.types["H"].from_hap_spec(line)
if haplotypes is not None:
if hap.id not in haplotypes:
continue
haplotypes.remove(hap.id)
yield hap
else:
for line in haps_file.fetch():
# we only want lines that start with an H
line_type = self._line_type(line)
if line_type == "H":
hap = self.types["H"].from_hap_spec(line)
if hap.id in haplotypes:
yield hap
haplotypes.remove(hap.id)
elif line_type > "H":
# if we've already passed all of the H's, we can just exit
# We assume the file has been sorted so that all of the H lines
# come before the V lines
break
# query for the variants of each haplotype
for hap_id in self.data:
# exclude variants outside the desired region
hap_region = hap_id
if region:
hap_region = hap_id + ":" + region_positions
# fetch region
# we already know that each line will start with a V, so we don't
# need to check that
for line in haps_file.fetch(hap_region):
line_type = self._line_type(line)
if line_type == "V":
var = self.types["V"].from_hap_spec(line)[1]
# add the haplotype, since otherwise, the user won't know
# which haplotype this variant belongs to
var.hap = hap_id
yield var
else:
self.log.warning(
"Check that chromosomes are distinct from your hap IDs!"
)
haps_file.close()
else:
# the file is not indexed, so we can't assume it's sorted, either
# use hook_compressed to automatically handle gz files
with hook_compressed(self.fname, mode="rt") as haps:
self.log.info("Not taking advantage of indexing.")
header_lines = []
for line in haps:
line = line.rstrip("\n")
line_type = self._line_type(line)
if line[0] == "#":
# store header for later
try:
header_lines.append(line)
except AttributeError:
# this happens when we encounter a line beginning with a #
# after already having seen an H or V line
# in this case, it's usually just a comment, so we can ignore
pass
else:
if header_lines:
self.check_header(header_lines)
header_lines = None
self.log.info("Finished reading header.")
if line_type == "H":
yield self.types["H"].from_hap_spec(line)
elif line_type == "V":
hap_id, var = self.types["V"].from_hap_spec(line)
# add the haplotype, since otherwise, the user won't know
# which haplotype this variant belongs to
var.hap = hap_id
yield var
else:
self.log.warning(
f"Ignoring unsupported line type '{line[0]}'"
)
def to_str(self) -> Generator[str, None, None]:
"""
Create a string representation of this Haplotype
Yields
------
Generator[str, None, None]
A list of lines (strings) to include in the output
"""
yield "#\tversion\t" + self.version
for line_type in self.types:
yield from self.types[line_type].extras_head()
for hap in self.data.values():
yield self.types["H"].to_hap_spec(hap)
for hap in self.data.values():
for var in hap.variants:
yield self.types["V"].to_hap_spec(var, hap.id)
def __repr__(self):
return "\n".join(self.to_str())
def write(self):
"""
Write the contents of this Haplotypes object to the file given by fname
Parameters
----------
file: TextIO
A file-like object to which this Haplotypes object should be written.
Examples
--------
To write to a .hap file, you must first initialize a Haplotypes object and then
fill out the data property:
>>> haplotypes = Haplotypes('tests/data/basic.hap')
>>> haplotypes.data = {'H1': Haplotype('chr1', 0, 10, 'H1')}
>>> haplotypes.write()
"""
with hook_compressed(self.fname, mode="wt") as haps:
for line in self.to_str():
haps.write(line + "\n")
def transform(
self,
genotypes: GenotypesRefAlt,
hap_gts: GenotypesRefAlt,
samples: list[str] = None,
low_memory: bool = False,
) -> GenotypesRefAlt:
"""
Transform a genotypes matrix via the current haplotype
Each entry in the returned matrix denotes the presence of each haplotype
in each chromosome of each sample in the Genotypes object
Parameters
----------
genotypes : GenotypesRefAlt
The genotypes which to transform using the current haplotype
If the genotypes have not been loaded into the Genotypes object yet, this
method will call Genotypes.read(), while loading only the needed variants
hap_gts: GenotypesRefAlt
An empty GenotypesRefAlt object into which the haplotype genotypes should
be stored
samples : list[str], optional
See documentation for :py:attr:`~.Genotypes.read`
low_memory : bool, optional
If True, each haplotype's genotypes will be loaded one at a time.
Returns
-------
GenotypesRefAlt
A Genotypes object composed of haplotypes instead of regular variants.
"""
hap_gts.samples = genotypes.samples
hap_gts.variants = np.array(
[(hap.id, hap.chrom, hap.start, 0, "A", "T") for hap in self.data.values()],
dtype=[
("id", "U50"),
("chrom", "U10"),
("pos", np.uint32),
("aaf", np.float64),
("ref", "U100"),
("alt", "U100"),
],
)
self.log.info(
f"Transforming a set of genotypes from {len(genotypes.variants)} total "
f"variants with a list of {len(self.data)} haplotypes"
)
hap_gts.data = np.concatenate(
tuple(
hap.transform(genotypes, samples)[:, np.newaxis]
for hap in self.data.values()
),
axis=1,
).astype(np.uint8)
|
[
"typing.get_type_hints",
"fileinput.hook_compressed",
"dataclasses.field",
"numpy.array",
"numpy.all"
] |
[((805, 834), 'dataclasses.field', 'field', ([], {'init': '(False)', 'repr': '(False)'}), '(init=False, repr=False)\n', (810, 834), False, 'from dataclasses import dataclass, field, fields\n'), ((7026, 7066), 'dataclasses.field', 'field', ([], {'default_factory': 'tuple', 'init': '(False)'}), '(default_factory=tuple, init=False)\n', (7031, 7066), False, 'from dataclasses import dataclass, field, fields\n'), ((11472, 11508), 'numpy.array', 'np.array', (['[[[al] for al in alleles]]'], {}), '([[[al] for al in alleles]])\n', (11480, 11508), True, 'import numpy as np\n'), ((11660, 11717), 'numpy.all', 'np.all', (['(allele_arr == genotypes.data[:, var_idxs])'], {'axis': '(1)'}), '(allele_arr == genotypes.data[:, var_idxs], axis=1)\n', (11666, 11717), True, 'import numpy as np\n'), ((25545, 25583), 'fileinput.hook_compressed', 'hook_compressed', (['self.fname'], {'mode': '"""wt"""'}), "(self.fname, mode='wt')\n", (25560, 25583), False, 'from fileinput import hook_compressed\n'), ((4855, 4874), 'typing.get_type_hints', 'get_type_hints', (['cls'], {}), '(cls)\n', (4869, 4874), False, 'from typing import Iterator, get_type_hints, Generator\n'), ((8390, 8409), 'typing.get_type_hints', 'get_type_hints', (['cls'], {}), '(cls)\n', (8404, 8409), False, 'from typing import Iterator, get_type_hints, Generator\n'), ((22559, 22597), 'fileinput.hook_compressed', 'hook_compressed', (['self.fname'], {'mode': '"""rt"""'}), "(self.fname, mode='rt')\n", (22574, 22597), False, 'from fileinput import hook_compressed\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
# TODO: pass ft_cse to use fine-tuned feature
# TODO: pass fine_steps -1 to use fine samples
from absl import flags, app
import sys
sys.path.insert(0,'')
sys.path.insert(0,'third_party')
import numpy as np
from matplotlib import pyplot as plt
import matplotlib
import torch
import os
import glob
import pdb
import cv2
import trimesh
from scipy.spatial.transform import Rotation as R
import imageio
from utils.io import save_vid, str_to_frame, save_bones, draw_lines, vis_match
from utils.colors import label_colormap
from nnutils.train_utils import v2s_trainer
from nnutils.geom_utils import obj_to_cam, tensor2array, vec_to_sim3, obj_to_cam,\
Kmatinv, K2mat, K2inv, sample_xy, resample_dp,\
raycast
from nnutils.loss_utils import kp_reproj, feat_match, kp_reproj_loss
from ext_utils.util_flow import write_pfm
from ext_utils.flowlib import cat_imgflo
opts = flags.FLAGS
def construct_rays(dp_feats_rsmp, model, xys, rand_inds,
Rmat, Tmat, Kinv, near_far, flip=True):
device = dp_feats_rsmp.device
bs,nsample,_ =xys.shape
opts = model.opts
embedid=model.embedid
embedid = embedid.long().to(device)[:,None]
rays = raycast(xys, Rmat, Tmat, Kinv, near_far)
rtk_vec = rays['rtk_vec']
del rays
feats_at_samp = [dp_feats_rsmp[i].view(model.num_feat,-1).T\
[rand_inds[i].long()] for i in range(bs)]
feats_at_samp = torch.stack(feats_at_samp,0) # bs,ns,num_feat
# TODO implement for se3
if opts.lbs and model.num_bone_used>0:
bone_rts = model.nerf_body_rts(embedid)
bone_rts = bone_rts.repeat(1,nsample,1)
# TODO rearrange inputs
feats_at_samp = feats_at_samp.view(-1, model.num_feat)
xys = xys.view(-1,1,2)
if flip:
rtk_vec = rtk_vec.view(bs//2,2,-1).flip(1).view(rtk_vec.shape)
bone_rts = bone_rts.view(bs//2,2,-1).flip(1).view(bone_rts.shape)
rays = {'rtk_vec': rtk_vec,
'bone_rts': bone_rts}
return rays, feats_at_samp, xys
def match_frames(trainer, idxs, nsample=200):
idxs = [int(i) for i in idxs.split(' ')]
bs = len(idxs)
opts = trainer.opts
device = trainer.device
model = trainer.model
model.eval()
# load frames and aux data
for dataset in trainer.evalloader.dataset.datasets:
dataset.load_pair = False
batch = []
for i in idxs:
batch.append( trainer.evalloader.dataset[i] )
batch = trainer.evalloader.collate_fn(batch)
model.set_input(batch)
rtk = model.rtk
Rmat = rtk[:,:3,:3]
Tmat = rtk[:,:3,3]
Kmat = K2mat(rtk[:,3,:])
kaug = model.kaug # according to cropping, p = Kaug Kmat P
Kaug = K2inv(kaug)
Kinv = Kmatinv(Kaug.matmul(Kmat))
near_far = model.near_far[model.frameid.long()]
dp_feats_rsmp = model.dp_feats
# construct rays for sampled pixels
rand_inds, xys = sample_xy(opts.img_size, bs, nsample, device,return_all=False)
rays, feats_at_samp, xys = construct_rays(dp_feats_rsmp, model, xys, rand_inds,
Rmat, Tmat, Kinv, near_far)
model.update_delta_rts(rays)
# re-project
with torch.no_grad():
pts_pred = feat_match(model.nerf_feat, model.embedding_xyz, feats_at_samp,
model.latest_vars['obj_bound'],grid_size=20,is_training=False)
pts_pred = pts_pred.view(bs,nsample,3)
xy_reproj = kp_reproj(pts_pred, model.nerf_models, model.embedding_xyz, rays)
# draw
imgs_trg = model.imgs.view(bs//2,2,-1).flip(1).view(model.imgs.shape)
xy_reproj = xy_reproj.view(bs,nsample,2)
xys = xys.view(bs,nsample, 2)
sil_at_samp = torch.stack([model.masks[i].view(-1,1)[rand_inds[i]] \
for i in range(bs)],0) # bs,ns,1
for i in range(bs):
img1 = model.imgs[i]
img2 = imgs_trg[i]
img = torch.cat([img1, img2],2)
valid_idx = sil_at_samp[i].bool()[...,0]
p1s = xys[i][valid_idx]
p2s = xy_reproj[i][valid_idx]
p2s[...,0] = p2s[...,0] + img1.shape[2]
img = draw_lines(img, p1s,p2s)
cv2.imwrite('tmp/match_%04d.png'%i, img)
# visualize matching error
if opts.render_size<=128:
with torch.no_grad():
rendered, rand_inds = model.nerf_render(rtk, kaug, model.embedid,
nsample=opts.nsample, ndepth=opts.ndepth)
xyz_camera = rendered['xyz_camera_vis'][0].reshape(opts.render_size**2,-1)
xyz_canonical = rendered['xyz_canonical_vis'][0].reshape(opts.render_size**2,-1)
skip_idx = len(xyz_camera)//50 # vis 50 rays
trimesh.Trimesh(xyz_camera[0::skip_idx].reshape(-1,3).cpu()).\
export('tmp/match_camera_pts.obj')
trimesh.Trimesh(xyz_canonical[0::skip_idx].reshape(-1,3).cpu()).\
export('tmp/match_canonical_pts.obj')
vis_match(rendered, model.masks, model.imgs,
bs,opts.img_size, opts.ndepth)
## construct rays for all pixels
#rand_inds, xys = sample_xy(opts.img_size, bs, nsample, device,return_all=True)
#rays, feats_at_samp, xys = construct_rays(dp_feats_rsmp, model, xys, rand_inds,
# Rmat, Tmat, Kinv, near_far, flip=False)
#with torch.no_grad():
# pts_pred = feat_match(model.nerf_feat, model.embedding_xyz, feats_at_samp,
# model.latest_vars['obj_bound'],grid_size=20,is_training=False)
# pts_pred = pts_pred.view(bs,opts.render_size**2,3)
# proj_err = kp_reproj_loss(pts_pred, xys, model.nerf_models,
# model.embedding_xyz, rays)
# proj_err = proj_err.view(pts_pred.shape[:-1]+(1,))
# proj_err = proj_err/opts.img_size * 2
# results = {}
# results['proj_err'] = proj_err
## visualize current error stats
#feat_err=model.latest_vars['fp_err'][:,0]
#proj_err=model.latest_vars['fp_err'][:,1]
#feat_err = feat_err[feat_err>0]
#proj_err = proj_err[proj_err>0]
#print('feat-med: %f'%(np.median(feat_err)))
#print('proj-med: %f'%(np.median(proj_err)))
#plt.hist(feat_err,bins=100)
#plt.savefig('tmp/viser_feat_err.jpg')
#plt.clf()
#plt.hist(proj_err,bins=100)
#plt.savefig('tmp/viser_proj_err.jpg')
# visualize codes
with torch.no_grad():
fid = torch.Tensor(range(0,len(model.impath))).cuda().long()
D=model.pose_code(fid)
D = D.view(len(fid),-1)
##TODO
#px = torch.Tensor(range(len(D))).cuda()
#py = px*2
#pz = px*5+1
#D = torch.stack([px,py,pz],-1)
D = D-D.mean(0)[None]
A = D.T.matmul(D)/D.shape[0] # fxf
U,S,V=torch.svd(A) #
code_proj_3d=D.matmul(V[:,:3])
cmap = matplotlib.cm.get_cmap('cool')
time = np.asarray(range(len(model.impath)))
time = time/time.max()
code_proj_3d=code_proj_3d.detach().cpu().numpy()
trimesh.Trimesh(code_proj_3d, vertex_colors=cmap(time)).export('tmp/0.obj')
#plt.figure(figsize=(16,16))
plot_stack = []
weight_dir = opts.model_path.rsplit('/',1)[0]
bne_path = sorted(glob.glob('%s/%s-*bne-mrender*.jpg'%\
(weight_dir, opts.seqname)))
img_path = model.impath.copy()
## remove the last img for each video to make shape consistent with bone renders
#for i in model.data_offset[1:][::-1]:
# img_path.remove(img_path[i-1])
# code_proj_3d = np.delete(code_proj_3d, i-1,0)
# plot the first video
img_path = img_path [:model.data_offset[1]-2]
code_proj_3d = code_proj_3d[:model.data_offset[1]-2]
try:
bne_path = bne_path [:model.data_offset[1]-2]
except:
pass
for i in range(len(code_proj_3d)):
plt.plot(code_proj_3d[i,0], code_proj_3d[i,1], color=cmap(time[i]), marker='o')
plt.annotate(str(i), (code_proj_3d[i,0], code_proj_3d[i,1]))
plt.xlim(code_proj_3d[:,0].min(), code_proj_3d[:,0].max())
plt.ylim(code_proj_3d[:,1].min(), code_proj_3d[:,1].max())
fig = plt.gcf()
fig.canvas.draw()
plot = np.frombuffer(fig.canvas.tostring_rgb(), dtype=np.uint8)
plot = plot.reshape(fig.canvas.get_width_height()[::-1] + (3,))
print('plot pose code of frame id:%03d'%i)
if len(bne_path) == len(code_proj_3d):
bneimg = cv2.imread(bne_path[i])
bneimg = cv2.resize(bneimg,\
(bneimg.shape[1]*plot.shape[0]//bneimg.shape[0], plot.shape[0]))
img=cv2.imread(img_path[i])[:,:,::-1]
img = cv2.resize(img,\
(img.shape[1]*plot.shape[0]//img.shape[0], plot.shape[0]))
plot = np.hstack([img, bneimg, plot])
plot_stack.append(plot)
save_vid('tmp/code', plot_stack, suffix='.mp4',
upsample_frame=150.,fps=30)
save_vid('tmp/code', plot_stack, suffix='.gif',
upsample_frame=150.,fps=30)
# vis dps
cv2.imwrite('tmp/match_dpc.png', model.dp_vis[model.dps[0].long()].cpu().numpy()*255)
def main(_):
opts.img_size=opts.render_size
trainer = v2s_trainer(opts, is_eval=True)
data_info = trainer.init_dataset()
trainer.define_model(data_info)
#write matching function
img_match = match_frames(trainer, opts.match_frames)
if __name__ == '__main__':
app.run(main)
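# Illustrative invocation (a sketch; the script name and flag values are
# assumptions, and only flags referenced above such as --seqname, --model_path
# and --match_frames are known to be used):
#
#   python match.py --seqname mydata --model_path logdir/params.pth \
#       --match_frames "0 100"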
|
[
"matplotlib.cm.get_cmap",
"torch.cat",
"nnutils.loss_utils.feat_match",
"glob.glob",
"torch.no_grad",
"nnutils.geom_utils.sample_xy",
"nnutils.train_utils.v2s_trainer",
"cv2.imwrite",
"nnutils.loss_utils.kp_reproj",
"utils.io.vis_match",
"cv2.resize",
"torch.svd",
"numpy.hstack",
"utils.io.draw_lines",
"matplotlib.pyplot.gcf",
"torch.stack",
"nnutils.geom_utils.raycast",
"sys.path.insert",
"utils.io.save_vid",
"cv2.imread",
"absl.app.run",
"nnutils.geom_utils.K2inv",
"nnutils.geom_utils.K2mat"
] |
[((204, 226), 'sys.path.insert', 'sys.path.insert', (['(0)', '""""""'], {}), "(0, '')\n", (219, 226), False, 'import sys\n'), ((226, 259), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""third_party"""'], {}), "(0, 'third_party')\n", (241, 259), False, 'import sys\n'), ((1284, 1324), 'nnutils.geom_utils.raycast', 'raycast', (['xys', 'Rmat', 'Tmat', 'Kinv', 'near_far'], {}), '(xys, Rmat, Tmat, Kinv, near_far)\n', (1291, 1324), False, 'from nnutils.geom_utils import obj_to_cam, tensor2array, vec_to_sim3, obj_to_cam, Kmatinv, K2mat, K2inv, sample_xy, resample_dp, raycast\n'), ((1516, 1545), 'torch.stack', 'torch.stack', (['feats_at_samp', '(0)'], {}), '(feats_at_samp, 0)\n', (1527, 1545), False, 'import torch\n'), ((2683, 2702), 'nnutils.geom_utils.K2mat', 'K2mat', (['rtk[:, 3, :]'], {}), '(rtk[:, 3, :])\n', (2688, 2702), False, 'from nnutils.geom_utils import obj_to_cam, tensor2array, vec_to_sim3, obj_to_cam, Kmatinv, K2mat, K2inv, sample_xy, resample_dp, raycast\n'), ((2777, 2788), 'nnutils.geom_utils.K2inv', 'K2inv', (['kaug'], {}), '(kaug)\n', (2782, 2788), False, 'from nnutils.geom_utils import obj_to_cam, tensor2array, vec_to_sim3, obj_to_cam, Kmatinv, K2mat, K2inv, sample_xy, resample_dp, raycast\n'), ((2977, 3040), 'nnutils.geom_utils.sample_xy', 'sample_xy', (['opts.img_size', 'bs', 'nsample', 'device'], {'return_all': '(False)'}), '(opts.img_size, bs, nsample, device, return_all=False)\n', (2986, 3040), False, 'from nnutils.geom_utils import obj_to_cam, tensor2array, vec_to_sim3, obj_to_cam, Kmatinv, K2mat, K2inv, sample_xy, resample_dp, raycast\n'), ((9389, 9420), 'nnutils.train_utils.v2s_trainer', 'v2s_trainer', (['opts'], {'is_eval': '(True)'}), '(opts, is_eval=True)\n', (9400, 9420), False, 'from nnutils.train_utils import v2s_trainer\n'), ((9615, 9628), 'absl.app.run', 'app.run', (['main'], {}), '(main)\n', (9622, 9628), False, 'from absl import flags, app\n'), ((3236, 3251), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3249, 3251), False, 'import torch\n'), ((3272, 3405), 'nnutils.loss_utils.feat_match', 'feat_match', (['model.nerf_feat', 'model.embedding_xyz', 'feats_at_samp', "model.latest_vars['obj_bound']"], {'grid_size': '(20)', 'is_training': '(False)'}), "(model.nerf_feat, model.embedding_xyz, feats_at_samp, model.\n latest_vars['obj_bound'], grid_size=20, is_training=False)\n", (3282, 3405), False, 'from nnutils.loss_utils import kp_reproj, feat_match, kp_reproj_loss\n'), ((3478, 3543), 'nnutils.loss_utils.kp_reproj', 'kp_reproj', (['pts_pred', 'model.nerf_models', 'model.embedding_xyz', 'rays'], {}), '(pts_pred, model.nerf_models, model.embedding_xyz, rays)\n', (3487, 3543), False, 'from nnutils.loss_utils import kp_reproj, feat_match, kp_reproj_loss\n'), ((3957, 3983), 'torch.cat', 'torch.cat', (['[img1, img2]', '(2)'], {}), '([img1, img2], 2)\n', (3966, 3983), False, 'import torch\n'), ((4164, 4189), 'utils.io.draw_lines', 'draw_lines', (['img', 'p1s', 'p2s'], {}), '(img, p1s, p2s)\n', (4174, 4189), False, 'from utils.io import save_vid, str_to_frame, save_bones, draw_lines, vis_match\n'), ((4197, 4239), 'cv2.imwrite', 'cv2.imwrite', (["('tmp/match_%04d.png' % i)", 'img'], {}), "('tmp/match_%04d.png' % i, img)\n", (4208, 4239), False, 'import cv2\n'), ((6438, 6453), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (6451, 6453), False, 'import torch\n'), ((6819, 6831), 'torch.svd', 'torch.svd', (['A'], {}), '(A)\n', (6828, 6831), False, 'import torch\n'), ((6888, 6918), 'matplotlib.cm.get_cmap', 'matplotlib.cm.get_cmap', (['"""cool"""'], {}), 
"('cool')\n", (6910, 6918), False, 'import matplotlib\n'), ((9028, 9105), 'utils.io.save_vid', 'save_vid', (['"""tmp/code"""', 'plot_stack'], {'suffix': '""".mp4"""', 'upsample_frame': '(150.0)', 'fps': '(30)'}), "('tmp/code', plot_stack, suffix='.mp4', upsample_frame=150.0, fps=30)\n", (9036, 9105), False, 'from utils.io import save_vid, str_to_frame, save_bones, draw_lines, vis_match\n'), ((9128, 9205), 'utils.io.save_vid', 'save_vid', (['"""tmp/code"""', 'plot_stack'], {'suffix': '""".gif"""', 'upsample_frame': '(150.0)', 'fps': '(30)'}), "('tmp/code', plot_stack, suffix='.gif', upsample_frame=150.0, fps=30)\n", (9136, 9205), False, 'from utils.io import save_vid, str_to_frame, save_bones, draw_lines, vis_match\n'), ((4313, 4328), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4326, 4328), False, 'import torch\n'), ((4983, 5059), 'utils.io.vis_match', 'vis_match', (['rendered', 'model.masks', 'model.imgs', 'bs', 'opts.img_size', 'opts.ndepth'], {}), '(rendered, model.masks, model.imgs, bs, opts.img_size, opts.ndepth)\n', (4992, 5059), False, 'from utils.io import save_vid, str_to_frame, save_bones, draw_lines, vis_match\n'), ((7285, 7350), 'glob.glob', 'glob.glob', (["('%s/%s-*bne-mrender*.jpg' % (weight_dir, opts.seqname))"], {}), "('%s/%s-*bne-mrender*.jpg' % (weight_dir, opts.seqname))\n", (7294, 7350), False, 'import glob\n'), ((8288, 8297), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (8295, 8297), True, 'from matplotlib import pyplot as plt\n'), ((8611, 8634), 'cv2.imread', 'cv2.imread', (['bne_path[i]'], {}), '(bne_path[i])\n', (8621, 8634), False, 'import cv2\n'), ((8660, 8751), 'cv2.resize', 'cv2.resize', (['bneimg', '(bneimg.shape[1] * plot.shape[0] // bneimg.shape[0], plot.shape[0])'], {}), '(bneimg, (bneimg.shape[1] * plot.shape[0] // bneimg.shape[0],\n plot.shape[0]))\n', (8670, 8751), False, 'import cv2\n'), ((8837, 8915), 'cv2.resize', 'cv2.resize', (['img', '(img.shape[1] * plot.shape[0] // img.shape[0], plot.shape[0])'], {}), '(img, (img.shape[1] * plot.shape[0] // img.shape[0], plot.shape[0]))\n', (8847, 8915), False, 'import cv2\n'), ((8952, 8982), 'numpy.hstack', 'np.hstack', (['[img, bneimg, plot]'], {}), '([img, bneimg, plot])\n', (8961, 8982), True, 'import numpy as np\n'), ((8781, 8804), 'cv2.imread', 'cv2.imread', (['img_path[i]'], {}), '(img_path[i])\n', (8791, 8804), False, 'import cv2\n')]
|
from peering_manager.api import OrderedDefaultRouter
from . import views
router = OrderedDefaultRouter()
router.APIRootView = views.ExtrasRootView
router.register("ix-api", views.IXAPIViewSet)
router.register("job-results", views.JobResultViewSet)
router.register("webhooks", views.WebhookViewSet)
app_name = "extras-api"
urlpatterns = router.urls
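# Illustrative inclusion from a project-level URLconf (a sketch; the dotted
# module path "extras.api.urls" is an assumption):
#
#   from django.urls import include, path
#   urlpatterns = [path("api/extras/", include("extras.api.urls"))]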
|
[
"peering_manager.api.OrderedDefaultRouter"
] |
[((84, 106), 'peering_manager.api.OrderedDefaultRouter', 'OrderedDefaultRouter', ([], {}), '()\n', (104, 106), False, 'from peering_manager.api import OrderedDefaultRouter\n')]
|
from flask_restx import Namespace, Resource
from flask import request
from .lib.parser import UserParser
from .view import UserView
from libs.depends.entry import container
from libs.middleware.auth import login_required, active_required
user = Namespace('user', path='/users', decorators=[active_required(), login_required()])
view = UserView()
@user.route('')
class UserResource(Resource):
'''User update, delete'''
@user.doc('get user')
def get(self):
return view.get()
@user.doc('update user')
def put(self):
parser: UserParser = container.get(UserParser)
param = parser.parse_update(request)
return view.update(param)
@user.doc('delete user')
def delete(self):
return view.delete()
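# Illustrative wiring (a sketch; `app` and the Api instance are assumed to be
# created elsewhere, e.g. in an application factory):
#
#   from flask_restx import Api
#   api = Api(app)
#   api.add_namespace(user)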
|
[
"libs.middleware.auth.active_required",
"libs.middleware.auth.login_required",
"libs.depends.entry.container.get"
] |
[((604, 629), 'libs.depends.entry.container.get', 'container.get', (['UserParser'], {}), '(UserParser)\n', (617, 629), False, 'from libs.depends.entry import container\n'), ((300, 317), 'libs.middleware.auth.active_required', 'active_required', ([], {}), '()\n', (315, 317), False, 'from libs.middleware.auth import login_required, active_required\n'), ((319, 335), 'libs.middleware.auth.login_required', 'login_required', ([], {}), '()\n', (333, 335), False, 'from libs.middleware.auth import login_required, active_required\n')]
|
# This script is for the rotate function
import numpy as np
import matplotlib.pyplot as plt
import cv2
def rotate(image, degree, output_path):
"""
    Rotates an OpenCV 2 / NumPy image about its centre by the given angle
(in degrees). The returned image will be large enough to hold the entire
new image, with a black background
Arguments:
-----------------------------
image: path of input file
    degree: rotation angle in degrees (int or float)
Output:
-----------------------------
an image file in .png format
"""
# exception handling
try:
image = plt.imread(image)
except AttributeError:
print("Please type in a string as the path for the input image file.")
raise
except TypeError:
print("Please provide a string as the path for the input image file.")
raise
except FileNotFoundError:
print("The input file/path does not exist, please double check it. ")
raise
except OSError:
print("The input file is not an image.")
raise
except Exception as e:
print("General Error:")
print(e)
raise
# Get the image size
image_size = (image.shape[1], image.shape[0])
image_center = tuple(np.array(image_size) / 2)
# Convert the OpenCV 3x2 rotation matrix to 3x3
rot_mat = np.vstack([cv2.getRotationMatrix2D(image_center, degree, 1.0), [0, 0, 1]])
rot_mat_notranslate = np.matrix(rot_mat[0:2, 0:2])
# Shorthand for below calcs
image_w2 = image_size[0] * 0.5
image_h2 = image_size[1] * 0.5
# Obtain the rotated coordinates of the image corners
rotated_coords = [
(np.array([-image_w2, image_h2]) * rot_mat_notranslate).A[0],
(np.array([image_w2, image_h2]) * rot_mat_notranslate).A[0],
(np.array([-image_w2, -image_h2]) * rot_mat_notranslate).A[0],
(np.array([image_w2, -image_h2]) * rot_mat_notranslate).A[0],
]
# Find the size of the new image
x_coords = [pt[0] for pt in rotated_coords]
x_pos = [x for x in x_coords if x > 0]
x_neg = [x for x in x_coords if x < 0]
y_coords = [pt[1] for pt in rotated_coords]
y_pos = [y for y in y_coords if y > 0]
y_neg = [y for y in y_coords if y < 0]
right_bound = max(x_pos)
left_bound = min(x_neg)
top_bound = max(y_pos)
bot_bound = min(y_neg)
new_w = int(abs(right_bound - left_bound))
new_h = int(abs(top_bound - bot_bound))
# We require a translation matrix to keep the image centred
trans_mat = np.matrix(
[
[1, 0, int(new_w * 0.5 - image_w2)],
[0, 1, int(new_h * 0.5 - image_h2)],
[0, 0, 1],
]
)
    # Compute the transform for the combined rotation and translation
affine_mat = (np.matrix(trans_mat) * np.matrix(rot_mat))[0:2, :]
# Apply the transform
result = cv2.warpAffine(image, affine_mat, (new_w, new_h), flags=cv2.INTER_LINEAR)
# exception handling
try:
plt.imshow(result)
plt.savefig(output_path)
except FileNotFoundError:
print("The output path does not exist.")
raise
except AttributeError:
print("Please provide a string as the path for the output image file.")
raise
except TypeError:
print("Please provide a string as the path for the output image file.")
raise
except Exception as e:
print("Other exceptions, please check your input and output. ")
print(e)
raise
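# Illustrative usage (a hedged sketch; the file names are assumptions):
#
#   rotate("input.png", 45, "output_rotated.png")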
|
[
"numpy.matrix",
"matplotlib.pyplot.imshow",
"cv2.warpAffine",
"numpy.array",
"matplotlib.pyplot.imread",
"cv2.getRotationMatrix2D",
"matplotlib.pyplot.savefig"
] |
[((1446, 1474), 'numpy.matrix', 'np.matrix', (['rot_mat[0:2, 0:2]'], {}), '(rot_mat[0:2, 0:2])\n', (1455, 1474), True, 'import numpy as np\n'), ((2875, 2948), 'cv2.warpAffine', 'cv2.warpAffine', (['image', 'affine_mat', '(new_w, new_h)'], {'flags': 'cv2.INTER_LINEAR'}), '(image, affine_mat, (new_w, new_h), flags=cv2.INTER_LINEAR)\n', (2889, 2948), False, 'import cv2\n'), ((601, 618), 'matplotlib.pyplot.imread', 'plt.imread', (['image'], {}), '(image)\n', (611, 618), True, 'import matplotlib.pyplot as plt\n'), ((2992, 3010), 'matplotlib.pyplot.imshow', 'plt.imshow', (['result'], {}), '(result)\n', (3002, 3010), True, 'import matplotlib.pyplot as plt\n'), ((3019, 3043), 'matplotlib.pyplot.savefig', 'plt.savefig', (['output_path'], {}), '(output_path)\n', (3030, 3043), True, 'import matplotlib.pyplot as plt\n'), ((1251, 1271), 'numpy.array', 'np.array', (['image_size'], {}), '(image_size)\n', (1259, 1271), True, 'import numpy as np\n'), ((1355, 1405), 'cv2.getRotationMatrix2D', 'cv2.getRotationMatrix2D', (['image_center', 'degree', '(1.0)'], {}), '(image_center, degree, 1.0)\n', (1378, 1405), False, 'import cv2\n'), ((2784, 2804), 'numpy.matrix', 'np.matrix', (['trans_mat'], {}), '(trans_mat)\n', (2793, 2804), True, 'import numpy as np\n'), ((2807, 2825), 'numpy.matrix', 'np.matrix', (['rot_mat'], {}), '(rot_mat)\n', (2816, 2825), True, 'import numpy as np\n'), ((1669, 1700), 'numpy.array', 'np.array', (['[-image_w2, image_h2]'], {}), '([-image_w2, image_h2])\n', (1677, 1700), True, 'import numpy as np\n'), ((1739, 1769), 'numpy.array', 'np.array', (['[image_w2, image_h2]'], {}), '([image_w2, image_h2])\n', (1747, 1769), True, 'import numpy as np\n'), ((1808, 1840), 'numpy.array', 'np.array', (['[-image_w2, -image_h2]'], {}), '([-image_w2, -image_h2])\n', (1816, 1840), True, 'import numpy as np\n'), ((1879, 1910), 'numpy.array', 'np.array', (['[image_w2, -image_h2]'], {}), '([image_w2, -image_h2])\n', (1887, 1910), True, 'import numpy as np\n')]
|
#******************************************************************************
#
# MantaGen
# Copyright 2018 <NAME>, <NAME>, <NAME>
#
# This program is free software, distributed under the terms of the
# Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
#
#******************************************************************************
from manta import *
import numpy
from random import randint
from scenes.scene import Scene
from scenes.volumes import *
from scenes.functions import *
from util.logger import *
def instantiate_scene(**kwargs): # instantiate independent of name , TODO replace?
info(kwargs)
return SmokeBuoyantScene(**kwargs)
class SmokeBuoyantScene(Scene):
#----------------------------------------------------------------------------------
def __init__(self, **kwargs):
super(SmokeBuoyantScene,self).__init__(**kwargs)
# optionally, init more grids etc.
self.max_iter_fac = 2
self.accuracy = 5e-4
self.max_source_count = int(kwargs.get("max_source_count", 5))
self.velocity_scale = float(kwargs.get("velocity_scale", self.resolution.y * 0.05))
self.use_inflow_sources = kwargs.get("use_inflow_sources", "True") == "True"
self.open_bound = kwargs.get("use_open_bound", "True") == "True"
self.sources = []
self.source_strengths = []
# smoke sims need to track the density
self.density = self.solver.create(RealGrid, name="Density")
noise = self.solver.create(NoiseField, loadFromFile=True)
noise.posScale = vec3(40) * numpy.random.uniform(low=0.25, high=1.)
noise.posOffset = random_vec3s(vmin=0.0) * 100.
noise.clamp = True
noise.clampNeg = 0
noise.clampPos = 1.
noise.valOffset = 0.15
noise.timeAnim = 0.4 * numpy.random.uniform(low=0.2, high=1.)
self.noise = noise
info("SmokeBuoyantScene initialized")
#----------------------------------------------------------------------------------
def set_velocity(self, volume, velocity):
if self.dimension == 2:
velocity.z = 0.0
volume.applyToGrid(solver=self.solver, grid=self.vel, value=velocity)
#----------------------------------------------------------------------------------
# sources used as smoke inflow in the following
def add_source(self, volume):
shape = volume.shape(self.solver)
self.sources.append(shape)
self.source_strengths.append(numpy.random.uniform(low=0.5, high=1.))
#----------------------------------------------------------------------------------
def _create_scene(self):
super(SmokeBuoyantScene, self)._create_scene()
self.sources = []
self.source_strengths = []
self.density.setConst(0)
self.vel.setConst(vec3(0))
is3d = (self.dimension > 2)
self.flags.initDomain(boundaryWidth=self.boundary)
self.flags.fillGrid()
if self.open_bound:
setOpenBound(self.flags, self.boundary, 'yY', CellType_TypeOutflow|CellType_TypeEmpty)
# formerly initialize_smoke_scene(scene):
source_count = randint(1, self.max_source_count)
for i in range(source_count):
volume = random_box(center_min=[0.2, 0.1, 0.2], center_max=[0.8, 0.6, 0.8], size_min=[0.005, 0.005, 0.005], size_max=[0.2, 0.2, 0.2], is3d=is3d)
self.add_source(volume)
src, sstr = self.sources[-1], self.source_strengths[-1]
densityInflow(flags=self.flags, density=self.density, noise=self.noise, shape=src, scale=2.0*sstr, sigma=0.5)
if self.show_gui:
# central view is more interesting for smoke
self._gui.setPlane(self.resolution.z // 2)
info("SmokeBuoyantScene created with {} sources".format(len(self.sources)))
#==================================================================================
# SIMULATION
#----------------------------------------------------------------------------------
def _compute_simulation_step(self):
# Note - sources are turned off earlier, the more there are in the scene
for i in range(len(self.sources)):
if self.use_inflow_sources:
src, sstr = self.sources[i], self.source_strengths[i]
densityInflow(flags=self.flags, density=self.density, noise=self.noise, shape=src, scale=2.0*sstr, sigma=0.5)
advectSemiLagrange(flags=self.flags, vel=self.vel, grid=self.density, order=2, clampMode=2)
advectSemiLagrange(flags=self.flags, vel=self.vel, grid=self.vel , order=2, clampMode=2)
vorticityConfinement(vel=self.vel, flags=self.flags, strength=0.1)
addBuoyancy(density=self.density, vel=self.vel, gravity=0.2*self.gravity, flags=self.flags)
setWallBcs(flags=self.flags, vel=self.vel)
solvePressure(flags=self.flags, vel=self.vel, pressure=self.pressure, cgMaxIterFac=self.max_iter_fac, cgAccuracy=self.accuracy)
|
[
"numpy.random.uniform",
"random.randint"
] |
[((3221, 3254), 'random.randint', 'randint', (['(1)', 'self.max_source_count'], {}), '(1, self.max_source_count)\n', (3228, 3254), False, 'from random import randint\n'), ((1630, 1670), 'numpy.random.uniform', 'numpy.random.uniform', ([], {'low': '(0.25)', 'high': '(1.0)'}), '(low=0.25, high=1.0)\n', (1650, 1670), False, 'import numpy\n'), ((1871, 1910), 'numpy.random.uniform', 'numpy.random.uniform', ([], {'low': '(0.2)', 'high': '(1.0)'}), '(low=0.2, high=1.0)\n', (1891, 1910), False, 'import numpy\n'), ((2547, 2586), 'numpy.random.uniform', 'numpy.random.uniform', ([], {'low': '(0.5)', 'high': '(1.0)'}), '(low=0.5, high=1.0)\n', (2567, 2586), False, 'import numpy\n')]
|
#! /usr/bin/env python3
import unittest
from main import run_1, run_2
class Test(unittest.TestCase):
def test_1(self):
self.assertEqual(run_1("aa bb cc dd ee\naa bb cc dd aa\naa bb cc dd aaa"), 2)
def test_2(self):
self.assertEqual(
run_2("abcde fghij\nabcde xyz ecdab\na ab abc abd abf abj\niiii oiii ooii oooi oooo\noiii ioii iioi iiio"),
3)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"main.run_1",
"main.run_2"
] |
[((429, 444), 'unittest.main', 'unittest.main', ([], {}), '()\n', (442, 444), False, 'import unittest\n'), ((151, 209), 'main.run_1', 'run_1', (['"""aa bb cc dd ee\naa bb cc dd aa\naa bb cc dd aaa"""'], {}), '("""aa bb cc dd ee\naa bb cc dd aa\naa bb cc dd aaa""")\n', (156, 209), False, 'from main import run_1, run_2\n'), ((273, 389), 'main.run_2', 'run_2', (['"""abcde fghij\nabcde xyz ecdab\na ab abc abd abf abj\niiii oiii ooii oooi oooo\noiii ioii iioi iiio"""'], {}), '(\n """abcde fghij\nabcde xyz ecdab\na ab abc abd abf abj\niiii oiii ooii oooi oooo\noiii ioii iioi iiio"""\n )\n', (278, 389), False, 'from main import run_1, run_2\n')]
|
import pygame
class Score:
def __init__(self):
self.right_score = 0
self.left_score = 0
self.right_cord = (465, 50)
self.left_cord = (300, 50)
self.left_color = (0, 0, 0)
self.right_color = (0, 0, 0)
def show_score(self, window, color):
"""Takes the window and color and creates a right and left score which are text and also creates rectangles showing the middle and the different sides."""
font = pygame.font.Font("freesansbold.ttf", 80)
right_score = font.render(f"{self.right_score}", True, self.right_color)
left_score = font.render(f"{self.left_score}", True, self.left_color)
pygame.draw.rect(window, color, (400, 0, 10, 275))
pygame.draw.rect(window, color, (400, 350, 10, 275))
window.blit(right_score, self.right_cord)
window.blit(left_score, self.left_cord)
def game_over(self):
"""Checks to see if the score of the left or right is equal to 7. Then returns True or False"""
if self.left_score == 7 or self.right_score == 7:
return True
else:
return False
def reset_score(self):
"""Resets the score for the right and left score to 0"""
self.right_score = 0
self.left_score = 0
|
[
"pygame.draw.rect",
"pygame.font.Font"
] |
[((488, 528), 'pygame.font.Font', 'pygame.font.Font', (['"""freesansbold.ttf"""', '(80)'], {}), "('freesansbold.ttf', 80)\n", (504, 528), False, 'import pygame\n'), ((699, 749), 'pygame.draw.rect', 'pygame.draw.rect', (['window', 'color', '(400, 0, 10, 275)'], {}), '(window, color, (400, 0, 10, 275))\n', (715, 749), False, 'import pygame\n'), ((759, 811), 'pygame.draw.rect', 'pygame.draw.rect', (['window', 'color', '(400, 350, 10, 275)'], {}), '(window, color, (400, 350, 10, 275))\n', (775, 811), False, 'import pygame\n')]
|