code (stringlengths 22–1.05M) | apis (listlengths 1–3.31k) | extract_api (stringlengths 75–3.25M) |
---|---|---|
# Generated by Django 2.1.4 on 2019-07-30 17:26
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0009_auto_20190710_2235'),
]
operations = [
migrations.RemoveField(
model_name='carrier',
name='model',
),
migrations.DeleteModel(
name='Carrier',
),
]
|
[
"django.db.migrations.RemoveField",
"django.db.migrations.DeleteModel"
] |
[((223, 281), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""carrier"""', 'name': '"""model"""'}), "(model_name='carrier', name='model')\n", (245, 281), False, 'from django.db import migrations\n'), ((326, 364), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Carrier"""'}), "(name='Carrier')\n", (348, 364), False, 'from django.db import migrations\n')]
|
import logging
import traceback
import colorama
from colorama import Fore, Back, Style
colorama.init()
class CustomFormatter(logging.Formatter):
"""Logging Formatter to add colors and count warning / errors
reference: https://stackoverflow.com/questions/384076/how-can-i-color-python-logging-output/
"""
import platform
if platform.system() == 'Windows':
grey = "\x1b[38;21m"
yellow = "\x1b[33;21m"
magenta = "\x1b[35;21m"
red = "\x1b[31;21m"
reset = "\x1b[0m"
else:
grey = Style.DIM
yellow = Fore.YELLOW
magenta = Fore.MAGENTA
red = Fore.RED
reset = Style.RESET_ALL
# format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s (%(filename)s:%(lineno)d)"
format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
FORMATS = {
logging.DEBUG: grey + format + reset,
logging.INFO: grey + format + reset,
logging.WARNING: yellow + format + reset,
logging.ERROR: red + format + reset,
logging.CRITICAL: red + format + reset
}
def format(self, record):
log_fmt = self.FORMATS.get(record.levelno)
formatter = logging.Formatter(log_fmt)
return formatter.format(record)
class Logger:
def __init__(self, name=None):
# create logger
self.logger = logging.getLogger(name)
self.logger.setLevel(logging.DEBUG)
# create console handler and set level to debug
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
# add formatter
handler.setFormatter(CustomFormatter())
# add handler to logger
self.logger.addHandler(handler)
def debug(self, message):
self.logger.debug(message)
def info(self, message):
self.logger.info(message)
def warn(self, message):
self.logger.warning(message)
def print_stack(self):
print("---traceback---")
for line in traceback.format_stack():
print(line.strip())
def error(self, message):
self.logger.error(message)
self.print_stack()
exit()
def critical(self, message):
self.logger.critical(message)
self.print_stack()
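# Usage sketch (added for illustration, not part of the original module): the
# Logger wrapper above installs a single colourised stream handler, so a short
# smoke test only needs to construct it and emit a few records.
if __name__ == '__main__':
    log = Logger('demo')
    log.debug('debug message')      # rendered dim/grey
    log.info('processing started')  # rendered dim/grey
    log.warn('low disk space')      # rendered yellow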
|
[
"colorama.init",
"logging.StreamHandler",
"logging.Formatter",
"platform.system",
"traceback.format_stack",
"logging.getLogger"
] |
[((88, 103), 'colorama.init', 'colorama.init', ([], {}), '()\n', (101, 103), False, 'import colorama\n'), ((347, 364), 'platform.system', 'platform.system', ([], {}), '()\n', (362, 364), False, 'import platform\n'), ((1197, 1223), 'logging.Formatter', 'logging.Formatter', (['log_fmt'], {}), '(log_fmt)\n', (1214, 1223), False, 'import logging\n'), ((1362, 1385), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (1379, 1385), False, 'import logging\n'), ((1505, 1528), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (1526, 1528), False, 'import logging\n'), ((1992, 2016), 'traceback.format_stack', 'traceback.format_stack', ([], {}), '()\n', (2014, 2016), False, 'import traceback\n')]
|
import requests
from . import UserModel, db, DEVELOPMENT
base_url = 'http://localhost:5000/api/user'
if not DEVELOPMENT:
raise SystemError('Not in development mode!')
def prepare_db():
db.drop_all()
db.create_all()
test_email = UserModel.fetch(email='<EMAIL>')
if test_email:
UserModel.delete(test_email)
def test_prepare_db():
prepare_db()
assert UserModel.fetch(email='<EMAIL>') is None
def test_get_request():
req = requests.get(base_url)
json_response = req.json()
# status code 200
assert req.status_code == 200
# usersRegistered as a property of response object
assert 'usersRegistered' in json_response.keys()
# usersRegistered value is a number
assert type(json_response['usersRegistered']) is int
def test_create_user():
test_user = {'name': 'test', 'email': '<EMAIL>', 'password':'<PASSWORD>'}
req = requests.post(base_url, json=test_user)
# status code 200
assert req.status_code == 200
# assert properties contain:
# 'message', 'success', 'user'
assert 'message' in req.json().keys()
assert 'success' in req.json().keys()
assert 'user' in req.json().keys()
# assert 'user' response object only contains 'email','id','is_admin' and 'name'
expected_response = {
"message": "User created.",
"success": True,
"user": {
"email": "<EMAIL>",
"id": 1,
"is_admin": False,
"name": "test"
}
}
# assert response matches expected response
assert expected_response.items() == req.json().items()
def test_user_in_db():
test_user = UserModel.fetch(email='<EMAIL>')
# assert user is created
assert test_user is not None
# assert user object properties in db match
assert test_user.name == 'test'
assert test_user.id == 1
assert test_user.email == '<EMAIL>'
assert test_user.check_password('<PASSWORD>') is True
assert test_user.is_admin is False
def test_missing_params():
test_user = {'name': 'test'}
req = requests.post(base_url, json=test_user)
# assert status code
assert req.status_code == 200
# assert response object matches expected
expected_response = {'error':'Missing email field.'}
assert expected_response.items() == req.json().items()
def test_missing_payload():
req = requests.post(base_url, json=None)
# assert status code
assert req.status_code == 200
# assert response object matches expected
expected_response = {'error':'No data was sent with the request.'}
assert expected_response.items() == req.json().items()
def test_already_exists():
test_user = {'name': 'test', 'email': '<EMAIL>', 'password':'<PASSWORD>'}
req = requests.post(base_url, json=test_user)
# status code 200
assert req.status_code == 200
# assert response object matches expected
expected_response = {'error':'Email is already registered.'}
assert expected_response.items() == req.json().items()
def test_methods_not_allowed():
put_req = requests.put(base_url)
delete_req = requests.delete(base_url)
# same response is expected for both
expected_response = {'error':'Method not allowed.'}
# status code 200
assert put_req.status_code == 200
assert delete_req.status_code == 200
# assert expected response matched
assert put_req.json().items() == expected_response.items()
assert delete_req.json().items() == expected_response.items()
def test_clear_db():
prepare_db()
assert UserModel.fetch(email='<EMAIL>') is None
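# Note (added): these tests assume the API server is already listening at
# base_url (http://localhost:5000/api/user) and that DEVELOPMENT is enabled,
# because prepare_db() drops and recreates every table. Running them with a
# test runner such as pytest is an assumption, not something stated in the file.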
|
[
"requests.put",
"requests.post",
"requests.delete",
"requests.get"
] |
[((455, 477), 'requests.get', 'requests.get', (['base_url'], {}), '(base_url)\n', (467, 477), False, 'import requests\n'), ((866, 905), 'requests.post', 'requests.post', (['base_url'], {'json': 'test_user'}), '(base_url, json=test_user)\n', (879, 905), False, 'import requests\n'), ((1952, 1991), 'requests.post', 'requests.post', (['base_url'], {'json': 'test_user'}), '(base_url, json=test_user)\n', (1965, 1991), False, 'import requests\n'), ((2240, 2274), 'requests.post', 'requests.post', (['base_url'], {'json': 'None'}), '(base_url, json=None)\n', (2253, 2274), False, 'import requests\n'), ((2612, 2651), 'requests.post', 'requests.post', (['base_url'], {'json': 'test_user'}), '(base_url, json=test_user)\n', (2625, 2651), False, 'import requests\n'), ((2914, 2936), 'requests.put', 'requests.put', (['base_url'], {}), '(base_url)\n', (2926, 2936), False, 'import requests\n'), ((2952, 2977), 'requests.delete', 'requests.delete', (['base_url'], {}), '(base_url)\n', (2967, 2977), False, 'import requests\n')]
|
import tweepy
import re
import os, logging, datetime, argparse
from logging.handlers import RotatingFileHandler
import json
from prepare_data_for_labeling_infer import prepare_data_for_subject_object_labeling_infer
from produce_submit_json_file import Sorted_relation_and_entity_list_Management
from DataInteroperability import SPO2KG_Function
import sys
import subprocess
from py2neo import Graph, Node, Relationship,NodeMatcher
CONSUMER_KEY = "1LBqUbcbBOAD6LKsq2f49yHVM"
CONSUMER_SECRET = "<KEY>"
OAUTH_TOKEN = "<KEY>"
OAUTH_TOKEN_SECRET = "<KEY>"
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
api = tweepy.API(auth)
graph = Graph('http://localhost:7474', username='neo4j', password='<PASSWORD>')
def classify_tweets_related_or_not(Id):
data_dir=r"--data_dir=file directory of classification model\bin/predicate_classifiction/classification_data/"+Id
output_dir=r" --output_dir=file directory of classification model/output/predicate_classification_model/epochs1700/"+Id+r"/"
os.makedirs("file directory of classification model/output/predicate_classification_model/epochs1700/"+Id+r"/")
classification_command=r"C:\Users\CivilIM\Anaconda3\envs\TF115P37\python.exe file directory of classification model\run_predicate_classification.py "+data_dir+output_dir
print (data_dir)
os.system(classification_command)
def check_classification_result(Id):
f = open(r"file directory of classification model/output/predicate_classification_model/epochs1700/"+Id+r"/predicate_predict.txt")
classification_result=f.read()
classification_result=classification_result.replace("\n", "")
print(classification_result)
if classification_result=="traffic":
return True
else:
return False
def infer_relations(Id):
data_dir = r"--data_dir=file directory of classification model\bin/predicate_classifiction/classification_data/" + Id
output_dir = r" --output_dir=file directory of models for inferring interrelations and recognizing subject/objects/output/predicate_classification_model/epochs700/" + Id + r"/"
os.makedirs("file directory of models for inferring interrelations and recognizing subject/objects/output/predicate_classification_model/epochs700/"+Id+r"/")
infer_relations_command = r"C:\Users\CivilIM\Anaconda3\envs\TF115P37\python.exe file directory of models for inferring interrelations and recognizing subject/objects\run_predicate_classification.py " + data_dir + output_dir
os.system(infer_relations_command)
print("finish infer_relations")
def check_inferred_relations(Id):
f = open(r"file directory of models for inferring interrelations and recognizing subject/objects/output/predicate_classification_model/epochs700/"+Id+r"/predicate_predict.txt")
relations_result=f.read()
relations_result=relations_result.replace("\n", "")
str_list = relations_result.split(" ")
print("check_inferred_relations",str_list)
if ("Road_status" in str_list) and ( "Road_position" in str_list) and ("Lane_of_Road" not in str_list) :
return True
elif ("Road_status" in str_list) and ( "Road_position" in str_list) and ("Lane_of_Road" in str_list) and ("Lane_status" in str_list) and (( "Lane_position" in str_list) or ("Lane_direction" in str_list)):
return True
else:
return False
def prepare_data_for_extracting_SO(Id):
data_dir = r"file directory of classification model\bin/predicate_classifiction/classification_data/" + Id+"/test"
predicate_classifiction_infer_file_dir=r"file directory of models for inferring interrelations and recognizing subject/objects\output\predicate_classification_model\epochs700/"+Id+"/"
output_dir = r"file directory of models for inferring interrelations and recognizing subject/objects\bin\subject_object_labeling\sequence_labeling_data/" + Id + r"/test"
os.makedirs(r"file directory of models for inferring interrelations and recognizing subject/objects\bin\subject_object_labeling\sequence_labeling_data/" + Id + r"/test/")
prepare_data_for_subject_object_labeling_infer(data_dir,predicate_classifiction_infer_file_dir,output_dir)
def extract_SO(Id):
data_dir = r"--data_dir=file directory of models for inferring interrelations and recognizing subject/objects\bin\subject_object_labeling\sequence_labeling_data/" + Id
output_dir = r" --output_dir=file directory of models for inferring interrelations and recognizing subject/objects\output/sequnce_infer_out/epochs700/ckpt12415/" + Id
os.makedirs(r"file directory of models for inferring interrelations and recognizing subject/objects\output/sequnce_infer_out/epochs700/ckpt12415/"+Id+r"/")
extract_SO_command = r"C:\Users\CivilIM\Anaconda3\envs\TF115P37\python.exe file directory of models for inferring interrelations and recognizing subject/objects\run_sequnce_labeling.py " + data_dir + output_dir
os.system(extract_SO_command)
def generate_json_result(Id):
spo_list_manager = Sorted_relation_and_entity_list_Management(
r"file directory of models for inferring interrelations and recognizing subject/objects\bin\subject_object_labeling\sequence_labeling_data/"+Id+"/test",
r"file directory of models for inferring interrelations and recognizing subject/objects\output\sequnce_infer_out\epochs700\ckpt12415/"+Id+"/",
Competition_Mode=True)
spo_list_manager.produce_output_file(
OUT_RESULTS_DIR=r"file directory of models for inferring interrelations and recognizing subject/objects/output/final_text_spo_list_result/"+Id,
keep_empty_spo_list=True)
#override tweepy.StreamListener to add logic to on_status
class MyStreamListener(tweepy.StreamListener):
def on_data(self, data):
#print data
with open('fetched_tweets.json','a') as tf:
tf.write(data)
data = json.loads(data)
print (data)
tweet=data['text']
tweet=re.sub("[\s+\.\!\/_,$%^*(+\"\']+|[+——!,。?、~@#¥%……&*()]+", " ", tweet)
tweet=tweet.replace(" "," ")
tweet = tweet.replace(" ", " ")
tweet = tweet.replace(" ", " ")
tweet = tweet.replace(" ", " ")
tweet = tweet.replace(" ", " ")
print(tweet)
print(data['id'])
tweet_storage=r"file directory of classification model\bin/predicate_classifiction/classification_data/"+str(data['id'])+r"/test/"
os.makedirs(tweet_storage)
with open(tweet_storage+"text.txt",'w') as tf:
tf.write(tweet)
with open(tweet_storage+"token_in.txt",'w') as tf:
tf.write(tweet)
with open(tweet_storage+"token_in_not_UNK.txt",'w') as tf:
tf.write(tweet)
if str(data['user']['id'])=="1348585566040772609": # only for test
classify_tweets_related_or_not(str(data['id']))
print("check_classification_result(str(data['id']))",check_classification_result(str(data['id'])))
if check_classification_result(str(data['id']))==True:
infer_relations(str(data['id']))
print("check_inferred_relations(str(data['id']))", check_inferred_relations(str(data['id'])))
if check_inferred_relations(str(data['id']))==True:
prepare_data_for_extracting_SO(str(data['id']))
print("prepare_data_for_extracting_SO finish")
extract_SO(str(data['id']))
print("extract_SO finish")
generate_json_result(str(data['id']))
print("generate_json_result finish")
SPO2KG_Function(r"file directory of models for inferring interrelations and recognizing subject/objects/output/final_text_spo_list_result/"+str(data['id'])+r"\keep_empty_spo_list_subject_predicate_object_predict_output.json",graph)
print("Tweet2KnowledgeGraph finish")
subprocess.Popen([r"C:\Program Files\ArcGIS\Pro/bin\Python\envs/arcgispro-py3\python.exe", r"D:/ZHOUSHENGHUA/PythonNeo4j/Convert_Information_triplets to_VRM_parameters.py"])
return True
def on_error(self, status):
print (status)
def on_status(self, status):
print(status.text)
myStreamListener = MyStreamListener()
myStream = tweepy.Stream(auth=api.auth, listener=myStreamListener)
#api.verify_credentials()
myStream.filter(track=["keywords for searching tweets"], filter_level="low",locations=["depends on the city"], is_async=True)
print (myStream)
#print (myStreamListener.on_status())
#myStream.disconnect()
print ("OK")
|
[
"subprocess.Popen",
"os.makedirs",
"tweepy.API",
"json.loads",
"prepare_data_for_labeling_infer.prepare_data_for_subject_object_labeling_infer",
"os.system",
"produce_submit_json_file.Sorted_relation_and_entity_list_Management",
"tweepy.Stream",
"py2neo.Graph",
"tweepy.OAuthHandler",
"re.sub"
] |
[((565, 615), 'tweepy.OAuthHandler', 'tweepy.OAuthHandler', (['CONSUMER_KEY', 'CONSUMER_SECRET'], {}), '(CONSUMER_KEY, CONSUMER_SECRET)\n', (584, 615), False, 'import tweepy\n'), ((679, 695), 'tweepy.API', 'tweepy.API', (['auth'], {}), '(auth)\n', (689, 695), False, 'import tweepy\n'), ((705, 776), 'py2neo.Graph', 'Graph', (['"""http://localhost:7474"""'], {'username': '"""neo4j"""', 'password': '"""<PASSWORD>"""'}), "('http://localhost:7474', username='neo4j', password='<PASSWORD>')\n", (710, 776), False, 'from py2neo import Graph, Node, Relationship, NodeMatcher\n'), ((8279, 8334), 'tweepy.Stream', 'tweepy.Stream', ([], {'auth': 'api.auth', 'listener': 'myStreamListener'}), '(auth=api.auth, listener=myStreamListener)\n', (8292, 8334), False, 'import tweepy\n'), ((1069, 1193), 'os.makedirs', 'os.makedirs', (["(\n 'file directory of classification model/output/predicate_classification_model/epochs1700/'\n + Id + '/')"], {}), "(\n 'file directory of classification model/output/predicate_classification_model/epochs1700/'\n + Id + '/')\n", (1080, 1193), False, 'import os, logging, datetime, argparse\n'), ((1380, 1413), 'os.system', 'os.system', (['classification_command'], {}), '(classification_command)\n', (1389, 1413), False, 'import os, logging, datetime, argparse\n'), ((2144, 2314), 'os.makedirs', 'os.makedirs', (["(\n 'file directory of models for inferring interrelations and recognizing subject/objects/output/predicate_classification_model/epochs700/'\n + Id + '/')"], {}), "(\n 'file directory of models for inferring interrelations and recognizing subject/objects/output/predicate_classification_model/epochs700/'\n + Id + '/')\n", (2155, 2314), False, 'import os, logging, datetime, argparse\n'), ((2534, 2568), 'os.system', 'os.system', (['infer_relations_command'], {}), '(infer_relations_command)\n', (2543, 2568), False, 'import os, logging, datetime, argparse\n'), ((3910, 4091), 'os.makedirs', 'os.makedirs', (["(\n 'file directory of models for inferring interrelations and recognizing subject/objects\\\\bin\\\\subject_object_labeling\\\\sequence_labeling_data/'\n + Id + '/test/')"], {}), "(\n 'file directory of models for inferring interrelations and recognizing subject/objects\\\\bin\\\\subject_object_labeling\\\\sequence_labeling_data/'\n + Id + '/test/')\n", (3921, 4091), False, 'import os, logging, datetime, argparse\n'), ((4085, 4197), 'prepare_data_for_labeling_infer.prepare_data_for_subject_object_labeling_infer', 'prepare_data_for_subject_object_labeling_infer', (['data_dir', 'predicate_classifiction_infer_file_dir', 'output_dir'], {}), '(data_dir,\n predicate_classifiction_infer_file_dir, output_dir)\n', (4131, 4197), False, 'from prepare_data_for_labeling_infer import prepare_data_for_subject_object_labeling_infer\n'), ((4559, 4727), 'os.makedirs', 'os.makedirs', (["(\n 'file directory of models for inferring interrelations and recognizing subject/objects\\\\output/sequnce_infer_out/epochs700/ckpt12415/'\n + Id + '/')"], {}), "(\n 'file directory of models for inferring interrelations and recognizing subject/objects\\\\output/sequnce_infer_out/epochs700/ckpt12415/'\n + Id + '/')\n", (4570, 4727), False, 'import os, logging, datetime, argparse\n'), ((4934, 4963), 'os.system', 'os.system', (['extract_SO_command'], {}), '(extract_SO_command)\n', (4943, 4963), False, 'import os, logging, datetime, argparse\n'), ((5017, 5411), 'produce_submit_json_file.Sorted_relation_and_entity_list_Management', 'Sorted_relation_and_entity_list_Management', (["(\n 'file directory of models for 
inferring interrelations and recognizing subject/objects\\\\bin\\\\subject_object_labeling\\\\sequence_labeling_data/'\n + Id + '/test')", "(\n 'file directory of models for inferring interrelations and recognizing subject/objects\\\\output\\\\sequnce_infer_out\\\\epochs700\\\\ckpt12415/'\n + Id + '/')"], {'Competition_Mode': '(True)'}), "(\n 'file directory of models for inferring interrelations and recognizing subject/objects\\\\bin\\\\subject_object_labeling\\\\sequence_labeling_data/'\n + Id + '/test', \n 'file directory of models for inferring interrelations and recognizing subject/objects\\\\output\\\\sequnce_infer_out\\\\epochs700\\\\ckpt12415/'\n + Id + '/', Competition_Mode=True)\n", (5059, 5411), False, 'from produce_submit_json_file import Sorted_relation_and_entity_list_Management\n'), ((5878, 5894), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (5888, 5894), False, 'import json\n'), ((5957, 6029), 're.sub', 're.sub', (['"""[\\\\s+\\\\.\\\\!\\\\/_,$%^*(+"\']+|[+——!,。?、~@#¥%……&*()]+"""', '""" """', 'tweet'], {}), '(\'[\\\\s+\\\\.\\\\!\\\\/_,$%^*(+"\\\']+|[+——!,。?、~@#¥%……&*()]+\', \' \', tweet)\n', (5963, 6029), False, 'import re\n'), ((6423, 6449), 'os.makedirs', 'os.makedirs', (['tweet_storage'], {}), '(tweet_storage)\n', (6434, 6449), False, 'import os, logging, datetime, argparse\n'), ((7919, 8115), 'subprocess.Popen', 'subprocess.Popen', (["['C:\\\\Program Files\\\\ArcGIS\\\\Pro/bin\\\\Python\\\\envs/arcgispro-py3\\\\python.exe',\n 'D:/ZHOUSHENGHUA/PythonNeo4j/Convert_Information_triplets to_VRM_parameters.py'\n ]"], {}), "([\n 'C:\\\\Program Files\\\\ArcGIS\\\\Pro/bin\\\\Python\\\\envs/arcgispro-py3\\\\python.exe'\n ,\n 'D:/ZHOUSHENGHUA/PythonNeo4j/Convert_Information_triplets to_VRM_parameters.py'\n ])\n", (7935, 8115), False, 'import subprocess\n')]
|
from cb import drawBox
import cb
import cv2
import numpy as np
def dynamicColorMask():
# Begins reading from the default webcam
cap = cv2.VideoCapture(0)
ret, frame = cap.read()
cv2.namedWindow('Feed')
cv2.setMouseCallback('Feed', drawBox)
while True:
# Reads the next frame from the camera
ret, frame = cap.read()
if not ret:
break
cv2.rectangle(frame, (cb.ix, cb.iy), (cb.fx, cb.fy), (255, 204, 51), 1)
cv2.imshow('Feed', frame)
# Use 'Q' to exit program
if cv2.waitKey(1) & 0xFF == ord('q'):
break
cv2.destroyAllWindows()
cap.release()
dynamicColorMask()
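# Note (added): the cb helper module is expected to provide drawBox (the mouse
# callback registered above) plus mutable corner coordinates cb.ix, cb.iy,
# cb.fx, cb.fy that the callback updates and the rectangle overlay reads.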
|
[
"cv2.waitKey",
"cv2.imshow",
"cv2.VideoCapture",
"cv2.setMouseCallback",
"cv2.rectangle",
"cv2.destroyAllWindows",
"cv2.namedWindow"
] |
[((137, 156), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (153, 156), False, 'import cv2\n'), ((185, 208), 'cv2.namedWindow', 'cv2.namedWindow', (['"""Feed"""'], {}), "('Feed')\n", (200, 208), False, 'import cv2\n'), ((210, 247), 'cv2.setMouseCallback', 'cv2.setMouseCallback', (['"""Feed"""', 'drawBox'], {}), "('Feed', drawBox)\n", (230, 247), False, 'import cv2\n'), ((546, 569), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (567, 569), False, 'import cv2\n'), ((362, 433), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(cb.ix, cb.iy)', '(cb.fx, cb.fy)', '(255, 204, 51)', '(1)'], {}), '(frame, (cb.ix, cb.iy), (cb.fx, cb.fy), (255, 204, 51), 1)\n', (375, 433), False, 'import cv2\n'), ((437, 462), 'cv2.imshow', 'cv2.imshow', (['"""Feed"""', 'frame'], {}), "('Feed', frame)\n", (447, 462), False, 'import cv2\n'), ((500, 514), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (511, 514), False, 'import cv2\n')]
|
import torch
from models import SurfaceMapModel
from models import InterMapModel
from utils import show_mesh
from utils import show_mesh_2D
SURFACE_PATH = '/SET/HERE/YOUR/PATH'
CHECKPOINT_PATH = '/SET/HERE/YOUR/PATH'
def main() -> None:
torch.set_grad_enabled(False)
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
meta = SurfaceMapModel()
net = InterMapModel()
data = torch.load(SURFACE_PATH)
source = data['grid'].to(device).float()
faces = data['faces'].long()
weights = data['weights']
net.load_state_dict(torch.load(CHECKPOINT_PATH))
net = net.to(device)
for k in weights.keys():
weights[k] = weights[k].to(device).detach()
# generate mesh at GT vertices
surface = meta(source, weights)
param = net(source)
show_mesh_2D('param.png', param, faces)
show_mesh('param_small.ply', param, surface, faces)
# generate mesh at sample vertices
source = data['visual_grid'].to(device).float()
faces = data['visual_faces'].long()
surface = meta(source, weights)
param = net(source)
show_mesh_2D('param_big.png', param, faces)
show_mesh('neural_surface_big.ply', param, surface, faces)
if __name__ == '__main__':
main()
|
[
"models.InterMapModel",
"torch.load",
"torch.cuda.is_available",
"models.SurfaceMapModel",
"utils.show_mesh_2D",
"torch.set_grad_enabled",
"utils.show_mesh"
] |
[((252, 281), 'torch.set_grad_enabled', 'torch.set_grad_enabled', (['(False)'], {}), '(False)\n', (274, 281), False, 'import torch\n'), ((371, 388), 'models.SurfaceMapModel', 'SurfaceMapModel', ([], {}), '()\n', (386, 388), False, 'from models import SurfaceMapModel\n'), ((400, 415), 'models.InterMapModel', 'InterMapModel', ([], {}), '()\n', (413, 415), False, 'from models import InterMapModel\n'), ((431, 455), 'torch.load', 'torch.load', (['SURFACE_PATH'], {}), '(SURFACE_PATH)\n', (441, 455), False, 'import torch\n'), ((832, 871), 'utils.show_mesh_2D', 'show_mesh_2D', (['"""param.png"""', 'param', 'faces'], {}), "('param.png', param, faces)\n", (844, 871), False, 'from utils import show_mesh_2D\n'), ((876, 927), 'utils.show_mesh', 'show_mesh', (['"""param_small.ply"""', 'param', 'surface', 'faces'], {}), "('param_small.ply', param, surface, faces)\n", (885, 927), False, 'from utils import show_mesh\n'), ((1130, 1173), 'utils.show_mesh_2D', 'show_mesh_2D', (['"""param_big.png"""', 'param', 'faces'], {}), "('param_big.png', param, faces)\n", (1142, 1173), False, 'from utils import show_mesh_2D\n'), ((1178, 1236), 'utils.show_mesh', 'show_mesh', (['"""neural_surface_big.ply"""', 'param', 'surface', 'faces'], {}), "('neural_surface_big.ply', param, surface, faces)\n", (1187, 1236), False, 'from utils import show_mesh\n'), ((592, 619), 'torch.load', 'torch.load', (['CHECKPOINT_PATH'], {}), '(CHECKPOINT_PATH)\n', (602, 619), False, 'import torch\n'), ((321, 346), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (344, 346), False, 'import torch\n')]
|
from __future__ import print_function
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.utils.data
from torch.autograd import Variable
import numpy as np
import torch.nn.functional as F
class TNet(nn.Module):
def __init__(self, k=64):
super(TNet, self).__init__()
self.k = k
# Each layer has batchnorm and relu on it
# conv 3 64
self.conv1 = nn.Sequential(nn.Conv1d(k, 64, 1), nn.BatchNorm1d(64),
nn.ReLU(inplace=True))
# conv 64 128
self.conv2 = nn.Sequential(nn.Conv1d(64, 128, 1), nn.BatchNorm1d(128),
nn.ReLU(inplace=True))
# conv 128 1024
self.conv3 = nn.Sequential(nn.Conv1d(128, 1024, 1), nn.BatchNorm1d(1024),
nn.ReLU(inplace=True))
# max pool
self.mpool = nn.Sequential(nn.AdaptiveMaxPool1d(1))
# fc 1024 512
self.fc1 = nn.Sequential(nn.Linear(1024, 512), nn.BatchNorm1d(512),
nn.ReLU(inplace=True))
# fc 512 256
self.fc2 = nn.Sequential(nn.Linear(512, 256), nn.BatchNorm1d(256),
nn.ReLU(inplace=True))
# fc 256 k*k (no batchnorm, no relu)
self.fc3 = nn.Linear(256, k * k)
# add bias
self.fc3.bias = torch.nn.Parameter(torch.eye(k).view(-1))
# reshape
def forward(self, x):
batch_size = x.size()[0]
x = self.conv1(x)
x = self.conv2(x)
x = self.conv3(x)
x = self.mpool(x)
x = x.view(x.shape[:-1])
x = self.fc1(x)
x = self.fc2(x)
x = self.fc3(x)
x = x.view(batch_size, self.k, self.k)
return x
class PointNetfeat(nn.Module):
def __init__(self, global_feat=True, feature_transform=False):
super(PointNetfeat, self).__init__()
self.feature_transform = feature_transform
# Use TNet to apply transformation on input and multiply the input points with the transformation
self.tnet1 = TNet(k=3)
# conv 3 64
self.conv1 = nn.Sequential(nn.Conv1d(3, 64, 1), nn.BatchNorm1d(64),
nn.ReLU(inplace=True))
# Use TNet to apply transformation on features and multiply the input features with the transformation
# (if feature_transform is true)
# conv 64 128
self.conv2 = nn.Sequential(nn.Conv1d(64, 128, 1), nn.BatchNorm1d(128),
nn.ReLU(inplace=True))
# conv 128 1024 (no relu)
self.conv3 = nn.Sequential(nn.Conv1d(128, 1024, 1), nn.BatchNorm1d(1024))
# max pool
self.mpool = nn.Sequential(nn.AdaptiveMaxPool1d(1))
self.global_feat = global_feat
self.feature_transform = feature_transform
if self.feature_transform:
self.tnet2 = TNet(k=64)
def forward(self, x):
n_pts = x.size()[2]
# You will need these extra outputs:
# trans = output of applying TNet function to input
# trans_feat = output of applying TNet function to features (if feature_transform is true)
trans = self.tnet1(x)
x = x.transpose(2, 1)
x = torch.bmm(x, trans)
x = x.transpose(2, 1)
x = self.conv1(x)
if self.feature_transform:
trans_feat = self.tnet2(x)
x = x.transpose(2, 1)
x = torch.bmm(x, trans_feat)
x = x.transpose(2, 1)
else:
trans_feat = None
pointfeat = x
x = self.conv2(x)
x = self.conv3(x)
x = self.mpool(x)
x = x.view(x.shape[:-1])
if self.global_feat: # This shows if we're doing classification or segmentation
return x, trans, trans_feat
else:
x = x.view(-1, 1024, 1).repeat(1, 1, n_pts)
return torch.cat([x, pointfeat], 1), trans, trans_feat
class PointNetCls(nn.Module):
def __init__(self, k=2, feature_transform=False):
super(PointNetCls, self).__init__()
self.feature_transform = feature_transform
self.feat = PointNetfeat(global_feat=True, feature_transform=feature_transform)
self.fc1 = nn.Linear(1024, 512)
self.fc2 = nn.Linear(512, 256)
self.fc3 = nn.Linear(256, k)
self.dropout = nn.Dropout(p=0.3)
self.bn1 = nn.BatchNorm1d(512)
self.bn2 = nn.BatchNorm1d(256)
self.relu = nn.ReLU()
def forward(self, x):
x, trans, trans_feat = self.feat(x)
x = F.relu(self.bn1(self.fc1(x)))
x = F.relu(self.bn2(self.dropout(self.fc2(x))))
x = self.fc3(x)
return F.log_softmax(x, dim=1), trans, trans_feat
class PointNetDenseCls(nn.Module):
def __init__(self, k=2, feature_transform=False):
super(PointNetDenseCls, self).__init__()
self.k = k
self.feature_transform = feature_transform
# get global features + point features from PointNetfeat
self.pointNetFeat = PointNetfeat(global_feat=False, feature_transform=feature_transform)
# conv 1088 512
self.conv1 = nn.Sequential(nn.Conv1d(1088, 512, 1), nn.BatchNorm1d(512),
nn.ReLU(inplace=True))
# conv 512 256
self.conv2 = nn.Sequential(nn.Conv1d(512, 256, 1), nn.BatchNorm1d(256),
nn.ReLU(inplace=True))
# conv 256 128
self.conv3 = nn.Sequential(nn.Conv1d(256, 128, 1), nn.BatchNorm1d(128),
nn.ReLU(inplace=True))
# conv 128 k
self.conv4 = torch.nn.Conv1d(128, self.k, 1)
# softmax
def forward(self, x):
# You will need these extra outputs:
# trans = output of applying TNet function to input
# trans_feat = output of applying TNet function to features (if feature_transform is true)
# (you can directly get them from PointNetfeat)
batch_size = x.size()[0]
n_pts = x.size()[2]
x, trans, trans_feat = self.pointNetFeat(x)
x = self.conv1(x)
x = self.conv2(x)
x = self.conv3(x)
x = self.conv4(x)
x = x.transpose(2, 1)
x = x.reshape(-1, self.k)
x = F.log_softmax(x, dim=-1)
x = x.view(batch_size, n_pts, self.k)
return x, trans, trans_feat
def feature_transform_regularizer(trans):
# compute |((trans * trans.transpose) - I)|^2
I_matrix = torch.eye(trans.size()[1])[None, :, :]
AAT = torch.bmm(trans, trans.transpose(2, 1))
diffMat = AAT - I_matrix.cuda()
loss = torch.norm(diffMat, dim=(1, 2))
loss = torch.mean(loss)
return loss
if __name__ == '__main__':
sim_data = Variable(torch.rand(32, 3, 2500))
print('Input data dimensions:', sim_data.size())
trans = TNet(k=3)
out = trans(sim_data)
print('TNet', out.size())
print('loss', feature_transform_regularizer(out.cuda()))
feat_trans_reg_testmat = Variable(torch.rand(16, 64, 3))
print('Input Matrix for FT Regularizer:', feat_trans_reg_testmat.size())
print('Feature Transform Regularizer Output: ', feature_transform_regularizer(feat_trans_reg_testmat.cuda()))
sim_data_64d = Variable(torch.rand(32, 64, 2500))
trans = TNet(k=64)
out = trans(sim_data_64d)
print('TNet 64d', out.size())
print('loss', feature_transform_regularizer(out.cuda()))
pointfeat = PointNetfeat(global_feat=True)
print('Input data dimensions:', sim_data.size())
out, _, _ = pointfeat(sim_data)
print('global feat', out.size())
pointfeat = PointNetfeat(global_feat=False)
print('Input data dimensions:', sim_data.size())
out, _, _ = pointfeat(sim_data)
print('point feat', out.size())
cls = PointNetCls(k=5)
print('Input data dimensions:', sim_data.size())
out, _, _ = cls(sim_data)
print('class', out.size())
seg = PointNetDenseCls(k=3)
print('Input data dimensions:', sim_data.size())
out, _, _ = seg(sim_data)
print('seg', out.size())
|
[
"torch.mean",
"torch.nn.Dropout",
"torch.bmm",
"torch.nn.ReLU",
"torch.rand",
"torch.eye",
"torch.norm",
"torch.nn.BatchNorm1d",
"torch.nn.Conv1d",
"torch.nn.AdaptiveMaxPool1d",
"torch.cat",
"torch.nn.functional.log_softmax",
"torch.nn.Linear"
] |
[((6692, 6723), 'torch.norm', 'torch.norm', (['diffMat'], {'dim': '(1, 2)'}), '(diffMat, dim=(1, 2))\n', (6702, 6723), False, 'import torch\n'), ((6735, 6751), 'torch.mean', 'torch.mean', (['loss'], {}), '(loss)\n', (6745, 6751), False, 'import torch\n'), ((1297, 1318), 'torch.nn.Linear', 'nn.Linear', (['(256)', '(k * k)'], {}), '(256, k * k)\n', (1306, 1318), True, 'import torch.nn as nn\n'), ((3319, 3338), 'torch.bmm', 'torch.bmm', (['x', 'trans'], {}), '(x, trans)\n', (3328, 3338), False, 'import torch\n'), ((4312, 4332), 'torch.nn.Linear', 'nn.Linear', (['(1024)', '(512)'], {}), '(1024, 512)\n', (4321, 4332), True, 'import torch.nn as nn\n'), ((4352, 4371), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(256)'], {}), '(512, 256)\n', (4361, 4371), True, 'import torch.nn as nn\n'), ((4391, 4408), 'torch.nn.Linear', 'nn.Linear', (['(256)', 'k'], {}), '(256, k)\n', (4400, 4408), True, 'import torch.nn as nn\n'), ((4432, 4449), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.3)'}), '(p=0.3)\n', (4442, 4449), True, 'import torch.nn as nn\n'), ((4469, 4488), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(512)'], {}), '(512)\n', (4483, 4488), True, 'import torch.nn as nn\n'), ((4508, 4527), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(256)'], {}), '(256)\n', (4522, 4527), True, 'import torch.nn as nn\n'), ((4548, 4557), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (4555, 4557), True, 'import torch.nn as nn\n'), ((5708, 5739), 'torch.nn.Conv1d', 'torch.nn.Conv1d', (['(128)', 'self.k', '(1)'], {}), '(128, self.k, 1)\n', (5723, 5739), False, 'import torch\n'), ((6340, 6364), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['x'], {'dim': '(-1)'}), '(x, dim=-1)\n', (6353, 6364), True, 'import torch.nn.functional as F\n'), ((6821, 6844), 'torch.rand', 'torch.rand', (['(32)', '(3)', '(2500)'], {}), '(32, 3, 2500)\n', (6831, 6844), False, 'import torch\n'), ((7077, 7098), 'torch.rand', 'torch.rand', (['(16)', '(64)', '(3)'], {}), '(16, 64, 3)\n', (7087, 7098), False, 'import torch\n'), ((7320, 7344), 'torch.rand', 'torch.rand', (['(32)', '(64)', '(2500)'], {}), '(32, 64, 2500)\n', (7330, 7344), False, 'import torch\n'), ((425, 444), 'torch.nn.Conv1d', 'nn.Conv1d', (['k', '(64)', '(1)'], {}), '(k, 64, 1)\n', (434, 444), True, 'import torch.nn as nn\n'), ((446, 464), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(64)'], {}), '(64)\n', (460, 464), True, 'import torch.nn as nn\n'), ((501, 522), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (508, 522), True, 'import torch.nn as nn\n'), ((581, 602), 'torch.nn.Conv1d', 'nn.Conv1d', (['(64)', '(128)', '(1)'], {}), '(64, 128, 1)\n', (590, 602), True, 'import torch.nn as nn\n'), ((604, 623), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(128)'], {}), '(128)\n', (618, 623), True, 'import torch.nn as nn\n'), ((660, 681), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (667, 681), True, 'import torch.nn as nn\n'), ((742, 765), 'torch.nn.Conv1d', 'nn.Conv1d', (['(128)', '(1024)', '(1)'], {}), '(128, 1024, 1)\n', (751, 765), True, 'import torch.nn as nn\n'), ((767, 787), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(1024)'], {}), '(1024)\n', (781, 787), True, 'import torch.nn as nn\n'), ((824, 845), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (831, 845), True, 'import torch.nn as nn\n'), ((901, 924), 'torch.nn.AdaptiveMaxPool1d', 'nn.AdaptiveMaxPool1d', (['(1)'], {}), '(1)\n', (921, 924), True, 'import torch.nn as nn\n'), ((981, 1001), 'torch.nn.Linear', 'nn.Linear', 
(['(1024)', '(512)'], {}), '(1024, 512)\n', (990, 1001), True, 'import torch.nn as nn\n'), ((1003, 1022), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(512)'], {}), '(512)\n', (1017, 1022), True, 'import torch.nn as nn\n'), ((1057, 1078), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1064, 1078), True, 'import torch.nn as nn\n'), ((1135, 1154), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(256)'], {}), '(512, 256)\n', (1144, 1154), True, 'import torch.nn as nn\n'), ((1156, 1175), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(256)'], {}), '(256)\n', (1170, 1175), True, 'import torch.nn as nn\n'), ((1210, 1231), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1217, 1231), True, 'import torch.nn as nn\n'), ((2144, 2163), 'torch.nn.Conv1d', 'nn.Conv1d', (['(3)', '(64)', '(1)'], {}), '(3, 64, 1)\n', (2153, 2163), True, 'import torch.nn as nn\n'), ((2165, 2183), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(64)'], {}), '(64)\n', (2179, 2183), True, 'import torch.nn as nn\n'), ((2220, 2241), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2227, 2241), True, 'import torch.nn as nn\n'), ((2526, 2547), 'torch.nn.Conv1d', 'nn.Conv1d', (['(64)', '(128)', '(1)'], {}), '(64, 128, 1)\n', (2535, 2547), True, 'import torch.nn as nn\n'), ((2549, 2568), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(128)'], {}), '(128)\n', (2563, 2568), True, 'import torch.nn as nn\n'), ((2605, 2626), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2612, 2626), True, 'import torch.nn as nn\n'), ((2698, 2721), 'torch.nn.Conv1d', 'nn.Conv1d', (['(128)', '(1024)', '(1)'], {}), '(128, 1024, 1)\n', (2707, 2721), True, 'import torch.nn as nn\n'), ((2723, 2743), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(1024)'], {}), '(1024)\n', (2737, 2743), True, 'import torch.nn as nn\n'), ((2800, 2823), 'torch.nn.AdaptiveMaxPool1d', 'nn.AdaptiveMaxPool1d', (['(1)'], {}), '(1)\n', (2820, 2823), True, 'import torch.nn as nn\n'), ((3520, 3544), 'torch.bmm', 'torch.bmm', (['x', 'trans_feat'], {}), '(x, trans_feat)\n', (3529, 3544), False, 'import torch\n'), ((4766, 4789), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['x'], {'dim': '(1)'}), '(x, dim=1)\n', (4779, 4789), True, 'import torch.nn.functional as F\n'), ((5240, 5263), 'torch.nn.Conv1d', 'nn.Conv1d', (['(1088)', '(512)', '(1)'], {}), '(1088, 512, 1)\n', (5249, 5263), True, 'import torch.nn as nn\n'), ((5265, 5284), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(512)'], {}), '(512)\n', (5279, 5284), True, 'import torch.nn as nn\n'), ((5321, 5342), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (5328, 5342), True, 'import torch.nn as nn\n'), ((5402, 5424), 'torch.nn.Conv1d', 'nn.Conv1d', (['(512)', '(256)', '(1)'], {}), '(512, 256, 1)\n', (5411, 5424), True, 'import torch.nn as nn\n'), ((5426, 5445), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(256)'], {}), '(256)\n', (5440, 5445), True, 'import torch.nn as nn\n'), ((5482, 5503), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (5489, 5503), True, 'import torch.nn as nn\n'), ((5563, 5585), 'torch.nn.Conv1d', 'nn.Conv1d', (['(256)', '(128)', '(1)'], {}), '(256, 128, 1)\n', (5572, 5585), True, 'import torch.nn as nn\n'), ((5587, 5606), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(128)'], {}), '(128)\n', (5601, 5606), True, 'import torch.nn as nn\n'), ((5643, 5664), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (5650, 5664), 
True, 'import torch.nn as nn\n'), ((3976, 4004), 'torch.cat', 'torch.cat', (['[x, pointfeat]', '(1)'], {}), '([x, pointfeat], 1)\n', (3985, 4004), False, 'import torch\n'), ((1381, 1393), 'torch.eye', 'torch.eye', (['k'], {}), '(k)\n', (1390, 1393), False, 'import torch\n')]
|
'''output depth statistics for a BAM file.
'''
import sys
import collections
import subprocess
import re
import os
import shlex
import cgatcore.experiment as E
import cgatcore.iotools as iotools
def main(argv=None):
"""script main.
parses command line options in sys.argv, unless *argv* is given.
"""
if not argv:
argv = sys.argv
# setup command line parser
parser = E.ArgumentParser(description=__doc__)
parser.add_argument(
"--input-filename-fasta", dest="input_filename_fasta", type=str,
help="filename with reference sequence in fasta format ")
parser.add_argument(
"--counting-mode", dest="counting_mode", type=str,
choices=("all", "pileup_defaults"),
help="counting mode. all=all reads/bases. pileup-defaults= "
"use default pileup thresholds. Options will be added to "
"--mpileup-options. .")
parser.add_argument(
"--mpileup-options", dest="mpileup_options", type=str,
help="pileup options to use ")
parser.set_defaults(
mpileup_options="",
counting_mode="all",
input_filename_fasta=None,
report_step=1000000,
)
# add common options (-h/--help, ...) and parse command line
args = E.start(parser, argv=argv, add_output_options=True)
bamfile = args[0]
mpileup_options = args.mpileup_options
if args.counting_mode == "all":
mpileup_options += " -Q 0 -B -A"
read_depth_histogram = collections.defaultdict(int)
base_depth_histogram = collections.defaultdict(int)
# deletions are marked by something like -2AA at the first
# position and a '*' for subsequent positions
rx_deletions = re.compile("([-][0-9]+|[*])")
report_step = args.report_step
npositions = 0
samtools = iotools.which("samtools")
statement = (
"{samtools} mpileup "
"-f {reference_fasta} "
"{mpileup_options} "
"{bamfile} ".format(
samtools=samtools,
reference_fasta=args.input_filename_fasta,
mpileup_options=mpileup_options,
bamfile=os.path.abspath(bamfile)))
E.info("running the following statement: {}".format(statement))
cmd_args = shlex.split(statement)
proc = subprocess.Popen(
cmd_args,
shell=False,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
cwd=os.path.abspath(os.curdir))
for line in proc.stdout:
line = line.decode("utf-8")
contig, pos, base, read_depth, info, qualities = line[:-1].split("\t")
read_depth = int(read_depth)
pos = int(pos)
if pos % report_step == 0:
E.info("working on {}: {}".format(contig, pos))
ndeletions = len(rx_deletions.findall(info))
base_depth = read_depth - ndeletions
read_depth_histogram[read_depth] += 1
base_depth_histogram[base_depth] += 1
for line in proc.stderr:
E.warn(line)
keys = sorted(set(read_depth_histogram.keys()).union(
base_depth_histogram.keys()))
args.stdout.write("depth\tread_depth_positions\tbase_depth_positions\n")
for key in keys:
args.stdout.write("{}\t{}\t{}\n".format(
key,
read_depth_histogram[key],
base_depth_histogram[key]))
E.info("positions tested: {}".format(sum(read_depth_histogram.values())))
E.stop()
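# Entry point and example invocation (added; the guard and the file names are
# assumptions, only the options come from the parser defined above):
#   python bam_depth_stats.py --input-filename-fasta=reference.fa --counting-mode=all sample.bam > depth.tsv
if __name__ == "__main__":
    sys.exit(main(sys.argv))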
|
[
"os.path.abspath",
"cgatcore.experiment.ArgumentParser",
"shlex.split",
"cgatcore.experiment.stop",
"collections.defaultdict",
"cgatcore.experiment.warn",
"cgatcore.experiment.start",
"cgatcore.iotools.which",
"re.compile"
] |
[((395, 432), 'cgatcore.experiment.ArgumentParser', 'E.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (411, 432), True, 'import cgatcore.experiment as E\n'), ((1255, 1306), 'cgatcore.experiment.start', 'E.start', (['parser'], {'argv': 'argv', 'add_output_options': '(True)'}), '(parser, argv=argv, add_output_options=True)\n', (1262, 1306), True, 'import cgatcore.experiment as E\n'), ((1480, 1508), 'collections.defaultdict', 'collections.defaultdict', (['int'], {}), '(int)\n', (1503, 1508), False, 'import collections\n'), ((1536, 1564), 'collections.defaultdict', 'collections.defaultdict', (['int'], {}), '(int)\n', (1559, 1564), False, 'import collections\n'), ((1698, 1727), 're.compile', 're.compile', (['"""([-][0-9]+|[*])"""'], {}), "('([-][0-9]+|[*])')\n", (1708, 1727), False, 'import re\n'), ((1798, 1823), 'cgatcore.iotools.which', 'iotools.which', (['"""samtools"""'], {}), "('samtools')\n", (1811, 1823), True, 'import cgatcore.iotools as iotools\n'), ((2226, 2248), 'shlex.split', 'shlex.split', (['statement'], {}), '(statement)\n', (2237, 2248), False, 'import shlex\n'), ((3401, 3409), 'cgatcore.experiment.stop', 'E.stop', ([], {}), '()\n', (3407, 3409), True, 'import cgatcore.experiment as E\n'), ((2952, 2964), 'cgatcore.experiment.warn', 'E.warn', (['line'], {}), '(line)\n', (2958, 2964), True, 'import cgatcore.experiment as E\n'), ((2114, 2138), 'os.path.abspath', 'os.path.abspath', (['bamfile'], {}), '(bamfile)\n', (2129, 2138), False, 'import os\n'), ((2393, 2419), 'os.path.abspath', 'os.path.abspath', (['os.curdir'], {}), '(os.curdir)\n', (2408, 2419), False, 'import os\n')]
|
import os
import sys
from configparser import NoSectionError
from simple_monitor_alert.alerts import Alerts
from simple_monitor_alert.lines import Observable, ItemLine
from simple_monitor_alert.monitor import Monitors
from simple_monitor_alert.sma import Results, Config, MonitorsInfo
from simple_monitor_alert.utils.files import JSONFile
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
MONITORS_DIR = os.path.join(BASE_DIR, 'assets', 'monitors')
class FakeJSONFile(JSONFile):
def __init__(self, data):
super(FakeJSONFile, self).__init__('/Fake-JSON-File', create=False)
self.update(data)
def read(self):
pass
def write(self):
pass
class FakeObservableResults(FakeJSONFile, Results):
monitor = None
def __init__(self, data=None, sma=None):
data = data or {'monitors': {}}
super(FakeObservableResults, self).__init__(data)
self.sma = sma
def get_observable_result(self, observable):
monitor = self['monitors'].get(getattr(observable, 'monitor', self.monitor), {})
result = monitor.get(observable.name, self.get_default_observable_result())
monitor[observable.name] = result
self['monitors'][getattr(observable, 'monitor', self.monitor)] = monitor
return result
class FakeMonitorsInfo(FakeJSONFile, MonitorsInfo):
pass
class FakeMonitors(Monitors):
pass
class FakeSMA(object):
def __init__(self, config=None, monitors_info=None, monitors_dir=MONITORS_DIR):
self.results = FakeObservableResults()
self.monitors_info = monitors_info or FakeMonitorsInfo({})
self.config = config
self.monitors = FakeMonitors(monitors_dir, sma=self)
class FakeAlert(object):
executions = 0
def __init__(self, section):
self.section = section
def send(self, *args, **kwargs):
self.executions += 1
return True
class FakeConfig(Config):
def __init__(self, data):
if sys.version_info >= (3, 0):
super().__init__('/Fake-Config-File')
else:
# Old Style Class
Config.__init__(self, '/Fake-Config-File')
self._data = data
def items(self, section=None, **kwargs):
try:
return self._data[section]
except KeyError:
raise NoSectionError(section)
class TestBase(object):
def get_observable(self):
observable = Observable('test')
observable.add_line(ItemLine('test.expected', '20'))
observable.add_line(ItemLine('test.value', '19'))
return observable
def get_alerts(self, section, sma):
alerts_modules = [FakeAlert(section)]
alerts = Alerts(sma, '/Fake-Alerts-Dir', alerts_modules, [section])
return alerts
def get_results(self, data=None, monitors_info=None):
return FakeObservableResults(data, FakeSMA(monitors_info=monitors_info).monitors_info)
def get_sma(self, section=None, monitors_info=None):
config = FakeConfig({section: ()})
sma = FakeSMA(config, monitors_info=monitors_info)
return sma
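# Note (added): FakeSMA wires FakeObservableResults, FakeMonitorsInfo and
# FakeMonitors together so the Alerts machinery can be exercised without real
# JSON/config files on disk; TestBase then hands tests a ready-made observable
# (expected 20 vs. value 19), a FakeAlert-backed Alerts instance and results.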
|
[
"os.path.abspath",
"simple_monitor_alert.sma.Config.__init__",
"configparser.NoSectionError",
"simple_monitor_alert.lines.ItemLine",
"simple_monitor_alert.lines.Observable",
"os.path.join",
"simple_monitor_alert.alerts.Alerts"
] |
[((410, 454), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""assets"""', '"""monitors"""'], {}), "(BASE_DIR, 'assets', 'monitors')\n", (422, 454), False, 'import os\n'), ((368, 393), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (383, 393), False, 'import os\n'), ((2425, 2443), 'simple_monitor_alert.lines.Observable', 'Observable', (['"""test"""'], {}), "('test')\n", (2435, 2443), False, 'from simple_monitor_alert.lines import Observable, ItemLine\n'), ((2693, 2751), 'simple_monitor_alert.alerts.Alerts', 'Alerts', (['sma', '"""/Fake-Alerts-Dir"""', 'alerts_modules', '[section]'], {}), "(sma, '/Fake-Alerts-Dir', alerts_modules, [section])\n", (2699, 2751), False, 'from simple_monitor_alert.alerts import Alerts\n'), ((2114, 2156), 'simple_monitor_alert.sma.Config.__init__', 'Config.__init__', (['self', '"""/Fake-Config-File"""'], {}), "(self, '/Fake-Config-File')\n", (2129, 2156), False, 'from simple_monitor_alert.sma import Results, Config, MonitorsInfo\n'), ((2472, 2503), 'simple_monitor_alert.lines.ItemLine', 'ItemLine', (['"""test.expected"""', '"""20"""'], {}), "('test.expected', '20')\n", (2480, 2503), False, 'from simple_monitor_alert.lines import Observable, ItemLine\n'), ((2533, 2561), 'simple_monitor_alert.lines.ItemLine', 'ItemLine', (['"""test.value"""', '"""19"""'], {}), "('test.value', '19')\n", (2541, 2561), False, 'from simple_monitor_alert.lines import Observable, ItemLine\n'), ((2324, 2347), 'configparser.NoSectionError', 'NoSectionError', (['section'], {}), '(section)\n', (2338, 2347), False, 'from configparser import NoSectionError\n')]
|
import click
import pickle
from subprocess import call
class Safe:
update = False
def __init__(self, safe_file):
self.safe_file = safe_file
def load(self):
try:
with open(self.safe_file, 'rb') as input:
try:
self.safe = pickle.load(input)
except EOFError:
self.safe = {}
except IOError:
self.safe = {}
def save(self):
with open(self.safe_file, 'wb') as output:
pickle.dump(self.safe, output, 2)
def clear(self):
self.safe = {}
self.update = True
def delete(self, alias):
if alias in self.safe:
del self.safe[alias]
self.update = True
def get_command(self, alias):
if alias in self.safe:
return self.safe[alias]
else:
return None
def set_command(self, alias, command):
self.safe[alias] = command
self.update = True
def execute(self, alias):
call(self.safe[alias], shell=True)
def show(self):
table = [('alias:', 'command:')]
for key, value in self.safe.items():
table.append((key, value))
column_size = [max(map(len, column)) for column in zip(*table)]
format_string = ' | '.join(["{{:<{}}}".format(i) for i in column_size])
table.insert(1, ['-' * i for i in column_size])
for row in table:
click.echo('{}'.format(format_string.format(*row)))
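# Usage sketch (added for illustration; the pickle file name is hypothetical):
# a typical load -> mutate -> save round trip with the Safe store above.
if __name__ == '__main__':
    safe = Safe('commands.safe')
    safe.load()
    safe.set_command('ll', 'ls -la')
    safe.show()
    if safe.update:
        safe.save()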
|
[
"pickle.dump",
"pickle.load",
"subprocess.call"
] |
[((820, 854), 'subprocess.call', 'call', (['self.safe[alias]'], {'shell': '(True)'}), '(self.safe[alias], shell=True)\n', (824, 854), False, 'from subprocess import call\n'), ((404, 437), 'pickle.dump', 'pickle.dump', (['self.safe', 'output', '(2)'], {}), '(self.safe, output, 2)\n', (415, 437), False, 'import pickle\n'), ((242, 260), 'pickle.load', 'pickle.load', (['input'], {}), '(input)\n', (253, 260), False, 'import pickle\n')]
|
#!/usr/bin/env python3
import argparse
import sys
import os
import uuid
import tempfile
import random
import synapseclient as syn
script_dir = os.path.dirname(__file__)
sys.path.append(os.path.join(script_dir, '..', 'src'))
try:
from kitools import KiProject, DataUri, SysPath, DataTypeTemplate
except Exception as ex:
print('WARNING: Failed to load kitools: {0}'.format(ex))
def gen_id():
return str(uuid.uuid4())[:8]
def mk_dirs(*args):
path = os.path.join(*args)
if not os.path.exists(path):
os.makedirs(path)
return path
def write_random_data_to_file(file_path):
with open(file_path, mode='w') as f:
for _ in range(1, random.randrange(2, 10)):
f.write(str(uuid.uuid4()))
def mk_local_files_and_folders(start_path,
prefix='',
depth=3,
file_count=3,
folder_count=3,
current_depth=0,
syn_client=None,
syn_parent=None):
current_depth += 1
local_results = []
remote_results = []
for _ in range(0, file_count):
filename = '{0}test_file_{1}_{2}.dat'.format(prefix, current_depth, gen_id())
file_path = os.path.join(start_path, filename)
# Fill the file with random data.
write_random_data_to_file(file_path)
local_results.append(file_path)
# Store the file in Synapse
if syn_parent:
syn_file = syn_client.store(syn.File(path=file_path, parent=syn_parent))
remote_results.append(syn_file)
if current_depth < depth:
# Create the folders.
for _ in range(0, folder_count):
foldername = '{0}test_folder_{1}_{2}'.format(prefix, current_depth, gen_id())
folder_path = mk_dirs(start_path, foldername)
local_results.append(folder_path)
# Create the folder in Synapse; keep syn_folder defined even when there is
# no remote parent so the recursive call below never sees an unbound name.
syn_folder = None
if syn_parent:
syn_folder = syn_client.store(syn.Folder(name=foldername, parent=syn_parent))
remote_results.append(syn_folder)
more_locals, more_remotes = mk_local_files_and_folders(folder_path,
prefix=prefix,
depth=depth,
current_depth=current_depth,
syn_client=syn_client,
syn_parent=syn_folder)
local_results += more_locals
remote_results += more_remotes
return local_results, remote_results
def create_demo_curator():
"""
1. Creates a new Synapse Project.
2. Creates the data/[core, artifacts, discovered] folders in Synapse.
3. Creates and uploads files and folders into Synapse.
4. Creates a new KiProject.
5. data_adds each of the root data folders (core, artifacts, discovered) that were created in Synapse.
6. data_pulls all the remote files and folders from Synapse.
7. Creates one new file in each of the local data/[core, artifacts, discovered] folders.
8. data_pushes each of the newly added local files.
9. Changes 3 of the files that were pulled from the Synapse Project and data_pushes them to Synapse.
10. Prints out the commands to load the project, data_add/data_pull the new and changed files.
"""
print('Creating Demo for curator...')
demo_id = gen_id()
demo_commands = []
demo_commands.append('')
demo_commands.append('# Import the KiProject class:')
demo_commands.append('from kitools import KiProject')
kiproject_path = mk_dirs(tempfile.gettempdir(), 'demo_curator_{0}'.format(demo_id))
syn_client = syn.login(silent=True)
# Create the Synapse project
syn_project = syn_client.store(syn.Project(name='Ki Tools Curator Demo - {0}'.format(demo_id)))
kiproject = KiProject(kiproject_path,
no_prompt=True,
title='Demo KiProject {0}'.format(demo_id),
project_uri=DataUri('syn', syn_project.id).uri,
data_type_template=DataTypeTemplate.default().name)
demo_commands.append('')
demo_commands.append('# Open the KiProject:')
demo_commands.append(
'kiproject = KiProject({0}"{1}")'.format(('r' if os.sep == '\\' else ''), kiproject.local_path))
# Add the synapse project files/folders.
syn_temp_dir = mk_dirs(kiproject_path, '.demo-data')
# Create files and folders in each DataType directory.
for data_type in kiproject.data_types:
parent = syn_project
dt_folder_path = mk_dirs(os.path.join(syn_temp_dir, data_type.rel_path))
for name in SysPath(data_type.rel_path).rel_parts:
parent = syn_client.store(syn.Folder(name=name, parent=parent))
kiproject.data_add(DataUri('syn', parent.id).uri, name=parent.name)
mk_local_files_and_folders(dt_folder_path,
depth=3,
prefix='{0}_'.format(data_type.name),
syn_client=syn_client,
syn_parent=parent)
kiproject.data_pull()
# Create some new files for data_add/data_push
demo_commands.append('')
demo_commands.append('# Add some new files and push them:')
for data_type in kiproject.data_types:
dt_folder_path = mk_dirs(data_type.abs_path)
local_results, _ = mk_local_files_and_folders(dt_folder_path, prefix='new_study_file_', depth=0, file_count=1)
for new_filename in local_results:
demo_commands.append('kiproject.data_add({0}"{1}")'.format(
('r' if os.sep == '\\' else ''), SysPath(new_filename, rel_start=kiproject.local_path).rel_path))
demo_commands.append('kiproject.data_push()')
# Create a change in some files for data_push
demo_commands.append('')
demo_commands.append('# Push some changed files:')
change_count = 0
for resource in kiproject.resources:
if change_count >= 3:
break
if resource.abs_path and resource.abs_path.endswith('.dat'):
change_count += 1
file_path = resource.abs_path
write_random_data_to_file(file_path)
demo_commands.append('kiproject.data_push("{0}")'.format(os.path.basename(resource.name)))
print('')
print('Demo project created in: {0}'.format(kiproject_path))
print('Synapse Project: {0} ({1})'.format(syn_project.name, syn_project.id))
print('')
print('Python Script:')
for command in demo_commands:
print(command)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('demo', nargs='?', help='Which demo to create.', choices=['curator'], default='curator')
args = parser.parse_args()
if args.demo == 'curator':
create_demo_curator()
if __name__ == "__main__":
main()
|
[
"kitools.DataTypeTemplate.default",
"uuid.uuid4",
"argparse.ArgumentParser",
"os.makedirs",
"synapseclient.Folder",
"os.path.basename",
"os.path.dirname",
"tempfile.gettempdir",
"os.path.exists",
"kitools.DataUri",
"random.randrange",
"synapseclient.login",
"kitools.SysPath",
"synapseclient.File",
"os.path.join"
] |
[((145, 170), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (160, 170), False, 'import os\n'), ((187, 224), 'os.path.join', 'os.path.join', (['script_dir', '""".."""', '"""src"""'], {}), "(script_dir, '..', 'src')\n", (199, 224), False, 'import os\n'), ((469, 488), 'os.path.join', 'os.path.join', (['*args'], {}), '(*args)\n', (481, 488), False, 'import os\n'), ((3938, 3960), 'synapseclient.login', 'syn.login', ([], {'silent': '(True)'}), '(silent=True)\n', (3947, 3960), True, 'import synapseclient as syn\n'), ((6919, 6944), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (6942, 6944), False, 'import argparse\n'), ((500, 520), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (514, 520), False, 'import os\n'), ((530, 547), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (541, 547), False, 'import os\n'), ((1317, 1351), 'os.path.join', 'os.path.join', (['start_path', 'filename'], {}), '(start_path, filename)\n', (1329, 1351), False, 'import os\n'), ((3861, 3882), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (3880, 3882), False, 'import tempfile\n'), ((418, 430), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (428, 430), False, 'import uuid\n'), ((675, 698), 'random.randrange', 'random.randrange', (['(2)', '(10)'], {}), '(2, 10)\n', (691, 698), False, 'import random\n'), ((4882, 4928), 'os.path.join', 'os.path.join', (['syn_temp_dir', 'data_type.rel_path'], {}), '(syn_temp_dir, data_type.rel_path)\n', (4894, 4928), False, 'import os\n'), ((4951, 4978), 'kitools.SysPath', 'SysPath', (['data_type.rel_path'], {}), '(data_type.rel_path)\n', (4958, 4978), False, 'from kitools import KiProject, DataUri, SysPath, DataTypeTemplate\n'), ((1580, 1623), 'synapseclient.File', 'syn.File', ([], {'path': 'file_path', 'parent': 'syn_parent'}), '(path=file_path, parent=syn_parent)\n', (1588, 1623), True, 'import synapseclient as syn\n'), ((4288, 4318), 'kitools.DataUri', 'DataUri', (['"""syn"""', 'syn_project.id'], {}), "('syn', syn_project.id)\n", (4295, 4318), False, 'from kitools import KiProject, DataUri, SysPath, DataTypeTemplate\n'), ((4369, 4395), 'kitools.DataTypeTemplate.default', 'DataTypeTemplate.default', ([], {}), '()\n', (4393, 4395), False, 'from kitools import KiProject, DataUri, SysPath, DataTypeTemplate\n'), ((5028, 5064), 'synapseclient.Folder', 'syn.Folder', ([], {'name': 'name', 'parent': 'parent'}), '(name=name, parent=parent)\n', (5038, 5064), True, 'import synapseclient as syn\n'), ((5094, 5119), 'kitools.DataUri', 'DataUri', (['"""syn"""', 'parent.id'], {}), "('syn', parent.id)\n", (5101, 5119), False, 'from kitools import KiProject, DataUri, SysPath, DataTypeTemplate\n'), ((725, 737), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (735, 737), False, 'import uuid\n'), ((2081, 2127), 'synapseclient.Folder', 'syn.Folder', ([], {'name': 'foldername', 'parent': 'syn_parent'}), '(name=foldername, parent=syn_parent)\n', (2091, 2127), True, 'import synapseclient as syn\n'), ((6597, 6628), 'os.path.basename', 'os.path.basename', (['resource.name'], {}), '(resource.name)\n', (6613, 6628), False, 'import os\n'), ((5976, 6029), 'kitools.SysPath', 'SysPath', (['new_filename'], {'rel_start': 'kiproject.local_path'}), '(new_filename, rel_start=kiproject.local_path)\n', (5983, 6029), False, 'from kitools import KiProject, DataUri, SysPath, DataTypeTemplate\n')]
|
from TASSELpy.net.maizegenetics.dna.snp.GenotypeTable import GenotypeTable
from TASSELpy.utils.helper import make_sig
from TASSELpy.utils.Overloading import javaConstructorOverload
import javabridge
## Dictionary to hold java imports
java_imports = {'AlleleDepth':'net/maizegenetics/dna/snp/depth/AlleleDepth',
'BitSet':'net/maizegenetics/util/BitSet',
'Chromosome':'net/maizegenetics/dna/map/Chromosome',
'CoreGenotypeTable':'net/maizegenetics/dna/snp/CoreGenotypeTable',
'GenotypeTable':'net.maizegenetics.dna.snp.GenotypeTable',
'GenotypeCallTable':'net/maizegenetics/dna/snp/genotypecall/GenotypeCallTable',
'PositionList':'net/maizegenetics/dna/map/PositionList',
'SiteScore':'net/maizegenetics/dna/snp/score/SiteScore',
'TaxaList':'net/maizegenetics/taxa/TaxaList'}
class CoreGenotypeTable(GenotypeTable):
_java_name = java_imports['CoreGenotypeTable']
@javaConstructorOverload(java_imports['CoreGenotypeTable'],
(make_sig([java_imports['GenotypeCallTable'],java_imports['PositionList'],
java_imports['TaxaList']],'void'),(object,object,object)),
(make_sig([java_imports['GenotypeCallTable'],java_imports['PositionList'],
java_imports['TaxaList'],java_imports['SiteScore'],
java_imports['AlleleDepth']],'void'),(object,object,object,object,object)))
def __init__(self,*args,**kwargs):
"""
Instantiates a CoreGenotypeTable
Signatures:
CoreGenotypeTable(GenotypeCallTable genotype, PositionList positionList,
TaxaList taxaList, SiteScore siteScore, AlleleDepth alleleDepth)
CoreGenotypeTable(GenotypeCallTable genotype, PositionList positionList, TaxaList taxaList)
"""
pass
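    # Illustrative usage only (a hedged sketch, not from the original source):
    # assuming `genotype`, `positions`, and `taxa` are already-constructed
    # GenotypeCallTable, PositionList, and TaxaList wrappers, the three-argument
    # overload documented above would be called as:
    #   table = CoreGenotypeTable(genotype, positions, taxa)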
|
[
"TASSELpy.utils.helper.make_sig"
] |
[((1074, 1187), 'TASSELpy.utils.helper.make_sig', 'make_sig', (["[java_imports['GenotypeCallTable'], java_imports['PositionList'],\n java_imports['TaxaList']]", '"""void"""'], {}), "([java_imports['GenotypeCallTable'], java_imports['PositionList'],\n java_imports['TaxaList']], 'void')\n", (1082, 1187), False, 'from TASSELpy.utils.helper import make_sig\n'), ((1251, 1425), 'TASSELpy.utils.helper.make_sig', 'make_sig', (["[java_imports['GenotypeCallTable'], java_imports['PositionList'],\n java_imports['TaxaList'], java_imports['SiteScore'], java_imports[\n 'AlleleDepth']]", '"""void"""'], {}), "([java_imports['GenotypeCallTable'], java_imports['PositionList'],\n java_imports['TaxaList'], java_imports['SiteScore'], java_imports[\n 'AlleleDepth']], 'void')\n", (1259, 1425), False, 'from TASSELpy.utils.helper import make_sig\n')]
|
import json
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.module_loading import import_string
def drfxios(request):
DRFXIOS_ROUTER_PATH = getattr(settings, 'DRFXIOS_ROUTER_PATH', None)
if not DRFXIOS_ROUTER_PATH:
raise ImproperlyConfigured("No DRFXIOS_ROUTER_PATH attr inside settings.py")
router = import_string(DRFXIOS_ROUTER_PATH)
r = router.registry
models_list = [x[0] for x in r]
return {
'DRFXIOS': {
'MODELS_LIST': json.dumps(models_list)
}
}
|
[
"django.utils.module_loading.import_string",
"django.core.exceptions.ImproperlyConfigured",
"json.dumps"
] |
[((383, 417), 'django.utils.module_loading.import_string', 'import_string', (['DRFXIOS_ROUTER_PATH'], {}), '(DRFXIOS_ROUTER_PATH)\n', (396, 417), False, 'from django.utils.module_loading import import_string\n'), ((299, 369), 'django.core.exceptions.ImproperlyConfigured', 'ImproperlyConfigured', (['"""No DRFXIOS_ROUTER_PATH attr inside settings.py"""'], {}), "('No DRFXIOS_ROUTER_PATH attr inside settings.py')\n", (319, 369), False, 'from django.core.exceptions import ImproperlyConfigured\n'), ((539, 562), 'json.dumps', 'json.dumps', (['models_list'], {}), '(models_list)\n', (549, 562), False, 'import json\n')]
|
#!/usr/bin/env python3
"""Encapsulates logic for generating and updating editor
configuration files to make it easy to work with Grapl code.
Provided as a self-documenting Click app for discoverability and ease
of maintenance.
"""
import json
from typing import Dict, List, Union
import click
import toml
from typing_extensions import TypedDict
# NOTE: This is essentially to silence the typechecker (and help us
# not shoot ourselves in the foot). It's not making any attempt to be
# a complete and faithful typing of Pyright configuration documents;
# it's just typing what we're currently using. Feel free to update
# this as this code develops and matures.
class PyrightConfig(TypedDict):
pythonVersion: str
pythonPlatform: str
venvPath: str
venv: str
verboseOutput: bool
reportMissingImports: bool
exclude: List[str]
executionEnvironments: List[Dict[str, Union[str, List[str]]]]
BASE_PYRIGHTCONFIG: PyrightConfig = {
"pythonVersion": "3.7",
"pythonPlatform": "Linux",
"venvPath": "build-support",
"venv": "grapl-venv",
"verboseOutput": True,
"reportMissingImports": True,
"exclude": [
"src/js/**",
"src/rust/**",
],
"executionEnvironments": [
{"root": "pulumi"},
{"root": "pants-plugins"},
# NOTE: We will augment this with the src/python root in the
# code below
],
}
PANTS_TOML = "pants.toml"
PYRIGHTCONFIG_JSON = "pyrightconfig.json"
def src_python_execution_environment() -> Dict[str, Union[str, List[str]]]:
"""Generate a pyright "executionEnvironments" entry for code in our
`src/python` directory.
Since this code is all interrelated, we need to provide the
appropriate "extraPaths" for Pyright to properly resolve imports,
types, etc. In general, this amounts to adding our Pants source
roots, with a few caveats:
1) not all the roots are required for Python code in that
directory
2) Our Pants configuration explicitly provides absolute paths, not
patterns that may be matched anywhere
As such, we first filter out what we don't need, and then
"relativize" the paths, since this is what Pyright need.
"""
pants = toml.load(PANTS_TOML)
source_roots = pants["source"]["root_patterns"]
if any(not r.startswith("/") for r in source_roots):
raise click.ClickException(
"Expected all Pants source roots to be absolute, but at least one was not!"
)
# We don't care about these source roots for things that are in src/python
filtered = [
root
for root in source_roots
if root
not in (
"/3rdparty",
"/build-support",
"/pants-plugins",
"/pulumi",
"/src/js/grapl-cdk",
"/src/proto",
)
]
relativized = [root.lstrip("/") for root in filtered]
return {"root": "src/python", "extraPaths": relativized}
def write_or_echo(output: str, path: str, write_file: bool) -> None:
""" Consolidate logic for whether to write `output` to the file at `path`, or to send it to standard output instead."""
if write_file:
with click.open_file(path, "w") as f:
f.write(output)
click.echo(f"Wrote content to {path} file")
else:
click.echo(output)
@click.command(name="generate")
@click.option(
"--write-file/--no-write-file",
is_flag=True,
default=True,
show_default=True,
help="Controls whether or not to write the generated output to disk, or to standard output.",
)
def generate_pyrightconfig(write_file: bool) -> None:
"""Generate a pyrightconfig.json file from pants.toml.
Do this if you have no existing pyrightconfig.json file that you
are using. If you already have one, on the other hand, please see
the `update` command instead.
"""
pyrightconfig = BASE_PYRIGHTCONFIG
pyrightconfig["executionEnvironments"].append(src_python_execution_environment())
output = json.dumps(pyrightconfig, indent=4)
write_or_echo(output, PYRIGHTCONFIG_JSON, write_file)
@click.command(name="update")
@click.option(
"--write-file/--no-write-file",
is_flag=True,
default=True,
show_default=True,
help="Controls whether or not to write the generated output to disk, or to standard output.",
)
def update_pyrightconfig(write_file: bool) -> None:
"""Update an existing pyrightconfig.json file.
In particular, the `extraPaths` entries for various
`executionEnvironments` must be kept in-sync with what we declare
in our pants.toml file.
Any other changes you may have made to your file will be
preserved.
"""
with click.open_file(PYRIGHTCONFIG_JSON, "r") as f:
pyright = json.load(f)
execution_environments = pyright["executionEnvironments"]
# Preserve other environments; we're only concerned about the
# src/python one here
new_execution_environments = [
e for e in execution_environments if e["root"] != "src/python"
]
new_execution_environments.append(src_python_execution_environment())
pyright.update({"executionEnvironments": new_execution_environments})
output = json.dumps(pyright, indent=4)
write_or_echo(output, PYRIGHTCONFIG_JSON, write_file)
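# Example invocation (the module filename below is an assumption, not stated in
# the source):
#   python editor_setup.py pyright update --no-write-file
# which prints the regenerated pyrightconfig.json to stdout instead of
# overwriting the file on disk.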
@click.group(name="pyright")
def configure_pyright() -> None:
""" Set up Pyright for Python IDE integration. """
configure_pyright.add_command(generate_pyrightconfig)
configure_pyright.add_command(update_pyrightconfig)
@click.group()
def editor_setup() -> None:
"""A utility for helping to configure IDEs and editors for working
with Grapl code."""
editor_setup.add_command(configure_pyright)
if __name__ == "__main__":
editor_setup()
|
[
"json.load",
"click.option",
"click.echo",
"json.dumps",
"click.command",
"click.ClickException",
"click.open_file",
"toml.load",
"click.group"
] |
[((3345, 3375), 'click.command', 'click.command', ([], {'name': '"""generate"""'}), "(name='generate')\n", (3358, 3375), False, 'import click\n'), ((3377, 3576), 'click.option', 'click.option', (['"""--write-file/--no-write-file"""'], {'is_flag': '(True)', 'default': '(True)', 'show_default': '(True)', 'help': '"""Controls whether or not to write the generated output to disk, or to standard output."""'}), "('--write-file/--no-write-file', is_flag=True, default=True,\n show_default=True, help=\n 'Controls whether or not to write the generated output to disk, or to standard output.'\n )\n", (3389, 3576), False, 'import click\n'), ((4119, 4147), 'click.command', 'click.command', ([], {'name': '"""update"""'}), "(name='update')\n", (4132, 4147), False, 'import click\n'), ((4149, 4348), 'click.option', 'click.option', (['"""--write-file/--no-write-file"""'], {'is_flag': '(True)', 'default': '(True)', 'show_default': '(True)', 'help': '"""Controls whether or not to write the generated output to disk, or to standard output."""'}), "('--write-file/--no-write-file', is_flag=True, default=True,\n show_default=True, help=\n 'Controls whether or not to write the generated output to disk, or to standard output.'\n )\n", (4161, 4348), False, 'import click\n'), ((5311, 5338), 'click.group', 'click.group', ([], {'name': '"""pyright"""'}), "(name='pyright')\n", (5322, 5338), False, 'import click\n'), ((5538, 5551), 'click.group', 'click.group', ([], {}), '()\n', (5549, 5551), False, 'import click\n'), ((2220, 2241), 'toml.load', 'toml.load', (['PANTS_TOML'], {}), '(PANTS_TOML)\n', (2229, 2241), False, 'import toml\n'), ((4021, 4056), 'json.dumps', 'json.dumps', (['pyrightconfig'], {'indent': '(4)'}), '(pyrightconfig, indent=4)\n', (4031, 4056), False, 'import json\n'), ((5220, 5249), 'json.dumps', 'json.dumps', (['pyright'], {'indent': '(4)'}), '(pyright, indent=4)\n', (5230, 5249), False, 'import json\n'), ((2366, 2473), 'click.ClickException', 'click.ClickException', (['"""Expected all Pants source roots to be absolute, but at least one was not!"""'], {}), "(\n 'Expected all Pants source roots to be absolute, but at least one was not!'\n )\n", (2386, 2473), False, 'import click\n'), ((3261, 3304), 'click.echo', 'click.echo', (['f"""Wrote content to {path} file"""'], {}), "(f'Wrote content to {path} file')\n", (3271, 3304), False, 'import click\n'), ((3323, 3341), 'click.echo', 'click.echo', (['output'], {}), '(output)\n', (3333, 3341), False, 'import click\n'), ((4712, 4752), 'click.open_file', 'click.open_file', (['PYRIGHTCONFIG_JSON', '"""r"""'], {}), "(PYRIGHTCONFIG_JSON, 'r')\n", (4727, 4752), False, 'import click\n'), ((4777, 4789), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4786, 4789), False, 'import json\n'), ((3192, 3218), 'click.open_file', 'click.open_file', (['path', '"""w"""'], {}), "(path, 'w')\n", (3207, 3218), False, 'import click\n')]
|
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import pyspark
import monasca_analytics.config.config as config
import monasca_analytics.ingestor.base as bi
import monasca_analytics.ldp.base as mldp
import monasca_analytics.sink.base as msink
import monasca_analytics.sml.base as bml
import monasca_analytics.spark.aggregator as agg
import monasca_analytics.spark.streaming_context as streamingctx
import monasca_analytics.voter.base as mvoter
logger = logging.getLogger(__name__)
class DriverExecutor(object):
"""Driver part of the job submitted to spark.
This is where we control what is submitted to workers,
what is driver specific and how the pipeline is constructed.
We also execute the pipeline from here.
"""
def __init__(self, _config):
self._links = None
self._sources = None
self._orchestrator = None
self.set_links(config.instantiate_components(_config))
def restart_spark():
self._ssc = streamingctx.create_streaming_context(
self._sc,
_config)
self._restart_spark = restart_spark
self._sc = pyspark.SparkContext(
appName=_config["spark_config"]["appName"])
self._ssc = streamingctx.create_streaming_context(self._sc, _config)
def set_links(self, links):
"""Set new set of links
This function has no effect on the current pipeline.
In order to use them, you need to restart the pipeline.
"""
self._links = links
logger.debug("Collect sources...")
self._sources = config.collect_sources(self._links)
logger.debug("New list of sources: {}".format(self._sources))
self._orchestrator = agg.Aggregator(self)
logger.debug("Propagating feature list...")
self._propagate_feature_list()
def start_pipeline(self):
"""Start the pipeline"""
# Start by connecting the source
if self._phase1_required():
logger.info("Phase 1 required, ldp won't produce data until"
" smls have finished.")
# Connect sources to ingestors
self._prepare_phase(self._connect_dependents_phase1)
# Preparation step for the orchestrator:
# Accumulate everything from the sources
self._orchestrator.prepare_final_accumulate_stream_step()
# Then prepare the orchestrator
self._prepare_orchestrator()
else:
# Connect sources to ldps
logger.info("Phase 1 was not required, skipping it.")
self._prepare_phase(self._connect_dependents_phase2)
logger.info("Start the streaming context")
self._ssc.start()
def stop_pipeline(self):
logger.debug("Stop spark context.")
self._ssc.stop(False, False)
logger.debug("Terminate sources.")
self._terminate_sources()
logger.debug("Restart spark context.")
self._restart_spark()
def move_to_phase2(self):
if self._ssc is not None:
logger.debug("Phase 2: Stop SparkStreamingContext.")
self._ssc.stop(False, False)
logger.debug("Phase 2: Stop sources")
self._terminate_sources()
logger.debug("Phase 2: Restart streaming...")
self._restart_spark()
logger.debug("Phase 2: Create new connections")
self._prepare_phase(self._connect_dependents_phase2)
self._ssc.start()
# ?
self._ssc.awaitTermination()
def _terminate_sources(self):
"""Terminates the sources."""
for source in self._sources:
source.terminate_source()
def _phase1_required(self):
for src in self._sources:
if any(isinstance(el, bi.BaseIngestor) for el in self._links[src]):
return True
return False
def _prepare_orchestrator(self):
"""
This is a part of phase 1. The orchestrator collects
input from all ingestors and then orchestrate the sml
pipeline to solve it and provide to LDPs the learned
data structure.
"""
smls = filter(lambda c: isinstance(c, bml.BaseSML),
self._links.keys())
sml_with_no_dependents = filter(
lambda c: set(self._links[c]).isdisjoint(smls),
smls)
for sml in sml_with_no_dependents:
logger.debug("Append {} to orchestrator".format(sml))
self._orchestrator.append_sml(sml)
self._connect_sml_dependents(sml)
def _prepare_phase(self, connect_dependent):
"""Prepare given phase by starting sources.
:type connect_dependent: (pyspark.streaming.DStream,
monasca_analytics.source.base.BaseSource) -> None
:param connect_dependent: Callback that is going to selectively connect
the appropriate dependencies of each sources.
"""
for src in self._sources:
logger.debug("Prepare source {}".format(src))
dstream = src.create_dstream(self._ssc)
connect_dependent(dstream, src)
def _connect_sml_dependents(self, from_component):
"""Connect an sml component with all its dependencies.
During phase 1 this code is running exclusively by the driver
at the moment.
:type from_component: bml.BaseSML | mvoter.BaseVoter
:param from_component: Where we came from.
"""
for connected_node in self._links[from_component]:
# SML can, for now, only be connected to voter.
if isinstance(connected_node, mvoter.BaseVoter) and \
isinstance(from_component, bml.BaseSML):
logger.debug("Set {} to {}"
.format(connected_node, from_component))
from_component.set_voter(connected_node)
# Voter can only be connected to LDPs
if isinstance(from_component, mvoter.BaseVoter) and \
isinstance(connected_node, mldp.BaseLDP):
logger.debug("Append {} to {}"
.format(connected_node, from_component))
from_component.append_ldp(connected_node)
# We don't connect LDP to anything
continue
# Only SML can be connected to a sink
if isinstance(connected_node, msink.BaseSink):
logger.debug("Sink {} into {}"
.format(from_component, connected_node))
connected_node.sink_ml(from_component)
# Sink can't be connected to anything
continue
self._connect_sml_dependents(connected_node)
def _connect_dependents_phase2(self, dstream, from_component):
"""Connect a component to its dependencies.
During phase 2, only live data processors are considered.
All ingestors are shutdown.
:type dstream: pyspark.streaming.DStream | None
:param dstream: Dstream that will be modified by dependent.
It can be None, only if from_component is aggregator,
sml or voter.
:type from_component: monasca_analytics.component.base.BaseComponent
:param from_component: Where we came from.
"""
for connected_node in self._links[from_component]:
# Live data processors are also doing a map, they add
# the causality bit to each element in the stream.
if isinstance(connected_node, mldp.BaseLDP):
logger.debug("Connecting {} to {}".format(from_component,
connected_node))
new_dstream = connected_node.map_dstream(dstream)
self._connect_dependents_phase2(new_dstream, connected_node)
# Sink are at the end of the branch!
if isinstance(connected_node, msink.BaseSink):
logger.debug("Sink {} into {}".format(from_component,
connected_node))
connected_node.sink_dstream(dstream)
def _connect_dependents_phase1(self, dstream, from_component):
"""Connect a component to its dependencies for phase 1.
All live data processors are ignored during that phase.
:type dstream: pyspark.streaming.DStream | None
:param dstream: Dstream that will be modified by dependent.
It can be None, only if from_component is aggregator,
sml or voter.
        :type from_component: monasca_analytics.component.base.BaseComponent
:param from_component: Where we came from.
"""
for connected_node in self._links[from_component]:
# Ingestors "map" the dstream. They are mainly doing worker
# specific transformation. Like parsing and vectorizing the
# data.
if isinstance(connected_node, bi.BaseIngestor):
logger.debug("Stream from {} to {}"
.format(from_component, connected_node))
new_dstream = connected_node.map_dstream(dstream)
# We then connect directly this stream to the orchestrator
self._orchestrator.accumulate_dstream_samples(new_dstream)
# And we look for sink if any
self._connect_dependents_phase1(new_dstream, connected_node)
# Sink are at the end of the branch!
if isinstance(connected_node, msink.BaseSink):
logger.debug("Sink {} into {}"
.format(from_component, connected_node))
connected_node.sink_dstream(dstream)
def _propagate_feature_list(self):
"""Set the appropriate features list on each live data processor."""
for source in self._sources:
features = source.get_feature_list()
for connected_node in self._links[source]:
propagated = False
if isinstance(connected_node, bi.BaseIngestor):
connected_node.set_feature_list(features)
propagated = True
if isinstance(connected_node, mldp.BaseLDP):
connected_node.set_feature_list(features)
propagated = True
if propagated:
logger.info("Feature list {} propagated from {} to {}"
.format(features, source, connected_node))
|
[
"monasca_analytics.spark.streaming_context.create_streaming_context",
"pyspark.SparkContext",
"monasca_analytics.spark.aggregator.Aggregator",
"monasca_analytics.config.config.instantiate_components",
"monasca_analytics.config.config.collect_sources",
"logging.getLogger"
] |
[((1063, 1090), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1080, 1090), False, 'import logging\n'), ((1745, 1809), 'pyspark.SparkContext', 'pyspark.SparkContext', ([], {'appName': "_config['spark_config']['appName']"}), "(appName=_config['spark_config']['appName'])\n", (1765, 1809), False, 'import pyspark\n'), ((1843, 1899), 'monasca_analytics.spark.streaming_context.create_streaming_context', 'streamingctx.create_streaming_context', (['self._sc', '_config'], {}), '(self._sc, _config)\n', (1880, 1899), True, 'import monasca_analytics.spark.streaming_context as streamingctx\n'), ((2198, 2233), 'monasca_analytics.config.config.collect_sources', 'config.collect_sources', (['self._links'], {}), '(self._links)\n', (2220, 2233), True, 'import monasca_analytics.config.config as config\n'), ((2333, 2353), 'monasca_analytics.spark.aggregator.Aggregator', 'agg.Aggregator', (['self'], {}), '(self)\n', (2347, 2353), True, 'import monasca_analytics.spark.aggregator as agg\n'), ((1497, 1535), 'monasca_analytics.config.config.instantiate_components', 'config.instantiate_components', (['_config'], {}), '(_config)\n', (1526, 1535), True, 'import monasca_analytics.config.config as config\n'), ((1591, 1647), 'monasca_analytics.spark.streaming_context.create_streaming_context', 'streamingctx.create_streaming_context', (['self._sc', '_config'], {}), '(self._sc, _config)\n', (1628, 1647), True, 'import monasca_analytics.spark.streaming_context as streamingctx\n')]
|
import os
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import tensorflow.keras.backend as K
from models import load_model, load_adfmodel
import instances
# GENERAL PARAMETERS
MODE = 'joint_untargeted'
IMG_SHAPE = [28, 28]
# LOAD MODEL
model = load_model()
generator = instances.load_generator()
def get_data_sample(index):
return (
generator[index],
os.path.splitext(os.path.split(generator.filenames[index])[1])[0],
)
def store_single_result(mapping, name, fname, rate, d, subdir):
savedir = os.path.join('results', subdir, fname)
os.makedirs(savedir, exist_ok=True)
# print(mapping.shape)
mapping = np.reshape(mapping, IMG_SHAPE)
# for line in mapping:
# print(line)
# raise Exception
# for row in mapping:
# print(row)
# np.save(f'/home/Morgan/fw-rde/mnist/results/{name}.npy', mapping)
# print(np.max(mapping))
# print(np.min(mapping))
# mapping = mapping - np.min(mapping)
# mapping = mapping / np.max(mapping)
# for row in mapping:
# print(row)
plt.imsave(
os.path.join(
savedir,
f'{name}_rate-{rate}_d-{d}.png'
),
mapping.squeeze(),
cmap='Greys',
vmin=np.min(mapping),
vmax=np.max(mapping),
format='png',
)
def store_pert_img(x, s, p, name, fname, rate, d, subdir):
savedir = os.path.join('results', subdir, fname)
os.makedirs(savedir, exist_ok=True)
# print(mapping.shape)
x = np.reshape(x, IMG_SHAPE)
s = np.reshape(s, IMG_SHAPE)
p = np.reshape(p, IMG_SHAPE)
x = x + s*p
# for line in mapping:
# print(line)
# raise Exception
# np.save(f'/home/Morgan/fw-rde/mnist/results/{name}.npy', x)
plt.imsave(
os.path.join(
savedir,
f'{name}_rate-{rate}_d-{d}.png'
),
x.squeeze(),
cmap='Greys',
vmin=np.min(x),
vmax=np.max(x),
format='jpg',
)
def get_distortion(x, model=model, mode=MODE):
x_tensor = tf.constant(x, dtype=tf.float32)
s_flat = tf.placeholder(tf.float32, (np.prod(x_tensor.shape),))
s_tensor = tf.reshape(s_flat, x.shape)
p_flat = tf.placeholder(tf.float32, (np.prod(x_tensor.shape),))
p_tensor = tf.reshape(p_flat, x.shape)
pred = model.predict(x)
node = np.argpartition(pred[0, ...], -2)[-1]
# target = pred[0, node]
unprocessed = x + s_tensor * p_tensor
# network_input = (tf.tanh((unprocessed + 37.96046)/255 * 2 - 1) + 1) / 2 * 255 - 37
network_input = tf.clip_by_value(unprocessed, clip_value_min=np.min(x), clip_value_max=np.max(x))
out = model(network_input)
if mode == 'joint_untargeted':
loss = tf.squeeze(out[..., node])
gradient = K.gradients(loss, [s_flat, p_flat])
f_out = K.function([s_flat, p_flat], [loss])
f_gradient = K.function([s_flat, p_flat], [gradient])
# a = tf.random.uniform(shape=s_flat.shape)
# b = tf.random.uniform(shape=s_flat.shape)
#
# c = f_out([a, b])
# d = f_gradient([a, b])
return lambda s, p: f_out([s, p])[0], lambda s, p: f_gradient([s, p])[0][0], lambda s, p: f_gradient([s, p])[0][1], node, pred
def print_model_prediction(x, s, p):
print('\n------------------------\n')
print(np.max(x))
print(np.min(x))
print('\n------------------------\n')
print(np.max(s))
print(np.min(s))
print('\n------------------------\n')
print(np.max(p))
print(np.min(p))
print('\n------------------------\n')
s = np.reshape(s, x.shape)
p = np.reshape(p, x.shape)
pert_input = x + s * p
print(np.max(pert_input))
print(np.min(pert_input))
print('\n------------------------\n')
# for t in [x, pert_input]:
# print('\n\n\n\n')
# for row in t:
# print(row)
# raise(Exception)
# s = tf.reshape(s, x.shape)
# p = tf.reshape(p, x.shape)
# pert_input = x+s*p
pert_input = tf.convert_to_tensor(pert_input)
# pert_input = (tf.tanh((pert_input + 37.96046) / 255 * 2 - 1) + 1) / 2 * 255 - 37
pert_input = tf.clip_by_value(pert_input, clip_value_min=np.min(x), clip_value_max=np.max(x))
sess = tf.Session()
with sess.as_default():
pert_input = pert_input.eval()
print('\n------------------------\n')
print(pert_input.shape)
print(np.max(pert_input))
print(np.min(pert_input))
print('\n------------------------\n')
# pert_input[pert_input < -37.96046] = -37.96046
# pert_input[pert_input > 255-37.96046] = 255-37.96046
pred0 = model.predict(x, steps=1)
pred1 = model.predict(pert_input, steps=1)
print(f'orig pred: {pred0}')
print(f'pert pred: {pred1}')
# x, fname = get_data_sample(0)
#
# f, gs, gp, n, p = get_distortion(x)
#
# a = tf.random.uniform(shape=[28*28])
# b = tf.random.uniform(shape=[28*28])
#
# out = f(a,b)
#
#
# _=0
|
[
"instances.load_generator",
"os.makedirs",
"tensorflow.convert_to_tensor",
"tensorflow.reshape",
"tensorflow.Session",
"tensorflow.constant",
"tensorflow.keras.backend.function",
"models.load_model",
"tensorflow.keras.backend.gradients",
"numpy.argpartition",
"numpy.max",
"numpy.reshape",
"numpy.min",
"tensorflow.squeeze",
"os.path.split",
"os.path.join",
"numpy.prod"
] |
[((276, 288), 'models.load_model', 'load_model', ([], {}), '()\n', (286, 288), False, 'from models import load_model, load_adfmodel\n'), ((302, 328), 'instances.load_generator', 'instances.load_generator', ([], {}), '()\n', (326, 328), False, 'import instances\n'), ((559, 597), 'os.path.join', 'os.path.join', (['"""results"""', 'subdir', 'fname'], {}), "('results', subdir, fname)\n", (571, 597), False, 'import os\n'), ((602, 637), 'os.makedirs', 'os.makedirs', (['savedir'], {'exist_ok': '(True)'}), '(savedir, exist_ok=True)\n', (613, 637), False, 'import os\n'), ((679, 709), 'numpy.reshape', 'np.reshape', (['mapping', 'IMG_SHAPE'], {}), '(mapping, IMG_SHAPE)\n', (689, 709), True, 'import numpy as np\n'), ((1421, 1459), 'os.path.join', 'os.path.join', (['"""results"""', 'subdir', 'fname'], {}), "('results', subdir, fname)\n", (1433, 1459), False, 'import os\n'), ((1464, 1499), 'os.makedirs', 'os.makedirs', (['savedir'], {'exist_ok': '(True)'}), '(savedir, exist_ok=True)\n', (1475, 1499), False, 'import os\n'), ((1535, 1559), 'numpy.reshape', 'np.reshape', (['x', 'IMG_SHAPE'], {}), '(x, IMG_SHAPE)\n', (1545, 1559), True, 'import numpy as np\n'), ((1568, 1592), 'numpy.reshape', 'np.reshape', (['s', 'IMG_SHAPE'], {}), '(s, IMG_SHAPE)\n', (1578, 1592), True, 'import numpy as np\n'), ((1601, 1625), 'numpy.reshape', 'np.reshape', (['p', 'IMG_SHAPE'], {}), '(p, IMG_SHAPE)\n', (1611, 1625), True, 'import numpy as np\n'), ((2080, 2112), 'tensorflow.constant', 'tf.constant', (['x'], {'dtype': 'tf.float32'}), '(x, dtype=tf.float32)\n', (2091, 2112), True, 'import tensorflow as tf\n'), ((2196, 2223), 'tensorflow.reshape', 'tf.reshape', (['s_flat', 'x.shape'], {}), '(s_flat, x.shape)\n', (2206, 2223), True, 'import tensorflow as tf\n'), ((2308, 2335), 'tensorflow.reshape', 'tf.reshape', (['p_flat', 'x.shape'], {}), '(p_flat, x.shape)\n', (2318, 2335), True, 'import tensorflow as tf\n'), ((2801, 2836), 'tensorflow.keras.backend.gradients', 'K.gradients', (['loss', '[s_flat, p_flat]'], {}), '(loss, [s_flat, p_flat])\n', (2812, 2836), True, 'import tensorflow.keras.backend as K\n'), ((2849, 2885), 'tensorflow.keras.backend.function', 'K.function', (['[s_flat, p_flat]', '[loss]'], {}), '([s_flat, p_flat], [loss])\n', (2859, 2885), True, 'import tensorflow.keras.backend as K\n'), ((2903, 2943), 'tensorflow.keras.backend.function', 'K.function', (['[s_flat, p_flat]', '[gradient]'], {}), '([s_flat, p_flat], [gradient])\n', (2913, 2943), True, 'import tensorflow.keras.backend as K\n'), ((3573, 3595), 'numpy.reshape', 'np.reshape', (['s', 'x.shape'], {}), '(s, x.shape)\n', (3583, 3595), True, 'import numpy as np\n'), ((3604, 3626), 'numpy.reshape', 'np.reshape', (['p', 'x.shape'], {}), '(p, x.shape)\n', (3614, 3626), True, 'import numpy as np\n'), ((4001, 4033), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['pert_input'], {}), '(pert_input)\n', (4021, 4033), True, 'import tensorflow as tf\n'), ((4231, 4243), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (4241, 4243), True, 'import tensorflow as tf\n'), ((1119, 1173), 'os.path.join', 'os.path.join', (['savedir', 'f"""{name}_rate-{rate}_d-{d}.png"""'], {}), "(savedir, f'{name}_rate-{rate}_d-{d}.png')\n", (1131, 1173), False, 'import os\n'), ((1806, 1860), 'os.path.join', 'os.path.join', (['savedir', 'f"""{name}_rate-{rate}_d-{d}.png"""'], {}), "(savedir, f'{name}_rate-{rate}_d-{d}.png')\n", (1818, 1860), False, 'import os\n'), ((2376, 2409), 'numpy.argpartition', 'np.argpartition', (['pred[0, ...]', '(-2)'], {}), '(pred[0, ...], -2)\n', (2391, 2409), True, 'import numpy as np\n'), ((2758, 2784), 'tensorflow.squeeze', 'tf.squeeze', (['out[..., node]'], {}), '(out[..., node])\n', (2768, 2784), True, 'import tensorflow as tf\n'), ((3323, 3332), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (3329, 3332), True, 'import numpy as np\n'), ((3344, 3353), 'numpy.min', 'np.min', (['x'], {}), '(x)\n', (3350, 3353), True, 'import numpy as np\n'), ((3407, 3416), 'numpy.max', 'np.max', (['s'], {}), '(s)\n', (3413, 3416), True, 'import numpy as np\n'), ((3428, 3437), 'numpy.min', 'np.min', (['s'], {}), '(s)\n', (3434, 3437), True, 'import numpy as np\n'), ((3491, 3500), 'numpy.max', 'np.max', (['p'], {}), '(p)\n', (3497, 3500), True, 'import numpy as np\n'), ((3512, 3521), 'numpy.min', 'np.min', (['p'], {}), '(p)\n', (3518, 3521), True, 'import numpy as np\n'), ((3666, 3684), 'numpy.max', 'np.max', (['pert_input'], {}), '(pert_input)\n', (3672, 3684), True, 'import numpy as np\n'), ((3696, 3714), 'numpy.min', 'np.min', (['pert_input'], {}), '(pert_input)\n', (3702, 3714), True, 'import numpy as np\n'), ((4392, 4410), 'numpy.max', 'np.max', (['pert_input'], {}), '(pert_input)\n', (4398, 4410), True, 'import numpy as np\n'), ((4422, 4440), 'numpy.min', 'np.min', (['pert_input'], {}), '(pert_input)\n', (4428, 4440), True, 'import numpy as np\n'), ((1271, 1286), 'numpy.min', 'np.min', (['mapping'], {}), '(mapping)\n', (1277, 1286), True, 'import numpy as np\n'), ((1301, 1316), 'numpy.max', 'np.max', (['mapping'], {}), '(mapping)\n', (1307, 1316), True, 'import numpy as np\n'), ((1952, 1961), 'numpy.min', 'np.min', (['x'], {}), '(x)\n', (1958, 1961), True, 'import numpy as np\n'), ((1976, 1985), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (1982, 1985), True, 'import numpy as np\n'), ((2154, 2177), 'numpy.prod', 'np.prod', (['x_tensor.shape'], {}), '(x_tensor.shape)\n', (2161, 2177), True, 'import numpy as np\n'), ((2266, 2289), 'numpy.prod', 'np.prod', (['x_tensor.shape'], {}), '(x_tensor.shape)\n', (2273, 2289), True, 'import numpy as np\n'), ((2640, 2649), 'numpy.min', 'np.min', (['x'], {}), '(x)\n', (2646, 2649), True, 'import numpy as np\n'), ((2666, 2675), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (2672, 2675), True, 'import numpy as np\n'), ((4182, 4191), 'numpy.min', 'np.min', (['x'], {}), '(x)\n', (4188, 4191), True, 'import numpy as np\n'), ((4208, 4217), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (4214, 4217), True, 'import numpy as np\n'), ((423, 464), 'os.path.split', 'os.path.split', (['generator.filenames[index]'], {}), '(generator.filenames[index])\n', (436, 464), False, 'import os\n')]
|
from abc import ABC, abstractmethod
import gym
import numpy as np
from minerl.herobraine.hero import AgentHandler
from minerl.herobraine.hero import KEYMAP
from minerl.herobraine.hero import spaces
from minerl.herobraine.hero.spaces import DiscreteRange
class CommandAction(AgentHandler):
"""
An action handler based on commands
    # Todo: support blacklisting commands. (note this has to work with merging somehow)
"""
def __init__(self, command: str, space: gym.Space):
"""
Initializes the space of the handler with a gym.spaces.Dict
of all of the spaces for each individual command.
"""
self._command = command
super().__init__(space)
@property
def command(self):
return self._command
def to_string(self):
return self._command
def to_hero(self, x):
"""
Returns a command string for the multi command action.
:param x:
:return:
"""
cmd = ""
verb = self.command
if isinstance(x, np.ndarray):
flat = x.flatten().tolist()
flat = [str(y) for y in flat]
adjective = " ".join(flat)
elif isinstance(x, list):
adjective = " ".join([str(y) for y in x])
else:
adjective = str(x)
cmd += "{} {}".format(
verb, adjective)
return cmd
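    # Illustrative only (not part of the original handler): a handler whose
    # command is "move" would turn x = [1, -1] into the command string
    # "move 1 -1"; a numpy array is flattened and joined the same way.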
def __or__(self, other):
if not self.command == other.command:
raise ValueError("Command must be the same between {} and {}".format(self.command, other.command))
return self
class ItemListCommandAction(CommandAction):
"""
An action handler based on a list of items
    The action space is determined by the length of the list plus one
"""
def __init__(self, command: str, items: list):
"""
Initializes the space of the handler with a gym.spaces.Dict
of all of the spaces for each individual command.
"""
# TODO must check that the first element is 'none' and last elem is 'other'
self._command = command
self._items = items
self._univ_items = ['minecraft:' + item for item in items]
assert 'none' in self._items
self._default = 'none'
super().__init__(self._command, spaces.Enum(*self._items, default=self._default))
@property
def items(self):
return self._items
@property
def universal_items(self):
return self._univ_items
@property
def default(self):
return self._default
def to_hero(self, x):
"""
Returns a command string for the multi command action.
:param x:
:return:
"""
cmd = ""
verb = self._command
if isinstance(x, np.ndarray):
raise NotImplementedError
elif isinstance(x, list):
raise NotImplementedError
elif 0 < x < len(self._items):
adjective = self._items[x]
cmd += "{} {}".format(
verb, adjective)
else:
cmd += "{} NONE".format(
verb)
return cmd
def from_universal(self, x):
raise NotImplementedError()
def __or__(self, other):
"""
Merges two ItemListCommandActions into one by unioning their items.
Assert that the commands are the same.
"""
if not isinstance(other, self.__class__):
raise TypeError("other must be an instance of ItemListCommandAction")
if self._command != other._command:
raise ValueError("Command must be the same for merging")
new_items = list(set(self._items) | set(other._items))
return self.__class__(new_items)
def __eq__(self, other):
"""
        Asserts equality between item list command actions.
"""
if not isinstance(other, ItemListCommandAction):
return False
if self._command != other._command:
return False
# Check that all items are in self._items
if not all(x in self._items for x in other._items):
return False
# Check that all items are in other._items
if not all(x in other._items for x in self._items):
return False
return True
class CraftItem(ItemListCommandAction):
"""
An action handler for crafting items
    Note when used alongside Craft Item Nearby, block lists must be disjoint or from_universal will fire multiple
times
"""
_command = "craft"
def to_string(self):
return "craft"
def __init__(self, items: list):
"""
Initializes the space of the handler to be one for each item in the list plus one for the
default no-craft action (command 0)
Items are minecraft resource ID's
"""
super().__init__(self._command, items)
def from_universal(self, obs):
if 'diff' in obs and 'crafted' in obs['diff'] and len(obs['diff']['crafted']) > 0:
try:
x = self._univ_items.index(obs['diff']['crafted'][0]['item'])
return obs['diff']['crafted'][0]['item'].split('minecraft:')[-1]
except ValueError:
return self._default
# return self._items.index('other')
else:
return self._default
class CraftItemNearby(CraftItem):
"""
An action handler for crafting items when agent is in view of a crafting table
    Note when used alongside Craft Item, item lists must be disjoint or from_universal will fire multiple times
"""
_command = "craftNearby"
def to_string(self):
return 'nearbyCraft'
class SmeltItem(CraftItem):
def from_universal(self, obs):
if 'diff' in obs and 'smelted' in obs['diff'] and len(obs['diff']['smelted']) > 0:
try:
x = self._univ_items.index(obs['diff']['smelted'][0]['item'])
return obs['diff']['smelted'][0]['item'].split('minecraft:')[-1]
except ValueError:
return self._default
# return self._items.index('other')
else:
return self._default
class SmeltItemNearby(SmeltItem):
"""
An action handler for crafting items when agent is in view of a crafting table
    Note when used alongside Craft Item, block lists must be disjoint or from_universal will fire multiple times
"""
_command = 'smeltNearby'
def to_string(self):
return 'nearbySmelt'
class PlaceBlock(ItemListCommandAction):
"""
An action handler for placing a specific block
"""
def to_string(self):
return 'place'
def __init__(self, blocks: list):
"""
Initializes the space of the handler to be one for each item in the list
        Requires 0th item to be 'none' and last item to be 'other' corresponding to
no-op and non-listed item respectively
"""
self._items = blocks
self._command = 'place'
super().__init__(self._command, self._items)
self._prev_inv = None
# print(self._items)
# print(self._univ_items)
def from_universal(self, obs):
try:
for action in obs['custom_action']['actions'].keys():
try:
if int(action) == -99 and self._prev_inv is not None:
item_name = self._prev_inv[int(-10 + obs['hotbar'])]['name'].split("minecraft:")[-1]
if item_name not in self._items:
raise ValueError()
else:
return item_name
except ValueError:
return self._default
except TypeError:
print('Saw a type error in PlaceBlock')
raise TypeError
except KeyError:
return self._default
finally:
try:
self._prev_inv = obs['slots']['gui']['slots']
except KeyError:
self._prev_inv = None
return self._default
class EquipItem(ItemListCommandAction):
"""
An action handler for observing a list of equipped items
"""
def to_string(self):
return 'equip'
def __init__(self, items: list):
"""
Initializes the space of the handler to be one for each item in the list plus one for the
default no-craft action
"""
self._items = items
self._command = 'equip'
super().__init__(self._command, self._items)
self.previous = self._default
# print(self._items)
# print(self._univ_items)
def from_universal(self, obs):
try:
if obs['slots']['gui']['type'] == 'class net.minecraft.inventory.ContainerPlayer':
hotbar_index = int(obs['hotbar'])
item = self._univ_items.index(obs['slots']['gui']['slots'][-10 + hotbar_index]['name'])
if item != self.previous:
self.previous = item
return obs['slots']['gui']['slots'][-10 + hotbar_index]['name'].split('minecraft:')[-1]
except KeyError:
return self._default
except ValueError:
return self._default
# return self._items.index('other')
return self._default
def reset(self):
self.previous = self._default
class ContinuousMovementAction(CommandAction, ABC):
"""
Handles player control actions
"""
def add_to_mission_spec(self, mission_spec):
mission_spec.allowAllContinuousMovementCommands()
pass
class Camera(ContinuousMovementAction):
"""
Uses <delta_pitch, delta_yaw> vector in degrees to rotate the camera. pitch range [-180, 180], yaw range [-180, 180]
"""
def to_string(self):
return 'camera'
def __init__(self):
self._command = 'camera'
super().__init__(self.command, spaces.Box(low=-180, high=180, shape=[2], dtype=np.float32))
def from_universal(self, x):
if 'custom_action' in x and 'cameraYaw' in x['custom_action'] and 'cameraPitch' in x['custom_action']:
delta_pitch = x['custom_action']['cameraPitch']
delta_yaw = x['custom_action']['cameraYaw']
assert not np.isnan(np.sum(x['custom_action']['cameraYaw'])), "NAN in action!"
assert not np.isnan(np.sum(x['custom_action']['cameraPitch'])), "NAN in action!"
return np.array([-delta_pitch, -delta_yaw], dtype=np.float32)
else:
return np.array([0.0, 0.0], dtype=np.float32)
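    # Illustrative mapping (hedged, based only on the code above): a universal
    # observation {'custom_action': {'cameraPitch': 5.0, 'cameraYaw': -2.0}}
    # yields np.array([-5.0, 2.0], dtype=np.float32), i.e. both deltas negated.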
class KeyboardAction(ContinuousMovementAction):
"""
Handles keyboard actions.
"""
def to_string(self):
return self.command
def __init__(self, command, *keys):
if len(keys) == 2:
# Like move or strafe. Example: -1 for left, 1 for right
super().__init__(command, DiscreteRange(-1, 2))
else:
# Its a n-key action with discrete items.
# Eg hotbar actions
super().__init__(command, spaces.Discrete(len(keys) + 1))
self.keys = keys
def from_universal(self, x):
actions_mapped = list(x['custom_action']['actions'].keys())
# actions_mapped is just the raw key codes.
# for action in x['custom_action']['actions'].keys():
# try:
# actions_mapped += [KEYMAP[action]]
# except KeyError:
# pass
offset = self.space.begin if isinstance(self.space, DiscreteRange) else 0
default = 0
for i, key in enumerate(self.keys):
if key in actions_mapped:
if isinstance(self.space, DiscreteRange):
return i * 2 + offset
else:
return i + 1 + offset
# if "BUTTON1" in actions_mapped:
# print("BUTTON1")
        # If no key was pressed.
return default
class SingleKeyboardAction(ContinuousMovementAction):
"""
Handles keyboard actions.
"""
def to_string(self):
return self.command
def __init__(self, command, key):
super().__init__(command, spaces.Discrete(2))
self.key = key
def from_universal(self, x):
if 'custom_action' in x and 'actions' in x['custom_action']:
if self.key in x['custom_action']['actions'].keys():
return 1
else:
return 0
def __or__(self, other):
"""
Combines two keyboard actions into one by unioning their keys.
"""
if not isinstance(other, KeyboardAction):
raise TypeError("other must be an instance of KeyboardAction")
new_keys = list(set(self.keys + other.keys))
return KeyboardAction(self._command, new_keys)
def __eq__(self, other):
"""
Tests for equality between two keyboard actions.
"""
if not isinstance(other, KeyboardAction):
return False
return self._command == other._command and self.keys == other.keys
|
[
"numpy.sum",
"minerl.herobraine.hero.spaces.DiscreteRange",
"numpy.array",
"minerl.herobraine.hero.spaces.Box",
"minerl.herobraine.hero.spaces.Enum",
"minerl.herobraine.hero.spaces.Discrete"
] |
[((2307, 2355), 'minerl.herobraine.hero.spaces.Enum', 'spaces.Enum', (['*self._items'], {'default': 'self._default'}), '(*self._items, default=self._default)\n', (2318, 2355), False, 'from minerl.herobraine.hero import spaces\n'), ((9995, 10054), 'minerl.herobraine.hero.spaces.Box', 'spaces.Box', ([], {'low': '(-180)', 'high': '(180)', 'shape': '[2]', 'dtype': 'np.float32'}), '(low=-180, high=180, shape=[2], dtype=np.float32)\n', (10005, 10054), False, 'from minerl.herobraine.hero import spaces\n'), ((10520, 10574), 'numpy.array', 'np.array', (['[-delta_pitch, -delta_yaw]'], {'dtype': 'np.float32'}), '([-delta_pitch, -delta_yaw], dtype=np.float32)\n', (10528, 10574), True, 'import numpy as np\n'), ((10608, 10646), 'numpy.array', 'np.array', (['[0.0, 0.0]'], {'dtype': 'np.float32'}), '([0.0, 0.0], dtype=np.float32)\n', (10616, 10646), True, 'import numpy as np\n'), ((12241, 12259), 'minerl.herobraine.hero.spaces.Discrete', 'spaces.Discrete', (['(2)'], {}), '(2)\n', (12256, 12259), False, 'from minerl.herobraine.hero import spaces\n'), ((10973, 10993), 'minerl.herobraine.hero.spaces.DiscreteRange', 'DiscreteRange', (['(-1)', '(2)'], {}), '(-1, 2)\n', (10986, 10993), False, 'from minerl.herobraine.hero.spaces import DiscreteRange\n'), ((10349, 10388), 'numpy.sum', 'np.sum', (["x['custom_action']['cameraYaw']"], {}), "(x['custom_action']['cameraYaw'])\n", (10355, 10388), True, 'import numpy as np\n'), ((10440, 10481), 'numpy.sum', 'np.sum', (["x['custom_action']['cameraPitch']"], {}), "(x['custom_action']['cameraPitch'])\n", (10446, 10481), True, 'import numpy as np\n')]
|
from app import crud, database, schemas
from .IO import URLs
def create_users():
"""
create test users:
- username: admin, password: <PASSWORD>, isadmin: "true"
- username: user2, password: <PASSWORD>, isadmin: "false"
"""
if not crud.get_user(
db=database.SessionLocal(), username="admin"
) and not crud.get_admins(db=database.SessionLocal()):
user = schemas.UserCreate(
**{"username": "admin", "password": "<PASSWORD>", "isadmin": "true"}
)
crud.create_user(db=database.SessionLocal(), user=user)
if not crud.get_user(db=database.SessionLocal(), username="user2"):
user = schemas.UserCreate(
**{"username": "user2", "password": "<PASSWORD>", "isadmin": "false"}
)
crud.create_user(db=database.SessionLocal(), user=user)
def create_configs():
"""
create test configs
"""
# admin default config for testing
config1 = schemas.ConfigCreate(
**{
"owner": "admin",
"name": "api-1",
"metadata": {
"name": "SimpleAPI",
"url": "http://127.0.0.1:5057",
"database": {
"name": "apidb",
"type": "sql",
"ms": "postgresql",
"host": "0.0.0.0",
"port": "5432",
"enabled": "true",
"running": "true",
},
"enabled": "true",
"running": "true",
},
"note": "The api has been enabled.",
}
)
# user2 default config for testing
config2 = schemas.ConfigCreate(
**{
"owner": "user2",
"name": "api-2",
"metadata": {
"name": "SimpleAPI",
"url": "http://127.0.0.1:5057",
"database": {
"name": "apidb",
"type": "sql",
"ms": "postgresql",
"host": "0.0.0.0",
"port": "5432",
"enabled": "true",
"running": "false",
},
"enabled": "true",
"running": "false",
},
"note": "The api has been enabled without the DB!",
}
)
# create admin config
if not crud.get_config(db=database.SessionLocal(), name="api-1", owner="admin"):
crud.create_config(db=database.SessionLocal(), config=config1, owner="admin")
# create user2 config
if not crud.get_config(db=database.SessionLocal(), name="api-2", owner="user2"):
crud.create_config(db=database.SessionLocal(), config=config2, owner="user2")
|
[
"app.schemas.ConfigCreate",
"app.database.SessionLocal",
"app.schemas.UserCreate"
] |
[((963, 1321), 'app.schemas.ConfigCreate', 'schemas.ConfigCreate', ([], {}), "(**{'owner': 'admin', 'name': 'api-1', 'metadata': {\n 'name': 'SimpleAPI', 'url': 'http://127.0.0.1:5057', 'database': {\n 'name': 'apidb', 'type': 'sql', 'ms': 'postgresql', 'host': '0.0.0.0',\n 'port': '5432', 'enabled': 'true', 'running': 'true'}, 'enabled':\n 'true', 'running': 'true'}, 'note': 'The api has been enabled.'})\n", (983, 1321), False, 'from app import crud, database, schemas\n'), ((1685, 2064), 'app.schemas.ConfigCreate', 'schemas.ConfigCreate', ([], {}), "(**{'owner': 'user2', 'name': 'api-2', 'metadata': {\n 'name': 'SimpleAPI', 'url': 'http://127.0.0.1:5057', 'database': {\n 'name': 'apidb', 'type': 'sql', 'ms': 'postgresql', 'host': '0.0.0.0',\n 'port': '5432', 'enabled': 'true', 'running': 'false'}, 'enabled':\n 'true', 'running': 'false'}, 'note':\n 'The api has been enabled without the DB!'})\n", (1705, 2064), False, 'from app import crud, database, schemas\n'), ((408, 500), 'app.schemas.UserCreate', 'schemas.UserCreate', ([], {}), "(**{'username': 'admin', 'password': '<PASSWORD>',\n 'isadmin': 'true'})\n", (426, 500), False, 'from app import crud, database, schemas\n'), ((670, 763), 'app.schemas.UserCreate', 'schemas.UserCreate', ([], {}), "(**{'username': 'user2', 'password': '<PASSWORD>',\n 'isadmin': 'false'})\n", (688, 763), False, 'from app import crud, database, schemas\n'), ((547, 570), 'app.database.SessionLocal', 'database.SessionLocal', ([], {}), '()\n', (568, 570), False, 'from app import crud, database, schemas\n'), ((611, 634), 'app.database.SessionLocal', 'database.SessionLocal', ([], {}), '()\n', (632, 634), False, 'from app import crud, database, schemas\n'), ((810, 833), 'app.database.SessionLocal', 'database.SessionLocal', ([], {}), '()\n', (831, 833), False, 'from app import crud, database, schemas\n'), ((2427, 2450), 'app.database.SessionLocal', 'database.SessionLocal', ([], {}), '()\n', (2448, 2450), False, 'from app import crud, database, schemas\n'), ((2512, 2535), 'app.database.SessionLocal', 'database.SessionLocal', ([], {}), '()\n', (2533, 2535), False, 'from app import crud, database, schemas\n'), ((2625, 2648), 'app.database.SessionLocal', 'database.SessionLocal', ([], {}), '()\n', (2646, 2648), False, 'from app import crud, database, schemas\n'), ((2710, 2733), 'app.database.SessionLocal', 'database.SessionLocal', ([], {}), '()\n', (2731, 2733), False, 'from app import crud, database, schemas\n'), ((292, 315), 'app.database.SessionLocal', 'database.SessionLocal', ([], {}), '()\n', (313, 315), False, 'from app import crud, database, schemas\n'), ((367, 390), 'app.database.SessionLocal', 'database.SessionLocal', ([], {}), '()\n', (388, 390), False, 'from app import crud, database, schemas\n')]
|
#!/usr/bin/python3
#Classes for side project with Gabe
#Authors:
# <NAME> <<EMAIL>>
# ...
#imports needed
from random import randint
#classes
class Animal:
"""
This is the main animal class of the mini-game, all other animal classes come from this one
"""
def __init__(self):
#agility attributes
self.speed = 0
#damage attributes
self.attack = 0
#health attributes
self.health = 0
self.defense = 0
self.defenseFactor = 0
def animalSound(self):
return ''
def attackDamage(self):
#Simple method that returns the total damage this animal will do
return(self.speed*self.attack)#'*' is times, 3*4=12
def defensePadding(self, damageIn):
        #Simple method that will be run upon receiving damage, helps negate some of the damage coming in
return(damageIn-(self.defense+(randint(1, self.defenseFactor))))#Gabe, randint(x, y) gives us a random number in between those two points
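    # Worked example (illustrative numbers, not from the game design): with
    # defense=2 and defenseFactor=3, an incoming hit of 10 becomes
    # 10 - (2 + randint(1, 3)), i.e. somewhere between 5 and 7 damage.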
def takeDamage(self, damage):
#Simple method that removes health
self.health -= damage
"""
TODO - Gabe:
Create these classes in this file:
class Reptile(Animal): <- the Animal inside the parenthesis means we are inheriting from the Animal class, if you read you know what it means. If you didn't, go read it.
{...}
class Mammal(Animal):
{...}
class Bird(Animal):
{...}
START THESE BY TONIGHT, use the default one I made to help you.
add your name to the authors(replace the '...') when you work on it.
"""
|
[
"random.randint"
] |
[((914, 944), 'random.randint', 'randint', (['(1)', 'self.defenseFactor'], {}), '(1, self.defenseFactor)\n', (921, 944), False, 'from random import randint\n')]
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2018-2019 CERN.
#
# invenio-app-ils is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Resolve the referred Series for a Document."""
import jsonresolver
from invenio_pidstore.errors import PIDDoesNotExistError
from werkzeug.routing import Rule
from ...api import Document, Series
from ..resolver import get_field_value_for_record as get_field_value
# Note: there must be only one resolver per file,
# otherwise only the last one is registered
@jsonresolver.hookimpl
def jsonresolver_loader(url_map):
"""Resolve the referred Series for a Document record."""
from flask import current_app
def series_resolver(document_pid):
"""Resolve the referred Series for a Document record."""
try:
series_objs = get_field_value(Document, document_pid,
"series_objs")
series = []
for obj in series_objs:
record = Series.get_record_by_pid(obj["pid"])
keep_keys = ("series_pid", "mode_of_issuance", "issn", "title")
for key in list(record):
if key not in keep_keys:
del record[key]
record["volume"] = obj["volume"]
series.append(record)
return series
except (KeyError, PIDDoesNotExistError):
return {}
url_map.add(
Rule(
"/api/resolver/documents/<document_pid>/series",
endpoint=series_resolver,
host=current_app.config.get("JSONSCHEMAS_HOST"),
)
)
|
[
"flask.current_app.config.get"
] |
[((1630, 1672), 'flask.current_app.config.get', 'current_app.config.get', (['"""JSONSCHEMAS_HOST"""'], {}), "('JSONSCHEMAS_HOST')\n", (1652, 1672), False, 'from flask import current_app\n')]
|
import socket
host = '192.168.4.254'
port = 12345
addr = (host, port) # specify the server to connect to
c = socket.socket()
c.connect(addr)
while True:
data = input('> ') + '\r\n'
data = data.encode()
c.send(data)
if data.strip() == b'quit':
break
    rdata = c.recv(1024).decode()  # convert the received bytes to str
print(rdata, end='')
c.close()
|
[
"socket.socket"
] |
[((88, 103), 'socket.socket', 'socket.socket', ([], {}), '()\n', (101, 103), False, 'import socket\n')]
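The client above expects a server already listening on host:port and echoing lines back. A minimal companion server sketch follows; the bind-to-all address and the plain echo behaviour are assumptions, not part of the source.
import socket

srv = socket.socket()
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
srv.bind(('', 12345))   # same port as the client; '' listens on all interfaces
srv.listen(1)
conn, addr = srv.accept()
while True:
    data = conn.recv(1024)
    if not data or data.strip() == b'quit':
        break
    conn.send(data)     # echo the line back so the client can print it
conn.close()
srv.close()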
|
#!/usr/bin/env python
import fmm3dpy as fmm
import numpy as np
#
# This is a sample code to demonstrate how to use
# the fmm libraries
#
# sample with one density, sources to sources,
# charge interactions, and potential only
#
n = 200000
nd = 1
sources = np.random.uniform(0,1,(3,n))
eps = 10**(-5)
charges = np.random.uniform(0,1,n)
out = fmm.lfmm3d(eps=eps,sources=sources,charges=charges,pg=1)
# sample with a vector of densities, sources to
# sources and targets, dipole interactions,
# potential and gradients
nd = 3
nt = 1870
targ = np.random.uniform(0,1,(3,nt))
dipvecs = np.random.uniform(0,1,(nd,3,n))
out = fmm.lfmm3d(eps=eps,sources=sources,dipvec=dipvecs,\
targets=targ,nd=nd,pg=2,pgt=2)
|
[
"numpy.random.uniform",
"fmm3dpy.lfmm3d"
] |
[((262, 293), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)', '(3, n)'], {}), '(0, 1, (3, n))\n', (279, 293), True, 'import numpy as np\n'), ((317, 343), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)', 'n'], {}), '(0, 1, n)\n', (334, 343), True, 'import numpy as np\n'), ((349, 408), 'fmm3dpy.lfmm3d', 'fmm.lfmm3d', ([], {'eps': 'eps', 'sources': 'sources', 'charges': 'charges', 'pg': '(1)'}), '(eps=eps, sources=sources, charges=charges, pg=1)\n', (359, 408), True, 'import fmm3dpy as fmm\n'), ((553, 585), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)', '(3, nt)'], {}), '(0, 1, (3, nt))\n', (570, 585), True, 'import numpy as np\n'), ((593, 628), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)', '(nd, 3, n)'], {}), '(0, 1, (nd, 3, n))\n', (610, 628), True, 'import numpy as np\n'), ((632, 722), 'fmm3dpy.lfmm3d', 'fmm.lfmm3d', ([], {'eps': 'eps', 'sources': 'sources', 'dipvec': 'dipvecs', 'targets': 'targ', 'nd': 'nd', 'pg': '(2)', 'pgt': '(2)'}), '(eps=eps, sources=sources, dipvec=dipvecs, targets=targ, nd=nd,\n pg=2, pgt=2)\n', (642, 722), True, 'import fmm3dpy as fmm\n')]
|
from srcs.sg.objects.abstract_object import AbstractObject
from srcs.errors import RuntimeASTError
class RationalNumberObject(AbstractObject):
type_mark = 'r'
def __init__(self, number, node):
self.number = int(number) if int(number) == number else number
self.node = node
self.is_real_number = True if isinstance(self.number, float) else False
@classmethod
def create_from_node(cls, node):
source = node.ast_node.tokens[0].source.replace(' ', '').replace('\t', '')
number = float(source) if '.' in source else int(source)
return cls(number, node)
def representation(self, context):
return f'{self.number}'
def type_representation(self):
if self.is_real_number:
return 'rational real number'
return 'rational number'
def real_operation(self, other, operation, operation_node):
if other.type_mark == self.type_mark:
if operation == '+':
number = self.number + other.number
return RationalNumberObject(number, self.node)
elif operation == '*':
number = self.number * other.number
return RationalNumberObject(number, self.node)
elif operation == '/':
if other.number == 0:
raise RuntimeASTError('division by 0', other.node)
number = self.number / other.number
return RationalNumberObject(number, self.node)
elif operation == '-':
number = self.number - other.number
return RationalNumberObject(number, self.node)
elif operation == '^':
if type(other.number) is float:
raise RuntimeASTError('not a whole degree', other.node)
if other.number < 0:
raise RuntimeASTError('the exponent is less than zero', other.node)
number = (self.number)**(other.number)
return RationalNumberObject(number, self.node)
elif operation == '%':
if other.number == 0:
raise RuntimeASTError('division by 0', other.node)
number = self.number % other.number
return RationalNumberObject(number, self.node)
elif other.type_mark == 'c':
if operation == '-':
inverted_other = other.real_operation(type(self)(-1), '*', self.node)
new_real_part = inverted_other.real_part + self.number
new_imaginary_part = inverted_other.imaginary_part
return type(other)(self.node, new_real_part, new_imaginary_part)
raise RuntimeASTError(f'the "{operation}" operation between {self.type_representation()} and {other.type_representation()} is not defined', operation_node)
|
[
"srcs.errors.RuntimeASTError"
] |
[((1340, 1384), 'srcs.errors.RuntimeASTError', 'RuntimeASTError', (['"""division by 0"""', 'other.node'], {}), "('division by 0', other.node)\n", (1355, 1384), False, 'from srcs.errors import RuntimeASTError\n'), ((1759, 1808), 'srcs.errors.RuntimeASTError', 'RuntimeASTError', (['"""not a whole degree"""', 'other.node'], {}), "('not a whole degree', other.node)\n", (1774, 1808), False, 'from srcs.errors import RuntimeASTError\n'), ((1872, 1933), 'srcs.errors.RuntimeASTError', 'RuntimeASTError', (['"""the exponent is less than zero"""', 'other.node'], {}), "('the exponent is less than zero', other.node)\n", (1887, 1933), False, 'from srcs.errors import RuntimeASTError\n'), ((2151, 2195), 'srcs.errors.RuntimeASTError', 'RuntimeASTError', (['"""division by 0"""', 'other.node'], {}), "('division by 0', other.node)\n", (2166, 2195), False, 'from srcs.errors import RuntimeASTError\n')]
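A short usage sketch for the arithmetic defined above. The import path is an assumption, and passing node=None is only an illustration shortcut: the node is consulted solely when an error has to be raised.
from srcs.sg.objects.rational_number_object import RationalNumberObject  # assumed module path

a = RationalNumberObject(3, None)
b = RationalNumberObject(2, None)
print(a.real_operation(b, '+', None).representation(None))  # 5
print(a.real_operation(b, '/', None).representation(None))  # 1.5
print(a.real_operation(b, '^', None).representation(None))  # 9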
|
from ..example_code import super_function_i_want_to_test, super_function_i_want_to_test2
from pytest_cases import test_target, case_tags
try: # python 3.5+
from pytest_cases import CaseData
except ImportError:
pass
@test_target(super_function_i_want_to_test)
def case_for_function1():
# type: () -> CaseData
ins = dict(a=1, b=2)
outs = 2, 3
return ins, outs, None
@test_target(super_function_i_want_to_test2)
def case_for_function2():
# type: () -> CaseData
ins = dict(a=1, b=2)
outs = 2, 3
return ins, outs, None
@case_tags(super_function_i_want_to_test, super_function_i_want_to_test2)
def case_for_function_1_and_2():
# type: () -> CaseData
ins = dict(a=1, b=2)
outs = 2, 3
return ins, outs, None
|
[
"pytest_cases.test_target",
"pytest_cases.case_tags"
] |
[((228, 270), 'pytest_cases.test_target', 'test_target', (['super_function_i_want_to_test'], {}), '(super_function_i_want_to_test)\n', (239, 270), False, 'from pytest_cases import test_target, case_tags\n'), ((395, 438), 'pytest_cases.test_target', 'test_target', (['super_function_i_want_to_test2'], {}), '(super_function_i_want_to_test2)\n', (406, 438), False, 'from pytest_cases import test_target, case_tags\n'), ((563, 635), 'pytest_cases.case_tags', 'case_tags', (['super_function_i_want_to_test', 'super_function_i_want_to_test2'], {}), '(super_function_i_want_to_test, super_function_i_want_to_test2)\n', (572, 635), False, 'from pytest_cases import test_target, case_tags\n')]
|
"""Tools for managing kernel specs"""
# Copyright (c) <NAME>
# Distributed under the terms of the Modified BSD License.
from jupyter_client.kernelspec import KernelSpec, KernelSpecManager
from traitlets import Unicode, List, Type
from traitlets.config import LoggingConfigurable
import os
class DockerKernelSpec(KernelSpec):
docker_image_name = Unicode()
docker_args = List()
class DockerKernelSpecManager(KernelSpecManager):
kernel_spec_class = Type(DockerKernelSpec, config=True,
help="""The kernel spec class. This is configurable to allow
subclassing of the KernelSpecManager for customized behavior.
"""
)
class DockerKernelManagerMixin(LoggingConfigurable):
docker_executable = Unicode("docker")
docker_default_options = List(["--rm", "--net=host"])
from jupyter_client.ioloop.manager import IOLoopKernelManager
class DockerKernelManager(IOLoopKernelManager, DockerKernelManagerMixin):
def format_kernel_cmd(self, extra_arguments=None):
cmd = super(DockerKernelManager, self).format_kernel_cmd(extra_arguments)
# Prepend all the docker stuff. This seems to be sufficient.
if self.kernel_spec.docker_image_name:
connection_path, _ = os.path.split(self.connection_file)
# Right now environment variables are dropped on the floor
docker_prefix = [self.docker_executable] \
+ self.docker_default_options \
+ self.kernel_spec.docker_args \
+ ['--user={}:{}'.format(os.getuid(), os.getgid()),
'-v', '{c}:{c}'.format(c=connection_path),
'-v', '{c}:{c}'.format(c=os.path.expanduser('~')),
self.kernel_spec.docker_image_name,
]
return docker_prefix + cmd
else:
return cmd
|
[
"traitlets.List",
"traitlets.Type",
"os.getgid",
"traitlets.Unicode",
"os.getuid",
"os.path.split",
"os.path.expanduser"
] |
[((353, 362), 'traitlets.Unicode', 'Unicode', ([], {}), '()\n', (360, 362), False, 'from traitlets import Unicode, List, Type\n'), ((381, 387), 'traitlets.List', 'List', ([], {}), '()\n', (385, 387), False, 'from traitlets import Unicode, List, Type\n'), ((463, 653), 'traitlets.Type', 'Type', (['DockerKernelSpec'], {'config': '(True)', 'help': '"""The kernel spec class. This is configurable to allow\n subclassing of the KernelSpecManager for customized behavior.\n """'}), '(DockerKernelSpec, config=True, help=\n """The kernel spec class. This is configurable to allow\n subclassing of the KernelSpecManager for customized behavior.\n """\n )\n', (467, 653), False, 'from traitlets import Unicode, List, Type\n'), ((736, 753), 'traitlets.Unicode', 'Unicode', (['"""docker"""'], {}), "('docker')\n", (743, 753), False, 'from traitlets import Unicode, List, Type\n'), ((783, 811), 'traitlets.List', 'List', (["['--rm', '--net=host']"], {}), "(['--rm', '--net=host'])\n", (787, 811), False, 'from traitlets import Unicode, List, Type\n'), ((1239, 1274), 'os.path.split', 'os.path.split', (['self.connection_file'], {}), '(self.connection_file)\n', (1252, 1274), False, 'import os\n'), ((1577, 1588), 'os.getuid', 'os.getuid', ([], {}), '()\n', (1586, 1588), False, 'import os\n'), ((1590, 1601), 'os.getgid', 'os.getgid', ([], {}), '()\n', (1599, 1601), False, 'import os\n'), ((1734, 1757), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (1752, 1757), False, 'import os\n')]
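For the classes above to launch a kernel in a container, the kernel spec's kernel.json would need to carry the extra keys declared as traits on DockerKernelSpec. The sketch below is hedged: the image name, display name and extra args are placeholders, and it assumes the spec manager maps these JSON keys onto the matching traits.
import json

# Sketch of a kernel.json for the DockerKernelSpec above; values are placeholders.
kernel_json = {
    "argv": ["python", "-m", "ipykernel_launcher", "-f", "{connection_file}"],
    "display_name": "Python (docker)",
    "language": "python",
    "docker_image_name": "jupyter/scipy-notebook",  # assumed to populate DockerKernelSpec.docker_image_name
    "docker_args": ["--memory=2g"],                 # assumed to populate DockerKernelSpec.docker_args
}
print(json.dumps(kernel_json, indent=2))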
|
#!/usr/bin/env python
# Copyright (c) 2017, DIANA-HEP
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numbers
import numpy
import uproot.source.source
import uproot.source.cursor
from uproot.interp.interp import Interpretation
from uproot.interp.numerical import asdtype
from uproot.interp.numerical import _dimsprod
def sizes2offsets(sizes):
out = numpy.empty(len(sizes) + 1, dtype=sizes.dtype)
out[0] = 0
sizes.cumsum(out=out[1:])
return out
def _compactify(fromdata, fromstarts, fromstops, todata, tostarts, tostops):
for i in range(len(fromstarts)):
todata[tostarts[i]:tostops[i]] = fromdata[fromstarts[i]:fromstops[i]]
try:
import numba
except ImportError:
pass
else:
_compactify = numba.njit(_compactify)
class asjagged(Interpretation):
# makes __doc__ attribute mutable before Python 3.3
__metaclass__ = type.__new__(type, "type", (Interpretation.__metaclass__,), {})
def __init__(self, asdtype, skip_bytes=0):
self.asdtype = asdtype
self.skip_bytes = skip_bytes
def __repr__(self):
if self.skip_bytes == 0:
return "asjagged({0})".format(repr(self.asdtype))
else:
return "asjagged({0}, skip_bytes={1})".format(repr(self.asdtype), self.skip_bytes)
def to(self, todtype=None, todims=None, skip_bytes=None):
if skip_bytes is None:
skip_bytes = self.skip_bytes
return asjagged(self.asdtype.to(todtype, todims), skip_bytes)
@property
def identifier(self):
if self.skip_bytes == 0:
return "asjagged({0})".format(self.asdtype.identifier)
else:
return "asjagged({0}, {1})".format(self.asdtype.identifier, self.skip_bytes)
@property
def dtype(self):
subshape = self.asdtype.dtype.shape
sub = self.asdtype.dtype.subdtype
if sub is None:
tpe = self.asdtype.dtype
else:
tpe = sub[0]
return numpy.dtype((tpe, (0,) + subshape))
def empty(self):
return JaggedArray(self.asdtype.empty(), numpy.empty(0, dtype=numpy.int64), numpy.empty(0, dtype=numpy.int64))
def compatible(self, other):
return isinstance(other, asjagged) and self.asdtype.compatible(other.asdtype)
def numitems(self, numbytes, numentries):
return self.asdtype.numitems(numbytes - numentries*self.skip_bytes, numentries)
def source_numitems(self, source):
return self.asdtype.source_numitems(source.content)
def fromroot(self, data, offsets, local_entrystart, local_entrystop):
if local_entrystart == local_entrystop:
content = self.asdtype.fromroot(data, None, 0, 0)
else:
itemsize = self.asdtype.fromdtype.itemsize * _dimsprod(self.asdtype.fromdims)
if self.skip_bytes == 0:
numpy.floor_divide(offsets, itemsize, offsets)
starts = offsets[local_entrystart : local_entrystop ]
stops = offsets[local_entrystart + 1 : local_entrystop + 1]
content = self.asdtype.fromroot(data, None, starts[0], stops[-1])
else:
fromstarts = offsets[local_entrystart : local_entrystop ] + self.skip_bytes
fromstops = offsets[local_entrystart + 1 : local_entrystop + 1]
newoffsets = numpy.empty(1 + local_entrystop - local_entrystart, dtype=offsets.dtype)
newoffsets[0] = 0
numpy.cumsum(fromstops - fromstarts, out=newoffsets[1:])
newdata = numpy.empty(newoffsets[-1], dtype=data.dtype)
_compactify(data, fromstarts, fromstops, newdata, newoffsets[:-1], newoffsets[1:])
numpy.floor_divide(newoffsets, itemsize, newoffsets)
starts = newoffsets[:-1]
stops = newoffsets[1:]
content = self.asdtype.fromroot(newdata, None, 0, stops[-1])
return JaggedArray(content, starts, stops)
def destination(self, numitems, numentries):
content = self.asdtype.destination(numitems, numentries)
sizes = numpy.empty(numentries, dtype=numpy.int64)
return JaggedArray._Prep(content, sizes)
def fill(self, source, destination, itemstart, itemstop, entrystart, entrystop):
destination.sizes[entrystart:entrystop] = source.stops - source.starts
self.asdtype.fill(source.content, destination.content, itemstart, itemstop, entrystart, entrystop)
def clip(self, destination, itemstart, itemstop, entrystart, entrystop):
destination.content = self.asdtype.clip(destination.content, itemstart, itemstop, entrystart, entrystop)
destination.sizes = destination.sizes[entrystart:entrystop]
return destination
def finalize(self, destination, branch):
offsets = sizes2offsets(destination.sizes)
starts = offsets[:-1]
stops = offsets[1:]
content = self.asdtype.finalize(destination.content, branch)
leafcount = None
if len(branch.fLeaves) == 1:
leafcount = branch.fLeaves[0].fLeafCount
return JaggedArray(content, starts, stops, leafcount=leafcount)
def asstlvector(asdtype):
return asjagged(asdtype, skip_bytes=10)
def _jaggedarray_getitem(jaggedarray, index):
stopslen = len(jaggedarray.stops)
if index < 0:
index += stopslen
if 0 <= index < stopslen:
start = jaggedarray.starts[index]
stop = jaggedarray.stops[index]
return jaggedarray.content[start:stop]
else:
raise IndexError("index out of range for JaggedArray")
class JaggedArray(object):
# makes __doc__ attribute mutable before Python 3.3
__metaclass__ = type.__new__(type, "type", (type,), {})
class _Prep(object):
def __init__(self, content, sizes):
self.content = content
self.sizes = sizes
@staticmethod
def fromlists(lists):
offsets = numpy.empty(len(lists) + 1, dtype=numpy.int64)
offsets[0] = 0
stop = 0
anybool = False
anyint = False
anyfloat = False
anycomplex = False
for i, x in enumerate(lists):
offsets[i + 1] = stop = stop + len(x)
if isinstance(x, numpy.ndarray):
if issubclass(x.dtype.type, (numpy.bool, numpy.bool_)):
anybool = True
elif issubclass(x.dtype.type, numpy.integer):
anyint = True
elif issubclass(x.dtype.type, numpy.floating):
anyfloat = True
elif issubclass(x.dtype.type, numpy.complexfloating):
anycomplex = True
else:
if not anybool and not anyint and not anyfloat and not anycomplex and any(isinstance(y, bool) for y in x):
anybool = True
if not anyint and not anyfloat and not anycomplex and any(isinstance(y, int) for y in x):
anyint = True
if not anyfloat and not anycomplex and any(isinstance(y, float) for y in x):
anyfloat = True
if not anycomplex and any(isinstance(y, complex) for y in x):
anycomplex = True
if anycomplex:
dtype = numpy.dtype(numpy.complex)
elif anyfloat:
dtype = numpy.dtype(numpy.float64)
elif anyint:
dtype = numpy.dtype(numpy.int64)
elif anybool:
dtype = numpy.dtype(numpy.bool)
else:
raise TypeError("no numerical types found in lists")
starts = offsets[:-1]
stops = offsets[1:]
content = numpy.empty(offsets[-1], dtype=dtype)
for i, x in enumerate(lists):
content[starts[i]:stops[i]] = x
return JaggedArray(content, starts, stops)
def __init__(self, content, starts, stops, leafcount=None):
assert isinstance(content, numpy.ndarray)
assert isinstance(starts, numpy.ndarray) and issubclass(starts.dtype.type, numpy.integer)
assert isinstance(stops, numpy.ndarray) and issubclass(stops.dtype.type, numpy.integer)
assert len(stops.shape) == 1
assert starts.shape == stops.shape
self.content = content
self.starts = starts
self.stops = stops
self.leafcount = leafcount
def __getstate__(self):
state = self.__dict__.copy()
state["leafcount"] = None
return state
def __setstate__(self, state):
self.__dict__.update(state)
def __eq__(self, other):
return isinstance(other, JaggedArray) and numpy.array_equal(self.content, other.content) and self.aligned(other)
def __ne__(self, other):
return not self.__eq__(other)
@property
def offsets(self):
if self.starts.base is not None and self.stops.base is not None and self.starts.base is self.stops.base and \
self.starts.ctypes.data == self.starts.base.ctypes.data and \
self.stops.ctypes.data == self.stops.base.ctypes.data + self.starts.dtype.itemsize and \
len(self.starts) == len(self.starts.base) - 1 and \
len(self.stops) == len(self.starts.base) - 1:
return self.starts.base
elif numpy.array_equal(self.starts[1:], self.stops[:-1]):
return numpy.append(self.starts, self.stops[-1])
else:
raise ValueError("starts and stops are not compatible; cannot express as offsets")
def aligned(self, other):
if self.leafcount is not None and other.leafcount is not None and self.leafcount is other.leafcount:
return True
else:
return numpy.array_equal(self.starts, other.starts) and numpy.array_equal(self.stops, other.stops)
def __len__(self):
return len(self.stops)
def __getitem__(self, index):
if isinstance(index, numbers.Integral):
return _jaggedarray_getitem(self, index)
elif isinstance(index, slice):
if index.step is not None and index.step != 1:
raise NotImplementedError("cannot yet slice a JaggedArray with step != 1 (FIXME: this is possible, should be implemented)")
else:
return JaggedArray(self.content, self.starts[index], self.stops[index])
else:
raise TypeError("JaggedArray index must be an integer or a slice")
def __iter__(self):
content = self.content
starts = self.starts
stops = self.stops
for i in range(len(stops)):
yield content[starts[i]:stops[i]]
def __repr__(self, indent="", linewidth=None):
if linewidth is None:
linewidth = numpy.get_printoptions()["linewidth"]
dtypestr = repr(self.content.dtype).replace("(", "=").rstrip(")")
linewidth = linewidth - 12 - 2 - len(dtypestr)
return "jaggedarray({0})".format(self.__str__(indent=" " * 12, linewidth=linewidth))
def __str__(self, indent="", linewidth=None):
if linewidth is None:
linewidth = numpy.get_printoptions()["linewidth"]
def single(a):
if len(a) > 6:
return numpy.array_str(a[:3], max_line_width=numpy.inf).rstrip("]") + " ... " + numpy.array_str(a[-3:], max_line_width=numpy.inf).lstrip("[")
else:
return numpy.array_str(a, max_line_width=numpy.inf)
if len(self) > 10:
content = [single(self[i]) for i in range(3)] + ["..."] + [single(self[i]) for i in range(-3, 0)]
else:
content = [single(x) for x in self]
if sum(len(x) for x in content) + 2*(len(content) - 1) + 2 <= linewidth:
return "[" + ", ".join(content) + "]"
else:
return "[" + (",\n " + indent).join(content) + "]"
def tolist(self):
return [x.tolist() for x in self]
def __array__(self, dtype=None, copy=False, order="K", subok=False, ndmin=0):
if dtype is None:
dtype = self.content.dtype
elif not isinstance(dtype, numpy.dtype):
dtype = numpy.dtype(dtype)
if dtype == self.content.dtype and not copy and not subok and ndmin == 0:
return self.content
else:
return numpy.array(self.content, dtype=dtype, copy=copy, order=order, subok=subok, ndmin=ndmin)
class asvar(asjagged):
def __init__(self, genclass, skip_bytes=0, args=()):
self.genclass = genclass
super(asvar, self).__init__(asdtype(numpy.dtype(numpy.uint8)), skip_bytes=skip_bytes)
self.args = args
def __repr__(self):
return self.identifier
@property
def identifier(self):
args = []
if self.skip_bytes != 0:
args.append(", skip_bytes={0}".format(self.skip_bytes))
return "asvar({0}{1})".format(self.genclass.__name__, "".join(args))
@property
def dtype(self):
return self.genclass._dtype(self.args)
def empty(self):
return self.genclass(*((super(asvar, self).empty(),) + self.args))
def compatible(self, other):
return isinstance(other, asvar) and self.genclass is other.genclass and super(asvar, self).compatible(other) and self.args == other.args
def source_numitems(self, source):
return super(asvar, self).source_numitems(source.jaggedarray)
def fromroot(self, data, offsets, local_entrystart, local_entrystop):
return self.genclass(*((super(asvar, self).fromroot(data, offsets, local_entrystart, local_entrystop),) + self.args))
def fill(self, source, destination, itemstart, itemstop, entrystart, entrystop):
return super(asvar, self).fill(source.jaggedarray, destination, itemstart, itemstop, entrystart, entrystop)
def finalize(self, destination, branch):
return self.genclass(*((super(asvar, self).finalize(destination, branch),) + self.args))
class VariableLength(object):
def __init__(self, *args):
self.jaggedarray = args[0]
assert self.jaggedarray.content.dtype.itemsize == 1
assert len(self.jaggedarray.content.shape) == 1
self.args = args[1:]
def __len__(self):
return len(self.jaggedarray)
def __getitem__(self, index):
if isinstance(index, numbers.Integral):
return self.interpret(self.jaggedarray[index])
elif isinstance(index, slice):
return self.__class__(*((self.jaggedarray[index],) + self.args))
else:
raise TypeError("{0} index must be an integer or a slice".format(self.__class__.__name__))
def __iter__(self):
for x in self.jaggedarray:
yield self.interpret(x)
def __str__(self):
if len(self) > 6:
return "[{0} ... {1}]".format(" ".join(repr(self[i]) for i in range(3)), " ".join(repr(self[i]) for i in range(-3, 0)))
else:
return "[{0}]".format(" ".join(repr(x) for x in self))
def tolist(self):
return list(self)
@staticmethod
def interpret(item):
raise NotImplementedError
class asobjs(asvar):
def __init__(self, cls, context=None):
super(asobjs, self).__init__(JaggedObjects, skip_bytes=0, args=(cls, context))
self.cls = cls
self.context = context
@property
def identifier(self):
return "asobjs({0})".format(self.cls.__name__)
@property
def dtype(self):
return numpy.dtype((object, (0,)))
def asjaggedobjects(cls, context=None):
return asobjs(cls, context=context)
class JaggedObjects(VariableLength):
indexdtype = numpy.dtype(">i4")
def __init__(self, jaggedarray, cls, context):
super(JaggedObjects, self).__init__(jaggedarray, cls)
self._class = cls
self._context = context
def interpret(self, item):
size, = item[6:10].view(JaggedObjects.indexdtype)
source = uproot.source.source.Source(item)
cursor = uproot.source.cursor.Cursor(10)
out = [None] * size
for i in range(size):
out[i] = self._class.read(source, cursor, self._context, None)
return out
def __str__(self):
if len(self) > 6:
return "[{0}\n ...\n{1}]".format(",\n".join(("" if i == 0 else " ") + repr(self[i]) for i in range(3)), ",\n".join(" " + repr(self[i]) for i in range(-3, 0)))
else:
return "[{0}]".format(", ".join(repr(x) for x in self))
def __repr__(self):
return "<JaggedObjects of {0} at {1:012x}>".format(self._class.__name__, id(self))
def __getitem__(self, index):
if isinstance(index, numbers.Integral):
return self.interpret(self.jaggedarray[index])
elif isinstance(index, slice):
return JaggedObjects(self.jaggedarray[index], self._class, self._context)
else:
raise TypeError("{0} index must be an integer or a slice".format(self.__class__.__name__))
def asstlvectorvector(fromdtype):
return asvar(JaggedJaggedArray, skip_bytes=6, args=(numpy.dtype(fromdtype),))
class JaggedJaggedArray(VariableLength):
def __init__(self, jaggedarray, fromdtype):
super(JaggedJaggedArray, self).__init__(jaggedarray, fromdtype)
self.fromdtype = fromdtype
@classmethod
def _dtype(cls, args):
dtype, = args
return numpy.dtype((dtype, (0, 0)))
indexdtype = numpy.dtype(">i4")
def interpret(self, item):
i = 0
size, = item[i : i + 4].view(JaggedJaggedArray.indexdtype)
i += 4
out = []
while i < len(item):
size, = item[i : i + 4].view(JaggedJaggedArray.indexdtype)
i += 4
numbytes = size * self.fromdtype.itemsize
out.append(item[i : i + numbytes].view(self.fromdtype))
i += numbytes
return out
def __str__(self):
if len(self) > 6:
return "[{0} ... {1}]".format(", ".join(repr([y.tolist() for y in self[i]]) for i in range(3)), ", ".join(repr([y.tolist() for y in self[i]]) for i in range(-3, 0)))
else:
return "[{0}]".format(", ".join(repr([y.tolist() for y in x]) for x in self))
def __repr__(self):
return "jaggedjaggedarray({0})".format(str(self))
def tolist(self):
return [[y.tolist() for y in x] for x in self]
|
[
"numpy.array_str",
"numpy.empty",
"uproot.interp.numerical._dimsprod",
"numba.njit",
"numpy.dtype",
"numpy.floor_divide",
"numpy.cumsum",
"numpy.append",
"numpy.array",
"numpy.array_equal",
"numpy.get_printoptions"
] |
[((2202, 2225), 'numba.njit', 'numba.njit', (['_compactify'], {}), '(_compactify)\n', (2212, 2225), False, 'import numba\n'), ((17065, 17083), 'numpy.dtype', 'numpy.dtype', (['""">i4"""'], {}), "('>i4')\n", (17076, 17083), False, 'import numpy\n'), ((18846, 18864), 'numpy.dtype', 'numpy.dtype', (['""">i4"""'], {}), "('>i4')\n", (18857, 18864), False, 'import numpy\n'), ((3430, 3465), 'numpy.dtype', 'numpy.dtype', (['(tpe, (0,) + subshape)'], {}), '((tpe, (0,) + subshape))\n', (3441, 3465), False, 'import numpy\n'), ((5577, 5619), 'numpy.empty', 'numpy.empty', (['numentries'], {'dtype': 'numpy.int64'}), '(numentries, dtype=numpy.int64)\n', (5588, 5619), False, 'import numpy\n'), ((9156, 9193), 'numpy.empty', 'numpy.empty', (['offsets[-1]'], {'dtype': 'dtype'}), '(offsets[-1], dtype=dtype)\n', (9167, 9193), False, 'import numpy\n'), ((16901, 16928), 'numpy.dtype', 'numpy.dtype', (['(object, (0,))'], {}), '((object, (0,)))\n', (16912, 16928), False, 'import numpy\n'), ((18799, 18827), 'numpy.dtype', 'numpy.dtype', (['(dtype, (0, 0))'], {}), '((dtype, (0, 0)))\n', (18810, 18827), False, 'import numpy\n'), ((3537, 3570), 'numpy.empty', 'numpy.empty', (['(0)'], {'dtype': 'numpy.int64'}), '(0, dtype=numpy.int64)\n', (3548, 3570), False, 'import numpy\n'), ((3572, 3605), 'numpy.empty', 'numpy.empty', (['(0)'], {'dtype': 'numpy.int64'}), '(0, dtype=numpy.int64)\n', (3583, 3605), False, 'import numpy\n'), ((8770, 8796), 'numpy.dtype', 'numpy.dtype', (['numpy.complex'], {}), '(numpy.complex)\n', (8781, 8796), False, 'import numpy\n'), ((10113, 10159), 'numpy.array_equal', 'numpy.array_equal', (['self.content', 'other.content'], {}), '(self.content, other.content)\n', (10130, 10159), False, 'import numpy\n'), ((10750, 10801), 'numpy.array_equal', 'numpy.array_equal', (['self.starts[1:]', 'self.stops[:-1]'], {}), '(self.starts[1:], self.stops[:-1])\n', (10767, 10801), False, 'import numpy\n'), ((13748, 13840), 'numpy.array', 'numpy.array', (['self.content'], {'dtype': 'dtype', 'copy': 'copy', 'order': 'order', 'subok': 'subok', 'ndmin': 'ndmin'}), '(self.content, dtype=dtype, copy=copy, order=order, subok=subok,\n ndmin=ndmin)\n', (13759, 13840), False, 'import numpy\n'), ((4218, 4250), 'uproot.interp.numerical._dimsprod', '_dimsprod', (['self.asdtype.fromdims'], {}), '(self.asdtype.fromdims)\n', (4227, 4250), False, 'from uproot.interp.numerical import _dimsprod\n'), ((4304, 4350), 'numpy.floor_divide', 'numpy.floor_divide', (['offsets', 'itemsize', 'offsets'], {}), '(offsets, itemsize, offsets)\n', (4322, 4350), False, 'import numpy\n'), ((4814, 4886), 'numpy.empty', 'numpy.empty', (['(1 + local_entrystop - local_entrystart)'], {'dtype': 'offsets.dtype'}), '(1 + local_entrystop - local_entrystart, dtype=offsets.dtype)\n', (4825, 4886), False, 'import numpy\n'), ((4937, 4993), 'numpy.cumsum', 'numpy.cumsum', (['(fromstops - fromstarts)'], {'out': 'newoffsets[1:]'}), '(fromstops - fromstarts, out=newoffsets[1:])\n', (4949, 4993), False, 'import numpy\n'), ((5020, 5065), 'numpy.empty', 'numpy.empty', (['newoffsets[-1]'], {'dtype': 'data.dtype'}), '(newoffsets[-1], dtype=data.dtype)\n', (5031, 5065), False, 'import numpy\n'), ((5181, 5233), 'numpy.floor_divide', 'numpy.floor_divide', (['newoffsets', 'itemsize', 'newoffsets'], {}), '(newoffsets, itemsize, newoffsets)\n', (5199, 5233), False, 'import numpy\n'), ((8840, 8866), 'numpy.dtype', 'numpy.dtype', (['numpy.float64'], {}), '(numpy.float64)\n', (8851, 8866), False, 'import numpy\n'), ((10822, 10863), 'numpy.append', 'numpy.append', (['self.starts', 
'self.stops[-1]'], {}), '(self.starts, self.stops[-1])\n', (10834, 10863), False, 'import numpy\n'), ((11170, 11214), 'numpy.array_equal', 'numpy.array_equal', (['self.starts', 'other.starts'], {}), '(self.starts, other.starts)\n', (11187, 11214), False, 'import numpy\n'), ((11219, 11261), 'numpy.array_equal', 'numpy.array_equal', (['self.stops', 'other.stops'], {}), '(self.stops, other.stops)\n', (11236, 11261), False, 'import numpy\n'), ((12192, 12216), 'numpy.get_printoptions', 'numpy.get_printoptions', ([], {}), '()\n', (12214, 12216), False, 'import numpy\n'), ((12557, 12581), 'numpy.get_printoptions', 'numpy.get_printoptions', ([], {}), '()\n', (12579, 12581), False, 'import numpy\n'), ((12845, 12889), 'numpy.array_str', 'numpy.array_str', (['a'], {'max_line_width': 'numpy.inf'}), '(a, max_line_width=numpy.inf)\n', (12860, 12889), False, 'import numpy\n'), ((13581, 13599), 'numpy.dtype', 'numpy.dtype', (['dtype'], {}), '(dtype)\n', (13592, 13599), False, 'import numpy\n'), ((13995, 14019), 'numpy.dtype', 'numpy.dtype', (['numpy.uint8'], {}), '(numpy.uint8)\n', (14006, 14019), False, 'import numpy\n'), ((18494, 18516), 'numpy.dtype', 'numpy.dtype', (['fromdtype'], {}), '(fromdtype)\n', (18505, 18516), False, 'import numpy\n'), ((8908, 8932), 'numpy.dtype', 'numpy.dtype', (['numpy.int64'], {}), '(numpy.int64)\n', (8919, 8932), False, 'import numpy\n'), ((8975, 8998), 'numpy.dtype', 'numpy.dtype', (['numpy.bool'], {}), '(numpy.bool)\n', (8986, 8998), False, 'import numpy\n'), ((12742, 12791), 'numpy.array_str', 'numpy.array_str', (['a[-3:]'], {'max_line_width': 'numpy.inf'}), '(a[-3:], max_line_width=numpy.inf)\n', (12757, 12791), False, 'import numpy\n'), ((12669, 12717), 'numpy.array_str', 'numpy.array_str', (['a[:3]'], {'max_line_width': 'numpy.inf'}), '(a[:3], max_line_width=numpy.inf)\n', (12684, 12717), False, 'import numpy\n')]
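A usage sketch for the JaggedArray container defined above; the import path uproot.interp.jagged is an assumption about where the module lives.
from uproot.interp.jagged import JaggedArray  # assumed import path

ja = JaggedArray.fromlists([[1.1, 2.2, 3.3], [], [4.4, 5.5]])
print(len(ja))       # 3 entries
print(ja[0])         # flat numpy array for the first entry: [1.1 2.2 3.3]
print(ja.tolist())   # [[1.1, 2.2, 3.3], [], [4.4, 5.5]]
for entry in ja:     # iteration yields one flat array per entry
    print(entry.sum())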
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import SecurityCenterSettingsServiceTransport
from .grpc import SecurityCenterSettingsServiceGrpcTransport
from .grpc_asyncio import SecurityCenterSettingsServiceGrpcAsyncIOTransport
# Compile a registry of transports.
_transport_registry = OrderedDict() # type: Dict[str, Type[SecurityCenterSettingsServiceTransport]]
_transport_registry['grpc'] = SecurityCenterSettingsServiceGrpcTransport
_transport_registry['grpc_asyncio'] = SecurityCenterSettingsServiceGrpcAsyncIOTransport
__all__ = (
'SecurityCenterSettingsServiceTransport',
'SecurityCenterSettingsServiceGrpcTransport',
'SecurityCenterSettingsServiceGrpcAsyncIOTransport',
)
|
[
"collections.OrderedDict"
] |
[((921, 934), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (932, 934), False, 'from collections import OrderedDict\n')]
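A sketch of how a registry like _transport_registry is typically consumed; the helper name is illustrative, not taken from the source, and it assumes the registry defined above is in scope.
from typing import Optional

def get_transport_class(label: Optional[str] = None):
    """Return the transport registered under `label`, defaulting to the first entry."""
    if label:
        return _transport_registry[label]
    return next(iter(_transport_registry.values()))

grpc_transport_cls = get_transport_class('grpc')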
|
import os
import signal
import subprocess
import sys
import random
from datetime import datetime
from pathlib import Path
from shutil import copy
import torch
import torch.nn.functional as F
from siren_pytorch import SirenNet, SirenWrapper
from torch import nn
from torch.cuda.amp import GradScaler, autocast
from torch.optim import Adam
from torchvision.utils import save_image
from tqdm import trange, tqdm
from deep_daze.clip import load, tokenize
assert torch.cuda.is_available(), 'CUDA must be available in order to use Deep Daze'
# graceful keyboard interrupt
terminate = False
def signal_handling(signum, frame):
global terminate
terminate = True
signal.signal(signal.SIGINT, signal_handling)
perceptor, normalize_image = load()
# Helpers
def exists(val):
return val is not None
def interpolate(image, size):
return F.interpolate(image, (size, size), mode='bilinear', align_corners=False)
def rand_cutout(image, size):
width = image.shape[-1]
offsetx = torch.randint(0, width - size, ())
offsety = torch.randint(0, width - size, ())
cutout = image[:, :, offsetx:offsetx + size, offsety:offsety + size]
return cutout
def open_folder(path):
if os.path.isfile(path):
path = os.path.dirname(path)
if not os.path.isdir(path):
return
cmd_list = None
if sys.platform == 'darwin':
cmd_list = ['open', '--', path]
elif sys.platform == 'linux2' or sys.platform == 'linux':
cmd_list = ['xdg-open', path]
elif sys.platform in ['win32', 'win64']:
cmd_list = ['explorer', path.replace('/', '\\')]
if cmd_list == None:
return
try:
subprocess.check_call(cmd_list)
except subprocess.CalledProcessError:
pass
except OSError:
pass
def norm_siren_output(img):
return ((img + 1) * 0.5).clamp(0, 1)
class DeepDaze(nn.Module):
def __init__(
self,
total_batches,
batch_size,
num_layers=8,
image_width=512,
loss_coef=100,
):
super().__init__()
# load clip
self.loss_coef = loss_coef
self.image_width = image_width
self.batch_size = batch_size
self.total_batches = total_batches
self.num_batches_processed = 0
siren = SirenNet(
dim_in=2,
dim_hidden=256,
num_layers=num_layers,
dim_out=3,
use_bias=True
)
self.model = SirenWrapper(
siren,
image_width=image_width,
image_height=image_width
)
self.generate_size_schedule()
def forward(self, text, return_loss=True):
out = self.model()
out = norm_siren_output(out)
if not return_loss:
return out
pieces = []
width = out.shape[-1]
size_slice = slice(self.num_batches_processed, self.num_batches_processed + self.batch_size)
for size in self.scheduled_sizes[size_slice]:
apper = rand_cutout(out, size)
apper = interpolate(apper, 224)
pieces.append(normalize_image(apper))
image = torch.cat(pieces)
with autocast(enabled=False):
image_embed = perceptor.encode_image(image)
text_embed = perceptor.encode_text(text)
self.num_batches_processed += self.batch_size
loss = -self.loss_coef * torch.cosine_similarity(text_embed, image_embed, dim=-1).mean()
return loss
def generate_size_schedule(self):
batches = 0
counter = 0
self.scheduled_sizes = []
while batches < self.total_batches:
counter += 1
sizes = self.sample_sizes(counter)
batches += len(sizes)
self.scheduled_sizes.extend(sizes)
def sample_sizes(self, counter):
pieces_per_group = 4
# 6 piece schedule increasing in context as model saturates
if counter < 500:
partition = [4, 5, 3, 2, 1, 1]
elif counter < 1000:
partition = [2, 5, 4, 2, 2, 1]
elif counter < 1500:
partition = [1, 4, 5, 3, 2, 1]
elif counter < 2000:
partition = [1, 3, 4, 4, 2, 2]
elif counter < 2500:
partition = [1, 2, 2, 4, 4, 3]
elif counter < 3000:
partition = [1, 1, 2, 3, 4, 5]
else:
partition = [1, 1, 1, 2, 4, 7]
dbase = .38
step = .1
width = self.image_width
sizes = []
for part_index in range(len(partition)):
groups = partition[part_index]
for _ in range(groups * pieces_per_group):
sizes.append(torch.randint(
int((dbase + step * part_index + .01) * width),
int((dbase + step * (1 + part_index)) * width), ()))
sizes.sort()
return sizes
class Imagine(nn.Module):
def __init__(
self,
text,
*,
lr=1e-5,
batch_size=4,
gradient_accumulate_every=4,
save_every=100,
image_width=512,
num_layers=16,
epochs=20,
iterations=1050,
save_progress=False,
seed=None,
open_folder=True,
save_date_time=False
):
super().__init__()
if exists(seed):
tqdm.write(f'setting seed: {seed}')
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
random.seed(seed)
torch.backends.cudnn.deterministic = True
self.epochs = epochs
self.iterations = iterations
total_batches = epochs * iterations * batch_size * gradient_accumulate_every
model = DeepDaze(
total_batches=total_batches,
batch_size=batch_size,
image_width=image_width,
num_layers=num_layers
).cuda()
self.model = model
self.scaler = GradScaler()
self.optimizer = Adam(model.parameters(), lr)
self.gradient_accumulate_every = gradient_accumulate_every
self.save_every = save_every
self.save_date_time = save_date_time
self.open_folder = open_folder
self.save_progress = save_progress
self.text = text
self.textpath = text.replace(" ", "_")
self.filename = self.image_output_path()
self.encoded_text = tokenize(text).cuda()
def image_output_path(self, current_iteration: int = None) -> Path:
"""
Returns underscore separated Path.
A current timestamp is prepended if `self.save_date_time` is set.
Sequence number left padded with 6 zeroes is appended if `save_every` is set.
:rtype: Path
"""
output_path = self.textpath
if current_iteration:
sequence_number = int(current_iteration / self.save_every)
sequence_number_left_padded = str(sequence_number).zfill(6)
output_path = f"{output_path}.{sequence_number_left_padded}"
if self.save_date_time:
current_time = datetime.now().strftime("%y%m%d-%H%M%S_%f")
output_path = f"{current_time}_{output_path}"
return Path(f"{output_path}.png")
def replace_current_img(self):
"""
Replace the current file at {text_path}.png with the current self.filename
"""
always_current_img = f"{self.textpath}.png"
if os.path.isfile(always_current_img) or os.path.islink(always_current_img):
os.remove(always_current_img) # remove the file
#copy(str(self.filename), str(self.filename))
def generate_and_save_image(self, custom_filename: Path = None, current_iteration: int = None):
"""
:param current_iteration:
:param custom_filename: A custom filename to use when saving - e.g. "testing.png"
"""
with torch.no_grad():
img = normalize_image(self.model(self.encoded_text, return_loss=False).cpu())
img.clamp_(0., 1.)
self.filename = custom_filename if custom_filename else self.image_output_path(current_iteration=current_iteration)
save_image(img, self.filename)
self.replace_current_img()
tqdm.write(f'image updated at "./{str(self.filename)}"')
def train_step(self, epoch, iteration) -> int:
total_loss = 0
for _ in range(self.gradient_accumulate_every):
with autocast():
loss = self.model(self.encoded_text)
loss = loss / self.gradient_accumulate_every
total_loss += loss
self.scaler.scale(loss).backward()
self.scaler.step(self.optimizer)
self.scaler.update()
self.optimizer.zero_grad()
if (iteration % self.save_every == 0) and self.save_progress:
self.generate_and_save_image(current_iteration=iteration)
return total_loss
def forward(self):
tqdm.write(f'Imagining "{self.text}" from the depths of my weights...')
if self.open_folder:
open_folder('./')
self.open_folder = False
for epoch in trange(self.epochs, desc='epochs'):
pbar = trange(self.iterations, desc='iteration')
for i in pbar:
loss = self.train_step(epoch, i)
pbar.set_description(f'loss: {loss.item():.2f}')
if terminate:
print('interrupted by keyboard, gracefully exiting')
return
|
[
"os.remove",
"torch.cat",
"os.path.isfile",
"pathlib.Path",
"os.path.islink",
"deep_daze.clip.load",
"torch.no_grad",
"subprocess.check_call",
"torch.cuda.amp.autocast",
"os.path.dirname",
"random.seed",
"datetime.datetime.now",
"torch.randint",
"siren_pytorch.SirenNet",
"tqdm.trange",
"torch.manual_seed",
"torch.cuda.manual_seed",
"torchvision.utils.save_image",
"torch.cuda.is_available",
"siren_pytorch.SirenWrapper",
"torch.cuda.amp.GradScaler",
"signal.signal",
"tqdm.tqdm.write",
"deep_daze.clip.tokenize",
"os.path.isdir",
"torch.nn.functional.interpolate",
"torch.cosine_similarity"
] |
[((461, 486), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (484, 486), False, 'import torch\n'), ((671, 716), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal_handling'], {}), '(signal.SIGINT, signal_handling)\n', (684, 716), False, 'import signal\n'), ((747, 753), 'deep_daze.clip.load', 'load', ([], {}), '()\n', (751, 753), False, 'from deep_daze.clip import load, tokenize\n'), ((854, 926), 'torch.nn.functional.interpolate', 'F.interpolate', (['image', '(size, size)'], {'mode': '"""bilinear"""', 'align_corners': '(False)'}), "(image, (size, size), mode='bilinear', align_corners=False)\n", (867, 926), True, 'import torch.nn.functional as F\n'), ((1001, 1035), 'torch.randint', 'torch.randint', (['(0)', '(width - size)', '()'], {}), '(0, width - size, ())\n', (1014, 1035), False, 'import torch\n'), ((1050, 1084), 'torch.randint', 'torch.randint', (['(0)', '(width - size)', '()'], {}), '(0, width - size, ())\n', (1063, 1084), False, 'import torch\n'), ((1208, 1228), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (1222, 1228), False, 'import os\n'), ((1245, 1266), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (1260, 1266), False, 'import os\n'), ((1279, 1298), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (1292, 1298), False, 'import os\n'), ((1669, 1700), 'subprocess.check_call', 'subprocess.check_call', (['cmd_list'], {}), '(cmd_list)\n', (1690, 1700), False, 'import subprocess\n'), ((2324, 2411), 'siren_pytorch.SirenNet', 'SirenNet', ([], {'dim_in': '(2)', 'dim_hidden': '(256)', 'num_layers': 'num_layers', 'dim_out': '(3)', 'use_bias': '(True)'}), '(dim_in=2, dim_hidden=256, num_layers=num_layers, dim_out=3,\n use_bias=True)\n', (2332, 2411), False, 'from siren_pytorch import SirenNet, SirenWrapper\n'), ((2500, 2570), 'siren_pytorch.SirenWrapper', 'SirenWrapper', (['siren'], {'image_width': 'image_width', 'image_height': 'image_width'}), '(siren, image_width=image_width, image_height=image_width)\n', (2512, 2570), False, 'from siren_pytorch import SirenNet, SirenWrapper\n'), ((3181, 3198), 'torch.cat', 'torch.cat', (['pieces'], {}), '(pieces)\n', (3190, 3198), False, 'import torch\n'), ((6023, 6035), 'torch.cuda.amp.GradScaler', 'GradScaler', ([], {}), '()\n', (6033, 6035), False, 'from torch.cuda.amp import GradScaler, autocast\n'), ((7271, 7297), 'pathlib.Path', 'Path', (['f"""{output_path}.png"""'], {}), "(f'{output_path}.png')\n", (7275, 7297), False, 'from pathlib import Path\n'), ((9029, 9100), 'tqdm.tqdm.write', 'tqdm.write', (['f"""Imagining "{self.text}" from the depths of my weights..."""'], {}), '(f\'Imagining "{self.text}" from the depths of my weights...\')\n', (9039, 9100), False, 'from tqdm import trange, tqdm\n'), ((9220, 9254), 'tqdm.trange', 'trange', (['self.epochs'], {'desc': '"""epochs"""'}), "(self.epochs, desc='epochs')\n", (9226, 9254), False, 'from tqdm import trange, tqdm\n'), ((3213, 3236), 'torch.cuda.amp.autocast', 'autocast', ([], {'enabled': '(False)'}), '(enabled=False)\n', (3221, 3236), False, 'from torch.cuda.amp import GradScaler, autocast\n'), ((5433, 5468), 'tqdm.tqdm.write', 'tqdm.write', (['f"""setting seed: {seed}"""'], {}), "(f'setting seed: {seed}')\n", (5443, 5468), False, 'from tqdm import trange, tqdm\n'), ((5481, 5504), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (5498, 5504), False, 'import torch\n'), ((5517, 5545), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['seed'], {}), '(seed)\n', (5539, 5545), False, 'import 
torch\n'), ((5558, 5575), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (5569, 5575), False, 'import random\n'), ((7504, 7538), 'os.path.isfile', 'os.path.isfile', (['always_current_img'], {}), '(always_current_img)\n', (7518, 7538), False, 'import os\n'), ((7542, 7576), 'os.path.islink', 'os.path.islink', (['always_current_img'], {}), '(always_current_img)\n', (7556, 7576), False, 'import os\n'), ((7590, 7619), 'os.remove', 'os.remove', (['always_current_img'], {}), '(always_current_img)\n', (7599, 7619), False, 'import os\n'), ((7956, 7971), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (7969, 7971), False, 'import torch\n'), ((8234, 8264), 'torchvision.utils.save_image', 'save_image', (['img', 'self.filename'], {}), '(img, self.filename)\n', (8244, 8264), False, 'from torchvision.utils import save_image\n'), ((9275, 9316), 'tqdm.trange', 'trange', (['self.iterations'], {'desc': '"""iteration"""'}), "(self.iterations, desc='iteration')\n", (9281, 9316), False, 'from tqdm import trange, tqdm\n'), ((6470, 6484), 'deep_daze.clip.tokenize', 'tokenize', (['text'], {}), '(text)\n', (6478, 6484), False, 'from deep_daze.clip import load, tokenize\n'), ((8523, 8533), 'torch.cuda.amp.autocast', 'autocast', ([], {}), '()\n', (8531, 8533), False, 'from torch.cuda.amp import GradScaler, autocast\n'), ((3436, 3492), 'torch.cosine_similarity', 'torch.cosine_similarity', (['text_embed', 'image_embed'], {'dim': '(-1)'}), '(text_embed, image_embed, dim=-1)\n', (3459, 3492), False, 'import torch\n'), ((7154, 7168), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7166, 7168), False, 'from datetime import datetime\n')]
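A usage sketch for the Imagine trainer above. It needs a CUDA-capable GPU, since the module asserts torch.cuda.is_available(); the prompt and the small epoch/iteration counts are illustrative, and the import path is an assumption.
from deep_daze import Imagine  # assumed import path

imagine = Imagine(
    text='a cozy cabin in a snowy forest',  # illustrative prompt
    epochs=1,
    iterations=50,
    save_every=25,
    save_progress=True,
    open_folder=False,
)
imagine()  # runs the training loop and writes images named after the prompt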
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for the credentials interface."""
from __future__ import unicode_literals
import unittest
from dfvfs.credentials import credentials
from tests import test_lib as shared_test_lib
class Credentials(shared_test_lib.BaseTestCase):
"""Tests the credentials interface."""
def testInitialize(self):
"""Tests the __init__ function."""
with self.assertRaises(ValueError):
credentials.Credentials()
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"dfvfs.credentials.credentials.Credentials"
] |
[((500, 515), 'unittest.main', 'unittest.main', ([], {}), '()\n', (513, 515), False, 'import unittest\n'), ((443, 468), 'dfvfs.credentials.credentials.Credentials', 'credentials.Credentials', ([], {}), '()\n', (466, 468), False, 'from dfvfs.credentials import credentials\n')]
|
import logging
import os
import ssl
import urllib.request
import feedparser
from datetime import datetime
from time import mktime, localtime
from pprint import pprint, pformat
from bs4 import BeautifulSoup as soup
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from ..models import Feed
from ..models import FeedItem
from ..models import TaggedItem
from ..models import CachedImage
LOGGER = logging.getLogger(__name__)
FAKE_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36'
feedparser.USER_AGENT = FAKE_AGENT
def get_cached_image(url):
"""
Utility to cache images
"""
# first, see if the URL has already been cached
try:
cimage = CachedImage.objects.get(url=url)
except CachedImage.DoesNotExist:
cimage = CachedImage(url=url)
cimage.save()
cimage.cache()
return cimage
def url_to_feed(url):
"""
takes a URL, returns the feed object or None
"""
# pprint"Url to feed entered")
LOGGER.debug("%s.url_to_feed entered" % __name__)
res = parse_feed(url)
# minor kluge here
parsed_feed = res['feed']
parsed_feed['etag'] = getattr(res, 'etag', None)
parsed_feed['last_modified'] = getattr(res, 'last_modified', None)
# pprintparsed_feed)
# some minor validation...
for required_key in ['title',]:
if required_key not in parsed_feed:
return None
feed = add_feed(parsed_feed, url)
# ok, now add the items
feed_items = update_items(feed)
return feed
def update_items(feed, force=False):
"""
might be an instance method?
"""
if feed.needs_update or force:
LOGGER.debug("%s.update_items entered" % __name__)
items = parse_feed(feed.feed_url, etag=feed.etag, modified=feed.last_update)['items']
res = add_items(feed, items)
else:
print("Skipping (update not needed)")
res = 0
return res
def get_feed_image(parsed_feed):
"""
Figures out how this precious little snowflake defines its image
    returns it as a CachedImage object
"""
image = None
for key in ['image']:
if hasattr(parsed_feed, key):
image_struct = parsed_feed[key]
LOGGER.info("Found image key %s: %s" % (key, image_struct))
image = get_cached_image(image_struct.url)
if image:
continue
return image
def get_feed_icon(parsed_feed):
if hasattr(parsed_feed, 'icon'):
image_url = parsed_feed['icon']
LOGGER.info("Found icon: %s" % icon_url)
def add_feed(parsed_feed, feed_url):
"""
Takes a feed dictionary, and adds it to the database
if exists, returns the original
"""
LOGGER.debug("%s.add_feed entered" % __name__)
LOGGER.debug("feed_url: %s" % feed_url)
LOGGER.debug("feed: \n%s" % pformat(parsed_feed))
if 'links' in parsed_feed:
for link in parsed_feed['links']:
if 'self' in list(link.values()):
# self-declared feed_url takes precendence
# FIXME: let's see how that works out in practice...
feed_url = link['href']
# else:
# # pprintparsed_feed)
# raise ValidationError
# check if this is a known feed
# if 'title' not in parsed_feed:
# # pprintparsed_feed)
#
try:
f = Feed.objects.get(feed_url=feed_url)
f.etag = parsed_feed['etag']
f.last_modified = parsed_feed['last_modified']
f.save()
except Feed.DoesNotExist:
# needs to be added
if parsed_feed.get('updated', None):
updated = datetime.fromtimestamp(mktime(parsed_feed['updated_parsed']))
else:
updated = datetime.now()
struct = {
'feed_title': parsed_feed['title'],
'language': parsed_feed.get('language', 'en'),
'copyright': parsed_feed.get('copyright',''),
'generator': parsed_feed.get('generator', ''),
'link': parsed_feed['link'],
'last_update': datetime.now(),
'pubDate': updated,
'lastBuildDate': updated,
'skipHours': parsed_feed.get('skipHours', 1),
'feed_url' : feed_url,
'etag' : parsed_feed['etag'],
}
struct['image'] = get_feed_image(parsed_feed)
LOGGER.debug(struct)
f = Feed(**struct)
f.save()
return f
def add_items(feed, parsed_items):
# feed: Feed object
# parsed_items: list of items from the feedparser
count = 0
for item in parsed_items:
# check of this has already been indexed
# pprintitem['id'])
# pprint(item)
        if 'id' not in item:
item['id'] = item['link']
pubDate = localtime()
try:
FeedItem.objects.get(guid=item['id'])
continue
# except KeyError as e:
# # item doesn't have a guid, for shame!
# item['id'] = item['link']
except FeedItem.DoesNotExist:
# figure out the pub_date
if 'published_parsed' in item:
pubDate = item['published_parsed']
            elif 'updated_parsed' in item:
pubDate = item['updated_parsed']
pubDate = datetime.fromtimestamp(mktime(pubDate))
# ok, it's new
# need to figure out content
# pprintitem)
# if not item.has_key('description'):
# print "DOH!"
# LOGGER.debug('description empty, look for content')
# description = item['content'][0]['value'] # wordpress weirdness
# else:
# description = item['description']
description = item['description']
struct = {
'source': feed,
'guid': item['id'],
'pubDate': pubDate,
'title': item.get('title', 'Untitled'),
'description': description,
'link': item['link'],
'author': item.get('author', feed.author),
'comments': item.get('comments',''),
}
# pprintstruct)
i = FeedItem(**struct)
i.save()
count = count + 1
return count
def find_feed(site):
"""
Parses a page, and returns a list of
atom / RSS feeds
"""
parsed_url = urllib.parse.urlparse(site)
if not parsed_url.scheme:
site = 'http://' + site
parsed_url = urllib.parse.urlparse(site)
req = urllib.request.Request(
site,
data=None,
headers={
'User-Agent': FAKE_AGENT
}
)
raw = urllib.request.urlopen(req).read()
result = []
possible_feeds = []
html = soup(raw, features='html.parser')
feed_urls = html.findAll("link", rel="alternate")
for f in feed_urls:
t = f.get("type",None)
if t:
if "rss" in t or "xml" in t:
href = f.get("href",None)
if href:
possible_feeds.append(href)
parsed_url = urllib.parse.urlparse(site)
if not parsed_url.scheme:
parsed_url = urllib.parse.urlparse('http://' + site)
base = parsed_url.scheme+"://"+parsed_url.hostname
atags = html.findAll("a")
for a in atags:
href = a.get("href",None)
if href:
if "xml" in href or "rss" in href or "feed" in href:
possible_feeds.append(base+href)
for url in list(set(possible_feeds)):
f = feedparser.parse(url)
if len(f.entries) > 0:
if url not in result:
result.append(url)
return(result)
def parse_feed(url, etag=None, modified=None):
# use urllib to get the text
d = feedparser.parse(url, etag=etag, modified=modified)
# pprintd)
return d
|
[
"feedparser.parse",
"pprint.pformat",
"logging.getLogger",
"time.mktime",
"bs4.BeautifulSoup",
"datetime.datetime.now",
"time.localtime"
] |
[((418, 445), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (435, 445), False, 'import logging\n'), ((6921, 6954), 'bs4.BeautifulSoup', 'soup', (['raw'], {'features': '"""html.parser"""'}), "(raw, features='html.parser')\n", (6925, 6954), True, 'from bs4 import BeautifulSoup as soup\n'), ((7924, 7975), 'feedparser.parse', 'feedparser.parse', (['url'], {'etag': 'etag', 'modified': 'modified'}), '(url, etag=etag, modified=modified)\n', (7940, 7975), False, 'import feedparser\n'), ((4845, 4856), 'time.localtime', 'localtime', ([], {}), '()\n', (4854, 4856), False, 'from time import mktime, localtime\n'), ((7694, 7715), 'feedparser.parse', 'feedparser.parse', (['url'], {}), '(url)\n', (7710, 7715), False, 'import feedparser\n'), ((2927, 2947), 'pprint.pformat', 'pformat', (['parsed_feed'], {}), '(parsed_feed)\n', (2934, 2947), False, 'from pprint import pprint, pformat\n'), ((3803, 3817), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3815, 3817), False, 'from datetime import datetime\n'), ((4129, 4143), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4141, 4143), False, 'from datetime import datetime\n'), ((3728, 3765), 'time.mktime', 'mktime', (["parsed_feed['updated_parsed']"], {}), "(parsed_feed['updated_parsed'])\n", (3734, 3765), False, 'from time import mktime, localtime\n'), ((5396, 5411), 'time.mktime', 'mktime', (['pubDate'], {}), '(pubDate)\n', (5402, 5411), False, 'from time import mktime, localtime\n')]
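A usage sketch for the helpers above, assuming find_feed and parse_feed are imported from the module. find_feed() only needs network access, while url_to_feed() also requires the Django app (the Feed/FeedItem models) to be configured, so only the discovery and parse steps are shown; the site URL is illustrative.
candidates = find_feed('https://example.com/blog')   # discover advertised RSS/Atom feeds
for url in candidates:
    parsed = parse_feed(url)
    print(url, parsed['feed'].get('title', '(no title)'))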
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import requests
import pandas as pd
from lxml import etree
from bs4 import BeautifulSoup
import datetime
import io
import numpy as np
from alphacast import Alphacast
from dotenv import dotenv_values
API_KEY = dotenv_values(".env").get("API_KEY")
alphacast = Alphacast(API_KEY)
# In[2]:
url1 = "https://www.estadisticaciudad.gob.ar/eyc/wp-content/uploads/2020/10/AC_S_AX05.xlsx"
df1 = pd.read_excel(url1)
df1.columns = df1.iloc[1]
df1 = df1.drop(index=1)
df1 = df1.dropna(subset = ['Bebidas'])
df1 = df1[~df1.iloc[:, 0].astype(str).str.isdigit()]
df1 = df1.drop(df1.columns[[0]], axis=1)
df1.index = pd.date_range(start='1/1/2013', periods=len(df1), freq = "MS")
df1.index.name = "Date"
df1.columns = "Índice a valores constantes Base 2013=100 de ventas en supermercados - " + df1.columns
df1
# In[3]:
url2 = "https://www.estadisticaciudad.gob.ar/eyc/wp-content/uploads/2019/05/AC_S_AX04.xlsx"
df2 = pd.read_excel(url2)
df2.columns = df2.iloc[0]
df2.columns = "Ventas totales en supermercados (miles de pesos) - " + df2.columns
df2 = df2.drop(index=0)
df2 = df2.drop(index=1)
df2 = df2.drop(index=2)
df2 = df2.dropna(subset = ['Ventas totales en supermercados (miles de pesos) - Bebidas'])
df2 = df2[~df2.iloc[:, 0].astype(str).str.isdigit()]
df2 = df2.drop(df2.columns[[0, 1]], axis=1)
df2.index = pd.date_range(start='1/1/2007', periods=len(df2), freq = "MS")
df2.index.name = "Date"
df2 = df2.rename(columns={np.nan: "Ventas totales en supermercados (miles de pesos) - Otros"})
# In[4]:
url3 = "https://www.estadisticaciudad.gob.ar/eyc/wp-content/uploads/2019/05/AC_S_AX02.xlsx"
df3 = pd.read_excel(url3)
df3.columns = df3.iloc[1]
#df3.columns = "Ventas totales en supermercados (miles de pesos) - " + df3.columns
df3 = df3.drop(index=0)
df3 = df3.drop(index=1)
#df3 = df3.drop(index=2)
df3 = df3.dropna(subset = ['Operaciones'])
df3 = df3[~df3.iloc[:, 0].astype(str).str.isdigit()]
df3 = df3.drop(df3.columns[[0, 1]], axis=1)
df3.index = pd.date_range(start='1/1/2007', periods=len(df3), freq = "MS")
df3.index.name = "Date"
#df3 = df3.rename(columns={np.nan: "Ventas totales en supermercados (miles de pesos) - Otros"})
df3
# In[5]:
df4 = df1.merge(df2, right_index = True, left_index=True, how = "right").merge(df3, right_index = True, left_index=True)
# In[6]:
for col in df4.columns:
df4[col] = pd.to_numeric(df4[col], errors="coerce")
# In[7]:
df4["country"] = "CABA"
alphacast.datasets.dataset(657).upload_data_from_df(df4,
deleteMissingFromDB = True, onConflictUpdateDB = True, uploadIndex=True)
|
[
"alphacast.Alphacast",
"dotenv.dotenv_values",
"pandas.to_numeric",
"pandas.read_excel"
] |
[((309, 327), 'alphacast.Alphacast', 'Alphacast', (['API_KEY'], {}), '(API_KEY)\n', (318, 327), False, 'from alphacast import Alphacast\n'), ((438, 457), 'pandas.read_excel', 'pd.read_excel', (['url1'], {}), '(url1)\n', (451, 457), True, 'import pandas as pd\n'), ((957, 976), 'pandas.read_excel', 'pd.read_excel', (['url2'], {}), '(url2)\n', (970, 976), True, 'import pandas as pd\n'), ((1650, 1669), 'pandas.read_excel', 'pd.read_excel', (['url3'], {}), '(url3)\n', (1663, 1669), True, 'import pandas as pd\n'), ((2378, 2418), 'pandas.to_numeric', 'pd.to_numeric', (['df4[col]'], {'errors': '"""coerce"""'}), "(df4[col], errors='coerce')\n", (2391, 2418), True, 'import pandas as pd\n'), ((260, 281), 'dotenv.dotenv_values', 'dotenv_values', (['""".env"""'], {}), "('.env')\n", (273, 281), False, 'from dotenv import dotenv_values\n')]
|
""" This model is used by test_valueeditors to test expand/collapse on
the object and parameter panes.
"""
from openmdao.main.api import Component, Assembly, VariableTree
from openmdao.lib.datatypes.api import Float, Slot
class DumbVT3(VariableTree):
def __init__(self):
super(DumbVT3, self).__init__()
self.add('a', Float(1., units='ft'))
self.add('b', Float(12., units='inch'))
class DumbVT2(VariableTree):
vt3 = Slot(DumbVT3, iotype='in')
def __init__(self):
super(DumbVT2, self).__init__()
self.add('x', Float(-1.))
self.add('y', Float(-2.))
self.add('vt3', DumbVT3())
class DumbVT(VariableTree):
vt2 = Slot(DumbVT2, iotype='in')
def __init__(self):
super(DumbVT, self).__init__()
self.add('vt2', DumbVT2())
self.add('v1', Float(1., desc='vv1'))
self.add('v2', Float(2., desc='vv2'))
class SimpleComp(Component):
cont_in = Slot(DumbVT, iotype='in')
cont_out = Slot(DumbVT, iotype='out')
def __init__(self):
super(SimpleComp, self).__init__()
self.add('cont_in', DumbVT())
self.add('cont_out', DumbVT())
class Topp(Assembly):
def configure(self):
self.add('p1', SimpleComp())
|
[
"openmdao.lib.datatypes.api.Float",
"openmdao.lib.datatypes.api.Slot"
] |
[((451, 477), 'openmdao.lib.datatypes.api.Slot', 'Slot', (['DumbVT3'], {'iotype': '"""in"""'}), "(DumbVT3, iotype='in')\n", (455, 477), False, 'from openmdao.lib.datatypes.api import Float, Slot\n'), ((685, 711), 'openmdao.lib.datatypes.api.Slot', 'Slot', (['DumbVT2'], {'iotype': '"""in"""'}), "(DumbVT2, iotype='in')\n", (689, 711), False, 'from openmdao.lib.datatypes.api import Float, Slot\n'), ((947, 972), 'openmdao.lib.datatypes.api.Slot', 'Slot', (['DumbVT'], {'iotype': '"""in"""'}), "(DumbVT, iotype='in')\n", (951, 972), False, 'from openmdao.lib.datatypes.api import Float, Slot\n'), ((988, 1014), 'openmdao.lib.datatypes.api.Slot', 'Slot', (['DumbVT'], {'iotype': '"""out"""'}), "(DumbVT, iotype='out')\n", (992, 1014), False, 'from openmdao.lib.datatypes.api import Float, Slot\n'), ((339, 361), 'openmdao.lib.datatypes.api.Float', 'Float', (['(1.0)'], {'units': '"""ft"""'}), "(1.0, units='ft')\n", (344, 361), False, 'from openmdao.lib.datatypes.api import Float, Slot\n'), ((384, 409), 'openmdao.lib.datatypes.api.Float', 'Float', (['(12.0)'], {'units': '"""inch"""'}), "(12.0, units='inch')\n", (389, 409), False, 'from openmdao.lib.datatypes.api import Float, Slot\n'), ((564, 575), 'openmdao.lib.datatypes.api.Float', 'Float', (['(-1.0)'], {}), '(-1.0)\n', (569, 575), False, 'from openmdao.lib.datatypes.api import Float, Slot\n'), ((598, 609), 'openmdao.lib.datatypes.api.Float', 'Float', (['(-2.0)'], {}), '(-2.0)\n', (603, 609), False, 'from openmdao.lib.datatypes.api import Float, Slot\n'), ((833, 855), 'openmdao.lib.datatypes.api.Float', 'Float', (['(1.0)'], {'desc': '"""vv1"""'}), "(1.0, desc='vv1')\n", (838, 855), False, 'from openmdao.lib.datatypes.api import Float, Slot\n'), ((879, 901), 'openmdao.lib.datatypes.api.Float', 'Float', (['(2.0)'], {'desc': '"""vv2"""'}), "(2.0, desc='vv2')\n", (884, 901), False, 'from openmdao.lib.datatypes.api import Float, Slot\n')]
|
import numpy as np
import torch
import torch.nn.functional as F
import dataset_creator as DC
from torch import nn
from torch import optim
# import keras
def createNN(_inputSize):
input_size = _inputSize
    hidden_sizes = [15,10] # 15 nodes in the first hidden layer
output_size = 29 # Number of possible outputs
model = nn.Sequential(nn.Linear(input_size, hidden_sizes[0]),
nn.ReLU(),
#nn.Dropout(0.2),
#nn.Linear(hidden_sizes[0], hidden_sizes[1]),
#nn.ReLU(),
#nn.Dropout(0.3),
nn.Linear(hidden_sizes[0], output_size))
return model
def convert2tensor(x):
x = torch.FloatTensor(x)
return x
def convert2long(x):
x = torch.LongTensor(x)
return x
def switchLoader(e,it1,it2,it3,it4,it5):
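    # Return a different data iterator depending on the epoch index e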
switcher={
0:it1,
1:it2,
2:it3,
3:it4,
4:it5
}
return switcher.get(e,"Invalid Iterator")
def TrainNN(model,t1,t2,t3,t4,t5):
criterion = nn.CrossEntropyLoss()
#criterion = nn.CTCLoss()
#optimizer = optim.SGD(model.parameters(), lr=0.01)
optimizer = optim.Adam(model.parameters(), lr=1e-5)
epochs = 5
print_every = 1000
steps = 0
correct_train = 0
for e in range(epochs):
running_loss = 0
loaderForData = switchLoader(e,t1,t2,t3,t4,t5)
for images, labels in iter(loaderForData):
steps += 1
images = convert2tensor(images)
actual_label = labels
labels = [labels,]
labels = convert2long(labels)
labels = torch.LongTensor(labels)
optimizer.zero_grad() # Clear the gradients as gradients are accumulated
# Forward and backward passes
output = model.forward(images)
output = F.softmax(output, dim=0)
output = output.unsqueeze(dim=0)
loss = criterion(output, labels) # Calculate the loss
loss.backward() # backpropagate to get values of the new weights
optimizer.step() # Take a step to update the newly calculated weights
_, predicted = torch.max(output.data, 1)
correct_train += predicted.eq(labels.data).sum().item()
running_loss += loss.item()
if steps % print_every == 0:
print(predicted)
print(labels.data)
print("Epoch: {}/{}... ".format(e+1, epochs),
"Loss: {:.4f}".format(running_loss/print_every))
print("Ended Epoch.",str(e+1))
#Saving the model after training:
train_accuracy = 100 * correct_train / 5000
print("Train Accuracy on 1000 Elements: {}%".format(train_accuracy))
PATH = 'trained_model.pth'
torch.save(model.state_dict(), PATH)
def TestNN(model,testloader):
images = torch.FloatTensor(testloader[:17])
logits = model.forward(images)
ps = F.softmax(logits, dim=0)
ps = ps.data.numpy().squeeze()
prediction = np.argmax(ps)
print(ps)
D = DC.returnToArabicDictionary()
return D[prediction]
# def PrepareLabels():
def load_checkpoint(filepath):
model = torch.load('trained_model.pth')
return model
|
[
"dataset_creator.returnToArabicDictionary",
"torch.nn.ReLU",
"numpy.argmax",
"torch.LongTensor",
"torch.load",
"torch.nn.CrossEntropyLoss",
"torch.FloatTensor",
"torch.nn.functional.softmax",
"torch.max",
"torch.nn.Linear"
] |
[((737, 757), 'torch.FloatTensor', 'torch.FloatTensor', (['x'], {}), '(x)\n', (754, 757), False, 'import torch\n'), ((801, 820), 'torch.LongTensor', 'torch.LongTensor', (['x'], {}), '(x)\n', (817, 820), False, 'import torch\n'), ((1078, 1099), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (1097, 1099), False, 'from torch import nn\n'), ((3005, 3039), 'torch.FloatTensor', 'torch.FloatTensor', (['testloader[:17]'], {}), '(testloader[:17])\n', (3022, 3039), False, 'import torch\n'), ((3085, 3109), 'torch.nn.functional.softmax', 'F.softmax', (['logits'], {'dim': '(0)'}), '(logits, dim=0)\n', (3094, 3109), True, 'import torch.nn.functional as F\n'), ((3162, 3175), 'numpy.argmax', 'np.argmax', (['ps'], {}), '(ps)\n', (3171, 3175), True, 'import numpy as np\n'), ((3198, 3227), 'dataset_creator.returnToArabicDictionary', 'DC.returnToArabicDictionary', ([], {}), '()\n', (3225, 3227), True, 'import dataset_creator as DC\n'), ((3321, 3352), 'torch.load', 'torch.load', (['"""trained_model.pth"""'], {}), "('trained_model.pth')\n", (3331, 3352), False, 'import torch\n'), ((346, 384), 'torch.nn.Linear', 'nn.Linear', (['input_size', 'hidden_sizes[0]'], {}), '(input_size, hidden_sizes[0])\n', (355, 384), False, 'from torch import nn\n'), ((412, 421), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (419, 421), False, 'from torch import nn\n'), ((647, 686), 'torch.nn.Linear', 'nn.Linear', (['hidden_sizes[0]', 'output_size'], {}), '(hidden_sizes[0], output_size)\n', (656, 686), False, 'from torch import nn\n'), ((1706, 1730), 'torch.LongTensor', 'torch.LongTensor', (['labels'], {}), '(labels)\n', (1722, 1730), False, 'import torch\n'), ((1944, 1968), 'torch.nn.functional.softmax', 'F.softmax', (['output'], {'dim': '(0)'}), '(output, dim=0)\n', (1953, 1968), True, 'import torch.nn.functional as F\n'), ((2279, 2304), 'torch.max', 'torch.max', (['output.data', '(1)'], {}), '(output.data, 1)\n', (2288, 2304), False, 'import torch\n')]
|
# coding: utf-8
# In[1]:
import nengo
import nengo_spa as spa
import numpy as np
# In[2]:
from matplotlib import pyplot as plt
# In[3]:
#create semantic pointers
words = [ 'CAT', 'BLUE', 'RED']
colors = ['RED', 'BLUE']
fingers = ['INDEX', 'MIDDLE']
D = 16 #we reduced it from 32 cause of capacity of our computers
vocab = spa.Vocabulary(D)
vocab.populate(';'.join(words))
vocab.populate('COLOR; WORD')
vocab.populate(';'.join(fingers))
stimuli = []
for i in range(10):
w = np.random.choice(colors)
c = np.random.choice(colors)
stimuli.append((w,c))
# # No recurrent connections
# In[4]:
model = spa.Network()
with model:
t_stim = 0.5
t_isi = 0.5
def word_func(t):
index = int (t / (t_stim + t_isi))
t = t % (t_stim + t_isi)
if t < t_isi:
return '0'
else:
return stimuli[index%len(stimuli)][0]
def color_func(t):
index = int (t / (t_stim + t_isi))
t = t % (t_stim + t_isi)
if t < t_isi:
return '0'
else:
return stimuli[index%len(stimuli)][1]
stim_w = spa.Transcode(word_func, output_vocab=vocab)
#create node for pre processing color to mimic delay
pre_stim_c = spa.Transcode(color_func, output_vocab=vocab)
stim_c = spa.State(vocab)
#reduced amount of neurons to increase volatility of attention
attention = spa.State(vocab, neurons_per_dimension=10)
spa.sym.WORD * 0.45 + spa.sym.COLOR * 0.55 >> attention
wm = spa.State(vocab)
nengo.Connection(pre_stim_c.output, stim_c.input, synapse=0.3)
#added gain for action selection to be triggered
(spa.sym.COLOR*stim_c+spa.sym.WORD*stim_w)*~attention*2 >> wm
finger = spa.State(vocab)
with spa.ActionSelection():
spa.ifmax( spa.dot(wm, spa.sym.BLUE),
spa.sym.INDEX >> finger)
spa.ifmax(spa.dot(wm, spa.sym.RED),
spa.sym.MIDDLE >> finger)
spa.ifmax(0.5,
spa.semantic_pointer.Zero(D) >> finger)
# In[5]:
with model:
p_input_word = nengo.Probe(stim_w.output)
p_input_color = nengo.Probe(pre_stim_c.output)
p_wm = nengo.Probe(wm.output)
p_finger = nengo.Probe(finger.output)
# In[6]:
with nengo.Simulator(model) as sim:
sim.run(5)
# In[7]:
figure, axs = plt.subplots(ncols=1, nrows=4, figsize=(10, 10))
axs[0].plot(sim.trange(), spa.similarity(sim.data[p_input_word], vocab))
axs[0].legend(vocab.keys(), loc='right')
axs[1].plot(sim.trange(), spa.similarity(sim.data[p_input_color], vocab))
axs[1].legend(vocab.keys(), loc='right')
axs[2].plot(sim.trange(), spa.similarity(sim.data[p_wm], vocab))
axs[2].legend(vocab.keys(), loc='right')
axs[3].plot(sim.trange(), spa.similarity(sim.data[p_finger], vocab))
axs[3].legend(vocab.keys(), loc='right')
# The delay in processing 'color' vs 'word' was successful. However, the model without recurrent wm always responds incorrectly (to 'word'), because it responds to the first input that reaches wm. Thus we decided to add recurrent feedback to the wm nodes, to achieve accumulation of evidence.
# # Yes recurrent connections
# In[4]:
model_rec = spa.Network()
with model_rec:
#we changed durations, to avoid intertrial effects (wm overlap)
t_stim = 0.3
t_isi = 0.7
def word_func(t):
index = int (t / (t_stim + t_isi))
t = t % (t_stim + t_isi)
if t < t_isi:
return '0'
else:
return stimuli[index%len(stimuli)][0]
def color_func(t):
#instead of achieving delay via additional node, for better control we present 'color' later than 'word'
t -= 0.1
index = int (t / (t_stim + t_isi))
t = t % (t_stim + t_isi)
if t < t_isi:
return '0'
else:
return stimuli[index%len(stimuli)][1]
stim_w = spa.Transcode(word_func, output_vocab=vocab)
stim_c = spa.Transcode(color_func, output_vocab=vocab)
rec_weight_input = 1
rec_weight_feedback = 0.5
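    # Recurrent feedback on the working-memory states lets evidence accumulate over the trial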
wm_w = spa.State(vocab, feedback=rec_weight_feedback)
wm_c = spa.State(vocab, feedback=rec_weight_feedback)
stim_w * rec_weight_input >> wm_w
stim_c * rec_weight_input >> wm_c
attention = spa.State(vocab, neurons_per_dimension=10)
#we reduced attentional difference to give higher chance to'word'
spa.sym.WORD * 0.48 + spa.sym.COLOR * 0.52 >> attention
wm = spa.State(vocab, feedback=rec_weight_feedback)
(spa.sym.COLOR * wm_c + spa.sym.WORD * wm_w) * ~attention * rec_weight_input * 2 >> wm
finger = spa.State(vocab)
with spa.ActionSelection():
spa.ifmax( spa.dot(wm, spa.sym.BLUE),
spa.sym.INDEX >> finger)
spa.ifmax(spa.dot(wm, spa.sym.RED),
spa.sym.MIDDLE >> finger)
spa.ifmax(0.5,
spa.semantic_pointer.Zero(D) >> finger)
# In[5]:
with model_rec:
p_input_word = nengo.Probe(stim_w.output)
p_input_color = nengo.Probe(stim_c.output)
p_wm_word = nengo.Probe(wm_w.output)
p_wm_color = nengo.Probe(wm_c.output)
p_wm = nengo.Probe(wm.output)
p_finger = nengo.Probe(finger.output)
# In[11]:
stimuli = []
for i in range(10):
w = np.random.choice(colors)
c = np.random.choice(colors)
stimuli.append((w,c))
# In[12]:
with nengo.Simulator(model_rec) as sim_rec:
sim_rec.run(10)
# In[13]:
figure, axs = plt.subplots(ncols=1, nrows=6, figsize=(10, 10))
axs[0].plot(sim_rec.trange(), spa.similarity(sim_rec.data[p_input_word], vocab))
axs[0].legend(vocab.keys(), loc='right')
axs[1].plot(sim_rec.trange(), spa.similarity(sim_rec.data[p_input_color], vocab))
axs[1].legend(vocab.keys(), loc='right')
axs[2].plot(sim_rec.trange(), spa.similarity(sim_rec.data[p_wm_word], vocab))
axs[2].legend(vocab.keys(), loc='right')
axs[3].plot(sim_rec.trange(), spa.similarity(sim_rec.data[p_wm_color], vocab))
axs[3].legend(vocab.keys(), loc='right')
axs[4].plot(sim_rec.trange(), spa.similarity(sim_rec.data[p_wm], vocab))
axs[4].legend(vocab.keys(), loc='right')
axs[5].plot(sim_rec.trange(), spa.similarity(sim_rec.data[p_finger], vocab))
axs[5].legend(vocab.keys(), loc='right')
# This is the closest result that shows mistakes (at least we can interpret it that way): at timepoints 4, 5, 7 & 8 both fingers are selected, and the wrong finger is selected first.
|
[
"nengo_spa.Vocabulary",
"nengo_spa.State",
"nengo_spa.Transcode",
"nengo.Probe",
"nengo_spa.Network",
"nengo.Simulator",
"nengo_spa.semantic_pointer.Zero",
"nengo_spa.similarity",
"nengo_spa.ActionSelection",
"numpy.random.choice",
"nengo_spa.dot",
"nengo.Connection",
"matplotlib.pyplot.subplots"
] |
[((335, 352), 'nengo_spa.Vocabulary', 'spa.Vocabulary', (['D'], {}), '(D)\n', (349, 352), True, 'import nengo_spa as spa\n'), ((626, 639), 'nengo_spa.Network', 'spa.Network', ([], {}), '()\n', (637, 639), True, 'import nengo_spa as spa\n'), ((2326, 2374), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'ncols': '(1)', 'nrows': '(4)', 'figsize': '(10, 10)'}), '(ncols=1, nrows=4, figsize=(10, 10))\n', (2338, 2374), True, 'from matplotlib import pyplot as plt\n'), ((3157, 3170), 'nengo_spa.Network', 'spa.Network', ([], {}), '()\n', (3168, 3170), True, 'import nengo_spa as spa\n'), ((5410, 5458), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'ncols': '(1)', 'nrows': '(6)', 'figsize': '(10, 10)'}), '(ncols=1, nrows=6, figsize=(10, 10))\n', (5422, 5458), True, 'from matplotlib import pyplot as plt\n'), ((491, 515), 'numpy.random.choice', 'np.random.choice', (['colors'], {}), '(colors)\n', (507, 515), True, 'import numpy as np\n'), ((524, 548), 'numpy.random.choice', 'np.random.choice', (['colors'], {}), '(colors)\n', (540, 548), True, 'import numpy as np\n'), ((1117, 1161), 'nengo_spa.Transcode', 'spa.Transcode', (['word_func'], {'output_vocab': 'vocab'}), '(word_func, output_vocab=vocab)\n', (1130, 1161), True, 'import nengo_spa as spa\n'), ((1237, 1282), 'nengo_spa.Transcode', 'spa.Transcode', (['color_func'], {'output_vocab': 'vocab'}), '(color_func, output_vocab=vocab)\n', (1250, 1282), True, 'import nengo_spa as spa\n'), ((1300, 1316), 'nengo_spa.State', 'spa.State', (['vocab'], {}), '(vocab)\n', (1309, 1316), True, 'import nengo_spa as spa\n'), ((1404, 1446), 'nengo_spa.State', 'spa.State', (['vocab'], {'neurons_per_dimension': '(10)'}), '(vocab, neurons_per_dimension=10)\n', (1413, 1446), True, 'import nengo_spa as spa\n'), ((1520, 1536), 'nengo_spa.State', 'spa.State', (['vocab'], {}), '(vocab)\n', (1529, 1536), True, 'import nengo_spa as spa\n'), ((1545, 1607), 'nengo.Connection', 'nengo.Connection', (['pre_stim_c.output', 'stim_c.input'], {'synapse': '(0.3)'}), '(pre_stim_c.output, stim_c.input, synapse=0.3)\n', (1561, 1607), False, 'import nengo\n'), ((1744, 1760), 'nengo_spa.State', 'spa.State', (['vocab'], {}), '(vocab)\n', (1753, 1760), True, 'import nengo_spa as spa\n'), ((2081, 2107), 'nengo.Probe', 'nengo.Probe', (['stim_w.output'], {}), '(stim_w.output)\n', (2092, 2107), False, 'import nengo\n'), ((2128, 2158), 'nengo.Probe', 'nengo.Probe', (['pre_stim_c.output'], {}), '(pre_stim_c.output)\n', (2139, 2158), False, 'import nengo\n'), ((2170, 2192), 'nengo.Probe', 'nengo.Probe', (['wm.output'], {}), '(wm.output)\n', (2181, 2192), False, 'import nengo\n'), ((2208, 2234), 'nengo.Probe', 'nengo.Probe', (['finger.output'], {}), '(finger.output)\n', (2219, 2234), False, 'import nengo\n'), ((2253, 2275), 'nengo.Simulator', 'nengo.Simulator', (['model'], {}), '(model)\n', (2268, 2275), False, 'import nengo\n'), ((2402, 2447), 'nengo_spa.similarity', 'spa.similarity', (['sim.data[p_input_word]', 'vocab'], {}), '(sim.data[p_input_word], vocab)\n', (2416, 2447), True, 'import nengo_spa as spa\n'), ((2517, 2563), 'nengo_spa.similarity', 'spa.similarity', (['sim.data[p_input_color]', 'vocab'], {}), '(sim.data[p_input_color], vocab)\n', (2531, 2563), True, 'import nengo_spa as spa\n'), ((2633, 2670), 'nengo_spa.similarity', 'spa.similarity', (['sim.data[p_wm]', 'vocab'], {}), '(sim.data[p_wm], vocab)\n', (2647, 2670), True, 'import nengo_spa as spa\n'), ((2740, 2781), 'nengo_spa.similarity', 'spa.similarity', (['sim.data[p_finger]', 'vocab'], {}), '(sim.data[p_finger], vocab)\n', 
(2754, 2781), True, 'import nengo_spa as spa\n'), ((3850, 3894), 'nengo_spa.Transcode', 'spa.Transcode', (['word_func'], {'output_vocab': 'vocab'}), '(word_func, output_vocab=vocab)\n', (3863, 3894), True, 'import nengo_spa as spa\n'), ((3908, 3953), 'nengo_spa.Transcode', 'spa.Transcode', (['color_func'], {'output_vocab': 'vocab'}), '(color_func, output_vocab=vocab)\n', (3921, 3953), True, 'import nengo_spa as spa\n'), ((4034, 4080), 'nengo_spa.State', 'spa.State', (['vocab'], {'feedback': 'rec_weight_feedback'}), '(vocab, feedback=rec_weight_feedback)\n', (4043, 4080), True, 'import nengo_spa as spa\n'), ((4092, 4138), 'nengo_spa.State', 'spa.State', (['vocab'], {'feedback': 'rec_weight_feedback'}), '(vocab, feedback=rec_weight_feedback)\n', (4101, 4138), True, 'import nengo_spa as spa\n'), ((4245, 4287), 'nengo_spa.State', 'spa.State', (['vocab'], {'neurons_per_dimension': '(10)'}), '(vocab, neurons_per_dimension=10)\n', (4254, 4287), True, 'import nengo_spa as spa\n'), ((4431, 4477), 'nengo_spa.State', 'spa.State', (['vocab'], {'feedback': 'rec_weight_feedback'}), '(vocab, feedback=rec_weight_feedback)\n', (4440, 4477), True, 'import nengo_spa as spa\n'), ((4591, 4607), 'nengo_spa.State', 'spa.State', (['vocab'], {}), '(vocab)\n', (4600, 4607), True, 'import nengo_spa as spa\n'), ((4932, 4958), 'nengo.Probe', 'nengo.Probe', (['stim_w.output'], {}), '(stim_w.output)\n', (4943, 4958), False, 'import nengo\n'), ((4979, 5005), 'nengo.Probe', 'nengo.Probe', (['stim_c.output'], {}), '(stim_c.output)\n', (4990, 5005), False, 'import nengo\n'), ((5022, 5046), 'nengo.Probe', 'nengo.Probe', (['wm_w.output'], {}), '(wm_w.output)\n', (5033, 5046), False, 'import nengo\n'), ((5064, 5088), 'nengo.Probe', 'nengo.Probe', (['wm_c.output'], {}), '(wm_c.output)\n', (5075, 5088), False, 'import nengo\n'), ((5100, 5122), 'nengo.Probe', 'nengo.Probe', (['wm.output'], {}), '(wm.output)\n', (5111, 5122), False, 'import nengo\n'), ((5138, 5164), 'nengo.Probe', 'nengo.Probe', (['finger.output'], {}), '(finger.output)\n', (5149, 5164), False, 'import nengo\n'), ((5220, 5244), 'numpy.random.choice', 'np.random.choice', (['colors'], {}), '(colors)\n', (5236, 5244), True, 'import numpy as np\n'), ((5253, 5277), 'numpy.random.choice', 'np.random.choice', (['colors'], {}), '(colors)\n', (5269, 5277), True, 'import numpy as np\n'), ((5323, 5349), 'nengo.Simulator', 'nengo.Simulator', (['model_rec'], {}), '(model_rec)\n', (5338, 5349), False, 'import nengo\n'), ((5490, 5539), 'nengo_spa.similarity', 'spa.similarity', (['sim_rec.data[p_input_word]', 'vocab'], {}), '(sim_rec.data[p_input_word], vocab)\n', (5504, 5539), True, 'import nengo_spa as spa\n'), ((5613, 5663), 'nengo_spa.similarity', 'spa.similarity', (['sim_rec.data[p_input_color]', 'vocab'], {}), '(sim_rec.data[p_input_color], vocab)\n', (5627, 5663), True, 'import nengo_spa as spa\n'), ((5737, 5783), 'nengo_spa.similarity', 'spa.similarity', (['sim_rec.data[p_wm_word]', 'vocab'], {}), '(sim_rec.data[p_wm_word], vocab)\n', (5751, 5783), True, 'import nengo_spa as spa\n'), ((5857, 5904), 'nengo_spa.similarity', 'spa.similarity', (['sim_rec.data[p_wm_color]', 'vocab'], {}), '(sim_rec.data[p_wm_color], vocab)\n', (5871, 5904), True, 'import nengo_spa as spa\n'), ((5978, 6019), 'nengo_spa.similarity', 'spa.similarity', (['sim_rec.data[p_wm]', 'vocab'], {}), '(sim_rec.data[p_wm], vocab)\n', (5992, 6019), True, 'import nengo_spa as spa\n'), ((6093, 6138), 'nengo_spa.similarity', 'spa.similarity', (['sim_rec.data[p_finger]', 'vocab'], {}), '(sim_rec.data[p_finger], 
vocab)\n', (6107, 6138), True, 'import nengo_spa as spa\n'), ((1774, 1795), 'nengo_spa.ActionSelection', 'spa.ActionSelection', ([], {}), '()\n', (1793, 1795), True, 'import nengo_spa as spa\n'), ((4621, 4642), 'nengo_spa.ActionSelection', 'spa.ActionSelection', ([], {}), '()\n', (4640, 4642), True, 'import nengo_spa as spa\n'), ((1816, 1841), 'nengo_spa.dot', 'spa.dot', (['wm', 'spa.sym.BLUE'], {}), '(wm, spa.sym.BLUE)\n', (1823, 1841), True, 'import nengo_spa as spa\n'), ((1898, 1922), 'nengo_spa.dot', 'spa.dot', (['wm', 'spa.sym.RED'], {}), '(wm, spa.sym.RED)\n', (1905, 1922), True, 'import nengo_spa as spa\n'), ((4663, 4688), 'nengo_spa.dot', 'spa.dot', (['wm', 'spa.sym.BLUE'], {}), '(wm, spa.sym.BLUE)\n', (4670, 4688), True, 'import nengo_spa as spa\n'), ((4745, 4769), 'nengo_spa.dot', 'spa.dot', (['wm', 'spa.sym.RED'], {}), '(wm, spa.sym.RED)\n', (4752, 4769), True, 'import nengo_spa as spa\n'), ((1997, 2025), 'nengo_spa.semantic_pointer.Zero', 'spa.semantic_pointer.Zero', (['D'], {}), '(D)\n', (2022, 2025), True, 'import nengo_spa as spa\n'), ((4844, 4872), 'nengo_spa.semantic_pointer.Zero', 'spa.semantic_pointer.Zero', (['D'], {}), '(D)\n', (4869, 4872), True, 'import nengo_spa as spa\n')]
|
import re
from util.hook import *
from util import web
uri = 'http://www.whatthefuckshouldimakefordinner.com'
re_mark = re.compile(r'<dt><a href="(.*?)" target="_blank">(.*?)</a></dt>')
@hook(cmds=['fucking_dinner', 'fd', 'dinner'], priority='low')
def dinner(code, input):
"""fd -- WHAT DO YOU WANT FOR FUCKING DINNER?"""
err = '{red}EAT LEFT OVER PIZZA FOR ALL I CARE.'
try:
data = web.text(uri)
results = re_mark.findall(data)
if not results:
return code.say(err)
url, food = results[0][0], web.escape(results[0][1])
code.say('WHY DON\'T YOU EAT SOME FUCKING {b}%s{b}. HERE IS THE RECIPE: %s' % (
food.upper(), url))
except:
return code.say(err)
|
[
"util.web.text",
"util.web.escape",
"re.compile"
] |
[((121, 185), 're.compile', 're.compile', (['"""<dt><a href="(.*?)" target="_blank">(.*?)</a></dt>"""'], {}), '(\'<dt><a href="(.*?)" target="_blank">(.*?)</a></dt>\')\n', (131, 185), False, 'import re\n'), ((407, 420), 'util.web.text', 'web.text', (['uri'], {}), '(uri)\n', (415, 420), False, 'from util import web\n'), ((553, 578), 'util.web.escape', 'web.escape', (['results[0][1]'], {}), '(results[0][1])\n', (563, 578), False, 'from util import web\n')]
|
from django.test import SimpleTestCase
from project.apps.blog import models
class AuthorTest(SimpleTestCase):
def test_str_dunder(self):
author = models.Author(id=1)
author_dunder_str_format = '<Author: ID - {id}>'.format(id=author.id)
self.assertMultiLineEqual(
author.__str__(),
author_dunder_str_format
)
class ArticleTest(SimpleTestCase):
def test_str_dunder(self):
article = models.Article(id=1)
article_dunder_str_format = '<Article: ID - {id}>'.format(id=article.id)
self.assertMultiLineEqual(
article.__str__(),
article_dunder_str_format
)
|
[
"project.apps.blog.models.Author",
"project.apps.blog.models.Article"
] |
[((161, 180), 'project.apps.blog.models.Author', 'models.Author', ([], {'id': '(1)'}), '(id=1)\n', (174, 180), False, 'from project.apps.blog import models\n'), ((458, 478), 'project.apps.blog.models.Article', 'models.Article', ([], {'id': '(1)'}), '(id=1)\n', (472, 478), False, 'from project.apps.blog import models\n')]
|
from multiprocessing import Pool, cpu_count
import time
flatten = lambda t: [item for sublist in t for item in sublist]
def load_list(f):
pre_list = f.read().splitlines()
return [int(e) for e in pre_list]
def dump_list(f, num_list):
for i in num_list:
f.write("%d\n" % i)
def split_list(seq, num):
avg = len(seq) / float(num)
out = []
last = 0.0
while last < len(seq):
out.append(seq[int(last):int(last + avg)])
last += avg
return out
def remove_duplicates(segment):
seen = set()
seen_add = seen.add
return [e for e in segment if not (e in seen or seen_add(e))]
def task(num_list):
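    # Split the work into one chunk per CPU core and deduplicate the chunks in a process pool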
num_cpus = cpu_count()
with Pool(num_cpus) as pool:
segments = split_list(num_list, num_cpus)
return flatten(pool.map_async(remove_duplicates, segments).get())
if __name__ == '__main__':
with open('out.txt', 'r') as input_file:
num_list = load_list(input_file)
    # Test with multiprocessing
start = time.time()
result = remove_duplicates(task(num_list))
end = time.time()
print(f"With multithreading: {(end - start) * 1000} milisseconds")
with open('result.txt', 'w') as output_file:
dump_list(output_file, result)
    # Test without multiprocessing
start = time.time()
result = remove_duplicates(num_list)
end = time.time()
print(f"Without multithreading: {(end - start) * 1000} milisseconds")
|
[
"multiprocessing.Pool",
"time.time",
"multiprocessing.cpu_count"
] |
[((675, 686), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (684, 686), False, 'from multiprocessing import Pool, cpu_count\n'), ((696, 710), 'multiprocessing.Pool', 'Pool', (['num_cpus'], {}), '(num_cpus)\n', (700, 710), False, 'from multiprocessing import Pool, cpu_count\n'), ((1010, 1021), 'time.time', 'time.time', ([], {}), '()\n', (1019, 1021), False, 'import time\n'), ((1087, 1098), 'time.time', 'time.time', ([], {}), '()\n', (1096, 1098), False, 'import time\n'), ((1326, 1337), 'time.time', 'time.time', ([], {}), '()\n', (1335, 1337), False, 'import time\n'), ((1397, 1408), 'time.time', 'time.time', ([], {}), '()\n', (1406, 1408), False, 'import time\n')]
|
import pytest
from django.http import HttpResponse
from apirouter import APIRouter
from apirouter.exceptions import APIException
pytestmark = [pytest.mark.urls(__name__)]
def exception_handler(request, exc):
return HttpResponse(str(exc), status=400)
router = APIRouter(exception_handler=exception_handler)
@router.route("/string")
def handle_string(request):
return "OK"
@router.route("/dict")
def handle_dict(request):
return {"success": True}
@router.route("/list")
def handle_list(request):
return [1, 2, 3, 4, 5]
@router.route("/error")
def handle_error(request):
raise APIException(status_code=400, detail="Error")
urlpatterns = router.urls
def test_handle_string(client):
response = client.get("/string")
assert response.status_code == 200
assert response.json() == "OK"
def test_handle_dict(client):
response = client.get("/dict")
assert response.status_code == 200
assert response.json() == {"success": True}
def test_handle_list(client):
response = client.get("/list")
assert response.status_code == 200
assert response.json() == [1, 2, 3, 4, 5]
def test_handle_error(client):
response = client.get("/error")
assert response.status_code == 400
assert response.content == b"Error"
|
[
"apirouter.APIRouter",
"pytest.mark.urls",
"apirouter.exceptions.APIException"
] |
[((269, 315), 'apirouter.APIRouter', 'APIRouter', ([], {'exception_handler': 'exception_handler'}), '(exception_handler=exception_handler)\n', (278, 315), False, 'from apirouter import APIRouter\n'), ((145, 171), 'pytest.mark.urls', 'pytest.mark.urls', (['__name__'], {}), '(__name__)\n', (161, 171), False, 'import pytest\n'), ((608, 653), 'apirouter.exceptions.APIException', 'APIException', ([], {'status_code': '(400)', 'detail': '"""Error"""'}), "(status_code=400, detail='Error')\n", (620, 653), False, 'from apirouter.exceptions import APIException\n')]
|
# Generated by Django 3.0.3 on 2020-07-12 15:21
from django.db import migrations, models
import markdownx.models
import posts.models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="Tag",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"tag",
posts.models.CaseInsensitiveCharField(max_length=200, unique=True),
),
],
),
migrations.CreateModel(
name="Post",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("title", models.CharField(max_length=200)),
(
"content",
markdownx.models.MarkdownxField(
help_text="Write content of post in markdown"
),
),
("created", models.DateTimeField(auto_now_add=True)),
("updated", models.DateTimeField(auto_now=True)),
("tags", models.ManyToManyField(related_name="posts", to="posts.Tag")),
],
),
]
|
[
"django.db.models.CharField",
"django.db.models.DateTimeField",
"django.db.models.ManyToManyField",
"django.db.models.AutoField"
] |
[((379, 472), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (395, 472), False, 'from django.db import migrations, models\n'), ((928, 1021), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (944, 1021), False, 'from django.db import migrations, models\n'), ((1183, 1215), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1199, 1215), False, 'from django.db import migrations, models\n'), ((1460, 1499), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1480, 1499), False, 'from django.db import migrations, models\n'), ((1530, 1565), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (1550, 1565), False, 'from django.db import migrations, models\n'), ((1593, 1653), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': '"""posts"""', 'to': '"""posts.Tag"""'}), "(related_name='posts', to='posts.Tag')\n", (1615, 1653), False, 'from django.db import migrations, models\n')]
|
from solo.client import SoloClient
from fido2.ctap import CtapError
from fido2.ctap1 import ApduError
from .util import shannon_entropy
from .tester import Tester, Test
class SoloTests(Tester):
def __init__(self, tester=None):
super().__init__(tester)
def run(self,):
self.test_solo()
def test_solo(self,):
"""
Solo specific tests
"""
# RNG command
sc = SoloClient()
sc.find_device(self.dev)
sc.use_u2f()
memmap = (0x08005000, 0x08005000 + 198 * 1024 - 8)
total = 1024 * 16
with Test("Gathering %d random bytes..." % total):
entropy = b""
while len(entropy) < total:
entropy += sc.get_rng()
with Test("Test entropy is close to perfect"):
s = shannon_entropy(entropy)
assert s > 7.98
print("Entropy is %.5f bits per byte." % s)
with Test("Test Solo version command"):
assert len(sc.solo_version()) == 3
with Test("Test bootloader is not active"):
try:
sc.write_flash(memmap[0], b"1234")
except ApduError:
pass
sc.exchange = sc.exchange_fido2
with Test("Test Solo version and random commands with fido2 layer"):
assert len(sc.solo_version()) == 3
sc.get_rng()
def test_bootloader(self,):
sc = SoloClient()
sc.find_device(self.dev)
sc.use_u2f()
memmap = (0x08005000, 0x08005000 + 198 * 1024 - 8)
data = b"A" * 64
with Test("Test version command"):
assert len(sc.bootloader_version()) == 3
with Test("Test write command"):
sc.write_flash(memmap[0], data)
for addr in (memmap[0] - 8, memmap[0] - 4, memmap[1], memmap[1] - 8):
with Test("Test out of bounds write command at 0x%04x" % addr):
try:
sc.write_flash(addr, data)
except CtapError as e:
assert e.code == CtapError.ERR.NOT_ALLOWED
|
[
"solo.client.SoloClient"
] |
[((395, 407), 'solo.client.SoloClient', 'SoloClient', ([], {}), '()\n', (405, 407), False, 'from solo.client import SoloClient\n'), ((1394, 1406), 'solo.client.SoloClient', 'SoloClient', ([], {}), '()\n', (1404, 1406), False, 'from solo.client import SoloClient\n')]
|
"""
Quicksort.
"""
import random
def partition(array, left, right):
"""Quicksort partition."""
pivot = array[right]
i = left - 1
for j in range(left, right):
if array[j] < pivot:
i += 1
array[i], array[j] = array[j], array[i]
array[right], array[i + 1] = array[i + 1], array[right]
return i + 1
def quicksort_r(array, left, right):
"""Quicksort recursion."""
if right > left:
pivot_i = partition(array, left, right)
quicksort_r(array, left, pivot_i - 1)
quicksort_r(array, pivot_i + 1, right)
def quicksort(array):
"""Quicksort."""
quicksort_r(array, 0, len(array) - 1)
def main():
"""The main function."""
array = list(range(20))
random.shuffle(array)
print(array)
quicksort(array)
print(array)
if __name__ == "__main__":
main()
|
[
"random.shuffle"
] |
[((748, 769), 'random.shuffle', 'random.shuffle', (['array'], {}), '(array)\n', (762, 769), False, 'import random\n')]
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from intphys.extra.tvqa.tvqa_abc import ABC
from intphys.extra.tvqa_plus.stage import STAGE
from intphys.data import SimulationInput
from intphys.submodule import *
class TVQA(nn.Module):
SIMULATION_INPUT = SimulationInput.VIDEO
def __init__(self, config):
super().__init__()
config["tvqa"]["vocab_size"] = config["input_size"]
config["tvqa"]["dropout"] = config["dropout"]
config["tvqa"]["output_size"] = config["output_size"]
config["frame_encoder"]["depth_size"] = config["depth_size"]
config["frame_encoder"]["input_width"] = config["input_width"]
config["frame_encoder"]["input_height"] = config["input_height"]
self.config = config
self.frame_encoder = self.create_submodule("frame_encoder")
self.adaptive_pool = nn.AdaptiveAvgPool2d(config["pool_size"])
self.flatten = nn.Flatten()
config["tvqa"]["vid_feat_size"] = config["pool_size"]**2 * self.frame_encoder.out_channels
self.tvqa = ABC(config["tvqa"])
def create_submodule(self, submodule):
config = self.config[submodule]
submodule = eval(config["architecture"])(config)
return submodule
def process_simulation(self, simulations, **kwargs):
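        # Fold the time axis into the batch dimension so the 2D frame encoder processes every frame independently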
B, C, T, X1, X2 = simulations.shape
y = simulations.permute(0, 2, 1, 3, 4)
y = y.reshape(B*T, C, X1, X2)
y = self.frame_encoder(y)
y = self.adaptive_pool(y)
y = self.flatten(y)
y = y.reshape(B, T, -1)
return y
def forward(self, simulations, questions, lengths, **kwargs):
visual = self.process_simulation(simulations, **kwargs)
B, T = visual.shape[:2]
visual_lengths = torch.tensor([T for i in range(B)])
return self.tvqa(questions, torch.tensor(lengths), visual, visual_lengths)
class TVQAPlus(nn.Module):
SIMULATION_INPUT = SimulationInput.VIDEO
def __init__(self, config):
super().__init__()
config["stage"]["embed_size"] = config["question_encoder"]["hidden_size"]
config["stage"]["dropout"] = config["dropout"]
config["stage"]["output_size"] = config["output_size"]
config["question_encoder"]["vocab_size"] = config["input_size"]
config["frame_encoder"]["depth_size"] = config["depth_size"]
config["frame_encoder"]["input_width"] = config["input_width"]
config["frame_encoder"]["input_height"] = config["input_height"]
self.config = config
self.frame_encoder = self.create_submodule("frame_encoder")
self.question_encoder = self.create_submodule("question_encoder")
self.adaptive_pool = nn.AdaptiveAvgPool2d(config["pool_size"])
self.flatten = nn.Flatten()
config["stage"]["vfeat_size"] = self.frame_encoder.out_channels
self.stage = STAGE(config["stage"])
def create_submodule(self, submodule):
config = self.config[submodule]
submodule = eval(config["architecture"])(config)
return submodule
def process_simulation(self, simulations, **kwargs):
B, C, T, X1, X2 = simulations.shape
y = simulations.permute(0, 2, 1, 3, 4)
y = y.reshape(B*T, C, X1, X2)
y = self.frame_encoder(y)
y = self.adaptive_pool(y)
K, X1, X2 = y.shape[-3:]
y = y.view(B, T, K, X1 * X2)
y = y.permute(0, 1, 3, 2)
return y
def process_question(self, questions, lengths, **kwargs):
output, (hiddens, _) = self.question_encoder(questions, lengths)
return nn.utils.rnn.pad_packed_sequence(output, batch_first=True)[0]
def forward(self, simulations, questions, lengths, **kwargs):
visual = self.process_simulation(simulations, **kwargs)
textual = self.process_question(questions, lengths, **kwargs)
B, T, HW = visual.shape[:3]
device = visual.device
visual_lengths = torch.empty(B, T, HW, dtype=visual.dtype).fill_(1)
textual_lengths = torch.zeros(B, 1, max(lengths), dtype=visual.dtype)
for (i, length) in enumerate(lengths):
textual_lengths[i, 0, :length] = 1.0
batch = {
"qas_bert": textual,
"qas_mask": textual_lengths.to(device),
"vid": visual,
"vid_mask": visual_lengths.to(device),
}
return self.stage(batch)
|
[
"torch.nn.AdaptiveAvgPool2d",
"torch.empty",
"intphys.extra.tvqa_plus.stage.STAGE",
"torch.nn.utils.rnn.pad_packed_sequence",
"intphys.extra.tvqa.tvqa_abc.ABC",
"torch.tensor",
"torch.nn.Flatten"
] |
[((884, 925), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (["config['pool_size']"], {}), "(config['pool_size'])\n", (904, 925), True, 'import torch.nn as nn\n'), ((949, 961), 'torch.nn.Flatten', 'nn.Flatten', ([], {}), '()\n', (959, 961), True, 'import torch.nn as nn\n'), ((1081, 1100), 'intphys.extra.tvqa.tvqa_abc.ABC', 'ABC', (["config['tvqa']"], {}), "(config['tvqa'])\n", (1084, 1100), False, 'from intphys.extra.tvqa.tvqa_abc import ABC\n'), ((2736, 2777), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (["config['pool_size']"], {}), "(config['pool_size'])\n", (2756, 2777), True, 'import torch.nn as nn\n'), ((2801, 2813), 'torch.nn.Flatten', 'nn.Flatten', ([], {}), '()\n', (2811, 2813), True, 'import torch.nn as nn\n'), ((2907, 2929), 'intphys.extra.tvqa_plus.stage.STAGE', 'STAGE', (["config['stage']"], {}), "(config['stage'])\n", (2912, 2929), False, 'from intphys.extra.tvqa_plus.stage import STAGE\n'), ((1864, 1885), 'torch.tensor', 'torch.tensor', (['lengths'], {}), '(lengths)\n', (1876, 1885), False, 'import torch\n'), ((3628, 3686), 'torch.nn.utils.rnn.pad_packed_sequence', 'nn.utils.rnn.pad_packed_sequence', (['output'], {'batch_first': '(True)'}), '(output, batch_first=True)\n', (3660, 3686), True, 'import torch.nn as nn\n'), ((3983, 4024), 'torch.empty', 'torch.empty', (['B', 'T', 'HW'], {'dtype': 'visual.dtype'}), '(B, T, HW, dtype=visual.dtype)\n', (3994, 4024), False, 'import torch\n')]
|
import tensorflow as tf
from attention import AttentionLayer
from tensorflow.keras.models import load_model
import numpy as np
from tensorflow.keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from text_cleaner import text_cleaner,rareword_coverage
import pickle
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
def decode_sequence(input_seq,encoder_model,decoder_model,target_word_index,reverse_target_word_index,max_summary_len):
e_out,e_h,e_c=encoder_model.predict(input_seq)
target_seq=np.zeros((1,1))
target_seq[0,0]=target_word_index['sostok']
stop_condition=False
decoded_sentence=''
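    # Greedily emit one token per step until 'eostok' is produced or max_summary_len is reached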
while not stop_condition:
output_tokens,h,c=decoder_model.predict([target_seq]+[e_out,e_h,e_c])
sampled_token_index=np.argmax(output_tokens[0,-1,:])
sampled_token=reverse_target_word_index[sampled_token_index]
if(sampled_token!='eostok'):
decoded_sentence+=' '+sampled_token
if (sampled_token=='eostok') or len(decoded_sentence.split())>=(max_summary_len-1):
stop_condition=True
target_seq=np.zeros((1,1))
target_seq[0,0]=sampled_token_index
e_h,e_c=h,c
return decoded_sentence
def predict(test_value):
max_text_len=30
max_summary_len=8
#test_value="Gave me such a caffeine overdose I had the shakes, a racing heart and an anxiety attack. Plus it tastes unbelievably bad. I'll stick with coffee, tea and soda, thanks."
cleaned_text=[]
cleaned_text.append(text_cleaner(test_value,0))
cleaned_text=np.array(cleaned_text)
short_text=[]
for i in range(len(cleaned_text)):
if len(cleaned_text[i].split())<=max_text_len:
short_text.append(cleaned_text[i])
x_tr_test=short_text
file=open('X_training_value.pkl','rb')
x_trained_text=pickle.load(file)
file.close()
#x_trained_text=np.append(x_trained_text,x_tr_test)
x_tokenizer=Tokenizer()
x_tokenizer.fit_on_texts(x_trained_text)
cnt,tot_cnt,freq,tot_freq=rareword_coverage(4,x_tokenizer)
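    # Cap the vocabulary at tot_cnt - cnt words so that rare words (per rareword_coverage) are excluded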
x_tokenizer=Tokenizer(num_words=tot_cnt-cnt)
x_tokenizer.fit_on_texts(list(x_trained_text))
x_tr_seq=x_tokenizer.texts_to_sequences(x_tr_test)
x_tr=pad_sequences(x_tr_seq,maxlen=max_text_len,padding='post')
y_tokenizer=Tokenizer()
reverse_target_word_index=dict(map(reversed, y_tokenizer.word_index.items()))
file=open('reverse_target_word_index.pkl','rb')
reverse_target_word_index=pickle.load(file)
file.close()
file=open('reverse_source_word_index.pkl','rb')
reverse_source_word_index=pickle.load(file)
file.close()
file=open('target_word_index.pkl','rb')
target_word_index=pickle.load(file)
file.close()
max_summary_len=8
#target_word_index=y_tokenizer.word_index
encoder_model=load_model('encoder_model.h5',custom_objects={'AttentionLayer' : AttentionLayer})
decoder_model=load_model('decoder_model.h5',custom_objects={'AttentionLayer' : AttentionLayer})
return decode_sequence(x_tr.reshape(1,max_text_len),encoder_model,decoder_model,target_word_index,reverse_target_word_index,max_summary_len)
#print(predict("Gave me such a caffeine overdose I had the shakes, a racing heart and an anxiety attack. Plus it tastes unbelievably bad. I'll stick with coffee, tea and soda, thanks."))
|
[
"text_cleaner.text_cleaner",
"tensorflow.keras.preprocessing.text.Tokenizer",
"tensorflow.keras.models.load_model",
"numpy.argmax",
"keras.preprocessing.sequence.pad_sequences",
"numpy.zeros",
"pickle.load",
"tensorflow.compat.v1.logging.set_verbosity",
"numpy.array",
"text_cleaner.rareword_coverage"
] |
[((311, 373), 'tensorflow.compat.v1.logging.set_verbosity', 'tf.compat.v1.logging.set_verbosity', (['tf.compat.v1.logging.ERROR'], {}), '(tf.compat.v1.logging.ERROR)\n', (345, 373), True, 'import tensorflow as tf\n'), ((561, 577), 'numpy.zeros', 'np.zeros', (['(1, 1)'], {}), '((1, 1))\n', (569, 577), True, 'import numpy as np\n'), ((1610, 1632), 'numpy.array', 'np.array', (['cleaned_text'], {}), '(cleaned_text)\n', (1618, 1632), True, 'import numpy as np\n'), ((1884, 1901), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (1895, 1901), False, 'import pickle\n'), ((1993, 2004), 'tensorflow.keras.preprocessing.text.Tokenizer', 'Tokenizer', ([], {}), '()\n', (2002, 2004), False, 'from tensorflow.keras.preprocessing.text import Tokenizer\n'), ((2081, 2114), 'text_cleaner.rareword_coverage', 'rareword_coverage', (['(4)', 'x_tokenizer'], {}), '(4, x_tokenizer)\n', (2098, 2114), False, 'from text_cleaner import text_cleaner, rareword_coverage\n'), ((2132, 2166), 'tensorflow.keras.preprocessing.text.Tokenizer', 'Tokenizer', ([], {'num_words': '(tot_cnt - cnt)'}), '(num_words=tot_cnt - cnt)\n', (2141, 2166), False, 'from tensorflow.keras.preprocessing.text import Tokenizer\n'), ((2281, 2341), 'keras.preprocessing.sequence.pad_sequences', 'pad_sequences', (['x_tr_seq'], {'maxlen': 'max_text_len', 'padding': '"""post"""'}), "(x_tr_seq, maxlen=max_text_len, padding='post')\n", (2294, 2341), False, 'from keras.preprocessing.sequence import pad_sequences\n'), ((2357, 2368), 'tensorflow.keras.preprocessing.text.Tokenizer', 'Tokenizer', ([], {}), '()\n', (2366, 2368), False, 'from tensorflow.keras.preprocessing.text import Tokenizer\n'), ((2533, 2550), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (2544, 2550), False, 'import pickle\n'), ((2650, 2667), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (2661, 2667), False, 'import pickle\n'), ((2751, 2768), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (2762, 2768), False, 'import pickle\n'), ((2873, 2958), 'tensorflow.keras.models.load_model', 'load_model', (['"""encoder_model.h5"""'], {'custom_objects': "{'AttentionLayer': AttentionLayer}"}), "('encoder_model.h5', custom_objects={'AttentionLayer':\n AttentionLayer})\n", (2883, 2958), False, 'from tensorflow.keras.models import load_model\n'), ((2973, 3058), 'tensorflow.keras.models.load_model', 'load_model', (['"""decoder_model.h5"""'], {'custom_objects': "{'AttentionLayer': AttentionLayer}"}), "('decoder_model.h5', custom_objects={'AttentionLayer':\n AttentionLayer})\n", (2983, 3058), False, 'from tensorflow.keras.models import load_model\n'), ((811, 845), 'numpy.argmax', 'np.argmax', (['output_tokens[0, -1, :]'], {}), '(output_tokens[0, -1, :])\n', (820, 845), True, 'import numpy as np\n'), ((1151, 1167), 'numpy.zeros', 'np.zeros', (['(1, 1)'], {}), '((1, 1))\n', (1159, 1167), True, 'import numpy as np\n'), ((1565, 1592), 'text_cleaner.text_cleaner', 'text_cleaner', (['test_value', '(0)'], {}), '(test_value, 0)\n', (1577, 1592), False, 'from text_cleaner import text_cleaner, rareword_coverage\n')]
|
## Tutorial by <NAME>
from flask import Flask, request, render_template
import pandas as pd
import joblib
# Declare a Flask app
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def main():
# If a form is submitted
if request.method == "POST":
# Unpickle classifier
clf = joblib.load("clf.pkl")
# Get values through input bars
height = request.form.get("height")
weight = request.form.get("weight")
# Put inputs to dataframe
X = pd.DataFrame([[height, weight]], columns = ["Height", "Weight"])
# Get prediction
prediction = clf.predict(X)[0]
else:
prediction = ""
return render_template("website.html", output = prediction)
# Running the app
if __name__ == '__main__':
app.run(debug = True)
|
[
"pandas.DataFrame",
"flask.request.form.get",
"flask.Flask",
"flask.render_template",
"joblib.load"
] |
[((137, 152), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (142, 152), False, 'from flask import Flask, request, render_template\n'), ((743, 793), 'flask.render_template', 'render_template', (['"""website.html"""'], {'output': 'prediction'}), "('website.html', output=prediction)\n", (758, 793), False, 'from flask import Flask, request, render_template\n'), ((327, 349), 'joblib.load', 'joblib.load', (['"""clf.pkl"""'], {}), "('clf.pkl')\n", (338, 349), False, 'import joblib\n'), ((416, 442), 'flask.request.form.get', 'request.form.get', (['"""height"""'], {}), "('height')\n", (432, 442), False, 'from flask import Flask, request, render_template\n'), ((460, 486), 'flask.request.form.get', 'request.form.get', (['"""weight"""'], {}), "('weight')\n", (476, 486), False, 'from flask import Flask, request, render_template\n'), ((542, 604), 'pandas.DataFrame', 'pd.DataFrame', (['[[height, weight]]'], {'columns': "['Height', 'Weight']"}), "([[height, weight]], columns=['Height', 'Weight'])\n", (554, 604), True, 'import pandas as pd\n')]
|
import json
from typing import Dict, Iterable
import babel.dates
import pytz
from django import forms
from django.contrib.postgres.forms import SimpleArrayField
from django.forms import ValidationError
from django.forms.widgets import DateTimeInput, TextInput
from . import models
from .serializers import JSONEncoder
from .widgets import LeafletWidget, TimezoneWidget
class TimezoneField(forms.Field):
def to_python(self, value):
if not value:
# Babel will default to UTC if no string is specified.
return None
try:
return pytz.timezone(
babel.dates.get_timezone_name(value, return_zone=True)
)
except pytz.exceptions.Error:
return None
def validate(self, value):
if not value:
raise ValidationError('no value', code='required')
class QuestionForm(forms.ModelForm):
choices = SimpleArrayField(forms.CharField())
def clean_choices(self):
return [x for x in self.cleaned_data['choices'][0].splitlines() if x]
class Meta:
model = models.ParticipationQuestion
fields = ('question_text', 'answer_type', 'mandatory')
class QuestionnaireForm(forms.Form):
def __init__(self, questions: Iterable[models.ParticipationQuestion], **kwargs) -> None:
self.fields: Dict[str, forms.Field] = {}
super().__init__(**kwargs)
for question in questions:
if question.answer_type == 'TEXT':
f = forms.CharField(label=question.question_text, required=question.mandatory)
elif question.answer_type == 'BOOL':
f = forms.BooleanField(label=question.question_text, required=question.mandatory)
elif question.answer_type == 'CHOI':
f = forms.ChoiceField(label=question.question_text, required=question.mandatory, choices=[(x.id, x.text) for x in question.choices.all()])
else:
raise ValueError("invalid answer_type: %s" % (question.answer_type))
self.fields[str(question.id)] = f
def clean(self, *args, **kwargs):
for k, v in self.cleaned_data.items():
self.cleaned_data[int(k)] = self.cleaned_data.pop(k)
return super().clean()
class EventForm(forms.ModelForm):
timezone = TimezoneField(required=True, widget=TimezoneWidget())
class Meta:
model = models.Event
fields = ('name', 'whole_day', 'start', 'end', 'link', 'kind', 'location_name', 'location', 'timezone', 'description')
widgets = {
'location': LeafletWidget(),
'start': DateTimeInput(attrs={'class': 'datepicker-flat'}),
'end': DateTimeInput(attrs={'class': 'datepicker-flat', 'placeholder': 'optional'}),
'link': TextInput(attrs={'placeholder': 'https://'}),
'location_name': TextInput(attrs={'placeholder': 'e.g. Café International'}),
}
unlogged_fields = ('timezone', )
def clean(self, *args, **kwargs):
super().clean(*args, **kwargs)
if self.errors:
return self.cleaned_data
tz = self.cleaned_data.get('timezone', None)
"""
Django automatically assumes that datetimes are in the default time zone (UTC),
but in fact they're in the local time zone, so we're stripping the tzinfo from
the field and setting it to the given time zone.
This does not change the value of the time itself, only the time zone placement.
"""
self.cleaned_data['start'] = tz.localize(self.cleaned_data['start'].replace(tzinfo=None))
if self.cleaned_data['end']:
self.cleaned_data['end'] = tz.localize(self.cleaned_data['end'].replace(tzinfo=None))
if self.cleaned_data['end'] <= self.cleaned_data['start']:
self.add_error('end', 'Event end has to be after its start.')
def to_json(self):
d = {}
for field in self.fields:
if field in self.Meta.unlogged_fields:
continue
d[field] = self.cleaned_data[field]
return json.loads(json.dumps(d, cls=JSONEncoder)) # This is bad and I should feel bad.
|
[
"django.forms.widgets.TextInput",
"django.forms.BooleanField",
"django.forms.widgets.DateTimeInput",
"json.dumps",
"django.forms.ValidationError",
"django.forms.CharField"
] |
[((934, 951), 'django.forms.CharField', 'forms.CharField', ([], {}), '()\n', (949, 951), False, 'from django import forms\n'), ((819, 863), 'django.forms.ValidationError', 'ValidationError', (['"""no value"""'], {'code': '"""required"""'}), "('no value', code='required')\n", (834, 863), False, 'from django.forms import ValidationError\n'), ((2622, 2671), 'django.forms.widgets.DateTimeInput', 'DateTimeInput', ([], {'attrs': "{'class': 'datepicker-flat'}"}), "(attrs={'class': 'datepicker-flat'})\n", (2635, 2671), False, 'from django.forms.widgets import DateTimeInput, TextInput\n'), ((2692, 2768), 'django.forms.widgets.DateTimeInput', 'DateTimeInput', ([], {'attrs': "{'class': 'datepicker-flat', 'placeholder': 'optional'}"}), "(attrs={'class': 'datepicker-flat', 'placeholder': 'optional'})\n", (2705, 2768), False, 'from django.forms.widgets import DateTimeInput, TextInput\n'), ((2790, 2834), 'django.forms.widgets.TextInput', 'TextInput', ([], {'attrs': "{'placeholder': 'https://'}"}), "(attrs={'placeholder': 'https://'})\n", (2799, 2834), False, 'from django.forms.widgets import DateTimeInput, TextInput\n'), ((2865, 2924), 'django.forms.widgets.TextInput', 'TextInput', ([], {'attrs': "{'placeholder': 'e.g. Café International'}"}), "(attrs={'placeholder': 'e.g. Café International'})\n", (2874, 2924), False, 'from django.forms.widgets import DateTimeInput, TextInput\n'), ((4133, 4163), 'json.dumps', 'json.dumps', (['d'], {'cls': 'JSONEncoder'}), '(d, cls=JSONEncoder)\n', (4143, 4163), False, 'import json\n'), ((1504, 1578), 'django.forms.CharField', 'forms.CharField', ([], {'label': 'question.question_text', 'required': 'question.mandatory'}), '(label=question.question_text, required=question.mandatory)\n', (1519, 1578), False, 'from django import forms\n'), ((1648, 1725), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': 'question.question_text', 'required': 'question.mandatory'}), '(label=question.question_text, required=question.mandatory)\n', (1666, 1725), False, 'from django import forms\n')]
|
from collections import namedtuple
SITE_URL = 'https://rent.591.com.tw'
LIST_ENDPOINT = '{}/home/search/rsList?is_new_list=1&type=1&kind=0&searchtype=1'.format(SITE_URL)
SESSION_ENDPOINT = '{}/?kind=0®ion=6'.format(SITE_URL)
ListRequestMeta = namedtuple('ListRequestMeta', ['id', 'name', 'page'])
DetailRequestMeta = namedtuple('DetailRequestMeta', ['id', 'gps'])
|
[
"collections.namedtuple"
] |
[((248, 301), 'collections.namedtuple', 'namedtuple', (['"""ListRequestMeta"""', "['id', 'name', 'page']"], {}), "('ListRequestMeta', ['id', 'name', 'page'])\n", (258, 301), False, 'from collections import namedtuple\n'), ((323, 369), 'collections.namedtuple', 'namedtuple', (['"""DetailRequestMeta"""', "['id', 'gps']"], {}), "('DetailRequestMeta', ['id', 'gps'])\n", (333, 369), False, 'from collections import namedtuple\n')]
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
import serial
import time
ser = serial.Serial('/dev/ttyUSB0',9600)
ser.write("255\r")
print ("ALL RELAIS: ON!")
time.sleep(10)
ser.write("0\r")
print ("ALL RELAIS: OFF!")
ser.close()
print ("ALL GOOD, EXIT!")
# ...quick and dirty by "<EMAIL>"
|
[
"serial.Serial",
"time.sleep"
] |
[((77, 112), 'serial.Serial', 'serial.Serial', (['"""/dev/ttyUSB0"""', '(9600)'], {}), "('/dev/ttyUSB0', 9600)\n", (90, 112), False, 'import serial\n'), ((157, 171), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (167, 171), False, 'import time\n')]
|
import logging.config
import os
import re
from slack import RTMClient
from slack.errors import SlackApiError
from config.cofiguration import Configuration
from exception.invalid_command import InvalidCommand
from model.message import Message
from parser.command_parser import CommandParser
from service.service_accounts import ServiceAccounts
# log
logging.config.fileConfig(fname='log.conf', disable_existing_loggers=False)
logger = logging.getLogger(__name__)
# slack
slack_bot_token = os.environ.get('SLACK_TOKEN', '')
rtm_client = RTMClient(token=slack_bot_token)
# buffy configuration
config = Configuration()
config.read_env()
service_accounts = ServiceAccounts(config)
@RTMClient.run_on(event='message')
def message_event_handler(**payload):
logger.debug(f"payload : {payload}")
data = payload['data']
sub_type = data.get('subtype', None)
if sub_type is not None:
return
web_client = payload['web_client']
rtm_client = payload['rtm_client']
if 'text' in data:
message = Message(data)
is_mention, message.bot_id = check_mention(message.text)
commands = message.parse_message(is_mention)
logger.debug(f"message {commands}")
_parse_command(web_client, message, commands)
def check_mention(text):
    pattern = re.compile('<@([a-zA-Z0-9]*)>')
match = pattern.match(text)
if match is not None:
return [True, match.group(1)]
else:
return [False, None]
def _parse_command(web_client, message, commands):
try:
parser = CommandParser(service_accounts, commands)
blocks = parser.parse_command()
logger.debug(f"response message: {blocks}")
web_client.chat_postMessage(channel=message.channel, blocks=blocks, thread_ts=message.ts)
except InvalidCommand as e:
logger.error(e)
web_client.chat_postMessage(channel=message.channel, text=e)
except SlackApiError as e:
logger.error(f"Got an error: {e.response['error']}")
if __name__ == '__main__':
logger.info(f'Jira Configuration {config.jira}')
logger.info(f'Kubernetes Configuration {config.kubernetes}')
logger.info(f'RTM Client is started....')
rtm_client.start()
|
[
"slack.RTMClient",
"parser.command_parser.CommandParser",
"service.service_accounts.ServiceAccounts",
"slack.RTMClient.run_on",
"os.environ.get",
"config.cofiguration.Configuration",
"model.message.Message",
"re.compile"
] |
[((492, 525), 'os.environ.get', 'os.environ.get', (['"""SLACK_TOKEN"""', '""""""'], {}), "('SLACK_TOKEN', '')\n", (506, 525), False, 'import os\n'), ((539, 571), 'slack.RTMClient', 'RTMClient', ([], {'token': 'slack_bot_token'}), '(token=slack_bot_token)\n', (548, 571), False, 'from slack import RTMClient\n'), ((604, 619), 'config.cofiguration.Configuration', 'Configuration', ([], {}), '()\n', (617, 619), False, 'from config.cofiguration import Configuration\n'), ((657, 680), 'service.service_accounts.ServiceAccounts', 'ServiceAccounts', (['config'], {}), '(config)\n', (672, 680), False, 'from service.service_accounts import ServiceAccounts\n'), ((684, 717), 'slack.RTMClient.run_on', 'RTMClient.run_on', ([], {'event': '"""message"""'}), "(event='message')\n", (700, 717), False, 'from slack import RTMClient\n'), ((1300, 1331), 're.compile', 're.compile', (['"""<@([a-zA-z0-9]*)>"""'], {}), "('<@([a-zA-z0-9]*)>')\n", (1310, 1331), False, 'import re\n'), ((1029, 1042), 'model.message.Message', 'Message', (['data'], {}), '(data)\n', (1036, 1042), False, 'from model.message import Message\n'), ((1546, 1587), 'parser.command_parser.CommandParser', 'CommandParser', (['service_accounts', 'commands'], {}), '(service_accounts, commands)\n', (1559, 1587), False, 'from parser.command_parser import CommandParser\n')]
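A quick note on the mention handling above: `check_mention` uses `pattern.match`, so a message only counts as a mention when the `<@...>` token is the very first thing in the text. Assuming the module above is importable, the expected behaviour is (the user ID is made up):

check_mention('<@U024BE7LH> restart the build')   # -> [True, 'U024BE7LH']
check_mention('please restart the build')         # -> [False, None]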
|
# pandas1.py
import pandas as pd
weekly_data = {'day':['Monday','Tuesday', 'Wednesday', 'Thursday',
'Friday', 'Saturday', 'Sunday'],
'temp':[40, 33, 42, 31, 41, 40, 30],
'condition':['Sunny','Cloudy','Sunny','Rain','Sunny',
'Cloudy','Rain']
}
df = pd.DataFrame(weekly_data)
print(df)
df1 = df.set_index('day')
print(df1)
|
[
"pandas.DataFrame"
] |
[((336, 361), 'pandas.DataFrame', 'pd.DataFrame', (['weekly_data'], {}), '(weekly_data)\n', (348, 361), True, 'import pandas as pd\n')]
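Setting the index to `day` is what enables label-based lookups on the frame built above; a few follow-up operations (a sketch, with outputs summarised in comments) could be:

print(df1.loc['Monday'])                   # temp 40, condition Sunny
print(df1['temp'].mean())                  # average temperature for the week
print(df1[df1['condition'] == 'Sunny'])    # rows for the sunny days only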
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
#!/usr/bin/env python3
# pyre-strict
import os.path
from unittest.mock import patch
import torchrecipes.text.doc_classification.conf # noqa
from torchrecipes.core.base_train_app import BaseTrainApp
from torchrecipes.core.test_utils.test_base import BaseTrainAppTestCase
from torchrecipes.text.doc_classification.tests.common.assets import (
copy_partial_sst2_dataset,
get_asset_path,
copy_asset,
)
from torchrecipes.utils.test import tempdir
class TestDocClassificationTrainApp(BaseTrainAppTestCase):
def setUp(self) -> None:
super().setUp()
# patch the _hash_check() fn output to make it work with the dummy dataset
self.patcher = patch(
"torchdata.datapipes.iter.util.cacheholder._hash_check", return_value=True
)
self.patcher.start()
def tearDown(self) -> None:
self.patcher.stop()
super().tearDown()
def get_train_app(self, root_dir: str) -> BaseTrainApp:
# copy the asset files into their expected download locations
# note we need to do this anywhere we use hydra overrides
# otherwise we get a `LexerNoViableAltException`
vocab_path = os.path.join(root_dir, "vocab_example.pt")
spm_model_path = os.path.join(root_dir, "spm_example.model")
copy_asset(get_asset_path("vocab_example.pt"), vocab_path)
copy_asset(get_asset_path("spm_example.model"), spm_model_path)
copy_partial_sst2_dataset(root_dir)
app = self.create_app_from_hydra(
config_module="torchrecipes.text.doc_classification.conf",
config_name="train_app",
overrides=[
"module.model.checkpoint=null",
"module.model.freeze_encoder=True",
f"datamodule.dataset.root={root_dir}",
f"trainer.default_root_dir={root_dir}",
"trainer.logger=False",
"trainer.checkpoint_callback=False",
f"transform.transform.vocab_path={vocab_path}",
f"transform.transform.spm_model_path={spm_model_path}",
"transform.num_labels=2",
],
)
self.mock_trainer_params(app)
return app
@tempdir
def test_doc_classification_task_train(self, root_dir: str) -> None:
train_app = self.get_train_app(root_dir=root_dir)
output = train_app.train()
self.assert_train_output(output)
@tempdir
def test_doc_classification_task_test(self, root_dir: str) -> None:
train_app = self.get_train_app(root_dir=root_dir)
train_app.train()
output = train_app.test()
self.assertIsNotNone(output)
|
[
"unittest.mock.patch",
"torchrecipes.text.doc_classification.tests.common.assets.copy_partial_sst2_dataset",
"torchrecipes.text.doc_classification.tests.common.assets.get_asset_path"
] |
[((864, 950), 'unittest.mock.patch', 'patch', (['"""torchdata.datapipes.iter.util.cacheholder._hash_check"""'], {'return_value': '(True)'}), "('torchdata.datapipes.iter.util.cacheholder._hash_check', return_value\n =True)\n", (869, 950), False, 'from unittest.mock import patch\n'), ((1619, 1654), 'torchrecipes.text.doc_classification.tests.common.assets.copy_partial_sst2_dataset', 'copy_partial_sst2_dataset', (['root_dir'], {}), '(root_dir)\n', (1644, 1654), False, 'from torchrecipes.text.doc_classification.tests.common.assets import copy_partial_sst2_dataset, get_asset_path, copy_asset\n'), ((1491, 1525), 'torchrecipes.text.doc_classification.tests.common.assets.get_asset_path', 'get_asset_path', (['"""vocab_example.pt"""'], {}), "('vocab_example.pt')\n", (1505, 1525), False, 'from torchrecipes.text.doc_classification.tests.common.assets import copy_partial_sst2_dataset, get_asset_path, copy_asset\n'), ((1558, 1593), 'torchrecipes.text.doc_classification.tests.common.assets.get_asset_path', 'get_asset_path', (['"""spm_example.model"""'], {}), "('spm_example.model')\n", (1572, 1593), False, 'from torchrecipes.text.doc_classification.tests.common.assets import copy_partial_sst2_dataset, get_asset_path, copy_asset\n')]
|
# isort: skip_file
# pylint: disable=unused-argument,reimported
from dagster import DependencyDefinition, GraphDefinition, job, op
@op
def my_op():
pass
# start_pipeline_example_marker
@op
def return_one(context):
return 1
@op
def add_one(context, number: int):
return number + 1
@job
def one_plus_one():
add_one(return_one())
# end_pipeline_example_marker
# start_multiple_usage_pipeline
@job
def multiple_usage():
add_one(add_one(return_one()))
# end_multiple_usage_pipeline
# start_alias_pipeline
@job
def alias():
add_one.alias("second_addition")(add_one(return_one()))
# end_alias_pipeline
# start_tag_pipeline
@job
def tagged_add_one():
add_one.tag({"my_tag": "my_value"})(add_one(return_one()))
# end_tag_pipeline
# start_pipeline_definition_marker
one_plus_one_from_constructor = GraphDefinition(
name="one_plus_one",
node_defs=[return_one, add_one],
dependencies={"add_one": {"number": DependencyDefinition("return_one")}},
).to_job()
# end_pipeline_definition_marker
# start_tags_pipeline
@job(tags={"my_tag": "my_value"})
def my_tags_job():
my_op()
# end_tags_pipeline
def do_something(x):
return x
# start_top_level_input_graph
from dagster import graph, op
@op
def op_with_input(x):
return do_something(x)
@graph
def wires_input(x):
op_with_input(x)
# end_top_level_input_graph
# start_top_level_input_job
the_job = wires_input.to_job(input_values={"x": 5})
# end_top_level_input_job
# start_execute_in_process_input
graph_result = wires_input.execute_in_process(input_values={"x": 5})
job_result = the_job.execute_in_process(
input_values={"x": 6}
) # Overrides existing input value
# end_execute_in_process_input
|
[
"dagster.job",
"dagster.DependencyDefinition"
] |
[((1066, 1098), 'dagster.job', 'job', ([], {'tags': "{'my_tag': 'my_value'}"}), "(tags={'my_tag': 'my_value'})\n", (1069, 1098), False, 'from dagster import DependencyDefinition, GraphDefinition, job, op\n'), ((959, 993), 'dagster.DependencyDefinition', 'DependencyDefinition', (['"""return_one"""'], {}), "('return_one')\n", (979, 993), False, 'from dagster import DependencyDefinition, GraphDefinition, job, op\n')]
|
import socket as socket
import time as t
import random as r
# creates socket object
def receve_MSG(IP="",port=1337,MSG_Size=1024,TEXT_Decode='ascii',ExpectedFileType="TXT"):
s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
if(IP==""):
IP = socket.gethostname()
s.connect((IP, port))
tm = s.recv(MSG_Size) # msg can only be x bytes long
if(ExpectedFileType == "TXT"):
RecevedText = tm.decode(TEXT_Decode)
if(ExpectedFileType == "IMG"):
RecevedText = tm
s.close()
return RecevedText
def WriteToFile(toBewritten,FileName="out",FileType=".txt",WriteType="w+"):
FileName = FileName+FileType
text_file = open(FileName, WriteType)
text_file.write(toBewritten)
text_file.close()
x=0
while(1==1):
print("Mesage Receved")
#Only can revece 9Mb of file
WriteToFile(receve_MSG("",1337,9999999,'ascii',"IMG"),"Output",".jpg","wb+")
t.sleep(1)
x=x+1
print(x)
|
[
"socket.gethostname",
"socket.socket",
"time.sleep"
] |
[((191, 240), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (204, 240), True, 'import socket as socket\n'), ((944, 954), 'time.sleep', 't.sleep', (['(1)'], {}), '(1)\n', (951, 954), True, 'import time as t\n'), ((271, 291), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (289, 291), True, 'import socket as socket\n')]
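Two caveats about the receiver above: a single `recv()` may return fewer bytes than the peer sent, so large images can arrive truncated, and `RecevedText` is only assigned for the "TXT" and "IMG" types. For context, a minimal sending counterpart could look like the sketch below; the image file name is an assumption, while the host and port 1337 follow the receiver's defaults:

import socket

srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.bind((socket.gethostname(), 1337))
srv.listen(1)
conn, addr = srv.accept()
with open("photo.jpg", "rb") as f:   # hypothetical image to transmit
    conn.sendall(f.read())
conn.close()
srv.close()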
|
from models.sample import SampleModel
def build_model(args):
if args.model == 'sample':
return SampleModel(args)
else :
raise NotImplementedError(f"check model name : {args.model}")
|
[
"models.sample.SampleModel"
] |
[((110, 127), 'models.sample.SampleModel', 'SampleModel', (['args'], {}), '(args)\n', (121, 127), False, 'from models.sample import SampleModel\n')]
|
"""Check the schemas."""
from assertionlib import assertion
from nanoqm.workflows.input_validation import process_input
from .utilsTest import PATH_TEST
def test_input_validation():
"""Test the input validation schema."""
schemas = ("absorption_spectrum", "derivative_couplings")
paths = [PATH_TEST / x for x in
["input_test_absorption_spectrum.yml", "input_fast_test_derivative_couplings.yml"]]
for s, p in zip(schemas, paths):
d = process_input(p, s)
assertion.isinstance(d, dict)
|
[
"assertionlib.assertion.isinstance",
"nanoqm.workflows.input_validation.process_input"
] |
[((474, 493), 'nanoqm.workflows.input_validation.process_input', 'process_input', (['p', 's'], {}), '(p, s)\n', (487, 493), False, 'from nanoqm.workflows.input_validation import process_input\n'), ((502, 531), 'assertionlib.assertion.isinstance', 'assertion.isinstance', (['d', 'dict'], {}), '(d, dict)\n', (522, 531), False, 'from assertionlib import assertion\n')]
|
"""
selenium script to scrape google map POI with rotating IP proxy
@author: <NAME>
@date: 09/02/2019
"""
import os,time
from helper import CoordBound
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as ec
from selenium.webdriver.common.proxy import Proxy, ProxyType
from selenium.common.exceptions import TimeoutException, JavascriptException
# BIG_BOUND = [(38.896211, -77.032005), (38.902540, -77.018926)] # downtown test
BIG_BOUND = [(38.875, -77.072), (38.918, -77.002)]
PROXY = '192.168.3.11:13010'
# subdivide the bounding box into smaller grids
coords = CoordBound(BIG_BOUND[0][0], BIG_BOUND[0][1], BIG_BOUND[1][0], BIG_BOUND[1][1])
grids = coords.dividify()
print("total number of grids: {}".format(len(grids)))
# print(grids)
# chrome webdriver
chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument('--proxy-server=%s' % PROXY)
# start
driver = webdriver.Chrome('./chromedriver', chrome_options=chrome_options)
driver.get('localhost:5000')
time.sleep(2)
index = 0
# driver.get('http://whatismyipaddress.com')
# while (index < 10):
# driver.get('http://whatismyipaddress.com')
# # driver.execute_script("window.open('http://whatismyipaddress.com');")
# time.sleep(2)
# driver.quit()
# driver = webdriver.Chrome('./chromedriver', chrome_options=chrome_options)
# driver.switch_to_window(driver.window_handles[0])
# for grid in grids[162:]:
while index < len(grids):
grid = grids[index]
print("scarping index: {}\ngrid : {}".format(index, grid))
if index > 0 and index % 6 == 0:
# restart driver to change IP
driver.quit()
driver = webdriver.Chrome('./chromedriver', chrome_options=chrome_options)
driver.get('localhost:5000')
time.sleep(2)
# call it
try:
driver.execute_script('continueSearch({},{},{},{});'.format(
grid.sw_lat, grid.sw_lng, grid.ne_lat, grid.ne_lng
))
wait = WebDriverWait(driver, 180)
out = wait.until(ec.text_to_be_present_in_element((By.ID, 'soutput'), '{},{},{},{}: done'.format(
grid.sw_lat, grid.sw_lng, grid.ne_lat, grid.ne_lng
)))
print("done grid index {}".format(index))
index += 1
except TimeoutException:
continue
except JavascriptException:
# page not loaded properly
continue
|
[
"helper.CoordBound",
"time.sleep",
"selenium.webdriver.ChromeOptions",
"selenium.webdriver.Chrome",
"selenium.webdriver.support.ui.WebDriverWait"
] |
[((584, 662), 'helper.CoordBound', 'CoordBound', (['BIG_BOUND[0][0]', 'BIG_BOUND[0][1]', 'BIG_BOUND[1][0]', 'BIG_BOUND[1][1]'], {}), '(BIG_BOUND[0][0], BIG_BOUND[0][1], BIG_BOUND[1][0], BIG_BOUND[1][1])\n', (594, 662), False, 'from helper import CoordBound\n'), ((795, 820), 'selenium.webdriver.ChromeOptions', 'webdriver.ChromeOptions', ([], {}), '()\n', (818, 820), False, 'from selenium import webdriver\n'), ((896, 961), 'selenium.webdriver.Chrome', 'webdriver.Chrome', (['"""./chromedriver"""'], {'chrome_options': 'chrome_options'}), "('./chromedriver', chrome_options=chrome_options)\n", (912, 961), False, 'from selenium import webdriver\n'), ((991, 1004), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1001, 1004), False, 'import os, time\n'), ((1641, 1706), 'selenium.webdriver.Chrome', 'webdriver.Chrome', (['"""./chromedriver"""'], {'chrome_options': 'chrome_options'}), "('./chromedriver', chrome_options=chrome_options)\n", (1657, 1706), False, 'from selenium import webdriver\n'), ((1752, 1765), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1762, 1765), False, 'import os, time\n'), ((1948, 1974), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['driver', '(180)'], {}), '(driver, 180)\n', (1961, 1974), False, 'from selenium.webdriver.support.ui import WebDriverWait\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Oct 6 20:17:09 2018
@author: tyler
"""
import numpy as np
import sys
#%%
def karger(G,vertex_label,vertex_degree,size_V):
size_V = len(vertex_label)
#N = int(size_V*(1-1/np.sqrt(2)))
iteration_schedule = [size_V-2]
for N in iteration_schedule:
for n in range(N):
# if n%1000==0: print('iteration:',n)
# uniformly at random pick e = (v0,v1)
cs0 = np.cumsum(vertex_degree)
rand_idx0 = np.random.randint(cs0[-1])
e0 = np.searchsorted(cs0,rand_idx0,side='right')
#cs1 = np.cumsum(np.append(G[e0,e0:],G[:e0,e0]))
cs1 = np.cumsum(G[e0])
rand_idx1 = np.random.randint(vertex_degree[e0])
e1 = np.searchsorted(cs1,rand_idx1,side='right')
if(G[e0,e1] == 0):
print('picked empty edge')
v0 = e0
v1 = e1
# bring edges from v1 into v0
# add new edges to v0
G[v0] += G[v1]
G[:,v0] += G[v1]
new_edge_count = vertex_degree[v1] - G[v0,v0] #- G[v1,v1]
# delete old edges from v1
G[v1] = 0
G[:,v1] = 0
# delete any created loops
G[v0,v0] = 0
np.putmask(vertex_label,vertex_label==v1,v0)
vertex_degree[v0] += new_edge_count
vertex_degree[v1] = 0
nz = np.nonzero(vertex_degree)[0]
if(len(nz) != 2):
print('did not find well defined cut')
SN0 = np.where(vertex_label == nz[0])[0]
SN1 = np.where(vertex_label == nz[1])[0]
if len(SN0) + len(SN1) != size_V:
print('lost nodes')
if len(SN0) < len(SN1):
cut = SN0
else:
cut = SN1
return cut,vertex_degree[nz[0]]
#%%
#python p1.py z N ID
z = sys.argv[1] # 0,1,2,3
N = int(sys.argv[2]) # integer number of runs
ID = sys.argv[3] # output file id
#%%
E_raw = np.loadtxt('b'+str(z)+'.in',dtype='int')
min_E = np.min(E_raw)
E = E_raw - min_E
size_V = np.max(E)+1
G = np.zeros((size_V,size_V),dtype='int64')
vertex_degree = np.zeros(size_V,dtype='int64')
for e0,e1 in E:
vertex_degree[e0] += 1;
vertex_degree[e1] += 1;
G[min(e0,e1),max(e0,e1)] += 1;
G[max(e0,e1),min(e0,e1)] += 1;
vertex_label = np.arange(size_V,dtype='int64') # gives index of supervertex containg vertex
#%%
f=open('b'+z+'/cuts_'+ID+'.dat','ab')
g=open('b'+z+'/cut_sizes_'+ID+'.dat','ab')
#
for n in range(N):
if n%500 == 0:
print(ID+'_trial :', n,' of ',N)
vl,cut_size = karger(np.copy(G),np.copy(vertex_label),np.copy(vertex_degree),size_V)
np.savetxt(f,[vl],fmt='%d',delimiter=',')
np.savetxt(g,[cut_size],fmt='%d',delimiter=',')
f.close()
g.close()
|
[
"numpy.copy",
"numpy.savetxt",
"numpy.zeros",
"numpy.searchsorted",
"numpy.nonzero",
"numpy.cumsum",
"numpy.min",
"numpy.max",
"numpy.arange",
"numpy.where",
"numpy.random.randint",
"numpy.putmask"
] |
[((2209, 2222), 'numpy.min', 'np.min', (['E_raw'], {}), '(E_raw)\n', (2215, 2222), True, 'import numpy as np\n'), ((2267, 2308), 'numpy.zeros', 'np.zeros', (['(size_V, size_V)'], {'dtype': '"""int64"""'}), "((size_V, size_V), dtype='int64')\n", (2275, 2308), True, 'import numpy as np\n'), ((2323, 2354), 'numpy.zeros', 'np.zeros', (['size_V'], {'dtype': '"""int64"""'}), "(size_V, dtype='int64')\n", (2331, 2354), True, 'import numpy as np\n'), ((2513, 2545), 'numpy.arange', 'np.arange', (['size_V'], {'dtype': '"""int64"""'}), "(size_V, dtype='int64')\n", (2522, 2545), True, 'import numpy as np\n'), ((2250, 2259), 'numpy.max', 'np.max', (['E'], {}), '(E)\n', (2256, 2259), True, 'import numpy as np\n'), ((2855, 2899), 'numpy.savetxt', 'np.savetxt', (['f', '[vl]'], {'fmt': '"""%d"""', 'delimiter': '""","""'}), "(f, [vl], fmt='%d', delimiter=',')\n", (2865, 2899), True, 'import numpy as np\n'), ((2901, 2951), 'numpy.savetxt', 'np.savetxt', (['g', '[cut_size]'], {'fmt': '"""%d"""', 'delimiter': '""","""'}), "(g, [cut_size], fmt='%d', delimiter=',')\n", (2911, 2951), True, 'import numpy as np\n'), ((1615, 1640), 'numpy.nonzero', 'np.nonzero', (['vertex_degree'], {}), '(vertex_degree)\n', (1625, 1640), True, 'import numpy as np\n'), ((1733, 1764), 'numpy.where', 'np.where', (['(vertex_label == nz[0])'], {}), '(vertex_label == nz[0])\n', (1741, 1764), True, 'import numpy as np\n'), ((1778, 1809), 'numpy.where', 'np.where', (['(vertex_label == nz[1])'], {}), '(vertex_label == nz[1])\n', (1786, 1809), True, 'import numpy as np\n'), ((2783, 2793), 'numpy.copy', 'np.copy', (['G'], {}), '(G)\n', (2790, 2793), True, 'import numpy as np\n'), ((2794, 2815), 'numpy.copy', 'np.copy', (['vertex_label'], {}), '(vertex_label)\n', (2801, 2815), True, 'import numpy as np\n'), ((2816, 2838), 'numpy.copy', 'np.copy', (['vertex_degree'], {}), '(vertex_degree)\n', (2823, 2838), True, 'import numpy as np\n'), ((498, 522), 'numpy.cumsum', 'np.cumsum', (['vertex_degree'], {}), '(vertex_degree)\n', (507, 522), True, 'import numpy as np\n'), ((547, 573), 'numpy.random.randint', 'np.random.randint', (['cs0[-1]'], {}), '(cs0[-1])\n', (564, 573), True, 'import numpy as np\n'), ((591, 636), 'numpy.searchsorted', 'np.searchsorted', (['cs0', 'rand_idx0'], {'side': '"""right"""'}), "(cs0, rand_idx0, side='right')\n", (606, 636), True, 'import numpy as np\n'), ((727, 743), 'numpy.cumsum', 'np.cumsum', (['G[e0]'], {}), '(G[e0])\n', (736, 743), True, 'import numpy as np\n'), ((768, 804), 'numpy.random.randint', 'np.random.randint', (['vertex_degree[e0]'], {}), '(vertex_degree[e0])\n', (785, 804), True, 'import numpy as np\n'), ((822, 867), 'numpy.searchsorted', 'np.searchsorted', (['cs1', 'rand_idx1'], {'side': '"""right"""'}), "(cs1, rand_idx1, side='right')\n", (837, 867), True, 'import numpy as np\n'), ((1435, 1483), 'numpy.putmask', 'np.putmask', (['vertex_label', '(vertex_label == v1)', 'v0'], {}), '(vertex_label, vertex_label == v1, v0)\n', (1445, 1483), True, 'import numpy as np\n')]
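A single round of Karger's random contractions returns a minimum cut only with probability at least 2/(n(n-1)), which is why the script above runs `karger` N independent times and logs every trial. If only the best cut is needed, a driver that keeps the running minimum instead of writing every result could look like this sketch; `num_trials` is an assumption rather than a value taken from the script:

best_cut, best_size = None, np.inf
num_trials = int(size_V * size_V * np.log(size_V))   # common repetition count for high confidence
for _ in range(num_trials):
    cut, cut_size = karger(np.copy(G), np.copy(vertex_label), np.copy(vertex_degree), size_V)
    if cut_size < best_size:
        best_cut, best_size = cut, cut_size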
|
from itertools import accumulate
# Initial Permutation
IP = [
57, 49, 41, 33, 25, 17, 9, 1,
59, 51, 43, 35, 27, 19, 11, 3,
61, 53, 45, 37, 29, 21, 13, 5,
63, 55, 47, 39, 31, 23, 15, 7,
56, 48, 40, 32, 24, 16, 8, 0,
58, 50, 42, 34, 26, 18, 10, 2,
60, 52, 44, 36, 28, 20, 12, 4,
62, 54, 46, 38, 30, 22, 14, 6
]
# Final Permutation
FP = [
39, 7, 47, 15, 55, 23, 63, 31,
38, 6, 46, 14, 54, 22, 62, 30,
37, 5, 45, 13, 53, 21, 61, 29,
36, 4, 44, 12, 52, 20, 60, 28,
35, 3, 43, 11, 51, 19, 59, 27,
34, 2, 42, 10, 50, 18, 58, 26,
33, 1, 41, 9, 49, 17, 57, 25,
32, 0, 40, 8, 48, 16, 56, 24
]
# Expansion Function: from 32 bit to 48 bit
E = [
31, 0, 1, 2, 3, 4,
3, 4, 5, 6, 7, 8,
7, 8, 9, 10, 11, 12,
11, 12, 13, 14, 15, 16,
15, 16, 17, 18, 19, 20,
19, 20, 21, 22, 23, 24,
23, 24, 25, 26, 27, 28,
27, 28, 29, 30, 31, 0
]
# Permutation
P = [
15, 6, 19, 20, 28, 11, 27, 16,
0, 14, 22, 25, 4, 17, 30, 9,
1, 7, 23, 13, 31, 26, 2, 8,
18, 12, 29, 5, 21, 10, 3, 24
]
PC1_LEFT = [
56, 48, 40, 32, 24, 16, 8,
0, 57, 49, 41, 33, 25, 17,
9, 1, 58, 50, 42, 34, 26,
18, 10, 2, 59, 51, 43, 35,
]
PC1_RIGHT = [
62, 54, 46, 38, 30, 22, 14,
6, 61, 53, 45, 37, 29, 21,
13, 5, 60, 52, 44, 36, 28,
20, 12, 4, 27, 19, 11, 3
]
PC2 = [
13, 16, 10, 23, 0, 4,
2, 27, 14, 5, 20, 9,
22, 18, 11, 3, 25, 7,
15, 6, 26, 19, 12, 1,
40, 51, 30, 36, 46, 54,
29, 39, 50, 44, 32, 47,
43, 48, 38, 55, 33, 52,
45, 41, 49, 35, 28, 31
]
Rotations = [1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1]
# Substitution Boxes
SBox = [
# S1
[
14, 4, 13, 1, 2, 15, 11, 8, 3, 10, 6, 12, 5, 9, 0, 7,
0, 15, 7, 4, 14, 2, 13, 1, 10, 6, 12, 11, 9, 5, 3, 8,
4, 1, 14, 8, 13, 6, 2, 11, 15, 12, 9, 7, 3, 10, 5, 0,
15, 12, 8, 2, 4, 9, 1, 7, 5, 11, 3, 14, 10, 0, 6, 13
],
# S2
[
15, 1, 8, 14, 6, 11, 3, 4, 9, 7, 2, 13, 12, 0, 5, 10,
3, 13, 4, 7, 15, 2, 8, 14, 12, 0, 1, 10, 6, 9, 11, 5,
0, 14, 7, 11, 10, 4, 13, 1, 5, 8, 12, 6, 9, 3, 2, 15,
13, 8, 10, 1, 3, 15, 4, 2, 11, 6, 7, 12, 0, 5, 14, 9
],
# S3
[
10, 0, 9, 14, 6, 3, 15, 5, 1, 13, 12, 7, 11, 4, 2, 8,
13, 7, 0, 9, 3, 4, 6, 10, 2, 8, 5, 14, 12, 11, 15, 1,
13, 6, 4, 9, 8, 15, 3, 0, 11, 1, 2, 12, 5, 10, 14, 7,
1, 10, 13, 0, 6, 9, 8, 7, 4, 15, 14, 3, 11, 5, 2, 12
],
# S4
[
7, 13, 14, 3, 0, 6, 9, 10, 1, 2, 8, 5, 11, 12, 4, 15,
13, 8, 11, 5, 6, 15, 0, 3, 4, 7, 2, 12, 1, 10, 14, 9,
10, 6, 9, 0, 12, 11, 7, 13, 15, 1, 3, 14, 5, 2, 8, 4,
3, 15, 0, 6, 10, 1, 13, 8, 9, 4, 5, 11, 12, 7, 2, 14
],
# S5
[
2, 12, 4, 1, 7, 10, 11, 6, 8, 5, 3, 15, 13, 0, 14, 9,
14, 11, 2, 12, 4, 7, 13, 1, 5, 0, 15, 10, 3, 9, 8, 6,
4, 2, 1, 11, 10, 13, 7, 8, 15, 9, 12, 5, 6, 3, 0, 14,
11, 8, 12, 7, 1, 14, 2, 13, 6, 15, 0, 9, 10, 4, 5, 3
],
# S6
[
12, 1, 10, 15, 9, 2, 6, 8, 0, 13, 3, 4, 14, 7, 5, 11,
10, 15, 4, 2, 7, 12, 9, 5, 6, 1, 13, 14, 0, 11, 3, 8,
9, 14, 15, 5, 2, 8, 12, 3, 7, 0, 4, 10, 1, 13, 11, 6,
4, 3, 2, 12, 9, 5, 15, 10, 11, 14, 1, 7, 6, 0, 8, 13
],
# S7
[
4, 11, 2, 14, 15, 0, 8, 13, 3, 12, 9, 7, 5, 10, 6, 1,
13, 0, 11, 7, 4, 9, 1, 10, 14, 3, 5, 12, 2, 15, 8, 6,
1, 4, 11, 13, 12, 3, 7, 14, 10, 15, 6, 8, 0, 5, 9, 2,
6, 11, 13, 8, 1, 4, 10, 7, 9, 5, 0, 15, 14, 2, 3, 12
],
# S8
[
13, 2, 8, 4, 6, 15, 11, 1, 10, 9, 3, 14, 5, 0, 12, 7,
1, 15, 13, 8, 10, 3, 7, 4, 12, 5, 6, 11, 0, 14, 9, 2,
7, 11, 4, 1, 9, 12, 14, 2, 0, 6, 10, 13, 15, 3, 5, 8,
2, 1, 14, 7, 4, 10, 8, 13, 15, 12, 9, 0, 3, 5, 6, 11
]
]
def DES(decrypt, MD, keys):
sub_keys = generate_sub_keys(keys)
data = permutation(MD, IP)
left = data[:32]
right = data[32:]
if decrypt:
sub_keys = reversed(sub_keys)
for sub_key in sub_keys:
left, right = right, xor(left, F(right, sub_key))
data = permutation(right + left, FP)
return data
def F(c, key):
new_c = expansion(c)
mixed_data = key_mixing(new_c, key)
s_box_result = substitution(mixed_data)
return permutation(s_box_result)
def generate_sub_keys(keys):
left = permutation(keys, PC1_LEFT)
right = permutation(keys, PC1_RIGHT)
sub_keys = []
for i in accumulate(Rotations):
sub_keys.append(permutation(left[i:] + left[:i] + right[i:] + right[:i], PC2))
return sub_keys
def expansion(c):
return permutation(c, E)
def permutation(data, table=P):
return [data[i] for i in table]
def key_mixing(data, key):
return xor(data, key)
def xor(data1, data2):
return [d1 ^ d2 for d1, d2 in zip(data1, data2)]
def substitution(data):
'''
data: 48 bit
'''
box_size = 6
boxes = [data[i:i + box_size] for i in range(0, 48, box_size)]
result = []
for box, s_box in zip(boxes, SBox):
outer = (box[0] << 1) + box[5]
inner = (box[1] << 3) + (box[2] << 2) + (box [3] << 1) + box[4]
value = s_box[(outer << 4) + inner]
for i in range(3, -1, -1):
result.append((value & 2**i) >> i)
return result
def string_to_bitlist(data):
result = []
for ch in data:
for i in range(7, -1, -1):
result.append(1 if ord(ch) & (1 << i) != 0 else 0)
return result
def hex_to_bitlist(data):
result = []
for ch in data:
int(ch, 16)
for i in range(3, -1, -1):
result.append(1 if int(ch, 16) & (1 << i) != 0 else 0)
return result
def bitlist_to_hex(data):
result = []
buf = 0
for i, value in enumerate(data):
buf = (buf << 1) + value
if i % 4 == 3:
result.append(hex(buf)[2:])
buf = 0
return ''.join(result)
def binary_to_bitlist(data):
result = []
for ch in data:
for i in range(7, -1, -1):
result.append(1 if ch & (1 << i) != 0 else 0)
return result
def bitlist_to_binary(data):
result = []
buf = 0
for i, value in enumerate(data):
buf = (buf << 1) + value
if i % 8 == 7:
result.append(buf)
buf = 0
return bytes(result)
def des_with_file(decrypt, in_file, out_file, key):
with open(in_file, 'rb') as f:
data = f.read()
result = DES(decrypt, binary_to_bitlist(data), string_to_bitlist(key))
with open(out_file, 'wb') as f:
f.write(bitlist_to_binary(result))
def encryption(in_file, out_file, key):
des_with_file(False, in_file, out_file, key)
def decryption(in_file, out_file, key):
des_with_file(True, in_file, out_file, key)
def test():
key = string_to_bitlist('TESTTEST')
# plain = string_to_bitlist('DESTESTT')
plain = hex_to_bitlist('4445535445535454') # DESTESTT
encrypt = hex_to_bitlist('01ecf0428c98db57')
data = DES(False, plain, key)
print(encrypt == data)
new_data = DES(True, data, key)
print(new_data == plain)
if __name__ == '__main__':
from sys import argv
modes = {
'e': encryption,
'd': decryption
}
if argv[1] not in modes:
print('mode must be \'e\' or \'d\'')
else:
modes[argv[1]](*argv[2:])
|
[
"itertools.accumulate"
] |
[((4533, 4554), 'itertools.accumulate', 'accumulate', (['Rotations'], {}), '(Rotations)\n', (4543, 4554), False, 'from itertools import accumulate\n')]
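Mirroring the built-in `test()` above, a single-block round trip looks like the sketch below. Note that `des_with_file` pushes the whole file through one `DES` call, so in practice the input is limited to a single 64-bit (8-byte) block; there is no padding or block chaining in this module:

key = string_to_bitlist('TESTTEST')                           # 8 ASCII chars -> 64-bit key
cipher = DES(False, hex_to_bitlist('4445535445535454'), key)  # plaintext 'DESTESTT'
print(bitlist_to_hex(cipher))                                 # 01ecf0428c98db57
plain = DES(True, cipher, key)
print(bitlist_to_hex(plain))                                  # 4445535445535454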
|
#!/usr/bin/python
# -*- coding:utf8 -*-
import os, re
import collections
from pylogger import getLogger
from package_defines import PackageInfo
from deprecated.ast_pyclass_parser import ConanFileParserWarapper
class ConanPkgAnalyzer(object):
    '''Conan package analyzer'''
def __init__(self, scanpath, type):
self._scanpath = scanpath
self._channel = ""
self._pkgPattern = re.compile(r"(\w+)_(\w+)_(\w+)")
self._pkgInfoMap = collections.OrderedDict()
self._type = type
def analyse(self):
channelTxtPath = self._scanpath + "/channel.txt"
if (not os.path.exists(channelTxtPath)) :
getLogger().fatal("No channel.txt file found")
return False
with open(channelTxtPath, "r") as channelTxtHdr :
self._channel = channelTxtHdr.readline()
self.doScan(self._scanpath, self._channel, self._type)
def getPkgName(self, dirPath):
pos = len(self._scanpath)
subPath = dirPath[pos + 1:]
pkgName = subPath.split("\\")[0]
return pkgName
def parseType(self, pkgName, default):
if (default != "auto") :
return default
if (pkgName.find("_plat_") != -1) :
return "platform"
elif (pkgName.find("_msmp_") != -1):
return "msmp"
else:
return "unknown"
def doScan(self, scanPath, pkgChannel, type):
        'Main routine: walk the directory, collect the qualifying conanfile.py files, and prepare them for comparison'
pkgMap = {}
for dir, subdirs, fileList in os.walk(scanPath):
# print "directory = %s | subdir = %s | filename = %s" %(dir, subdirs, fs)
if (dir == scanPath):
continue
pkgName = self.getPkgName(dir)
packgeUserType = self.parseType(pkgName, self._type)
if ("ZERO_CHECK.dir" == pkgName or "CMakeFiles" == pkgName):
continue
            # handle detection of header-only modules
if (None == pkgMap.get(pkgName)):
pkgMap[pkgName] = False # 防止反复重置为False
for fname in fileList:
if ("conanfile.py" != fname) :
continue
fullFileName = dir + "/" + fname
parser = ConanFileParserWarapper(fullFileName)
parser.parse()
packageInfo = PackageInfo()
name = parser.getAttribute("name")
packageInfo.packageName = name
packageInfo.channel = self._channel
packageInfo.version = parser.getAttribute("version")
packageInfo.packageFullName = pkgName
packageInfo.user = packgeUserType
if (None == name) :
getLogger().error("%s parse version failed!" %fullFileName)
continue
self._pkgInfoMap[name] = packageInfo
def getResult(self):
return self._pkgInfoMap
|
[
"deprecated.ast_pyclass_parser.ConanFileParserWarapper",
"os.walk",
"os.path.exists",
"package_defines.PackageInfo",
"collections.OrderedDict",
"pylogger.getLogger",
"re.compile"
] |
[((392, 426), 're.compile', 're.compile', (['"""(\\\\w+)_(\\\\w+)_(\\\\w+)"""'], {}), "('(\\\\w+)_(\\\\w+)_(\\\\w+)')\n", (402, 426), False, 'import os, re\n'), ((452, 477), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (475, 477), False, 'import collections\n'), ((1498, 1515), 'os.walk', 'os.walk', (['scanPath'], {}), '(scanPath)\n', (1505, 1515), False, 'import os, re\n'), ((601, 631), 'os.path.exists', 'os.path.exists', (['channelTxtPath'], {}), '(channelTxtPath)\n', (615, 631), False, 'import os, re\n'), ((2189, 2226), 'deprecated.ast_pyclass_parser.ConanFileParserWarapper', 'ConanFileParserWarapper', (['fullFileName'], {}), '(fullFileName)\n', (2212, 2226), False, 'from deprecated.ast_pyclass_parser import ConanFileParserWarapper\n'), ((2288, 2301), 'package_defines.PackageInfo', 'PackageInfo', ([], {}), '()\n', (2299, 2301), False, 'from package_defines import PackageInfo\n'), ((647, 658), 'pylogger.getLogger', 'getLogger', ([], {}), '()\n', (656, 658), False, 'from pylogger import getLogger\n'), ((2681, 2692), 'pylogger.getLogger', 'getLogger', ([], {}), '()\n', (2690, 2692), False, 'from pylogger import getLogger\n')]
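Typical use of the analyzer above is to point it at a checkout that contains a channel.txt plus one conanfile.py per package directory; the scan path below is an assumption:

analyzer = ConanPkgAnalyzer(r"D:\workspace\packages", "auto")
analyzer.analyse()
for name, info in analyzer.getResult().items():
    print(name, info.version, info.channel, info.user)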
|
#replace MySQLdb with pymysql
import pymysql
pymysql.install_as_MySQLdb()
|
[
"pymysql.install_as_MySQLdb"
] |
[((47, 75), 'pymysql.install_as_MySQLdb', 'pymysql.install_as_MySQLdb', ([], {}), '()\n', (73, 75), False, 'import pymysql\n')]
|
# Generated by Django 2.1 on 2021-01-08 08:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0010_auto_20210106_1426'),
]
operations = [
migrations.AlterField(
model_name='collaborater',
name='Society',
field=models.CharField(max_length=200, verbose_name='société'),
),
]
|
[
"django.db.models.CharField"
] |
[((337, 393), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'verbose_name': '"""société"""'}), "(max_length=200, verbose_name='société')\n", (353, 393), False, 'from django.db import migrations, models\n')]
|
import os
import csv
import itertools
import datetime
import string
import subprocess
from email.message import EmailMessage
import smtplib
import ssl
import zipfile
import xlwings as xw
import xlsxwriter
"""
This module provides convenient objects for pulling,
cleaning, and writing data between Excel and Python.
It includes functions for common tasks needed to
locate and timestamp Excel file names.
"""
def remove_files(path, exclude=None):
"""
:param path: Directory containing folders to be purged
:param exclude: Folders to be left unmodified
"""
with os.scandir(path) as iter_dir:
for subdir in iter_dir:
            if os.DirEntry.is_dir(subdir) and (not exclude or all(x not in subdir.name for x in exclude)):
with os.scandir(os.path.join(path, subdir)) as iter_subdir:
for item in iter_subdir:
os.remove(os.path.join(path, subdir, item))
return
def mod_date(foo):
"""
:param foo: path or path-like object representing a file
:return: file modification date
Requires Python version 3.6+ to accept path-like objects.
"""
print(foo)
if foo == os.path.split(foo)[1]:
t = os.path.getmtime(foo)
date = datetime.datetime.fromtimestamp(t)
else:
date = mod_date(os.path.split(foo)[1])
return date
def find_file(dir_path, keywords):
"""
Searches for the newest version of a given file.
:param dir_path: directory containing the desired file
    :param keywords: string of keywords expected in the desired file's name
:return: path of the desired file
"""
dir_list = os.listdir(dir_path)
if isinstance(keywords, str):
keywords = keywords.split()
matches = list()
initial_dir = os.getcwd()
os.chdir(dir_path)
for item in dir_list:
while "." in item:
loc = item.find(".")
if loc == len(item) - 1:
item = item[:-1]
else:
item = item[:loc] + item[loc + 1:]
if os.path.isfile(os.path.join(dir_path, item)):
item_list = item.split()
if all(component in item_list for component in keywords):
matches.append(item)
if not matches:
print(f"There is no file containing keywords '{keywords}' in"
f"{dir_path}.")
else:
matches.sort(key=mod_date)
os.chdir(initial_dir)
return os.path.join(dir_path, matches[-1])
def empty_check(lst):
"""
Determines whether the nested n-layer list contains only empty
and/or None-type items.
:param lst: any list, integer, float, or string
:return: True if the nested list is (a) a list and (b) contains
only empty lists, type objects, or None; otherwise, False
"""
try:
if not lst:
return True
if (isinstance(lst, str) or isinstance(lst, int) or
isinstance(lst, float)):
return False
else:
return all(map(empty_check, lst))
except TypeError:
# This indicates that lst contains None as an object
return True
def terminate_excel():
"""
Terminates all running Excel processes in Windows OS
"""
while True:
try:
subprocess.check_call("TASKKILL /F /IM excel.exe")
except subprocess.CalledProcessError:
break
return
def hide_excel(boolean):
"""
Hides Excel from the user interface and suppresses alerts if the
input value is True. This script must be run again with False
input to enable viewing for output Excel files, after which all
Excel processes are exited.
:param boolean: True or False boolean constant
"""
for app in xw.apps:
app.display_alerts = not boolean
app.screen_updating = not boolean
if boolean is False:
terminate_excel()
return
def csv_extract(file, directory, header=None):
"""
    Converts a given CSV file into a dictionary keyed by the first header column.
:param file: Name of the CSV file
:param directory: Name of the directory containing the CSV file
:param header: Sequence containing all columns from the CSV to be
included in the output. If None, the CSV's first line will be used.
    :return: dict mapping each value of the first header column to a list of the remaining column values
"""
initial_dir = os.getcwd()
    os.chdir(directory)
    csv_dict = dict()  # accumulates one list of values per key
    with open(file, newline='') as csvfile:
        reader = csv.DictReader(csvfile, fieldnames=header)
        for row in reader:
            new_key = row[header[0]]
            if new_key is not None and new_key != "":
                csv_dict[new_key] = list()
                for column in header[1:]:
                    csv_dict[new_key].append(row[column])
os.chdir(initial_dir)
return csv_dict
def create_zip(directory, zip_name, files):
"""
Removes all existing .zip files in the chosen directory with the given
zip_name and creates a new .zip file with
this name that contains the chosen files.
:param directory: The directory where the zip file will be created
:param zip_name: The name of the new zip file
:param files: List of the files to be zipped (as filenames)
"""
# Compile zip archive for reports if not comprised of a singled file
initial_dir = os.getcwd()
os.chdir(directory)
if len(files) > 1:
with os.scandir(os.getcwd()) as scan:
for entry in scan:
if zip_name in str(entry):
os.remove(entry)
for foo in files:
with zipfile.ZipFile(zip_name, "a") as my_zip:
my_zip.write(foo)
os.chdir(initial_dir)
def send_email(sender, recipients, subject, html, html_dir, cc=None,
bcc=None, attachments=None, attachments_dir=None):
"""
Sends out an SMTP email using SSL, HTML content, and up to one
attachment (including .zip). Recipients' names must have the form
"required_first_name optional_middle_name optional_last_name". The
sender's email is assumed to be Gmail/Google Inbox.
:param sender: Sequence (a, b) where a is the sender's email and
b is their email account password
:param recipients: Sequence of pairs (a, b) where a is the
recipient's name and b is their email
:param cc: Sequence of pairs (a, b) where a is the cc
recipient's name and b is their email
:param bcc: Sequence of pairs (a, b) where a is the bcc
recipient's name and b is their email
:param subject: Subject title for the email
:param attachments: File name of the attachment (including
.zip) - no more than 1 per email
:param html: File name of the html script defining the email
body's content and signature
:param attachments_dir: Directory containing the attachments
:param html_dir: Directory containing the html script
"""
# Construct formatted strings of names/emails for Message module
recipient_names, cc_names, bcc_names = list(), list(), list()
recipient_emails, cc_emails, bcc_emails = list(), list(), list()
contact_lists = {'recipients': recipients, 'cc': cc, 'bcc': bcc}
contact_names = {'recipients': recipient_names, 'cc': cc_names,
'bcc': bcc_names}
contact_emails = {'recipients': recipient_emails, 'cc': cc_emails,
'bcc': bcc_emails}
for group, contact_list in contact_lists.items():
        for contact in (contact_list or []):  # cc and bcc may be None
contact_names[group].append(contact[0].split()[0])
contact_emails[group].append(contact[1])
contact_names[group] = ", ".join(contact_names[group])
contact_emails[group] = "; ".join(contact_emails[group])
# Extract HTML content for email body
initial_dir = os.getcwd()
os.chdir(html_dir)
with open(html) as f:
email_body = f.read()
os.chdir(initial_dir)
# Construct email
msg = EmailMessage()
msg['Subject'] = subject
msg['From'] = sender[0]
msg['To'] = contact_emails['recipients']
    if cc:
        msg['Cc'] = contact_emails['cc']
    if bcc:
        msg['Bcc'] = contact_emails['bcc']
msg.set_content("""\
<html>
<head></head>
<body>
<body style="font-family:calibri; font-size: 16px" >
<p> Hi, {}, </p>
<p> {}
</p>
</body>
</html>
""".format(contact_names[recipients], email_body),
subtype='html')
if attachments is not None and attachments_dir is not None:
# Prepare the attachment(s) for delivery
initial_dir = os.getcwd()
os.chdir(attachments_dir)
if attachments[len(attachments) - 4:] == ".zip":
with open(attachments, 'rb') as myzip:
msg.add_attachment(myzip.read(), maintype="multipart",
subtype="mixed", filename=attachments)
else:
with open(attachments, 'rb') as fp:
msg.add_attachment(fp.read(), maintype="multipart",
subtype="mixed", filename=attachments)
os.chdir(initial_dir)
# Connect with the server and send the email with its attachment(s)
with smtplib.SMTP(host='smtp.gmail.com', port=587) as s:
context = ssl.create_default_context()
s.starttls(context=context)
s.login(sender[0], sender[1])
s.send_message(msg)
return
def range_converter(xl_col_length=3):
"""
Construct conversions between Excel array ranges and
Pythonic indices (up to column ZZ in Excel)
:param xl_col_length: Length of the longest desired
Excel column (e.g., 2 for "A" to "ZZ", 3 for "A" to "ZZZ")
"""
alpha_initial = string.ascii_uppercase
alpha_extended = list(string.ascii_uppercase)
if xl_col_length == 1:
pass
else: # Expand list with same lexicographic ordering as
# Excel (e.g. "Z" is followed by "AA", "AZ" by "BA")
for k in range(2, xl_col_length + 1):
new_sequences = list()
for letter_sequence in alpha_extended:
for new_letter in alpha_initial:
new_sequences.append("".join([letter_sequence,
new_letter]))
alpha_extended.extend(new_sequences)
convert = zip(range(1, len(alpha_extended) + 1), alpha_extended)
convert_to_alpha = {x: y for x, y in convert}
convert_to_num = {y: x for x, y in convert_to_alpha.items()}
return convert_to_alpha, convert_to_num
class XlArray:
"""
This class is meant for two-layer nested lists representing an
Excel array: e.g., [[row_1], [row_2],...]
"""
# Construct conversions between Excel array ranges and Pythonic indices
converter = range_converter()
convert_to_alpha = converter[0]
convert_to_num = converter[1]
def __init__(self, data, row, col):
"""
:param data: Nested (or mono-layer) list representing an
excel array (or row)
:param row: Row location of the upper-left cell in the array
(in Excel format, e.g., "2")
:param col: Column location of the upper-left cell in the array
(in Excel format - e.g., "B")
"""
# If data is a mono-layer list (representing a row), convert it
# into a nested list (representing an array)
if not all(itertools.starmap(isinstance, zip(data,
[list] * len(data)))):
data = [data]
self.data = data
self.col = col
self.row = row
self.len = len(data) # Indicates the number of rows
# Determine the finalized Excel array range
self.empty = empty_check(data)
if not self.empty:
self.header = self.data[0]
excel_range = (col + str(row) + ":" +
XlArray.convert_to_alpha[len(self.header) +
XlArray.convert_to_num[col] - 1] + str(self.len))
# modified 5/24
self.col_num = XlArray.convert_to_num[self.col]
# XlArray.remove (below) may interfere with self.col_num
self.last_col_num = self.col_num + len(self.header) - 1
self.last_col = XlArray.convert_to_alpha[self.last_col_num]
self.range = excel_range
self.name = ""
def empty(self, row_as_list):
row_num = self.data.index(row_as_list)
return empty_check(self.data[row_num])
def remove(self, columns):
"""
Removes the chosen columns in the instance's source array
from the instance's own array with columns understood
in Excel range terms.
For instance, if the source array is [[a, b], [c,d]]
with (row, col) = (2, "F"), the
Excel interpretation is that the upper-left cell of the
instance array is F2 while the range is F2:G3.
Meanwhile, the instance's array's range is understood as
[(i, j) for i, j in zip(range(2), range(2))].
In the above case, self.remove(["G"]) would reduce the source
array to [[a], [c]] as "b" and "d" represent cells
G2 and G3, respectively.
:param columns: Column (as string) or columns (as list of
strings) in the source array in Excel's range
interpretation - e.g., "A" for the 0th column
"""
# Note that this section assumes no two rows/lists in the
# data array are identical due to list.index()
for excluded_col in columns:
excluded_col_num = XlArray.convert_to_num[excluded_col] # e.g., column "B" becomes 2
if not self.empty and excluded_col_num == self.col_num: # if the first column is to be excluded
for record in self.data:
index = self.data.index(record)
self.data[index] = record[1:] # remove the first column in all rows
self.col = XlArray.convert_to_alpha[self.col_num + 1] # adjust the Excel representation attributes
self.col_num = XlArray.convert_to_num[self.col]
elif not self.empty and excluded_col_num == \
self.last_col_num: # if the last column is to be excluded
for record in self.data:
index = self.data.index(record)
self.data[index] = record[:-1]
elif not self.empty and self.col_num < excluded_col_num \
< self.last_col_num: # if another column is to be excluded
for record in self.data:
index = self.data.index(record)
self.data[index] = record[:excluded_col_num - 1] \
+ record[excluded_col_num:] # Pythonic indexes!
else: # if the column isn't in the instance array
pass
return
def filter(self, column, value, strict=True):
"""
:param column: The column that will be searched in
the array
:param value: The cell content that will be searched
for in the array
:param strict: If true, the filter requires exact
equivalence.
:return: Filtered copy of the array with only those
rows containing the desired entry in the desired column
"""
filtered_array = list()
filter_row = ""
for record in self.data: # Here, rows are represented by lists
if record[column] == value: # Strict equivalency required for a match
if not filter_row: # Determine upper-left range value for the filtered array
filter_row = (self.data.index(record)
+ self.row - 1)
filtered_array.append(record)
elif not strict:
if not filter_row: # Determine upper-left range value for the filtered array
filter_row = (self.data.index(record)
+ self.row - 1)
try:
# if record[column] and value are splittable,
# see if all components of the former are in the latter
entry = record[column].split()
if all(entry[i] in value.split() for
i in list(range(len(entry)))):
filtered_array.append(record)
except TypeError:
pass
return XlArray(filtered_array, filter_row, self.col)
class XlExtract:
"""
Class Dependency: XlArray (for XlEdit.extract())
Extract data from an existing Excel documents using
the xlwings module.
"""
def __init__(self, dir_path):
hide_excel(True)
self.path = dir_path
self.name = os.path.split(dir_path)[1]
self.date = mod_date(dir_path)
self.wb = xw.Book(self.path) # xw.books.open(self.path) returns error
self.sheets = self.wb.sheets
def open(self):
hide_excel(True)
return self.wb
def close(self):
try:
hide_excel(False)
self.wb.close()
finally:
return
def init_sht(self, sheet_name, prior_sheet=None):
"""
Create a new sheet in the workbook
:param sheet_name: Desired name for the new sheet
:param prior_sheet: Optional - the new sheet will
be inserted after this sheet in the workbook
"""
if prior_sheet is None:
self.wb.sheets.add(sheet_name)
else:
            self.wb.sheets.add(sheet_name, after=prior_sheet)
# create and name sheet
pass
def extract(self, exclude_sheets=None, exclude_cols=None,
max_row=50000, max_col=100):
"""
Imports all data in the workbook with each sheet represented
by a different XlArray object
:param exclude_sheets: List of the names of the sheets from
which data won't be collected
:param exclude_cols: List of pairs (a,b) where a is the sheet
name and b lists the columns to be excluded
:param max_row: Rows beyond this point will not be extracted
:param max_col: Columns beyond this point will not be extracted
:return: Pairs consisting of each sheet number and the array in
that sheet with all empty rows removed.
"""
wb_data = list()
if exclude_sheets:
sht_list = [sheet.name for sheet in self.sheets if sheet
not in exclude_sheets]
else:
sht_list = [sheet.name for sheet in self.sheets]
for sht_name in sht_list:
sht_xl = self.wb.sheets(sht_name)
# Determine endpoints of the range to be extracted
raw_data = sht_xl.range((1, 1), (max_row, max_col)).value
col_len, row_len = list(), -1
for row in raw_data:
if empty_check(row):
row_len += 1
break
else:
row_len += 1
j = -1
while j in list(range(-1, len(row))):
j += 1
if empty_check(row[j:]):
col_len.append(j)
break
col_len = max(col_len)
if col_len < max_col and row_len < max_row:
last_cell_location = (XlArray.convert_to_alpha[col_len]
+ str(row_len))
else:
last_cell_location = (XlArray.convert_to_alpha[max_col]
+ str(max_row))
sht_range = "A1:" + last_cell_location
sht_data = sht_xl.range(sht_range).value
sht_array = XlArray(sht_data, 1, "A")
for row in sht_array.data:
if empty_check(row):
sht_array.data.remove(row)
try:
for x_sheet, x_columns in exclude_cols:
if x_sheet == sht_name:
sht_array.remove(x_columns)
except TypeError: # raised if no columns excluded
pass
wb_data.append((sht_xl.index - 1, sht_array)) # sht.index is 1-based (as in Excel)
self.close()
return wb_data
# create a range method here that opens a chosen sheet and
# scans it for the first completely empty row & column
class XlCreate:
"""
Class Dependency: XlArray
Write XlArray objects to an Excel file with easily-customized
formatting. Instantiating immediately opens a new
Excel workbook, so consider instantiating within a "with" statement.
(Otherwise, use XlCreate.close()) No extension is to be included
in the filename.
"""
def __init__(self, filename, dir_path):
self.initial_dir = os.getcwd()
os.chdir(dir_path)
self.path = dir_path
self.name = os.path.split(dir_path)[1]
hide_excel(True)
self.wb = xlsxwriter.Workbook(filename + ".xlsx")
self.arrays = dict()
self.header_bold = self.wb.add_format({'bold': True,
'text_wrap': 1}) # Format object: Bold/wrap the header
self.wrap = self.wb.add_format({'text_wrap': 1, 'align': 'top'})
self.date_format = self.wb.add_format({'num_format': 'm/d/yy',
'align': 'top'}) # Format object
def close(self):
self.wb.close()
hide_excel(False)
os.chdir(self.initial_dir)
return
def write(self, sheet_name, sheet_data, row=1, column="A",
date_col=None, custom_width=None):
"""
Adds a mapping between the new sheet name and its data
to self.arrays. Writes the data to the new sheet.
:param sheet_name: Name to be used for the new sheet
:param sheet_data: Data to be mapped onto the new sheet
starting with cell A1. Include the header.
:param row: New sheet's row location of the upper-left
cell in the array (in Excel format, e.g., "2")
:param column: New sheet's column location of the
upper-left cell in the array (in Excel format, e.g., "B")
:param date_col: Columns (in Excel format) that are to be
written as dates
:param custom_width: Pairs (column, width) that determine
column-specific width
"""
# Construct conversions between Excel array ranges and Pythonic indices
converter = range_converter()
convert_to_alpha = converter[0]
convert_to_num = converter[1]
# Add mapping between new sheet name and its
# data (translated into a XlArray object)
self.arrays[sheet_name] = data = XlArray(sheet_data,
row, column)
# Add a sheet with the chosen name and set the table name
sht = self.wb.add_worksheet(sheet_name)
table_name = "_".join(sheet_name.split())
# Create list of table header formatting objects
header_formatting = [{'header': col, 'header_format':
self.header_bold} for col in data.header]
# 5/23 This is running correctly
# Insert the table and its data
sht.add_table(data.range, {'columns': header_formatting,
'name': table_name})
for item in data.header:
sht.write(0, data.col_num + data.header.index(item)
- 1, item, self.header_bold)
# 5/24: added -1 above, verify that this works
all_columns_xl = list()
# represents the destination columns
all_columns_py = dict()
# represents the indexes of these columns in the source data.data
for k in range(data.col_num, data.last_col_num + 1): # Added "+1" on 5/23 - check!
all_columns_xl.append(convert_to_alpha[k])
for col in all_columns_xl:
all_columns_py[col] = convert_to_num[col] -\
convert_to_num[all_columns_xl[0]]
for row_py in range(1, data.len):
for col in all_columns_xl:
col_py = all_columns_py[col]
if date_col and col in date_col:
if not isinstance(data.data[row_py][col_py],
datetime.datetime):
sht.write(row_py, col_py, "NO DATE",
self.date_format) # sht.write() uses 0-base indexes
else:
sht.write_datetime(row_py, col_py,
data.data[row_py][col_py],
self.date_format)
else:
sht.write(row_py, col_py,
data.data[row_py][col_py], self.wrap)
# Adjust the column widths
for col in all_columns_xl:
if not custom_width or col not in custom_width:
col_py = all_columns_py[col]
len_lst = [len(str(record[col_py])) for
record in data.data[1:]]
if not len_lst:
max_len = 16
elif max(len_lst) > 50:
max_len = 50
else:
max_len = max(max(len_lst), 16)
sht.set_column(col + ":" + col, max_len)
elif custom_width:
custom_dict = {x: y for x, y in custom_width}
sht.set_column(col + ":" + col, custom_dict[col])
else:
pass
return
|
[
"os.remove",
"xlwings.Book",
"subprocess.check_call",
"zipfile.ZipFile",
"smtplib.SMTP",
"os.getcwd",
"csv.DictReader",
"email.message.EmailMessage",
"ssl.create_default_context",
"xlsxwriter.Workbook",
"os.DirEntry.is_dir",
"os.path.getmtime",
"os.chdir",
"datetime.datetime.fromtimestamp",
"os.path.split",
"os.path.join",
"os.listdir",
"os.scandir"
] |
[((1642, 1662), 'os.listdir', 'os.listdir', (['dir_path'], {}), '(dir_path)\n', (1652, 1662), False, 'import os\n'), ((1772, 1783), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1781, 1783), False, 'import os\n'), ((1788, 1806), 'os.chdir', 'os.chdir', (['dir_path'], {}), '(dir_path)\n', (1796, 1806), False, 'import os\n'), ((2403, 2424), 'os.chdir', 'os.chdir', (['initial_dir'], {}), '(initial_dir)\n', (2411, 2424), False, 'import os\n'), ((2437, 2472), 'os.path.join', 'os.path.join', (['dir_path', 'matches[-1]'], {}), '(dir_path, matches[-1])\n', (2449, 2472), False, 'import os\n'), ((4311, 4322), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4320, 4322), False, 'import os\n'), ((4327, 4346), 'os.chdir', 'os.chdir', (['directory'], {}), '(directory)\n', (4335, 4346), False, 'import os\n'), ((4724, 4745), 'os.chdir', 'os.chdir', (['initial_dir'], {}), '(initial_dir)\n', (4732, 4745), False, 'import os\n'), ((5271, 5282), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5280, 5282), False, 'import os\n'), ((5287, 5306), 'os.chdir', 'os.chdir', (['directory'], {}), '(directory)\n', (5295, 5306), False, 'import os\n'), ((5610, 5631), 'os.chdir', 'os.chdir', (['initial_dir'], {}), '(initial_dir)\n', (5618, 5631), False, 'import os\n'), ((7730, 7741), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (7739, 7741), False, 'import os\n'), ((7746, 7764), 'os.chdir', 'os.chdir', (['html_dir'], {}), '(html_dir)\n', (7754, 7764), False, 'import os\n'), ((7825, 7846), 'os.chdir', 'os.chdir', (['initial_dir'], {}), '(initial_dir)\n', (7833, 7846), False, 'import os\n'), ((7880, 7894), 'email.message.EmailMessage', 'EmailMessage', ([], {}), '()\n', (7892, 7894), False, 'from email.message import EmailMessage\n'), ((583, 599), 'os.scandir', 'os.scandir', (['path'], {}), '(path)\n', (593, 599), False, 'import os\n'), ((1201, 1222), 'os.path.getmtime', 'os.path.getmtime', (['foo'], {}), '(foo)\n', (1217, 1222), False, 'import os\n'), ((1238, 1272), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['t'], {}), '(t)\n', (1269, 1272), False, 'import datetime\n'), ((4408, 4450), 'csv.DictReader', 'csv.DictReader', (['csvfile'], {'fieldnames': 'header'}), '(csvfile, fieldnames=header)\n', (4422, 4450), False, 'import csv\n'), ((8585, 8596), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (8594, 8596), False, 'import os\n'), ((8605, 8630), 'os.chdir', 'os.chdir', (['attachments_dir'], {}), '(attachments_dir)\n', (8613, 8630), False, 'import os\n'), ((9096, 9117), 'os.chdir', 'os.chdir', (['initial_dir'], {}), '(initial_dir)\n', (9104, 9117), False, 'import os\n'), ((9200, 9245), 'smtplib.SMTP', 'smtplib.SMTP', ([], {'host': '"""smtp.gmail.com"""', 'port': '(587)'}), "(host='smtp.gmail.com', port=587)\n", (9212, 9245), False, 'import smtplib\n'), ((9270, 9298), 'ssl.create_default_context', 'ssl.create_default_context', ([], {}), '()\n', (9296, 9298), False, 'import ssl\n'), ((17143, 17161), 'xlwings.Book', 'xw.Book', (['self.path'], {}), '(self.path)\n', (17150, 17161), True, 'import xlwings as xw\n'), ((21304, 21315), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (21313, 21315), False, 'import os\n'), ((21324, 21342), 'os.chdir', 'os.chdir', (['dir_path'], {}), '(dir_path)\n', (21332, 21342), False, 'import os\n'), ((21462, 21501), 'xlsxwriter.Workbook', 'xlsxwriter.Workbook', (["(filename + '.xlsx')"], {}), "(filename + '.xlsx')\n", (21481, 21501), False, 'import xlsxwriter\n'), ((22022, 22048), 'os.chdir', 'os.chdir', (['self.initial_dir'], {}), '(self.initial_dir)\n', (22030, 22048), False, 'import 
os\n'), ((1166, 1184), 'os.path.split', 'os.path.split', (['foo'], {}), '(foo)\n', (1179, 1184), False, 'import os\n'), ((2058, 2086), 'os.path.join', 'os.path.join', (['dir_path', 'item'], {}), '(dir_path, item)\n', (2070, 2086), False, 'import os\n'), ((3271, 3321), 'subprocess.check_call', 'subprocess.check_call', (['"""TASKKILL /F /IM excel.exe"""'], {}), "('TASKKILL /F /IM excel.exe')\n", (3292, 3321), False, 'import subprocess\n'), ((17059, 17082), 'os.path.split', 'os.path.split', (['dir_path'], {}), '(dir_path)\n', (17072, 17082), False, 'import os\n'), ((21392, 21415), 'os.path.split', 'os.path.split', (['dir_path'], {}), '(dir_path)\n', (21405, 21415), False, 'import os\n'), ((660, 686), 'os.DirEntry.is_dir', 'os.DirEntry.is_dir', (['subdir'], {}), '(subdir)\n', (678, 686), False, 'import os\n'), ((1307, 1325), 'os.path.split', 'os.path.split', (['foo'], {}), '(foo)\n', (1320, 1325), False, 'import os\n'), ((5354, 5365), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5363, 5365), False, 'import os\n'), ((5530, 5560), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zip_name', '"""a"""'], {}), "(zip_name, 'a')\n", (5545, 5560), False, 'import zipfile\n'), ((5470, 5486), 'os.remove', 'os.remove', (['entry'], {}), '(entry)\n', (5479, 5486), False, 'import os\n'), ((773, 799), 'os.path.join', 'os.path.join', (['path', 'subdir'], {}), '(path, subdir)\n', (785, 799), False, 'import os\n'), ((896, 928), 'os.path.join', 'os.path.join', (['path', 'subdir', 'item'], {}), '(path, subdir, item)\n', (908, 928), False, 'import os\n')]
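As a quick orientation to the module above, a typical round trip pairs `XlExtract` (read an existing workbook into `XlArray` objects) with `XlCreate` (write those arrays into a new, formatted workbook). The paths, sheet names, and size limits in this sketch are assumptions:

src = XlExtract(r"C:\reports\raw data.xlsx")
sheet_arrays = src.extract(max_row=2000, max_col=40)        # [(sheet_index, XlArray), ...]

out = XlCreate("weekly summary", r"C:\reports")
for index, array in sheet_arrays:
    if not array.empty:
        out.write("Sheet{}".format(index + 1), array.data)
out.close()                                                 # finalizes the .xlsx on disk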
|
# Copyright 2021 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import logging
import os
from enum import Enum
from pathlib import Path
from typing import Callable, Optional, Union
import numpy
import torch
from datasets import Dataset, load_dataset
from torch.utils.data import DataLoader, RandomSampler
from transformers import AutoTokenizer, default_data_collator
from transformers.onnx import export
from transformers.onnx.features import FeaturesManager
import onnx
from onnxruntime.quantization import (
CalibrationDataReader,
CalibrationMethod,
QuantFormat,
QuantType,
quantize_dynamic,
quantize_static,
)
from optimum.onnxruntime.configuration import ORTConfig
from optimum.onnxruntime.utils import generate_identified_filename
logger = logging.getLogger(__name__)
class ORTQuantizationMode(Enum):
DYNAMIC = "dynamic"
STATIC = "static"
SUPPORTED_QUANT_MODE = set([approach.value for approach in ORTQuantizationMode])
CALIB_METHOD = {"minmax": "MinMax", "entropy": "Entropy"}
Q_FORMAT = {"operator": "QOperator", "qdq": "QDQ"}
Q_TYPE = {"int8": "QInt8", "uint8": "QUInt8"}
class ORTCalibrationDataReader(CalibrationDataReader):
def __init__(self, calib_dataloader: DataLoader):
self._iter = iter([{key: data[key].numpy() for key in data} for data in calib_dataloader])
def get_next(self):
return next(self._iter, None)
class ORTQuantizer:
def __init__(
self,
model_name_or_path: str,
ort_config: Union[str, ORTConfig],
feature: str = "default",
calib_dataset: Optional[Dataset] = None,
dataset_name: Optional[str] = None,
dataset_config_name: Optional[str] = None,
data_files: Optional[str] = None,
preprocess_function: Optional[Callable] = None,
**kwargs
):
"""
Args:
model_name_or_path (`str`):
Repository name in the Hugging Face Hub or path to a local directory hosting the model.
ort_config (`Union[ORTConfig, str]`):
Configuration file containing all the information related to the model quantization.
Can be either:
- an instance of the class :class:`ORTConfig`,
- a string valid as input to :func:`ORTConfig.from_pretrained`.
feature (`str`):
Feature used when exporting the model.
calib_dataset (`Dataset`, `optional`):
Dataset to use for the calibration step.
dataset_name (`str`, `optional`):
Dataset repository name on the Hugging Face Hub or path to a local directory containing data files to
load to use for the calibration step.
dataset_config_name (`str`, `optional`):
Name of the dataset configuration.
data_files (`str`, `optional`):
Path to source data files.
preprocess_function (`Callable`, `optional`):
Processing function to apply to each example after loading dataset.
cache_dir (`str`, `optional`):
Path to a directory in which a downloaded configuration should be cached if the standard cache should
not be used.
force_download (`bool`, `optional`, defaults to `False`):
Whether or not to force to (re-)download the configuration files and override the cached versions if
they exist.
resume_download (`bool`, `optional`, defaults to `False`):
Whether or not to delete incompletely received file. Attempts to resume the download if such a file
exists.
revision(`str`, `optional`):
The specific version to use. It can be a branch name, a tag name, or a commit id, since we use a
git-based system for storing models and other artifacts on huggingface.co, so ``revision`` can be any
identifier allowed by git.
"""
config_kwargs_default = [
("cache_dir", None),
("force_download", False),
("resume_download", False),
("revision", None),
]
config_kwargs = {name: kwargs.get(name, default_value) for (name, default_value) in config_kwargs_default}
model_kwargs = copy.deepcopy(config_kwargs)
tokenizer_kwargs = copy.deepcopy(config_kwargs)
self.cache_dir = config_kwargs.get("cache_dir")
self.model_name_or_path = model_name_or_path
if not isinstance(ort_config, ORTConfig):
ort_config = ORTConfig.from_pretrained(ort_config, **config_kwargs)
self.ort_config = ort_config
self.quantization_approach = ORTQuantizationMode(ort_config.quantization_approach)
self.activation_type = QuantType[Q_TYPE.get(ort_config.activation_type)]
self.weight_type = QuantType[Q_TYPE.get(ort_config.weight_type)]
self.quant_format = QuantFormat[Q_FORMAT.get(ort_config.quant_format)]
self.calibrate_method = CalibrationMethod[CALIB_METHOD.get(ort_config.calibration_method)]
self.seed = ort_config.seed
self.calib_dataset = calib_dataset
self.dataset_name = dataset_name
self.dataset_config_name = dataset_config_name
self.data_files = data_files
self.preprocess_function = preprocess_function
self.onnx_config = None
self.feature = feature
self.tokenizer = AutoTokenizer.from_pretrained(self.model_name_or_path, **tokenizer_kwargs)
model_class = FeaturesManager.get_model_class_for_feature(self.feature)
self.model = model_class.from_pretrained(self.model_name_or_path, **model_kwargs)
def export(self, model_path: os.PathLike) -> None:
"""
Load and export a model to an ONNX Intermediate Representation (IR).
Args:
model_path (`os.PathLike`):
The path used to save the model exported to an ONNX Intermediate Representation (IR).
"""
model_type, model_onnx_config = FeaturesManager.check_supported_model_or_raise(
self.model, feature=self.feature
)
self.onnx_config = model_onnx_config(self.model.config)
opset = self.onnx_config.default_onnx_opset if self.ort_config.opset is None else self.ort_config.opset
_ = export(self.tokenizer, self.model, self.onnx_config, opset, model_path)
def fit(self, output_dir: Union[str, os.PathLike]) -> None:
"""
Load and export a model to an ONNX Intermediate Representation (IR) and apply the specified quantization
approach.
Args:
output_dir (`Union[str, os.PathLike]`):
The output directory where the quantized model will be saved.
"""
output_dir = output_dir if isinstance(output_dir, Path) else Path(output_dir)
output_dir.mkdir(parents=True, exist_ok=True)
model_path = output_dir.joinpath("model.onnx")
quant_model_path = generate_identified_filename(model_path, "-quantized")
self.export(model_path)
if self.quantization_approach == ORTQuantizationMode.DYNAMIC:
quantize_dynamic(
model_path,
quant_model_path,
per_channel=self.ort_config.per_channel,
reduce_range=self.ort_config.reduce_range,
activation_type=self.activation_type,
weight_type=self.weight_type,
optimize_model=self.ort_config.optimize_model,
use_external_data_format=self.ort_config.use_external_data_format,
)
elif self.quantization_approach == ORTQuantizationMode.STATIC:
calib_dataset = self.calib_dataset if self.calib_dataset is not None else self.get_calib_dataset()
calib_dataloader = self.get_calib_dataloader(calib_dataset)
calib_data_reader = self.get_data_reader(calib_dataloader)
quantize_static(
model_path,
quant_model_path,
calib_data_reader,
quant_format=self.quant_format,
per_channel=self.ort_config.per_channel,
reduce_range=self.ort_config.reduce_range,
activation_type=self.activation_type,
weight_type=self.weight_type,
optimize_model=self.ort_config.optimize_model,
use_external_data_format=self.ort_config.use_external_data_format,
calibrate_method=self.calibrate_method,
nodes_to_quantize=self.ort_config.nodes_to_quantize,
nodes_to_exclude=self.ort_config.nodes_to_exclude,
extra_options=self.ort_config.extra_options,
)
else:
raise ValueError(
f"Unknown quantization approach: `quantization_approach` was set to {self.quantization_approach}. "
f"Supported quantization approaches are " + ", ".join(SUPPORTED_QUANT_MODE)
)
def get_calib_dataset(self) -> Dataset:
"""
Returns the calibration :class:`~datasets.arrow_dataset.Dataset` to use for the post-training static
quantization calibration step.
"""
if self.dataset_name is None:
raise ValueError(
"ORTQuantizer: Static quantization calibration step requires a dataset_name if no calib_dataset is "
"provided."
)
if self.preprocess_function is None:
raise ValueError(
"ORTQuantizer: Processing function to apply after loading the dataset used for static quantization "
"calibration step was not provided."
)
calib_dataset = load_dataset(
self.dataset_name,
name=self.dataset_config_name,
data_files=self.data_files,
split=self.ort_config.split,
cache_dir=self.cache_dir,
)
calib_dataset = calib_dataset.map(self.preprocess_function, batched=True)
return calib_dataset
def get_calib_dataloader(self, calib_dataset: Optional[Dataset] = None) -> DataLoader:
"""
Returns the calibration :class:`~torch.utils.data.DataLoader`.
Args:
calib_dataset (`torch.utils.data.Dataset`, `optional`):
If provided, will override `self.calib_dataset`.
"""
if calib_dataset is None and self.calib_dataset is None:
raise ValueError("ORTQuantizer: static quantization calibration step requires a calib_dataset.")
calib_dataset = calib_dataset if calib_dataset is not None else self.calib_dataset
if self.ort_config.max_samples is not None and len(calib_dataset) > self.ort_config.max_samples:
calib_dataset = calib_dataset.select(range(self.ort_config.max_samples))
ignored_columns = list(set(calib_dataset.column_names) - set(self.onnx_config.inputs.keys()))
calib_dataset = calib_dataset.remove_columns(ignored_columns)
generator = torch.Generator()
generator.manual_seed(self.seed)
sampler = RandomSampler(calib_dataset, generator=generator)
return DataLoader(
calib_dataset,
batch_size=self.ort_config.calib_batch_size,
sampler=sampler,
collate_fn=default_data_collator,
)
@staticmethod
def get_data_reader(calib_dataloader: DataLoader) -> ORTCalibrationDataReader:
"""
Returns the calibration :class:`~optimum.onnxruntime.quantization.ORTCalibrationDataReader`.
Args:
calib_dataloader (`torch.utils.data.DataLoader`):
Calibration dataloader to use for the post-training static quantization calibration step.
"""
return ORTCalibrationDataReader(calib_dataloader)
|
[
"datasets.load_dataset",
"copy.deepcopy",
"onnxruntime.quantization.quantize_static",
"torch.utils.data.RandomSampler",
"torch.utils.data.DataLoader",
"transformers.onnx.features.FeaturesManager.check_supported_model_or_raise",
"onnxruntime.quantization.quantize_dynamic",
"transformers.onnx.features.FeaturesManager.get_model_class_for_feature",
"transformers.AutoTokenizer.from_pretrained",
"optimum.onnxruntime.configuration.ORTConfig.from_pretrained",
"pathlib.Path",
"torch.Generator",
"optimum.onnxruntime.utils.generate_identified_filename",
"transformers.onnx.export",
"logging.getLogger"
] |
[((1338, 1365), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1355, 1365), False, 'import logging\n'), ((4906, 4934), 'copy.deepcopy', 'copy.deepcopy', (['config_kwargs'], {}), '(config_kwargs)\n', (4919, 4934), False, 'import copy\n'), ((4962, 4990), 'copy.deepcopy', 'copy.deepcopy', (['config_kwargs'], {}), '(config_kwargs)\n', (4975, 4990), False, 'import copy\n'), ((6045, 6119), 'transformers.AutoTokenizer.from_pretrained', 'AutoTokenizer.from_pretrained', (['self.model_name_or_path'], {}), '(self.model_name_or_path, **tokenizer_kwargs)\n', (6074, 6119), False, 'from transformers import AutoTokenizer, default_data_collator\n'), ((6142, 6199), 'transformers.onnx.features.FeaturesManager.get_model_class_for_feature', 'FeaturesManager.get_model_class_for_feature', (['self.feature'], {}), '(self.feature)\n', (6185, 6199), False, 'from transformers.onnx.features import FeaturesManager\n'), ((6644, 6729), 'transformers.onnx.features.FeaturesManager.check_supported_model_or_raise', 'FeaturesManager.check_supported_model_or_raise', (['self.model'], {'feature': 'self.feature'}), '(self.model, feature=self.feature\n )\n', (6690, 6729), False, 'from transformers.onnx.features import FeaturesManager\n'), ((6935, 7006), 'transformers.onnx.export', 'export', (['self.tokenizer', 'self.model', 'self.onnx_config', 'opset', 'model_path'], {}), '(self.tokenizer, self.model, self.onnx_config, opset, model_path)\n', (6941, 7006), False, 'from transformers.onnx import export\n'), ((7594, 7648), 'optimum.onnxruntime.utils.generate_identified_filename', 'generate_identified_filename', (['model_path', '"""-quantized"""'], {}), "(model_path, '-quantized')\n", (7622, 7648), False, 'from optimum.onnxruntime.utils import generate_identified_filename\n'), ((10341, 10491), 'datasets.load_dataset', 'load_dataset', (['self.dataset_name'], {'name': 'self.dataset_config_name', 'data_files': 'self.data_files', 'split': 'self.ort_config.split', 'cache_dir': 'self.cache_dir'}), '(self.dataset_name, name=self.dataset_config_name, data_files=\n self.data_files, split=self.ort_config.split, cache_dir=self.cache_dir)\n', (10353, 10491), False, 'from datasets import Dataset, load_dataset\n'), ((11654, 11671), 'torch.Generator', 'torch.Generator', ([], {}), '()\n', (11669, 11671), False, 'import torch\n'), ((11731, 11780), 'torch.utils.data.RandomSampler', 'RandomSampler', (['calib_dataset'], {'generator': 'generator'}), '(calib_dataset, generator=generator)\n', (11744, 11780), False, 'from torch.utils.data import DataLoader, RandomSampler\n'), ((11797, 11922), 'torch.utils.data.DataLoader', 'DataLoader', (['calib_dataset'], {'batch_size': 'self.ort_config.calib_batch_size', 'sampler': 'sampler', 'collate_fn': 'default_data_collator'}), '(calib_dataset, batch_size=self.ort_config.calib_batch_size,\n sampler=sampler, collate_fn=default_data_collator)\n', (11807, 11922), False, 'from torch.utils.data import DataLoader, RandomSampler\n'), ((5175, 5229), 'optimum.onnxruntime.configuration.ORTConfig.from_pretrained', 'ORTConfig.from_pretrained', (['ort_config'], {}), '(ort_config, **config_kwargs)\n', (5200, 5229), False, 'from optimum.onnxruntime.configuration import ORTConfig\n'), ((7441, 7457), 'pathlib.Path', 'Path', (['output_dir'], {}), '(output_dir)\n', (7445, 7457), False, 'from pathlib import Path\n'), ((7764, 8096), 'onnxruntime.quantization.quantize_dynamic', 'quantize_dynamic', (['model_path', 'quant_model_path'], {'per_channel': 'self.ort_config.per_channel', 'reduce_range': 
'self.ort_config.reduce_range', 'activation_type': 'self.activation_type', 'weight_type': 'self.weight_type', 'optimize_model': 'self.ort_config.optimize_model', 'use_external_data_format': 'self.ort_config.use_external_data_format'}), '(model_path, quant_model_path, per_channel=self.ort_config.\n per_channel, reduce_range=self.ort_config.reduce_range, activation_type\n =self.activation_type, weight_type=self.weight_type, optimize_model=\n self.ort_config.optimize_model, use_external_data_format=self.\n ort_config.use_external_data_format)\n', (7780, 8096), False, 'from onnxruntime.quantization import CalibrationDataReader, CalibrationMethod, QuantFormat, QuantType, quantize_dynamic, quantize_static\n'), ((8557, 9145), 'onnxruntime.quantization.quantize_static', 'quantize_static', (['model_path', 'quant_model_path', 'calib_data_reader'], {'quant_format': 'self.quant_format', 'per_channel': 'self.ort_config.per_channel', 'reduce_range': 'self.ort_config.reduce_range', 'activation_type': 'self.activation_type', 'weight_type': 'self.weight_type', 'optimize_model': 'self.ort_config.optimize_model', 'use_external_data_format': 'self.ort_config.use_external_data_format', 'calibrate_method': 'self.calibrate_method', 'nodes_to_quantize': 'self.ort_config.nodes_to_quantize', 'nodes_to_exclude': 'self.ort_config.nodes_to_exclude', 'extra_options': 'self.ort_config.extra_options'}), '(model_path, quant_model_path, calib_data_reader,\n quant_format=self.quant_format, per_channel=self.ort_config.per_channel,\n reduce_range=self.ort_config.reduce_range, activation_type=self.\n activation_type, weight_type=self.weight_type, optimize_model=self.\n ort_config.optimize_model, use_external_data_format=self.ort_config.\n use_external_data_format, calibrate_method=self.calibrate_method,\n nodes_to_quantize=self.ort_config.nodes_to_quantize, nodes_to_exclude=\n self.ort_config.nodes_to_exclude, extra_options=self.ort_config.\n extra_options)\n', (8572, 9145), False, 'from onnxruntime.quantization import CalibrationDataReader, CalibrationMethod, QuantFormat, QuantType, quantize_dynamic, quantize_static\n')]
|
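A minimal usage sketch for the ORTQuantizer sample above; the model name, config location, and output directory are placeholders, and it assumes the optimum/transformers/onnxruntime dependencies from the sample's imports are installed.

# Hypothetical driver for the ORTQuantizer class defined above; names and paths are placeholders.
quantizer = ORTQuantizer(
    "distilbert-base-uncased",   # model repo name or local directory (placeholder)
    "path/to/ort_config",        # resolved via ORTConfig.from_pretrained (placeholder)
    feature="default",
)
quantizer.fit("quantized")       # writes model.onnx and model-quantized.onnx under ./quantized

With quantization_approach set to "dynamic" in the config this needs no calibration data; the static path additionally requires a dataset_name and preprocess_function, as the sample's get_calib_dataset shows.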
import sys
import nfqueue
import socket
import signal
import time
import whitelist
from collections import defaultdict
class GZA(object):
def __init__(self, vmnum, opts):
self.gamestate = defaultdict(int)
self.vmnum = vmnum
self.iface = 'tap%d' % vmnum
self.opts = opts
self.mac = 'ca:fe:de:ad:be:ef'
signal.signal(signal.SIGUSR1, self.reset) # So we can reset gamestate
if self.opts.whitelist:
whitelist.makewhitelist(self.opts.whitelistpath)
self.whitelisted = whitelist.whitelisted
whitelist.makeipwhitelist(self.opts.ipwhitelistpath)
self.whitelistedip = whitelist.whitelistedip
# Set the game (only used in subclass games)
if self.opts.taken > 0:
self.game = 'taken'
elif self.opts.dropn > 0:
self.game = 'dropn'
elif self.opts.dropall:
self.game = 'dropall'
def reset(self, signum, frame):
sys.stderr.write('Cleared game state!\n')
self.gamestate.clear()
try:
self.q.try_run()
except KeyboardInterrupt:
print('Clean shutdown')
self.q.unbind(socket.AF_INET)
sys.exit(0)
def playgame(self, payload):
payload.set_verdict(nfqueue.NF_ACCEPT)
def startgame(self):
good = False
while not good:
try:
self.q = nfqueue.queue()
self.q.open()
self.q.set_callback(self.playgame)
self.q.fast_open(self.vmnum, socket.AF_INET)
good = True
except RuntimeError as e:
sys.stderr.write(str(e) + '\n')
sys.stderr.write('Retrying to connect to nfqueue #%d...\n'
% self.vmnum)
time.sleep(3)
try:
sys.stderr.write('Successfully bound to nfqueue #%d\n' % self.vmnum)
self.q.try_run()
except KeyboardInterrupt:
print('Clean shutdown')
self.q.unbind(socket.AF_INET)
sys.exit(0)
|
[
"whitelist.makeipwhitelist",
"time.sleep",
"collections.defaultdict",
"sys.stderr.write",
"nfqueue.queue",
"whitelist.makewhitelist",
"signal.signal",
"sys.exit"
] |
[((201, 217), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (212, 217), False, 'from collections import defaultdict\n'), ((354, 395), 'signal.signal', 'signal.signal', (['signal.SIGUSR1', 'self.reset'], {}), '(signal.SIGUSR1, self.reset)\n', (367, 395), False, 'import signal\n'), ((987, 1028), 'sys.stderr.write', 'sys.stderr.write', (['"""Cleared game state!\n"""'], {}), "('Cleared game state!\\n')\n", (1003, 1028), False, 'import sys\n'), ((468, 516), 'whitelist.makewhitelist', 'whitelist.makewhitelist', (['self.opts.whitelistpath'], {}), '(self.opts.whitelistpath)\n', (491, 516), False, 'import whitelist\n'), ((582, 634), 'whitelist.makeipwhitelist', 'whitelist.makeipwhitelist', (['self.opts.ipwhitelistpath'], {}), '(self.opts.ipwhitelistpath)\n', (607, 634), False, 'import whitelist\n'), ((1872, 1940), 'sys.stderr.write', 'sys.stderr.write', (["('Successfully bound to nfqueue #%d\\n' % self.vmnum)"], {}), "('Successfully bound to nfqueue #%d\\n' % self.vmnum)\n", (1888, 1940), False, 'import sys\n'), ((1226, 1237), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1234, 1237), False, 'import sys\n'), ((1432, 1447), 'nfqueue.queue', 'nfqueue.queue', ([], {}), '()\n', (1445, 1447), False, 'import nfqueue\n'), ((2094, 2105), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2102, 2105), False, 'import sys\n'), ((1720, 1792), 'sys.stderr.write', 'sys.stderr.write', (["('Retrying to connect to nfqueue #%d...\\n' % self.vmnum)"], {}), "('Retrying to connect to nfqueue #%d...\\n' % self.vmnum)\n", (1736, 1792), False, 'import sys\n'), ((1833, 1846), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1843, 1846), False, 'import time\n')]
|
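A small sketch of how the GZA base class above is meant to be extended: playgame is the per-packet nfqueue callback, so a concrete game overrides it and issues a different verdict. The DropAllGame name and the opts object are illustrative, and it assumes the nfqueue bindings expose NF_DROP alongside the NF_ACCEPT constant used in the sample.

# Hypothetical subclass; opts is assumed to carry the attributes the base class reads
# (whitelist, whitelistpath, ipwhitelistpath, taken, dropn, dropall).
class DropAllGame(GZA):
    def playgame(self, payload):
        # Drop every packet routed to this queue instead of accepting it.
        payload.set_verdict(nfqueue.NF_DROP)

# game = DropAllGame(vmnum=1, opts=opts)   # opts would come from an option parser
# game.startgame()                         # binds to nfqueue #1 and enters the callback loop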
from django.db import models
from django.conf import settings
BOOKING_PERIOD = (
("5","5M"),
("10","10M"),
("15","15M"),
("20","20M"),
("25","25M"),
("30","30M"),
("35","35M"),
("40","40M"),
("45","45M"),
("60","1H"),
("75","1H 15M"),
("90","1H 30M"),
("105","1H 45M"),
("120","2H"),
("150","2H 30M"),
("180","3H"),
)
class Booking(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL,
on_delete=models.CASCADE, blank=True, null=True)
date = models.DateField()
time = models.TimeField()
user_name = models.CharField(max_length=250)
user_email = models.EmailField()
approved = models.BooleanField(default=False)
user_mobile = models.CharField(blank=True, null=True, max_length=10)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self) -> str:
return self.user_name or "(No Name)"
class BookingManager(models.Model):
# General
booking_enable = models.BooleanField(default=True)
confirmation_required = models.BooleanField(default=True)
# Date
disable_weekend = models.BooleanField(default=True)
available_booking_months = models.IntegerField(default=1, help_text="if 2, user can only book appointment for next two months.")
max_appointment_per_day = models.IntegerField(null=True, blank=True)
# Time
start_time = models.TimeField()
end_time = models.TimeField()
period_of_each_booking = models.CharField(max_length=3, default="30", choices=BOOKING_PERIOD, help_text="How long each appointment take.")
max_appointment_per_time = models.IntegerField(default=1, help_text="how much appointment can be book for each time.")
|
[
"django.db.models.TimeField",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.db.models.BooleanField",
"django.db.models.EmailField",
"django.db.models.IntegerField",
"django.db.models.DateField",
"django.db.models.DateTimeField"
] |
[((427, 524), 'django.db.models.ForeignKey', 'models.ForeignKey', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.CASCADE', 'blank': '(True)', 'null': '(True)'}), '(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, blank\n =True, null=True)\n', (444, 524), False, 'from django.db import models\n'), ((560, 578), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (576, 578), False, 'from django.db import models\n'), ((590, 608), 'django.db.models.TimeField', 'models.TimeField', ([], {}), '()\n', (606, 608), False, 'from django.db import models\n'), ((625, 657), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)'}), '(max_length=250)\n', (641, 657), False, 'from django.db import models\n'), ((675, 694), 'django.db.models.EmailField', 'models.EmailField', ([], {}), '()\n', (692, 694), False, 'from django.db import models\n'), ((710, 744), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (729, 744), False, 'from django.db import models\n'), ((763, 817), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(10)'}), '(blank=True, null=True, max_length=10)\n', (779, 817), False, 'from django.db import models\n'), ((836, 875), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (856, 875), False, 'from django.db import models\n'), ((893, 928), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (913, 928), False, 'from django.db import models\n'), ((1079, 1112), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1098, 1112), False, 'from django.db import models\n'), ((1141, 1174), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1160, 1174), False, 'from django.db import models\n'), ((1208, 1241), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1227, 1241), False, 'from django.db import models\n'), ((1273, 1379), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)', 'help_text': '"""if 2, user can only book appointment for next two months."""'}), "(default=1, help_text=\n 'if 2, user can only book appointment for next two months.')\n", (1292, 1379), False, 'from django.db import models\n'), ((1405, 1447), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1424, 1447), False, 'from django.db import models\n'), ((1476, 1494), 'django.db.models.TimeField', 'models.TimeField', ([], {}), '()\n', (1492, 1494), False, 'from django.db import models\n'), ((1510, 1528), 'django.db.models.TimeField', 'models.TimeField', ([], {}), '()\n', (1526, 1528), False, 'from django.db import models\n'), ((1558, 1675), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(3)', 'default': '"""30"""', 'choices': 'BOOKING_PERIOD', 'help_text': '"""How long each appointment take."""'}), "(max_length=3, default='30', choices=BOOKING_PERIOD,\n help_text='How long each appointment take.')\n", (1574, 1675), False, 'from django.db import models\n'), ((1703, 1799), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)', 'help_text': '"""how much appointment can be book for each time."""'}), "(default=1, help_text=\n 'how much appointment 
can be book for each time.')\n", (1722, 1799), False, 'from django.db import models\n')]
|
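A brief sketch of how the Booking model above could be exercised from a Django shell or test; the app label "appointments" and the field values are placeholders.

# Hypothetical usage, assuming the app is installed and migrated.
import datetime
from appointments.models import Booking   # "appointments" is a placeholder app label

booking = Booking.objects.create(
    date=datetime.date(2022, 3, 1),
    time=datetime.time(10, 30),
    user_name="Jane Doe",
    user_email="jane@example.com",
)
print(booking)   # __str__ falls back to "(No Name)" when user_name is empty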
import xbmc
import xbmcgui
import urllib
import os
import time
import urlparse
from xbmctorrent import plugin, torrent2http
from xbmctorrent.ga import track_event
from xbmctorrent.utils import url_get_json
from contextlib import contextmanager, closing, nested
TORRENT2HTTP_TIMEOUT = 20
TORRENT2HTTP_POLL = 200
WINDOW_FULLSCREEN_VIDEO = 12005
XBFONT_LEFT = 0x00000000
XBFONT_RIGHT = 0x00000001
XBFONT_CENTER_X = 0x00000002
XBFONT_CENTER_Y = 0x00000004
XBFONT_TRUNCATED = 0x00000008
XBFONT_JUSTIFY = 0x00000010
STATE_STRS = [
'Queued',
'Checking',
'Downloading metadata',
'Downloading',
'Finished',
'Seeding',
'Allocating',
'Allocating file & Checking resume'
]
VIEWPORT_WIDTH = 1920.0
VIEWPORT_HEIGHT = 1088.0
OVERLAY_WIDTH = int(VIEWPORT_WIDTH * 0.7) # 70% size
OVERLAY_HEIGHT = 150
ENCRYPTION_SETTINGS = {
"Forced": 0,
"Enabled": 1,
"Disabled": 2,
}
# Somehow if we close the dialog too fast, it makes XBMC go bonkers
class SafeDialogProgress(xbmcgui.DialogProgress):
def close(self):
xbmc.sleep(1000)
super(SafeDialogProgress, self).close()
class OverlayText(object):
def __init__(self, w, h, *args, **kwargs):
self.window = xbmcgui.Window(WINDOW_FULLSCREEN_VIDEO)
viewport_w, viewport_h = self._get_skin_resolution()
# Adjust size based on viewport, we are using 1080p coordinates
w = int(w * viewport_w / VIEWPORT_WIDTH)
h = int(h * viewport_h / VIEWPORT_HEIGHT)
x = (viewport_w - w) / 2
y = (viewport_h - h) / 2
self._shown = False
self._text = ""
self._label = xbmcgui.ControlLabel(x, y, w, h, self._text, *args, **kwargs)
self._background = xbmcgui.ControlImage(x, y, w, h, os.path.join(plugin.addon.getAddonInfo("path"), "resources", "images", "black.png"))
self._background.setColorDiffuse("0xD0000000")
def show(self):
if not self._shown:
self.window.addControls([self._background, self._label])
self._shown = True
def hide(self):
if self._shown:
self._shown = False
self.window.removeControls([self._background, self._label])
def close(self):
self.hide()
@property
def text(self):
return self._text
@text.setter
def text(self, text):
self._text = text
if self._shown:
self._label.setLabel(self._text)
# This is so hackish it hurts.
def _get_skin_resolution(self):
import xml.etree.ElementTree as ET
skin_path = xbmc.translatePath("special://skin/")
tree = ET.parse(os.path.join(skin_path, "addon.xml"))
res = tree.findall("./extension/res")[0]
return int(res.attrib["width"]), int(res.attrib["height"])
class TorrentPlayer(xbmc.Player):
def init(self, magnet_uri):
track_event("torrent_player", "start")
self.magnet_uri = magnet_uri
self.magnet_args = urlparse.parse_qs(self.magnet_uri.replace("magnet:?", "")) # I know about urlparse.urlsplit but this is faster
self.magnet_display_name = ""
if self.magnet_args["dn"]:
self.magnet_display_name = self.magnet_args["dn"][0]
self.torrent2http_options = {
"magnet": magnet_uri,
"dlpath": plugin.get_setting("dlpath") or ".",
"dlrate": plugin.get_setting("max_download_rate") or "0",
"ulrate": plugin.get_setting("max_upload_rate") or "0",
"ulrate": plugin.get_setting("max_upload_rate") or "0",
"encryption": plugin.get_setting("encryption"),
}
if plugin.get_setting("keep"):
self.torrent2http_options["keep"] = None
track_event("download", "start", magnet_uri)
self.on_playback_started = []
self.on_playback_resumed = []
self.on_playback_paused = []
self.on_playback_stopped = []
return self
def onPlayBackStarted(self):
for f in self.on_playback_started:
f()
track_event("video", "play", self.magnet_display_name)
def onPlayBackResumed(self):
for f in self.on_playback_resumed:
f()
self.onPlayBackStarted()
def onPlayBackPaused(self):
for f in self.on_playback_paused:
f()
track_event("video", "pause", self.magnet_display_name)
def onPlayBackStopped(self):
for f in self.on_playback_stopped:
f()
track_event("video", "stop", self.magnet_display_name)
def _get_status_lines(self, status):
return [
self.magnet_display_name,
"%.2f%% %s" % (status["progress"] * 100, STATE_STRS[status["state"]]),
"D:%(download_rate).2fkb/s U:%(upload_rate).2fkb/s S:%(num_seeds)d P:%(num_peers)d" % status
]
@contextmanager
def attach(self, callback, *events):
for event in events:
event.append(callback)
yield
for event in events:
event.remove(callback)
def _wait_t2h_startup(self, t2h):
start = time.time()
while (time.time() - start) < TORRENT2HTTP_TIMEOUT:
try:
t2h("status")
return True
except:
pass
xbmc.sleep(TORRENT2HTTP_POLL)
return False
def loop(self):
has_resolved = False
plugin.log.info("Starting torrent2http...")
with closing(torrent2http.start(**self.torrent2http_options)) as t2h_instance:
t2h = lambda cmd: url_get_json("http://%s/%s" % (t2h_instance.bind_address, cmd))
if not self._wait_t2h_startup(t2h):
return
plugin.log.info("Opening download dialog...")
with closing(SafeDialogProgress()) as dialog:
dialog.create(plugin.name)
plugin.log.info("Waiting for file resolution...")
while not has_resolved:
if xbmc.abortRequested or dialog.iscanceled():
return
status = t2h("status")
if status["state"] >= 0:
dialog.update(int(status["progress"] * 100), *self._get_status_lines(status))
if status["state"] >= 3 and not has_resolved: # Downloading?
files = t2h("ls")["files"]
biggest_file = sorted(files, key=lambda x: x["size"])[-1]
percent_complete = float(biggest_file["complete_pieces"]) / float(biggest_file["total_pieces"]) * 100.0
if percent_complete >= 0.5:
plugin.log.info("Resolving to http://%s/files/%s" % (t2h_instance.bind_address, biggest_file["name"]))
has_resolved = True
url_name = "/".join(map(urllib.quote, biggest_file["name"].split("/")))
plugin.set_resolved_url({
"label": self.magnet_display_name,
"path": "http://%s/files/%s" % (t2h_instance.bind_address, url_name),
"is_playable": True,
})
break
xbmc.sleep(TORRENT2HTTP_POLL)
# We are now playing
plugin.log.info("Now playing torrent...")
with closing(OverlayText(w=OVERLAY_WIDTH, h=OVERLAY_HEIGHT, alignment=XBFONT_CENTER_X | XBFONT_CENTER_Y)) as overlay:
with nested(self.attach(overlay.show, self.on_playback_paused),
self.attach(overlay.hide, self.on_playback_resumed, self.on_playback_stopped)):
while not xbmc.abortRequested and self.isPlaying():
overlay.text = "\n".join(self._get_status_lines(t2h("status")))
xbmc.sleep(TORRENT2HTTP_POLL)
plugin.log.info("Closing Torrent player.")
|
[
"xbmctorrent.plugin.log.info",
"xbmc.sleep",
"xbmcgui.Window",
"xbmctorrent.ga.track_event",
"xbmc.translatePath",
"xbmctorrent.plugin.get_setting",
"xbmctorrent.plugin.set_resolved_url",
"xbmctorrent.torrent2http.start",
"time.time",
"xbmctorrent.utils.url_get_json",
"xbmctorrent.plugin.addon.getAddonInfo",
"os.path.join",
"xbmcgui.ControlLabel"
] |
[((1052, 1068), 'xbmc.sleep', 'xbmc.sleep', (['(1000)'], {}), '(1000)\n', (1062, 1068), False, 'import xbmc\n'), ((1215, 1254), 'xbmcgui.Window', 'xbmcgui.Window', (['WINDOW_FULLSCREEN_VIDEO'], {}), '(WINDOW_FULLSCREEN_VIDEO)\n', (1229, 1254), False, 'import xbmcgui\n'), ((1627, 1688), 'xbmcgui.ControlLabel', 'xbmcgui.ControlLabel', (['x', 'y', 'w', 'h', 'self._text', '*args'], {}), '(x, y, w, h, self._text, *args, **kwargs)\n', (1647, 1688), False, 'import xbmcgui\n'), ((2564, 2601), 'xbmc.translatePath', 'xbmc.translatePath', (['"""special://skin/"""'], {}), "('special://skin/')\n", (2582, 2601), False, 'import xbmc\n'), ((2856, 2894), 'xbmctorrent.ga.track_event', 'track_event', (['"""torrent_player"""', '"""start"""'], {}), "('torrent_player', 'start')\n", (2867, 2894), False, 'from xbmctorrent.ga import track_event\n'), ((3626, 3652), 'xbmctorrent.plugin.get_setting', 'plugin.get_setting', (['"""keep"""'], {}), "('keep')\n", (3644, 3652), False, 'from xbmctorrent import plugin, torrent2http\n'), ((3715, 3759), 'xbmctorrent.ga.track_event', 'track_event', (['"""download"""', '"""start"""', 'magnet_uri'], {}), "('download', 'start', magnet_uri)\n", (3726, 3759), False, 'from xbmctorrent.ga import track_event\n'), ((4032, 4086), 'xbmctorrent.ga.track_event', 'track_event', (['"""video"""', '"""play"""', 'self.magnet_display_name'], {}), "('video', 'play', self.magnet_display_name)\n", (4043, 4086), False, 'from xbmctorrent.ga import track_event\n'), ((4312, 4367), 'xbmctorrent.ga.track_event', 'track_event', (['"""video"""', '"""pause"""', 'self.magnet_display_name'], {}), "('video', 'pause', self.magnet_display_name)\n", (4323, 4367), False, 'from xbmctorrent.ga import track_event\n'), ((4469, 4523), 'xbmctorrent.ga.track_event', 'track_event', (['"""video"""', '"""stop"""', 'self.magnet_display_name'], {}), "('video', 'stop', self.magnet_display_name)\n", (4480, 4523), False, 'from xbmctorrent.ga import track_event\n'), ((5078, 5089), 'time.time', 'time.time', ([], {}), '()\n', (5087, 5089), False, 'import time\n'), ((5388, 5431), 'xbmctorrent.plugin.log.info', 'plugin.log.info', (['"""Starting torrent2http..."""'], {}), "('Starting torrent2http...')\n", (5403, 5431), False, 'from xbmctorrent import plugin, torrent2http\n'), ((7938, 7980), 'xbmctorrent.plugin.log.info', 'plugin.log.info', (['"""Closing Torrent player."""'], {}), "('Closing Torrent player.')\n", (7953, 7980), False, 'from xbmctorrent import plugin, torrent2http\n'), ((2626, 2662), 'os.path.join', 'os.path.join', (['skin_path', '"""addon.xml"""'], {}), "(skin_path, 'addon.xml')\n", (2638, 2662), False, 'import os\n'), ((3571, 3603), 'xbmctorrent.plugin.get_setting', 'plugin.get_setting', (['"""encryption"""'], {}), "('encryption')\n", (3589, 3603), False, 'from xbmctorrent import plugin, torrent2http\n'), ((5278, 5307), 'xbmc.sleep', 'xbmc.sleep', (['TORRENT2HTTP_POLL'], {}), '(TORRENT2HTTP_POLL)\n', (5288, 5307), False, 'import xbmc\n'), ((5698, 5743), 'xbmctorrent.plugin.log.info', 'plugin.log.info', (['"""Opening download dialog..."""'], {}), "('Opening download dialog...')\n", (5713, 5743), False, 'from xbmctorrent import plugin, torrent2http\n'), ((7355, 7396), 'xbmctorrent.plugin.log.info', 'plugin.log.info', (['"""Now playing torrent..."""'], {}), "('Now playing torrent...')\n", (7370, 7396), False, 'from xbmctorrent import plugin, torrent2http\n'), ((1762, 1795), 'xbmctorrent.plugin.addon.getAddonInfo', 'plugin.addon.getAddonInfo', (['"""path"""'], {}), "('path')\n", (1787, 1795), False, 'from xbmctorrent import 
plugin, torrent2http\n'), ((3302, 3330), 'xbmctorrent.plugin.get_setting', 'plugin.get_setting', (['"""dlpath"""'], {}), "('dlpath')\n", (3320, 3330), False, 'from xbmctorrent import plugin, torrent2http\n'), ((3361, 3400), 'xbmctorrent.plugin.get_setting', 'plugin.get_setting', (['"""max_download_rate"""'], {}), "('max_download_rate')\n", (3379, 3400), False, 'from xbmctorrent import plugin, torrent2http\n'), ((3431, 3468), 'xbmctorrent.plugin.get_setting', 'plugin.get_setting', (['"""max_upload_rate"""'], {}), "('max_upload_rate')\n", (3449, 3468), False, 'from xbmctorrent import plugin, torrent2http\n'), ((3499, 3536), 'xbmctorrent.plugin.get_setting', 'plugin.get_setting', (['"""max_upload_rate"""'], {}), "('max_upload_rate')\n", (3517, 3536), False, 'from xbmctorrent import plugin, torrent2http\n'), ((5105, 5116), 'time.time', 'time.time', ([], {}), '()\n', (5114, 5116), False, 'import time\n'), ((5453, 5500), 'xbmctorrent.torrent2http.start', 'torrent2http.start', ([], {}), '(**self.torrent2http_options)\n', (5471, 5500), False, 'from xbmctorrent import plugin, torrent2http\n'), ((5549, 5612), 'xbmctorrent.utils.url_get_json', 'url_get_json', (["('http://%s/%s' % (t2h_instance.bind_address, cmd))"], {}), "('http://%s/%s' % (t2h_instance.bind_address, cmd))\n", (5561, 5612), False, 'from xbmctorrent.utils import url_get_json\n'), ((5862, 5911), 'xbmctorrent.plugin.log.info', 'plugin.log.info', (['"""Waiting for file resolution..."""'], {}), "('Waiting for file resolution...')\n", (5877, 5911), False, 'from xbmctorrent import plugin, torrent2http\n'), ((7279, 7308), 'xbmc.sleep', 'xbmc.sleep', (['TORRENT2HTTP_POLL'], {}), '(TORRENT2HTTP_POLL)\n', (7289, 7308), False, 'import xbmc\n'), ((7899, 7928), 'xbmc.sleep', 'xbmc.sleep', (['TORRENT2HTTP_POLL'], {}), '(TORRENT2HTTP_POLL)\n', (7909, 7928), False, 'import xbmc\n'), ((6666, 6773), 'xbmctorrent.plugin.log.info', 'plugin.log.info', (["('Resolving to http://%s/files/%s' % (t2h_instance.bind_address,\n biggest_file['name']))"], {}), "('Resolving to http://%s/files/%s' % (t2h_instance.\n bind_address, biggest_file['name']))\n", (6681, 6773), False, 'from xbmctorrent import plugin, torrent2http\n'), ((6945, 7105), 'xbmctorrent.plugin.set_resolved_url', 'plugin.set_resolved_url', (["{'label': self.magnet_display_name, 'path': 'http://%s/files/%s' % (\n t2h_instance.bind_address, url_name), 'is_playable': True}"], {}), "({'label': self.magnet_display_name, 'path': \n 'http://%s/files/%s' % (t2h_instance.bind_address, url_name),\n 'is_playable': True})\n", (6968, 7105), False, 'from xbmctorrent import plugin, torrent2http\n')]
|
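A sketch of the call site the TorrentPlayer sample above implies inside the add-on: init() parses and stores the magnet URI plus settings and returns the player, and loop() starts torrent2http, resolves the largest file, and keeps the status overlay updated while playing. The magnet link below is a placeholder.

# Hypothetical plugin route; the magnet URI is a placeholder.
magnet = "magnet:?xt=urn:btih:0000000000000000000000000000000000000000&dn=Example"
TorrentPlayer().init(magnet).loop()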
#!/usr/bin/env python3
"""
Polyglot v2 node server for Ambient Weather data.
Copyright (c) 2018 <NAME>
"""
CLOUD = False
try:
import polyinterface
except ImportError:
import pgc_interface as polyinterface
CLOUD = True
import sys
import time
import requests
import json
LOGGER = polyinterface.LOGGER
class Controller(polyinterface.Controller):
id = 'Ambient'
def __init__(self, polyglot):
super(Controller, self).__init__(polyglot)
self.name = 'AmbientWeather'
self.address = 'ambient'
self.primary = self.address
self.api_key = ''
self.mac_address = ''
self.indoor = 'disabled'
self.myParams = {
'APIKey': '<your value here>',
'macAddress': '<your value here>',
'indoor': 'disabled',
}
self.url_str = 'http://api.ambientweather.net/v1/devices/'
self.default = '<your value here>'
self.configured = False
self.started = False
self.first_poll = True
self.poly.onConfig(self.process_config)
LOGGER.info('Finished controller init.')
'''
This is called whenever there is a configuration change. Somehow
we need to detect if it is a change in custom parameters and then
process those changes.
'''
def process_config(self, config):
if self.started == False:
LOGGER.debug('Ignore config, NS not yet started')
return
changed = False
if 'customParams' in config:
LOGGER.debug('pc: Incoming config = {}'.format(config['customParams']))
if 'APIKey' in config['customParams']:
if self.myParams['APIKey'] != config['customParams']['APIKey']:
self.myParams['APIKey'] = config['customParams']['APIKey']
self.api_key = config['customParams']['APIKey']
changed = True
if 'macAddress' in config['customParams']:
if self.myParams['macAddress'] != config['customParams']['macAddress']:
self.myParams['macAddress'] = config['customParams']['macAddress']
self.mac_address = config['customParams']['macAddress']
changed = True
if 'indoor' in config['customParams']:
if self.myParams['indoor'] != config['customParams']['indoor']:
self.myParams['indoor'] = config['customParams']['indoor']
self.indoor = config['customParams']['indoor']
changed = True
if changed:
LOGGER.debug('Configuration change detected.')
# Update notices. Make sure we restrict setting notices
# to only when something was updated. Otherwise we can
# end up with an infinite loop as setting a notice will
# trigger a configuration change.
self.removeNoticesAll()
notices = {}
self.configured = True
if self.mac_address == self.default:
notices['mac'] = 'Please set your station macAddress (1)'
                LOGGER.debug('mac address not set, set configured to false')
self.configured = False
if self.api_key == self.default:
notices['key'] = 'Please set APIKey to your Ambient API Key (1)'
                LOGGER.debug('api key not set, set configured to false')
self.configured = False
self.addNotice(notices)
def start(self):
LOGGER.info('Started Ambient Weather Node Server')
if self.check_params():
LOGGER.info('AmbientWeatherNS has been configured.')
self.configured = True
else:
LOGGER.info('APIKey and macAddress not set.')
self.configured = False
self.discover()
LOGGER.info('Ambient Weather Node Server initialization complete.')
self.started = True
def shortPoll(self):
pass
def longPoll(self):
"""
Here is where we want to query the server and update all
the drivers.
https://api.ambientweather.net/v1/devices/macAddress?apiKey=&applicationKey
States that data is updated every 5 or 30 minutes (so which is it?)
"""
if self.configured == False:
if self.first_poll:
LOGGER.info('Waiting to be configured.')
LOGGER.info(' key = ' + self.api_key)
LOGGER.info(' mac = ' + self.mac_address)
self.first_poll = False
return
LOGGER.info('Connecting to Ambient Weather server')
path_str = self.url_str + self.mac_address
path_str += '?apiKey=' + self.api_key
path_str += '&applicationKey=<KEY>'
path_str += '&limit=1'
LOGGER.info(path_str)
try:
c = requests.get(path_str)
except:
LOGGER.error('Request to Ambient servers failed.')
return
try:
awdata = c.json()
except:
LOGGER.error('Ambient sent no data in response to request.')
LOGGER.error(str(c))
return
# deserialize data into an object?
try:
LOGGER.info(awdata[0])
d = awdata[0]
except:
LOGGER.error('Failed to get data from server: ' + str(awdata))
return
# TODO: calculate additional data values
# pressure trend
# heat index
# windchill
# rain rate
for node in self.nodes:
if self.nodes[node].id == 'pressure':
self.set_driver(node, 'GV0', d, 'baromrelin')
if 'baromabsin' in d:
self.set_driver(node, 'ST', d, 'baromabsin')
trend = self.nodes[node].updateTrend(d['baromabsin'])
self.nodes[node].setDriver('GV1', trend, report = True, force = True)
elif self.nodes[node].id == 'temperature':
self.set_driver(node, 'ST', d, 'tempf')
self.set_driver(node, 'GV0', d, 'feelsLike')
self.set_driver(node, 'GV1', d, 'dewPoint')
#self.set_driver(node, 'GV2', d, 'heatIndex')
#self.set_driver(node, 'GV3', d, 'windchill')
elif self.nodes[node].id == 'humidity':
self.set_driver(node, 'ST', d, 'humidity')
elif self.nodes[node].id == 'wind':
self.set_driver(node, 'ST', d, 'windspeedmph')
self.set_driver(node, 'GV0', d, 'winddir')
self.set_driver(node, 'GV1', d, 'windgustmph')
#self.set_driver(node, 'GV2', d, 'windgustdir')
elif self.nodes[node].id == 'precipitation':
#self.set_driver(node, 'ST', d, 'rainrate')
self.set_driver(node, 'GV0', d, 'hourlyrainin')
self.set_driver(node, 'GV1', d, 'dailyrainin')
self.set_driver(node, 'GV2', d, 'weeklyrainin')
self.set_driver(node, 'GV3', d, 'monthlyrainin')
self.set_driver(node, 'GV4', d, 'yearlyrainin')
elif self.nodes[node].id == 'light':
self.set_driver(node, 'ST', d, 'uv')
self.set_driver(node, 'GV0', d, 'solarradiation')
elif self.nodes[node].id == 'indoor':
self.set_driver(node, 'ST', d, 'tempinf')
self.set_driver(node, 'GV0', d, 'humidityin')
self.first_poll = False
def set_driver(self, node, driver, data, index):
try:
self.nodes[node].setDriver(driver, data[index],
report = True, force = self.first_poll)
except (ValueError, KeyError, TypeError):
LOGGER.warning('Missing data: ' + index)
def query(self):
for node in self.nodes:
self.nodes[node].reportDrivers()
def discover(self, *args, **kwargs):
self.addNode(TemperatureNode(self, self.address, 'temperature', 'Temperatures'))
self.addNode(HumidityNode(self, self.address, 'humidity', 'Humidity'))
self.addNode(PressureNode(self, self.address, 'pressure', 'Barometric Pressure'))
self.addNode(WindNode(self, self.address, 'wind', 'Wind'))
self.addNode(PrecipitationNode(self, self.address, 'rain', 'Precipitation'))
self.addNode(LightNode(self, self.address, 'light', 'Illumination'))
if self.indoor.lower() == 'enabled':
self.addNode(IndoorNode(self, self.address, 'indoor', 'Indoor Sensor'))
def delete(self):
LOGGER.info('Deleting the Ambient Weather node server.')
def stop(self):
LOGGER.debug('NodeServer stopped.')
def check_param(self, name, myParams, default, notices, notice):
param = default
st = True
if name in self.polyConfig['customParams']:
if self.polyConfig['customParams'][name] != default:
                param = self.polyConfig['customParams'][name]
myParams[name] = param
else:
if notice != '':
notices[name] = notice
st = False
else:
LOGGER.error('check_params: %s not defined in customParams' % name)
if notice != '':
notices[name] = notice
st = False
return st, param
def check_params(self):
st = True
self.removeNoticesAll()
notices = {}
default = '<your value here>'
st1, self.mac_address = self.check_param('macAddress', self.myParams, default, notices, 'Missing station MAC address')
st2, self.api_key = self.check_param('APIKey', self.myParams, default, notices, 'Missing Ambient API key')
st3, self.indoor = self.check_param('indoor', self.myParams, 'disabled', notices, '')
if 'macAddress' in self.polyConfig['customParams']:
if self.polyConfig['customParams']['macAddress'] != default:
self.mac_address = self.polyConfig['customParams']['macAddress']
self.myParams['macAddress'] = self.mac_address
else:
notices['macaddress'] = 'Please set your station macAddress'
st = False
else:
st = False
self.mac_address = default
LOGGER.error('check_params: macAddress not defined in customParams, please add it.')
notices['macaddress'] = 'Please add a customParam with key "macAddress" and value set to your Ambient station MAC address'
if 'APIKey' in self.polyConfig['customParams']:
if self.polyConfig['customParams']['APIKey'] != default:
self.api_key = self.polyConfig['customParams']['APIKey']
self.myParams['APIKey'] = self.api_key
else:
notices['apikey'] = 'Please set APIKey to your Ambient API Key'
st = False
else:
st = False
self.api_key = default
LOGGER.error('check_params: APIKey not defined in customParams, please add it.')
notices['apikey'] = 'Please add a customParam with key "APIKey" and value set to your Ambient API Key'
if 'indoor' in self.polyConfig['customParams']:
if self.polyConfig['customParams']['indoor'] != 'disabled':
self.indoor = self.polyConfig['customParams']['indoor']
self.myParams['indoor'] = self.indoor
else:
self.indoor = 'disabled'
# Must be called with all parameters and all notices!
self.addCustomParam(self.myParams)
self.addNotice(notices)
return (st1 and st2)
def remove_notices_all(self,command):
LOGGER.info('remove_notices_all:')
# Remove all existing notices
self.removeNoticesAll()
def update_profile(self,command):
LOGGER.info('update_profile:')
st = self.poly.installprofile()
return st
commands = {
'DISCOVER': discover,
'UPDATE_PROFILE': update_profile,
'REMOVE_NOTICES_ALL': remove_notices_all
}
drivers = [
{'driver': 'ST', 'value': 1, 'uom': 2},
{'driver': 'BATLVL', 'value': 0, 'uom': 72} # battery level
]
class TemperatureNode(polyinterface.Node):
id = 'temperature'
drivers = [
{'driver': 'ST', 'value': 0, 'uom': 17},
{'driver': 'GV0', 'value': 0, 'uom': 17}, # feels like
{'driver': 'GV1', 'value': 0, 'uom': 17}, # dewpoint
{'driver': 'GV2', 'value': 0, 'uom': 17}, # heat index
{'driver': 'GV3', 'value': 0, 'uom': 17} # windchill
]
class HumidityNode(polyinterface.Node):
id = 'humidity'
drivers = [{'driver': 'ST', 'value': 0, 'uom': 22}]
class PressureNode(polyinterface.Node):
id = 'pressure'
drivers = [
{'driver': 'ST', 'value': 0, 'uom': 23}, # abs press
{'driver': 'GV0', 'value': 0, 'uom': 23}, # rel press
{'driver': 'GV1', 'value': 0, 'uom': 25} # trend
]
mytrend = []
def updateTrend(self, current):
t = 0
past = 0
if (len(self.mytrend) == 180):
past = self.mytrend.pop()
if self.mytrend != []:
past = self.mytrend[0]
# calculate trend
if (past - current) > 0.01:
t = -1
elif (past - current) < 0.01:
t = 1
self.mytrend.insert(0, current)
return t
class WindNode(polyinterface.Node):
id = 'wind'
drivers = [
{'driver': 'ST', 'value': 0, 'uom': 48}, # speed
{'driver': 'GV0', 'value': 0, 'uom': 76}, # direction
{'driver': 'GV1', 'value': 0, 'uom': 48}, # gust
{'driver': 'GV2', 'value': 0, 'uom': 76} # gust direction
]
class PrecipitationNode(polyinterface.Node):
id = 'precipitation'
drivers = [
{'driver': 'ST', 'value': 0, 'uom': 24}, # rate
{'driver': 'GV0', 'value': 0, 'uom': 105}, # hourly
{'driver': 'GV1', 'value': 0, 'uom': 105}, # daily
{'driver': 'GV2', 'value': 0, 'uom': 105}, # weekly
        {'driver': 'GV3', 'value': 0, 'uom': 105}, # monthly
        {'driver': 'GV4', 'value': 0, 'uom': 105} # yearly
]
class LightNode(polyinterface.Node):
id = 'light'
drivers = [
{'driver': 'ST', 'value': 0, 'uom': 71}, # UV
{'driver': 'GV0', 'value': 0, 'uom': 74}, # solar radiation
{'driver': 'GV1', 'value': 0, 'uom': 36}, # Lux
]
class IndoorNode(polyinterface.Node):
id = 'indoor'
drivers = [
{'driver': 'ST', 'value': 0, 'uom': 17}, # indoor temp
{'driver': 'GV0', 'value': 0, 'uom': 22}, # indoor humidity
]
if __name__ == "__main__":
try:
polyglot = polyinterface.Interface('AmbientWeather')
polyglot.start()
control = Controller(polyglot)
control.runForever()
except (KeyboardInterrupt, SystemExit):
sys.exit(0)
|
[
"pgc_interface.Interface",
"sys.exit",
"requests.get"
] |
[((15080, 15121), 'pgc_interface.Interface', 'polyinterface.Interface', (['"""AmbientWeather"""'], {}), "('AmbientWeather')\n", (15103, 15121), True, 'import pgc_interface as polyinterface\n'), ((5001, 5023), 'requests.get', 'requests.get', (['path_str'], {}), '(path_str)\n', (5013, 5023), False, 'import requests\n'), ((15267, 15278), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (15275, 15278), False, 'import sys\n')]
|
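A standalone sketch of the request the controller's longPoll() above assembles; all credential values are placeholders (the sample itself redacts its applicationKey), and in the real node server APIKey and macAddress arrive through Polyglot's customParams.

# Hypothetical direct call mirroring longPoll(); every credential below is a placeholder.
import requests

url = ('http://api.ambientweather.net/v1/devices/00:11:22:33:44:55'
       '?apiKey=YOUR-API-KEY&applicationKey=YOUR-APPLICATION-KEY&limit=1')
latest = requests.get(url).json()[0]
print(latest.get('tempf'), latest.get('humidity'), latest.get('baromabsin'))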
# Copyright (c) OpenMMLab. All rights reserved.
from mmdeploy.core import FUNCTION_REWRITER
@FUNCTION_REWRITER.register_rewriter(
'mmdet3d.models.detectors.voxelnet.VoxelNet.simple_test')
def voxelnet__simple_test(ctx,
self,
voxels,
num_points,
coors,
img_metas=None,
imgs=None,
rescale=False):
"""Test function without augmentaiton. Rewrite this func to remove model
post process.
Args:
voxels (torch.Tensor): Point features or raw points in shape (N, M, C).
num_points (torch.Tensor): Number of points in each pillar.
coors (torch.Tensor): Coordinates of each voxel.
input_metas (list[dict]): Contain pcd meta info.
Returns:
List: Result of model.
"""
x = self.extract_feat(voxels, num_points, coors, img_metas)
bbox_preds, scores, dir_scores = self.bbox_head(x)
return bbox_preds, scores, dir_scores
@FUNCTION_REWRITER.register_rewriter(
'mmdet3d.models.detectors.voxelnet.VoxelNet.extract_feat')
def voxelnet__extract_feat(ctx,
self,
voxels,
num_points,
coors,
img_metas=None):
"""Extract features from points. Rewrite this func to remove voxelize op.
Args:
voxels (torch.Tensor): Point features or raw points in shape (N, M, C).
num_points (torch.Tensor): Number of points in each pillar.
coors (torch.Tensor): Coordinates of each voxel.
input_metas (list[dict]): Contain pcd meta info.
Returns:
torch.Tensor: Features from points.
"""
voxel_features = self.voxel_encoder(voxels, num_points, coors)
batch_size = coors[-1, 0] + 1 # refactor
assert batch_size == 1
x = self.middle_encoder(voxel_features, coors, batch_size)
x = self.backbone(x)
if self.with_neck:
x = self.neck(x)
return x
|
[
"mmdeploy.core.FUNCTION_REWRITER.register_rewriter"
] |
[((95, 193), 'mmdeploy.core.FUNCTION_REWRITER.register_rewriter', 'FUNCTION_REWRITER.register_rewriter', (['"""mmdet3d.models.detectors.voxelnet.VoxelNet.simple_test"""'], {}), "(\n 'mmdet3d.models.detectors.voxelnet.VoxelNet.simple_test')\n", (130, 193), False, 'from mmdeploy.core import FUNCTION_REWRITER\n'), ((1068, 1167), 'mmdeploy.core.FUNCTION_REWRITER.register_rewriter', 'FUNCTION_REWRITER.register_rewriter', (['"""mmdet3d.models.detectors.voxelnet.VoxelNet.extract_feat"""'], {}), "(\n 'mmdet3d.models.detectors.voxelnet.VoxelNet.extract_feat')\n", (1103, 1167), False, 'from mmdeploy.core import FUNCTION_REWRITER\n')]
|
# Copyright © 2021 <NAME>
# Distributed under the MIT license.
"""
This module defines the function
def generate_extmod(module_name, module_doc, funcs, numpy=False,
                    c_filename=None, setup_filename="setup.py")
It generates C code for a Python extension module, with boilerplate code
for defining functions within the extension module that have signatures
like those in the list of functions provided by the `funcs` parameter.
Only the function signatures of the functions in `funcs` are used; the
bodies of the functions are ignored.
`generate_extmod` generates the boilerplate code for the extension module,
but the code will not do anything useful. The intent is for a developer to
run this once, and then edit the C file to implement whatever the extension
module is supposed to do.
"""
import textwrap
import inspect
def quote_wrap(s):
return '"' + s + '"'
header = """
#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include <stddef.h>
// Only need stdio.h for the demo code that prints the arguments.
#include <stdio.h>
"""
numpy_header = """
#define NPY_NO_DEPRECATED_API NPY_API_VERSION
#include <numpy/ndarrayobject.h>
"""
init_start = """
PyMODINIT_FUNC
PyInit_{module_name}(void)
{{
PyObject *module;
module = PyModule_Create(&{module_name}module);
if (module == NULL) {{
return NULL;
}}
"""
init_end = """
return module;
}
"""
func_start = """
static PyObject *
{func_name}(PyObject *self, PyObject *args, PyObject *kwargs)
{{
"""
func_end = """
// The demo code returns None; modify as needed.
Py_RETURN_NONE;
}
"""
methods_table_start = """
static PyMethodDef {module_name}_methods[] = {{
"""
methods_table_entry = """\
{{"{func_name}", (PyCFunction)(void(*)(void)) {func_name}, METH_VARARGS | METH_KEYWORDS,
{doc}}},
"""
methods_table_end = """\
{NULL, NULL, 0, NULL}
};
"""
module_definition_struct = """
static struct PyModuleDef {module_name}module = {{
PyModuleDef_HEAD_INIT,
"{module_name}",
{module_doc},
-1,
{module_name}_methods
}};
"""
numpy_setup = """
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration(None, parent_package, top_path)
config.add_extension('{module_name}',
sources=['{c_filename}'])
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(name='{module_name}',
version='0.1',
configuration=configuration)
"""
def _generate_function(func, out):
sig = inspect.signature(func)
param_names = list(sig.parameters)
func_name = func.__name__
out.write(func_start.format(func_name=func_name))
# fmt is the format string that will be used in PyArg_ParseTupleAndKeywords
fmt = ''
kwlist = []
has_default = False
for name, param_type in sig.parameters.items():
if func.__kwdefaults__ and name in func.__kwdefaults__:
# Ignore the default value, and use Py_None as the default.
out.write(f' PyObject *{name} = Py_None;\n')
if not has_default:
has_default = True
fmt += '|$'
else:
out.write(f' PyObject *{name} = NULL;\n')
if param_type.kind != param_type.POSITIONAL_ONLY:
kwlist.append(name)
else:
kwlist.append('')
fmt += 'O'
fmt += f':{func_name}'
kwlist_str = ", ".join([quote_wrap(kw) for kw in kwlist])
param_refs = ", ".join(['&' + kw for kw in param_names])
out.write(f' static char *kwlist[] = {{{kwlist_str}, NULL}};\n')
fmt = quote_wrap(fmt)
out.write(f' if (!PyArg_ParseTupleAndKeywords(args, kwargs, {fmt}, kwlist,\n')
out.write(f' {param_refs})) {{\n')
out.write(' return NULL;\n')
out.write(' }\n')
out.write('\n')
out.write(' // This demo code just prints the arguments to stdout.\n')
for param_name in param_names:
out.write(f' printf("{param_name}:\\n");\n')
out.write(f' PyObject_Print({param_name}, stdout, 0);\n')
out.write(' printf("\\n");\n')
out.write(func_end)
def _docstring_literal(doc, name, out):
if doc is None:
return 'NULL'
doc = textwrap.dedent(doc).strip()
lines = doc.splitlines()
if len(lines) > 1:
macro_name = f'{name.upper()}_DOCSTRING'
out.write(f"\n#define {macro_name} \\\n")
for line in doc.splitlines():
out.write(f'"{line}\\n"\\\n')
out.write('""\n')
return macro_name
else:
return quote_wrap(doc)
def _generate_methods_table(module_name, funcs, out):
docstrings = []
for func in funcs:
docstrings.append(_docstring_literal(func.__doc__, func.__name__, out))
out.write(methods_table_start.format(module_name=module_name))
for func, doc in zip(funcs, docstrings):
func_name = func.__name__
out.write(methods_table_entry.format(func_name=func_name, doc=doc))
out.write(methods_table_end)
def _generate_module_definition_struct(module_name, module_doc, out):
doc = _docstring_literal(module_doc, module_name + "_MODULE", out)
out.write(module_definition_struct.format(module_name=module_name,
module_doc=doc))
def _create_setup_numpy(module_name, c_filename, setup_out):
setup_out.write(numpy_setup.format(module_name=module_name,
c_filename=c_filename))
def _create_setup_plain(module_name, c_filename, setup_out):
setup_out.write('# This file follows example shown at\n')
setup_out.write('# https://docs.python.org/3/extending/building.html#building-c-and-c-extensions-with-distutils\n')
setup_out.write('\n')
setup_out.write('from distutils.core import setup, Extension\n')
setup_out.write('\n')
setup_out.write(f"{module_name} = Extension('{module_name}',\n")
setup_out.write(f"{' '*len(module_name)} "
f"sources=['{c_filename}'])\n")
setup_out.write("\n")
setup_out.write(f"setup(name='{module_name}',\n")
setup_out.write(" version='0.1',\n")
setup_out.write(f" ext_modules=[{module_name}])\n")
def generate_extmod(module_name, module_doc, funcs, numpy=False,
c_filename=None, setup_filename="setup.py"):
"""
    Generate the boilerplate code for a Python extension module.
Parameters
----------
module_name : str
The extension module name.
module_doc : str or None
The docstring for the module.
funcs : list[callable]
For each function in ``funcs``, a function with the same name is
created in the extension module. The function will parse its arguments
as objects, and print them to stdout. (This is just so the module can
be compiled and tested; the intent is for the user to edit the file
to do something useful.)
    numpy : bool, optional
        If True, the generated C file includes the NumPy headers, the module
        init function calls ``import_array()``, and the generated setup
        script uses ``numpy.distutils``. Default is False.
    c_filename : str, optional
        The name of the C file for the extension module. If not given, the
        name will be generated as ``f"{module_name}module.c"``.
setup_filename : str
The name of the setup script. The default is `"setup.py"`.
"""
if not module_name.isidentifier():
raise ValueError(f"invalid name {module_name!r}; name must be a "
"valid identifier.")
if c_filename is None:
c_filename = f'{module_name}module.c'
with open(c_filename, 'w') as out:
out.write(header)
if numpy:
out.write(numpy_header)
if callable(funcs):
funcs = [funcs]
for func in funcs:
_generate_function(func, out)
_generate_methods_table(module_name, funcs, out)
_generate_module_definition_struct(module_name, module_doc, out)
out.write(init_start.format(module_name=module_name))
if numpy:
out.write('\n')
out.write(' // Required to access the NumPy C API.\n')
out.write(' import_array();\n')
out.write(init_end)
with open(setup_filename, 'w') as setup_out:
if numpy:
_create_setup_numpy(module_name, c_filename, setup_out)
else:
_create_setup_plain(module_name, c_filename, setup_out)
|
[
"textwrap.dedent",
"inspect.signature"
] |
[((2588, 2611), 'inspect.signature', 'inspect.signature', (['func'], {}), '(func)\n', (2605, 2611), False, 'import inspect\n'), ((4329, 4349), 'textwrap.dedent', 'textwrap.dedent', (['doc'], {}), '(doc)\n', (4344, 4349), False, 'import textwrap\n')]
|
import logging
import disnake
import requests
from bs4 import BeautifulSoup
from bots import imps
from gamestonk_terminal.decorators import log_start_end
logger = logging.getLogger(__name__)
@log_start_end(log=logger)
def supplier_command(ticker=""):
"""Displays suppliers of the company [CSIMarket]"""
# Debug user input
if imps.DEBUG:
logger.debug("dd supplier %s", ticker)
if not ticker:
raise Exception("A ticker is required")
url_supply_chain = (
f"https://csimarket.com/stocks/competitionNO3.php?supply&code={ticker.upper()}"
)
text_supplier_chain = BeautifulSoup(requests.get(url_supply_chain).text, "lxml")
l_suppliers = list()
for supplier in text_supplier_chain.findAll(
"td", {"class": "svjetlirub11 block al"}
):
l_suppliers.append(supplier.text.replace("\n", "").strip())
if not l_suppliers:
raise Exception("No suppliers found.\n")
# Debug user output
if imps.DEBUG:
logger.debug(l_suppliers)
suppliers, unique = [], []
i = 0
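    # Drop duplicate supplier names, then lay them out two per line for the embed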
for value in l_suppliers:
name = value
if name in unique: # pylint: disable=R1724
continue
else:
unique.append(name)
while i < len(unique):
warp = unique[i][0:28]
text = f"{warp:<30}" if (i % 2) == 0 else f"{warp}\n"
suppliers.append(text)
i += 1
title = f"Stocks: [CSIMarket] {ticker.upper()} Suppliers"
reports = []
embeds = []
choices = [
disnake.SelectOption(label="Home", value="0", emoji="🟢"),
]
if len(suppliers) < 30:
description = f"```{''.join(suppliers)}```"
embeds.append(
disnake.Embed(
title=title,
                description=description,
colour=imps.COLOR,
).set_author(
name=imps.AUTHOR_NAME,
icon_url=imps.AUTHOR_ICON_URL,
)
)
reports.append(f"{description}")
# Output data
output = {
"title": title,
"description": reports,
"embed": embeds,
}
else:
i, end = 0, 30
while i < len(suppliers):
description = f"```{''.join(suppliers[i:end])}```"
embeds.append(
disnake.Embed(
title=title,
description=description,
colour=imps.COLOR,
).set_author(
name=imps.AUTHOR_NAME,
icon_url=imps.AUTHOR_ICON_URL,
)
)
i += 30
end += 30
reports.append(f"{description}")
# Output data
output = {
"view": imps.Menu,
"title": title,
"description": reports,
"embed": embeds,
"choices": choices,
}
return output
|
[
"disnake.Embed",
"disnake.SelectOption",
"logging.getLogger",
"requests.get",
"gamestonk_terminal.decorators.log_start_end"
] |
[((166, 193), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (183, 193), False, 'import logging\n'), ((197, 222), 'gamestonk_terminal.decorators.log_start_end', 'log_start_end', ([], {'log': 'logger'}), '(log=logger)\n', (210, 222), False, 'from gamestonk_terminal.decorators import log_start_end\n'), ((1526, 1582), 'disnake.SelectOption', 'disnake.SelectOption', ([], {'label': '"""Home"""', 'value': '"""0"""', 'emoji': '"""🟢"""'}), "(label='Home', value='0', emoji='🟢')\n", (1546, 1582), False, 'import disnake\n'), ((630, 660), 'requests.get', 'requests.get', (['url_supply_chain'], {}), '(url_supply_chain)\n', (642, 660), False, 'import requests\n'), ((1706, 1774), 'disnake.Embed', 'disnake.Embed', ([], {'title': 'title', 'description': 'suppliers', 'colour': 'imps.COLOR'}), '(title=title, description=suppliers, colour=imps.COLOR)\n', (1719, 1774), False, 'import disnake\n'), ((2319, 2389), 'disnake.Embed', 'disnake.Embed', ([], {'title': 'title', 'description': 'description', 'colour': 'imps.COLOR'}), '(title=title, description=description, colour=imps.COLOR)\n', (2332, 2389), False, 'import disnake\n')]
|
import cv2
#Our Image
img_file='CarImage.jpg'
#Our pre-trained car classifier
classifier_file = 'car_detector.xml'
# Create opencv image
img = cv2.imread(img_file)
# Convert to grayscale, since the Haar cascade works on grayscale images
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# Create the car classifier from the pre-trained cascade file
car_tracker = cv2.CascadeClassifier(classifier_file)
# Detect cars and draw a rectangle around each match
for (x, y, w, h) in car_tracker.detectMultiScale(gray):
    cv2.rectangle(img, (x, y), (x + w, y + h), (0, 0, 255), 2)
# Display the image with the cars spotted
cv2.imshow('Car_detector', img)  # pops a window with the image
#Don't autoclose
cv2.waitKey() # waits for a key to be pressed to close window
print("cc")
|
[
"cv2.waitKey",
"cv2.imread",
"cv2.imshow"
] |
[((156, 176), 'cv2.imread', 'cv2.imread', (['img_file'], {}), '(img_file)\n', (166, 176), False, 'import cv2\n'), ((223, 254), 'cv2.imshow', 'cv2.imshow', (['"""Car_detector"""', 'img'], {}), "('Car_detector', img)\n", (233, 254), False, 'import cv2\n'), ((306, 319), 'cv2.waitKey', 'cv2.waitKey', ([], {}), '()\n', (317, 319), False, 'import cv2\n')]
|
# %% # Definition of function whose analytical and autograd gradient are compared
#
# $$
# \begin{align}
# f(x, y) & =
# x^3 y^4, \\
# \nabla (f(x, y)) & =
# \begin{pmatrix}
# 3 x^2 y^4 \\
# 4 x^3 y^3
# \end{pmatrix}.
# \end{align}
# $$
# %% Import packages
import torch
import unittest
# %% Define function f whose gradient is computed
def f(theta):
return (theta[0]**3)*(theta[1]**4)
# %% Define analytical gradient of f
def analytical_gradf(theta):
return torch.tensor([3*(theta[0]**2)*(theta[1]**4), 4*(theta[0]**3)*(theta[1]**3)], dtype=torch.float)
# %% Class for running tests
class TestDerivatives(unittest.TestCase):
def test_grad(self):
theta = torch.tensor([2., 3.], dtype=torch.float, requires_grad=True)
f_val = f(theta)
f_val.backward()
self.assertTrue(torch.equal(analytical_gradf(theta), theta.grad))
# %% Enable running the tests from the command line
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"torch.tensor"
] |
[((474, 582), 'torch.tensor', 'torch.tensor', (['[3 * theta[0] ** 2 * theta[1] ** 4, 4 * theta[0] ** 3 * theta[1] ** 3]'], {'dtype': 'torch.float'}), '([3 * theta[0] ** 2 * theta[1] ** 4, 4 * theta[0] ** 3 * theta[\n 1] ** 3], dtype=torch.float)\n', (486, 582), False, 'import torch\n'), ((961, 976), 'unittest.main', 'unittest.main', ([], {}), '()\n', (974, 976), False, 'import unittest\n'), ((684, 747), 'torch.tensor', 'torch.tensor', (['[2.0, 3.0]'], {'dtype': 'torch.float', 'requires_grad': '(True)'}), '([2.0, 3.0], dtype=torch.float, requires_grad=True)\n', (696, 747), False, 'import torch\n')]
|
#!/usr/bin/env python
# encoding: utf-8
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
SECRET_KEY = os.environ.get('SECRET_KEY') or 'SUPER-SECRET'
LOGFILE = "server.log"
class DevelopmentConfig(Config):
DEBUG = True
LOG_BACKTRACE = True
LOG_LEVEL = 'DEBUG'
DB_HOST = '192.168.1.122'
DB_PORT = 3306
DB_NAME = 'test'
DB_USER = 'root'
DB_PASSWORD = '<PASSWORD>'
DB_USE_UNICODE = True
DB_CHARSET = 'utf8'
LDB_PATH = "../ldb_path"
SHARES_SERVER_IP = "0.0.0.0"
SHARES_SERVER_PORT = 9999
WSGI_SERVER_IP = "0.0.0.0"
WSGI_SERVER_PORT = 8085
@staticmethod
def init_app(app):
pass
class ProductionConfig(Config):
LOG_BACKTRACE = False
LOG_LEVEL = 'INFO'
DB_HOST = '127.0.0.1'
DB_PORT = 3306
DB_NAME = 'ufodb'
DB_USER = ''
DB_PASSWORD = ''
DB_USE_UNICODE = True
DB_CHARSET = 'utf8'
LDB_PATH = "../ldb_path"
SHARES_SERVER_IP = "127.0.0.1"
SHARES_SERVER_PORT = 9999
WSGI_SERVER_IP = "127.0.0.1"
WSGI_SERVER_PORT = 8085
@staticmethod
def init_app(app):
pass
config = {
'development': DevelopmentConfig,
'production': ProductionConfig,
'default': DevelopmentConfig
}
|
[
"os.environ.get",
"os.path.dirname"
] |
[((78, 103), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (93, 103), False, 'import os\n'), ((146, 174), 'os.environ.get', 'os.environ.get', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (160, 174), False, 'import os\n')]
|
from typing import Any, Callable, List
import random
from gurun.node import Node, WrapperNode
try:
import pyautogui
except ImportError:
raise ImportError(
"pyautogui is not installed. Please install it with `pip install pyautogui`."
)
class Typewrite(WrapperNode):
def __init__(self, **kwargs: Any) -> None:
super().__init__(pyautogui.typewrite, **kwargs)
class Scroll(Node):
def __init__(self, **kwargs: Any) -> None:
super().__init__(pyautogui.scroll, **kwargs)
class Click(WrapperNode):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(pyautogui.click, **kwargs)
class HotKey(WrapperNode):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(pyautogui.hotkey, **kwargs)
class MoveRel(Node):
def __init__(
self,
x: int = 0,
y: int = 0,
**kwargs: Any,
) -> None:
super().__init__(**kwargs)
self._x = x
self._y = y
def run(self, *args: Any, **kwargs: Any) -> Any:
pyautogui.moveRel(self._x, self._y)
class MoveTo(WrapperNode):
def __init__(self, **kwargs: Any) -> None:
super().__init__(pyautogui.moveTo, **kwargs)
class DragRel(WrapperNode):
def __init__(self, **kwargs: Any) -> None:
super().__init__(pyautogui.dragRel, **kwargs)
class MultipleClicks(Click):
def run(self, positions: List[List[int]], *args: Any, **kwargs: Any):
for x, y in positions:
super().run(*args, x=x, y=y, **kwargs)
class NaturalClick(Click):
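    # Click that tweens the cursor with a randomly chosen easing function and a
    # random duration so the motion looks more human.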
def __init__(
self,
easing_functions: List[Callable] = [
pyautogui.easeInQuad,
pyautogui.easeOutQuad,
pyautogui.easeInOutQuad,
],
        minimum_duration: float = 1,
        maximum_duration: float = 1.5,
**kwargs: Any,
) -> None:
super().__init__(**kwargs)
self._easing_functions = easing_functions
self._minimum_duration = minimum_duration
self._maximum_duration = maximum_duration
def run(self, *args: Any, **kwargs: Any):
return super().run(
*args,
tween=random.choice(self._easing_functions),
duration=random.uniform(self._minimum_duration, self._maximum_duration),
**kwargs,
)
class MultipleNaturalClicks(NaturalClick):
def run(self, positions: List[List[int]], *args: Any, **kwargs: Any):
for x, y in positions:
super().run(*args, x=x, y=y, **kwargs)
|
[
"pyautogui.moveRel",
"random.choice",
"random.uniform"
] |
[((1065, 1100), 'pyautogui.moveRel', 'pyautogui.moveRel', (['self._x', 'self._y'], {}), '(self._x, self._y)\n', (1082, 1100), False, 'import pyautogui\n'), ((2178, 2215), 'random.choice', 'random.choice', (['self._easing_functions'], {}), '(self._easing_functions)\n', (2191, 2215), False, 'import random\n'), ((2238, 2300), 'random.uniform', 'random.uniform', (['self._minimum_duration', 'self._maximum_duration'], {}), '(self._minimum_duration, self._maximum_duration)\n', (2252, 2300), False, 'import random\n')]
|
# Enter your Python code here
import pya
from time import sleep
print("Starting...")
app = pya.Application.instance()
win = app.main_window()
# Load technology file
#tech = pya.Technology()
#tech.load(tech_file)
#layoutOptions = tech.load_layout_options
# Load def/gds file in the main window
#cell_view = win.load_layout(input_layout, layoutOptions, 0)
#layout_view = cell_view.view()
#layout_view.max_hier()
# gets the corresponding layout object
#layout = cell_view.layout()
layout = pya.Layout()
layout.read(input_layout)
#layout.clear_layer(81)
#layout.delete_layer(81)
# gets the cell to change is "INV2X"
# cell = layout.cell("Active_area")
#cell = cell_view.cell
# finds source layer
#areaid_layer = layout.layer(81, 14)
#areaid_layer.delete()
#layout.write(input_layout)
layout.write('junk.gds')
print("Successfully wrote", input_layout)
app.exit(0)
|
[
"pya.Layout",
"pya.Application.instance"
] |
[((94, 120), 'pya.Application.instance', 'pya.Application.instance', ([], {}), '()\n', (118, 120), False, 'import pya\n'), ((493, 505), 'pya.Layout', 'pya.Layout', ([], {}), '()\n', (503, 505), False, 'import pya\n')]
|
#! /usr/bin/env python
import rospy
import actionlib
import dynamic_reconfigure.client
from riptide_msgs.msg import AttitudeCommand, LinearCommand, Imu
from std_msgs.msg import Float32, Float64, Int32
import riptide_controllers.msg
import time
import math
import numpy as np
def angleDiff(a, b):
return ((a-b+180) % 360)-180
class GateManeuver(object):
ROLL_P = 2
CRUISE_VELOCITY = 45
DRIVE_FORCE = 30
def __init__(self):
self.rollPub = rospy.Publisher(
"/command/roll", AttitudeCommand, queue_size=5)
self.yawPub = rospy.Publisher(
"/command/yaw", AttitudeCommand, queue_size=5)
self.XPub = rospy.Publisher(
"/command/x", LinearCommand, queue_size=5)
self.YPub = rospy.Publisher(
"/command/y", LinearCommand, queue_size=5)
self.ZPub = rospy.Publisher(
"/command/force_z", Float64, queue_size=5)
self._as = actionlib.SimpleActionServer(
"gate_maneuver", riptide_controllers.msg.GateManeuverAction, execute_cb=self.execute_cb, auto_start=False)
self._as.start()
def execute_cb(self, goal):
rospy.loginfo("Starting gate maneuver")
self.startAngle = rospy.wait_for_message("/state/imu", Imu).rpy_deg.z
self.angleTraveled = 0
self.pastHalf = False
self.yawPub.publish(self.CRUISE_VELOCITY, AttitudeCommand.VELOCITY)
self.rollPub.publish(self.CRUISE_VELOCITY, AttitudeCommand.VELOCITY)
self.imuSub = rospy.Subscriber("/state/imu", Imu, self.imuCb)
while self.angleTraveled < 330 and not rospy.is_shutdown():
rospy.sleep(0.05)
if self._as.is_preempt_requested():
rospy.loginfo('Preempted Gate Maneuver')
self.cleanup()
self._as.set_preempted()
return
rospy.loginfo("Leveling")
self.cleanup()
while abs(rospy.wait_for_message("/state/imu", Imu).rpy_deg.x) > 5 and not rospy.is_shutdown():
rospy.sleep(0.05)
rospy.loginfo("Done")
self._as.set_succeeded()
def cleanup(self):
self.yawPub.publish(0, AttitudeCommand.POSITION)
self.rollPub.publish(0, AttitudeCommand.POSITION)
self.imuSub.unregister()
self.XPub.publish(0, LinearCommand.FORCE)
self.YPub.publish(0, LinearCommand.FORCE)
self.ZPub.publish(0)
def imuCb(self, msg):
self.angleTraveled = angleDiff(msg.rpy_deg.z, self.startAngle)
roll = msg.rpy_deg.x
if self.angleTraveled < -90:
self.pastHalf = True
if self.pastHalf and self.angleTraveled < 0:
self.angleTraveled += 360
if roll < 0:
roll += 360
self.rollPub.publish(self.CRUISE_VELOCITY + self.ROLL_P * (self.angleTraveled - roll), AttitudeCommand.VELOCITY)
sr = math.sin(roll * math.pi / 180)
cr = math.cos(roll * math.pi / 180)
sy = math.sin(self.angleTraveled * math.pi / 180)
cy = math.cos(self.angleTraveled * math.pi / 180)
rRotMat = np.matrix([[1,0,0],[0,cr,-sr],[0,sr,cr]])
yRotMat = np.matrix([[cy,-sy,0],[sy,cy,0],[0,0,1]])
outVector = np.dot(np.linalg.inv(np.dot(yRotMat, rRotMat)), np.matrix([[self.DRIVE_FORCE],[0],[0]]))
self.XPub.publish(outVector.item(0), LinearCommand.FORCE)
self.YPub.publish(outVector.item(1), LinearCommand.FORCE)
self.ZPub.publish(outVector.item(2))
if __name__ == '__main__':
rospy.init_node('gate_maneuver')
server = GateManeuver()
rospy.spin()
|
[
"numpy.matrix",
"rospy.Subscriber",
"rospy.wait_for_message",
"rospy.Publisher",
"math.sin",
"rospy.sleep",
"rospy.loginfo",
"rospy.is_shutdown",
"actionlib.SimpleActionServer",
"rospy.init_node",
"math.cos",
"numpy.dot",
"rospy.spin"
] |
[((3524, 3556), 'rospy.init_node', 'rospy.init_node', (['"""gate_maneuver"""'], {}), "('gate_maneuver')\n", (3539, 3556), False, 'import rospy\n'), ((3589, 3601), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (3599, 3601), False, 'import rospy\n'), ((473, 536), 'rospy.Publisher', 'rospy.Publisher', (['"""/command/roll"""', 'AttitudeCommand'], {'queue_size': '(5)'}), "('/command/roll', AttitudeCommand, queue_size=5)\n", (488, 536), False, 'import rospy\n'), ((572, 634), 'rospy.Publisher', 'rospy.Publisher', (['"""/command/yaw"""', 'AttitudeCommand'], {'queue_size': '(5)'}), "('/command/yaw', AttitudeCommand, queue_size=5)\n", (587, 634), False, 'import rospy\n'), ((668, 726), 'rospy.Publisher', 'rospy.Publisher', (['"""/command/x"""', 'LinearCommand'], {'queue_size': '(5)'}), "('/command/x', LinearCommand, queue_size=5)\n", (683, 726), False, 'import rospy\n'), ((760, 818), 'rospy.Publisher', 'rospy.Publisher', (['"""/command/y"""', 'LinearCommand'], {'queue_size': '(5)'}), "('/command/y', LinearCommand, queue_size=5)\n", (775, 818), False, 'import rospy\n'), ((852, 910), 'rospy.Publisher', 'rospy.Publisher', (['"""/command/force_z"""', 'Float64'], {'queue_size': '(5)'}), "('/command/force_z', Float64, queue_size=5)\n", (867, 910), False, 'import rospy\n'), ((944, 1084), 'actionlib.SimpleActionServer', 'actionlib.SimpleActionServer', (['"""gate_maneuver"""', 'riptide_controllers.msg.GateManeuverAction'], {'execute_cb': 'self.execute_cb', 'auto_start': '(False)'}), "('gate_maneuver', riptide_controllers.msg.\n GateManeuverAction, execute_cb=self.execute_cb, auto_start=False)\n", (972, 1084), False, 'import actionlib\n'), ((1159, 1198), 'rospy.loginfo', 'rospy.loginfo', (['"""Starting gate maneuver"""'], {}), "('Starting gate maneuver')\n", (1172, 1198), False, 'import rospy\n'), ((1515, 1562), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/state/imu"""', 'Imu', 'self.imuCb'], {}), "('/state/imu', Imu, self.imuCb)\n", (1531, 1562), False, 'import rospy\n'), ((1872, 1897), 'rospy.loginfo', 'rospy.loginfo', (['"""Leveling"""'], {}), "('Leveling')\n", (1885, 1897), False, 'import rospy\n'), ((2066, 2087), 'rospy.loginfo', 'rospy.loginfo', (['"""Done"""'], {}), "('Done')\n", (2079, 2087), False, 'import rospy\n'), ((2892, 2922), 'math.sin', 'math.sin', (['(roll * math.pi / 180)'], {}), '(roll * math.pi / 180)\n', (2900, 2922), False, 'import math\n'), ((2936, 2966), 'math.cos', 'math.cos', (['(roll * math.pi / 180)'], {}), '(roll * math.pi / 180)\n', (2944, 2966), False, 'import math\n'), ((2980, 3024), 'math.sin', 'math.sin', (['(self.angleTraveled * math.pi / 180)'], {}), '(self.angleTraveled * math.pi / 180)\n', (2988, 3024), False, 'import math\n'), ((3038, 3082), 'math.cos', 'math.cos', (['(self.angleTraveled * math.pi / 180)'], {}), '(self.angleTraveled * math.pi / 180)\n', (3046, 3082), False, 'import math\n'), ((3102, 3151), 'numpy.matrix', 'np.matrix', (['[[1, 0, 0], [0, cr, -sr], [0, sr, cr]]'], {}), '([[1, 0, 0], [0, cr, -sr], [0, sr, cr]])\n', (3111, 3151), True, 'import numpy as np\n'), ((3162, 3211), 'numpy.matrix', 'np.matrix', (['[[cy, -sy, 0], [sy, cy, 0], [0, 0, 1]]'], {}), '([[cy, -sy, 0], [sy, cy, 0], [0, 0, 1]])\n', (3171, 3211), True, 'import numpy as np\n'), ((1644, 1661), 'rospy.sleep', 'rospy.sleep', (['(0.05)'], {}), '(0.05)\n', (1655, 1661), False, 'import rospy\n'), ((2039, 2056), 'rospy.sleep', 'rospy.sleep', (['(0.05)'], {}), '(0.05)\n', (2050, 2056), False, 'import rospy\n'), ((3272, 3313), 'numpy.matrix', 'np.matrix', (['[[self.DRIVE_FORCE], [0], [0]]'], {}), 
'([[self.DRIVE_FORCE], [0], [0]])\n', (3281, 3313), True, 'import numpy as np\n'), ((1225, 1266), 'rospy.wait_for_message', 'rospy.wait_for_message', (['"""/state/imu"""', 'Imu'], {}), "('/state/imu', Imu)\n", (1247, 1266), False, 'import rospy\n'), ((1611, 1630), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (1628, 1630), False, 'import rospy\n'), ((1727, 1767), 'rospy.loginfo', 'rospy.loginfo', (['"""Preempted Gate Maneuver"""'], {}), "('Preempted Gate Maneuver')\n", (1740, 1767), False, 'import rospy\n'), ((2006, 2025), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (2023, 2025), False, 'import rospy\n'), ((3245, 3269), 'numpy.dot', 'np.dot', (['yRotMat', 'rRotMat'], {}), '(yRotMat, rRotMat)\n', (3251, 3269), True, 'import numpy as np\n'), ((1941, 1982), 'rospy.wait_for_message', 'rospy.wait_for_message', (['"""/state/imu"""', 'Imu'], {}), "('/state/imu', Imu)\n", (1963, 1982), False, 'import rospy\n')]
|
import requests
import pymongo
from splinter import Browser
from bs4 import BeautifulSoup
import pandas as pd
from flask import Flask, render_template
# Create an instance of our Flask app.
app = Flask(__name__)
# Create connection variable
conn = 'mongodb://localhost:27017'
# Pass connection to the pymongo instance.
client = pymongo.MongoClient(conn)
# Route that will trigger the scrape function
@app.route("/")
def scrape():
url = 'https://mars.nasa.gov/news/'
# Retrieve page with the requests module
response = requests.get(url)
# Create BeautifulSoup object; parse with 'lxml'
soup = BeautifulSoup(response.text, 'lxml')
# Retrieve the parent divs for all articles
firstTitle = soup.find('div', class_='content_title').text
firstGraf = soup.find('div', class_="rollover_description_inner").text
# Testing code
# firstTitle = "line of dialogue"
executable_path = {'executable_path': 'chromedriver.exe'}
browser = Browser('chrome', **executable_path, headless=False)
url_jpl = 'https://www.jpl.nasa.gov/spaceimages/?search=&category=Mars'
browser.visit(url_jpl)
try:
browser.click_link_by_partial_text('FULL IMAGE')
except:
print("Scraping Complete")
newHtml = browser.html
soup = BeautifulSoup(newHtml, 'html.parser')
images = soup.findAll('img')
# images.find(class_=)
extractImage = images[3]
extractImageSrc = extractImage['src']
featured_image_url = 'https://www.jpl.nasa.gov' + extractImageSrc
# Tweet place holder while I figure out the twitter scrape here ./testPython/twitter Test.ipynb
mars_weather = 'Sol 1801 (Aug 30, 2017), Sunny, high -21C/-5F, low -80C/-112F, pressure at 8.82 hPa, daylight 06:09-17:55'
url = 'https://space-facts.com/mars/'
# Retrieve page with the requests module
response = requests.get(url)
# Create BeautifulSoup object; parse with 'html'
soup = BeautifulSoup(response.text, 'html.parser')
# Code from here
# https://pythonprogramminglanguage.com/web-scraping-with-pandas-and-beautifulsoup/
# https://stackoverflow.com/questions/50633050/scrape-tables-into-dataframe-with-beautifulsoup
table = soup.find_all('table')[0]
table_rows = table.find_all('tr')
l = []
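    # Collect the text of each table row's cells so pandas can build the facts table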
for tr in table_rows:
td = tr.find_all('td')
row = [tr.text for tr in td]
l.append(row)
factsDf = pd.DataFrame(l)
factsDf.columns = (['Mars Metrics','Measurements'])
    factsDf = factsDf.set_index('Mars Metrics')
htmlOutput = factsDf.to_html()
# # https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars
# # The website is failing to load in two browsers. Seems like a bad thing
# hemisphere_image_urls = [
# {"title": "Valles Marineris Hemisphere", "img_url": "..."},
# {"title": "Cerberus Hemisphere", "img_url": "..."},
# {"title": "Schiaparelli Hemisphere", "img_url": "..."},
# {"title": "Syrtis Major Hemisphere", "img_url": "..."},
# ]
# Scraping Wikipedia
url_jpl = 'https://en.wikipedia.org/wiki/Chrysler_Hemi_engine'
browser.visit(url_jpl)
newHtml = browser.html
soup = BeautifulSoup(newHtml, 'html.parser')
images = soup.findAll('img')
# creating a list of images
extImgList = []
count =0
for image in images:
extractImage = images[count]
extractImageSrc = extractImage['src']
extImgList.append(extractImageSrc)
count = count +1
# selecting the ones I like
extractImageSrc0 = extImgList[15]
extractImageSrc1 = extImgList[3]
extractImageSrc2 = extImgList[16]
extractImageSrc3 = extImgList[6]
link0 = "https:" + extractImageSrc0
link1 = "https:" + extractImageSrc1
link2 = "https:" + extractImageSrc2
link3 = "https:" + extractImageSrc3
hemisphere_image_urls = [
{"title": "5 7 Hemi", "img_url": link0},
{"title": "Hemi in 300C", "img_url": link1},
{"title": "6 1 Hemi", "img_url": link2},
{"title": "FiredomeV8", "img_url": link3},
]
# Connect to a database. Will create one if not already available.
db = client.marsdb
# Drops collection if available to remove duplicates
db.marsdb.drop()
# Building DB
db.marsdb.insert_many(
[
{
"Title": firstTitle,
"Paragraph": firstGraf,
"Image": featured_image_url,
"Tweet":mars_weather,
"Table":htmlOutput
}
]
)
# sending info to index.html
marsInfoDb = list(db.marsdb.find())
print (marsInfoDb)
return render_template('index.html', marsInfoDb=marsInfoDb, hemisphere_image_urls=hemisphere_image_urls)
if __name__ == "__main__":
app.run(debug=True)
|
[
"pymongo.MongoClient",
"pandas.DataFrame",
"flask.Flask",
"requests.get",
"flask.render_template",
"bs4.BeautifulSoup",
"splinter.Browser"
] |
[((198, 213), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (203, 213), False, 'from flask import Flask, render_template\n'), ((332, 357), 'pymongo.MongoClient', 'pymongo.MongoClient', (['conn'], {}), '(conn)\n', (351, 357), False, 'import pymongo\n'), ((537, 554), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (549, 554), False, 'import requests\n'), ((619, 655), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.text', '"""lxml"""'], {}), "(response.text, 'lxml')\n", (632, 655), False, 'from bs4 import BeautifulSoup\n'), ((993, 1045), 'splinter.Browser', 'Browser', (['"""chrome"""'], {'headless': '(False)'}), "('chrome', **executable_path, headless=False)\n", (1000, 1045), False, 'from splinter import Browser\n'), ((1313, 1350), 'bs4.BeautifulSoup', 'BeautifulSoup', (['newHtml', '"""html.parser"""'], {}), "(newHtml, 'html.parser')\n", (1326, 1350), False, 'from bs4 import BeautifulSoup\n'), ((1891, 1908), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1903, 1908), False, 'import requests\n'), ((1973, 2016), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.text', '"""html.parser"""'], {}), "(response.text, 'html.parser')\n", (1986, 2016), False, 'from bs4 import BeautifulSoup\n'), ((2444, 2459), 'pandas.DataFrame', 'pd.DataFrame', (['l'], {}), '(l)\n', (2456, 2459), True, 'import pandas as pd\n'), ((3218, 3255), 'bs4.BeautifulSoup', 'BeautifulSoup', (['newHtml', '"""html.parser"""'], {}), "(newHtml, 'html.parser')\n", (3231, 3255), False, 'from bs4 import BeautifulSoup\n'), ((4641, 4743), 'flask.render_template', 'render_template', (['"""index.html"""'], {'marsInfoDb': 'marsInfoDb', 'hemisphere_image_urls': 'hemisphere_image_urls'}), "('index.html', marsInfoDb=marsInfoDb, hemisphere_image_urls=\n hemisphere_image_urls)\n", (4656, 4743), False, 'from flask import Flask, render_template\n')]
|
import os
import json
import sys
file = sys.argv[1]
buildsite = sys.argv[2]
buildnum = sys.argv[3]
fileid = sys.argv[4]
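# Stamp the build number (optionally suffixed with a file id) into the
# given site's BUILD attribute inside the JSON file.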
with open(file, "r") as jsonFile:
data = json.load(jsonFile)
    if fileid != "":
data["default_attributes"]["Sites"][buildsite.upper()]["BUILD"] = str(buildnum) + "." + str(fileid)
else:
data["default_attributes"]["Sites"][buildsite.upper()]["BUILD"] = str(buildnum)
with open(file, "w") as jsonFile:
json.dump(data, jsonFile)
|
[
"json.dump",
"json.load"
] |
[((168, 187), 'json.load', 'json.load', (['jsonFile'], {}), '(jsonFile)\n', (177, 187), False, 'import json\n'), ((443, 468), 'json.dump', 'json.dump', (['data', 'jsonFile'], {}), '(data, jsonFile)\n', (452, 468), False, 'import json\n')]
|
import BaseHTTPServer, SimpleHTTPServer
import ssl
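# Minimal HTTPS server that serves the current directory on port 443
# (BaseHTTPServer/SimpleHTTPServer are the Python 2 module names).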
httpd = BaseHTTPServer.HTTPServer(('0.0.0.0', 443), SimpleHTTPServer.SimpleHTTPRequestHandler)
httpd.socket = ssl.wrap_socket (httpd.socket, certfile='{{ certfile_pem }}', server_side=True)
httpd.serve_forever()
|
[
"BaseHTTPServer.HTTPServer",
"ssl.wrap_socket"
] |
[((60, 151), 'BaseHTTPServer.HTTPServer', 'BaseHTTPServer.HTTPServer', (["('0.0.0.0', 443)", 'SimpleHTTPServer.SimpleHTTPRequestHandler'], {}), "(('0.0.0.0', 443), SimpleHTTPServer.\n SimpleHTTPRequestHandler)\n", (85, 151), False, 'import BaseHTTPServer, SimpleHTTPServer\n'), ((162, 240), 'ssl.wrap_socket', 'ssl.wrap_socket', (['httpd.socket'], {'certfile': '"""{{ certfile_pem }}"""', 'server_side': '(True)'}), "(httpd.socket, certfile='{{ certfile_pem }}', server_side=True)\n", (177, 240), False, 'import ssl\n')]
|
from rest_framework.decorators import api_view, authentication_classes, permission_classes
import requests
from rest_framework.response import Response
from .models import Author, Followers
from rest_framework import status
from .serializers import AuthorSerializer, FollowersSerializer
from permissions import CustomAuthentication, AccessPermission
from django.core.paginator import Paginator
################ FOLLOWERS API ##############################
@api_view(['GET',])
@authentication_classes([CustomAuthentication])
@permission_classes([AccessPermission])
def APIGetFollowers(request, auth_pk):
followersObj = Followers.objects.get(auth_pk = auth_pk)
authors = FollowersSerializer(followersObj)
return Response(authors.data, status=status.HTTP_200_OK)
@api_view(['GET','PUT','DELETE'])
@authentication_classes([CustomAuthentication])
@permission_classes([AccessPermission])
def ForeignAuthorAPI(request, auth_pk, fr_auth_pk):
followersObj = Followers.objects.get(auth_pk = auth_pk)
if request.method == "GET":
detail = False
foreign_author = Author.objects.get(pk = fr_auth_pk)
if foreign_author in followersObj.items.all():
detail = True
response_dict = {
"detail": detail
}
return Response(response_dict)
elif request.method == "PUT":
foreign_author = Author.objects.get(pk = fr_auth_pk)
followersObj.items.add(foreign_author)
elif request.method == "DELETE":
foreign_author = Author.objects.get(pk = fr_auth_pk)
followersObj.items.remove(foreign_author)
authors = FollowersSerializer(followersObj)
return Response(authors.data, status=status.HTTP_200_OK)
###############################################################
@api_view(['GET',])
@authentication_classes([CustomAuthentication])
@permission_classes([AccessPermission])
def AuthorsListAPIView(request):
authors = Author.objects.filter(url__icontains = "linkedspace")
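    # Only list authors whose URL contains "linkedspace", paginated (default 5 per page)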
page_number = request.GET.get('page')
if 'size' in request.GET:
page_size = request.GET.get('size')
else:
page_size = 5
paginator = Paginator(authors, page_size)
page_obj = paginator.get_page(page_number)
serializer = AuthorSerializer(page_obj.object_list, many=True)
response_dict = {
"type": "authors",
"items": serializer.data
}
return Response(response_dict)
@api_view(['GET', 'POST',])
@authentication_classes([CustomAuthentication])
@permission_classes([AccessPermission])
def AuthorDetailAPIView(request, auth_pk):
try:
author = Author.objects.get(pk=auth_pk)
except Author.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
if request.method == "GET":
serializer = AuthorSerializer(instance=author)
return Response(serializer.data, status=status.HTTP_200_OK)
if request.method == "POST":
if 'displayName' in request.data.keys():
author.displayName = request.data['displayName']
if 'email' in request.data.keys():
if not len(Author.objects.filter(email=request.data['email'])):
author.email = request.data['email'] # update email field
else:
# email already exists
serializer = AuthorSerializer(author)
return Response(serializer.data, status=status.HTTP_400_BAD_REQUEST)
if 'github' in request.data.keys():
github_user = request.data['github']
author.github = f'http://github.com/{github_user}'
author.save()
serializer = AuthorSerializer(author)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET',])
def AuthorsConnection(request, auth_id=None):
data = []
team3 = requests.get('https://social-dis.herokuapp.com/authors', auth=('socialdistribution_t03','c404t03'))
if team3.status_code == 200:
data.append(team3.json())
team15 = requests.get('https://unhindled.herokuapp.com/service/authors/', auth=('connectionsuperuser','404connection'))
if team15.status_code == 200:
data.append(team15.json())
team17 = requests.get('https://cmput404f21t17.herokuapp.com/service/connect/public/author/', auth=('<PASSWORD>','123456'))
if team17.status_code == 200:
data.append(team17.json())
return Response({'connection': data})
|
[
"rest_framework.decorators.authentication_classes",
"rest_framework.response.Response",
"django.core.paginator.Paginator",
"requests.get",
"rest_framework.decorators.permission_classes",
"rest_framework.decorators.api_view"
] |
[((461, 478), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (469, 478), False, 'from rest_framework.decorators import api_view, authentication_classes, permission_classes\n'), ((481, 527), 'rest_framework.decorators.authentication_classes', 'authentication_classes', (['[CustomAuthentication]'], {}), '([CustomAuthentication])\n', (503, 527), False, 'from rest_framework.decorators import api_view, authentication_classes, permission_classes\n'), ((529, 567), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[AccessPermission]'], {}), '([AccessPermission])\n', (547, 567), False, 'from rest_framework.decorators import api_view, authentication_classes, permission_classes\n'), ((784, 818), 'rest_framework.decorators.api_view', 'api_view', (["['GET', 'PUT', 'DELETE']"], {}), "(['GET', 'PUT', 'DELETE'])\n", (792, 818), False, 'from rest_framework.decorators import api_view, authentication_classes, permission_classes\n'), ((818, 864), 'rest_framework.decorators.authentication_classes', 'authentication_classes', (['[CustomAuthentication]'], {}), '([CustomAuthentication])\n', (840, 864), False, 'from rest_framework.decorators import api_view, authentication_classes, permission_classes\n'), ((866, 904), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[AccessPermission]'], {}), '([AccessPermission])\n', (884, 904), False, 'from rest_framework.decorators import api_view, authentication_classes, permission_classes\n'), ((1817, 1834), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (1825, 1834), False, 'from rest_framework.decorators import api_view, authentication_classes, permission_classes\n'), ((1837, 1883), 'rest_framework.decorators.authentication_classes', 'authentication_classes', (['[CustomAuthentication]'], {}), '([CustomAuthentication])\n', (1859, 1883), False, 'from rest_framework.decorators import api_view, authentication_classes, permission_classes\n'), ((1885, 1923), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[AccessPermission]'], {}), '([AccessPermission])\n', (1903, 1923), False, 'from rest_framework.decorators import api_view, authentication_classes, permission_classes\n'), ((2464, 2489), 'rest_framework.decorators.api_view', 'api_view', (["['GET', 'POST']"], {}), "(['GET', 'POST'])\n", (2472, 2489), False, 'from rest_framework.decorators import api_view, authentication_classes, permission_classes\n'), ((2492, 2538), 'rest_framework.decorators.authentication_classes', 'authentication_classes', (['[CustomAuthentication]'], {}), '([CustomAuthentication])\n', (2514, 2538), False, 'from rest_framework.decorators import api_view, authentication_classes, permission_classes\n'), ((2540, 2578), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[AccessPermission]'], {}), '([AccessPermission])\n', (2558, 2578), False, 'from rest_framework.decorators import api_view, authentication_classes, permission_classes\n'), ((3827, 3844), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (3835, 3844), False, 'from rest_framework.decorators import api_view, authentication_classes, permission_classes\n'), ((727, 776), 'rest_framework.response.Response', 'Response', (['authors.data'], {'status': 'status.HTTP_200_OK'}), '(authors.data, status=status.HTTP_200_OK)\n', (735, 776), False, 'from rest_framework.response import Response\n'), ((1700, 1749), 'rest_framework.response.Response', 'Response', 
(['authors.data'], {'status': 'status.HTTP_200_OK'}), '(authors.data, status=status.HTTP_200_OK)\n', (1708, 1749), False, 'from rest_framework.response import Response\n'), ((2192, 2221), 'django.core.paginator.Paginator', 'Paginator', (['authors', 'page_size'], {}), '(authors, page_size)\n', (2201, 2221), False, 'from django.core.paginator import Paginator\n'), ((2438, 2461), 'rest_framework.response.Response', 'Response', (['response_dict'], {}), '(response_dict)\n', (2446, 2461), False, 'from rest_framework.response import Response\n'), ((3779, 3823), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_400_BAD_REQUEST'}), '(status=status.HTTP_400_BAD_REQUEST)\n', (3787, 3823), False, 'from rest_framework.response import Response\n'), ((3919, 4024), 'requests.get', 'requests.get', (['"""https://social-dis.herokuapp.com/authors"""'], {'auth': "('socialdistribution_t03', 'c404t03')"}), "('https://social-dis.herokuapp.com/authors', auth=(\n 'socialdistribution_t03', 'c404t03'))\n", (3931, 4024), False, 'import requests\n'), ((4100, 4216), 'requests.get', 'requests.get', (['"""https://unhindled.herokuapp.com/service/authors/"""'], {'auth': "('connectionsuperuser', '404connection')"}), "('https://unhindled.herokuapp.com/service/authors/', auth=(\n 'connectionsuperuser', '404connection'))\n", (4112, 4216), False, 'import requests\n'), ((4294, 4417), 'requests.get', 'requests.get', (['"""https://cmput404f21t17.herokuapp.com/service/connect/public/author/"""'], {'auth': "('<PASSWORD>', '123456')"}), "(\n 'https://cmput404f21t17.herokuapp.com/service/connect/public/author/',\n auth=('<PASSWORD>', '123456'))\n", (4306, 4417), False, 'import requests\n'), ((4489, 4519), 'rest_framework.response.Response', 'Response', (["{'connection': data}"], {}), "({'connection': data})\n", (4497, 4519), False, 'from rest_framework.response import Response\n'), ((1311, 1334), 'rest_framework.response.Response', 'Response', (['response_dict'], {}), '(response_dict)\n', (1319, 1334), False, 'from rest_framework.response import Response\n'), ((2872, 2924), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_200_OK'}), '(serializer.data, status=status.HTTP_200_OK)\n', (2880, 2924), False, 'from rest_framework.response import Response\n'), ((3710, 3762), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_200_OK'}), '(serializer.data, status=status.HTTP_200_OK)\n', (3718, 3762), False, 'from rest_framework.response import Response\n'), ((2726, 2768), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_404_NOT_FOUND'}), '(status=status.HTTP_404_NOT_FOUND)\n', (2734, 2768), False, 'from rest_framework.response import Response\n'), ((3401, 3462), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_400_BAD_REQUEST'}), '(serializer.data, status=status.HTTP_400_BAD_REQUEST)\n', (3409, 3462), False, 'from rest_framework.response import Response\n')]
|
# -*- coding: UTF-8 -*-
# pylint: disable=missing-docstring, too-few-public-methods
"""
Test the trait-type ``UseEnum``.
"""
import unittest
import enum
from ipython_genutils.py3compat import string_types
from traitlets import HasTraits, TraitError, UseEnum
# -----------------------------------------------------------------------------
# TEST SUPPORT:
# -----------------------------------------------------------------------------
class Color(enum.Enum):
red = 1
green = 2
blue = 3
yellow = 4
class OtherColor(enum.Enum):
red = 0
green = 1
# -----------------------------------------------------------------------------
# TESTSUITE:
# -----------------------------------------------------------------------------
class TestUseEnum(unittest.TestCase):
# pylint: disable=invalid-name
class Example(HasTraits):
color = UseEnum(Color, help="Color enum")
def test_assign_enum_value(self):
example = self.Example()
example.color = Color.green
self.assertEqual(example.color, Color.green)
def test_assign_all_enum_values(self):
# pylint: disable=no-member
enum_values = [value for value in Color.__members__.values()]
for value in enum_values:
self.assertIsInstance(value, Color)
example = self.Example()
example.color = value
self.assertEqual(example.color, value)
self.assertIsInstance(value, Color)
def test_assign_enum_value__with_other_enum_raises_error(self):
example = self.Example()
with self.assertRaises(TraitError):
example.color = OtherColor.green
def test_assign_enum_name_1(self):
# -- CONVERT: string => Enum value (item)
example = self.Example()
example.color = "red"
self.assertEqual(example.color, Color.red)
def test_assign_enum_value_name(self):
# -- CONVERT: string => Enum value (item)
# pylint: disable=no-member
enum_names = [enum_val.name for enum_val in Color.__members__.values()]
for value in enum_names:
self.assertIsInstance(value, string_types)
example = self.Example()
enum_value = Color.__members__.get(value)
example.color = value
self.assertIs(example.color, enum_value)
self.assertEqual(example.color.name, value)
def test_assign_scoped_enum_value_name(self):
# -- CONVERT: string => Enum value (item)
scoped_names = ["Color.red", "Color.green", "Color.blue", "Color.yellow"]
for value in scoped_names:
example = self.Example()
example.color = value
self.assertIsInstance(example.color, Color)
self.assertEqual(str(example.color), value)
def test_assign_bad_enum_value_name__raises_error(self):
# -- CONVERT: string => Enum value (item)
bad_enum_names = ["UNKNOWN_COLOR", "RED", "Green", "blue2"]
for value in bad_enum_names:
example = self.Example()
with self.assertRaises(TraitError):
example.color = value
def test_assign_enum_value_number_1(self):
# -- CONVERT: number => Enum value (item)
example = self.Example()
example.color = 1 # == Color.red.value
example.color = Color.red.value
self.assertEqual(example.color, Color.red)
def test_assign_enum_value_number(self):
# -- CONVERT: number => Enum value (item)
# pylint: disable=no-member
enum_numbers = [enum_val.value
for enum_val in Color.__members__.values()]
for value in enum_numbers:
self.assertIsInstance(value, int)
example = self.Example()
example.color = value
self.assertIsInstance(example.color, Color)
self.assertEqual(example.color.value, value)
def test_assign_bad_enum_value_number__raises_error(self):
# -- CONVERT: number => Enum value (item)
bad_numbers = [-1, 0, 5]
for value in bad_numbers:
self.assertIsInstance(value, int)
assert UseEnum(Color).select_by_number(value, None) is None
example = self.Example()
with self.assertRaises(TraitError):
example.color = value
def test_ctor_without_default_value(self):
# -- IMPLICIT: default_value = Color.red (first enum-value)
class Example2(HasTraits):
color = UseEnum(Color)
example = Example2()
self.assertEqual(example.color, Color.red)
def test_ctor_with_default_value_as_enum_value(self):
# -- CONVERT: number => Enum value (item)
class Example2(HasTraits):
color = UseEnum(Color, default_value=Color.green)
example = Example2()
self.assertEqual(example.color, Color.green)
def test_ctor_with_default_value_none_and_not_allow_none(self):
# -- IMPLICIT: default_value = Color.red (first enum-value)
class Example2(HasTraits):
color1 = UseEnum(Color, default_value=None, allow_none=False)
color2 = UseEnum(Color, default_value=None)
example = Example2()
self.assertEqual(example.color1, Color.red)
self.assertEqual(example.color2, Color.red)
def test_ctor_with_default_value_none_and_allow_none(self):
class Example2(HasTraits):
color1 = UseEnum(Color, default_value=None, allow_none=True)
color2 = UseEnum(Color, allow_none=True)
example = Example2()
self.assertIs(example.color1, None)
self.assertIs(example.color2, None)
def test_assign_none_without_allow_none_resets_to_default_value(self):
class Example2(HasTraits):
color1 = UseEnum(Color, allow_none=False)
color2 = UseEnum(Color)
example = Example2()
example.color1 = None
example.color2 = None
self.assertIs(example.color1, Color.red)
self.assertIs(example.color2, Color.red)
def test_assign_none_to_enum_or_none(self):
class Example2(HasTraits):
color = UseEnum(Color, allow_none=True)
example = Example2()
example.color = None
self.assertIs(example.color, None)
def test_assign_bad_value_with_to_enum_or_none(self):
class Example2(HasTraits):
color = UseEnum(Color, allow_none=True)
example = Example2()
with self.assertRaises(TraitError):
example.color = "BAD_VALUE"
|
[
"traitlets.UseEnum"
] |
[((899, 932), 'traitlets.UseEnum', 'UseEnum', (['Color'], {'help': '"""Color enum"""'}), "(Color, help='Color enum')\n", (906, 932), False, 'from traitlets import HasTraits, TraitError, UseEnum\n'), ((4621, 4635), 'traitlets.UseEnum', 'UseEnum', (['Color'], {}), '(Color)\n', (4628, 4635), False, 'from traitlets import HasTraits, TraitError, UseEnum\n'), ((4889, 4930), 'traitlets.UseEnum', 'UseEnum', (['Color'], {'default_value': 'Color.green'}), '(Color, default_value=Color.green)\n', (4896, 4930), False, 'from traitlets import HasTraits, TraitError, UseEnum\n'), ((5217, 5269), 'traitlets.UseEnum', 'UseEnum', (['Color'], {'default_value': 'None', 'allow_none': '(False)'}), '(Color, default_value=None, allow_none=False)\n', (5224, 5269), False, 'from traitlets import HasTraits, TraitError, UseEnum\n'), ((5292, 5326), 'traitlets.UseEnum', 'UseEnum', (['Color'], {'default_value': 'None'}), '(Color, default_value=None)\n', (5299, 5326), False, 'from traitlets import HasTraits, TraitError, UseEnum\n'), ((5588, 5639), 'traitlets.UseEnum', 'UseEnum', (['Color'], {'default_value': 'None', 'allow_none': '(True)'}), '(Color, default_value=None, allow_none=True)\n', (5595, 5639), False, 'from traitlets import HasTraits, TraitError, UseEnum\n'), ((5662, 5693), 'traitlets.UseEnum', 'UseEnum', (['Color'], {'allow_none': '(True)'}), '(Color, allow_none=True)\n', (5669, 5693), False, 'from traitlets import HasTraits, TraitError, UseEnum\n'), ((5952, 5984), 'traitlets.UseEnum', 'UseEnum', (['Color'], {'allow_none': '(False)'}), '(Color, allow_none=False)\n', (5959, 5984), False, 'from traitlets import HasTraits, TraitError, UseEnum\n'), ((6007, 6021), 'traitlets.UseEnum', 'UseEnum', (['Color'], {}), '(Color)\n', (6014, 6021), False, 'from traitlets import HasTraits, TraitError, UseEnum\n'), ((6324, 6355), 'traitlets.UseEnum', 'UseEnum', (['Color'], {'allow_none': '(True)'}), '(Color, allow_none=True)\n', (6331, 6355), False, 'from traitlets import HasTraits, TraitError, UseEnum\n'), ((6580, 6611), 'traitlets.UseEnum', 'UseEnum', (['Color'], {'allow_none': '(True)'}), '(Color, allow_none=True)\n', (6587, 6611), False, 'from traitlets import HasTraits, TraitError, UseEnum\n'), ((4266, 4280), 'traitlets.UseEnum', 'UseEnum', (['Color'], {}), '(Color)\n', (4273, 4280), False, 'from traitlets import HasTraits, TraitError, UseEnum\n')]
|
from typing import List, Union, Tuple, Dict, Set
import googlemaps
import networkx as nx
from networkx.algorithms import shortest_paths
from domain.gateways import DirectionsGateway
from domain.models import Location
class NetworkXGateway(DirectionsGateway):
def __init__(self, graph: nx.Graph):
"""
        Uses the networkx package to build a graph representing the network on which directions and travel
        times can be generated. For a list of functions that generate commonly useful graphs please see:
https://networkx.github.io/documentation/stable/reference/generators.html
:param graph:
"""
        assert graph.number_of_nodes() > 0, "Graph cannot be empty"
self._graph = graph
print('Graph initialized')
def validate_location(self, location: Location):
assert location.coordinates in self._graph.nodes
def get_next_destination(self, origin: Location, destinations: List[Location]) -> Location:
assert isinstance(origin, Location)
for d in destinations:
assert isinstance(d, Location)
destination_lengths = [
shortest_paths.shortest_path_length(self._graph, origin.coordinates, d.coordinates) for d in destinations
]
closest_destination = destinations[destination_lengths.index(min(destination_lengths))]
return closest_destination
def shortest_path_to_destination(self, origin: Location, destination: Location) -> List[Location]:
path: List[Tuple[int]] = shortest_paths.shortest_path(self._graph, origin.coordinates, destination.coordinates)
return [Location(node[0], node[1]) for node in path]
class GoogleDirectionsGateway(DirectionsGateway):
"""
https://developers.google.com/maps/documentation/
"""
def __init__(self, api_key: str):
"""
To get an API get from google:
https://cloud.google.com/docs/authentication/api-keys#creating_an_api_key
Make sure to enable products: Directions API, Distance Matrix API, and Geocoding API
:param api_key:
"""
self._client = googlemaps.Client(key=api_key)
def _geocode(self, request):
# TODO: create request and response schema for api
        raise NotImplementedError
def _distance_matrix(self, request):
# TODO: create request and response schema for api
        raise NotImplementedError
def get_address_location(self, address: str) -> Location:
"""
Convenience method for converting an address to a Location type
:param address:
:return:
"""
result: dict = self._client.geocode(address)
x, y = result[0]['geometry']['location'].values()
return Location(x, y)
def _get_distance_matrix(self, origin: Location, destinations: List[Location]) -> List[dict]:
"""
Accepts an origin and a list of destinations and returns a list that contains the distance to each destination
from the origin
:param origin:
:param destinations:
:return:
"""
destinations: List[Tuple[str]] = self._convert_locations_to_coordinates(destinations)
result = self._client.distance_matrix(origin.coordinates, destinations)
destinations: List[dict] = [
{**cost, 'location': destination} for destination, cost in zip(destinations, result['rows'][0]['elements'])
]
return destinations
@staticmethod
def _convert_locations_to_coordinates(locations: List[Location]) -> List[tuple]:
"""
Converts Location type to a coordinate tuple, (x,y)
:param locations:
:return:
"""
return [l.coordinates for l in locations]
def get_next_destination(self, origin: Location, destinations: List[Location]) -> Location:
"""
Accepts an origin and a list of destinations and returns an itinerary (route) that's optimized so that each
destination can be reached in the least amount of time
:param origin:
:param destinations:
:return:
"""
# Make sure origin and destinations are of type Location (just in case)
origin = self.get_address_location(origin) if isinstance(origin, str) else origin
destinations: List[Location] = [
self.get_address_location(d) if isinstance(d, str) else d for d in destinations
]
path_costs = self._get_distance_matrix(origin, destinations)
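        # Choose the destination with the smallest distance value returned by
        # the Distance Matrix API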
next_destination = destinations[
path_costs.index(min(path_costs, key=lambda x: x['distance']['value']))
]
return next_destination
def shortest_path_to_destination(self, origin: Location, destination: Location) -> List[Location]:
        raise NotImplementedError
|
[
"googlemaps.Client",
"networkx.algorithms.shortest_paths.shortest_path",
"networkx.algorithms.shortest_paths.shortest_path_length",
"domain.models.Location"
] |
[((1527, 1618), 'networkx.algorithms.shortest_paths.shortest_path', 'shortest_paths.shortest_path', (['self._graph', 'origin.coordinates', 'destination.coordinates'], {}), '(self._graph, origin.coordinates, destination.\n coordinates)\n', (1555, 1618), False, 'from networkx.algorithms import shortest_paths\n'), ((2121, 2151), 'googlemaps.Client', 'googlemaps.Client', ([], {'key': 'api_key'}), '(key=api_key)\n', (2138, 2151), False, 'import googlemaps\n'), ((2731, 2745), 'domain.models.Location', 'Location', (['x', 'y'], {}), '(x, y)\n', (2739, 2745), False, 'from domain.models import Location\n'), ((1143, 1231), 'networkx.algorithms.shortest_paths.shortest_path_length', 'shortest_paths.shortest_path_length', (['self._graph', 'origin.coordinates', 'd.coordinates'], {}), '(self._graph, origin.coordinates, d.\n coordinates)\n', (1178, 1231), False, 'from networkx.algorithms import shortest_paths\n'), ((1630, 1656), 'domain.models.Location', 'Location', (['node[0]', 'node[1]'], {}), '(node[0], node[1])\n', (1638, 1656), False, 'from domain.models import Location\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019-06-13 14:53
# @Author : liupan
# @Site :
# @File : demo5.py
# @Software: PyCharm
import csv
with open('data.csv', 'a') as csvfile:
fieldnames = ['id', 'name', 'age']
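    # The file is opened in append mode, so the header row is assumed to exist already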
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writerow({'id': '10004', 'name': 'Durant', 'age': 22})
|
[
"csv.DictWriter"
] |
[((257, 303), 'csv.DictWriter', 'csv.DictWriter', (['csvfile'], {'fieldnames': 'fieldnames'}), '(csvfile, fieldnames=fieldnames)\n', (271, 303), False, 'import csv\n')]
|
import logging
import json
import traceback
from typing import List
from datetime import datetime, timedelta
from django.db import transaction
from django.db.models import Q
from django_q.tasks import Chain
from django_q.models import Schedule
from qbosdk.exceptions import WrongParamsError
from fyle_accounting_mappings.models import Mapping, ExpenseAttribute, DestinationAttribute, EmployeeMapping
from fyle_qbo_api.exceptions import BulkError
from apps.fyle.models import ExpenseGroup, Reimbursement, Expense
from apps.tasks.models import TaskLog
from apps.mappings.models import GeneralMapping
from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings
from apps.fyle.utils import FyleConnector
from .models import Bill, BillLineitem, Cheque, ChequeLineitem, CreditCardPurchase, CreditCardPurchaseLineitem, \
JournalEntry, JournalEntryLineitem, BillPayment, BillPaymentLineitem, QBOExpense, QBOExpenseLineitem
from .utils import QBOConnector
logger = logging.getLogger(__name__)
logger.level = logging.INFO
def get_or_create_credit_card_vendor(workspace_id: int, merchant: str):
"""
Get or create car default vendor
:param workspace_id: Workspace Id
:param merchant: Fyle Expense Merchant
:return:
"""
qbo_credentials = QBOCredential.objects.get(workspace_id=workspace_id)
qbo_connection = QBOConnector(credentials_object=qbo_credentials, workspace_id=workspace_id)
vendor = None
if merchant:
try:
vendor = qbo_connection.get_or_create_vendor(merchant, create=False)
except WrongParamsError as bad_request:
logger.error(bad_request.response)
if not vendor:
vendor = qbo_connection.get_or_create_vendor('Credit Card Misc', create=True)
return vendor
def load_attachments(qbo_connection: QBOConnector, ref_id: str, ref_type: str, expense_group: ExpenseGroup):
"""
Get attachments from fyle
:param qbo_connection: QBO Connection
:param ref_id: object id
:param ref_type: type of object
:param expense_group: Expense group
"""
try:
fyle_credentials = FyleCredential.objects.get(workspace_id=expense_group.workspace_id)
expense_ids = expense_group.expenses.values_list('expense_id', flat=True)
fyle_connector = FyleConnector(fyle_credentials.refresh_token, expense_group.workspace_id)
attachments = fyle_connector.get_attachments(expense_ids)
qbo_connection.post_attachments(ref_id, ref_type, attachments)
except Exception:
error = traceback.format_exc()
logger.error(
'Attachment failed for expense group id %s / workspace id %s \n Error: %s',
expense_group.id, expense_group.workspace_id, {'error': error}
)
def create_or_update_employee_mapping(expense_group: ExpenseGroup, qbo_connection: QBOConnector,
auto_map_employees_preference: str):
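    # Look up an existing vendor mapping for the expense's employee; if none
    # exists, find (or create) a QBO vendor by email or full name and store
    # the mapping.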
try:
vendor_mapping = EmployeeMapping.objects.get(
source_employee__value=expense_group.description.get('employee_email'),
workspace_id=expense_group.workspace_id
).destination_vendor
if not vendor_mapping:
raise EmployeeMapping.DoesNotExist
except EmployeeMapping.DoesNotExist:
source_employee = ExpenseAttribute.objects.get(
workspace_id=expense_group.workspace_id,
attribute_type='EMPLOYEE',
value=expense_group.description.get('employee_email')
)
try:
if auto_map_employees_preference == 'EMAIL':
filters = {
'detail__email__iexact': source_employee.value,
'attribute_type': 'VENDOR'
}
else:
filters = {
'value__iexact': source_employee.detail['full_name'],
'attribute_type': 'VENDOR'
}
entity = DestinationAttribute.objects.filter(
workspace_id=expense_group.workspace_id,
**filters
).first()
if entity is None:
entity: DestinationAttribute = qbo_connection.get_or_create_vendor(
vendor_name=source_employee.detail['full_name'],
email=source_employee.value,
create=True
)
existing_employee_mapping = EmployeeMapping.objects.filter(
source_employee=source_employee
).first()
destination = {}
if existing_employee_mapping:
destination['destination_employee_id'] = existing_employee_mapping.destination_employee_id
destination['destination_card_account_id'] = existing_employee_mapping.destination_card_account_id
mapping = EmployeeMapping.create_or_update_employee_mapping(
source_employee_id=source_employee.id,
destination_vendor_id=entity.id,
workspace=expense_group.workspace,
**destination
)
mapping.source_employee.auto_mapped = True
mapping.source_employee.save()
mapping.destination_vendor.auto_created = True
mapping.destination_vendor.save()
except WrongParamsError as bad_request:
logger.error(bad_request.response)
error_response = json.loads(bad_request.response)['Fault']['Error'][0]
# This error code comes up when the vendor or employee already exists
if error_response['code'] == '6240':
logger.error(
'Destination Attribute with value %s not found in workspace %s',
source_employee.detail['full_name'],
expense_group.workspace_id
)
raise BulkError('Mappings are missing', [{
'row': None,
'expense_group_id': expense_group.id,
'value': expense_group.description.get('employee_email'),
'type': 'Employee Mapping',
'message': 'Employee mapping not found'
}])
def handle_quickbooks_error(exception, expense_group: ExpenseGroup, task_log: TaskLog, export_type: str):
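    """
    Parse errors from a QBO WrongParamsError response and mark the task log as FAILED
    :param exception: Exception raised by the QBO SDK
    :param expense_group: Expense group that failed to export
    :param task_log: Task log to update
    :param export_type: Type of the export (Bill, Check, etc.)
    """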
logger.info(exception.response)
response = json.loads(exception.response)
quickbooks_errors = response['Fault']['Error']
error_msg = 'Failed to create {0}'.format(export_type)
errors = []
for error in quickbooks_errors:
errors.append({
'expense_group_id': expense_group.id,
'type': '{0} / {1}'.format(response['Fault']['type'], error['code']),
'short_description': error['Message'] if error['Message'] else '{0} error'.format(export_type),
'long_description': error['Detail'] if error['Detail'] else error_msg
})
task_log.status = 'FAILED'
task_log.detail = None
task_log.quickbooks_errors = errors
task_log.save()
def schedule_bills_creation(workspace_id: int, expense_group_ids: List[str]):
"""
Schedule bills creation
:param expense_group_ids: List of expense group ids
:param workspace_id: workspace id
:return: None
"""
if expense_group_ids:
expense_groups = ExpenseGroup.objects.filter(
Q(tasklog__id__isnull=True) | ~Q(tasklog__status__in=['IN_PROGRESS', 'COMPLETE']),
workspace_id=workspace_id, id__in=expense_group_ids, bill__id__isnull=True, exported_at__isnull=True
).all()
chain = Chain()
for expense_group in expense_groups:
task_log, _ = TaskLog.objects.get_or_create(
workspace_id=expense_group.workspace_id,
expense_group=expense_group,
defaults={
'status': 'ENQUEUED',
'type': 'CREATING_BILL'
}
)
if task_log.status not in ['IN_PROGRESS', 'ENQUEUED']:
task_log.type = 'CREATING_BILL'
task_log.status = 'ENQUEUED'
task_log.save()
chain.append('apps.quickbooks_online.tasks.create_bill', expense_group, task_log.id)
if chain.length():
chain.run()
def create_bill(expense_group, task_log_id):
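    """
    Create a Bill in QBO for the expense group
    :param expense_group: Expense group to export
    :param task_log_id: Id of the task log tracking the export
    """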
task_log = TaskLog.objects.get(id=task_log_id)
if task_log.status not in ['IN_PROGRESS', 'COMPLETE']:
task_log.status = 'IN_PROGRESS'
task_log.save()
else:
return
general_settings = WorkspaceGeneralSettings.objects.get(workspace_id=expense_group.workspace_id)
try:
qbo_credentials = QBOCredential.objects.get(workspace_id=expense_group.workspace_id)
qbo_connection = QBOConnector(qbo_credentials, expense_group.workspace_id)
if expense_group.fund_source == 'PERSONAL' and general_settings.auto_map_employees \
and general_settings.auto_create_destination_entity \
and general_settings.auto_map_employees != 'EMPLOYEE_CODE':
create_or_update_employee_mapping(expense_group, qbo_connection, general_settings.auto_map_employees)
with transaction.atomic():
__validate_expense_group(expense_group, general_settings)
bill_object = Bill.create_bill(expense_group)
bill_lineitems_objects = BillLineitem.create_bill_lineitems(expense_group, general_settings)
created_bill = qbo_connection.post_bill(bill_object, bill_lineitems_objects)
task_log.detail = created_bill
task_log.bill = bill_object
task_log.quickbooks_errors = None
task_log.status = 'COMPLETE'
task_log.save()
expense_group.exported_at = datetime.now()
expense_group.response_logs = created_bill
expense_group.save()
load_attachments(qbo_connection, created_bill['Bill']['Id'], 'Bill', expense_group)
except QBOCredential.DoesNotExist:
logger.info(
'QBO Credentials not found for workspace_id %s / expense group %s',
expense_group.workspace_id,
expense_group.id
)
detail = {
'expense_group_id': expense_group.id,
'message': 'QBO Account not connected'
}
task_log.status = 'FAILED'
task_log.detail = detail
task_log.save()
except BulkError as exception:
logger.info(exception.response)
detail = exception.response
task_log.status = 'FAILED'
task_log.detail = detail
task_log.save()
except WrongParamsError as exception:
handle_quickbooks_error(exception, expense_group, task_log, 'Bill')
except Exception:
error = traceback.format_exc()
task_log.detail = {
'error': error
}
task_log.status = 'FATAL'
task_log.save()
logger.error('Something unexpected happened workspace_id: %s %s', task_log.workspace_id, task_log.detail)
def __validate_expense_group(expense_group: ExpenseGroup, general_settings: WorkspaceGeneralSettings):
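    """
    Validate that the general, employee, category and tax mappings required to export the expense group exist
    :param expense_group: Expense group to validate
    :param general_settings: Workspace general settings
    """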
bulk_errors = []
row = 0
general_mapping = None
try:
general_mapping = GeneralMapping.objects.get(workspace_id=expense_group.workspace_id)
except GeneralMapping.DoesNotExist:
bulk_errors.append({
'row': None,
'expense_group_id': expense_group.id,
'value': 'bank account',
'type': 'General Mapping',
'message': 'General mapping not found'
})
if general_settings.corporate_credit_card_expenses_object and \
general_settings.corporate_credit_card_expenses_object == 'BILL' and \
expense_group.fund_source == 'CCC':
if general_mapping:
if not (general_mapping.default_ccc_vendor_id or general_mapping.default_ccc_vendor_name):
bulk_errors.append({
'row': None,
'expense_group_id': expense_group.id,
'value': expense_group.description.get('employee_email'),
'type': 'General Mapping',
'message': 'Default Credit Card Vendor not found'
})
if general_mapping and not (general_mapping.accounts_payable_id or general_mapping.accounts_payable_name):
if (general_settings.reimbursable_expenses_object == 'BILL' or \
general_settings.corporate_credit_card_expenses_object == 'BILL') or (
general_settings.reimbursable_expenses_object == 'JOURNAL ENTRY' and
general_settings.employee_field_mapping == 'VENDOR' and expense_group.fund_source == 'PERSONAL'):
bulk_errors.append({
'row': None,
'expense_group_id': expense_group.id,
'value': 'Accounts Payable',
'type': 'General Mapping',
'message': 'Accounts Payable not found'
})
if general_mapping and not (general_mapping.bank_account_id or general_mapping.bank_account_name) and \
(
(
general_settings.reimbursable_expenses_object == 'CHECK'
or (
general_settings.reimbursable_expenses_object == 'JOURNAL ENTRY' and
general_settings.employee_field_mapping == 'EMPLOYEE' and expense_group.fund_source == 'PERSONAL'
)
)
):
bulk_errors.append({
'row': None,
'expense_group_id': expense_group.id,
'value': 'Bank Account',
'type': 'General Mapping',
'message': 'Bank Account not found'
})
if general_mapping and not (general_mapping.qbo_expense_account_id or general_mapping.qbo_expense_account_name)\
and general_settings.reimbursable_expenses_object == 'EXPENSE':
bulk_errors.append({
'row': None,
'expense_group_id': expense_group.id,
'value': 'Expense Payment Account',
'type': 'General Mapping',
'message': 'Expense Payment Account not found'
})
if general_settings.corporate_credit_card_expenses_object == 'CREDIT CARD PURCHASE' or \
general_settings.corporate_credit_card_expenses_object == 'JOURNAL ENTRY':
ccc_account_mapping: EmployeeMapping = EmployeeMapping.objects.filter(
source_employee__value=expense_group.description.get('employee_email'),
workspace_id=expense_group.workspace_id
).first()
ccc_account_id = None
if ccc_account_mapping and ccc_account_mapping.destination_card_account:
ccc_account_id = ccc_account_mapping.destination_card_account.destination_id
elif general_mapping:
ccc_account_id = general_mapping.default_ccc_account_id
if not ccc_account_id:
bulk_errors.append({
'row': None,
'expense_group_id': expense_group.id,
'value': expense_group.description.get('employee_email'),
'type': 'Employee / General Mapping',
'message': 'CCC account mapping / Default CCC account mapping not found'
})
if general_settings.corporate_credit_card_expenses_object != 'BILL' and expense_group.fund_source == 'CCC':
if not (general_mapping.default_ccc_account_id or general_mapping.default_ccc_account_name):
bulk_errors.append({
'row': None,
'expense_group_id': expense_group.id,
'value': 'Default Credit Card Account',
'type': 'General Mapping',
'message': 'Default Credit Card Account not found'
})
if general_settings.import_tax_codes and not (general_mapping.default_tax_code_id or general_mapping.default_tax_code_name):
bulk_errors.append({
'row': None,
'expense_group_id': expense_group.id,
'value': 'Default Tax Code',
'type': 'General Mapping',
'message': 'Default Tax Code not found'
})
if not (expense_group.fund_source == 'CCC' and \
((general_settings.corporate_credit_card_expenses_object == 'CREDIT CARD PURCHASE' and \
general_settings.map_merchant_to_vendor) or \
general_settings.corporate_credit_card_expenses_object == 'BILL')):
try:
entity = EmployeeMapping.objects.get(
source_employee__value=expense_group.description.get('employee_email'),
workspace_id=expense_group.workspace_id
)
if general_settings.employee_field_mapping == 'EMPLOYEE':
entity = entity.destination_employee
else:
entity = entity.destination_vendor
if not entity:
raise EmployeeMapping.DoesNotExist
except EmployeeMapping.DoesNotExist:
bulk_errors.append({
'row': None,
'expense_group_id': expense_group.id,
'value': expense_group.description.get('employee_email'),
'type': 'Employee Mapping',
'message': 'Employee mapping not found'
})
expenses = expense_group.expenses.all()
for lineitem in expenses:
category = lineitem.category if lineitem.category == lineitem.sub_category else '{0} / {1}'.format(
lineitem.category, lineitem.sub_category)
account = Mapping.objects.filter(
source_type='CATEGORY',
source__value=category,
workspace_id=expense_group.workspace_id
).first()
if not account:
bulk_errors.append({
'row': row,
'expense_group_id': expense_group.id,
'value': category,
'type': 'Category Mapping',
'message': 'Category Mapping not found'
})
if general_settings.import_tax_codes and lineitem.tax_group_id:
tax_group = ExpenseAttribute.objects.get(
workspace_id=expense_group.workspace_id,
attribute_type='TAX_GROUP',
source_id=lineitem.tax_group_id
)
tax_code = Mapping.objects.filter(
source_type='TAX_GROUP',
source__value=tax_group.value,
workspace_id=expense_group.workspace_id
).first()
if not tax_code:
bulk_errors.append({
'row': row,
'expense_group_id': expense_group.id,
'value': tax_group.value,
'type': 'Tax Group Mapping',
'message': 'Tax Group Mapping not found'
})
row = row + 1
if bulk_errors:
raise BulkError('Mappings are missing', bulk_errors)
def schedule_cheques_creation(workspace_id: int, expense_group_ids: List[str]):
"""
Schedule cheque creation
:param expense_group_ids: List of expense group ids
:param workspace_id: workspace id
:return: None
"""
if expense_group_ids:
expense_groups = ExpenseGroup.objects.filter(
Q(tasklog__id__isnull=True) | ~Q(tasklog__status__in=['IN_PROGRESS', 'COMPLETE']),
workspace_id=workspace_id, id__in=expense_group_ids, cheque__id__isnull=True, exported_at__isnull=True
).all()
chain = Chain()
for expense_group in expense_groups:
task_log, _ = TaskLog.objects.get_or_create(
workspace_id=expense_group.workspace_id,
expense_group=expense_group,
defaults={
'status': 'ENQUEUED',
'type': 'CREATING_CHECK'
}
)
if task_log.status not in ['IN_PROGRESS', 'ENQUEUED']:
task_log.type = 'CREATING_CHECK'
task_log.status = 'ENQUEUED'
task_log.save()
chain.append('apps.quickbooks_online.tasks.create_cheque', expense_group, task_log.id)
if chain.length():
chain.run()
def create_cheque(expense_group, task_log_id):
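    """
    Create a Cheque (Check) in QBO for the expense group
    :param expense_group: Expense group to export
    :param task_log_id: Id of the task log tracking the export
    """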
task_log = TaskLog.objects.get(id=task_log_id)
if task_log.status not in ['IN_PROGRESS', 'COMPLETE']:
task_log.status = 'IN_PROGRESS'
task_log.save()
else:
return
general_settings = WorkspaceGeneralSettings.objects.get(workspace_id=expense_group.workspace_id)
try:
qbo_credentials = QBOCredential.objects.get(workspace_id=expense_group.workspace_id)
qbo_connection = QBOConnector(qbo_credentials, expense_group.workspace_id)
if general_settings.auto_map_employees and general_settings.auto_create_destination_entity:
create_or_update_employee_mapping(expense_group, qbo_connection, general_settings.auto_map_employees)
with transaction.atomic():
__validate_expense_group(expense_group, general_settings)
cheque_object = Cheque.create_cheque(expense_group)
cheque_line_item_objects = ChequeLineitem.create_cheque_lineitems(expense_group, general_settings)
created_cheque = qbo_connection.post_cheque(cheque_object, cheque_line_item_objects)
task_log.detail = created_cheque
task_log.cheque = cheque_object
task_log.quickbooks_errors = None
task_log.status = 'COMPLETE'
task_log.save()
expense_group.exported_at = datetime.now()
expense_group.response_logs = created_cheque
expense_group.save()
load_attachments(qbo_connection, created_cheque['Purchase']['Id'], 'Purchase', expense_group)
except QBOCredential.DoesNotExist:
logger.info(
'QBO Credentials not found for workspace_id %s / expense group %s',
            expense_group.workspace_id,
            expense_group.id
)
detail = {
'expense_group_id': expense_group.id,
'message': 'QBO Account not connected'
}
task_log.status = 'FAILED'
task_log.detail = detail
task_log.save()
except BulkError as exception:
logger.info(exception.response)
detail = exception.response
task_log.status = 'FAILED'
task_log.detail = detail
task_log.save()
except WrongParamsError as exception:
handle_quickbooks_error(exception, expense_group, task_log, 'Check')
except Exception:
error = traceback.format_exc()
task_log.detail = {
'error': error
}
task_log.status = 'FATAL'
task_log.save()
logger.error('Something unexpected happened workspace_id: %s %s', task_log.workspace_id, task_log.detail)
def schedule_qbo_expense_creation(workspace_id: int, expense_group_ids: List[str]):
"""
Schedule QBO expense creation
:param expense_group_ids: List of expense group ids
:param workspace_id: workspace id
:return: None
"""
if expense_group_ids:
expense_groups = ExpenseGroup.objects.filter(
Q(tasklog__id__isnull=True) | ~Q(tasklog__status__in=['IN_PROGRESS', 'COMPLETE']),
workspace_id=workspace_id, id__in=expense_group_ids, qboexpense__id__isnull=True, exported_at__isnull=True
).all()
chain = Chain()
for expense_group in expense_groups:
task_log, _ = TaskLog.objects.get_or_create(
workspace_id=expense_group.workspace_id,
expense_group=expense_group,
defaults={
'status': 'ENQUEUED',
'type': 'CREATING_EXPENSE'
}
)
if task_log.status not in ['IN_PROGRESS', 'ENQUEUED']:
task_log.type = 'CREATING_EXPENSE'
task_log.status = 'ENQUEUED'
task_log.save()
chain.append('apps.quickbooks_online.tasks.create_qbo_expense', expense_group, task_log.id)
if chain.length():
chain.run()
def create_qbo_expense(expense_group, task_log_id):
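    """
    Create an Expense in QBO for the expense group
    :param expense_group: Expense group to export
    :param task_log_id: Id of the task log tracking the export
    """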
task_log = TaskLog.objects.get(id=task_log_id)
if task_log.status not in ['IN_PROGRESS', 'COMPLETE']:
task_log.status = 'IN_PROGRESS'
task_log.save()
else:
return
general_settings = WorkspaceGeneralSettings.objects.get(workspace_id=expense_group.workspace_id)
try:
qbo_credentials = QBOCredential.objects.get(workspace_id=expense_group.workspace_id)
qbo_connection = QBOConnector(qbo_credentials, expense_group.workspace_id)
if general_settings.auto_map_employees and general_settings.auto_create_destination_entity:
create_or_update_employee_mapping(expense_group, qbo_connection, general_settings.auto_map_employees)
with transaction.atomic():
__validate_expense_group(expense_group, general_settings)
qbo_expense_object = QBOExpense.create_qbo_expense(expense_group)
qbo_expense_line_item_objects = QBOExpenseLineitem.create_qbo_expense_lineitems(
expense_group, general_settings
)
created_qbo_expense = qbo_connection.post_qbo_expense(qbo_expense_object, qbo_expense_line_item_objects)
task_log.detail = created_qbo_expense
task_log.qbo_expense = qbo_expense_object
task_log.quickbooks_errors = None
task_log.status = 'COMPLETE'
task_log.save()
expense_group.exported_at = datetime.now()
expense_group.response_logs = created_qbo_expense
expense_group.save()
load_attachments(qbo_connection, created_qbo_expense['Purchase']['Id'], 'Purchase', expense_group)
except QBOCredential.DoesNotExist:
logger.info(
'QBO Credentials not found for workspace_id %s / expense group %s',
            expense_group.workspace_id,
            expense_group.id
)
detail = {
'expense_group_id': expense_group.id,
'message': 'QBO Account not connected'
}
task_log.status = 'FAILED'
task_log.detail = detail
task_log.save()
except BulkError as exception:
logger.info(exception.response)
detail = exception.response
task_log.status = 'FAILED'
task_log.detail = detail
task_log.save()
except WrongParamsError as exception:
handle_quickbooks_error(exception, expense_group, task_log, 'Expense')
except Exception:
error = traceback.format_exc()
task_log.detail = {
'error': error
}
task_log.status = 'FATAL'
task_log.save()
logger.error('Something unexpected happened workspace_id: %s %s', task_log.workspace_id, task_log.detail)
def schedule_credit_card_purchase_creation(workspace_id: int, expense_group_ids: List[str]):
"""
Schedule credit card purchase creation
:param expense_group_ids: List of expense group ids
:param workspace_id: workspace id
:return: None
"""
if expense_group_ids:
expense_groups = ExpenseGroup.objects.filter(
Q(tasklog__id__isnull=True) | ~Q(tasklog__status__in=['IN_PROGRESS', 'COMPLETE']),
workspace_id=workspace_id, id__in=expense_group_ids, creditcardpurchase__id__isnull=True,
exported_at__isnull=True
).all()
chain = Chain()
for expense_group in expense_groups:
task_log, _ = TaskLog.objects.get_or_create(
workspace_id=expense_group.workspace_id,
expense_group=expense_group,
defaults={
'status': 'ENQUEUED',
'type': 'CREATING_CREDIT_CARD_PURCHASE'
}
)
if task_log.status not in ['IN_PROGRESS', 'ENQUEUED']:
task_log.type = 'CREATING_CREDIT_CARD_PURCHASE'
task_log.status = 'ENQUEUED'
task_log.save()
chain.append('apps.quickbooks_online.tasks.create_credit_card_purchase', expense_group, task_log.id)
if chain.length():
chain.run()
def create_credit_card_purchase(expense_group: ExpenseGroup, task_log_id):
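    """
    Create a Credit Card Purchase in QBO for the expense group
    :param expense_group: Expense group to export
    :param task_log_id: Id of the task log tracking the export
    """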
task_log = TaskLog.objects.get(id=task_log_id)
if task_log.status not in ['IN_PROGRESS', 'COMPLETE']:
task_log.status = 'IN_PROGRESS'
task_log.save()
else:
return
general_settings = WorkspaceGeneralSettings.objects.get(workspace_id=expense_group.workspace_id)
try:
qbo_credentials = QBOCredential.objects.get(workspace_id=expense_group.workspace_id)
qbo_connection = QBOConnector(qbo_credentials, int(expense_group.workspace_id))
if not general_settings.map_merchant_to_vendor:
if general_settings.auto_map_employees and general_settings.auto_create_destination_entity \
and general_settings.auto_map_employees != 'EMPLOYEE_CODE':
create_or_update_employee_mapping(expense_group, qbo_connection, general_settings.auto_map_employees)
else:
merchant = expense_group.expenses.first().vendor
get_or_create_credit_card_vendor(expense_group.workspace_id, merchant)
with transaction.atomic():
__validate_expense_group(expense_group, general_settings)
credit_card_purchase_object = CreditCardPurchase.create_credit_card_purchase(
expense_group, general_settings.map_merchant_to_vendor)
credit_card_purchase_lineitems_objects = CreditCardPurchaseLineitem.create_credit_card_purchase_lineitems(
expense_group, general_settings
)
created_credit_card_purchase = qbo_connection.post_credit_card_purchase(
credit_card_purchase_object, credit_card_purchase_lineitems_objects
)
task_log.detail = created_credit_card_purchase
task_log.credit_card_purchase = credit_card_purchase_object
task_log.quickbooks_errors = None
task_log.status = 'COMPLETE'
task_log.save()
expense_group.exported_at = datetime.now()
expense_group.response_logs = created_credit_card_purchase
expense_group.save()
load_attachments(qbo_connection, created_credit_card_purchase['Purchase']['Id'], 'Purchase', expense_group)
except QBOCredential.DoesNotExist:
logger.info(
'QBO Credentials not found for workspace_id %s / expense group %s',
            expense_group.workspace_id,
            expense_group.id
)
detail = {
'expense_group_id': expense_group.id,
'message': 'QBO Account not connected'
}
task_log.status = 'FAILED'
task_log.detail = detail
task_log.save()
except BulkError as exception:
logger.info(exception.response)
detail = exception.response
task_log.status = 'FAILED'
task_log.detail = detail
task_log.save()
except WrongParamsError as exception:
handle_quickbooks_error(exception, expense_group, task_log, 'Credit Card Purchase')
except Exception:
error = traceback.format_exc()
task_log.detail = {
'error': error
}
task_log.status = 'FATAL'
task_log.save()
logger.error('Something unexpected happened workspace_id: %s %s', task_log.workspace_id, task_log.detail)
def schedule_journal_entry_creation(workspace_id: int, expense_group_ids: List[str]):
"""
Schedule journal_entry creation
:param expense_group_ids: List of expense group ids
:param workspace_id: workspace id
:return: None
"""
if expense_group_ids:
expense_groups = ExpenseGroup.objects.filter(
Q(tasklog__id__isnull=True) | ~Q(tasklog__status__in=['IN_PROGRESS', 'COMPLETE']),
workspace_id=workspace_id, id__in=expense_group_ids, journalentry__id__isnull=True, exported_at__isnull=True
).all()
chain = Chain()
for expense_group in expense_groups:
task_log, _ = TaskLog.objects.get_or_create(
workspace_id=expense_group.workspace_id,
expense_group=expense_group,
defaults={
'status': 'ENQUEUED',
'type': 'CREATING_JOURNAL_ENTRY'
}
)
if task_log.status not in ['IN_PROGRESS', 'ENQUEUED']:
task_log.type = 'CREATING_JOURNAL_ENTRY'
task_log.status = 'ENQUEUED'
task_log.save()
chain.append('apps.quickbooks_online.tasks.create_journal_entry', expense_group, task_log.id)
if chain.length():
chain.run()
def create_journal_entry(expense_group, task_log_id):
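    """
    Create a Journal Entry in QBO for the expense group
    :param expense_group: Expense group to export
    :param task_log_id: Id of the task log tracking the export
    """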
task_log = TaskLog.objects.get(id=task_log_id)
if task_log.status not in ['IN_PROGRESS', 'COMPLETE']:
task_log.status = 'IN_PROGRESS'
task_log.save()
else:
return
general_settings = WorkspaceGeneralSettings.objects.get(workspace_id=expense_group.workspace_id)
try:
qbo_credentials = QBOCredential.objects.get(workspace_id=expense_group.workspace_id)
qbo_connection = QBOConnector(qbo_credentials, expense_group.workspace_id)
if general_settings.auto_map_employees and general_settings.auto_create_destination_entity \
and general_settings.auto_map_employees != 'EMPLOYEE_CODE':
create_or_update_employee_mapping(expense_group, qbo_connection, general_settings.auto_map_employees)
with transaction.atomic():
__validate_expense_group(expense_group, general_settings)
journal_entry_object = JournalEntry.create_journal_entry(expense_group)
journal_entry_lineitems_objects = JournalEntryLineitem.create_journal_entry_lineitems(
expense_group, general_settings
)
created_journal_entry = qbo_connection.post_journal_entry(
journal_entry_object, journal_entry_lineitems_objects, general_settings.je_single_credit_line)
task_log.detail = created_journal_entry
task_log.journal_entry = journal_entry_object
task_log.quickbooks_errors = None
task_log.status = 'COMPLETE'
task_log.save()
expense_group.exported_at = datetime.now()
expense_group.response_logs = created_journal_entry
expense_group.save()
load_attachments(qbo_connection, created_journal_entry['JournalEntry']['Id'], 'JournalEntry', expense_group)
except QBOCredential.DoesNotExist:
logger.info(
'QBO Credentials not found for workspace_id %s / expense group %s',
            expense_group.workspace_id,
            expense_group.id
)
detail = {
'expense_group_id': expense_group.id,
'message': 'QBO Account not connected'
}
task_log.status = 'FAILED'
task_log.detail = detail
task_log.save()
except BulkError as exception:
logger.info(exception.response)
detail = exception.response
task_log.status = 'FAILED'
task_log.detail = detail
task_log.save()
except WrongParamsError as exception:
handle_quickbooks_error(exception, expense_group, task_log, 'Journal Entries')
except Exception:
error = traceback.format_exc()
task_log.detail = {
'error': error
}
task_log.status = 'FATAL'
task_log.save()
logger.error('Something unexpected happened workspace_id: %s %s', task_log.workspace_id, task_log.detail)
def check_expenses_reimbursement_status(expenses):
all_expenses_paid = True
for expense in expenses:
reimbursement = Reimbursement.objects.filter(settlement_id=expense.settlement_id).first()
if reimbursement.state != 'COMPLETE':
all_expenses_paid = False
return all_expenses_paid
def create_bill_payment(workspace_id):
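    """
    Create Bill Payments in QBO for bills whose expenses are fully reimbursed in Fyle
    :param workspace_id: Workspace id
    """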
fyle_credentials = FyleCredential.objects.get(workspace_id=workspace_id)
fyle_connector = FyleConnector(fyle_credentials.refresh_token, workspace_id)
fyle_connector.sync_reimbursements()
bills = Bill.objects.filter(
payment_synced=False, expense_group__workspace_id=workspace_id,
expense_group__fund_source='PERSONAL'
).all()
if bills:
for bill in bills:
expense_group_reimbursement_status = check_expenses_reimbursement_status(
bill.expense_group.expenses.all())
if expense_group_reimbursement_status:
task_log, _ = TaskLog.objects.update_or_create(
workspace_id=workspace_id,
task_id='PAYMENT_{}'.format(bill.expense_group.id),
defaults={
'status': 'IN_PROGRESS',
'type': 'CREATING_BILL_PAYMENT'
}
)
try:
qbo_credentials = QBOCredential.objects.get(workspace_id=workspace_id)
qbo_connection = QBOConnector(qbo_credentials, workspace_id)
with transaction.atomic():
bill_payment_object = BillPayment.create_bill_payment(bill.expense_group)
qbo_object_task_log = TaskLog.objects.get(expense_group=bill.expense_group)
linked_transaction_id = qbo_object_task_log.detail['Bill']['Id']
bill_payment_lineitems_objects = BillPaymentLineitem.create_bill_payment_lineitems(
bill_payment_object.expense_group, linked_transaction_id
)
created_bill_payment = qbo_connection.post_bill_payment(
bill_payment_object, bill_payment_lineitems_objects
)
bill.payment_synced = True
bill.paid_on_qbo = True
bill.save()
task_log.detail = created_bill_payment
task_log.bill_payment = bill_payment_object
task_log.quickbooks_errors = None
task_log.status = 'COMPLETE'
task_log.save()
except QBOCredential.DoesNotExist:
logger.info(
'QBO Credentials not found for workspace_id %s / expense group %s',
workspace_id,
bill.expense_group
)
detail = {
'expense_group_id': bill.expense_group,
'message': 'QBO Account not connected'
}
task_log.status = 'FAILED'
task_log.detail = detail
task_log.save()
except BulkError as exception:
logger.info(exception.response)
detail = exception.response
task_log.status = 'FAILED'
task_log.detail = detail
task_log.save()
except WrongParamsError as exception:
handle_quickbooks_error(exception, bill.expense_group, task_log, 'Bill Payment')
except Exception:
error = traceback.format_exc()
task_log.detail = {
'error': error
}
task_log.status = 'FATAL'
task_log.save()
logger.error(
'Something unexpected happened workspace_id: %s %s', task_log.workspace_id, task_log.detail)
def schedule_bill_payment_creation(sync_fyle_to_qbo_payments, workspace_id):
general_mappings: GeneralMapping = GeneralMapping.objects.filter(workspace_id=workspace_id).first()
if general_mappings:
if sync_fyle_to_qbo_payments and general_mappings.bill_payment_account_id:
start_datetime = datetime.now()
schedule, _ = Schedule.objects.update_or_create(
func='apps.quickbooks_online.tasks.create_bill_payment',
args='{}'.format(workspace_id),
defaults={
'schedule_type': Schedule.MINUTES,
'minutes': 24 * 60,
'next_run': start_datetime
}
)
if not sync_fyle_to_qbo_payments:
schedule: Schedule = Schedule.objects.filter(
func='apps.quickbooks_online.tasks.create_bill_payment',
args='{}'.format(workspace_id)
).first()
if schedule:
schedule.delete()
def get_all_qbo_object_ids(qbo_objects):
qbo_objects_details = {}
expense_group_ids = [qbo_object.expense_group_id for qbo_object in qbo_objects]
task_logs = TaskLog.objects.filter(expense_group_id__in=expense_group_ids).all()
for task_log in task_logs:
qbo_objects_details[task_log.expense_group.id] = {
'expense_group': task_log.expense_group,
'qbo_object_id': task_log.detail['Bill']['Id']
}
return qbo_objects_details
def check_qbo_object_status(workspace_id):
qbo_credentials = QBOCredential.objects.get(workspace_id=workspace_id)
qbo_connection = QBOConnector(qbo_credentials, workspace_id)
bills = Bill.objects.filter(
expense_group__workspace_id=workspace_id, paid_on_qbo=False, expense_group__fund_source='PERSONAL'
).all()
if bills:
bill_ids = get_all_qbo_object_ids(bills)
for bill in bills:
bill_object = qbo_connection.get_bill(bill_ids[bill.expense_group.id]['qbo_object_id'])
if 'LinkedTxn' in bill_object:
line_items = BillLineitem.objects.filter(bill_id=bill.id)
for line_item in line_items:
expense = line_item.expense
expense.paid_on_qbo = True
expense.save()
bill.paid_on_qbo = True
bill.payment_synced = True
bill.save()
def schedule_qbo_objects_status_sync(sync_qbo_to_fyle_payments, workspace_id):
if sync_qbo_to_fyle_payments:
start_datetime = datetime.now()
schedule, _ = Schedule.objects.update_or_create(
func='apps.quickbooks_online.tasks.check_qbo_object_status',
args='{}'.format(workspace_id),
defaults={
'schedule_type': Schedule.MINUTES,
'minutes': 24 * 60,
'next_run': start_datetime
}
)
else:
schedule: Schedule = Schedule.objects.filter(
func='apps.quickbooks_online.tasks.check_qbo_object_status',
args='{}'.format(workspace_id)
).first()
if schedule:
schedule.delete()
def process_reimbursements(workspace_id):
fyle_credentials = FyleCredential.objects.get(workspace_id=workspace_id)
fyle_connector = FyleConnector(fyle_credentials.refresh_token, workspace_id)
fyle_connector.sync_reimbursements()
reimbursements = Reimbursement.objects.filter(state='PENDING', workspace_id=workspace_id).all()
reimbursement_ids = []
if reimbursements:
for reimbursement in reimbursements:
expenses = Expense.objects.filter(settlement_id=reimbursement.settlement_id, fund_source='PERSONAL').all()
paid_expenses = expenses.filter(paid_on_qbo=True)
all_expense_paid = False
if len(expenses):
all_expense_paid = len(expenses) == len(paid_expenses)
if all_expense_paid:
reimbursement_ids.append(reimbursement.reimbursement_id)
if reimbursement_ids:
fyle_connector.post_reimbursement(reimbursement_ids)
fyle_connector.sync_reimbursements()
def schedule_reimbursements_sync(sync_qbo_to_fyle_payments, workspace_id):
if sync_qbo_to_fyle_payments:
start_datetime = datetime.now() + timedelta(hours=12)
schedule, _ = Schedule.objects.update_or_create(
func='apps.quickbooks_online.tasks.process_reimbursements',
args='{}'.format(workspace_id),
defaults={
'schedule_type': Schedule.MINUTES,
'minutes': 24 * 60,
'next_run': start_datetime
}
)
else:
schedule: Schedule = Schedule.objects.filter(
func='apps.quickbooks_online.tasks.process_reimbursements',
args='{}'.format(workspace_id)
).first()
if schedule:
schedule.delete()
def async_sync_accounts(workspace_id):
qbo_credentials: QBOCredential = QBOCredential.objects.get(workspace_id=workspace_id)
qbo_connection = QBOConnector(
credentials_object=qbo_credentials,
workspace_id=workspace_id
)
qbo_connection.sync_accounts()
|
[
"fyle_accounting_mappings.models.DestinationAttribute.objects.filter",
"apps.tasks.models.TaskLog.objects.get_or_create",
"apps.workspaces.models.FyleCredential.objects.get",
"logging.getLogger",
"fyle_qbo_api.exceptions.BulkError",
"fyle_accounting_mappings.models.Mapping.objects.filter",
"django.db.transaction.atomic",
"json.loads",
"apps.tasks.models.TaskLog.objects.get",
"apps.fyle.utils.FyleConnector",
"datetime.timedelta",
"traceback.format_exc",
"apps.tasks.models.TaskLog.objects.filter",
"datetime.datetime.now",
"apps.mappings.models.GeneralMapping.objects.get",
"apps.fyle.models.Reimbursement.objects.filter",
"fyle_accounting_mappings.models.EmployeeMapping.objects.filter",
"fyle_accounting_mappings.models.EmployeeMapping.create_or_update_employee_mapping",
"apps.workspaces.models.WorkspaceGeneralSettings.objects.get",
"django_q.tasks.Chain",
"django.db.models.Q",
"apps.workspaces.models.QBOCredential.objects.get",
"apps.mappings.models.GeneralMapping.objects.filter",
"apps.fyle.models.Expense.objects.filter",
"fyle_accounting_mappings.models.ExpenseAttribute.objects.get"
] |
[((998, 1025), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1015, 1025), False, 'import logging\n'), ((1296, 1348), 'apps.workspaces.models.QBOCredential.objects.get', 'QBOCredential.objects.get', ([], {'workspace_id': 'workspace_id'}), '(workspace_id=workspace_id)\n', (1321, 1348), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((6367, 6397), 'json.loads', 'json.loads', (['exception.response'], {}), '(exception.response)\n', (6377, 6397), False, 'import json\n'), ((8364, 8399), 'apps.tasks.models.TaskLog.objects.get', 'TaskLog.objects.get', ([], {'id': 'task_log_id'}), '(id=task_log_id)\n', (8383, 8399), False, 'from apps.tasks.models import TaskLog\n'), ((8572, 8649), 'apps.workspaces.models.WorkspaceGeneralSettings.objects.get', 'WorkspaceGeneralSettings.objects.get', ([], {'workspace_id': 'expense_group.workspace_id'}), '(workspace_id=expense_group.workspace_id)\n', (8608, 8649), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((20292, 20327), 'apps.tasks.models.TaskLog.objects.get', 'TaskLog.objects.get', ([], {'id': 'task_log_id'}), '(id=task_log_id)\n', (20311, 20327), False, 'from apps.tasks.models import TaskLog\n'), ((20500, 20577), 'apps.workspaces.models.WorkspaceGeneralSettings.objects.get', 'WorkspaceGeneralSettings.objects.get', ([], {'workspace_id': 'expense_group.workspace_id'}), '(workspace_id=expense_group.workspace_id)\n', (20536, 20577), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((24243, 24278), 'apps.tasks.models.TaskLog.objects.get', 'TaskLog.objects.get', ([], {'id': 'task_log_id'}), '(id=task_log_id)\n', (24262, 24278), False, 'from apps.tasks.models import TaskLog\n'), ((24451, 24528), 'apps.workspaces.models.WorkspaceGeneralSettings.objects.get', 'WorkspaceGeneralSettings.objects.get', ([], {'workspace_id': 'expense_group.workspace_id'}), '(workspace_id=expense_group.workspace_id)\n', (24487, 24528), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((28395, 28430), 'apps.tasks.models.TaskLog.objects.get', 'TaskLog.objects.get', ([], {'id': 'task_log_id'}), '(id=task_log_id)\n', (28414, 28430), False, 'from apps.tasks.models import TaskLog\n'), ((28603, 28680), 'apps.workspaces.models.WorkspaceGeneralSettings.objects.get', 'WorkspaceGeneralSettings.objects.get', ([], {'workspace_id': 'expense_group.workspace_id'}), '(workspace_id=expense_group.workspace_id)\n', (28639, 28680), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((33016, 33051), 'apps.tasks.models.TaskLog.objects.get', 'TaskLog.objects.get', ([], {'id': 'task_log_id'}), '(id=task_log_id)\n', (33035, 33051), False, 'from apps.tasks.models import TaskLog\n'), ((33224, 33301), 'apps.workspaces.models.WorkspaceGeneralSettings.objects.get', 'WorkspaceGeneralSettings.objects.get', ([], {'workspace_id': 'expense_group.workspace_id'}), '(workspace_id=expense_group.workspace_id)\n', (33260, 33301), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((36285, 36338), 'apps.workspaces.models.FyleCredential.objects.get', 'FyleCredential.objects.get', ([], {'workspace_id': 'workspace_id'}), '(workspace_id=workspace_id)\n', (36311, 36338), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((36361, 
36420), 'apps.fyle.utils.FyleConnector', 'FyleConnector', (['fyle_credentials.refresh_token', 'workspace_id'], {}), '(fyle_credentials.refresh_token, workspace_id)\n', (36374, 36420), False, 'from apps.fyle.utils import FyleConnector\n'), ((41564, 41616), 'apps.workspaces.models.QBOCredential.objects.get', 'QBOCredential.objects.get', ([], {'workspace_id': 'workspace_id'}), '(workspace_id=workspace_id)\n', (41589, 41616), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((43256, 43309), 'apps.workspaces.models.FyleCredential.objects.get', 'FyleCredential.objects.get', ([], {'workspace_id': 'workspace_id'}), '(workspace_id=workspace_id)\n', (43282, 43309), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((43332, 43391), 'apps.fyle.utils.FyleConnector', 'FyleConnector', (['fyle_credentials.refresh_token', 'workspace_id'], {}), '(fyle_credentials.refresh_token, workspace_id)\n', (43345, 43391), False, 'from apps.fyle.utils import FyleConnector\n'), ((45042, 45094), 'apps.workspaces.models.QBOCredential.objects.get', 'QBOCredential.objects.get', ([], {'workspace_id': 'workspace_id'}), '(workspace_id=workspace_id)\n', (45067, 45094), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((2136, 2203), 'apps.workspaces.models.FyleCredential.objects.get', 'FyleCredential.objects.get', ([], {'workspace_id': 'expense_group.workspace_id'}), '(workspace_id=expense_group.workspace_id)\n', (2162, 2203), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((2311, 2384), 'apps.fyle.utils.FyleConnector', 'FyleConnector', (['fyle_credentials.refresh_token', 'expense_group.workspace_id'], {}), '(fyle_credentials.refresh_token, expense_group.workspace_id)\n', (2324, 2384), False, 'from apps.fyle.utils import FyleConnector\n'), ((7602, 7609), 'django_q.tasks.Chain', 'Chain', ([], {}), '()\n', (7607, 7609), False, 'from django_q.tasks import Chain\n'), ((8686, 8752), 'apps.workspaces.models.QBOCredential.objects.get', 'QBOCredential.objects.get', ([], {'workspace_id': 'expense_group.workspace_id'}), '(workspace_id=expense_group.workspace_id)\n', (8711, 8752), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((11262, 11329), 'apps.mappings.models.GeneralMapping.objects.get', 'GeneralMapping.objects.get', ([], {'workspace_id': 'expense_group.workspace_id'}), '(workspace_id=expense_group.workspace_id)\n', (11288, 11329), False, 'from apps.mappings.models import GeneralMapping\n'), ((18915, 18961), 'fyle_qbo_api.exceptions.BulkError', 'BulkError', (['"""Mappings are missing"""', 'bulk_errors'], {}), "('Mappings are missing', bulk_errors)\n", (18924, 18961), False, 'from fyle_qbo_api.exceptions import BulkError\n'), ((19524, 19531), 'django_q.tasks.Chain', 'Chain', ([], {}), '()\n', (19529, 19531), False, 'from django_q.tasks import Chain\n'), ((20613, 20679), 'apps.workspaces.models.QBOCredential.objects.get', 'QBOCredential.objects.get', ([], {'workspace_id': 'expense_group.workspace_id'}), '(workspace_id=expense_group.workspace_id)\n', (20638, 20679), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((23461, 23468), 'django_q.tasks.Chain', 'Chain', ([], {}), '()\n', (23466, 23468), False, 'from django_q.tasks import Chain\n'), ((24564, 24630), 'apps.workspaces.models.QBOCredential.objects.get', 
'QBOCredential.objects.get', ([], {'workspace_id': 'expense_group.workspace_id'}), '(workspace_id=expense_group.workspace_id)\n', (24589, 24630), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((27555, 27562), 'django_q.tasks.Chain', 'Chain', ([], {}), '()\n', (27560, 27562), False, 'from django_q.tasks import Chain\n'), ((28717, 28783), 'apps.workspaces.models.QBOCredential.objects.get', 'QBOCredential.objects.get', ([], {'workspace_id': 'expense_group.workspace_id'}), '(workspace_id=expense_group.workspace_id)\n', (28742, 28783), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((32218, 32225), 'django_q.tasks.Chain', 'Chain', ([], {}), '()\n', (32223, 32225), False, 'from django_q.tasks import Chain\n'), ((33338, 33404), 'apps.workspaces.models.QBOCredential.objects.get', 'QBOCredential.objects.get', ([], {'workspace_id': 'expense_group.workspace_id'}), '(workspace_id=expense_group.workspace_id)\n', (33363, 33404), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((42573, 42587), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (42585, 42587), False, 'from datetime import datetime, timedelta\n'), ((2560, 2582), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (2580, 2582), False, 'import traceback\n'), ((7682, 7847), 'apps.tasks.models.TaskLog.objects.get_or_create', 'TaskLog.objects.get_or_create', ([], {'workspace_id': 'expense_group.workspace_id', 'expense_group': 'expense_group', 'defaults': "{'status': 'ENQUEUED', 'type': 'CREATING_BILL'}"}), "(workspace_id=expense_group.workspace_id,\n expense_group=expense_group, defaults={'status': 'ENQUEUED', 'type':\n 'CREATING_BILL'})\n", (7711, 7847), False, 'from apps.tasks.models import TaskLog\n'), ((9205, 9225), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (9223, 9225), False, 'from django.db import transaction\n'), ((9793, 9807), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (9805, 9807), False, 'from datetime import datetime, timedelta\n'), ((10800, 10822), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (10820, 10822), False, 'import traceback\n'), ((18118, 18252), 'fyle_accounting_mappings.models.ExpenseAttribute.objects.get', 'ExpenseAttribute.objects.get', ([], {'workspace_id': 'expense_group.workspace_id', 'attribute_type': '"""TAX_GROUP"""', 'source_id': 'lineitem.tax_group_id'}), "(workspace_id=expense_group.workspace_id,\n attribute_type='TAX_GROUP', source_id=lineitem.tax_group_id)\n", (18146, 18252), False, 'from fyle_accounting_mappings.models import Mapping, ExpenseAttribute, DestinationAttribute, EmployeeMapping\n'), ((19604, 19770), 'apps.tasks.models.TaskLog.objects.get_or_create', 'TaskLog.objects.get_or_create', ([], {'workspace_id': 'expense_group.workspace_id', 'expense_group': 'expense_group', 'defaults': "{'status': 'ENQUEUED', 'type': 'CREATING_CHECK'}"}), "(workspace_id=expense_group.workspace_id,\n expense_group=expense_group, defaults={'status': 'ENQUEUED', 'type':\n 'CREATING_CHECK'})\n", (19633, 19770), False, 'from apps.tasks.models import TaskLog\n'), ((20993, 21013), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (21011, 21013), False, 'from django.db import transaction\n'), ((21607, 21621), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (21619, 21621), False, 'from datetime import datetime, timedelta\n'), ((22626, 22648), 
'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (22646, 22648), False, 'import traceback\n'), ((23541, 23709), 'apps.tasks.models.TaskLog.objects.get_or_create', 'TaskLog.objects.get_or_create', ([], {'workspace_id': 'expense_group.workspace_id', 'expense_group': 'expense_group', 'defaults': "{'status': 'ENQUEUED', 'type': 'CREATING_EXPENSE'}"}), "(workspace_id=expense_group.workspace_id,\n expense_group=expense_group, defaults={'status': 'ENQUEUED', 'type':\n 'CREATING_EXPENSE'})\n", (23570, 23709), False, 'from apps.tasks.models import TaskLog\n'), ((24944, 24964), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (24962, 24964), False, 'from django.db import transaction\n'), ((25651, 25665), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (25663, 25665), False, 'from datetime import datetime, timedelta\n'), ((26682, 26704), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (26702, 26704), False, 'import traceback\n'), ((27635, 27816), 'apps.tasks.models.TaskLog.objects.get_or_create', 'TaskLog.objects.get_or_create', ([], {'workspace_id': 'expense_group.workspace_id', 'expense_group': 'expense_group', 'defaults': "{'status': 'ENQUEUED', 'type': 'CREATING_CREDIT_CARD_PURCHASE'}"}), "(workspace_id=expense_group.workspace_id,\n expense_group=expense_group, defaults={'status': 'ENQUEUED', 'type':\n 'CREATING_CREDIT_CARD_PURCHASE'})\n", (27664, 27816), False, 'from apps.tasks.models import TaskLog\n'), ((29405, 29425), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (29423, 29425), False, 'from django.db import transaction\n'), ((30315, 30329), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (30327, 30329), False, 'from datetime import datetime, timedelta\n'), ((31377, 31399), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (31397, 31399), False, 'import traceback\n'), ((32298, 32472), 'apps.tasks.models.TaskLog.objects.get_or_create', 'TaskLog.objects.get_or_create', ([], {'workspace_id': 'expense_group.workspace_id', 'expense_group': 'expense_group', 'defaults': "{'status': 'ENQUEUED', 'type': 'CREATING_JOURNAL_ENTRY'}"}), "(workspace_id=expense_group.workspace_id,\n expense_group=expense_group, defaults={'status': 'ENQUEUED', 'type':\n 'CREATING_JOURNAL_ENTRY'})\n", (32327, 32472), False, 'from apps.tasks.models import TaskLog\n'), ((33795, 33815), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (33813, 33815), False, 'from django.db import transaction\n'), ((34585, 34599), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (34597, 34599), False, 'from datetime import datetime, timedelta\n'), ((35636, 35658), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (35656, 35658), False, 'import traceback\n'), ((40135, 40191), 'apps.mappings.models.GeneralMapping.objects.filter', 'GeneralMapping.objects.filter', ([], {'workspace_id': 'workspace_id'}), '(workspace_id=workspace_id)\n', (40164, 40191), False, 'from apps.mappings.models import GeneralMapping\n'), ((40337, 40351), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (40349, 40351), False, 'from datetime import datetime, timedelta\n'), ((41183, 41245), 'apps.tasks.models.TaskLog.objects.filter', 'TaskLog.objects.filter', ([], {'expense_group_id__in': 'expense_group_ids'}), '(expense_group_id__in=expense_group_ids)\n', (41205, 41245), False, 'from apps.tasks.models import TaskLog\n'), ((43456, 43528), 'apps.fyle.models.Reimbursement.objects.filter', 
'Reimbursement.objects.filter', ([], {'state': '"""PENDING"""', 'workspace_id': 'workspace_id'}), "(state='PENDING', workspace_id=workspace_id)\n", (43484, 43528), False, 'from apps.fyle.models import ExpenseGroup, Reimbursement, Expense\n'), ((44328, 44342), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (44340, 44342), False, 'from datetime import datetime, timedelta\n'), ((44345, 44364), 'datetime.timedelta', 'timedelta', ([], {'hours': '(12)'}), '(hours=12)\n', (44354, 44364), False, 'from datetime import datetime, timedelta\n'), ((4849, 5030), 'fyle_accounting_mappings.models.EmployeeMapping.create_or_update_employee_mapping', 'EmployeeMapping.create_or_update_employee_mapping', ([], {'source_employee_id': 'source_employee.id', 'destination_vendor_id': 'entity.id', 'workspace': 'expense_group.workspace'}), '(source_employee_id=\n source_employee.id, destination_vendor_id=entity.id, workspace=\n expense_group.workspace, **destination)\n', (4898, 5030), False, 'from fyle_accounting_mappings.models import Mapping, ExpenseAttribute, DestinationAttribute, EmployeeMapping\n'), ((17556, 17671), 'fyle_accounting_mappings.models.Mapping.objects.filter', 'Mapping.objects.filter', ([], {'source_type': '"""CATEGORY"""', 'source__value': 'category', 'workspace_id': 'expense_group.workspace_id'}), "(source_type='CATEGORY', source__value=category,\n workspace_id=expense_group.workspace_id)\n", (17578, 17671), False, 'from fyle_accounting_mappings.models import Mapping, ExpenseAttribute, DestinationAttribute, EmployeeMapping\n'), ((36032, 36097), 'apps.fyle.models.Reimbursement.objects.filter', 'Reimbursement.objects.filter', ([], {'settlement_id': 'expense.settlement_id'}), '(settlement_id=expense.settlement_id)\n', (36060, 36097), False, 'from apps.fyle.models import ExpenseGroup, Reimbursement, Expense\n'), ((18335, 18459), 'fyle_accounting_mappings.models.Mapping.objects.filter', 'Mapping.objects.filter', ([], {'source_type': '"""TAX_GROUP"""', 'source__value': 'tax_group.value', 'workspace_id': 'expense_group.workspace_id'}), "(source_type='TAX_GROUP', source__value=tax_group.\n value, workspace_id=expense_group.workspace_id)\n", (18357, 18459), False, 'from fyle_accounting_mappings.models import Mapping, ExpenseAttribute, DestinationAttribute, EmployeeMapping\n'), ((37275, 37327), 'apps.workspaces.models.QBOCredential.objects.get', 'QBOCredential.objects.get', ([], {'workspace_id': 'workspace_id'}), '(workspace_id=workspace_id)\n', (37300, 37327), False, 'from apps.workspaces.models import QBOCredential, FyleCredential, WorkspaceGeneralSettings\n'), ((43655, 43748), 'apps.fyle.models.Expense.objects.filter', 'Expense.objects.filter', ([], {'settlement_id': 'reimbursement.settlement_id', 'fund_source': '"""PERSONAL"""'}), "(settlement_id=reimbursement.settlement_id,\n fund_source='PERSONAL')\n", (43677, 43748), False, 'from apps.fyle.models import ExpenseGroup, Reimbursement, Expense\n'), ((3963, 4054), 'fyle_accounting_mappings.models.DestinationAttribute.objects.filter', 'DestinationAttribute.objects.filter', ([], {'workspace_id': 'expense_group.workspace_id'}), '(workspace_id=expense_group.workspace_id,\n **filters)\n', (3998, 4054), False, 'from fyle_accounting_mappings.models import Mapping, ExpenseAttribute, DestinationAttribute, EmployeeMapping\n'), ((4430, 4493), 'fyle_accounting_mappings.models.EmployeeMapping.objects.filter', 'EmployeeMapping.objects.filter', ([], {'source_employee': 'source_employee'}), '(source_employee=source_employee)\n', (4460, 4493), False, 'from 
fyle_accounting_mappings.models import Mapping, ExpenseAttribute, DestinationAttribute, EmployeeMapping\n'), ((7373, 7400), 'django.db.models.Q', 'Q', ([], {'tasklog__id__isnull': '(True)'}), '(tasklog__id__isnull=True)\n', (7374, 7400), False, 'from django.db.models import Q\n'), ((19293, 19320), 'django.db.models.Q', 'Q', ([], {'tasklog__id__isnull': '(True)'}), '(tasklog__id__isnull=True)\n', (19294, 19320), False, 'from django.db.models import Q\n'), ((23226, 23253), 'django.db.models.Q', 'Q', ([], {'tasklog__id__isnull': '(True)'}), '(tasklog__id__isnull=True)\n', (23227, 23253), False, 'from django.db.models import Q\n'), ((27300, 27327), 'django.db.models.Q', 'Q', ([], {'tasklog__id__isnull': '(True)'}), '(tasklog__id__isnull=True)\n', (27301, 27327), False, 'from django.db.models import Q\n'), ((31981, 32008), 'django.db.models.Q', 'Q', ([], {'tasklog__id__isnull': '(True)'}), '(tasklog__id__isnull=True)\n', (31982, 32008), False, 'from django.db.models import Q\n'), ((37435, 37455), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (37453, 37455), False, 'from django.db import transaction\n'), ((37603, 37656), 'apps.tasks.models.TaskLog.objects.get', 'TaskLog.objects.get', ([], {'expense_group': 'bill.expense_group'}), '(expense_group=bill.expense_group)\n', (37622, 37656), False, 'from apps.tasks.models import TaskLog\n'), ((39660, 39682), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (39680, 39682), False, 'import traceback\n'), ((7404, 7454), 'django.db.models.Q', 'Q', ([], {'tasklog__status__in': "['IN_PROGRESS', 'COMPLETE']"}), "(tasklog__status__in=['IN_PROGRESS', 'COMPLETE'])\n", (7405, 7454), False, 'from django.db.models import Q\n'), ((19324, 19374), 'django.db.models.Q', 'Q', ([], {'tasklog__status__in': "['IN_PROGRESS', 'COMPLETE']"}), "(tasklog__status__in=['IN_PROGRESS', 'COMPLETE'])\n", (19325, 19374), False, 'from django.db.models import Q\n'), ((23257, 23307), 'django.db.models.Q', 'Q', ([], {'tasklog__status__in': "['IN_PROGRESS', 'COMPLETE']"}), "(tasklog__status__in=['IN_PROGRESS', 'COMPLETE'])\n", (23258, 23307), False, 'from django.db.models import Q\n'), ((27331, 27381), 'django.db.models.Q', 'Q', ([], {'tasklog__status__in': "['IN_PROGRESS', 'COMPLETE']"}), "(tasklog__status__in=['IN_PROGRESS', 'COMPLETE'])\n", (27332, 27381), False, 'from django.db.models import Q\n'), ((32012, 32062), 'django.db.models.Q', 'Q', ([], {'tasklog__status__in': "['IN_PROGRESS', 'COMPLETE']"}), "(tasklog__status__in=['IN_PROGRESS', 'COMPLETE'])\n", (32013, 32062), False, 'from django.db.models import Q\n'), ((5429, 5461), 'json.loads', 'json.loads', (['bad_request.response'], {}), '(bad_request.response)\n', (5439, 5461), False, 'import json\n')]
|
import cv2 as cv
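# Minimal OpenCV demo: load an image, show it, apply a 3x3 Gaussian blur, and run Canny edge detection on the original image.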
img = cv.imread("me1.jpg")
cv.imshow('me', img)
blured = cv.GaussianBlur(img, (3,3), cv.BORDER_DEFAULT)
cv.imshow('blured', blured)
canny = cv.Canny(img, 60,70)
cv.imshow('canny edges', canny)
cv.waitKey(0)
|
[
"cv2.GaussianBlur",
"cv2.Canny",
"cv2.waitKey",
"cv2.imread",
"cv2.imshow"
] |
[((24, 44), 'cv2.imread', 'cv.imread', (['"""me1.jpg"""'], {}), "('me1.jpg')\n", (33, 44), True, 'import cv2 as cv\n'), ((45, 65), 'cv2.imshow', 'cv.imshow', (['"""me"""', 'img'], {}), "('me', img)\n", (54, 65), True, 'import cv2 as cv\n'), ((77, 124), 'cv2.GaussianBlur', 'cv.GaussianBlur', (['img', '(3, 3)', 'cv.BORDER_DEFAULT'], {}), '(img, (3, 3), cv.BORDER_DEFAULT)\n', (92, 124), True, 'import cv2 as cv\n'), ((124, 151), 'cv2.imshow', 'cv.imshow', (['"""blured"""', 'blured'], {}), "('blured', blured)\n", (133, 151), True, 'import cv2 as cv\n'), ((161, 182), 'cv2.Canny', 'cv.Canny', (['img', '(60)', '(70)'], {}), '(img, 60, 70)\n', (169, 182), True, 'import cv2 as cv\n'), ((182, 213), 'cv2.imshow', 'cv.imshow', (['"""canny edges"""', 'canny'], {}), "('canny edges', canny)\n", (191, 213), True, 'import cv2 as cv\n'), ((217, 230), 'cv2.waitKey', 'cv.waitKey', (['(0)'], {}), '(0)\n', (227, 230), True, 'import cv2 as cv\n')]
|
# Generated by Django 3.1.3 on 2020-11-29 13:25
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='attachment',
name='file',
field=models.FileField(blank=True, upload_to='attachments'),
),
migrations.AlterField(
model_name='session',
name='attendance',
field=models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL),
),
]
|
[
"django.db.models.FileField",
"django.db.models.ManyToManyField"
] |
[((357, 410), 'django.db.models.FileField', 'models.FileField', ([], {'blank': '(True)', 'upload_to': '"""attachments"""'}), "(blank=True, upload_to='attachments')\n", (373, 410), False, 'from django.db import migrations, models\n'), ((537, 600), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'to': 'settings.AUTH_USER_MODEL'}), '(blank=True, to=settings.AUTH_USER_MODEL)\n', (559, 600), False, 'from django.db import migrations, models\n')]
|
import os
import sys
import stat
import h5py
import time
import shutil
import subprocess
import numpy as np
import scipy.io as sio
from data_analysis import find_caffe
# import caffe
import data_analysis.get_feature_from_model as feature
caffe_root = find_caffe.caffe_root
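# The helpers below load trained Caffe models and save ip1-layer features of the
# indian_pines / salina 5x5 mean-std data to .mat files, covering both same-dataset
# and cross-dataset model/data combinations.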
def mkdir_if_not_exist(the_dir):
    if not os.path.isdir(the_dir):
os.makedirs(the_dir)
def get_indian_pines_features_from_indian_pines_model():
for i in range(10):
class data: pass
data.data_dir = os.path.expanduser('../hyperspectral_datas/indian_pines/data/')
data.data_5x5_mean_std = sio.loadmat(data.data_dir + '/indian_pines_5x5_mean_std.mat')['data']
data.labels_5x5_mean_std = sio.loadmat(data.data_dir + '/indian_pines_5x5_mean_std.mat')['labels']
data.result_dir = '../result/indian_pines/bn_net_200/feature'
mkdir_if_not_exist(data.result_dir)
data.result_file = data.result_dir + '/ip_feature_ip_model_{}.mat'.format(i)
data.iters = 2000000
pretrained_model = data.result_dir + '/../model/5x5_mean_std_models_time_{}_iter_{}.caffemodel.h5'.format(i,
data.iters)
deploy_file = data.result_dir + '/../proto/indian_pines_5x5_mean_std_deploy.prototxt'
getFeature = feature.GetFeatureFromCaffe(deploy_file=deploy_file, pretrained_model=pretrained_model)
getFeature.set_data(data.data_5x5_mean_std, data.labels_5x5_mean_std)
getFeature.get_ip1()
data.result_dict = {'data': getFeature.ip1_data, 'labels': getFeature.label}
sio.savemat(data.result_file, data.result_dict)
def get_salina_features_from_salina_model():
for i in range(10):
class data: pass
data.data_dir = os.path.expanduser('~/hyperspectral_datas/salina/data/')
data.data_5x5_mean_std = sio.loadmat(data.data_dir + '/salina_5x5_mean_std.mat')['data']
data.labels_5x5_mean_std = sio.loadmat(data.data_dir + '/salina_5x5_mean_std.mat')['labels']
data.result_dir = '../result/salina/bn_net_200/feature'
mkdir_if_not_exist(data.result_dir)
data.result_file = data.result_dir + '/salina_feature_salina_5x5_mean_std_model_{}.mat'.format(i)
data.iters = 2000000
pretrained_model = data.result_dir + '/../model/5x5_mean_std_models_time_{}_iter_{}.caffemodel.h5'.format(i,
data.iters)
deploy_file = data.result_dir + '/../proto/salina_5x5_mean_std_deploy.prototxt'
getFeature = feature.GetFeatureFromCaffe(deploy_file=deploy_file, pretrained_model=pretrained_model)
getFeature.set_data(data.data_5x5_mean_std, data.labels_5x5_mean_std)
getFeature.get_ip1()
data.result_dict = {'data': getFeature.ip1_data, 'labels': getFeature.label}
sio.savemat(data.result_file, data.result_dict)
def get_indian_pines_features_from_salina_model():
for i in range(10):
class data: pass
data.data_dir = os.path.expanduser('../hyperspectral_datas/indian_pines/data/')
data.data_5x5_mean_std = sio.loadmat(data.data_dir + '/indian_pines_5x5_mean_std.mat')['data']
data.labels_5x5_mean_std = sio.loadmat(data.data_dir + '/indian_pines_5x5_mean_std.mat')['labels']
data.result_dir = '../result/salina/bn_net_200/feature'
mkdir_if_not_exist(data.result_dir)
data.result_file = data.result_dir + '/ip_feature_salina_model_{}.mat'.format(i)
data.iters = 2000000
pretrained_model = data.result_dir + '/../model/5x5_mean_std_models_time_{}_iter_{}.caffemodel.h5'.format(i,
data.iters)
deploy_file = data.result_dir + '/../proto/salina_5x5_mean_std_deploy.prototxt'
getFeature = feature.GetFeatureFromCaffe(deploy_file=deploy_file, pretrained_model=pretrained_model)
getFeature.set_data(data.data_5x5_mean_std, data.labels_5x5_mean_std)
getFeature.get_ip1()
data.result_dict = {'data': getFeature.ip1_data, 'labels': getFeature.label}
sio.savemat(data.result_file, data.result_dict)
def get_salina_features_from_indian_pines_model():
for i in range(10):
class data: pass
data.data_dir = os.path.expanduser('../hyperspectral_datas/salina/data/')
data.data_5x5_mean_std = sio.loadmat(data.data_dir + '/salina_5x5_mean_std.mat')['data']
data.labels_5x5_mean_std = sio.loadmat(data.data_dir + '/salina_5x5_mean_std.mat')['labels']
data.result_dir = '../result/indian_pines/bn_net_200/feature'
mkdir_if_not_exist(data.result_dir)
data.result_file = data.result_dir + '/salina_feature_ip_model_{}.mat'.format(i)
data.iters = 2000000
pretrained_model = data.result_dir + '/../model/5x5_mean_std_models_time_{}_iter_{}.caffemodel.h5'.format(i,
data.iters)
deploy_file = data.result_dir + '/../proto/indian_pines_5x5_mean_std_deploy.prototxt'
getFeature = feature.GetFeatureFromCaffe(deploy_file=deploy_file, pretrained_model=pretrained_model)
getFeature.set_data(data.data_5x5_mean_std, data.labels_5x5_mean_std)
getFeature.get_ip1()
data.result_dict = {'data': getFeature.ip1_data, 'labels': getFeature.label}
sio.savemat(data.result_file, data.result_dict)
if __name__ == '__main__':
start = time.time()
get_indian_pines_features_from_indian_pines_model()
get_salina_features_from_salina_model()
get_indian_pines_features_from_salina_model()
get_salina_features_from_indian_pines_model()
end = time.time()
print(end - start)
|
[
"os.makedirs",
"scipy.io.loadmat",
"os.path.isdir",
"scipy.io.savemat",
"time.time",
"data_analysis.get_feature_from_model.GetFeatureFromCaffe",
"os.path.expanduser"
] |
[((5687, 5698), 'time.time', 'time.time', ([], {}), '()\n', (5696, 5698), False, 'import time\n'), ((5909, 5920), 'time.time', 'time.time', ([], {}), '()\n', (5918, 5920), False, 'import time\n'), ((320, 342), 'os.path.isdir', 'os.path.isdir', (['the_dir'], {}), '(the_dir)\n', (333, 342), False, 'import os\n'), ((353, 373), 'os.makedirs', 'os.makedirs', (['the_dir'], {}), '(the_dir)\n', (364, 373), False, 'import os\n'), ((507, 570), 'os.path.expanduser', 'os.path.expanduser', (['"""../hyperspectral_datas/indian_pines/data/"""'], {}), "('../hyperspectral_datas/indian_pines/data/')\n", (525, 570), False, 'import os\n'), ((1369, 1461), 'data_analysis.get_feature_from_model.GetFeatureFromCaffe', 'feature.GetFeatureFromCaffe', ([], {'deploy_file': 'deploy_file', 'pretrained_model': 'pretrained_model'}), '(deploy_file=deploy_file, pretrained_model=\n pretrained_model)\n', (1396, 1461), True, 'import data_analysis.get_feature_from_model as feature\n'), ((1658, 1705), 'scipy.io.savemat', 'sio.savemat', (['data.result_file', 'data.result_dict'], {}), '(data.result_file, data.result_dict)\n', (1669, 1705), True, 'import scipy.io as sio\n'), ((1827, 1883), 'os.path.expanduser', 'os.path.expanduser', (['"""~/hyperspectral_datas/salina/data/"""'], {}), "('~/hyperspectral_datas/salina/data/')\n", (1845, 1883), False, 'import os\n'), ((2679, 2771), 'data_analysis.get_feature_from_model.GetFeatureFromCaffe', 'feature.GetFeatureFromCaffe', ([], {'deploy_file': 'deploy_file', 'pretrained_model': 'pretrained_model'}), '(deploy_file=deploy_file, pretrained_model=\n pretrained_model)\n', (2706, 2771), True, 'import data_analysis.get_feature_from_model as feature\n'), ((2968, 3015), 'scipy.io.savemat', 'sio.savemat', (['data.result_file', 'data.result_dict'], {}), '(data.result_file, data.result_dict)\n', (2979, 3015), True, 'import scipy.io as sio\n'), ((3143, 3206), 'os.path.expanduser', 'os.path.expanduser', (['"""../hyperspectral_datas/indian_pines/data/"""'], {}), "('../hyperspectral_datas/indian_pines/data/')\n", (3161, 3206), False, 'import os\n'), ((3997, 4089), 'data_analysis.get_feature_from_model.GetFeatureFromCaffe', 'feature.GetFeatureFromCaffe', ([], {'deploy_file': 'deploy_file', 'pretrained_model': 'pretrained_model'}), '(deploy_file=deploy_file, pretrained_model=\n pretrained_model)\n', (4024, 4089), True, 'import data_analysis.get_feature_from_model as feature\n'), ((4286, 4333), 'scipy.io.savemat', 'sio.savemat', (['data.result_file', 'data.result_dict'], {}), '(data.result_file, data.result_dict)\n', (4297, 4333), True, 'import scipy.io as sio\n'), ((4461, 4518), 'os.path.expanduser', 'os.path.expanduser', (['"""../hyperspectral_datas/salina/data/"""'], {}), "('../hyperspectral_datas/salina/data/')\n", (4479, 4518), False, 'import os\n'), ((5309, 5401), 'data_analysis.get_feature_from_model.GetFeatureFromCaffe', 'feature.GetFeatureFromCaffe', ([], {'deploy_file': 'deploy_file', 'pretrained_model': 'pretrained_model'}), '(deploy_file=deploy_file, pretrained_model=\n pretrained_model)\n', (5336, 5401), True, 'import data_analysis.get_feature_from_model as feature\n'), ((5598, 5645), 'scipy.io.savemat', 'sio.savemat', (['data.result_file', 'data.result_dict'], {}), '(data.result_file, data.result_dict)\n', (5609, 5645), True, 'import scipy.io as sio\n'), ((604, 665), 'scipy.io.loadmat', 'sio.loadmat', (["(data.data_dir + '/indian_pines_5x5_mean_std.mat')"], {}), "(data.data_dir + '/indian_pines_5x5_mean_std.mat')\n", (615, 665), True, 'import scipy.io as sio\n'), ((709, 770), 
'scipy.io.loadmat', 'sio.loadmat', (["(data.data_dir + '/indian_pines_5x5_mean_std.mat')"], {}), "(data.data_dir + '/indian_pines_5x5_mean_std.mat')\n", (720, 770), True, 'import scipy.io as sio\n'), ((1917, 1972), 'scipy.io.loadmat', 'sio.loadmat', (["(data.data_dir + '/salina_5x5_mean_std.mat')"], {}), "(data.data_dir + '/salina_5x5_mean_std.mat')\n", (1928, 1972), True, 'import scipy.io as sio\n'), ((2016, 2071), 'scipy.io.loadmat', 'sio.loadmat', (["(data.data_dir + '/salina_5x5_mean_std.mat')"], {}), "(data.data_dir + '/salina_5x5_mean_std.mat')\n", (2027, 2071), True, 'import scipy.io as sio\n'), ((3240, 3301), 'scipy.io.loadmat', 'sio.loadmat', (["(data.data_dir + '/indian_pines_5x5_mean_std.mat')"], {}), "(data.data_dir + '/indian_pines_5x5_mean_std.mat')\n", (3251, 3301), True, 'import scipy.io as sio\n'), ((3345, 3406), 'scipy.io.loadmat', 'sio.loadmat', (["(data.data_dir + '/indian_pines_5x5_mean_std.mat')"], {}), "(data.data_dir + '/indian_pines_5x5_mean_std.mat')\n", (3356, 3406), True, 'import scipy.io as sio\n'), ((4552, 4607), 'scipy.io.loadmat', 'sio.loadmat', (["(data.data_dir + '/salina_5x5_mean_std.mat')"], {}), "(data.data_dir + '/salina_5x5_mean_std.mat')\n", (4563, 4607), True, 'import scipy.io as sio\n'), ((4651, 4706), 'scipy.io.loadmat', 'sio.loadmat', (["(data.data_dir + '/salina_5x5_mean_std.mat')"], {}), "(data.data_dir + '/salina_5x5_mean_std.mat')\n", (4662, 4706), True, 'import scipy.io as sio\n')]
|
import hashlib
import itertools
class AdventCoinMiner():
def solve(self, prefix, check="00000"):
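        # try successive integers until md5(prefix + i) starts with the required run of zeros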
for i in itertools.count(1):
hash = hashlib.md5("{}{}".format(prefix, i).encode('utf-8')).hexdigest()
if check == hash[:len(check)]:
return i
|
[
"itertools.count"
] |
[((119, 137), 'itertools.count', 'itertools.count', (['(1)'], {}), '(1)\n', (134, 137), False, 'import itertools\n')]
|
# Created by <NAME>
# import module
import msread
# init path
path = r"sample.raw"
import msread
# open file
with msread.open(path) as reader:
# show summary
reader.summary(show=True)
# read headers only
for header in reader.headers(min_rt=5*60, max_rt=10*60, ms_level=1):
print(header)
# read scans
for scan in reader.scans(min_rt=5*60, max_rt=10*60, ms_level=1):
print(scan.header)
print(scan.centroids)
|
[
"msread.open"
] |
[((119, 136), 'msread.open', 'msread.open', (['path'], {}), '(path)\n', (130, 136), False, 'import msread\n')]
|
"""
Mongo logger package
"""
from setuptools import setup, find_packages
import monolog
DESCRIPTION = 'MongoDB logger + std_logger'
AUTHOR = '<NAME>'
AUTHOR_EMAIL = "<EMAIL>"
URL = "https://github.com/Ckoetael/monolog"
VERSION = monolog.__version__
setup(
name="monolog",
version=VERSION,
description=DESCRIPTION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
license="BSD",
url=URL,
packages=find_packages(),
install_requires=['pymongo >= 3.10'],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
zip_safe=False,
)
|
[
"setuptools.find_packages"
] |
[((425, 440), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (438, 440), False, 'from setuptools import setup, find_packages\n')]
|
# -*- coding: utf-8 -*-
"""
Copyright (c) 2013, 2014, 2015 <NAME>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
Neither the name of the involved organizations nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
THE POSSIBILITY OF SUCH DAMAGE.
"""
from os.path import expanduser, isfile
from PyQt5.QtSql import QSqlDatabase, QSqlQueryModel, QSqlQuery
class DatabaseLogLite(object):
""" Low load only; using SQLite
To store bookmarks, configuration, etc.
AB01 CFG02
"""
def __init__(self):
# ###### STARTUP
super(DatabaseLogLite, self).__init__()
self.litedb = QSqlDatabase("QSQLITE")
db_file = expanduser("~/.eilat/eilat.db")
rebuild = not isfile(db_file)
self.litedb.setDatabaseName(db_file)
self.litedb.open()
if rebuild:
query_mknav = (
"CREATE TABLE navigation (host TEXT NOT NULL," +
" path TEXT, count INTEGER default 0, prefix char(2)," +
" PRIMARY KEY (host, path))")
self.litedb.exec_(query_mknav)
# ###### VALIDATION
# verifies database structure, not datatypes
tables = self.litedb.tables()
tables_ok = [k in tables for k in ['navigation']]
if not all(tables_ok):
raise RuntimeError("tables missing from database")
fnav_ok = [self.litedb.record('navigation').contains(k)
for k in ['host', 'path', 'count', 'prefix']]
if not all(fnav_ok):
raise RuntimeError("bad structure for 'navigation' table")
def model(self, prefix=None):
""" recreate the model each call; opening a new window will not
be needed to use the recent completions
"""
if prefix is None:
query_nav = QSqlQuery(
"select host || path from navigation " +
"order by count desc",
self.litedb)
else: # CFG02
query_nav = QSqlQuery(
"select host || path from navigation " +
"where prefix = '{}' ".format(prefix) +
"order by count desc",
self.litedb)
ret_model = QSqlQueryModel()
ret_model.setQuery(query_nav) # AB01
return ret_model
def store_navigation(self, host, path, prefix):
""" save host, path and increase its count AB01 """
host = host.replace("'", "%27")
path = path.replace("'", "%27")
insert_or_ignore = (
"insert or ignore into navigation (host, path, prefix) " +
"values ('{}', '{}', '{}')".format(host, path, prefix))
update = (
"update navigation set count = count + 1 where " +
"host = '{}' and path = '{}'".format(host, path))
self.litedb.exec_(insert_or_ignore)
self.litedb.exec_(update)
|
[
"PyQt5.QtSql.QSqlQuery",
"PyQt5.QtSql.QSqlDatabase",
"os.path.isfile",
"PyQt5.QtSql.QSqlQueryModel",
"os.path.expanduser"
] |
[((1953, 1976), 'PyQt5.QtSql.QSqlDatabase', 'QSqlDatabase', (['"""QSQLITE"""'], {}), "('QSQLITE')\n", (1965, 1976), False, 'from PyQt5.QtSql import QSqlDatabase, QSqlQueryModel, QSqlQuery\n'), ((1996, 2027), 'os.path.expanduser', 'expanduser', (['"""~/.eilat/eilat.db"""'], {}), "('~/.eilat/eilat.db')\n", (2006, 2027), False, 'from os.path import expanduser, isfile\n'), ((3536, 3552), 'PyQt5.QtSql.QSqlQueryModel', 'QSqlQueryModel', ([], {}), '()\n', (3550, 3552), False, 'from PyQt5.QtSql import QSqlDatabase, QSqlQueryModel, QSqlQuery\n'), ((2050, 2065), 'os.path.isfile', 'isfile', (['db_file'], {}), '(db_file)\n', (2056, 2065), False, 'from os.path import expanduser, isfile\n'), ((3140, 3230), 'PyQt5.QtSql.QSqlQuery', 'QSqlQuery', (["('select host || path from navigation ' + 'order by count desc')", 'self.litedb'], {}), "('select host || path from navigation ' + 'order by count desc',\n self.litedb)\n", (3149, 3230), False, 'from PyQt5.QtSql import QSqlDatabase, QSqlQueryModel, QSqlQuery\n')]
|
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # silences the error messages that show up on my MacBook
import tensorflow as tf
# trainable variables: values that can be changed during the training process
# x_train = [1, 2, 3]
# y_train = [1, 2, 3]
# use placeholders so the values can be fed in at run time
X = tf.placeholder(tf.float32)
Y = tf.placeholder(tf.float32)
# W and b are unknown, so start them from random values
W = tf.Variable(tf.random_normal([1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')
#Our hypothesis XW+b
# hypothesis = x_train * W + b
hypothesis = X * W + b
#cost/loss function
#cost = tf.reduce_mean(tf.square(hypothesis - y_train))
cost = tf.reduce_mean(tf.square(hypothesis - Y))
# reduce_mean takes a tensor and returns its mean
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
train = optimizer.minimize(cost)
sess = tf.Session()
sess.run(tf.global_variables_initializer()) # variables must always be initialized through this function before they are run
for step in range(4001):
# sess.run(train)
cost_val, W_val, b_val, _ = sess.run([cost, W, b, train], feed_dict={X:[1, 2, 3, 4, 5], Y:[2.1, 3.1, 4.1, 5.1, 6.1]})
if step % 20 == 0:
#print(step, sess.run(cost), sess.run(W), sess.run(b))
print(step, cost_val, W_val, b_val)
|
[
"tensorflow.global_variables_initializer",
"tensorflow.Session",
"tensorflow.placeholder",
"tensorflow.random_normal",
"tensorflow.square",
"tensorflow.train.GradientDescentOptimizer"
] |
[((228, 254), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {}), '(tf.float32)\n', (242, 254), True, 'import tensorflow as tf\n'), ((259, 285), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {}), '(tf.float32)\n', (273, 285), True, 'import tensorflow as tf\n'), ((676, 729), 'tensorflow.train.GradientDescentOptimizer', 'tf.train.GradientDescentOptimizer', ([], {'learning_rate': '(0.01)'}), '(learning_rate=0.01)\n', (709, 729), True, 'import tensorflow as tf\n'), ((771, 783), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (781, 783), True, 'import tensorflow as tf\n'), ((329, 350), 'tensorflow.random_normal', 'tf.random_normal', (['[1]'], {}), '([1])\n', (345, 350), True, 'import tensorflow as tf\n'), ((383, 404), 'tensorflow.random_normal', 'tf.random_normal', (['[1]'], {}), '([1])\n', (399, 404), True, 'import tensorflow as tf\n'), ((594, 619), 'tensorflow.square', 'tf.square', (['(hypothesis - Y)'], {}), '(hypothesis - Y)\n', (603, 619), True, 'import tensorflow as tf\n'), ((793, 826), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (824, 826), True, 'import tensorflow as tf\n')]
|
#!/usr/bin/env python3
import sys
import os
import tempfile
words = []
found_code = False
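# collect the hex words that follow the "Machine code:" marker on stdin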
for line in sys.stdin.readlines():
if line.startswith("Machine code:"):
found_code = True
continue
if found_code:
words.append(int("0x" + line.strip(), 16))
fd, filename = tempfile.mkstemp(suffix = ".bin")
f = os.fdopen(fd, "wb")
for word in words:
f.write(bytes([word & 0xff, (word >> 8) & 0xff, (word >> 16) & 0xff, (word >> 24) & 0xff]))
f.close()
os.system("aarch64-linux-gnu-objdump -b binary -m aarch64 -EL -D %s" % filename)
os.unlink(filename)
|
[
"os.unlink",
"tempfile.mkstemp",
"os.system",
"sys.stdin.readlines",
"os.fdopen"
] |
[((105, 126), 'sys.stdin.readlines', 'sys.stdin.readlines', ([], {}), '()\n', (124, 126), False, 'import sys\n'), ((298, 329), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'suffix': '""".bin"""'}), "(suffix='.bin')\n", (314, 329), False, 'import tempfile\n'), ((336, 355), 'os.fdopen', 'os.fdopen', (['fd', '"""wb"""'], {}), "(fd, 'wb')\n", (345, 355), False, 'import os\n'), ((482, 567), 'os.system', 'os.system', (["('aarch64-linux-gnu-objdump -b binary -m aarch64 -EL -D %s' % filename)"], {}), "('aarch64-linux-gnu-objdump -b binary -m aarch64 -EL -D %s' % filename\n )\n", (491, 567), False, 'import os\n'), ((563, 582), 'os.unlink', 'os.unlink', (['filename'], {}), '(filename)\n', (572, 582), False, 'import os\n')]
|
import openbrokerapi
import pytest
from broker.validators import CNAME
def test_one_layer_of_cnames(dns):
dns.add_cname("_acme-challenge.foo.example.gov")
# we're just making sure we don't raise an exception here
CNAME(["foo.example.gov"]).validate()
def test_two_layers_of_cnames(dns):
dns.add_cname(
"_acme-challenge.foo.example.gov", target="_acme-challenge.bar.example.gov"
)
dns.add_cname(
"_acme-challenge.bar.example.gov",
target="_acme-challenge.foo.example.gov.domains.cloud.test",
)
# we're just making sure we don't raise an exception here
CNAME(["foo.example.gov"]).validate()
def test_three_layers_of_cnames(dns):
dns.add_cname(
"_acme-challenge.foo.example.gov", target="_acme-challenge.bar.example.gov"
)
dns.add_cname(
"_acme-challenge.bar.example.gov", target="_acme-challenge.baz.example.gov"
)
dns.add_cname(
"_acme-challenge.baz.example.gov",
target="_acme-challenge.foo.example.gov.domains.cloud.test",
)
# we're just making sure we don't raise an exception here
CNAME(["foo.example.gov"]).validate()
def test_detects_looping_cnames(dns):
dns.add_cname(
"_acme-challenge.foo.example.gov", target="_acme-challenge.bar.example.gov"
)
dns.add_cname(
"_acme-challenge.bar.example.gov", target="_acme-challenge.foo.example.gov"
)
    # the two CNAMEs point at each other, so validation should raise
with pytest.raises(
openbrokerapi.errors.ErrBadRequest,
match=r"_acme-challenge.foo.example.gov points to itself. Resolution chain: \['_acme-challenge.foo.example.gov', '_acme-challenge.bar.example.gov'\]",
):
CNAME(["foo.example.gov"]).validate()
|
[
"broker.validators.CNAME",
"pytest.raises"
] |
[((1482, 1693), 'pytest.raises', 'pytest.raises', (['openbrokerapi.errors.ErrBadRequest'], {'match': '"""_acme-challenge.foo.example.gov points to itself. Resolution chain: \\\\[\'_acme-challenge.foo.example.gov\', \'_acme-challenge.bar.example.gov\'\\\\]"""'}), '(openbrokerapi.errors.ErrBadRequest, match=\n "_acme-challenge.foo.example.gov points to itself. Resolution chain: \\\\[\'_acme-challenge.foo.example.gov\', \'_acme-challenge.bar.example.gov\'\\\\]"\n )\n', (1495, 1693), False, 'import pytest\n'), ((228, 254), 'broker.validators.CNAME', 'CNAME', (["['foo.example.gov']"], {}), "(['foo.example.gov'])\n", (233, 254), False, 'from broker.validators import CNAME\n'), ((616, 642), 'broker.validators.CNAME', 'CNAME', (["['foo.example.gov']"], {}), "(['foo.example.gov'])\n", (621, 642), False, 'from broker.validators import CNAME\n'), ((1115, 1141), 'broker.validators.CNAME', 'CNAME', (["['foo.example.gov']"], {}), "(['foo.example.gov'])\n", (1120, 1141), False, 'from broker.validators import CNAME\n'), ((1715, 1741), 'broker.validators.CNAME', 'CNAME', (["['foo.example.gov']"], {}), "(['foo.example.gov'])\n", (1720, 1741), False, 'from broker.validators import CNAME\n')]
|
import sys, os
sys.path.append(os.pardir)
import numpy as np
from dataset.mnist import load_mnist
from PIL import Image
def img_show(img):
pil_img = Image.fromarray(np.uint8(img))
pil_img.show()
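# load MNIST and display the first training image together with its label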
(x_train, t_train), (x_test, t_test) = load_mnist(flatten=True, normalize = False)
img = x_train[0]
label = t_train[0]
print(label)
print(img.shape)
img = img.reshape(28, 28)
print(img.shape)
img_show(img)
|
[
"sys.path.append",
"dataset.mnist.load_mnist",
"numpy.uint8"
] |
[((16, 42), 'sys.path.append', 'sys.path.append', (['os.pardir'], {}), '(os.pardir)\n', (31, 42), False, 'import sys, os\n'), ((254, 295), 'dataset.mnist.load_mnist', 'load_mnist', ([], {'flatten': '(True)', 'normalize': '(False)'}), '(flatten=True, normalize=False)\n', (264, 295), False, 'from dataset.mnist import load_mnist\n'), ((177, 190), 'numpy.uint8', 'np.uint8', (['img'], {}), '(img)\n', (185, 190), True, 'import numpy as np\n')]
|
#!/usr/bin/python3
"""Alta3 Research | <EMAIL>
Exploring using pandas to create dataframes, and output graphs"""
import pandas as pd
def main():
# define the name of our xls file
excel_file = 'files/movies.xls'
# create a DataFrame (DF) object. EASY!
# because we did not specify a sheet
# only the first sheet was read into the DF
movies = pd.read_excel(excel_file)
# show the first five rows of our DF
# DF has 5 rows and 25 columns (indexed by integer)
print(movies.head())
# Choose the first column "Title" as
# index (index=0)
movies_sheet1 = pd.read_excel(excel_file, sheet_name=0, index_col=0)
# DF has 5 rows and 24 columns (indexed by title)
    # print the first five movies in the dataframe
print(movies_sheet1.head())
# export 5 movies from the top dataframe to excel
movies_sheet1.head(5).to_excel("5movies.xlsx")
# export 5 movies from the top of the dataframe to json
movies_sheet1.head(5).to_json("5movies.json")
# export 5 movies from the top of the dataframe to csv
movies_sheet1.head(5).to_csv("5movies.csv")
if __name__ == "__main__":
main()
|
[
"pandas.read_excel"
] |
[((371, 396), 'pandas.read_excel', 'pd.read_excel', (['excel_file'], {}), '(excel_file)\n', (384, 396), True, 'import pandas as pd\n'), ((604, 656), 'pandas.read_excel', 'pd.read_excel', (['excel_file'], {'sheet_name': '(0)', 'index_col': '(0)'}), '(excel_file, sheet_name=0, index_col=0)\n', (617, 656), True, 'import pandas as pd\n')]
|
from django.contrib import admin
from .models import TokenAuthorization
class TokenAuthorizationAdmin(admin.ModelAdmin):
"""Model admin for the TokenAuthorization model."""
list_display = ('id', 'reason', 'user', 'token', 'email_address',
'created_user', 'expires')
readonly_fields = list_display + ('email_sent', 'text')
fields = readonly_fields
admin.site.register(TokenAuthorization, TokenAuthorizationAdmin)
|
[
"django.contrib.admin.site.register"
] |
[((388, 452), 'django.contrib.admin.site.register', 'admin.site.register', (['TokenAuthorization', 'TokenAuthorizationAdmin'], {}), '(TokenAuthorization, TokenAuthorizationAdmin)\n', (407, 452), False, 'from django.contrib import admin\n')]
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
import pytest
import re
from math import ceil
from pprint import pprint
from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import CatalogInputParameters as cp # pylint: disable=import-error
from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog # pylint: disable=import-error
__metaclass__ = type
BYTES_PER_TRK = 56664
BYTES_PER_CYL = BYTES_PER_TRK * 15
BYTES_PER_KB = 1024
BYTES_PER_MB = 1048576
# Scenario 2: Load mode, managed_acbs - setup=True
def test_catalog_load_managed_acbs(ansible_zos_module):
hosts = ansible_zos_module
load_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
acb_lib=cp.ACBLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.LOADMODE,
validation_msg="DFS4533I",
control_statements={'managed_acbs': {"setup": True}})
purge_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.PURGEMODE,
validation_msg="",
delete=cp.DELETES,
managed_acbs=True)
# Scenario 3: Update mode, managed_acbs - stage options(save_acb=UNCOND and clean_staging_dataset=True)
# and update option(replace_acb=UNCOND)
def test_catalog_update_managed_acbs_stage_and_update(ansible_zos_module):
hosts = ansible_zos_module
load_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
acb_lib=cp.ACBLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.UPDATEMODE,
validation_msg="DFS4536I",
control_statements={
'managed_acbs': {
'stage': {
'save_acb': "UNCOND",
'clean_staging_dataset': True
}
}
})
load_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
acb_lib=cp.ACBLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.UPDATEMODE,
validation_msg="DFS4534I",
control_statements={'managed_acbs': {'update': {'replace_acb': "UNCOND"}}})
purge_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.PURGEMODE,
validation_msg="",
delete=cp.DELETES,
managed_acbs=True)
# Setup the Catalog while defining the bootstrap dataset.
def test_catalog_define_bootstrap(ansible_zos_module):
hosts = ansible_zos_module
# Delete the bootstrap dataset first
response = hosts.all.zos_data_set(name=cp.BSDS, state="absent")
for result in response.contacted.values():
assert result['message'] == ''
if result['changed'] is False:
response = hosts.all.zos_data_set(name=cp.BSDS, state="absent", volume="SCR03")
# Load catalog while defining the bootstrap dataset
load_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
acb_lib=cp.ACBLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.LOADMODE,
validation_msg="DFS4533I",
bootstrap_dataset={
'dataset_name': cp.BSDS,
'disposition': 'NEW',
'normal_disposition': 'CATLG',
'primary': 350,
'volumes': ['222222']
},
control_statements={'managed_acbs': {"setup": True}})
# Verify the bootstrap dataset was created with the specified parameters
estimated_size_in_bytes = 0
response = hosts.all.command("dls -s " + cp.BSDS)
for result in response.contacted.values():
for line in result.get("stdout_lines", []):
lineList = line.split()
estimated_size_in_bytes = int(lineList[-1])
estimated_size_in_unit = bytes_to_unit(estimated_size_in_bytes, "TRK")
assert estimated_size_in_unit == 350
# Purge the catalog
purge_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.PURGEMODE,
validation_msg="",
delete=cp.DELETES,
managed_acbs=True)
# Finally delete the boostrap dataset again
response = hosts.all.zos_data_set(name=cp.BSDS, state="absent")
for result in response.contacted.values():
assert result['changed'] is True
assert result['message'] == ''
# Setup the Catalog while defining the staging dataset.
def test_catalog_define_staging(ansible_zos_module):
hosts = ansible_zos_module
# Delete the staging dataset first
response = hosts.all.zos_data_set(name=cp.STAGE, state="absent")
for result in response.contacted.values():
assert result['message'] == ''
if result['changed'] is False:
response = hosts.all.zos_data_set(name=cp.STAGE, state="absent", volume="SCR03")
# Load catalog while defining the staging dataset
load_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
acb_lib=cp.ACBLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.LOADMODE,
validation_msg="DFS4533I",
directory_staging_dataset={
'dataset_name': cp.STAGE,
'disposition': 'NEW',
'normal_disposition': 'CATLG',
'primary': 300,
'volumes': ['222222']
},
control_statements={'managed_acbs': {"setup": True}})
# Verify the staging dataset was created with the specified parameters
estimated_size_in_bytes = 0
response = hosts.all.command("dls -s " + cp.STAGE)
for result in response.contacted.values():
for line in result.get("stdout_lines", []):
pprint("dls stdout: " + line)
lineList = line.split()
estimated_size_in_bytes = int(lineList[-1])
estimated_size_in_unit = bytes_to_unit(estimated_size_in_bytes, "TRK")
assert estimated_size_in_unit == 300
# Purge the catalog
purge_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.PURGEMODE,
validation_msg="",
delete=cp.DELETES,
managed_acbs=True)
# Finally delete the staging dataset again
response = hosts.all.zos_data_set(name=cp.STAGE, state="absent")
for result in response.contacted.values():
assert result['changed'] is True
assert result['message'] == ''
# Setup the Catalog while defining the directory datasets.
def test_catalog_define_directory(ansible_zos_module):
hosts = ansible_zos_module
# Delete the directory datasets first
response = hosts.all.zos_data_set(batch=cp.DIR_BATCH)
for result in response.contacted.values():
assert result['message'] == ''
if result['changed'] is False:
response = hosts.all.zos_data_set(name=cp.DIR_BATCH, state="absent", volume="SCR03")
# Load catalog while defining the directory datasets
load_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
acb_lib=cp.ACBLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.LOADMODE,
validation_msg="DFS4533I",
directory_datasets=[
{
'dataset_name': cp.DIR1,
'disposition': 'NEW',
'normal_disposition': 'CATLG',
'primary': 200,
'volumes': ['222222']
},
{
'dataset_name': cp.DIR2,
'disposition': 'NEW',
'normal_disposition': 'CATLG',
'primary': 200,
'volumes': ['222222']
},
],
control_statements={'managed_acbs': {"setup": True}})
# Verify the directory datasets were created with the specified parameters
estimated_size_in_bytes = 0
response = hosts.all.command("dls -s " + cp.DIR1)
for result in response.contacted.values():
for line in result.get("stdout_lines", []):
lineList = line.split()
estimated_size_in_bytes = int(lineList[-1])
estimated_size_in_unit = bytes_to_unit(estimated_size_in_bytes, "TRK")
assert estimated_size_in_unit == 200
response = hosts.all.command("dls -s " + cp.DIR2)
for result in response.contacted.values():
for line in result.get("stdout_lines", []):
lineList = line.split()
estimated_size_in_bytes = int(lineList[-1])
estimated_size_in_unit = bytes_to_unit(estimated_size_in_bytes, "TRK")
assert estimated_size_in_unit == 200
# Purge the catalog
purge_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.PURGEMODE,
validation_msg="",
delete=cp.DELETES,
managed_acbs=True)
# Finally delete the directory datasets again
response = hosts.all.zos_data_set(batch=cp.DIR_BATCH)
for result in response.contacted.values():
assert result['changed'] is True
assert result['message'] == ''
"""
Scenario 7: Test the creation of the temp_acb_dataset, which holds ACBs that reference
GSAM databases. Test the catalog in load mode with managed_acbs setup = true or with no managed_acbs
options specified. Specify the temp_acb_dataset fields. The temp_acb_dataset can be named
anything, I recommend sticking with your first two IMS library qualifiers with the 3rd
qualifier being whatever you want. Verify the temp acb dataset is created with the specified
values. Purge the catalog.
"""
def test_creation_of_temp_acb_dataset_with_managed_acbs(ansible_zos_module):
hosts = ansible_zos_module
# Delete TEMP_ACB data set before the test
response = hosts.all.zos_data_set(name=cp.TEMP_ACB, state="absent")
for result in response.contacted.values():
assert result['message'] == ''
temp_acb_data_set = {
'dataset_name': cp.TEMP_ACB,
'disposition': 'NEW',
'normal_disposition': 'CATLG',
'primary': 200,
'volumes': ['222222']
}
load_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
acb_lib=cp.ACBLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
temp_acb_dataset=temp_acb_data_set,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.LOADMODE,
validation_msg="DFS4533I",
control_statements={
'managed_acbs': {
'setup': True
}
})
estimated_size_in_bytes = 0
response = hosts.all.command("dls -s " + cp.TEMP_ACB)
for result in response.contacted.values():
for line in result.get("stdout_lines", []):
lineList = line.split()
estimated_size_in_bytes = int(lineList[-1])
estimated_size_in_unit = bytes_to_unit(estimated_size_in_bytes, "TRK")
assert estimated_size_in_unit == 200
purge_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.PURGEMODE,
validation_msg="",
delete=cp.DELETES,
managed_acbs=True)
# Delete TEMP_ACB data set after the test
response = hosts.all.zos_data_set(name=cp.TEMP_ACB, state="absent")
for result in response.contacted.values():
assert result['changed'] is True
assert result['message'] == ''
def test_creation_of_temp_acb_dataset_without_managed_acbs(ansible_zos_module):
hosts = ansible_zos_module
# Delete TEMP_ACB data set before the test
response = hosts.all.zos_data_set(name=cp.TEMP_ACB, state="absent")
for result in response.contacted.values():
assert result['message'] == ''
temp_acb_data_set = {
'dataset_name': cp.TEMP_ACB,
'disposition': 'NEW',
'normal_disposition': 'CATLG',
'primary': 200,
'volumes': ['222222']
}
load_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
acb_lib=cp.ACBLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
temp_acb_dataset=temp_acb_data_set,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.LOADMODE,
validation_msg="DFS4434I"
)
estimated_size_in_bytes = 0
response = hosts.all.command("dls -s " + cp.TEMP_ACB)
for result in response.contacted.values():
for line in result.get("stdout_lines", []):
lineList = line.split()
estimated_size_in_bytes = int(lineList[-1])
estimated_size_in_unit = bytes_to_unit(estimated_size_in_bytes, "TRK")
assert estimated_size_in_unit == 200
purge_catalog(hosts,
psb_lib=cp.PSBLIB,
dbd_lib=cp.DBDLIB,
steplib=cp.STEPLIB,
reslib=cp.RESLIB,
proclib=cp.PROCLIB,
primary_log_dataset=cp.PRIMARYLOG,
buffer_pool_param_dataset=cp.BUFFERPOOL,
mode=cp.PURGEMODE,
validation_msg="",
delete=cp.DELETES,
managed_acbs=True)
# Delete TEMP_ACB data set after the test
response = hosts.all.zos_data_set(name=cp.TEMP_ACB, state="absent")
for result in response.contacted.values():
assert result['changed'] is True
assert result['message'] == ''
def bytes_to_unit(number_of_bytes, unit):
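    # convert a raw byte count to the requested allocation unit (TRK, CYL, KB or MB), rounding up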
size = 0
unit = unit.lower()
if number_of_bytes == 0:
number_of_bytes = 1
if unit == "cyl":
size = byte_to_cyl(number_of_bytes)
elif unit == "kb" or unit == "k":
size = byte_to_kilobyte(number_of_bytes)
elif unit == "mb" or unit == "m":
size = byte_to_megabyte(number_of_bytes)
else:
size = byte_to_trk(number_of_bytes)
return size
def byte_to_trk(number_of_bytes):
return ceil(number_of_bytes / BYTES_PER_TRK)
def byte_to_cyl(number_of_bytes):
return ceil(number_of_bytes / BYTES_PER_CYL)
def byte_to_kilobyte(number_of_bytes):
return ceil(number_of_bytes / BYTES_PER_KB)
def byte_to_megabyte(number_of_bytes):
return ceil(number_of_bytes / BYTES_PER_MB)
|
[
"ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.purge_catalog",
"math.ceil",
"pprint.pprint",
"ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.load_catalog"
] |
[((709, 1035), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.load_catalog', 'load_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'acb_lib': 'cp.ACBLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.LOADMODE', 'validation_msg': '"""DFS4533I"""', 'control_statements': "{'managed_acbs': {'setup': True}}"}), "(hosts, psb_lib=cp.PSBLIB, dbd_lib=cp.DBDLIB, acb_lib=cp.ACBLIB,\n steplib=cp.STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB,\n primary_log_dataset=cp.PRIMARYLOG, buffer_pool_param_dataset=cp.\n BUFFERPOOL, mode=cp.LOADMODE, validation_msg='DFS4533I',\n control_statements={'managed_acbs': {'setup': True}})\n", (721, 1035), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((1211, 1493), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.purge_catalog', 'purge_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.PURGEMODE', 'validation_msg': '""""""', 'delete': 'cp.DELETES', 'managed_acbs': '(True)'}), "(hosts, psb_lib=cp.PSBLIB, dbd_lib=cp.DBDLIB, steplib=cp.\n STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB, primary_log_dataset=cp.\n PRIMARYLOG, buffer_pool_param_dataset=cp.BUFFERPOOL, mode=cp.PURGEMODE,\n validation_msg='', delete=cp.DELETES, managed_acbs=True)\n", (1224, 1493), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((1934, 2315), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.load_catalog', 'load_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'acb_lib': 'cp.ACBLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.UPDATEMODE', 'validation_msg': '"""DFS4536I"""', 'control_statements': "{'managed_acbs': {'stage': {'save_acb': 'UNCOND', 'clean_staging_dataset': \n True}}}"}), "(hosts, psb_lib=cp.PSBLIB, dbd_lib=cp.DBDLIB, acb_lib=cp.ACBLIB,\n steplib=cp.STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB,\n primary_log_dataset=cp.PRIMARYLOG, buffer_pool_param_dataset=cp.\n BUFFERPOOL, mode=cp.UPDATEMODE, validation_msg='DFS4536I',\n control_statements={'managed_acbs': {'stage': {'save_acb': 'UNCOND',\n 'clean_staging_dataset': True}}})\n", (1946, 2315), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((2660, 3010), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.load_catalog', 'load_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'acb_lib': 'cp.ACBLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.UPDATEMODE', 'validation_msg': '"""DFS4534I"""', 'control_statements': "{'managed_acbs': {'update': {'replace_acb': 'UNCOND'}}}"}), "(hosts, psb_lib=cp.PSBLIB, dbd_lib=cp.DBDLIB, acb_lib=cp.ACBLIB,\n steplib=cp.STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB,\n primary_log_dataset=cp.PRIMARYLOG, buffer_pool_param_dataset=cp.\n BUFFERPOOL, mode=cp.UPDATEMODE, validation_msg='DFS4534I',\n 
control_statements={'managed_acbs': {'update': {'replace_acb': 'UNCOND'}}})\n", (2672, 3010), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((3186, 3468), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.purge_catalog', 'purge_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.PURGEMODE', 'validation_msg': '""""""', 'delete': 'cp.DELETES', 'managed_acbs': '(True)'}), "(hosts, psb_lib=cp.PSBLIB, dbd_lib=cp.DBDLIB, steplib=cp.\n STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB, primary_log_dataset=cp.\n PRIMARYLOG, buffer_pool_param_dataset=cp.BUFFERPOOL, mode=cp.PURGEMODE,\n validation_msg='', delete=cp.DELETES, managed_acbs=True)\n", (3199, 3468), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((4187, 4658), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.load_catalog', 'load_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'acb_lib': 'cp.ACBLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.LOADMODE', 'validation_msg': '"""DFS4533I"""', 'bootstrap_dataset': "{'dataset_name': cp.BSDS, 'disposition': 'NEW', 'normal_disposition':\n 'CATLG', 'primary': 350, 'volumes': ['222222']}", 'control_statements': "{'managed_acbs': {'setup': True}}"}), "(hosts, psb_lib=cp.PSBLIB, dbd_lib=cp.DBDLIB, acb_lib=cp.ACBLIB,\n steplib=cp.STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB,\n primary_log_dataset=cp.PRIMARYLOG, buffer_pool_param_dataset=cp.\n BUFFERPOOL, mode=cp.LOADMODE, validation_msg='DFS4533I',\n bootstrap_dataset={'dataset_name': cp.BSDS, 'disposition': 'NEW',\n 'normal_disposition': 'CATLG', 'primary': 350, 'volumes': ['222222']},\n control_statements={'managed_acbs': {'setup': True}})\n", (4199, 4658), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((5471, 5753), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.purge_catalog', 'purge_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.PURGEMODE', 'validation_msg': '""""""', 'delete': 'cp.DELETES', 'managed_acbs': '(True)'}), "(hosts, psb_lib=cp.PSBLIB, dbd_lib=cp.DBDLIB, steplib=cp.\n STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB, primary_log_dataset=cp.\n PRIMARYLOG, buffer_pool_param_dataset=cp.BUFFERPOOL, mode=cp.PURGEMODE,\n validation_msg='', delete=cp.DELETES, managed_acbs=True)\n", (5484, 5753), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((6710, 7191), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.load_catalog', 'load_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'acb_lib': 'cp.ACBLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.LOADMODE', 'validation_msg': '"""DFS4533I"""', 'directory_staging_dataset': 
"{'dataset_name': cp.STAGE, 'disposition': 'NEW', 'normal_disposition':\n 'CATLG', 'primary': 300, 'volumes': ['222222']}", 'control_statements': "{'managed_acbs': {'setup': True}}"}), "(hosts, psb_lib=cp.PSBLIB, dbd_lib=cp.DBDLIB, acb_lib=cp.ACBLIB,\n steplib=cp.STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB,\n primary_log_dataset=cp.PRIMARYLOG, buffer_pool_param_dataset=cp.\n BUFFERPOOL, mode=cp.LOADMODE, validation_msg='DFS4533I',\n directory_staging_dataset={'dataset_name': cp.STAGE, 'disposition':\n 'NEW', 'normal_disposition': 'CATLG', 'primary': 300, 'volumes': [\n '222222']}, control_statements={'managed_acbs': {'setup': True}})\n", (6722, 7191), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((8044, 8326), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.purge_catalog', 'purge_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.PURGEMODE', 'validation_msg': '""""""', 'delete': 'cp.DELETES', 'managed_acbs': '(True)'}), "(hosts, psb_lib=cp.PSBLIB, dbd_lib=cp.DBDLIB, steplib=cp.\n STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB, primary_log_dataset=cp.\n PRIMARYLOG, buffer_pool_param_dataset=cp.BUFFERPOOL, mode=cp.PURGEMODE,\n validation_msg='', delete=cp.DELETES, managed_acbs=True)\n", (8057, 8326), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((9287, 9889), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.load_catalog', 'load_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'acb_lib': 'cp.ACBLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.LOADMODE', 'validation_msg': '"""DFS4533I"""', 'directory_datasets': "[{'dataset_name': cp.DIR1, 'disposition': 'NEW', 'normal_disposition':\n 'CATLG', 'primary': 200, 'volumes': ['222222']}, {'dataset_name': cp.\n DIR2, 'disposition': 'NEW', 'normal_disposition': 'CATLG', 'primary': \n 200, 'volumes': ['222222']}]", 'control_statements': "{'managed_acbs': {'setup': True}}"}), "(hosts, psb_lib=cp.PSBLIB, dbd_lib=cp.DBDLIB, acb_lib=cp.ACBLIB,\n steplib=cp.STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB,\n primary_log_dataset=cp.PRIMARYLOG, buffer_pool_param_dataset=cp.\n BUFFERPOOL, mode=cp.LOADMODE, validation_msg='DFS4533I',\n directory_datasets=[{'dataset_name': cp.DIR1, 'disposition': 'NEW',\n 'normal_disposition': 'CATLG', 'primary': 200, 'volumes': ['222222']},\n {'dataset_name': cp.DIR2, 'disposition': 'NEW', 'normal_disposition':\n 'CATLG', 'primary': 200, 'volumes': ['222222']}], control_statements={\n 'managed_acbs': {'setup': True}})\n", (9299, 9889), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((11299, 11581), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.purge_catalog', 'purge_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.PURGEMODE', 'validation_msg': '""""""', 'delete': 'cp.DELETES', 'managed_acbs': '(True)'}), "(hosts, psb_lib=cp.PSBLIB, 
dbd_lib=cp.DBDLIB, steplib=cp.\n STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB, primary_log_dataset=cp.\n PRIMARYLOG, buffer_pool_param_dataset=cp.BUFFERPOOL, mode=cp.PURGEMODE,\n validation_msg='', delete=cp.DELETES, managed_acbs=True)\n", (11312, 11581), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((12999, 13364), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.load_catalog', 'load_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'acb_lib': 'cp.ACBLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'temp_acb_dataset': 'temp_acb_data_set', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.LOADMODE', 'validation_msg': '"""DFS4533I"""', 'control_statements': "{'managed_acbs': {'setup': True}}"}), "(hosts, psb_lib=cp.PSBLIB, dbd_lib=cp.DBDLIB, acb_lib=cp.ACBLIB,\n steplib=cp.STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB,\n primary_log_dataset=cp.PRIMARYLOG, temp_acb_dataset=temp_acb_data_set,\n buffer_pool_param_dataset=cp.BUFFERPOOL, mode=cp.LOADMODE,\n validation_msg='DFS4533I', control_statements={'managed_acbs': {'setup':\n True}})\n", (13011, 13364), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((14048, 14330), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.purge_catalog', 'purge_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.PURGEMODE', 'validation_msg': '""""""', 'delete': 'cp.DELETES', 'managed_acbs': '(True)'}), "(hosts, psb_lib=cp.PSBLIB, dbd_lib=cp.DBDLIB, steplib=cp.\n STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB, primary_log_dataset=cp.\n PRIMARYLOG, buffer_pool_param_dataset=cp.BUFFERPOOL, mode=cp.PURGEMODE,\n validation_msg='', delete=cp.DELETES, managed_acbs=True)\n", (14061, 14330), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((15277, 15584), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.load_catalog', 'load_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'acb_lib': 'cp.ACBLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'temp_acb_dataset': 'temp_acb_data_set', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.LOADMODE', 'validation_msg': '"""DFS4434I"""'}), "(hosts, psb_lib=cp.PSBLIB, dbd_lib=cp.DBDLIB, acb_lib=cp.ACBLIB,\n steplib=cp.STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB,\n primary_log_dataset=cp.PRIMARYLOG, temp_acb_dataset=temp_acb_data_set,\n buffer_pool_param_dataset=cp.BUFFERPOOL, mode=cp.LOADMODE,\n validation_msg='DFS4434I')\n", (15289, 15584), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((16185, 16467), 'ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils.purge_catalog', 'purge_catalog', (['hosts'], {'psb_lib': 'cp.PSBLIB', 'dbd_lib': 'cp.DBDLIB', 'steplib': 'cp.STEPLIB', 'reslib': 'cp.RESLIB', 'proclib': 'cp.PROCLIB', 'primary_log_dataset': 'cp.PRIMARYLOG', 'buffer_pool_param_dataset': 'cp.BUFFERPOOL', 'mode': 'cp.PURGEMODE', 'validation_msg': '""""""', 'delete': 'cp.DELETES', 'managed_acbs': '(True)'}), 
"(hosts, psb_lib=cp.PSBLIB, dbd_lib=cp.DBDLIB, steplib=cp.\n STEPLIB, reslib=cp.RESLIB, proclib=cp.PROCLIB, primary_log_dataset=cp.\n PRIMARYLOG, buffer_pool_param_dataset=cp.BUFFERPOOL, mode=cp.PURGEMODE,\n validation_msg='', delete=cp.DELETES, managed_acbs=True)\n", (16198, 16467), False, 'from ibm_zos_ims.tests.functional.module_utils.ims_test_catalog_utils import load_catalog, purge_catalog\n'), ((17393, 17430), 'math.ceil', 'ceil', (['(number_of_bytes / BYTES_PER_TRK)'], {}), '(number_of_bytes / BYTES_PER_TRK)\n', (17397, 17430), False, 'from math import ceil\n'), ((17478, 17515), 'math.ceil', 'ceil', (['(number_of_bytes / BYTES_PER_CYL)'], {}), '(number_of_bytes / BYTES_PER_CYL)\n', (17482, 17515), False, 'from math import ceil\n'), ((17568, 17604), 'math.ceil', 'ceil', (['(number_of_bytes / BYTES_PER_KB)'], {}), '(number_of_bytes / BYTES_PER_KB)\n', (17572, 17604), False, 'from math import ceil\n'), ((17657, 17693), 'math.ceil', 'ceil', (['(number_of_bytes / BYTES_PER_MB)'], {}), '(number_of_bytes / BYTES_PER_MB)\n', (17661, 17693), False, 'from math import ceil\n'), ((7769, 7798), 'pprint.pprint', 'pprint', (["('dls stdout: ' + line)"], {}), "('dls stdout: ' + line)\n", (7775, 7798), False, 'from pprint import pprint\n')]
|
import requests
class xrptipbot:
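    # thin client for the XRP Tip Bot app API: every endpoint is a JSON POST carrying the stored token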
def __init__(self, token):
self.token = token
self.baseUrl = "https://www.xrptipbot.com/app/api"
def login(self, platform, model):
url = self.baseUrl + "/action:login/"
headers = {"Content-Type":"application/json"}
payload = {
"token":self.token,
"platform":platform,
"model":model
}
r = requests.post(url=url, json=payload, headers=headers)
return r
def unlink(self):
url = self.baseUrl + "/action:unlink/"
headers = {"Content-Type":"application/json"}
payload = {
"token":self.token
}
r = requests.post(url=url, json=payload, headers=headers)
return r
def get_balance(self):
url = self.baseUrl + "/action:balance/"
headers = {"Content-Type":"application/json"}
payload = {
"token":self.token
}
r = requests.post(url=url, json=payload, headers=headers)
return r
def tip(self, amount, to, existingDestination):
url = self.baseUrl + "/action:tip/"
headers = {"Content-Type":"application/json"}
payload = {
"token":self.token,
"amount":amount,
"to":to,
"existingDestination":existingDestination
}
r = requests.post(url=url, json=payload, headers=headers)
return r
def get_stats(self):
url = self.baseUrl + "/action:userinfo/"
headers = {"Content-Type":"application/json"}
payload = {
"token":self.token
}
r = requests.post(url=url, json=payload, headers=headers)
return r
def get_contacts(self):
url = self.baseUrl + "/action:contacts/"
headers = {"Content-Type":"application/json"}
payload = {
"token":self.token
}
r = requests.post(url=url, json=payload, headers=headers)
return r
def lookup_user(self, query):
url = self.baseUrl + "/action:lookup/"
headers = {"Content-Type":"application/json"}
payload = {
"token":self.token,
"query":query
}
r = requests.post(url=url, json=payload, headers=headers)
return r
def create_paper_wallet(self, note):
url = self.baseUrl + "/action:paper-proposal/"
headers = {"Content-Type":"application/json"}
payload = {
"token":self.token,
"note":note
}
r = requests.post(url=url, json=payload, headers=headers)
return r
def bump(self, amount, aps=None, geo=None, nfc=None):
url = self.baseUrl + "/action:bump/"
headers = {"Content-Type":"application/json"}
payload = {
"token":self.token,
"amount":amount,
"aps":aps,
"geo":geo,
"nfc":nfc
}
r = requests.post(url=url, json=payload, headers=headers)
return r
|
[
"requests.post"
] |
[((348, 401), 'requests.post', 'requests.post', ([], {'url': 'url', 'json': 'payload', 'headers': 'headers'}), '(url=url, json=payload, headers=headers)\n', (361, 401), False, 'import requests\n'), ((568, 621), 'requests.post', 'requests.post', ([], {'url': 'url', 'json': 'payload', 'headers': 'headers'}), '(url=url, json=payload, headers=headers)\n', (581, 621), False, 'import requests\n'), ((794, 847), 'requests.post', 'requests.post', ([], {'url': 'url', 'json': 'payload', 'headers': 'headers'}), '(url=url, json=payload, headers=headers)\n', (807, 847), False, 'import requests\n'), ((1119, 1172), 'requests.post', 'requests.post', ([], {'url': 'url', 'json': 'payload', 'headers': 'headers'}), '(url=url, json=payload, headers=headers)\n', (1132, 1172), False, 'import requests\n'), ((1344, 1397), 'requests.post', 'requests.post', ([], {'url': 'url', 'json': 'payload', 'headers': 'headers'}), '(url=url, json=payload, headers=headers)\n', (1357, 1397), False, 'import requests\n'), ((1572, 1625), 'requests.post', 'requests.post', ([], {'url': 'url', 'json': 'payload', 'headers': 'headers'}), '(url=url, json=payload, headers=headers)\n', (1585, 1625), False, 'import requests\n'), ((1822, 1875), 'requests.post', 'requests.post', ([], {'url': 'url', 'json': 'payload', 'headers': 'headers'}), '(url=url, json=payload, headers=headers)\n', (1835, 1875), False, 'import requests\n'), ((2085, 2138), 'requests.post', 'requests.post', ([], {'url': 'url', 'json': 'payload', 'headers': 'headers'}), '(url=url, json=payload, headers=headers)\n', (2098, 2138), False, 'import requests\n'), ((2401, 2454), 'requests.post', 'requests.post', ([], {'url': 'url', 'json': 'payload', 'headers': 'headers'}), '(url=url, json=payload, headers=headers)\n', (2414, 2454), False, 'import requests\n')]
|
from django.http import HttpResponse
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from django.views import View
from django.views.generic import ListView
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from polygon.base_views import PolygonBaseMixin
from polygon.models import Run
from polygon.rejudge import rejudge_submission, rejudge_all_submission_on_problem
from problem.models import Problem
from submission.models import Submission
from utils.permission import is_problem_manager, is_contest_manager
def authorization(user):
return False
# TODO: open polygon
# return get_accept_problem_count(user.id) >= 100
def home_view(request):
return render(request, 'polygon/home.jinja2', context={'polygon_authorized': authorization(request.user)})
def register_view(request):
template_name = 'polygon/register.jinja2'
if request.method == 'GET':
return render(request, template_name)
else:
if request.POST.get('terms') != 'on':
return render(request, template_name, context={'register_error': 'You did\'nt accept terms of use.'})
if not authorization(request.user):
return render(request, template_name, context={'register_error': 'You are not authorized.'})
request.user.polygon_enabled = True
request.user.save(update_fields=['polygon_enabled'])
return redirect(reverse('polygon:home'))
class RejudgeSubmission(PolygonBaseMixin, APIView):
def dispatch(self, request, *args, **kwargs):
self.submission = get_object_or_404(Submission, pk=kwargs.get('sid'))
return super(RejudgeSubmission, self).dispatch(request, *args, **kwargs)
def test_func(self):
if is_problem_manager(self.request.user, self.submission.problem) or \
is_contest_manager(self.request.user, self.submission.contest):
return super(RejudgeSubmission, self).test_func()
return False
def post(self, request, sid):
rejudge_submission(self.submission)
return Response()
class RunsList(PolygonBaseMixin, ListView):
template_name = 'polygon/runs.jinja2'
paginate_by = 100
context_object_name = 'runs_list'
def get_queryset(self):
return Run.objects.filter(user=self.request.user).order_by("-pk").all()
class RunMessageView(PolygonBaseMixin, View):
def get(self, request, pk):
message = ''
try:
run = Run.objects.get(pk=pk, user=request.user)
message = run.message
except Run.DoesNotExist:
pass
return HttpResponse(message, content_type='text/plain')
|
[
"utils.permission.is_problem_manager",
"utils.permission.is_contest_manager",
"polygon.rejudge.rejudge_submission",
"django.http.HttpResponse",
"django.urls.reverse",
"rest_framework.response.Response",
"django.shortcuts.render",
"polygon.models.Run.objects.filter",
"polygon.models.Run.objects.get"
] |
[((1015, 1045), 'django.shortcuts.render', 'render', (['request', 'template_name'], {}), '(request, template_name)\n', (1021, 1045), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2094, 2129), 'polygon.rejudge.rejudge_submission', 'rejudge_submission', (['self.submission'], {}), '(self.submission)\n', (2112, 2129), False, 'from polygon.rejudge import rejudge_submission, rejudge_all_submission_on_problem\n'), ((2145, 2155), 'rest_framework.response.Response', 'Response', ([], {}), '()\n', (2153, 2155), False, 'from rest_framework.response import Response\n'), ((2686, 2734), 'django.http.HttpResponse', 'HttpResponse', (['message'], {'content_type': '"""text/plain"""'}), "(message, content_type='text/plain')\n", (2698, 2734), False, 'from django.http import HttpResponse\n'), ((1121, 1218), 'django.shortcuts.render', 'render', (['request', 'template_name'], {'context': '{\'register_error\': "You did\'nt accept terms of use."}'}), '(request, template_name, context={\'register_error\':\n "You did\'nt accept terms of use."})\n', (1127, 1218), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1279, 1368), 'django.shortcuts.render', 'render', (['request', 'template_name'], {'context': "{'register_error': 'You are not authorized.'}"}), "(request, template_name, context={'register_error':\n 'You are not authorized.'})\n", (1285, 1368), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1494, 1517), 'django.urls.reverse', 'reverse', (['"""polygon:home"""'], {}), "('polygon:home')\n", (1501, 1517), False, 'from django.urls import reverse\n'), ((1820, 1882), 'utils.permission.is_problem_manager', 'is_problem_manager', (['self.request.user', 'self.submission.problem'], {}), '(self.request.user, self.submission.problem)\n', (1838, 1882), False, 'from utils.permission import is_problem_manager, is_contest_manager\n'), ((1904, 1966), 'utils.permission.is_contest_manager', 'is_contest_manager', (['self.request.user', 'self.submission.contest'], {}), '(self.request.user, self.submission.contest)\n', (1922, 1966), False, 'from utils.permission import is_problem_manager, is_contest_manager\n'), ((2545, 2586), 'polygon.models.Run.objects.get', 'Run.objects.get', ([], {'pk': 'pk', 'user': 'request.user'}), '(pk=pk, user=request.user)\n', (2560, 2586), False, 'from polygon.models import Run\n'), ((2348, 2390), 'polygon.models.Run.objects.filter', 'Run.objects.filter', ([], {'user': 'self.request.user'}), '(user=self.request.user)\n', (2366, 2390), False, 'from polygon.models import Run\n')]
|
import enum
from django.utils.translation import ugettext_lazy as _
class MaxLength(enum.Enum):
SHORT = 128
MEDIUM = 256
LONG = 512
XLONG = 1024
TEXT = 2048
RICHTEXT = 10000
class ActiveStatus(enum.Enum):
ACTIVE = 'ACT'
INACTIVE = 'INC'
CHOICES = (
(ACTIVE, _("active").title()),
(INACTIVE, _("inactive").title()),
)
class PrivacyStatus(enum.Enum):
ANYONE = 'anyone'
USERS = 'users'
FRIENDS = 'friends'
STUDENTS = 'students'
TEACHERS = 'teachers'
EMPLOYEES = 'employees'
MANAGERS = 'managers'
ME = 'me'
CHOICES = (
(ANYONE, _("anyone").title()),
(USERS, _('all users').title()),
(FRIENDS, _('all friends').title()),
(STUDENTS, _('all students').title()),
(TEACHERS, _('all teachers').title()),
(EMPLOYEES, _('all employees').title()),
(MANAGERS, _('all managers').title()),
(ME, _('only me').title())
)
class Gender(enum.Enum):
MALE = 'L'
FEMALE = 'P'
CHOICES = (
(MALE, _("male").title()),
(FEMALE, _("female").title()),
)
class AddressName(enum.Enum):
HOME = 'home'
OFFICE = 'office'
CHOICES = (
(HOME, _("home").title()),
(OFFICE, _("office").title()),
)
class EducationStatus(enum.Enum):
FINISHED = 'FNS'
ONGOING = 'ONG'
UNFINISHED = 'UNF'
CHOICES = (
(FINISHED, _("finished").title()),
(ONGOING, _("ongoing").title()),
(UNFINISHED, _("unfinished").title()),
)
class WorkingStatus(enum.Enum):
CONTRACT = 'CTR'
FIXED = 'FXD'
OUTSOURCE = 'OSR'
ELSE = 'ELS'
CHOICES = (
(CONTRACT, _("contract").title()),
(FIXED, _("fixed").title()),
(OUTSOURCE, _("outsource").title()),
(ELSE, _("else").title())
)
class FamilyRelation(enum.Enum):
FATHER = 1
MOTHER = 2
SIBLING = 3
CHILD = 4
HUSBAND = 5
WIFE = 6
OTHER = 99
CHOICES = (
(FATHER, _('father').title()),
(MOTHER, _('mother').title()),
(HUSBAND, _('husband').title()),
(WIFE, _('wife').title()),
(CHILD, _('children').title()),
(SIBLING, _('sibling').title()),
(OTHER, _('other').title()),
)
|
[
"django.utils.translation.ugettext_lazy"
] |
[((307, 318), 'django.utils.translation.ugettext_lazy', '_', (['"""active"""'], {}), "('active')\n", (308, 318), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((348, 361), 'django.utils.translation.ugettext_lazy', '_', (['"""inactive"""'], {}), "('inactive')\n", (349, 361), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((632, 643), 'django.utils.translation.ugettext_lazy', '_', (['"""anyone"""'], {}), "('anyone')\n", (633, 643), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((670, 684), 'django.utils.translation.ugettext_lazy', '_', (['"""all users"""'], {}), "('all users')\n", (671, 684), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((713, 729), 'django.utils.translation.ugettext_lazy', '_', (['"""all friends"""'], {}), "('all friends')\n", (714, 729), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((759, 776), 'django.utils.translation.ugettext_lazy', '_', (['"""all students"""'], {}), "('all students')\n", (760, 776), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((806, 823), 'django.utils.translation.ugettext_lazy', '_', (['"""all teachers"""'], {}), "('all teachers')\n", (807, 823), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((854, 872), 'django.utils.translation.ugettext_lazy', '_', (['"""all employees"""'], {}), "('all employees')\n", (855, 872), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((902, 919), 'django.utils.translation.ugettext_lazy', '_', (['"""all managers"""'], {}), "('all managers')\n", (903, 919), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((943, 955), 'django.utils.translation.ugettext_lazy', '_', (['"""only me"""'], {}), "('only me')\n", (944, 955), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1062, 1071), 'django.utils.translation.ugettext_lazy', '_', (['"""male"""'], {}), "('male')\n", (1063, 1071), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1099, 1110), 'django.utils.translation.ugettext_lazy', '_', (['"""female"""'], {}), "('female')\n", (1100, 1110), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1231, 1240), 'django.utils.translation.ugettext_lazy', '_', (['"""home"""'], {}), "('home')\n", (1232, 1240), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1268, 1279), 'django.utils.translation.ugettext_lazy', '_', (['"""office"""'], {}), "('office')\n", (1269, 1279), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1432, 1445), 'django.utils.translation.ugettext_lazy', '_', (['"""finished"""'], {}), "('finished')\n", (1433, 1445), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1474, 1486), 'django.utils.translation.ugettext_lazy', '_', (['"""ongoing"""'], {}), "('ongoing')\n", (1475, 1486), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1518, 1533), 'django.utils.translation.ugettext_lazy', '_', (['"""unfinished"""'], {}), "('unfinished')\n", (1519, 1533), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1698, 1711), 'django.utils.translation.ugettext_lazy', '_', (['"""contract"""'], {}), "('contract')\n", (1699, 1711), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1738, 1748), 'django.utils.translation.ugettext_lazy', '_', (['"""fixed"""'], {}), "('fixed')\n", (1739, 1748), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1779, 1793), 'django.utils.translation.ugettext_lazy', '_', (['"""outsource"""'], {}), "('outsource')\n", (1780, 1793), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1819, 1828), 'django.utils.translation.ugettext_lazy', '_', (['"""else"""'], {}), "('else')\n", (1820, 1828), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2017, 2028), 'django.utils.translation.ugettext_lazy', '_', (['"""father"""'], {}), "('father')\n", (2018, 2028), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2056, 2067), 'django.utils.translation.ugettext_lazy', '_', (['"""mother"""'], {}), "('mother')\n", (2057, 2067), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2096, 2108), 'django.utils.translation.ugettext_lazy', '_', (['"""husband"""'], {}), "('husband')\n", (2097, 2108), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2134, 2143), 'django.utils.translation.ugettext_lazy', '_', (['"""wife"""'], {}), "('wife')\n", (2135, 2143), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2170, 2183), 'django.utils.translation.ugettext_lazy', '_', (['"""children"""'], {}), "('children')\n", (2171, 2183), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2212, 2224), 'django.utils.translation.ugettext_lazy', '_', (['"""sibling"""'], {}), "('sibling')\n", (2213, 2224), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2251, 2261), 'django.utils.translation.ugettext_lazy', '_', (['"""other"""'], {}), "('other')\n", (2252, 2261), True, 'from django.utils.translation import ugettext_lazy as _\n')]
|