code (string, length 22 to 1.05M) | apis (list, length 1 to 3.31k) | extract_api (string, length 75 to 3.25M)
---|---|---
from rest_framework.renderers import JSONRenderer
from django.utils import simplejson
def prepare_for_emit(obj):
"""
Prepare the object for emit() by Tornadio2's (too simple) JSON renderer
- render to JSON using Django REST Framework 2's JSON renderer
- convert back to _simple_ Python object using Django's simplejson
"""
json = JSONRenderer().render(obj)
return simplejson.loads(json)
|
[
"django.utils.simplejson.loads",
"rest_framework.renderers.JSONRenderer"
] |
[((396, 418), 'django.utils.simplejson.loads', 'simplejson.loads', (['json'], {}), '(json)\n', (412, 418), False, 'from django.utils import simplejson\n'), ((357, 371), 'rest_framework.renderers.JSONRenderer', 'JSONRenderer', ([], {}), '()\n', (369, 371), False, 'from rest_framework.renderers import JSONRenderer\n')]
|
from pwn import *
import sys
import os
from pathlib import Path
from threading import Thread
from clients.GhidraCommandClient import GhidraCommandClient
class GhidraGdb:
"""The main class which encapsulates the whole GhidraGdb framework
"""
def __init__(self, process=None):
self.fifo = None
self.process = process
self.FIFO = "/tmp/gdbPipe"
try:
os.mkfifo(self.FIFO)
except Exception as e:
print(e)
if not "File exists" in str(e):
print("sys.exit")
return
self.client = GhidraCommandClient(self)
self.parserMode = None
self.breakpointAddr = None
self.currRet = None
self.removals = []
def removeBpByPattern(self, pattern):
"""Removes a breakpoint before it is inserted
:param pattern: the pattern to identify the breakpoint
:return: None
"""
self.removals.append(pattern)
def excAndGet(self, exc, strip=True):
"""This function executes a command within the gdb session
:param exc: String value containing the gdb command
:param strip: Boolean, optional - remove the EOF delimiter automatically (this might create issues in some cases) - default: True
:return: String value containing the gdb response unparsed
"""
self.currRet = ""
self.parserMode = "GETDAT"
self.gdb.execute(exc.split("\n")[0])
self.gdb.execute("print \"ggdb__EOF\"")
while self.parserMode == "GETDAT":
time.sleep(0.01)
if strip:
return self.currRet.split("$")[0]
else:
return self.currRet
def readFifo(self, fifo):
"""read the ouput of the gdbPipe te receive the data
:param fifo: the fifo object to read from
:return: None
"""
while True:
#time.sleep(0.05)
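# read gdb's log one line at a time; lines of one or two characters carry no event data and are skipped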
line = fifo.readline()
if len(line) > 2:
line = line.replace("\n", "")
if self.parserMode == "WAITBP":
if "Breakpoint" in line:
for part in line.split(" "):
if "0x" in part:
self.breakpointAddr = part.split("x")[1]
#print("found Breakpoint Address: " + self.breakpointAddr)
elif self.parserMode == "GETDAT":
self.currRet = self.currRet + line + "\n"
if "ggdb__EOF" in line:
self.parserMode = "WAITBP"
def setupFifo(self, FIFO):
"""Create the Fifo which is used to read the data comming from the gdb
:param FIFO: The filename where the fifo will be created
:return: None
"""
print("setting up fifo now: " + str(FIFO))
with open(FIFO, 'r') as fifo:
self.fifo = fifo
print("fiifo opened")
self.readFifo(fifo)
def setupFifoNonBlock(self, Fifo):
"""Run the function "setupFifo" in None-blocking mode
:param FIFO: The filename where the fifo will be created
:return: None
"""
Thread(target=self.setupFifo, args=(Fifo,), daemon=True).start()
def setupGdbInteractive(self):
"""Setup the GdbSession as an interactive shell(the user can interact with GDB as usual) - Non-blocking
:return: None
"""
Thread(target=self.process.interactive).start()
def getProcOffset(self, procName):
"""Get the Proc Offset of a particular mapping
:param procName: String value containing the Name of the mapping
:return: The start Address of the mapped space
"""
while self.checkThreadRunning():
time.sleep(0.05)
print("getting proc mapping")
#get the proc mappings from gdb
procMappings = self.excAndGet("i proc mappings")
proc_maps = []
#get and format the memory mappings which are mapping the main executable
for line in procMappings.split("\n"):
if procName in line:
ln = line.replace("\t", " ")
#turn multiple whitespaces into single whitespaces
while " " in ln:
ln = ln.replace(" ", " ")
#create an array, containing the different columns
arr = ln.split(" ")
if len(arr[0]) < 2:
arr.pop(0)
proc_maps.append(arr)
## get the lowest Start Address
offset = 0
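# arr columns mirror gdb's "i proc mappings" rows: map[0] is the start address and map[3] the file offset; the loop below keeps the start address of the lowest-offset row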
procStartAddress = 0
for i, map in enumerate(proc_maps):
if i == 0 or offset > int(map[3].split("x")[1],16) :
offset = int(map[3].split("x")[1],16)
procStartAddress = map[0]
return procStartAddress
def run(self, cmd, interactive=True, startCommands="", args=""):
"""This is the entry function that spawns a new process and connects the debugger to it
:param cmd: String value containing the path to your executable
:param interactive: Boolean, optional - open a regular GDB window which the user can interact with. Default: True
:param startCommands: String, optional - initial GDB commands which are executed before the program starts
:param args: String - arguments to start the executable with
"""
#connect reader thread to read gdb pipe
self.setupFifoNonBlock(self.FIFO)
self.process = gdb.debug(cmd, '''
set logging file /tmp/gdbPipe
set logging on
starti ''' + str(args) + "\n" + startCommands, api=True)
self.gdb = self.process.gdb
#self
if interactive:
self.setupGdbInteractive()
self.runtimeAnalysisNonBlock()
#we need to calculate the offset between Ghidra and the process mapping here (Because of ...)
imageBase = self.client.br.remote_eval("str(getState().getCurrentProgram().getAddressMap().getImageBase())")
procOffset = self.getProcOffset(Path(cmd).name)
if procOffset == 0:
return self.process, False
print("Found proc offset: " + str(procOffset))
#calculate final dynamic offset
self.procOffset = str(hex(int(procOffset.split("x")[1],16) - int(imageBase,16)))
print("final offset: " + str(self.procOffset))
print("EXECUTING GDB BP SETUP")
for bp in self.client.breakpoints:
skip = False
for line in bp.pyExc.split("\n"):
for line2 in self.removals:
if line2 in line:
skip = True
if skip:
continue
print("ADDING BP")
bp.rebuiltWithOffset(self.procOffset)
bp.setHitLimit(0)
ret = self.excAndGet(str(bp.setup))
#we parse the number of the breakpoint (in gdb)
parts = ret.split(" ")
parse = False
number = 0
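# gdb replies along the lines of "Breakpoint N at 0x...": take the token right after the word "Breakpoint" as the breakpoint number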
for part in parts:
if parse:
try:
number = int(part)
except:
pass
if "Breakpoint" in part:
parse = True
bp.number = number
print("return from setup: " + str(ret))
#self.gdb.execute(str(bp.setup))
self.gdb.execute(str("continue"))
return self.process, True
def setupGdb(self, interactive=True, startCommands=""):
""" Deprecated - attaches the gdb to an existing program instance instead of spawning the program
:param interactive: interactive: Boolean, optional - open a regular GDB Window which the user can interact with. Default: True
:param startCommands: Sting - Initial GDB Commands which are executed before the program starts
:return: None
"""
#connect reader thread to read gdb pipe
self.setupFifoNonBlock(self.FIFO)
self.pid, self.gdb = gdb.attach(self.process, '''
set logging file /tmp/gdbPipe
set logging on
''' + startCommands, api=True)
if interactive:
self.setupGdbInteractive()
self.runtimeAnalysisNonBlock()
def analyze(self, funcs):
"""Analyze the Ghidra project - this command will create all the functions, breakpoints and classes from the Ghidra Code/Comments
:param funcs: A list of functions which are to be analyzed
:return: None
"""
self.client.analyze(funcs)
def runtimeAnalysis(self):
"""This function runs arbitrary code in either python or GDB everytime a breakpoint is hit
:return: None
"""
#the first breakpoint has to install the other breakpoints - then continue ...
while self.checkThreadRunning():
time.sleep(0.05)
#time.sleep(5)
print("CONTINUE")
self.parserMode = "WAITBP"
while True:
time.sleep(0.05)
while self.checkThreadRunning():
time.sleep(0.05)
finBp = None
try:
if self.breakpointAddr:
#print("breakpoint hit")
for bp in self.client.breakpoints:
if bp.address.split("x")[1] in self.breakpointAddr:
finBp = bp
self.breakpointAddr = None
break
except:
continue
if not finBp:
continue
finBp.hit()
#todo - this has to be in parallel
for line in finBp.pyExc.split("\n"):
if len(line) > 1:
try:
finBp.exec_(line)
except Exception as e:
print("Exception during code execution: " + str(line))
print(str(e))
for line in finBp.dbExc.split("\n"):
if len(line) > 0:
try:
self.gdb.execute(line)
if line[0] == "c" or "continue" in line:
finBp.deactivate()
except Exception as e:
print("Error in GDB execution of:" + str(line))
print("Exception: " + str(e))
def runtimeAnalysisNonBlock(self):
"""Run the function 'runtimeAnalysis' in Non-blocking mode
:return: None
"""
Thread(target=self.runtimeAnalysis, daemon=True).start()
#check if current thread is running ... (if gdb hits breakpoint ...)
def checkThreadRunning(self):
"""check if the current GDB Thread is running
:return: Boolean - True if the Thread is running
"""
#Todo -- check this
try:
#print(dir(self.gdb.conn.root.gdb))#.selected_inferior().threads())
#print(dir(self.gdb.conn.root.gdb.InferiorThread))
#print(self.gdb.conn.root.gdb.selected_thread().is_running())
#if self.gdb.conn.root.gdb.selected_inferior().threads()[0].is_running():
if self.gdb.conn.root.gdb.selected_thread().is_running():
return True
else:
return False
except Exception as e:
return True
|
[
"threading.Thread",
"clients.GhidraCommandClient.GhidraCommandClient",
"pathlib.Path",
"os.mkfifo"
] |
[((613, 638), 'clients.GhidraCommandClient.GhidraCommandClient', 'GhidraCommandClient', (['self'], {}), '(self)\n', (632, 638), False, 'from clients.GhidraCommandClient import GhidraCommandClient\n'), ((416, 436), 'os.mkfifo', 'os.mkfifo', (['self.FIFO'], {}), '(self.FIFO)\n', (425, 436), False, 'import os\n'), ((3300, 3356), 'threading.Thread', 'Thread', ([], {'target': 'self.setupFifo', 'args': '(Fifo,)', 'daemon': '(True)'}), '(target=self.setupFifo, args=(Fifo,), daemon=True)\n', (3306, 3356), False, 'from threading import Thread\n'), ((3559, 3598), 'threading.Thread', 'Thread', ([], {'target': 'self.process.interactive'}), '(target=self.process.interactive)\n', (3565, 3598), False, 'from threading import Thread\n'), ((6243, 6252), 'pathlib.Path', 'Path', (['cmd'], {}), '(cmd)\n', (6247, 6252), False, 'from pathlib import Path\n'), ((10988, 11036), 'threading.Thread', 'Thread', ([], {'target': 'self.runtimeAnalysis', 'daemon': '(True)'}), '(target=self.runtimeAnalysis, daemon=True)\n', (10994, 11036), False, 'from threading import Thread\n')]
|
from dataclasses import make_dataclass
from fractal.core.specifications.object_of_account_specification import (
ObjectOfAccountSpecification,
)
def test_object_of_account_specification():
spec = ObjectOfAccountSpecification("abc", "def")
DC = make_dataclass("DC", [("id", str), ("account_id", str)])
assert spec.is_satisfied_by(DC(**dict(id="abc", account_id="def")))
|
[
"fractal.core.specifications.object_of_account_specification.ObjectOfAccountSpecification",
"dataclasses.make_dataclass"
] |
[((207, 249), 'fractal.core.specifications.object_of_account_specification.ObjectOfAccountSpecification', 'ObjectOfAccountSpecification', (['"""abc"""', '"""def"""'], {}), "('abc', 'def')\n", (235, 249), False, 'from fractal.core.specifications.object_of_account_specification import ObjectOfAccountSpecification\n'), ((259, 315), 'dataclasses.make_dataclass', 'make_dataclass', (['"""DC"""', "[('id', str), ('account_id', str)]"], {}), "('DC', [('id', str), ('account_id', str)])\n", (273, 315), False, 'from dataclasses import make_dataclass\n')]
|
import json
import imports.fileReader as reader
class dataBase:
def __init__(self, path, key):
self.file = reader.efile(path, key)
if len(self.file.data) > 0:
self.json = json.loads(self.file.data)
else:
self.json = json.loads("{}")
self.json["key"] = key.decode()
def save(self):
self.file.data = json.dumps(self.json)
self.file.save()
|
[
"imports.fileReader.efile",
"json.dumps",
"json.loads"
] |
[((120, 143), 'imports.fileReader.efile', 'reader.efile', (['path', 'key'], {}), '(path, key)\n', (132, 143), True, 'import imports.fileReader as reader\n'), ((382, 403), 'json.dumps', 'json.dumps', (['self.json'], {}), '(self.json)\n', (392, 403), False, 'import json\n'), ((206, 232), 'json.loads', 'json.loads', (['self.file.data'], {}), '(self.file.data)\n', (216, 232), False, 'import json\n'), ((271, 287), 'json.loads', 'json.loads', (['"""{}"""'], {}), "('{}')\n", (281, 287), False, 'import json\n')]
|
"""
This is where all the general routes and controllers are defined.
"""
from flask import Blueprint
from flask import current_app as app
from flask import make_response
main_blueprint = Blueprint('main_blueprint', __name__)
@main_blueprint.route('/')
def index():
return make_response()
@main_blueprint.route('/health')
def health():
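# set the app's custom "up" gauge (attached by prom_init) to 1 so the health endpoint also reports liveness to Prometheus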
app.prom_init.up_gauge.set(1)
return make_response()
|
[
"flask.make_response",
"flask.current_app.prom_init.up_gauge.set",
"flask.Blueprint"
] |
[((190, 227), 'flask.Blueprint', 'Blueprint', (['"""main_blueprint"""', '__name__'], {}), "('main_blueprint', __name__)\n", (199, 227), False, 'from flask import Blueprint\n'), ((281, 296), 'flask.make_response', 'make_response', ([], {}), '()\n', (294, 296), False, 'from flask import make_response\n'), ((350, 379), 'flask.current_app.prom_init.up_gauge.set', 'app.prom_init.up_gauge.set', (['(1)'], {}), '(1)\n', (376, 379), True, 'from flask import current_app as app\n'), ((391, 406), 'flask.make_response', 'make_response', ([], {}), '()\n', (404, 406), False, 'from flask import make_response\n')]
|
from django.urls import path
from . import views
app_name = "trade"
urlpatterns = [
path('start/<str:receiver_username>/', views.start, name="start"),
path('<str:other_username>/', views.see_trade, name="see"),
path('change/<str:other_username>', views.change_trade, name="change"),
]
|
[
"django.urls.path"
] |
[((92, 157), 'django.urls.path', 'path', (['"""start/<str:receiver_username>/"""', 'views.start'], {'name': '"""start"""'}), "('start/<str:receiver_username>/', views.start, name='start')\n", (96, 157), False, 'from django.urls import path\n'), ((163, 221), 'django.urls.path', 'path', (['"""<str:other_username>/"""', 'views.see_trade'], {'name': '"""see"""'}), "('<str:other_username>/', views.see_trade, name='see')\n", (167, 221), False, 'from django.urls import path\n'), ((227, 297), 'django.urls.path', 'path', (['"""change/<str:other_username>"""', 'views.change_trade'], {'name': '"""change"""'}), "('change/<str:other_username>', views.change_trade, name='change')\n", (231, 297), False, 'from django.urls import path\n')]
|
#!/usr/bin/python3
# ******************************************************************************
# Copyright (c) Huawei Technologies Co., Ltd. 2020-2020. All rights reserved.
# licensed under the Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan PSL v2.
# You may obtain a copy of Mulan PSL v2 at:
# http://license.coscl.org.cn/MulanPSL2
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
# PURPOSE.
# See the Mulan PSL v2 for more details.
# ******************************************************************************/
# -*- coding:utf-8 -*-
"""
Count all test cases
"""
import os
import sys
import unittest
import coverage
from coverage import CoverageException
suite = unittest.TestSuite()
BASE_PATH = os.path.join(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
TEST_CASE_PATH = os.path.join(BASE_PATH, "test")
cov = coverage.coverage(include=[BASE_PATH + "/javcra/*"],
omit=["*__init__.py", "*/check_requires/*.py", "*/api/obscloud.py"])
def specify_case(file_path):
"""
Test specify test cases
Args:
file_path: test cases file path
Returns: discover result
"""
discover = unittest.defaultTestLoader.discover(
file_path, pattern="test*.py", top_level_dir=file_path
)
return discover
if __name__ == "__main__":
runner = unittest.TextTestRunner()
args = sys.argv
cov.start()
test_case_files = [
os.path.join(TEST_CASE_PATH, "test_start/"),
os.path.join(TEST_CASE_PATH, "test_modify/"),
os.path.join(TEST_CASE_PATH, "test_check/"),
os.path.join(TEST_CASE_PATH, "test_release/")
]
errors = []
failures = []
for file in test_case_files:
runner_result = runner.run(specify_case(file))
errors.extend(runner_result.errors)
failures.extend(runner_result.failures)
if any([errors, failures]):
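# note: exiting here skips cov.stop() and cov.report() below, so no coverage is reported when any case failed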
sys.exit(1)
cov.stop()
try:
cov.report(show_missing=True)
# cov.html_report()
except CoverageException:
print("No data to report")
sys.exit(1)
|
[
"unittest.TextTestRunner",
"unittest.TestSuite",
"coverage.coverage",
"os.path.dirname",
"unittest.defaultTestLoader.discover",
"os.path.join",
"sys.exit"
] |
[((873, 893), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (891, 893), False, 'import unittest\n'), ((999, 1030), 'os.path.join', 'os.path.join', (['BASE_PATH', '"""test"""'], {}), "(BASE_PATH, 'test')\n", (1011, 1030), False, 'import os\n'), ((1038, 1163), 'coverage.coverage', 'coverage.coverage', ([], {'include': "[BASE_PATH + '/javcra/*']", 'omit': "['*__init__.py', '*/check_requires/*.py', '*/api/obscloud.py']"}), "(include=[BASE_PATH + '/javcra/*'], omit=['*__init__.py',\n '*/check_requires/*.py', '*/api/obscloud.py'])\n", (1055, 1163), False, 'import coverage\n'), ((1354, 1449), 'unittest.defaultTestLoader.discover', 'unittest.defaultTestLoader.discover', (['file_path'], {'pattern': '"""test*.py"""', 'top_level_dir': 'file_path'}), "(file_path, pattern='test*.py',\n top_level_dir=file_path)\n", (1389, 1449), False, 'import unittest\n'), ((1522, 1547), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {}), '()\n', (1545, 1547), False, 'import unittest\n'), ((1616, 1659), 'os.path.join', 'os.path.join', (['TEST_CASE_PATH', '"""test_start/"""'], {}), "(TEST_CASE_PATH, 'test_start/')\n", (1628, 1659), False, 'import os\n'), ((1669, 1713), 'os.path.join', 'os.path.join', (['TEST_CASE_PATH', '"""test_modify/"""'], {}), "(TEST_CASE_PATH, 'test_modify/')\n", (1681, 1713), False, 'import os\n'), ((1723, 1766), 'os.path.join', 'os.path.join', (['TEST_CASE_PATH', '"""test_check/"""'], {}), "(TEST_CASE_PATH, 'test_check/')\n", (1735, 1766), False, 'import os\n'), ((1776, 1821), 'os.path.join', 'os.path.join', (['TEST_CASE_PATH', '"""test_release/"""'], {}), "(TEST_CASE_PATH, 'test_release/')\n", (1788, 1821), False, 'import os\n'), ((2084, 2095), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2092, 2095), False, 'import sys\n'), ((952, 977), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (967, 977), False, 'import os\n'), ((2260, 2271), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2268, 2271), False, 'import sys\n')]
|
import os
import numpy as np
import matplotlib.pyplot as plt
from torch.utils.data import Dataset, DataLoader
from torchvision import transforms, utils
import torch
from torch.autograd import Variable
from load_memmap import *
class AxonDataset(Dataset):
"""" Inherits pytorch Dataset class to load Axon Dataset """
def __init__(self, data_name='crops64_axons_only', folder='axon_data', type='train', transform=None, resize=None, normalise=False, read='npy'):
"""
:param data_name (string) - data name to load/save
:param folder- location of dataset
:param type - train or test dataset
"""
self.data_name = data_name
self.read = read
self.transform = transform
self.resize = resize
self.normalise = normalise
__location__ = os.path.realpath(
os.path.join(os.getcwd(), os.path.dirname(__file__)))
if self.read == 'npy':
self.x_data, self.y_data, _ = load_dataset(type, folder, data_name)
self.len_data = len(self.x_data)
elif self.read == 'image':
self.folder = os.path.join(__location__,self.data_name,'train')
images_original = [img for img in
os.listdir(os.path.join(os.path.dirname(os.path.abspath(__file__)), self.folder, "original"))]
images_mask = [img for img in
os.listdir(os.path.join(os.path.dirname(os.path.abspath(__file__)), self.folder, "mask"))]
self.images_mask = images_mask
self.images_original = images_original
self.images_mask.sort()
self.images_original.sort()
self.len_data = len(images_original)
def __len__(self):
""" get length of data
example: len(data) """
return self.len_data
def __getitem__(self, idx):
"""gets samples from data according to idx
:param idx- index to take
example: data[10] -to get the 10th data sample"""
__location__ = os.path.realpath(
os.path.join(os.getcwd(), os.path.dirname(__file__)))
if self.read == 'npy':
if self.resize:
sample_x_data = np.resize(np.array([self.x_data[idx]]), (1, self.resize,self.resize))
sample_y_data = np.resize(np.array([self.y_data[idx]]), (1, self.resize,self.resize))
else:
sample_x_data = self.x_data[idx]
sample_y_data = self.y_data[idx]
elif self.read == 'image':
data_path = self.images_original[idx]
mask_path = self.images_mask[idx]
sample_x_data = plt.imread(
os.path.join(os.path.dirname(os.path.abspath(__file__)), self.folder, "original", data_path))
sample_y_data = (plt.imread(
os.path.join(os.path.dirname(os.path.abspath(__file__)), self.folder, "mask", mask_path))).astype(
float)
sample_x_data = torch.Tensor(sample_x_data)
sample_y_data = torch.Tensor(sample_y_data)
if len(sample_x_data.shape) == 2:
sample_x_data.unsqueeze_(0)
if len(sample_y_data.shape) == 2:
sample_y_data.unsqueeze_(0)
# normalise between [-1,1]
if self.normalise:
sample_x_data = 2*((sample_x_data - torch.min(sample_x_data))/ (torch.max(sample_x_data) - torch.min(sample_x_data)) ) - 1
data = [sample_x_data, sample_y_data]
return data
class SyntheticDataset(Dataset):
"""" Inherits pytorch Dataset class to load Synthetic Axon Dataset """
def __init__(self, num=50000, data_name='syn256', type='val', transform=None, resize=None):
"""
:param num - number of data to generate
:param data_name (string) - data name to load/save
:param type - train or test dataset
"""
__location__ = os.path.realpath(
os.path.join(os.getcwd(), os.path.dirname(__file__)))
name_x = os.path.join(__location__, 'npy_data/' + data_name + '_x_data_' + type + '.npy')
name_y = os.path.join(__location__,'npy_data/' + data_name + '_y_data_' + type + '.npy')
name_y_points = os.path.join(__location__,'npy_data/' + data_name + '_y_points_data_' + type + '.npy')
try:
self.x_data = np.load(name_x, mmap_mode='r')
self.y_data = np.load(name_y, mmap_mode='r')
self.y_data_points = np.load(name_y_points)
except:
# if no dataset currently created, generate a new synthetic dataset with parameters args
print('no dataset with the name')
self.data_name = data_name
self.transform = transform
self.resize = resize
def read_tensor_dataset(self):
""" converts dataset to tensors """
tt = ToTensor()
x_data = tt(self.x_data)
y_data = tt(self.y_data)
return x_data, y_data
def __len__(self):
""" get length of data
example: len(data) """
return (len(self.x_data))
def __getitem__(self, idx):
"""gets samples from data according to idx
:param idx- index to take
example: data[10] -to get the 10th data sample"""
if self.resize:
sample_x_data = np.resize(np.array([self.x_data[idx]]), (1, self.resize,self.resize))
sample_y_data = np.resize(np.array([self.y_data[idx]]), (1, self.resize,self.resize))
else:
sample_x_data = self.x_data[idx]
sample_y_data = self.y_data[idx]
sample_x_data = np.expand_dims(sample_x_data, axis=0)
sample_y_data = np.expand_dims(sample_y_data, axis=0)
sample_x_data = torch.Tensor(sample_x_data)
sample_y_data = torch.Tensor(sample_y_data)
data = [sample_x_data, sample_y_data]
return data
class ToTensor:
"""Convert ndarrays in data to Tensors."""
@staticmethod
def __call__(data):
# swap color axis because
# numpy image: H x W x C
# torch image: C X H X W
#data = data.transpose((1, 0))
data = np.array([data])
data = torch.Tensor(data)
if torch.cuda.is_available():
data = data.cuda()
return data
@staticmethod
def data_to_tensor(x_data, y_data):
"""takes data and splits into a list of tensors- of which each list contains
tensors of several samples (i.e. one id)
:param x_data - the data
:param y_data - the labels
"""
tt = ToTensor()
x_train_temp = tt(x_data)
y_train_temp = tt(y_data)
data = [x_train_temp, y_train_temp]
return data
@staticmethod
def data_ids_to_tensor_list(x_data, y_data, ids):
"""takes data and splits into a list of tensors- of which each list contains
tensors of several samples (i.e. one id)
:param x_data - the data
:param y_data - the labels
:param ids - the ids corresponding to each sample
"""
tt = ToTensor()
unique_ids = np.unique(ids)
data = [None] * unique_ids.size
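# per-id sample counts; note that the name shadows the builtin len() for the rest of this method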
len = np.zeros(unique_ids.size).astype(int)
for i in np.arange(unique_ids.size):
ind_id = np.nonzero(unique_ids[i] == ids)[0].astype(int)
len[i] = int(ind_id.size)
x_train_temp = tt(x_data[ind_id])
y_train_temp = tt(y_data[ind_id])
data[i] = [x_train_temp[0], y_train_temp[0], len[i]]
max_len = int(np.max(len))
return data, max_len
@staticmethod
def create_variable(tensor):
"""creates a Variable tensor with gpu if available
:param tensor - the tensor to wrap with Variable """
# Do cuda() before wrapping with variable
if torch.cuda.is_available():
return Variable(tensor.cuda())
else:
return Variable(tensor)
|
[
"numpy.load",
"torch.autograd.Variable",
"numpy.zeros",
"numpy.expand_dims",
"numpy.nonzero",
"numpy.max",
"torch.Tensor",
"numpy.array",
"numpy.arange",
"torch.cuda.is_available",
"torch.max",
"torch.min",
"numpy.unique"
] |
[((2945, 2972), 'torch.Tensor', 'torch.Tensor', (['sample_x_data'], {}), '(sample_x_data)\n', (2957, 2972), False, 'import torch\n'), ((2997, 3024), 'torch.Tensor', 'torch.Tensor', (['sample_y_data'], {}), '(sample_y_data)\n', (3009, 3024), False, 'import torch\n'), ((5674, 5701), 'torch.Tensor', 'torch.Tensor', (['sample_x_data'], {}), '(sample_x_data)\n', (5686, 5701), False, 'import torch\n'), ((5726, 5753), 'torch.Tensor', 'torch.Tensor', (['sample_y_data'], {}), '(sample_y_data)\n', (5738, 5753), False, 'import torch\n'), ((6085, 6101), 'numpy.array', 'np.array', (['[data]'], {}), '([data])\n', (6093, 6101), True, 'import numpy as np\n'), ((6117, 6135), 'torch.Tensor', 'torch.Tensor', (['data'], {}), '(data)\n', (6129, 6135), False, 'import torch\n'), ((6147, 6172), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (6170, 6172), False, 'import torch\n'), ((7048, 7062), 'numpy.unique', 'np.unique', (['ids'], {}), '(ids)\n', (7057, 7062), True, 'import numpy as np\n'), ((7172, 7198), 'numpy.arange', 'np.arange', (['unique_ids.size'], {}), '(unique_ids.size)\n', (7181, 7198), True, 'import numpy as np\n'), ((7762, 7787), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (7785, 7787), False, 'import torch\n'), ((4289, 4319), 'numpy.load', 'np.load', (['name_x'], {'mmap_mode': '"""r"""'}), "(name_x, mmap_mode='r')\n", (4296, 4319), True, 'import numpy as np\n'), ((4346, 4376), 'numpy.load', 'np.load', (['name_y'], {'mmap_mode': '"""r"""'}), "(name_y, mmap_mode='r')\n", (4353, 4376), True, 'import numpy as np\n'), ((4410, 4432), 'numpy.load', 'np.load', (['name_y_points'], {}), '(name_y_points)\n', (4417, 4432), True, 'import numpy as np\n'), ((5545, 5582), 'numpy.expand_dims', 'np.expand_dims', (['sample_x_data'], {'axis': '(0)'}), '(sample_x_data, axis=0)\n', (5559, 5582), True, 'import numpy as np\n'), ((5611, 5648), 'numpy.expand_dims', 'np.expand_dims', (['sample_y_data'], {'axis': '(0)'}), '(sample_y_data, axis=0)\n', (5625, 5648), True, 'import numpy as np\n'), ((7486, 7497), 'numpy.max', 'np.max', (['len'], {}), '(len)\n', (7492, 7497), True, 'import numpy as np\n'), ((7865, 7881), 'torch.autograd.Variable', 'Variable', (['tensor'], {}), '(tensor)\n', (7873, 7881), False, 'from torch.autograd import Variable\n'), ((5255, 5283), 'numpy.array', 'np.array', (['[self.x_data[idx]]'], {}), '([self.x_data[idx]])\n', (5263, 5283), True, 'import numpy as np\n'), ((5353, 5381), 'numpy.array', 'np.array', (['[self.y_data[idx]]'], {}), '([self.y_data[idx]])\n', (5361, 5381), True, 'import numpy as np\n'), ((7117, 7142), 'numpy.zeros', 'np.zeros', (['unique_ids.size'], {}), '(unique_ids.size)\n', (7125, 7142), True, 'import numpy as np\n'), ((2183, 2211), 'numpy.array', 'np.array', (['[self.x_data[idx]]'], {}), '([self.x_data[idx]])\n', (2191, 2211), True, 'import numpy as np\n'), ((2285, 2313), 'numpy.array', 'np.array', (['[self.y_data[idx]]'], {}), '([self.y_data[idx]])\n', (2293, 2313), True, 'import numpy as np\n'), ((7221, 7253), 'numpy.nonzero', 'np.nonzero', (['(unique_ids[i] == ids)'], {}), '(unique_ids[i] == ids)\n', (7231, 7253), True, 'import numpy as np\n'), ((3301, 3325), 'torch.min', 'torch.min', (['sample_x_data'], {}), '(sample_x_data)\n', (3310, 3325), False, 'import torch\n'), ((3329, 3353), 'torch.max', 'torch.max', (['sample_x_data'], {}), '(sample_x_data)\n', (3338, 3353), False, 'import torch\n'), ((3356, 3380), 'torch.min', 'torch.min', (['sample_x_data'], {}), '(sample_x_data)\n', (3365, 3380), False, 'import torch\n')]
|
import os
from library.connecter.ansible.yaml import Yaml_Base
from library.utils.file import read_file
from library.utils.path import get_pathlist
class Read_File(Yaml_Base):
def router(self, this_path, this_basedir=None, yaml_tpye='main', preserve=True, together=False, name='', describe=''):
'''
Router that checks whether YAML read from a file is syntactically correct
:parameters
this_path: file path
this_basedir: base directory
yaml_tpye: YAML file type
preserve: whether to write the content to the database
together: whether to return the content of every file under this main
name: name under which the YAML content is stored in the database
describe: description stored alongside the YAML content
zhname: short Chinese display name stored with the YAML content
:return
a tuple whose first element is the result:
on success (True, file content (as a dict))
on failure (False, the reason for the failure)
'''
if yaml_tpye in ('full_roles' , 'main') :
result = self.main(this_path, preserve=preserve, together=together, name=name, describe=describe)
elif yaml_tpye == 'include' :
result = self.include(this_path, this_basedir=this_basedir, file_type='tasks', preserve=preserve, name=name, describe=describe)
elif yaml_tpye == 'roles' :
result = self.roles(this_path, this_basedir=this_basedir, preserve=preserve, together=together, name=name, describe=describe)
else :
self.logger.error('Failed to check the YAML file syntax: parameter yaml_tpye ' + yaml_tpye + ' is not an accepted value; only full_roles, main, include and roles are accepted')
return (False, 'Parameter yaml_tpye ' + yaml_tpye + ' is not an accepted value; only full_roles, main, include and roles are accepted')
return result
def main(self, filename, preserve=True, together=False, name='', describe=''):
'''
Check whether a main file is syntactically correct; any include and/or roles entries are checked one by one
include: must be a relative path
roles: must consist of letters and digits only
:parameters
filename: file path
preserve: whether to write the content to the database
together: whether to return the content of every file under this main
name: name under which the YAML content is stored in the database
describe: description stored alongside the YAML content
zhname: short Chinese display name stored with the YAML content
:return
a tuple whose first element is the result:
on success (True, file content (as a dict))
on failure (False, the reason for the failure)
'''
if preserve and together:
sub_preserve = False
else :
sub_preserve = preserve
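# on success yaml_loader returns (True, filename, raw content, parsed YAML data), unpacked below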
result = self.yaml_loader(filename)
if result[0] :
(filename, content, yaml_data) = result[1:]
else :
self.logger.error('Failed to check YAML file ' + filename + ' (type full_roles or main): converting it to YAML data failed, reason: ' + result[1])
return (False, 'Converting file ' + filename + ' to YAML data failed, ' + result[1])
result = self.check_main(yaml_data)
if result[0] :
(roles_list, includefile_dict) = result[1:]
else :
self.logger.error('Failed to check YAML file ' + filename + ' (type full_roles or main): it did not pass the YAML syntax check, reason: ' + result[1])
return (False, 'File ' + filename + ' did not pass the YAML syntax check, ' + result[1])
this_basedir = os.path.dirname(filename)
include_content = {}
roles_content = {}
for file, file_type in includefile_dict.items() :
result = self.include(file, this_basedir=this_basedir, file_type=file_type, preserve=sub_preserve)
if not result[0] :
self.logger.error('Failed to check YAML file ' + filename + ' (type full_roles or main): an include file did not pass the YAML syntax check, reason: ' + result[1])
return (False, 'Include file ' + file + ' in file ' + filename + ' did not pass the YAML syntax check, ' + result[1])
else :
file = os.path.basename(file)
include_content.update({file:result[1]})
for roles in roles_list :
result = self.roles(roles, this_basedir=this_basedir, preserve=sub_preserve, together=together)
if result[0] :
include_content.update(result[2])
roles = os.path.basename(roles)
roles_content.update({roles:result[1]})
else :
self.logger.error('Failed to check YAML file ' + filename + ' (type full_roles or main): role ' + roles + ' did not pass the YAML syntax check, reason: ' + result[1])
return (False, 'Role ' + roles + ' in file ' + filename + ' did not pass the YAML syntax check, ' + result[1])
data = {
'main' : content,
'include': include_content,
'roles': roles_content,
}
if preserve :
result = self.write2db(name, data, 'main', describe=describe)
if not result[0] :
self.logger.error('YAML file ' + filename + ' (type full_roles or main) passed the syntax check but could not be written to the database, reason: ' + result[1])
return (False, 'File ' + filename + ' passed the YAML syntax check but could not be written to the database' + result[1])
self.logger.info('Successfully checked YAML file ' + filename + ' (type full_roles or main)')
if together :
return (True, data)
else :
return (True, content)
def include(self, file, this_basedir=None, file_type='main', preserve=True, name='', describe=''):
'''
Check whether an include file is syntactically correct
:parameters
this_basedir: directory of the file that references this include
file: file path
this_path: path used in the reference
file_type: file type
preserve: whether to write the content to the database
name: name under which the YAML content is stored in the database
describe: description stored alongside the YAML content
zhname: short Chinese display name stored with the YAML content
:return
a tuple whose first element is the result:
on success (True, include file content (as a dict, possibly empty))
on failure (False, the reason for the failure)
'''
'''
if file_type not in ('main', 'tasks', 'var') :
self.logger.error('Failed to check YAML file ' + file + ' (type include): parameter file_type is invalid')
return (False, 'Parameter file_type is invalid')
result = self._isinclude(file)
if not result[0] :
self.logger.error('Failed to check YAML file ' + file + ' (type include): the include check failed, reason: ' + result[1])
return result
if this_basedir is None or not this_basedir :
filename = file
else :
try :
filename = this_basedir + '/' + file
except :
filename = file
result = self.yaml_loader(filename)
if result[0] :
(content, yaml_data) = result[2:]
else :
self.logger.error('Failed to check YAML file ' + file + ' (type include): converting it to YAML data failed, reason: ' + result[1])
return (False, result[1])
result = self.check_include(yaml_data, file_type=file_type)
if not result[0] :
self.logger.error('Failed to check YAML file ' + file + ' (type include): it did not pass the syntax check, reason: ' + result[1])
return (False, result[1])
if preserve :
result = self.write2db(name, content, 'include', describe=describe)
if not result[0] :
self.logger.error('YAML file ' + file + ' (type include) passed the syntax check but could not be written to the database, reason: ' + result[1])
return (False, 'Could not write to the database' + result[1])
self.logger.info('Successfully checked YAML file ' + filename + ' (type include)')
return (True, content)
def roles(self, roles_path, this_basedir=None, preserve=True, together=False, name='', describe=''):
'''
Check whether a single role is syntactically correct
:parameters
this_basedir: directory of the main file that references this role; for example, if /opt/lykops/example/ansible/roles/nginx/main.yaml references a role, this value is /opt/lykops/example/ansible/roles/nginx/
roles_path: role path as written in the main file that references the role
preserve: whether to write the content to the database
together: whether to return the content of every file under this role
name: name under which the YAML content is stored in the database
describe: description stored alongside the YAML content
zhname: short Chinese display name stored with the YAML content
:return
a tuple whose first element is the result:
on success (True, content of every file under the role (as a dict, possibly empty), content of every include file under the role (as a dict, possibly empty))
on failure (False, the reason for the failure)
'''
content_dict = {}
if preserve and together:
sub_preserve = False
else :
sub_preserve = preserve
if not name :
name = roles_path
result = self._isrolesname(name)
if not result :
self.logger.error('Failed to check YAML role ' + roles_path + ': the role name does not meet the requirements of this system (note: native Ansible does accept such names)')
return (False, 'Syntax error: the role name does not meet the requirements of this system (note: native Ansible does accept such names)')
else :
if this_basedir is None or not this_basedir:
this_roles_path = roles_path
else :
try :
this_roles_path = this_basedir + '/roles/' + roles_path
except :
this_roles_path = roles_path
include_content = {}
for this_dir in ('tasks', 'vars', 'handlers', 'meta', 'defaults') :
yaml_file = this_roles_path + '/' + this_dir + '/main.yaml'
result = read_file(yaml_file)
if not result[0] :
if this_dir == 'tasks' :
self.logger.error('Failed to check YAML role ' + roles_path + ': ' + this_dir + '/main.yaml does not exist')
return (False, this_dir + '/main.yaml does not exist')
continue
else :
content_dict[this_dir] = result[1]
temp_dir = this_roles_path + '/templates/'
content_dict['templates'] = {}
result = get_pathlist(temp_dir, get_death=0, max_size=4 * 1024 * 1024)
if result[0] :
temp_list = result[1]
for temp in temp_list :
result = read_file(temp)
if result[0] :
temp_file = os.path.basename(temp)
content_dict['templates'][temp_file] = result[1]
if not content_dict['templates'] :
del content_dict['templates']
result = self.check_roles(content_dict)
if result[0] :
includefile_dict = result[1]
for file, file_type in includefile_dict.items() :
result = self.include(file, this_basedir=this_basedir, file_type=file_type, preserve=sub_preserve)
if not result[0] :
self.logger.error('Failed to check YAML role ' + roles_path + ': the include file ' + file + ' referenced by the role did not pass the syntax check, reason: ' + result[1])
return (False, 'Include file ' + file + ' referenced by the role did not pass the syntax check, ' + result[1])
else :
include_content.update({file:result[1]})
else :
self.logger.error('Failed to check YAML role ' + roles_path + ': ' + this_dir + '/main.yaml has a syntax error, reason: ' + result[1])
return (False, this_dir + '/main.yaml has a syntax error, ' + result[1])
data = {
'main' : {},
'include': include_content,
'roles': {name:content_dict},
}
if preserve :
result = self.write2db(name, data, 'roles', describe=describe)
if not result[0] :
self.logger.error('Failed to check YAML role ' + roles_path + ': could not write to the database, ' + result[1])
return (False, 'Could not write to the database, ' + result[1])
self.logger.info('Successfully checked YAML role ' + roles_path)
if together :
return (True, content_dict, include_content)
else :
return (True, {}, {})
|
[
"os.path.basename",
"os.path.dirname",
"library.utils.file.read_file",
"library.utils.path.get_pathlist"
] |
[((2954, 2979), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (2969, 2979), False, 'import os\n'), ((9110, 9171), 'library.utils.path.get_pathlist', 'get_pathlist', (['temp_dir'], {'get_death': '(0)', 'max_size': '(4 * 1024 * 1024)'}), '(temp_dir, get_death=0, max_size=4 * 1024 * 1024)\n', (9122, 9171), False, 'from library.utils.path import get_pathlist\n'), ((8632, 8652), 'library.utils.file.read_file', 'read_file', (['yaml_file'], {}), '(yaml_file)\n', (8641, 8652), False, 'from library.utils.file import read_file\n'), ((3503, 3525), 'os.path.basename', 'os.path.basename', (['file'], {}), '(file)\n', (3519, 3525), False, 'import os\n'), ((3839, 3862), 'os.path.basename', 'os.path.basename', (['roles'], {}), '(roles)\n', (3855, 3862), False, 'import os\n'), ((9290, 9305), 'library.utils.file.read_file', 'read_file', (['temp'], {}), '(temp)\n', (9299, 9305), False, 'from library.utils.file import read_file\n'), ((9369, 9391), 'os.path.basename', 'os.path.basename', (['temp'], {}), '(temp)\n', (9385, 9391), False, 'import os\n')]
|
import json
from urllib import request
import requests
#for rest api
repository_url = 'http://10.3.100.22:8080'
restpath = '/rest'
xmlpath = '/xmlui'
def get_communities():
communities = request.urlopen(repository_url + restpath + '/communities')
communities_json = communities.read().decode('utf-8')
communities_load = json.loads(communities_json)
communities_processed = []
for dictionary in communities_load:
if dictionary['name'] and dictionary['name'] != '':
communities_processed.append(dictionary)
#print(communities_processed)
with open("test.json", 'w') as jsonfile:
text = json.dumps(communities_processed)
jsonfile.write(text)
return communities_processed
def get_by_year(cp):
for dictionary in cp:
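# a community whose name parses as an integer is treated as a year archive; anything else falls through with year = 0 and is skipped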
try:
year = int(dictionary['name'])
id = dictionary['id']
print(year)
#ccj = curr_collections.read().decode('utf-8')
except:
year = 0
if year != 0:
path = repository_url + dictionary['link'] + '/collections'
print(path)
curr_collections = request.urlopen(path)
curr_json = json.loads(curr_collections.read().decode('utf-8'))
print(curr_json[0]['handle'])
path += str(curr_json[0]['id'])
temp = requests.get(path)
print(temp)
if __name__ == '__main__':
get_by_year(get_communities())
|
[
"requests.get",
"urllib.request.urlopen",
"json.dumps",
"json.loads"
] |
[((193, 252), 'urllib.request.urlopen', 'request.urlopen', (["(repository_url + restpath + '/communities')"], {}), "(repository_url + restpath + '/communities')\n", (208, 252), False, 'from urllib import request\n'), ((335, 363), 'json.loads', 'json.loads', (['communities_json'], {}), '(communities_json)\n', (345, 363), False, 'import json\n'), ((642, 675), 'json.dumps', 'json.dumps', (['communities_processed'], {}), '(communities_processed)\n', (652, 675), False, 'import json\n'), ((1147, 1168), 'urllib.request.urlopen', 'request.urlopen', (['path'], {}), '(path)\n', (1162, 1168), False, 'from urllib import request\n'), ((1350, 1368), 'requests.get', 'requests.get', (['path'], {}), '(path)\n', (1362, 1368), False, 'import requests\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for Truthcoin's consensus functions.
Verifies that the consensus algorithm works as expected.
Check test_answers.txt for expected results.
"""
from __future__ import division, unicode_literals, absolute_import
import os
import sys
import platform
import json
import numpy as np
import numpy.ma as ma
if platform.python_version() < "2.7":
unittest = __import__("unittest2")
else:
import unittest
HERE = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(HERE, os.pardir))
import consensus
def prp(o):
print(json.dumps(o, indent=3, sort_keys=True))
class TestConsensus(unittest.TestCase):
def setUp(self):
self.votes_unmasked = np.array([
[1, 1, 0, 0],
[1, 0, 0, 0],
[1, 1, 0, 0],
[1, 1, 1, 0],
[0, 0, 1, 1],
[0, 0, 1, 1],
])
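# mask NaN entries (missing votes); this integer matrix has none, so the mask is all False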
self.votes = ma.masked_array(self.votes_unmasked, np.isnan(self.votes_unmasked))
def test_Factory(self):
outcome = consensus.Factory(self.votes)
self.assertAlmostEquals(outcome["Certainty"], 0.228237569613, places=11)
def test_Factory_scaled(self):
scalar_decision_params = [
{"scaled": True, "min": 0.1, "max": 0.5},
{"scaled": True, "min": 0.2, "max": 0.7},
{"scaled": False, "min": 0, "max": 1},
{"scaled": False, "min": 0, "max": 1},
]
outcome = consensus.Factory(self.votes, Scales=scalar_decision_params)
self.assertAlmostEquals(outcome["Certainty"], 0.618113325804, places=11)
def tearDown(self):
del self.votes_unmasked
del self.votes
if __name__ == "__main__":
suite = unittest.TestLoader().loadTestsFromTestCase(TestConsensus)
unittest.TextTestRunner(verbosity=2).run(suite)
|
[
"platform.python_version",
"unittest.TextTestRunner",
"os.path.realpath",
"json.dumps",
"numpy.isnan",
"numpy.array",
"unittest.TestLoader",
"os.path.join",
"consensus.Factory"
] |
[((360, 385), 'platform.python_version', 'platform.python_version', ([], {}), '()\n', (383, 385), False, 'import platform\n'), ((484, 510), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (500, 510), False, 'import os\n'), ((531, 560), 'os.path.join', 'os.path.join', (['HERE', 'os.pardir'], {}), '(HERE, os.pardir)\n', (543, 560), False, 'import os\n'), ((603, 648), 'json.dumps', 'json.dumps', (['outcome'], {'indent': '(3)', 'sort_keys': '(True)'}), '(outcome, indent=3, sort_keys=True)\n', (613, 648), False, 'import json\n'), ((743, 841), 'numpy.array', 'np.array', (['[[1, 1, 0, 0], [1, 0, 0, 0], [1, 1, 0, 0], [1, 1, 1, 0], [0, 0, 1, 1], [0, \n 0, 1, 1]]'], {}), '([[1, 1, 0, 0], [1, 0, 0, 0], [1, 1, 0, 0], [1, 1, 1, 0], [0, 0, 1,\n 1], [0, 0, 1, 1]])\n', (751, 841), True, 'import numpy as np\n'), ((1057, 1086), 'consensus.Factory', 'consensus.Factory', (['self.votes'], {}), '(self.votes)\n', (1074, 1086), False, 'import consensus\n'), ((1477, 1537), 'consensus.Factory', 'consensus.Factory', (['self.votes'], {'Scales': 'scalar_decision_params'}), '(self.votes, Scales=scalar_decision_params)\n', (1494, 1537), False, 'import consensus\n'), ((979, 1008), 'numpy.isnan', 'np.isnan', (['self.votes_unmasked'], {}), '(self.votes_unmasked)\n', (987, 1008), True, 'import numpy as np\n'), ((1739, 1760), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (1758, 1760), False, 'import unittest\n'), ((1802, 1838), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (1825, 1838), False, 'import unittest\n')]
|
from rest_framework import status
from rest_framework.test import APISimpleTestCase
from rest_framework_jwt.settings import api_settings
from happ.models import User, Interest, LogEntry
from happ.factories import (
UserFactory,
InterestFactory,
CityFactory,
)
from happ.tests import *
class Tests(APISimpleTestCase):
def test_get_without_auth(self):
"""
Resource is not available without authentication
"""
url = prepare_url('admin-interests-list')
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_get_with_auth_not_staff(self):
"""
Resource is not available for non-staff users
"""
u = UserFactory()
u.set_password('<PASSWORD>')
u.save()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '<PASSWORD>'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
url = prepare_url('admin-interests-list')
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_get_with_auth(self):
"""
Resource is available with authentication only and for staff
"""
u = UserFactory(role=User.MODERATOR)
u.set_password('<PASSWORD>')
u.save()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '<PASSWORD>'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
url = prepare_url('admin-interests-list')
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_search_interests(self):
"""
We can search interests
"""
Interest.objects.delete()
for i in range(3):
interest = InterestFactory(title='Hockey')
interest.save()
interest = InterestFactory(title='Beer')
interest.save()
u = UserFactory(role=User.MODERATOR)
u.set_password('<PASSWORD>')
u.save()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '<PASSWORD>'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
url = prepare_url('admin-interests-list', query={'search': 'hoc'})
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 3)
def test_create_interest(self):
"""
we can create interest
"""
n = Interest.objects.count()
u = UserFactory(role=User.MODERATOR)
u.set_password('<PASSWORD>')
u.save()
log_n = LogEntry.objects.count()
url = prepare_url('admin-interests-list')
interest_data = {
'title': 'NewInterest name',
'parent_id': None,
'is_global': True,
'local_cities': [],
}
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '<PASSWORD>'
}
# restricted for moderator
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
response = self.client.post(url, data=interest_data, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
# ok for administrator
u.role = User.ADMINISTRATOR
u.save()
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
response = self.client.post(url, data=interest_data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(Interest.objects.count(), n+1)
self.assertEqual(response.data['title'], 'NewInterest name')
self.assertEqual(LogEntry.objects.count(), log_n+1)
# ok for root
u.role = User.ROOT
u.save()
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
response = self.client.post(url, data=interest_data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(Interest.objects.count(), n+2)
self.assertEqual(response.data['title'], 'NewInterest name')
self.assertEqual(LogEntry.objects.count(), log_n+2)
def test_update_interest(self):
"""
we can update interest
"""
cities = map(lambda x: str(CityFactory().id), range(3))
interest = InterestFactory()
u = UserFactory(role=User.MODERATOR)
u.set_password('<PASSWORD>')
u.save()
log_n = LogEntry.objects.count()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '<PASSWORD>'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
url = prepare_url('admin-interests-detail', kwargs={'id': str(interest.id)})
data = {
'title': 'NewInterest name',
'parent_id': None,
'is_global': False,
'local_cities': cities,
}
n = Interest.objects.count()
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
response = self.client.patch(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(Interest.objects.count(), n)
self.assertEqual(response.data['title'], 'NewInterest name')
self.assertEqual(LogEntry.objects.count(), log_n+1)
def test_delete_interest(self):
"""
we can delete interest
"""
u = UserFactory(role=User.MODERATOR)
u.set_password('<PASSWORD>')
u.save()
log_n = LogEntry.objects.count()
i = InterestFactory()
i.save()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '<PASSWORD>'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
url = prepare_url('admin-interests-detail', kwargs={'id': str(i.id)})
n = Interest.objects.count()
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
response = self.client.delete(url, format='json')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(Interest.objects.count(), n-1)
self.assertEqual(LogEntry.objects.count(), log_n+1)
def test_get_categories(self):
"""
Ensure that we can get only categories with api
"""
Interest.objects.delete()
for i in range(3):
interest = InterestFactory(parent=None)
inter = InterestFactory(parent=interest)
u = UserFactory(role=User.MODERATOR)
u.set_password('<PASSWORD>')
u.save()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '<PASSWORD>'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
url = prepare_url('admin-interests-categories')
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data['results']), 3)
for data in response.data['results']:
if data['id'] == str(interest.id):
self.assertEqual(len(data['children']), 1)
else:
self.assertEqual(len(data['children']), 0)
def test_get_children(self):
"""
Ensure that we can get only children with api
"""
Interest.objects.delete()
for i in range(3):
interest = InterestFactory(parent=None)
interest = InterestFactory(parent=interest)
u = UserFactory(role=User.MODERATOR)
u.set_password('<PASSWORD>')
u.save()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '<PASSWORD>'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
url = prepare_url('admin-interests-children')
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 1)
self.assertNotEqual(response.data['results'][0]['parent'], None)
def test_activate(self):
"""
we can activate interest through API
"""
u = UserFactory(role=User.MODERATOR)
u.set_password('<PASSWORD>')
u.save()
log_n = LogEntry.objects.count()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '<PASSWORD>'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
i = InterestFactory(is_active=False)
url = prepare_url('admin-interests-activate', kwargs={'id': str(i.id)})
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
response = self.client.post(url, format='json')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
i = Interest.objects.get(id=i.id)
self.assertTrue(i.is_active)
self.assertEqual(LogEntry.objects.count(), log_n+1)
def test_deactivate(self):
"""
we can deactivate interest through API
"""
u = UserFactory(role=User.MODERATOR)
u.set_password('<PASSWORD>')
u.save()
log_n = LogEntry.objects.count()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '<PASSWORD>'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
i = InterestFactory(is_active=True)
url = prepare_url('admin-interests-deactivate', kwargs={'id': str(i.id)})
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
response = self.client.post(url, format='json')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
i = Interest.objects.get(id=i.id)
self.assertFalse(i.is_active)
self.assertEqual(LogEntry.objects.count(), log_n+1)
|
[
"happ.factories.InterestFactory",
"happ.models.LogEntry.objects.count",
"happ.models.Interest.objects.delete",
"happ.models.Interest.objects.get",
"happ.factories.CityFactory",
"happ.factories.UserFactory",
"happ.models.Interest.objects.count"
] |
[((768, 781), 'happ.factories.UserFactory', 'UserFactory', ([], {}), '()\n', (779, 781), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((1519, 1551), 'happ.factories.UserFactory', 'UserFactory', ([], {'role': 'User.MODERATOR'}), '(role=User.MODERATOR)\n', (1530, 1551), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((2244, 2269), 'happ.models.Interest.objects.delete', 'Interest.objects.delete', ([], {}), '()\n', (2267, 2269), False, 'from happ.models import User, Interest, LogEntry\n'), ((2400, 2429), 'happ.factories.InterestFactory', 'InterestFactory', ([], {'title': '"""Beer"""'}), "(title='Beer')\n", (2415, 2429), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((2467, 2499), 'happ.factories.UserFactory', 'UserFactory', ([], {'role': 'User.MODERATOR'}), '(role=User.MODERATOR)\n', (2478, 2499), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((3271, 3295), 'happ.models.Interest.objects.count', 'Interest.objects.count', ([], {}), '()\n', (3293, 3295), False, 'from happ.models import User, Interest, LogEntry\n'), ((3308, 3340), 'happ.factories.UserFactory', 'UserFactory', ([], {'role': 'User.MODERATOR'}), '(role=User.MODERATOR)\n', (3319, 3340), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((3411, 3435), 'happ.models.LogEntry.objects.count', 'LogEntry.objects.count', ([], {}), '()\n', (3433, 3435), False, 'from happ.models import User, Interest, LogEntry\n'), ((5645, 5662), 'happ.factories.InterestFactory', 'InterestFactory', ([], {}), '()\n', (5660, 5662), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((5675, 5707), 'happ.factories.UserFactory', 'UserFactory', ([], {'role': 'User.MODERATOR'}), '(role=User.MODERATOR)\n', (5686, 5707), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((5778, 5802), 'happ.models.LogEntry.objects.count', 'LogEntry.objects.count', ([], {}), '()\n', (5800, 5802), False, 'from happ.models import User, Interest, LogEntry\n'), ((6320, 6344), 'happ.models.Interest.objects.count', 'Interest.objects.count', ([], {}), '()\n', (6342, 6344), False, 'from happ.models import User, Interest, LogEntry\n'), ((6879, 6911), 'happ.factories.UserFactory', 'UserFactory', ([], {'role': 'User.MODERATOR'}), '(role=User.MODERATOR)\n', (6890, 6911), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((6982, 7006), 'happ.models.LogEntry.objects.count', 'LogEntry.objects.count', ([], {}), '()\n', (7004, 7006), False, 'from happ.models import User, Interest, LogEntry\n'), ((7020, 7037), 'happ.factories.InterestFactory', 'InterestFactory', ([], {}), '()\n', (7035, 7037), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((7398, 7422), 'happ.models.Interest.objects.count', 'Interest.objects.count', ([], {}), '()\n', (7420, 7422), False, 'from happ.models import User, Interest, LogEntry\n'), ((7908, 7933), 'happ.models.Interest.objects.delete', 'Interest.objects.delete', ([], {}), '()\n', (7931, 7933), False, 'from happ.models import User, Interest, LogEntry\n'), ((8030, 8062), 'happ.factories.InterestFactory', 'InterestFactory', ([], {'parent': 'interest'}), '(parent=interest)\n', (8045, 8062), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((8076, 8108), 'happ.factories.UserFactory', 'UserFactory', ([], {'role': 'User.MODERATOR'}), '(role=User.MODERATOR)\n', (8087, 8108), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((9113, 9138), 'happ.models.Interest.objects.delete', 'Interest.objects.delete', ([], {}), '()\n', (9136, 9138), False, 'from happ.models import User, Interest, LogEntry\n'), ((9238, 9270), 'happ.factories.InterestFactory', 'InterestFactory', ([], {'parent': 'interest'}), '(parent=interest)\n', (9253, 9270), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((9284, 9316), 'happ.factories.UserFactory', 'UserFactory', ([], {'role': 'User.MODERATOR'}), '(role=User.MODERATOR)\n', (9295, 9316), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((10147, 10179), 'happ.factories.UserFactory', 'UserFactory', ([], {'role': 'User.MODERATOR'}), '(role=User.MODERATOR)\n', (10158, 10179), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((10250, 10274), 'happ.models.LogEntry.objects.count', 'LogEntry.objects.count', ([], {}), '()\n', (10272, 10274), False, 'from happ.models import User, Interest, LogEntry\n'), ((10540, 10572), 'happ.factories.InterestFactory', 'InterestFactory', ([], {'is_active': '(False)'}), '(is_active=False)\n', (10555, 10572), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((10908, 10937), 'happ.models.Interest.objects.get', 'Interest.objects.get', ([], {'id': 'i.id'}), '(id=i.id)\n', (10928, 10937), False, 'from happ.models import User, Interest, LogEntry\n'), ((11150, 11182), 'happ.factories.UserFactory', 'UserFactory', ([], {'role': 'User.MODERATOR'}), '(role=User.MODERATOR)\n', (11161, 11182), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((11253, 11277), 'happ.models.LogEntry.objects.count', 'LogEntry.objects.count', ([], {}), '()\n', (11275, 11277), False, 'from happ.models import User, Interest, LogEntry\n'), ((11543, 11574), 'happ.factories.InterestFactory', 'InterestFactory', ([], {'is_active': '(True)'}), '(is_active=True)\n', (11558, 11574), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((11912, 11941), 'happ.models.Interest.objects.get', 'Interest.objects.get', ([], {'id': 'i.id'}), '(id=i.id)\n', (11932, 11941), False, 'from happ.models import User, Interest, LogEntry\n'), ((2320, 2351), 'happ.factories.InterestFactory', 'InterestFactory', ([], {'title': '"""Hockey"""'}), "(title='Hockey')\n", (2335, 2351), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((4688, 4712), 'happ.models.Interest.objects.count', 'Interest.objects.count', ([], {}), '()\n', (4710, 4712), False, 'from happ.models import User, Interest, LogEntry\n'), ((4813, 4837), 'happ.models.LogEntry.objects.count', 'LogEntry.objects.count', ([], {}), '()\n', (4835, 4837), False, 'from happ.models import User, Interest, LogEntry\n'), ((5310, 5334), 'happ.models.Interest.objects.count', 'Interest.objects.count', ([], {}), '()\n', (5332, 5334), False, 'from happ.models import User, Interest, LogEntry\n'), ((5435, 5459), 'happ.models.LogEntry.objects.count', 'LogEntry.objects.count', ([], {}), '()\n', (5457, 5459), False, 'from happ.models import User, Interest, LogEntry\n'), ((6617, 6641), 'happ.models.Interest.objects.count', 'Interest.objects.count', ([], {}), '()\n', (6639, 6641), False, 'from happ.models import User, Interest, LogEntry\n'), ((6740, 6764), 'happ.models.LogEntry.objects.count', 'LogEntry.objects.count', ([], {}), '()\n', (6762, 6764), False, 'from happ.models import User, Interest, LogEntry\n'), ((7693, 7717), 'happ.models.Interest.objects.count', 'Interest.objects.count', ([], {}), '()\n', (7715, 7717), False, 'from happ.models import User, Interest, LogEntry\n'), ((7749, 7773), 'happ.models.LogEntry.objects.count', 'LogEntry.objects.count', ([], {}), '()\n', (7771, 7773), False, 'from happ.models import User, Interest, LogEntry\n'), ((7984, 8012), 'happ.factories.InterestFactory', 'InterestFactory', ([], {'parent': 'None'}), '(parent=None)\n', (7999, 8012), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((9189, 9217), 'happ.factories.InterestFactory', 'InterestFactory', ([], {'parent': 'None'}), '(parent=None)\n', (9204, 9217), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n'), ((11000, 11024), 'happ.models.LogEntry.objects.count', 'LogEntry.objects.count', ([], {}), '()\n', (11022, 11024), False, 'from happ.models import User, Interest, LogEntry\n'), ((12005, 12029), 'happ.models.LogEntry.objects.count', 'LogEntry.objects.count', ([], {}), '()\n', (12027, 12029), False, 'from happ.models import User, Interest, LogEntry\n'), ((5597, 5610), 'happ.factories.CityFactory', 'CityFactory', ([], {}), '()\n', (5608, 5610), False, 'from happ.factories import UserFactory, InterestFactory, CityFactory\n')]
|
import sys
from PyQt5 import QtGui
import PyQt5.QtWidgets as qw
class Mensaje(qw.QWidget):
def __init__(self, parent=None):
qw.QWidget.__init__(self, parent)
self.setGeometry(700, 300, 640, 640)
self.setWindowTitle("Basico 03")
self.setWindowIcon(QtGui.QIcon("Recursos/Icon-Python_PyQt5.png"))
self.setToolTip("Esto es un <b><i>Widget</i></b> hecho con PyQt.") # Mensaje tooltip, puede usar RTF
qw.QToolTip.setFont(QtGui.QFont("OldEnglish", 11)) # Fuente y tamaño de fuente
apli = qw.QApplication(sys.argv)
tip = Mensaje()
tip.show()
apli.exec_()  # this form can also be used
|
[
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtGui.QIcon",
"PyQt5.QtWidgets.QWidget.__init__",
"PyQt5.QtGui.QFont"
] |
[((537, 562), 'PyQt5.QtWidgets.QApplication', 'qw.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (552, 562), True, 'import PyQt5.QtWidgets as qw\n'), ((137, 170), 'PyQt5.QtWidgets.QWidget.__init__', 'qw.QWidget.__init__', (['self', 'parent'], {}), '(self, parent)\n', (156, 170), True, 'import PyQt5.QtWidgets as qw\n'), ((285, 330), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', (['"""Recursos/Icon-Python_PyQt5.png"""'], {}), "('Recursos/Icon-Python_PyQt5.png')\n", (296, 330), False, 'from PyQt5 import QtGui\n'), ((470, 499), 'PyQt5.QtGui.QFont', 'QtGui.QFont', (['"""OldEnglish"""', '(11)'], {}), "('OldEnglish', 11)\n", (481, 499), False, 'from PyQt5 import QtGui\n')]
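A minimal sketch of the same tooltip idiom without the icon-file dependency (assumes PyQt5 is installed and a display is available; note that QToolTip.setFont applies application-wide, while setToolTip is per widget):

import sys
import PyQt5.QtWidgets as qw

app = qw.QApplication(sys.argv)
label = qw.QLabel("Hover me")
label.setToolTip("Tooltips accept <b>rich text</b>.")  # per-widget tooltip
label.show()
sys.exit(app.exec_())  # exec_() returns the event-loop exit code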
|
import os
import psutil
class InfoOS:
'''
This class retrieves and prints information on the current OS.
    Public methods: none; all information is gathered by the constructor.
Attributes:
os: the current OS,
kernel: the current release,
arch: the current architecture,
threads: the number of available CPU threads,
freq: the current CPU frequency,
freqm: the maximum CPU frequency,
vmtot: the total virtual memory (in MB),
vmav: the available virtual memory (in MB).
'''
def __init__(self):
'''
Constructor of the class.
'''
self.os = os.uname().sysname
self.kernel = os.uname().release
self.arch = os.uname().machine
self.threads = psutil.cpu_count()
self.freq = psutil.cpu_freq().current
self.freqm = psutil.cpu_freq().max
self.vmtot = int(psutil.virtual_memory().total / 1024 / 1024)
self.vmav = int(psutil.virtual_memory().available / 1024 / 1024)
|
[
"psutil.cpu_freq",
"psutil.virtual_memory",
"os.uname",
"psutil.cpu_count"
] |
[((775, 793), 'psutil.cpu_count', 'psutil.cpu_count', ([], {}), '()\n', (791, 793), False, 'import psutil\n'), ((649, 659), 'os.uname', 'os.uname', ([], {}), '()\n', (657, 659), False, 'import os\n'), ((691, 701), 'os.uname', 'os.uname', ([], {}), '()\n', (699, 701), False, 'import os\n'), ((733, 743), 'os.uname', 'os.uname', ([], {}), '()\n', (741, 743), False, 'import os\n'), ((817, 834), 'psutil.cpu_freq', 'psutil.cpu_freq', ([], {}), '()\n', (832, 834), False, 'import psutil\n'), ((866, 883), 'psutil.cpu_freq', 'psutil.cpu_freq', ([], {}), '()\n', (881, 883), False, 'import psutil\n'), ((915, 938), 'psutil.virtual_memory', 'psutil.virtual_memory', ([], {}), '()\n', (936, 938), False, 'import psutil\n'), ((987, 1010), 'psutil.virtual_memory', 'psutil.virtual_memory', ([], {}), '()\n', (1008, 1010), False, 'import psutil\n')]
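Hypothetical usage of the class above (assumes psutil is installed; os.uname() limits it to POSIX systems):

info = InfoOS()
print("{0} {1} on {2}".format(info.os, info.kernel, info.arch))
print("{0} CPU threads @ {1:.0f}/{2:.0f} MHz".format(info.threads, info.freq, info.freqm))
print("memory: {0} of {1} MB available".format(info.vmav, info.vmtot))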
|
from fsspec.implementations.local import LocalFileSystem
from s3fs import S3FileSystem
def is_s3_path(path: str) -> bool:
if path.startswith("s3://"):
return True
return False
def bucket_name_from_path(path: str) -> str:
path_parts = path.replace("s3://", "").split("/")
return path_parts.pop(0)
def get_fs(path: str):
if is_s3_path(path):
return S3FileSystem()
return LocalFileSystem(auto_mkdir="True")
|
[
"s3fs.S3FileSystem",
"fsspec.implementations.local.LocalFileSystem"
] |
[((415, 449), 'fsspec.implementations.local.LocalFileSystem', 'LocalFileSystem', ([], {'auto_mkdir': '"""True"""'}), "(auto_mkdir='True')\n", (430, 449), False, 'from fsspec.implementations.local import LocalFileSystem\n'), ((389, 403), 's3fs.S3FileSystem', 'S3FileSystem', ([], {}), '()\n', (401, 403), False, 'from s3fs import S3FileSystem\n')]
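A short sketch of how get_fs() is meant to be used (paths are hypothetical):

fs = get_fs("s3://my-bucket/data/file.csv")                    # -> S3FileSystem
print(bucket_name_from_path("s3://my-bucket/data/file.csv"))   # my-bucket
fs = get_fs("/tmp/data/file.csv")                              # -> LocalFileSystem
with fs.open("/tmp/data/file.csv", "w") as f:                  # auto_mkdir creates /tmp/data
    f.write("hello")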
|
from django.db import models
from django.contrib.auth.models import User
class Book(models.Model):
title = models.CharField(max_length=100)
author = models.ForeignKey(User)
|
[
"django.db.models.CharField",
"django.db.models.ForeignKey"
] |
[((113, 145), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (129, 145), False, 'from django.db import models\n'), ((159, 182), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {}), '(User)\n', (176, 182), False, 'from django.db import models\n')]
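Note that the model above targets Django < 2.0; since Django 2.0, ForeignKey requires an explicit on_delete argument. A rough modern equivalent (sketch):

from django.conf import settings
from django.db import models

class Book(models.Model):
    title = models.CharField(max_length=100)
    author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)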
|
from flask import Flask, redirect, url_for, render_template
#import ip_finder
app = Flask(__name__)
@app.route("/<name>")
def home(name):
return render_template("index.html", content=name)
# @app.route("/<name>")
# def user(name):
# return f"Hello {name}!"
# # Working on it!
# @app.route("/<ipF>")
# def ip(ipF):
# return f"{ipF}"
# @app.route("/admin")
# def admin():
# return redirect(url_for("user", name="Admin!"))
if __name__ == "__main__":
app.run()
|
[
"flask.Flask",
"flask.render_template"
] |
[((84, 99), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (89, 99), False, 'from flask import Flask, redirect, url_for, render_template\n'), ((150, 193), 'flask.render_template', 'render_template', (['"""index.html"""'], {'content': 'name'}), "('index.html', content=name)\n", (165, 193), False, 'from flask import Flask, redirect, url_for, render_template\n')]
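A quick way to exercise the route above without a running server is Flask's built-in test client (sketch; assumes templates/index.html exists next to the app):

with app.test_client() as client:
    response = client.get("/Alice")  # dispatches to home(name="Alice")
    print(response.status_code)      # 200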
|
from django.conf.urls.defaults import patterns, include, url
from django.contrib.auth import views as authviews
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# favicon
(r'^favicon\.ico$', 'django.views.generic.simple.redirect_to', {'url': '/static/images/favicon.ico'}),
# default to projdb app
(r'^$', 'projdb.views.index'),
    (r'^login/$', 'workspace.views.Exit'),  # Workspace Logout menu item currently points to /login
# projects
(r'^projects/', include('projdb.urls')),
# workspace
(r'^workspace/', include('workspace.urls')),
# registration view is in projdb at the moment
(r'^accounts/register/$', 'projdb.views.register'),
# authentication
(r'^accounts/login/$', 'django.contrib.auth.views.login', {'template_name': 'login.html'}),
(r'^accounts/logout/$', 'django.contrib.auth.views.logout_then_login'),
(r'^accounts/password_change/$', 'django.contrib.auth.views.password_change'),
(r'^accounts/password_reset/$', 'django.contrib.auth.views.password_reset'),
(r'^accounts/password_reset/done/$', 'django.contrib.auth.views.password_reset_done'),
# admin
(r'^admin/', include(admin.site.urls)),
(r'^admin/doc/', include('django.contrib.admindocs.urls')),
)
|
[
"django.contrib.admin.autodiscover",
"django.conf.urls.defaults.include"
] |
[((203, 223), 'django.contrib.admin.autodiscover', 'admin.autodiscover', ([], {}), '()\n', (221, 223), False, 'from django.contrib import admin\n'), ((612, 634), 'django.conf.urls.defaults.include', 'include', (['"""projdb.urls"""'], {}), "('projdb.urls')\n", (619, 634), False, 'from django.conf.urls.defaults import patterns, include, url\n'), ((682, 707), 'django.conf.urls.defaults.include', 'include', (['"""workspace.urls"""'], {}), "('workspace.urls')\n", (689, 707), False, 'from django.conf.urls.defaults import patterns, include, url\n'), ((1354, 1378), 'django.conf.urls.defaults.include', 'include', (['admin.site.urls'], {}), '(admin.site.urls)\n', (1361, 1378), False, 'from django.conf.urls.defaults import patterns, include, url\n'), ((1403, 1443), 'django.conf.urls.defaults.include', 'include', (['"""django.contrib.admindocs.urls"""'], {}), "('django.contrib.admindocs.urls')\n", (1410, 1443), False, 'from django.conf.urls.defaults import patterns, include, url\n')]
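The patterns() helper and string view references above only work on Django <= 1.7. A rough modern equivalent for two of the routes (sketch; view import paths assumed):

from django.urls import include, path
from projdb import views as projdb_views

urlpatterns = [
    path('', projdb_views.index),
    path('projects/', include('projdb.urls')),
]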
|
import subprocess as sp
import platform
import os.path
import logging
def install_prerequisites(os_platform):
"""
Installs prerequisites for the landscape CLI tool
Returns: None
"""
install_gsed(os_platform)
install_minikube(os_platform)
install_lastpass(os_platform)
install_vault(os_platform)
install_kubectl(os_platform)
install_helm(os_platform)
install_landscaper(os_platform)
install_terraform(os_platform)
install_helm_plugins()
def install_gsed(os_platform):
"""Install minikube"""
install_cmds = {
'Darwin': 'brew install gnu-sed'
}
dst = '/usr/local/bin/gsed'
if not os.path.isfile(dst):
logging.info("installing gnu-sed")
sp.call(install_cmds[os_platform], shell=True)
else:
logging.info("gnu-sed already installed in {0}".format(dst))
def install_minikube(os_platform):
"""Install minikube"""
install_cmds = {
'Darwin': 'curl -LO https://storage.googleapis.com/minikube/releases/v0.22.3/minikube-darwin-amd64 && \
chmod +x minikube-darwin-amd64 && \
mv minikube-darwin-amd64 /usr/local/bin/minikube'
}
dst = '/usr/local/bin/minikube'
if not os.path.isfile(dst):
logging.info("installing minikube")
sp.call(install_cmds[os_platform], shell=True)
else:
logging.info("minikube already installed in {0}".format(dst))
def install_lastpass(os_platform):
"""Install LastPass"""
install_cmds = {
'Darwin': 'brew update && brew install lastpass-cli --with-pinentry'
}
dst = '/usr/local/bin/lpass'
if not os.path.isfile(dst):
logging.info("installing lastpass")
sp.call(install_cmds[os_platform], shell=True)
else:
logging.info("lastpass already installed in {0}".format(dst))
def install_vault(os_platform):
"""Installs Hashicorp Vault"""
install_cmds = {
'Darwin': 'curl -LO https://releases.hashicorp.com/vault/0.8.3/vault_0.8.3_darwin_amd64.zip && \
unzip -d /usr/local/bin/ vault_0.8.3_darwin_amd64.zip && \
rm vault_0.8.3_darwin_amd64.zip'
}
dst = '/usr/local/bin/vault'
if not os.path.isfile(dst):
logging.info("installing vault")
sp.call(install_cmds[os_platform], shell=True)
else:
logging.info("vault already installed in {0}".format(dst))
def install_kubectl(os_platform):
"""Installs Kubernetes kubectl"""
install_cmds = {
'Darwin': 'curl -LO https://storage.googleapis.com/kubernetes-release/release/v1.8.1/bin/darwin/amd64/kubectl && \
chmod +x kubectl && \
mv kubectl /usr/local/bin/'
}
dst = '/usr/local/bin/kubectl'
if not os.path.isfile(dst):
logging.info("installing kubectl")
sp.call(install_cmds[os_platform], shell=True)
else:
logging.info("kubectl already installed in {0}".format(dst))
def install_helm(os_platform):
"""Installs Kubernetes Helm"""
install_cmds = {
'Darwin': 'curl -LO https://storage.googleapis.com/kubernetes-helm/helm-v2.7.2-darwin-amd64.tar.gz && \
tar zvxf helm-v2.7.2-darwin-amd64.tar.gz --strip-components=1 darwin-amd64/helm && \
chmod +x helm && \
mv helm /usr/local/bin/ && \
rm helm-v2.7.2-darwin-amd64.tar.gz'
}
dst = '/usr/local/bin/helm'
if not os.path.isfile(dst):
logging.info("installing helm")
sp.call(install_cmds[os_platform], shell=True)
else:
logging.info("helm already installed in {0}".format(dst))
def install_landscaper(os_platform):
"""Installs Helm Landscaper"""
install_cmds = {
        'Darwin': 'curl -LO https://github.com/Eneco/landscaper/releases/download/1.0.11/landscaper-1.0.11-darwin-amd64.tar.gz && \
tar zvxf landscaper-1.0.11-darwin-amd64.tar.gz landscaper && \
mv landscaper /usr/local/bin/ && \
rm landscaper-1.0.11-darwin-amd64.tar.gz'
}
dst = '/usr/local/bin/landscaper'
if not os.path.isfile(dst):
logging.info("installing landscaper")
sp.call(install_cmds[os_platform], shell=True)
else:
logging.info("landscaper already installed in {0}".format(dst))
def install_terraform(os_platform):
"""Installs Terraform"""
install_cmds = {
        'Darwin': 'curl -LO https://releases.hashicorp.com/terraform/0.10.7/terraform_0.10.7_darwin_amd64.zip && \
unzip -d /usr/local/bin terraform_0.10.7_darwin_amd64.zip && \
rm terraform_0.10.7_darwin_amd64.zip'
}
dst = '/usr/local/bin/terraform'
if not os.path.isfile(dst):
logging.info("installing terraform")
sp.call(install_cmds[os_platform], shell=True)
else:
logging.info("terraform already installed in {0}".format(dst))
def install_helm_plugins():
"""Install helm plugins. Requires helm to be installed"""
plugins = {
'https://github.com/technosophos/helm-gpg': '0.1.0',
}
for plugin_url, version in plugins.items():
install_cmd = "helm plugin install {0} --version={1}".format(
plugin_url,
version)
logging.info("installing helm plugin with command: {0}".format(install_cmd))
sp.call(install_cmd, shell=True)
|
[
"logging.info",
"subprocess.call"
] |
[((690, 724), 'logging.info', 'logging.info', (['"""installing gnu-sed"""'], {}), "('installing gnu-sed')\n", (702, 724), False, 'import logging\n'), ((733, 779), 'subprocess.call', 'sp.call', (['install_cmds[os_platform]'], {'shell': '(True)'}), '(install_cmds[os_platform], shell=True)\n', (740, 779), True, 'import subprocess as sp\n'), ((1240, 1275), 'logging.info', 'logging.info', (['"""installing minikube"""'], {}), "('installing minikube')\n", (1252, 1275), False, 'import logging\n'), ((1284, 1330), 'subprocess.call', 'sp.call', (['install_cmds[os_platform]'], {'shell': '(True)'}), '(install_cmds[os_platform], shell=True)\n', (1291, 1330), True, 'import subprocess as sp\n'), ((1652, 1687), 'logging.info', 'logging.info', (['"""installing lastpass"""'], {}), "('installing lastpass')\n", (1664, 1687), False, 'import logging\n'), ((1696, 1742), 'subprocess.call', 'sp.call', (['install_cmds[os_platform]'], {'shell': '(True)'}), '(install_cmds[os_platform], shell=True)\n', (1703, 1742), True, 'import subprocess as sp\n'), ((2205, 2237), 'logging.info', 'logging.info', (['"""installing vault"""'], {}), "('installing vault')\n", (2217, 2237), False, 'import logging\n'), ((2246, 2292), 'subprocess.call', 'sp.call', (['install_cmds[os_platform]'], {'shell': '(True)'}), '(install_cmds[os_platform], shell=True)\n', (2253, 2292), True, 'import subprocess as sp\n'), ((2735, 2769), 'logging.info', 'logging.info', (['"""installing kubectl"""'], {}), "('installing kubectl')\n", (2747, 2769), False, 'import logging\n'), ((2778, 2824), 'subprocess.call', 'sp.call', (['install_cmds[os_platform]'], {'shell': '(True)'}), '(install_cmds[os_platform], shell=True)\n', (2785, 2824), True, 'import subprocess as sp\n'), ((3384, 3415), 'logging.info', 'logging.info', (['"""installing helm"""'], {}), "('installing helm')\n", (3396, 3415), False, 'import logging\n'), ((3424, 3470), 'subprocess.call', 'sp.call', (['install_cmds[os_platform]'], {'shell': '(True)'}), '(install_cmds[os_platform], shell=True)\n', (3431, 3470), True, 'import subprocess as sp\n'), ((4022, 4059), 'logging.info', 'logging.info', (['"""installing landscaper"""'], {}), "('installing landscaper')\n", (4034, 4059), False, 'import logging\n'), ((4068, 4114), 'subprocess.call', 'sp.call', (['install_cmds[os_platform]'], {'shell': '(True)'}), '(install_cmds[os_platform], shell=True)\n', (4075, 4114), True, 'import subprocess as sp\n'), ((4600, 4636), 'logging.info', 'logging.info', (['"""installing terraform"""'], {}), "('installing terraform')\n", (4612, 4636), False, 'import logging\n'), ((4645, 4691), 'subprocess.call', 'sp.call', (['install_cmds[os_platform]'], {'shell': '(True)'}), '(install_cmds[os_platform], shell=True)\n', (4652, 4691), True, 'import subprocess as sp\n'), ((5284, 5316), 'subprocess.call', 'sp.call', (['install_cmd'], {'shell': '(True)'}), '(install_cmd, shell=True)\n', (5291, 5316), True, 'import subprocess as sp\n')]
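Every installer above repeats one check-then-install pattern; a table-driven sketch of the same idea (install command abbreviated, Darwin-only like the original):

import logging
import os.path
import subprocess as sp

def ensure_tool(name, dst, install_cmds, os_platform):
    """Run the platform's install command unless dst already exists."""
    if not os.path.isfile(dst):
        logging.info("installing {0}".format(name))
        sp.call(install_cmds[os_platform], shell=True)
    else:
        logging.info("{0} already installed in {1}".format(name, dst))

ensure_tool('gnu-sed', '/usr/local/bin/gsed',
            {'Darwin': 'brew install gnu-sed'}, 'Darwin')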
|
# -*- encoding: utf-8 -*-
import os
import subprocess
import settings
import git
import requests
def clone_challenge(challenge_repository, challenge_name):
try:
git.Git().clone(challenge_repository)
if not os.path.exists(challenge_name):
return "Can't download this repository", True
except git.GitCommandError:
pass
return '', False
def _run_make_command(challenge_name, make_parameter, background=False):
make_command = ["make", "-C", "{directory}".format(directory=challenge_name), make_parameter]
try:
if background:
            bg_process = subprocess.Popen(make_command, stdin=None, stdout=None, stderr=None)
            # Popen.returncode is None until the process is polled/waited on,
            # so poll() first and only react to a real non-zero early exit
            if bg_process.poll() is not None and bg_process.returncode != 0:
                bg_process.kill()
else:
output = subprocess.check_output(make_command, stderr=subprocess.STDOUT)
return output, False
except Exception as e:
return "Have a error in make {parameter} error: {error}".format(parameter=make_parameter, error=e), True
def run_make_setup(challenge_name):
return _run_make_command(challenge_name, "setup")
def run_make_run(challenge_name):
return _run_make_command(challenge_name, "run", background=True)
def send_status(challenge_name, status_json):
requests.post(settings.API_URL, status_json)
def main():
status_json = dict()
challenge_repository = os.environ.get("REPO")
challenge_name = challenge_repository.split('/')[-1].replace('.git', '')
msg, error = clone_challenge(challenge_repository, challenge_name)
if error:
status_json['clone_error'] = msg
return
msg, setup_error = run_make_setup(challenge_name)
status_json['setup_output'] = msg
if setup_error:
return
run_make_run(challenge_name)
send_status(challenge_name, status_json)
if __name__ == '__main__':
status = main()
|
[
"git.Git",
"subprocess.Popen",
"subprocess.check_output",
"os.path.exists",
"os.environ.get",
"requests.post"
] |
[((1283, 1327), 'requests.post', 'requests.post', (['settings.API_URL', 'status_json'], {}), '(settings.API_URL, status_json)\n', (1296, 1327), False, 'import requests\n'), ((1394, 1416), 'os.environ.get', 'os.environ.get', (['"""REPO"""'], {}), "('REPO')\n", (1408, 1416), False, 'import os\n'), ((230, 260), 'os.path.exists', 'os.path.exists', (['challenge_name'], {}), '(challenge_name)\n', (244, 260), False, 'import os\n'), ((616, 684), 'subprocess.Popen', 'subprocess.Popen', (['make_command'], {'stdin': 'None', 'stdout': 'None', 'stderr': 'None'}), '(make_command, stdin=None, stdout=None, stderr=None)\n', (632, 684), False, 'import subprocess\n'), ((797, 860), 'subprocess.check_output', 'subprocess.check_output', (['make_command'], {'stderr': 'subprocess.STDOUT'}), '(make_command, stderr=subprocess.STDOUT)\n', (820, 860), False, 'import subprocess\n'), ((177, 186), 'git.Git', 'git.Git', ([], {}), '()\n', (184, 186), False, 'import git\n')]
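The script above is driven entirely by the REPO environment variable; the repository-name derivation, isolated (the URL is a made-up example):

repo = "https://github.com/example/my-challenge.git"
name = repo.split('/')[-1].replace('.git', '')
print(name)  # my-challenge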
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
import os
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.conf import settings
from awx.main.models import Instance
class Command(BaseCommand):
"""
Internal tower command.
Register this instance with the database for HA tracking.
"""
help = (
"Add instance to the database. "
"When no options are provided, values from Django settings will be used to register the current system, "
"as well as the default queues if needed (only used or enabled for Kubernetes installs). "
"Override with `--hostname`."
)
def add_arguments(self, parser):
parser.add_argument('--hostname', dest='hostname', type=str, help="Hostname used during provisioning")
parser.add_argument('--node_type', type=str, default='hybrid', choices=['control', 'execution', 'hop', 'hybrid'], help="Instance Node type")
parser.add_argument('--uuid', type=str, help="Instance UUID")
def _register_hostname(self, hostname, node_type, uuid):
if not hostname:
if not settings.AWX_AUTO_DEPROVISION_INSTANCES:
raise CommandError('Registering with values from settings only intended for use in K8s installs')
from awx.main.management.commands.register_queue import RegisterQueue
(changed, instance) = Instance.objects.register(ip_address=os.environ.get('MY_POD_IP'), node_type='control', uuid=settings.SYSTEM_UUID)
RegisterQueue(settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME, 100, 0, [], is_container_group=False).register()
RegisterQueue(
settings.DEFAULT_EXECUTION_QUEUE_NAME, 100, 0, [], is_container_group=True, pod_spec_override=settings.DEFAULT_EXECUTION_QUEUE_POD_SPEC_OVERRIDE
).register()
else:
(changed, instance) = Instance.objects.register(hostname=hostname, node_type=node_type, uuid=uuid)
if changed:
print("Successfully registered instance {}".format(hostname))
else:
print("Instance already registered {}".format(instance.hostname))
self.changed = changed
@transaction.atomic
def handle(self, **options):
self.changed = False
self._register_hostname(options.get('hostname'), options.get('node_type'), options.get('uuid'))
if self.changed:
print("(changed: True)")
|
[
"os.environ.get",
"awx.main.models.Instance.objects.register",
"django.core.management.base.CommandError",
"awx.main.management.commands.register_queue.RegisterQueue"
] |
[((1928, 2004), 'awx.main.models.Instance.objects.register', 'Instance.objects.register', ([], {'hostname': 'hostname', 'node_type': 'node_type', 'uuid': 'uuid'}), '(hostname=hostname, node_type=node_type, uuid=uuid)\n', (1953, 2004), False, 'from awx.main.models import Instance\n'), ((1225, 1326), 'django.core.management.base.CommandError', 'CommandError', (['"""Registering with values from settings only intended for use in K8s installs"""'], {}), "(\n 'Registering with values from settings only intended for use in K8s installs'\n )\n", (1237, 1326), False, 'from django.core.management.base import BaseCommand, CommandError\n'), ((1472, 1499), 'os.environ.get', 'os.environ.get', (['"""MY_POD_IP"""'], {}), "('MY_POD_IP')\n", (1486, 1499), False, 'import os\n'), ((1561, 1659), 'awx.main.management.commands.register_queue.RegisterQueue', 'RegisterQueue', (['settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME', '(100)', '(0)', '[]'], {'is_container_group': '(False)'}), '(settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME, 100, 0, [],\n is_container_group=False)\n', (1574, 1659), False, 'from awx.main.management.commands.register_queue import RegisterQueue\n'), ((1679, 1847), 'awx.main.management.commands.register_queue.RegisterQueue', 'RegisterQueue', (['settings.DEFAULT_EXECUTION_QUEUE_NAME', '(100)', '(0)', '[]'], {'is_container_group': '(True)', 'pod_spec_override': 'settings.DEFAULT_EXECUTION_QUEUE_POD_SPEC_OVERRIDE'}), '(settings.DEFAULT_EXECUTION_QUEUE_NAME, 100, 0, [],\n is_container_group=True, pod_spec_override=settings.\n DEFAULT_EXECUTION_QUEUE_POD_SPEC_OVERRIDE)\n', (1692, 1847), False, 'from awx.main.management.commands.register_queue import RegisterQueue\n')]
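Commands like this are normally driven through Django's call_command; a hedged sketch (the registered command name comes from the module's file name, which is not shown here and is assumed below):

from django.core.management import call_command

call_command('register_instance', hostname='node1', node_type='execution')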
|
# reduced from https://github.com/blainegarrett/urequests2
import binascii
always_safe = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'abcdefghijklmnopqrstuvwxyz'
'0123456789' '_.-')
def quote(s):
res = []
for c in s:
if c in always_safe:
res.append(c)
continue
        res.append('%%%02x' % ord(c))  # zero-pad so e.g. '\n' encodes as '%0a', not '%a'
return ''.join(res)
def quote_plus(s):
    # quote first, then map encoded spaces to '+'; replacing spaces with '+'
    # before quoting would percent-encode the '+' itself
    return quote(s).replace('%20', '+')
def urlencode(query):
if isinstance(query, dict):
query = query.items()
l = []
for k, v in query:
if not isinstance(v, list):
v = [v]
for value in v:
k = quote_plus(str(k))
v = quote_plus(str(value))
l.append(k + '=' + v)
return '&'.join(l)
def b64encode(s):
"""Reproduced from micropython base64"""
if not isinstance(s, (bytes, bytearray)):
raise TypeError("expected bytes, not %s" % s.__class__.__name__)
# Strip off the trailing newline
encoded = binascii.b2a_base64(s)[:-1]
return encoded
|
[
"binascii.b2a_base64"
] |
[((1033, 1055), 'binascii.b2a_base64', 'binascii.b2a_base64', (['s'], {}), '(s)\n', (1052, 1055), False, 'import binascii\n')]
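Quick checks of the helpers above (outputs follow this reduced implementation, which percent-encodes more eagerly than CPython's urllib):

print(quote('a b/c'))           # a%20b%2fc
print(quote_plus('a b/c'))      # a+b%2fc
print(urlencode({'q': 'a b'}))  # q=a+b
print(b64encode(b'hi'))         # b'aGk='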
|
from django.http import HttpResponse
from fluent_pages.extensions import PageTypePlugin, page_type_pool
from .models import TextFile
@page_type_pool.register
class TextFilePlugin(PageTypePlugin):
model = TextFile
is_file = True
def get_response(self, request, textfile, **kwargs):
content_type = textfile.content_type
if content_type in TextFile.UTF8_TYPES:
content_type += "; charset=utf-8" # going to enforce this.
return HttpResponse(content=textfile.content, content_type=content_type)
|
[
"django.http.HttpResponse"
] |
[((479, 544), 'django.http.HttpResponse', 'HttpResponse', ([], {'content': 'textfile.content', 'content_type': 'content_type'}), '(content=textfile.content, content_type=content_type)\n', (491, 544), False, 'from django.http import HttpResponse\n')]
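The explicit charset suffix is what stops Django from falling back to DEFAULT_CHARSET for the response; the same idea in isolation (sketch; runnable inside a configured Django project):

from django.http import HttpResponse

resp = HttpResponse(content="héllo", content_type="text/plain; charset=utf-8")
print(resp['Content-Type'])  # text/plain; charset=utf-8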
|
"""
Comparing different kernels using cv2.filter2D()
"""
# Import required packages:
import cv2
import numpy as np
import matplotlib.pyplot as plt
def show_with_matplotlib(color_img, title, pos):
"""Shows an image using matplotlib capabilities"""
# Convert BGR image to RGB
img_RGB = color_img[:, :, ::-1]
ax = plt.subplot(3, 4, pos)
plt.imshow(img_RGB)
plt.title(title)
plt.axis('off')
# Create the dimensions of the figure and set title:
plt.figure(figsize=(12, 6))
plt.suptitle("Comparing different kernels using cv2.filter2D()", fontsize=14, fontweight='bold')
# Load the original image:
image = cv2.imread('cat-face.png')
# We try different kernels
# Identity kernel (does not modify the image)
kernel_identity = np.array([[0, 0, 0],
[0, 1, 0],
[0, 0, 0]])
# Try different kernels for edge detection:
kernel_edge_detection_1 = np.array([[1, 0, -1],
[0, 0, 0],
[-1, 0, 1]])
kernel_edge_detection_2 = np.array([[0, 1, 0],
[1, -4, 1],
[0, 1, 0]])
kernel_edge_detection_3 = np.array([[-1, -1, -1],
[-1, 8, -1],
[-1, -1, -1]])
# Try different kernels for sharpening:
kernel_sharpen = np.array([[0, -1, 0],
[-1, 5, -1],
[0, -1, 0]])
kernel_unsharp_masking = -1 / 256 * np.array([[1, 4, 6, 4, 1],
[4, 16, 24, 16, 4],
[6, 24, -476, 24, 6],
[4, 16, 24, 16, 4],
[1, 4, 6, 4, 1]])
# Try different kernels for smoothing:
kernel_blur = 1 / 9 * np.array([[1, 1, 1],
[1, 1, 1],
[1, 1, 1]])
gaussian_blur = 1 / 16 * np.array([[1, 2, 1],
[2, 4, 2],
[1, 2, 1]])
# Try a kernel for embossing:
kernel_emboss = np.array([[-2, -1, 0],
[-1, 1, 1],
[0, 1, 2]])
# Try different kernels for edge detection:
sobel_x_kernel = np.array([[1, 0, -1],
[2, 0, -2],
[1, 0, -1]])
sobel_y_kernel = np.array([[1, 2, 1],
[0, 0, 0],
[-1, -2, -1]])
outline_kernel = np.array([[-1, -1, -1],
[-1, 8, -1],
[-1, -1, -1]])
# Apply all the kernels:
original_image = cv2.filter2D(image, -1, kernel_identity)
edge_image_1 = cv2.filter2D(image, -1, kernel_edge_detection_1)
edge_image_2 = cv2.filter2D(image, -1, kernel_edge_detection_2)
edge_image_3 = cv2.filter2D(image, -1, kernel_edge_detection_3)
sharpen_image = cv2.filter2D(image, -1, kernel_sharpen)
unsharp_masking_image = cv2.filter2D(image, -1, kernel_unsharp_masking)
blur_image = cv2.filter2D(image, -1, kernel_blur)
gaussian_blur_image = cv2.filter2D(image, -1, gaussian_blur)
emboss_image = cv2.filter2D(image, -1, kernel_emboss)
sobel_x_image = cv2.filter2D(image, -1, sobel_x_kernel)
sobel_y_image = cv2.filter2D(image, -1, sobel_y_kernel)
outline_image = cv2.filter2D(image, -1, outline_kernel)
# Show all the images:
show_with_matplotlib(original_image, "identity kernel", 1)
show_with_matplotlib(edge_image_1, "edge detection 1", 2)
show_with_matplotlib(edge_image_2, "edge detection 2", 3)
show_with_matplotlib(edge_image_3, "edge detection 3", 4)
show_with_matplotlib(sharpen_image, "sharpen", 5)
show_with_matplotlib(unsharp_masking_image, "unsharp masking", 6)
show_with_matplotlib(blur_image, "blur image", 7)
show_with_matplotlib(gaussian_blur_image, "gaussian blur image", 8)
show_with_matplotlib(emboss_image, "emboss image", 9)
show_with_matplotlib(sobel_x_image, "sobel x image", 10)
show_with_matplotlib(sobel_y_image, "sobel y image", 11)
show_with_matplotlib(outline_image, "outline image", 12)
# Show the Figure:
plt.show()
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.show",
"cv2.filter2D",
"matplotlib.pyplot.suptitle",
"matplotlib.pyplot.imshow",
"matplotlib.pyplot.axis",
"cv2.imread",
"matplotlib.pyplot.figure",
"numpy.array"
] |
[((475, 502), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 6)'}), '(figsize=(12, 6))\n', (485, 502), True, 'import matplotlib.pyplot as plt\n'), ((503, 604), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (['"""Comparing different kernels using cv2.filter2D()"""'], {'fontsize': '(14)', 'fontweight': '"""bold"""'}), "('Comparing different kernels using cv2.filter2D()', fontsize=\n 14, fontweight='bold')\n", (515, 604), True, 'import matplotlib.pyplot as plt\n'), ((636, 662), 'cv2.imread', 'cv2.imread', (['"""cat-face.png"""'], {}), "('cat-face.png')\n", (646, 662), False, 'import cv2\n'), ((755, 798), 'numpy.array', 'np.array', (['[[0, 0, 0], [0, 1, 0], [0, 0, 0]]'], {}), '([[0, 0, 0], [0, 1, 0], [0, 0, 0]])\n', (763, 798), True, 'import numpy as np\n'), ((926, 971), 'numpy.array', 'np.array', (['[[1, 0, -1], [0, 0, 0], [-1, 0, 1]]'], {}), '([[1, 0, -1], [0, 0, 0], [-1, 0, 1]])\n', (934, 971), True, 'import numpy as np\n'), ((1071, 1115), 'numpy.array', 'np.array', (['[[0, 1, 0], [1, -4, 1], [0, 1, 0]]'], {}), '([[0, 1, 0], [1, -4, 1], [0, 1, 0]])\n', (1079, 1115), True, 'import numpy as np\n'), ((1215, 1266), 'numpy.array', 'np.array', (['[[-1, -1, -1], [-1, 8, -1], [-1, -1, -1]]'], {}), '([[-1, -1, -1], [-1, 8, -1], [-1, -1, -1]])\n', (1223, 1266), True, 'import numpy as np\n'), ((1397, 1444), 'numpy.array', 'np.array', (['[[0, -1, 0], [-1, 5, -1], [0, -1, 0]]'], {}), '([[0, -1, 0], [-1, 5, -1], [0, -1, 0]])\n', (1405, 1444), True, 'import numpy as np\n'), ((2184, 2230), 'numpy.array', 'np.array', (['[[-2, -1, 0], [-1, 1, 1], [0, 1, 2]]'], {}), '([[-2, -1, 0], [-1, 1, 1], [0, 1, 2]])\n', (2192, 2230), True, 'import numpy as np\n'), ((2345, 2391), 'numpy.array', 'np.array', (['[[1, 0, -1], [2, 0, -2], [1, 0, -1]]'], {}), '([[1, 0, -1], [2, 0, -2], [1, 0, -1]])\n', (2353, 2391), True, 'import numpy as np\n'), ((2464, 2510), 'numpy.array', 'np.array', (['[[1, 2, 1], [0, 0, 0], [-1, -2, -1]]'], {}), '([[1, 2, 1], [0, 0, 0], [-1, -2, -1]])\n', (2472, 2510), True, 'import numpy as np\n'), ((2583, 2634), 'numpy.array', 'np.array', (['[[-1, -1, -1], [-1, 8, -1], [-1, -1, -1]]'], {}), '([[-1, -1, -1], [-1, 8, -1], [-1, -1, -1]])\n', (2591, 2634), True, 'import numpy as np\n'), ((2732, 2772), 'cv2.filter2D', 'cv2.filter2D', (['image', '(-1)', 'kernel_identity'], {}), '(image, -1, kernel_identity)\n', (2744, 2772), False, 'import cv2\n'), ((2788, 2836), 'cv2.filter2D', 'cv2.filter2D', (['image', '(-1)', 'kernel_edge_detection_1'], {}), '(image, -1, kernel_edge_detection_1)\n', (2800, 2836), False, 'import cv2\n'), ((2852, 2900), 'cv2.filter2D', 'cv2.filter2D', (['image', '(-1)', 'kernel_edge_detection_2'], {}), '(image, -1, kernel_edge_detection_2)\n', (2864, 2900), False, 'import cv2\n'), ((2916, 2964), 'cv2.filter2D', 'cv2.filter2D', (['image', '(-1)', 'kernel_edge_detection_3'], {}), '(image, -1, kernel_edge_detection_3)\n', (2928, 2964), False, 'import cv2\n'), ((2981, 3020), 'cv2.filter2D', 'cv2.filter2D', (['image', '(-1)', 'kernel_sharpen'], {}), '(image, -1, kernel_sharpen)\n', (2993, 3020), False, 'import cv2\n'), ((3045, 3092), 'cv2.filter2D', 'cv2.filter2D', (['image', '(-1)', 'kernel_unsharp_masking'], {}), '(image, -1, kernel_unsharp_masking)\n', (3057, 3092), False, 'import cv2\n'), ((3106, 3142), 'cv2.filter2D', 'cv2.filter2D', (['image', '(-1)', 'kernel_blur'], {}), '(image, -1, kernel_blur)\n', (3118, 3142), False, 'import cv2\n'), ((3165, 3203), 'cv2.filter2D', 'cv2.filter2D', (['image', '(-1)', 'gaussian_blur'], {}), '(image, -1, gaussian_blur)\n', (3177, 3203), 
False, 'import cv2\n'), ((3219, 3257), 'cv2.filter2D', 'cv2.filter2D', (['image', '(-1)', 'kernel_emboss'], {}), '(image, -1, kernel_emboss)\n', (3231, 3257), False, 'import cv2\n'), ((3274, 3313), 'cv2.filter2D', 'cv2.filter2D', (['image', '(-1)', 'sobel_x_kernel'], {}), '(image, -1, sobel_x_kernel)\n', (3286, 3313), False, 'import cv2\n'), ((3330, 3369), 'cv2.filter2D', 'cv2.filter2D', (['image', '(-1)', 'sobel_y_kernel'], {}), '(image, -1, sobel_y_kernel)\n', (3342, 3369), False, 'import cv2\n'), ((3386, 3425), 'cv2.filter2D', 'cv2.filter2D', (['image', '(-1)', 'outline_kernel'], {}), '(image, -1, outline_kernel)\n', (3398, 3425), False, 'import cv2\n'), ((4162, 4172), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4170, 4172), True, 'import matplotlib.pyplot as plt\n'), ((332, 354), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(3)', '(4)', 'pos'], {}), '(3, 4, pos)\n', (343, 354), True, 'import matplotlib.pyplot as plt\n'), ((359, 378), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img_RGB'], {}), '(img_RGB)\n', (369, 378), True, 'import matplotlib.pyplot as plt\n'), ((383, 399), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (392, 399), True, 'import matplotlib.pyplot as plt\n'), ((404, 419), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (412, 419), True, 'import matplotlib.pyplot as plt\n'), ((1536, 1646), 'numpy.array', 'np.array', (['[[1, 4, 6, 4, 1], [4, 16, 24, 16, 4], [6, 24, -476, 24, 6], [4, 16, 24, 16,\n 4], [1, 4, 6, 4, 1]]'], {}), '([[1, 4, 6, 4, 1], [4, 16, 24, 16, 4], [6, 24, -476, 24, 6], [4, 16,\n 24, 16, 4], [1, 4, 6, 4, 1]])\n', (1544, 1646), True, 'import numpy as np\n'), ((1889, 1932), 'numpy.array', 'np.array', (['[[1, 1, 1], [1, 1, 1], [1, 1, 1]]'], {}), '([[1, 1, 1], [1, 1, 1], [1, 1, 1]])\n', (1897, 1932), True, 'import numpy as np\n'), ((2023, 2066), 'numpy.array', 'np.array', (['[[1, 2, 1], [2, 4, 2], [1, 2, 1]]'], {}), '([[1, 2, 1], [2, 4, 2], [1, 2, 1]])\n', (2031, 2066), True, 'import numpy as np\n')]
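For reference, ddepth=-1 in cv2.filter2D keeps the input depth, and each output pixel is the correlation of the kernel with the pixel's neighborhood. A tiny self-contained check that needs no image file:

import cv2
import numpy as np

img = np.full((5, 5), 10, dtype=np.uint8)
identity = np.array([[0, 0, 0], [0, 1, 0], [0, 0, 0]])
assert (cv2.filter2D(img, -1, identity) == img).all()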
|
from flask import Flask,render_template
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
@app.route("/welcome")
def welcome():
return render_template("welcome.html")
if __name__ == '__main__':
app.run(host='0.0.0.0',debug=True)
|
[
"flask.Flask",
"flask.render_template"
] |
[((47, 62), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (52, 62), False, 'from flask import Flask, render_template\n'), ((168, 199), 'flask.render_template', 'render_template', (['"""welcome.html"""'], {}), "('welcome.html')\n", (183, 199), False, 'from flask import Flask, render_template\n')]
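host='0.0.0.0' binds all interfaces, so the dev server is reachable from other machines. A quick check with the test client (no server needed):

with app.test_client() as c:
    assert c.get("/").data == b"Hello World!"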
|
from collections import defaultdict
from .metrics import calculate_metrics_list
class Graph:
"""
The code for this class is based on geeksforgeeks.com
"""
def __init__(self, vertices):
self.graph = defaultdict(list)
self.V = vertices
def addEdge(self, u, v):
self.graph[u].append([v])
def topologicalSortUtil(self, v, visited, stack):
visited[v] = True
for i in self.graph[v]:
if not visited[i[0]]:
self.topologicalSortUtil(i[0], visited, stack)
stack.insert(0, v)
def topologicalSort(self):
visited = [False] * self.V
stack = []
for i in range(self.V):
if not visited[i]:
self.topologicalSortUtil(i, visited, stack)
return stack
def isCyclicUtil(self, v, visited, recStack):
visited[v] = True
recStack[v] = True
for neighbour in self.graph[v]:
if not visited[neighbour[0]]:
if self.isCyclicUtil(
neighbour[0], visited, recStack):
return True
elif recStack[neighbour[0]]:
self.graph[v].remove(neighbour)
return True
recStack[v] = False
return False
def isCyclic(self):
visited = [False] * self.V
recStack = [False] * self.V
for node in range(self.V):
if not visited[node]:
if self.isCyclicUtil(node, visited, recStack):
return True
return False
def convert_to_graph(logits, positions, flipped=False):
    # get the number of vertices (len(logits) = n*(n-1)/2)
nvert = int((2 * len(logits)) ** 0.5)+1
# create graph obj
g = Graph(nvert)
# read pred label
for logit, pos in zip(logits, positions):
if flipped:
pred = 1 if logit < 0 else 0
else:
pred = 1 if logit > 0 else 0
pos_s1, pos_s2 = pos[0], pos[1]
if pred == 0:
g.addEdge(pos_s1, pos_s2)
elif pred == 1:
g.addEdge(pos_s2, pos_s1)
    # isCyclic() removes one cycle-closing edge per call; repeat until acyclic
    while g.isCyclic():
        pass
order = g.topologicalSort()
gold_order = list(range(nvert))
return calculate_metrics_list(order, gold_order)
|
[
"collections.defaultdict"
] |
[((226, 243), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (237, 243), False, 'from collections import defaultdict\n')]
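A small standalone usage sketch of the Graph helper (the calculate_metrics_list import from .metrics is not reproduced here):

g = Graph(3)
g.addEdge(0, 1)
g.addEdge(1, 2)
print(g.topologicalSort())  # [0, 1, 2]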
|
import sys
from sqlalchemy import create_engine
import pg_copy
if __name__ == "__main__":
engine = create_engine(sys.argv[1])
target_table = 'example_table'
objs = [
{
'id': i,
'description': f'record description {i}'
} for i in range(100_000)
]
pg_copy.insert_with_copy(engine, objs, target_table)
|
[
"sqlalchemy.create_engine",
"pg_copy.insert_with_copy"
] |
[((106, 132), 'sqlalchemy.create_engine', 'create_engine', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (119, 132), False, 'from sqlalchemy import create_engine\n'), ((311, 363), 'pg_copy.insert_with_copy', 'pg_copy.insert_with_copy', (['engine', 'objs', 'target_table'], {}), '(engine, objs, target_table)\n', (335, 363), False, 'import pg_copy\n')]
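pg_copy here is a project-local helper, not a PyPI package; a rough sketch of the COPY mechanics it presumably wraps, using the psycopg2 connection underneath SQLAlchemy (column list hardcoded to match the records above):

import csv
import io

def copy_rows(engine, objs, table):
    buf = io.StringIO()
    writer = csv.DictWriter(buf, fieldnames=['id', 'description'])
    writer.writerows(objs)
    buf.seek(0)
    conn = engine.raw_connection()
    try:
        with conn.cursor() as cur:  # psycopg2 cursor exposes copy_expert
            cur.copy_expert("COPY {0} (id, description) FROM STDIN WITH CSV".format(table), buf)
        conn.commit()
    finally:
        conn.close()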
|
import os
import re
import sys
import argparse
import json
import numpy as np
from glob import glob
import cv2
from utils.plot_utils import RandomColor
def parse_args():
parser = argparse.ArgumentParser(
description='Monocular 3D Tracking Visualizer',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('set', choices=['gta', 'kitti'])
parser.add_argument('split', choices=['train', 'val', 'test'],
help='Which data split to use in testing')
parser.add_argument('--session', default='623',
help='Name of the session, to separate exp')
parser.add_argument('--epoch', default='100',
help='How many epochs you used to separate exp')
parser.add_argument('--flag', default='kf3doccdeep_age15_aff0.1_hit0_80m_pd',
help='Flags for running evaluation code')
parser.add_argument('--save_vid', action='store_true', default=False,
help='Flags for saving video')
parser.add_argument('--save_txt', action='store_true', default=False,
help='Flags for saving txt')
parser.add_argument('--dry_run', action='store_true', default=False,
help='Show command without running')
parser.add_argument('--overwrite', action='store_true', default=False,
help='Overwrite the output files')
args = parser.parse_args()
return args
print(' '.join(sys.argv))
args = parse_args()
if args.set == 'kitti':
IMAGE_PATH = 'data/kitti_tracking/{SPLIT}ing/image_02/{SEQ}/*.png'.format(**{'SPLIT': args.split, 'SEQ': '{:04d}'})
re_pattern = re.compile('[0-9]{4}')
else:
IMAGE_PATH = 'data/gta5_tracking/{SPLIT}/image/{SEQ}/*.jpg'.format(**{'SPLIT': args.split, 'SEQ': '{}'})
re_pattern = re.compile('rec_(.{8})_(.+)_(.+)h(.+)m_(.+[0-9])')
SAVE_PATH = 'output/{SESS}_{EP}_{SET}_{SPLIT}_set/'.format(
**{'SESS': args.session, 'EP': args.epoch, 'SET': args.set, 'SPLIT': args.split})
out_name = '{SESS}_{EP}_{SET}_{SETTING}'.format(
**{'SESS': args.session, 'EP': args.epoch, 'SET': args.set, 'SETTING': args.flag})
FONT = cv2.FONT_HERSHEY_SIMPLEX
FOURCC = cv2.VideoWriter_fourcc(*'mp4v')
fps = 15
np.random.seed(777)
rm_color = RandomColor(30)
tid2color = {}
def mkdir(path):
if not os.path.isdir(path):
print("Making directory {}".format(path))
os.makedirs(path) # Use with care
def gen_result(out_path, out_name, save_vid=False, save_txt=True,
dry_run=False, overwrite=False):
print("Reading meta data...")
info = json.load(open('{}{}.json'.format(out_path, out_name), 'r'))
if not dry_run: mkdir('{}{}/data/'.format(out_path, out_name))
for seqid in range(len(info)):
file_seq = re_pattern.search(info[seqid]['filename']).group(0)
print('Reading {} from {}{}...'.format(file_seq, out_path, out_name))
if dry_run:
continue
seqout = []
vid_name = '{}{}/data/{}.mp4'.format(out_path, out_name, file_seq)
txt_name = '{}{}/data/{}.txt'.format(out_path, out_name, file_seq)
if not overwrite:
if not os.path.isfile(txt_name) and save_txt:
pass
elif not os.path.isfile(vid_name) and save_vid:
pass
else:
print("SKIP running. Generated file {} Found".format(txt_name))
continue
if save_vid:
images = sorted(glob(IMAGE_PATH.format(file_seq)))
img = cv2.imread(images[0])
vidsize = (img.shape[1], img.shape[0]) # height, width
out = cv2.VideoWriter(vid_name, FOURCC, fps, vidsize)
demoinfo = info[seqid]['frames']
for idx, frame in enumerate(demoinfo):
if save_vid:
img = cv2.imread(images[idx])
img = cv2.putText(img, str(idx), (20, 30),
cv2.FONT_HERSHEY_COMPLEX, 1,
(180, 180, 180), 2)
for trk in frame['hypotheses']:
x1, y1, x2, y2, conf = trk['det_box']
xc, yc = trk['xc'], trk['yc']
if save_vid:
if trk['id'] not in tid2color:
tid2color[trk['id']] = rm_color.get_random_color(scale=255)
img = cv2.rectangle(img, (int(xc-1), int(yc-1)), (int(xc+1), int(yc+1)),
tid2color[trk['id']], 2)
img = cv2.rectangle(img, (int(x1), int(y1)), (int(x2), int(y2)),
tid2color[trk['id']], 4)
img = cv2.putText(img, str(int(trk['id'])), (int(x1), int(y1)),
cv2.FONT_HERSHEY_COMPLEX, 1,
tid2color[trk['id']], 2)
img = cv2.putText(img, str(int(trk['depth'])), (int(x2)-14, int(y2)),
cv2.FONT_HERSHEY_COMPLEX, 0.8,
tid2color[trk['id']], 2)
if save_txt:
'''
submit_txt = ' '.join([
str(idx),
str(int(trk['id'])),
'Car',
'-1 -1',
trk['alpha'],
str(x1), str(y1), str(x2), str(y2),
trk['dim'],
trk['loc'],
trk['rot'],
str(conf)])
'''
submit_txt = ' '.join([
str(idx),
str(int(trk['id'])),
'Car',
'-1 -1 -10',
str(x1), str(y1), str(x2), str(y2),
'-1 -1 -1',
'-1000 -1000 -1000 -10',
str(conf)])
#'''
submit_txt += '\n'
seqout.append(submit_txt)
if save_vid: out.write(img)
if save_txt:
print("{} saved.".format(txt_name))
with open(txt_name, 'w') as f:
f.writelines(seqout)
if save_vid:
print("{} saved.".format(vid_name))
out.release()
if __name__ == '__main__':
# Not using out_name, too slow
output_list = [os.path.splitext(item)[0] for item in os.listdir(SAVE_PATH) if item.endswith('_pd.json')]
my_list = ['none', 'kf2ddeep', 'kf3doccdeep', 'lstmdeep', 'lstmoccdeep']
for dir_name in output_list:
print(dir_name)
save_vid = args.save_vid
if save_vid:
is_in = False
for ml in my_list:
is_in = is_in or (ml in dir_name)
save_vid = is_in
gen_result(SAVE_PATH,
dir_name,
save_vid=save_vid,
save_txt=args.save_txt,
dry_run=args.dry_run,
overwrite=args.overwrite
)
|
[
"numpy.random.seed",
"cv2.VideoWriter_fourcc",
"argparse.ArgumentParser",
"os.makedirs",
"os.path.isdir",
"utils.plot_utils.RandomColor",
"cv2.imread",
"os.path.isfile",
"os.path.splitext",
"cv2.VideoWriter",
"os.listdir",
"re.compile"
] |
[((2253, 2284), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'mp4v'"], {}), "(*'mp4v')\n", (2275, 2284), False, 'import cv2\n'), ((2295, 2314), 'numpy.random.seed', 'np.random.seed', (['(777)'], {}), '(777)\n', (2309, 2314), True, 'import numpy as np\n'), ((2326, 2341), 'utils.plot_utils.RandomColor', 'RandomColor', (['(30)'], {}), '(30)\n', (2337, 2341), False, 'from utils.plot_utils import RandomColor\n'), ((186, 317), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Monocular 3D Tracking Visualizer"""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description='Monocular 3D Tracking Visualizer',\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n", (209, 317), False, 'import argparse\n'), ((1721, 1743), 're.compile', 're.compile', (['"""[0-9]{4}"""'], {}), "('[0-9]{4}')\n", (1731, 1743), False, 'import re\n'), ((1876, 1926), 're.compile', 're.compile', (['"""rec_(.{8})_(.+)_(.+)h(.+)m_(.+[0-9])"""'], {}), "('rec_(.{8})_(.+)_(.+)h(.+)m_(.+[0-9])')\n", (1886, 1926), False, 'import re\n'), ((2386, 2405), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (2399, 2405), False, 'import os\n'), ((2465, 2482), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (2476, 2482), False, 'import os\n'), ((3620, 3641), 'cv2.imread', 'cv2.imread', (['images[0]'], {}), '(images[0])\n', (3630, 3641), False, 'import cv2\n'), ((3727, 3774), 'cv2.VideoWriter', 'cv2.VideoWriter', (['vid_name', 'FOURCC', 'fps', 'vidsize'], {}), '(vid_name, FOURCC, fps, vidsize)\n', (3742, 3774), False, 'import cv2\n'), ((6804, 6826), 'os.path.splitext', 'os.path.splitext', (['item'], {}), '(item)\n', (6820, 6826), False, 'import os\n'), ((6842, 6863), 'os.listdir', 'os.listdir', (['SAVE_PATH'], {}), '(SAVE_PATH)\n', (6852, 6863), False, 'import os\n'), ((3912, 3935), 'cv2.imread', 'cv2.imread', (['images[idx]'], {}), '(images[idx])\n', (3922, 3935), False, 'import cv2\n'), ((3253, 3277), 'os.path.isfile', 'os.path.isfile', (['txt_name'], {}), '(txt_name)\n', (3267, 3277), False, 'import os\n'), ((3334, 3358), 'os.path.isfile', 'os.path.isfile', (['vid_name'], {}), '(vid_name)\n', (3348, 3358), False, 'import os\n')]
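The two compiled regexes pick the sequence id out of the image path; for example:

import re

m = re.compile('[0-9]{4}').search('image_02/0003/000000.png')
print(m.group(0))  # 0003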
|
# --------------
#Importing header files
import pandas as pd
from sklearn.model_selection import train_test_split as tts
# Code starts here
# path (and user_data_dir, used further down) are injected by the hosting platform
data = pd.read_csv(path)
X = data.drop(['customer.id', 'paid.back.loan'], axis=1)
y = data['paid.back.loan']
X_train, X_test, y_train, y_test = tts(X, y, random_state=0, test_size=0.3)
# Code ends here
# --------------
#Importing header files
import matplotlib.pyplot as plt
# Code starts here
import pandas as pd
from sklearn.model_selection import train_test_split as tts
# Code starts here
fully_paid = y_train.value_counts()
plt.figure()
fully_paid.plot(kind='bar')
# Code ends here
# --------------
#Importing header files
import numpy as np
from sklearn.preprocessing import LabelEncoder
# Code starts here
X_train['int.rate'] = X_train['int.rate'].str.replace('%','').astype(float)
X_train['int.rate'] = X_train['int.rate']/100
X_test['int.rate'] = X_test['int.rate'].str.replace('%','').astype(float)
X_test['int.rate'] = X_test['int.rate']/100
num_df = X_train.select_dtypes(include = np.number)
cat_df = X_train.select_dtypes(exclude = np.number)
# Code ends here
# --------------
#Importing header files
import seaborn as sns
# Code starts here
# Code ends
cols = list(num_df)
fig, axes = plt.subplots(nrows =9, ncols= 1)
for i in range(1,9):
sns.boxplot(x=y_train, y=num_df[cols[i]], ax=axes[i])
# --------------
# Code starts here
# Code ends here
cols= list(cat_df)
fig, axes = plt.subplots(nrows = 2, ncols= 2)
for i in range (0,2):
for j in range(0,2):
sns.countplot(x=X_train[cols[i*2+j]], hue=y_train, ax=axes[i,j])
# --------------
#Importing header files
from sklearn.tree import DecisionTreeClassifier
from sklearn.preprocessing import LabelEncoder
# Code starts here
for i in list(cat_df):
    X_train[i] = X_train[i].fillna('NA')  # fillna returns a copy; assign it back
    le = LabelEncoder()
    X_train[i] = le.fit_transform(X_train[i])
    X_test[i] = X_test[i].fillna('NA')
    le = LabelEncoder()
    X_test[i] = le.fit_transform(X_test[i])
#y_test = y_test.str.replace('No',0)
y_train.replace({'No':0,'Yes':1},inplace=True)
y_test.replace({'No':0,'Yes':1},inplace=True)
# Code ends here
from sklearn.metrics import accuracy_score
model = DecisionTreeClassifier(random_state = 0)
model.fit(X_train, y_train)
y_preds = model.predict(X_test)
acc= accuracy_score(y_test, y_preds)
# --------------
#Importing header files
from sklearn.model_selection import GridSearchCV
#Parameter grid
parameter_grid = {'max_depth': np.arange(3,10), 'min_samples_leaf': range(10,50,10)}
# Code starts here
model_2 = DecisionTreeClassifier(random_state =0)
p_tree = GridSearchCV(estimator=model_2, param_grid=parameter_grid, cv=5)
p_tree.fit(X_train,y_train)
# Code ends here
ypreds2 = p_tree.predict(X_test)
acc_2 = accuracy_score(y_test, ypreds2)
acc_2
# --------------
#Importing header files
from io import StringIO
from sklearn.tree import export_graphviz
from sklearn import tree
from sklearn import metrics
from IPython.display import Image
import pydotplus
# Code starts here
dot_data = export_graphviz(decision_tree=p_tree.best_estimator_, out_file=None, feature_names=X.columns, filled = True, class_names=['loan_paid_back_yes','loan_paid_back_no'])
graph_big=pydotplus.graph_from_dot_data(dot_data)
# show graph - do not delete/modify the code below this line
img_path = user_data_dir+'/file.png'
graph_big.write_png(img_path)
plt.figure(figsize=(20,15))
plt.imshow(plt.imread(img_path))
plt.axis('off')
plt.show()
# Code ends here
|
[
"sklearn.model_selection.GridSearchCV",
"matplotlib.pyplot.show",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"sklearn.metrics.accuracy_score",
"matplotlib.pyplot.axis",
"sklearn.tree.DecisionTreeClassifier",
"sklearn.tree.export_graphviz",
"sklearn.preprocessing.LabelEncoder",
"pydotplus.graph_from_dot_data",
"matplotlib.pyplot.figure",
"seaborn.boxplot",
"numpy.arange",
"seaborn.countplot",
"matplotlib.pyplot.imread",
"matplotlib.pyplot.subplots"
] |
[((150, 167), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\n', (161, 167), True, 'import pandas as pd\n'), ((279, 319), 'sklearn.model_selection.train_test_split', 'tts', (['X', 'y'], {'random_state': '(0)', 'test_size': '(0.3)'}), '(X, y, random_state=0, test_size=0.3)\n', (282, 319), True, 'from sklearn.model_selection import train_test_split as tts\n'), ((569, 581), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (579, 581), True, 'import matplotlib.pyplot as plt\n'), ((1258, 1288), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(9)', 'ncols': '(1)'}), '(nrows=9, ncols=1)\n', (1270, 1288), True, 'import matplotlib.pyplot as plt\n'), ((1459, 1489), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(2)', 'ncols': '(2)'}), '(nrows=2, ncols=2)\n', (1471, 1489), True, 'import matplotlib.pyplot as plt\n'), ((2185, 2223), 'sklearn.tree.DecisionTreeClassifier', 'DecisionTreeClassifier', ([], {'random_state': '(0)'}), '(random_state=0)\n', (2207, 2223), False, 'from sklearn.tree import DecisionTreeClassifier\n'), ((2291, 2322), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_test', 'y_preds'], {}), '(y_test, y_preds)\n', (2305, 2322), False, 'from sklearn.metrics import accuracy_score\n'), ((2547, 2585), 'sklearn.tree.DecisionTreeClassifier', 'DecisionTreeClassifier', ([], {'random_state': '(0)'}), '(random_state=0)\n', (2569, 2585), False, 'from sklearn.tree import DecisionTreeClassifier\n'), ((2596, 2660), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', ([], {'estimator': 'model_2', 'param_grid': 'parameter_grid', 'cv': '(5)'}), '(estimator=model_2, param_grid=parameter_grid, cv=5)\n', (2608, 2660), False, 'from sklearn.model_selection import GridSearchCV\n'), ((2748, 2779), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_test', 'ypreds2'], {}), '(y_test, ypreds2)\n', (2762, 2779), False, 'from sklearn.metrics import accuracy_score\n'), ((3032, 3203), 'sklearn.tree.export_graphviz', 'export_graphviz', ([], {'decision_tree': 'p_tree.best_estimator_', 'out_file': 'None', 'feature_names': 'X.columns', 'filled': '(True)', 'class_names': "['loan_paid_back_yes', 'loan_paid_back_no']"}), "(decision_tree=p_tree.best_estimator_, out_file=None,\n feature_names=X.columns, filled=True, class_names=['loan_paid_back_yes',\n 'loan_paid_back_no'])\n", (3047, 3203), False, 'from sklearn.tree import export_graphviz\n'), ((3208, 3247), 'pydotplus.graph_from_dot_data', 'pydotplus.graph_from_dot_data', (['dot_data'], {}), '(dot_data)\n', (3237, 3247), False, 'import pydotplus\n'), ((3380, 3408), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(20, 15)'}), '(figsize=(20, 15))\n', (3390, 3408), True, 'import matplotlib.pyplot as plt\n'), ((3441, 3456), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (3449, 3456), True, 'import matplotlib.pyplot as plt\n'), ((3457, 3467), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3465, 3467), True, 'import matplotlib.pyplot as plt\n'), ((1316, 1369), 'seaborn.boxplot', 'sns.boxplot', ([], {'x': 'y_train', 'y': 'num_df[cols[i]]', 'ax': 'axes[i]'}), '(x=y_train, y=num_df[cols[i]], ax=axes[i])\n', (1327, 1369), True, 'import seaborn as sns\n'), ((1828, 1842), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (1840, 1842), False, 'from sklearn.preprocessing import LabelEncoder\n'), ((1926, 1940), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (1938, 1940), False, 'from sklearn.preprocessing import LabelEncoder\n'), 
((2463, 2479), 'numpy.arange', 'np.arange', (['(3)', '(10)'], {}), '(3, 10)\n', (2472, 2479), True, 'import numpy as np\n'), ((3419, 3439), 'matplotlib.pyplot.imread', 'plt.imread', (['img_path'], {}), '(img_path)\n', (3429, 3439), True, 'import matplotlib.pyplot as plt\n'), ((1543, 1612), 'seaborn.countplot', 'sns.countplot', ([], {'x': 'X_train[cols[i * 2 + j]]', 'hue': 'y_train', 'ax': 'axes[i, j]'}), '(x=X_train[cols[i * 2 + j]], hue=y_train, ax=axes[i, j])\n', (1556, 1612), True, 'import seaborn as sns\n')]
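One caveat in the record above: a fresh LabelEncoder is fitted on X_test, so the test-set integer codes are not guaranteed to match the training codes. The safer pattern (the column name 'purpose' is assumed for illustration):

from sklearn.preprocessing import LabelEncoder

le = LabelEncoder()
X_train['purpose'] = le.fit_transform(X_train['purpose'])
X_test['purpose'] = le.transform(X_test['purpose'])  # reuse the training fit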
|
# Copyright (c) 2012-2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pkg_resources
__all__ = ['version_info', 'version']
try:
# First, try to get our version out of PKG-INFO. If we're installed,
# this'll let us find our version without pulling in pbr. After all, if
# we're installed on a system, we're not in a Git-managed source tree, so
# pbr doesn't really buy us anything.
__version__ = pkg_resources.get_provider(
pkg_resources.Requirement.parse('swift3')).version
except pkg_resources.DistributionNotFound:
# No PKG-INFO? We're probably running from a checkout, then. Let pbr do
# its thing to figure out a version number.
import pbr.version
__version__ = pbr.version.VersionInfo('swift3').release_string()
#: Version information ``(major, minor, revision)``.
version_info = tuple(map(int, __version__.split('.')[:3]))
#: Version string ``'major.minor.revision'``.
version = '.'.join(map(str, version_info))
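# For illustration (hypothetical value): __version__ == '1.12.0' yields
# version_info == (1, 12, 0) and version == '1.12.0'; a dev suffix such as
# '1.12.0.dev4' is truncated to the same three numeric components.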
|
[
"pkg_resources.Requirement.parse"
] |
[((983, 1024), 'pkg_resources.Requirement.parse', 'pkg_resources.Requirement.parse', (['"""swift3"""'], {}), "('swift3')\n", (1014, 1024), False, 'import pkg_resources\n')]
|
# Copyright 2021 Uber Technologies, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import io
import re
import unittest
from horovod.runner.common.service.task_service import BasicTaskService, BasicTaskClient
from horovod.runner.common.util import secret
class FaultyStream:
"""This stream raises an exception after some text has been written."""
def __init__(self, stream):
self.stream = stream
self.raised = False
def write(self, b):
if not self.raised and len(self.stream.getvalue()) > 1024:
self.raised = True
raise RuntimeError()
self.stream.write(b)
def close(self):
pass
class TaskServiceTest(unittest.TestCase):
cmd = 'for i in {1..10000}; do echo "a very very useful log line #$i"; done'
cmd_single_line = f'{cmd} | wc'
@staticmethod
def cmd_with(stdout, stderr):
return f"bash -c '{stderr} >&2 & {stdout}'"
def test_run_command(self):
key = secret.make_secret_key()
service = BasicTaskService('test service', 0, key, nics=None, verbose=2)
try:
client = BasicTaskClient('test service', service.addresses(), key, verbose=2, attempts=1)
client.run_command(self.cmd_with(self.cmd_single_line, self.cmd_single_line), {})
exit = client.wait_for_command_exit_code()
self.assertEqual(0, exit)
self.assertEqual((True, 0), client.command_result())
finally:
service.shutdown()
def test_stream_command_output(self):
self.do_test_stream_command_output(
self.cmd_with(self.cmd, self.cmd),
capture_stdout=True, capture_stderr=True,
prefix_output_with_timestamp=True
)
def test_stream_command_output_stdout(self):
self.do_test_stream_command_output(
self.cmd_with(self.cmd, self.cmd_single_line),
capture_stdout=True, capture_stderr=False,
prefix_output_with_timestamp=True
)
def test_stream_command_output_stderr(self):
self.do_test_stream_command_output(
self.cmd_with(self.cmd_single_line, self.cmd),
capture_stdout=False, capture_stderr=True,
prefix_output_with_timestamp=True
)
def test_stream_command_output_neither(self):
self.do_test_stream_command_output(
self.cmd_with(self.cmd_single_line, self.cmd_single_line),
capture_stdout=False, capture_stderr=False,
prefix_output_with_timestamp=True
)
def test_stream_command_output_un_prefixed(self):
self.do_test_stream_command_output(
self.cmd_with(self.cmd, self.cmd),
capture_stdout=True, capture_stderr=True,
prefix_output_with_timestamp=False
)
def do_test_stream_command_output(self,
command,
capture_stdout, capture_stderr,
prefix_output_with_timestamp):
stdout = io.StringIO()
stderr = io.StringIO()
key = secret.make_secret_key()
service = BasicTaskService('test service', 0, key, nics=None, verbose=2)
try:
client = BasicTaskClient('test service', service.addresses(), key, verbose=2, attempts=1)
stdout_t, stderr_t = client.stream_command_output(stdout, stderr)
client.run_command(command, {},
capture_stdout=capture_stdout, capture_stderr=capture_stderr,
prefix_output_with_timestamp=prefix_output_with_timestamp)
client.wait_for_command_termination(delay=0.2)
self.assertEqual((True, 0), client.command_result())
if stdout_t is not None:
stdout_t.join(1.0)
self.assertEqual(False, stdout_t.is_alive())
if stderr_t is not None:
stderr_t.join(1.0)
self.assertEqual(False, stderr_t.is_alive())
finally:
service.shutdown()
stdout = stdout.getvalue()
stderr = stderr.getvalue()
# remove timestamps from each line in outputs
if prefix_output_with_timestamp:
stdout_no_ts = re.sub('^[^[]+', '', stdout, flags=re.MULTILINE)
stderr_no_ts = re.sub('^[^[]+', '', stderr, flags=re.MULTILINE)
# test we are removing something (hopefully timestamps)
if capture_stdout:
self.assertNotEqual(stdout_no_ts, stdout)
if capture_stderr:
self.assertNotEqual(stderr_no_ts, stderr)
stdout = stdout_no_ts
stderr = stderr_no_ts
# remove prefix
        stdout_no_prefix = re.sub(r'\[0\]<stdout>:', '', stdout, flags=re.MULTILINE)
        stderr_no_prefix = re.sub(r'\[0\]<stderr>:', '', stderr, flags=re.MULTILINE)
# test we are removing something (hopefully prefixes)
if capture_stdout:
self.assertNotEqual(stdout_no_prefix, stdout)
if capture_stderr:
self.assertNotEqual(stderr_no_prefix, stderr)
stdout = stdout_no_prefix
stderr = stderr_no_prefix
if capture_stdout and capture_stderr:
# both streams should be equal
self.assertEqual(stdout, stderr)
# streams should have meaningful number of lines and characters
if capture_stdout:
self.assertTrue(len(stdout) > 1024)
self.assertTrue(len(stdout.splitlines()) > 10)
if capture_stderr:
self.assertTrue(len(stderr) > 1024)
self.assertTrue(len(stderr.splitlines()) > 10)
def test_stream_command_output_reconnect(self):
self.do_test_stream_command_output_reconnect(attempts=3, succeeds=True)
def test_stream_command_output_no_reconnect(self):
self.do_test_stream_command_output_reconnect(attempts=1, succeeds=None)
def do_test_stream_command_output_reconnect(self, attempts, succeeds):
key = secret.make_secret_key()
stdout = io.StringIO()
stderr = io.StringIO()
stdout_s = FaultyStream(stdout)
stderr_s = FaultyStream(stderr)
service = BasicTaskService('test service', 0, key, nics=None, verbose=2)
try:
client = BasicTaskClient('test service', service.addresses(), key, verbose=2, attempts=attempts)
stdout_t, stderr_t = client.stream_command_output(stdout_s, stderr_s)
client.run_command(self.cmd_with(self.cmd, self.cmd), {},
capture_stdout=True, capture_stderr=True,
prefix_output_with_timestamp=False)
client.wait_for_command_termination(delay=0.2)
terminated, exit = client.command_result()
self.assertEqual(True, terminated)
if succeeds is not None:
self.assertEqual(succeeds, exit == 0)
if stdout_t is not None:
stdout_t.join(1.0)
self.assertEqual(False, stdout_t.is_alive())
if stderr_t is not None:
stderr_t.join(1.0)
self.assertEqual(False, stderr_t.is_alive())
finally:
service.shutdown()
stdout = stdout.getvalue()
stderr = stderr.getvalue()
        # we are likely to lose some lines, so the output is hard to evaluate
if succeeds:
self.assertGreaterEqual(len(stdout), 1024)
self.assertGreater(len(stdout.splitlines()), 10)
self.assertTrue(stdout_s.raised)
self.assertGreaterEqual(len(stderr), 1024)
self.assertGreater(len(stderr.splitlines()), 10)
self.assertTrue(stderr_s.raised)
# assert stdout and stderr similarity (how many lines both have in common)
            stdout = re.sub(r'\[0\]<stdout>:', '', stdout, flags=re.MULTILINE)
            stderr = re.sub(r'\[0\]<stderr>:', '', stderr, flags=re.MULTILINE)
stdout_set = set(stdout.splitlines())
stderr_set = set(stderr.splitlines())
intersect = stdout_set.intersection(stderr_set)
self.assertGreater(len(intersect) / min(len(stdout_set), len(stderr_set)), 0.90)
else:
# we might have retrieved data only for one of stdout and stderr
# so we expect some data for at least one of them
self.assertGreaterEqual(len(stdout) + len(stderr), 1024)
self.assertGreater(len(stdout.splitlines()) + len(stderr.splitlines()), 10)
self.assertTrue(stdout_s.raised or stderr_s.raised)
|
[
"horovod.runner.common.util.secret.make_secret_key",
"horovod.runner.common.service.task_service.BasicTaskService",
"re.sub",
"io.StringIO"
] |
[((1583, 1607), 'horovod.runner.common.util.secret.make_secret_key', 'secret.make_secret_key', ([], {}), '()\n', (1605, 1607), False, 'from horovod.runner.common.util import secret\n'), ((1626, 1688), 'horovod.runner.common.service.task_service.BasicTaskService', 'BasicTaskService', (['"""test service"""', '(0)', 'key'], {'nics': 'None', 'verbose': '(2)'}), "('test service', 0, key, nics=None, verbose=2)\n", (1642, 1688), False, 'from horovod.runner.common.service.task_service import BasicTaskService, BasicTaskClient\n'), ((3659, 3672), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (3670, 3672), False, 'import io\n'), ((3690, 3703), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (3701, 3703), False, 'import io\n'), ((3719, 3743), 'horovod.runner.common.util.secret.make_secret_key', 'secret.make_secret_key', ([], {}), '()\n', (3741, 3743), False, 'from horovod.runner.common.util import secret\n'), ((3762, 3824), 'horovod.runner.common.service.task_service.BasicTaskService', 'BasicTaskService', (['"""test service"""', '(0)', 'key'], {'nics': 'None', 'verbose': '(2)'}), "('test service', 0, key, nics=None, verbose=2)\n", (3778, 3824), False, 'from horovod.runner.common.service.task_service import BasicTaskService, BasicTaskClient\n'), ((5369, 5427), 're.sub', 're.sub', (['"""\\\\[0\\\\]<stdout>:"""', '""""""', 'stdout'], {'flags': 're.MULTILINE'}), "('\\\\[0\\\\]<stdout>:', '', stdout, flags=re.MULTILINE)\n", (5375, 5427), False, 'import re\n'), ((5453, 5511), 're.sub', 're.sub', (['"""\\\\[0\\\\]<stderr>:"""', '""""""', 'stderr'], {'flags': 're.MULTILINE'}), "('\\\\[0\\\\]<stderr>:', '', stderr, flags=re.MULTILINE)\n", (5459, 5511), False, 'import re\n'), ((6645, 6669), 'horovod.runner.common.util.secret.make_secret_key', 'secret.make_secret_key', ([], {}), '()\n', (6667, 6669), False, 'from horovod.runner.common.util import secret\n'), ((6687, 6700), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (6698, 6700), False, 'import io\n'), ((6718, 6731), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (6729, 6731), False, 'import io\n'), ((6831, 6893), 'horovod.runner.common.service.task_service.BasicTaskService', 'BasicTaskService', (['"""test service"""', '(0)', 'key'], {'nics': 'None', 'verbose': '(2)'}), "('test service', 0, key, nics=None, verbose=2)\n", (6847, 6893), False, 'from horovod.runner.common.service.task_service import BasicTaskService, BasicTaskClient\n'), ((4878, 4926), 're.sub', 're.sub', (['"""^[^[]+"""', '""""""', 'stdout'], {'flags': 're.MULTILINE'}), "('^[^[]+', '', stdout, flags=re.MULTILINE)\n", (4884, 4926), False, 'import re\n'), ((4954, 5002), 're.sub', 're.sub', (['"""^[^[]+"""', '""""""', 'stderr'], {'flags': 're.MULTILINE'}), "('^[^[]+', '', stderr, flags=re.MULTILINE)\n", (4960, 5002), False, 'import re\n'), ((8476, 8534), 're.sub', 're.sub', (['"""\\\\[0\\\\]<stdout>:"""', '""""""', 'stdout'], {'flags': 're.MULTILINE'}), "('\\\\[0\\\\]<stdout>:', '', stdout, flags=re.MULTILINE)\n", (8482, 8534), False, 'import re\n'), ((8554, 8612), 're.sub', 're.sub', (['"""\\\\[0\\\\]<stderr>:"""', '""""""', 'stderr'], {'flags': 're.MULTILINE'}), "('\\\\[0\\\\]<stderr>:', '', stderr, flags=re.MULTILINE)\n", (8560, 8612), False, 'import re\n')]
|
from collections import namedtuple
from anchore_engine.services.policy_engine.engine.policy.params import InputValidator
from anchore_engine.services.policy_engine.engine.policy.gate import Gate, GateMeta, BaseTrigger
class AttributeListValidator(InputValidator):
def __init__(self, attrs):
self.attrs = attrs
def validation_criteria(self):
return 'In: {}'.format(','.join(self.attrs))
def __call__(self, *args, **kwargs):
if args and args[0]:
            # On Python 3, filter() returns a lazy (always truthy) iterator, so the
            # old `not bool(filter(...))` idiom silently breaks; use all() instead
            parts = [x.strip() for x in args[0].split(',')]
            return all(x in self.attrs for x in parts)
else:
return False
CheckOperation = namedtuple('CheckOperation', ['requires_rvalue', 'eval_function'])
class CheckOperations(InputValidator):
"""
A very generic condition validator. Child classes can override the __conditions__ list for different values.
"""
# Map of tuples from an operator name to a tuple of (bool, function) where arg 0 is whether an rvalue is required and arg 1 is function taking 2 args to return evaluation
def __init__(self, ops):
"""
:param ops: a dict of string keys mapped to CheckOperation tuples
"""
self.ops = ops
def get_op(self, name):
return self.ops[name]
def validation_criteria(self):
return 'In: {}'.format(','.join(self.ops.keys()))
def __call__(self, *args, **kwargs):
if args and args[0]:
return args[0].strip() in self.ops.keys()
return False
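# A minimal sketch of how CheckOperations could be instantiated; the operator
# names and lambdas below are illustrative only, not taken from any gate:
#
#   _example_ops = CheckOperations({
#       '=': CheckOperation(requires_rvalue=True, eval_function=lambda l, r: l == r),
#       'exists': CheckOperation(requires_rvalue=False, eval_function=lambda l, r: l is not None),
#   })
#   assert _example_ops('=') and not _example_ops('like')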
#
#
# class MetadataConditionGate(Gate):
# """
# A generic conditional check gate on specific data items in the image metadata.
# """
# __gate_name__ = 'attribute_condition'
#
# class ExistsTrigger(BaseTrigger):
# __trigger_name__ = 'exists'
# __params__ = {'key': str}
#
# class LikeTrigger(BaseTrigger):
# __trigger_name__ = 'like_match'
# __params__ = {
# 'key': str,
# 'pattern': str,
# }
#
# class EqualsTrigger(BaseTrigger):
# __trigger_name__ = 'equals'
# __params__ = {
# 'key': str,
# 'value': str
# }
#
# class NotExists(BaseTrigger):
# __trigger_name__ = 'not_exists'
# __params__ = {'key': str}
#
# @staticmethod
# def resolve_key(key, image_obj):
# """
# Resolves a text key to a specific attribute of an image and returns it.
# Examples:
# $image.dockerfile.from -> image.dockerfile_contents['from']
#
#
# :param key:
# :param image_obj:
# :return:
# """
# # Resolves a key to a specific image element and retrieves it from the image object
# key_components = key.split('.')
# if key_components[0] != '$image':
# raise ValueError('Invalid key format: {}. Must be $image.p1.p2.p3...pN')
# else:
# key_components.pop()
#
# obj = image_obj
# for k in key_components:
# obj = model.get_lookup(k, obj)
#
#
#
# # TODO: zhill - Just jotted down these notes for future work
#
# # Powerful, but need to ensure consistency, may need to add statement Ids to the language to facilitate
# # direct references here
# class BooleanOperatorGate(Gate):
# __gate_name__ = 'combiner'
#
# class AndTrigger(BaseTrigger):
# __trigger_name__ = 'and'
# __params__ = {
# 'gate_1': str,
# 'trigger_1': str,
# 'result_1': str,
# 'gate_2': str,
# 'trigger_2': str,
# 'result_2': str
# }
#
# class OrTrigger(BaseTrigger):
# __trigger_name__ = 'or'
# __params__ = {
# 'gate_1': str,
# 'trigger_1': str,
# 'result_1': str,
# 'gate_2': str,
# 'trigger_2': str,
# 'result_2': str
# }
#
# class XorTrigger(BaseTrigger):
# __trigger_name__ = 'xor'
# __params__ = {
# 'gate_1': str,
# 'trigger_1': str,
# 'result_1': str,
# 'gate_2': str,
# 'trigger_2': str,
# 'result_2': str
# }
#
# class NotTrigger(BaseTrigger):
# __trigger_name__ = 'not'
# __params__ = {
# 'gate_1': str,
# 'trigger_1': str,
# 'result_1': str
# }
#
#
#
|
[
"collections.namedtuple"
] |
[((681, 747), 'collections.namedtuple', 'namedtuple', (['"""CheckOperation"""', "['requires_rvalue', 'eval_function']"], {}), "('CheckOperation', ['requires_rvalue', 'eval_function'])\n", (691, 747), False, 'from collections import namedtuple\n')]
|
# -*- coding: utf-8 -*-
# @Time : 19-11-19 22:25
# @Author : <NAME>
# @Reference : None
# @File : cut_twist_join.py
# @IDE : PyCharm Community Edition
"""
将身份证正反面从原始图片中切分出来。
需要的参数有:
1.图片所在路径。
输出结果为:
切分后的身份证正反面图片。
"""
import os
import cv2
import numpy as np
def point_judge(center, bbox):
"""
用于将矩形框的边界按顺序排列
:param center: 矩形中心的坐标[x, y]
:param bbox: 矩形顶点坐标[[x1, y1], [x2, y2], [x3, y3], [x4, y4]]
:return: 矩形顶点坐标,依次是 左下, 右下, 左上, 右上
"""
left = []
right = []
for i in range(4):
if bbox[i][0] > center[0]: # 只要是x坐标比中心点坐标大,一定是右边
right.append(bbox[i])
else:
left.append(bbox[i])
if right[0][1] > right[1][1]: # 如果y点坐标大,则是右上
right_down = right[1]
right_up = right[0]
else:
right_down = right[0]
right_up = right[1]
if left[0][1] > left[1][1]: # 如果y点坐标大,则是左上
left_down = left[1]
left_up = left[0]
else:
left_down = left[0]
left_up = left[1]
return left_down, right_down, left_up, right_up
def gray_and_fliter(img, image_name='1.jpg', save_path='./'):  # grayscale and filter; the last two parameters are for debugging
    """
    Convert the image to grayscale and filter it.
    :param img: input RGB image
    :param image_name: input image name, used for testing
    :param save_path: save path for the filtered result, used for testing
    :return: grayscale, filtered image
    """
    # img = cv2.imread(image_path + image_name)  # read the image
    img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)  # convert to a grayscale image
    # cv2.imwrite(os.path.join(save_path, image_name + '_gray.jpg'), img_gray)  # save for easy inspection
    img_blurred = cv2.filter2D(img_gray, -1,
                               kernel=np.array([[0, -1, 0], [-1, 5, -1], [0, -1, 0]], np.float32))  # filter the image; this is a sharpening pass
    img_blurred = cv2.filter2D(img_blurred, -1, kernel=np.array([[0, -1, 0], [-1, 5, -1], [0, -1, 0]], np.float32))
    # cv2.imwrite(os.path.join(save_path, img_name + '_blurred.jpg'), img_blurred)  # sharpening; the kernel here can be changed
    return img_blurred
def gradient_and_binary(img_blurred, image_name='1.jpg', save_path='./'):  # binarize the grayscale image; the last two parameters are for debugging
    """
    Compute the gradient and binarize.
    :param img_blurred: filtered image
    :param image_name: image name, used for testing
    :param save_path: save path, used for testing
    :return: binarized image
    """
    gradX = cv2.Sobel(img_blurred, ddepth=cv2.CV_32F, dx=1, dy=0)
    gradY = cv2.Sobel(img_blurred, ddepth=cv2.CV_32F, dx=0, dy=1)
    img_gradient = cv2.subtract(gradX, gradY)
    img_gradient = cv2.convertScaleAbs(img_gradient)  # Sobel operator to compute the gradient; a Canny operator could be used instead
    # switched to adaptive thresholding here; it does not seem to help
    img_thresh = cv2.adaptiveThreshold(img_gradient, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, 3, -3)
    # cv2.imwrite(os.path.join(save_path, img_name + '_binary.jpg'), img_thresh)  # binarization; the threshold is not well tuned
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5, 5))
    img_closed = cv2.morphologyEx(img_thresh, cv2.MORPH_CLOSE, kernel)
    img_closed = cv2.morphologyEx(img_closed, cv2.MORPH_OPEN, kernel)
    img_closed = cv2.erode(img_closed, None, iterations=9)
    img_closed = cv2.dilate(img_closed, None, iterations=9)  # erosion and dilation
    # after shrinking the kernel and increasing the erode/dilate iterations, the failure rate drops sharply
    return img_closed
def find_bbox(img, img_closed):  # locate the front/back regions of the ID card
    """
    Determine and crop the ID card front/back regions based on the binarization result.
    :param img: original RGB image
    :param img_closed: binarized image
    :return: ID card front/back regions
    """
    (contours, _) = cv2.findContours(img_closed.copy(), cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)  # collect the contours
    # this unpacking matches OpenCV 4.0+; if your version raises here, change (contours, _) to (_, contours, _)
    contours = sorted(contours, key=cv2.contourArea, reverse=True)  # sort by area
    countours_res = []
    for i in range(0, len(contours)):
        area = cv2.contourArea(contours[i])  # compute the area
        if (area <= 0.4 * img.shape[0] * img.shape[1]) and (area >= 0.05 * img.shape[0] * img.shape[1]):
            # heuristic: an ID card region covers no more than 0.4 and no less than 0.05 of the whole image (values picked arbitrarily)
            rect = cv2.minAreaRect(contours[i])  # minimum-area bounding rectangle; returns center coordinates, width/height and rotation angle
            box = cv2.boxPoints(rect)
            left_down, right_down, left_up, right_up = point_judge([int(rect[0][0]), int(rect[0][1])], box)
            src = np.float32([left_down, right_down, left_up, right_up])  # note: the order here must correspond
            dst = np.float32([[0, 0], [int(max(rect[1][0], rect[1][1])), 0], [0, int(min(rect[1][0], rect[1][1]))],
                              [int(max(rect[1][0], rect[1][1])),
                               int(min(rect[1][0], rect[1][1]))]])  # the width/height semantics of rect are unclear, but for an ID card width is surely larger than height, hence the max/min check
            m = cv2.getPerspectiveTransform(src, dst)  # get the perspective transform matrix
            result = cv2.warpPerspective(img, m, (int(max(rect[1][0], rect[1][1])), int(min(rect[1][0], rect[1][1]))),
                                         flags=cv2.INTER_CUBIC)  # perspective transform
            countours_res.append(result)
    return countours_res  # return the ID card regions
def find_cut_line(img_closed_original):  # handle front/back regions stuck together: take the minimum row as the dividing line
    """
    Forcibly split regions that are stuck together, based on a heuristic.
    :param img_closed_original: binarized image
    :return: processed binarized image
    """
    img_closed = img_closed_original.copy()
    img_closed = img_closed // 250
    # print(img_closed.shape)
    width_sum = img_closed.sum(axis=1)  # sum along the width axis to count the white pixels per row
    start_region_flag = 0
    start_region_index = 0  # row where the ID card region starts
    end_region_index = 0  # row where the ID card region ends
    for i in range(img_closed_original.shape[0]):  # 1000 is the original image height; img_closed_original.shape[0] works just as well
        if start_region_flag == 0 and width_sum[i] > 330:
            start_region_flag = 1
            start_region_index = i  # the first row with more than 330 white pixels marks the start of the ID card region
        if width_sum[i] > 330:
            end_region_index = i  # any row with more than 330 white pixels counts as ID card region; keep updating the end row
    # the row with the fewest white pixels inside the ID card region is taken as the front/back junction
    # argsort only sees the slice of width_sum between the detected start and end, so the start offset is added back
    min_line_position = start_region_index + np.argsort(width_sum[start_region_index:end_region_index])[0]
    img_closed_original[min_line_position][:] = 0
    for i in range(1, 11):  # tunable parameter: blank out up to 10 candidate rows
        temp_line_position = start_region_index + np.argsort(width_sum[start_region_index:end_region_index])[i]
        if abs(temp_line_position - min_line_position) < 30:  # restrict to rows within [-30, 30] of the minimum row
            img_closed_original[temp_line_position][:] = 0  # force to 0
    return img_closed_original
def cut_part_img(img, cut_percent):
    """
    Trim the ID card edges along both the width and the height.
    :param img: ID card region
    :param cut_percent: trim ratio
    :return: trimmed ID card region
    """
    height, width, _ = img.shape
    height_num = int(height * cut_percent)  # number of rows to trim
    h_start = 0 + height_num // 2  # trim equally on both sides
    h_end = height - height_num // 2 - 1
    width_num = int(width * cut_percent)  # number of columns to trim
    w_start = 0 + width_num // 2
    w_end = width - width_num // 2 - 1
    return img[h_start:h_end, w_start:w_end]  # return the trimmed image
def preprocess_cut_one_img(img_path, img_name, save_path='./save_imgs/', problem_path='./problem_save/'):  # process a single image
    """
    Crop the ID card front/back regions out of one image.
    :param img_path: directory containing the image
    :param img_name: image name
    :param save_path: result save path, used for testing
    :param problem_path: save path for intermediate results of failed images, used for testing
    :return: ID card front and back images
    """
    img_path_name = os.path.join(img_path, img_name)
    if not os.path.exists(img_path_name):  # check whether the image exists
        print('img {name} does not exist'.format(name=img_path_name))
        return 1, []  # image missing: return immediately and count one error
    img = cv2.imread(img_path_name)  # read the image
    img_blurred = gray_and_fliter(img, img_name)  # grayscale and filter
    img_t = cv2.filter2D(img, -1, kernel=np.array([[0, -1, 0], [-1, 5, -1], [0, -1, 0]], np.float32))
    # sharpen the image
    img_binary = gradient_and_binary(img_blurred)  # binarize
    res_bbox = find_bbox(img_t, img_binary)  # split front and back
    if len(res_bbox) != 2:  # exception handling
        print('Error happened when cut img {name}, try exception cut program '.format(name=img_path_name))
        # cv2.imwrite(os.path.join(problem_path, img_name.split('.')[0] + '_blurred.jpg'), img_blurred)
        # cv2.imwrite(os.path.join(problem_path, img_name.split('.')[0] + '_binary.jpg'), img_binary)
        # cv2.imwrite(os.path.join(problem_path, img_name), img)  # for debugging: save intermediate results
        img_binary = find_cut_line(img_binary)  # forcibly split front and back
        res_bbox = find_bbox(img_t, img_binary)
        if len(res_bbox) != 2:  # correction failed
            print('Failed to cut img {name}, exception program end'.format(name=img_path_name))
            return 1, None
        else:  # correction succeeded
            print('Correctly cut img {name}, exception program end'.format(name=img_path_name))
            return 0, res_bbox
    else:  # cropping succeeded
        # cv2.imwrite(os.path.join(save_path, img_name.split('.')[0] + '_0.jpg'), cut_part_img(res_bbox[0], 0.0))
        # cv2.imwrite(os.path.join(save_path, img_name.split('.')[0] + '_1.jpg'), cut_part_img(res_bbox[1], 0.0))
        # cv2.imwrite(os.path.join(save_path, img_name.split('.')[0] + '_original.jpg'), img)
        return 0, res_bbox
def process_img(img_path, save_path, problem_path):
    """
    Split every image in a directory.
    :param img_path: directory containing the images
    :param save_path: result save path
    :param problem_path: save path for problem images
    :return: None
    """
    if not os.path.exists(img_path):  # check whether the image directory exists
        print('img path {name} does not exist, program break.'.format(name=img_path))
        return
    if not os.path.exists(save_path):  # create the save path if it does not exist
        os.makedirs(save_path)
    if not os.path.exists(problem_path):  # create the path if it does not exist
        os.makedirs(problem_path)
img_names = os.listdir(img_path)
error_count = 0
error_names = []
for img_name in img_names:
error_temp, res_bbox = preprocess_cut_one_img(img_path, img_name, save_path, problem_path)
error_count += error_temp
if error_temp == 0:
cv2.imwrite(os.path.join(save_path, img_name.split('.')[0] + '_0.jpg'), cut_part_img(res_bbox[0], 0.0))
cv2.imwrite(os.path.join(save_path, img_name.split('.')[0] + '_1.jpg'), cut_part_img(res_bbox[1], 0.0))
else:
error_names.append(img_name)
print('total error number is: ', error_count)
    print('error images name:')
for error_img_name in error_names:
print(error_img_name)
return
if __name__ == '__main__':
origin_img_path = './problem_imgs/'
cutted_save_path = './res_imgs/'
cut_problem_path = './temp_imgs/'
#process_img(img_path=origin_img_path, save_path=cutted_save_path, problem_path=cut_problem_path)
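    # To run the full pipeline, uncomment the process_img call above; img_path must
    # already exist, while process_img creates the save and problem directories itself.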
|
[
"cv2.getPerspectiveTransform",
"cv2.adaptiveThreshold",
"numpy.argsort",
"cv2.boxPoints",
"cv2.minAreaRect",
"cv2.erode",
"os.path.join",
"cv2.contourArea",
"cv2.subtract",
"cv2.dilate",
"cv2.cvtColor",
"os.path.exists",
"cv2.convertScaleAbs",
"cv2.morphologyEx",
"os.listdir",
"cv2.Sobel",
"os.makedirs",
"cv2.getStructuringElement",
"numpy.float32",
"cv2.imread",
"numpy.array"
] |
[((1470, 1507), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2GRAY'], {}), '(img, cv2.COLOR_BGR2GRAY)\n', (1482, 1507), False, 'import cv2\n'), ((2259, 2312), 'cv2.Sobel', 'cv2.Sobel', (['img_blurred'], {'ddepth': 'cv2.CV_32F', 'dx': '(1)', 'dy': '(0)'}), '(img_blurred, ddepth=cv2.CV_32F, dx=1, dy=0)\n', (2268, 2312), False, 'import cv2\n'), ((2326, 2379), 'cv2.Sobel', 'cv2.Sobel', (['img_blurred'], {'ddepth': 'cv2.CV_32F', 'dx': '(0)', 'dy': '(1)'}), '(img_blurred, ddepth=cv2.CV_32F, dx=0, dy=1)\n', (2335, 2379), False, 'import cv2\n'), ((2400, 2426), 'cv2.subtract', 'cv2.subtract', (['gradX', 'gradY'], {}), '(gradX, gradY)\n', (2412, 2426), False, 'import cv2\n'), ((2447, 2480), 'cv2.convertScaleAbs', 'cv2.convertScaleAbs', (['img_gradient'], {}), '(img_gradient)\n', (2466, 2480), False, 'import cv2\n'), ((2555, 2654), 'cv2.adaptiveThreshold', 'cv2.adaptiveThreshold', (['img_gradient', '(255)', 'cv2.ADAPTIVE_THRESH_MEAN_C', 'cv2.\n THRESH_BINARY', '(3)', '(-3)'], {}), '(img_gradient, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.\n THRESH_BINARY, 3, -3)\n', (2576, 2654), False, 'import cv2\n'), ((2762, 2814), 'cv2.getStructuringElement', 'cv2.getStructuringElement', (['cv2.MORPH_ELLIPSE', '(5, 5)'], {}), '(cv2.MORPH_ELLIPSE, (5, 5))\n', (2787, 2814), False, 'import cv2\n'), ((2833, 2886), 'cv2.morphologyEx', 'cv2.morphologyEx', (['img_thresh', 'cv2.MORPH_CLOSE', 'kernel'], {}), '(img_thresh, cv2.MORPH_CLOSE, kernel)\n', (2849, 2886), False, 'import cv2\n'), ((2905, 2957), 'cv2.morphologyEx', 'cv2.morphologyEx', (['img_closed', 'cv2.MORPH_OPEN', 'kernel'], {}), '(img_closed, cv2.MORPH_OPEN, kernel)\n', (2921, 2957), False, 'import cv2\n'), ((2976, 3017), 'cv2.erode', 'cv2.erode', (['img_closed', 'None'], {'iterations': '(9)'}), '(img_closed, None, iterations=9)\n', (2985, 3017), False, 'import cv2\n'), ((3036, 3078), 'cv2.dilate', 'cv2.dilate', (['img_closed', 'None'], {'iterations': '(9)'}), '(img_closed, None, iterations=9)\n', (3046, 3078), False, 'import cv2\n'), ((7141, 7173), 'os.path.join', 'os.path.join', (['img_path', 'img_name'], {}), '(img_path, img_name)\n', (7153, 7173), False, 'import os\n'), ((7350, 7375), 'cv2.imread', 'cv2.imread', (['img_path_name'], {}), '(img_path_name)\n', (7360, 7375), False, 'import cv2\n'), ((9477, 9497), 'os.listdir', 'os.listdir', (['img_path'], {}), '(img_path)\n', (9487, 9497), False, 'import os\n'), ((3674, 3702), 'cv2.contourArea', 'cv2.contourArea', (['contours[i]'], {}), '(contours[i])\n', (3689, 3702), False, 'import cv2\n'), ((7186, 7215), 'os.path.exists', 'os.path.exists', (['img_path_name'], {}), '(img_path_name)\n', (7200, 7215), False, 'import os\n'), ((9135, 9159), 'os.path.exists', 'os.path.exists', (['img_path'], {}), '(img_path)\n', (9149, 9159), False, 'import os\n'), ((9288, 9313), 'os.path.exists', 'os.path.exists', (['save_path'], {}), '(save_path)\n', (9302, 9313), False, 'import os\n'), ((9341, 9363), 'os.makedirs', 'os.makedirs', (['save_path'], {}), '(save_path)\n', (9352, 9363), False, 'import os\n'), ((9376, 9404), 'os.path.exists', 'os.path.exists', (['problem_path'], {}), '(problem_path)\n', (9390, 9404), False, 'import os\n'), ((9432, 9457), 'os.makedirs', 'os.makedirs', (['problem_path'], {}), '(problem_path)\n', (9443, 9457), False, 'import os\n'), ((1674, 1733), 'numpy.array', 'np.array', (['[[0, -1, 0], [-1, 5, -1], [0, -1, 0]]', 'np.float32'], {}), '([[0, -1, 0], [-1, 5, -1], [0, -1, 0]], np.float32)\n', (1682, 1733), True, 'import numpy as np\n'), ((1808, 1867), 'numpy.array', 'np.array', (['[[0, -1, 0], [-1, 5, -1], [0, -1, 0]]', 'np.float32'], {}), '([[0, -1, 0], [-1, 5, -1], [0, -1, 0]], np.float32)\n', (1816, 1867), True, 'import numpy as np\n'), ((3904, 3932), 'cv2.minAreaRect', 'cv2.minAreaRect', (['contours[i]'], {}), '(contours[i])\n', (3919, 3932), False, 'import cv2\n'), ((3985, 4004), 'cv2.boxPoints', 'cv2.boxPoints', (['rect'], {}), '(rect)\n', (3998, 4004), False, 'import cv2\n'), ((4133, 4187), 'numpy.float32', 'np.float32', (['[left_down, right_down, left_up, right_up]'], {}), '([left_down, right_down, left_up, right_up])\n', (4143, 4187), True, 'import numpy as np\n'), ((4514, 4551), 'cv2.getPerspectiveTransform', 'cv2.getPerspectiveTransform', (['src', 'dst'], {}), '(src, dst)\n', (4541, 4551), False, 'import cv2\n'), ((5798, 5856), 'numpy.argsort', 'np.argsort', (['width_sum[start_region_index:end_region_index]'], {}), '(width_sum[start_region_index:end_region_index])\n', (5808, 5856), True, 'import numpy as np\n'), ((7486, 7545), 'numpy.array', 'np.array', (['[[0, -1, 0], [-1, 5, -1], [0, -1, 0]]', 'np.float32'], {}), '([[0, -1, 0], [-1, 5, -1], [0, -1, 0]], np.float32)\n', (7494, 7545), True, 'import numpy as np\n'), ((6005, 6063), 'numpy.argsort', 'np.argsort', (['width_sum[start_region_index:end_region_index]'], {}), '(width_sum[start_region_index:end_region_index])\n', (6015, 6063), True, 'import numpy as np\n')]
|
import time
import concurrent
import asyncio
import bleak
async def main():
loop = asyncio.new_event_loop()
client = bleak.BleakClient('D8:A9:8B:7E:1E:D2')
is_connected = await client.connect()
print(is_connected)
response = await client.write_gatt_char('0000ffe1-0000-1000-8000-00805f9b34fb', b'MOVE X 0.000000')
print(response)
if __name__ == "__main__":
asyncio.run(main())
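# Note: BleakClient also works as an async context manager, which handles
# disconnection automatically; a hedged sketch (the MAC address and
# characteristic UUID above are device-specific):
#
#   async def main():
#       async with bleak.BleakClient('D8:A9:8B:7E:1E:D2') as client:
#           await client.write_gatt_char('0000ffe1-0000-1000-8000-00805f9b34fb', b'MOVE X 0.000000')
#
# write_gatt_char returns None, so the print above shows None on success.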
|
[
"bleak.BleakClient",
"asyncio.new_event_loop"
] |
[((88, 112), 'asyncio.new_event_loop', 'asyncio.new_event_loop', ([], {}), '()\n', (110, 112), False, 'import asyncio\n'), ((127, 165), 'bleak.BleakClient', 'bleak.BleakClient', (['"""D8:A9:8B:7E:1E:D2"""'], {}), "('D8:A9:8B:7E:1E:D2')\n", (144, 165), False, 'import bleak\n')]
|
#!/usr/bin/env python3
import re
CCNUM = re.compile(r'(?!.*(\d)(?:\D?\1){3})[456]\d{3}(-?)(?:\d{4}\2){2}\d{4}')
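# Reading the pattern left to right:
#   (?!.*(\d)(?:\D?\1){3})  reject any digit repeated 4+ times in a row,
#                           even with a single separator between repeats
#   [456]\d{3}              must start with 4, 5 or 6; first group of 4 digits
#   (-?)                    capture the optional separator once...
#   (?:\d{4}\2){2}\d{4}     ...and require the remaining groups to reuse it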
for _ in range(int(input())):
print('Valid' if CCNUM.fullmatch(input().strip()) else 'Invalid')
|
[
"re.compile"
] |
[((43, 119), 're.compile', 're.compile', (['"""(?!.*(\\\\d)(?:\\\\D?\\\\1){3})[456]\\\\d{3}(-?)(?:\\\\d{4}\\\\2){2}\\\\d{4}"""'], {}), "('(?!.*(\\\\d)(?:\\\\D?\\\\1){3})[456]\\\\d{3}(-?)(?:\\\\d{4}\\\\2){2}\\\\d{4}')\n", (53, 119), False, 'import re\n')]
|
from m5stack import *
from m5ui import *
import espnow
import wifiCfg
import hat
joy_pos = None
paired = False
addr = None
data = None
setScreenColor(0x000000)
axp.setLDO2Volt(2.8)
hat_joyc0 = hat.get(hat.JOYC)
label0 = M5TextBox(22, 48, "Text", lcd.FONT_Default, 0xFFFFFF, rotate=0)
label1 = M5TextBox(22, 62, "Text", lcd.FONT_Default, 0xFFFFFF, rotate=0)
label2 = M5TextBox(22, 76, "Text", lcd.FONT_Default, 0xFFFFFF, rotate=0)
label3 = M5TextBox(22, 90, "Text", lcd.FONT_Default, 0xFFFFFF, rotate=0)
label4 = M5TextBox(22, 104, "Unpaired", lcd.FONT_Default, 0xFFFFFF, rotate=0)
titlebar = M5Title(title="text", x=3, fgcolor=0xFFFFFF, bgcolor=0x5b5b5b)
def main():
hat_joyc0.SetLedColor(0x3232ff)
wifiCfg.wlan_ap.active(True)
wifiCfg.wlan_sta.active(True)
espnow.init()
espnow.recv_cb(receive_msg)
timerSch.run('UpdatePosition', 10, 0x00)
timerSch.run('UpdateBattery', 1000, 0x00)
@timerSch.event('UpdatePosition')
def tUpdatePosition():
global joy_pos
joy_pos = [hat_joyc0.GetX(0), hat_joyc0.GetY(0), hat_joyc0.GetX(1), hat_joyc0.GetY(1)]
label0.setText(str(joy_pos[0]))
label1.setText(str(joy_pos[1]))
label2.setText(str(joy_pos[2]))
label3.setText(str(joy_pos[3]))
if paired == True:
#TODO: Add msg type code, and check at receiver.
espnow.send(id=1, data=bytes(joy_pos))
pass
@timerSch.event('UpdateBattery')
def tUpdateBattery():
titlebar.setTitle(str("%.1fv %.0fma"%(float(axp.getBatVoltage()), float(axp.getBatCurrent()))))
pass
def receive_msg(_):
global addr, data, paired
addr, _, data = espnow.recv_data(encoder='str')
label4.setText(str(data))
if paired == False:
#TODO: check if is this a mac address?
espnow.add_peer(str(data), id=1)
espnow.send(id=1, data=str('connected'))
paired = True
label4.setText(str('paired'))
pass
else:
pass
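# Pairing flow as implemented above: the peer broadcasts its MAC address, this
# unit registers it via espnow.add_peer and replies 'connected'; afterwards
# tUpdatePosition streams the four joystick axes as raw bytes roughly every
# 10 ms (the timerSch period above).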
main()
|
[
"espnow.init",
"wifiCfg.wlan_sta.active",
"espnow.recv_data",
"hat.get",
"espnow.recv_cb",
"wifiCfg.wlan_ap.active"
] |
[((195, 212), 'hat.get', 'hat.get', (['hat.JOYC'], {}), '(hat.JOYC)\n', (202, 212), False, 'import hat\n'), ((710, 738), 'wifiCfg.wlan_ap.active', 'wifiCfg.wlan_ap.active', (['(True)'], {}), '(True)\n', (732, 738), False, 'import wifiCfg\n'), ((741, 770), 'wifiCfg.wlan_sta.active', 'wifiCfg.wlan_sta.active', (['(True)'], {}), '(True)\n', (764, 770), False, 'import wifiCfg\n'), ((773, 786), 'espnow.init', 'espnow.init', ([], {}), '()\n', (784, 786), False, 'import espnow\n'), ((789, 816), 'espnow.recv_cb', 'espnow.recv_cb', (['receive_msg'], {}), '(receive_msg)\n', (803, 816), False, 'import espnow\n'), ((1561, 1592), 'espnow.recv_data', 'espnow.recv_data', ([], {'encoder': '"""str"""'}), "(encoder='str')\n", (1577, 1592), False, 'import espnow\n')]
|
# -*- coding:utf-8 -*-
# --------------------------------------------------------
# Copyright (C), 2016-2021, lizhe, All rights reserved
# --------------------------------------------------------
# @Name: gui.py
# @Author: lizhe
# @Created: 2021/12/15 - 21:24
# --------------------------------------------------------
import copy
from time import sleep
from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, \
    HORIZONTAL, E, PhotoImage, LEFT
from tkinter.ttk import Combobox, Notebook, Separator
from typing import List, Dict, Any, Union, Optional
from automotive.logger.logger import logger
from automotive.core.can.can_service import CANService
from automotive.core.can.common.enums import CanBoxDeviceEnum, BaudRateEnum
from .reader import ConfigReader
from .reader import check_buttons, thread_buttons, comboxs, entries, buttons, receive_buttons
from ..common.constants import OPEN_DEVICE, CLOSE_DEVICE, CLEAR_STACK, DEFAULT_MESSAGE, BUS_LOST, \
MESSAGE_LOST, TEXT, ON, OFF, VALUES, ACTIONS, COMMON, CHECK_MSGS, CHECK_MESSAGE, SIGNAL_NAME, \
SIGNAL_VALUE, SIGNAL_VALUES, SEARCH_COUNT, EXACT_SEARCH, YES_OR_NO, CHECK_SIGNAL, CHECK_SIGNAL_NAME
from ...utils.common.enums import ExcelEnum
class TabFrame(Frame):
def __init__(self, master, can_service: CANService, config: Dict[str, Any], filter_nodes: List[str],
common_panel: bool = False, max_line_count: int = None):
super().__init__(master)
self.can_service = can_service
self.thread_pool = can_service.can_bus.thread_pool
self.__filter_nodes = filter_nodes
        # check-button configuration
        self.__check_buttons = config[check_buttons] if config[check_buttons] else dict()
        logger.debug(f"check_buttons = {self.__check_buttons}")
        # flashing (threaded) check-button configuration
        self.__thread_buttons = config[thread_buttons] if config[thread_buttons] else dict()
        logger.debug(f"thread_buttons = {self.__thread_buttons}")
        # combo-box configuration
        self.__comboxs = config[comboxs] if config[comboxs] else dict()
        logger.debug(f"comboxs = {self.__comboxs}")
        # entry (input field) configuration
        self.__entries = config[entries] if config[entries] else dict()
        logger.debug(f"entries = {self.__entries}")
        # button configuration
        self.__buttons = config[buttons] if config[buttons] else dict()
        logger.debug(f"buttons = {self.__buttons}")
        # receive-check button configuration
        self.__receive_buttons = config[receive_buttons] if config[receive_buttons] else dict()
        logger.debug(f"receive_buttons = {self.__receive_buttons}")
        # maximum number of widgets per row
        self.__max_line_count = max_line_count  # 36
        # maximum number of double-width widgets per row
        self.__max_double_line_count = int(self.__max_line_count / 2)
        # default label width
        self.__label_width = 25
        # default combo-box width
        self.__comboxs_width = 20
        # default check-button width
        self.__checkBut_width = 25
        # default threaded (flashing) check-button width
        self.__thread_buttons_width = 20
        # default button width
        self.__buttons_width = 24
        # default entry width
        self.__entrie_width = 10
        # events supported by the entries
        self.support_event_keys = "<Return>",
        # check-button values
        self.check_button_bool_vars = dict()
        # flashing check-button values
        self.thread_button_bool_vars = dict()
        # button object dictionary
        self.buttons = dict()
        # check-button object dictionary
        self.check_buttons = dict()
        # flashing check-button object dictionary
        self.thread_buttons = dict()
        # combo-box object dictionary
        self.comboxs = dict()
        # entry object dictionary
        self.entries = dict()
        # flashing-event tasks
        self.thread_task = dict()
        # bus-lost button =
        # starting row and column
        self.row = 0
        self.column = 0
        # lay out the frame
        self.pack()
        # todo: 64*64 with subsample(3, 3) looks about right
        # self.open_image = PhotoImage(file=rf"D:\Download\Chrome\打开 (1).png").subsample(3, 3)
        # create the common widgets
        if common_panel:
            self.create_common_widget()
        # create the check buttons
        self.create_check_buttons()
        # create the combo boxes
        self.create_comboxs()
        # create the entries
        self.create_entries()
        # create the event (flashing) check buttons
        self.create_thread_buttons()
        # create the buttons (threaded)
        self.create_buttons()
        # create the receive-check buttons
self.create_receive_buttons()
def create_common_widget(self):
"""
创建 打开设备、关闭设备、清除数据(清除接收到的数据)、发送默认消息(通过初始化的filter_node过滤消息), 总线丢失、丢失部分信号等按键
"""
# ********** 创建打开设备按钮 check_button **********
text_name, show_name = OPEN_DEVICE
# 创建Button对象
self.buttons[text_name] = Button(self, text=show_name,
command=lambda x=OPEN_DEVICE: self.__special_button_event(x))
# 布局button
self.buttons[text_name].grid(row=self.row, column=self.column, sticky=W)
self.buttons[text_name]["state"] = NORMAL
self.column += 1
# ********** 创建关闭设备按钮 **********
text_name, show_name = CLOSE_DEVICE
# 创建Button对象
self.buttons[text_name] = Button(self, text=show_name,
command=lambda x=CLOSE_DEVICE: self.__special_button_event(x))
# 布局button
self.buttons[text_name].grid(row=self.row, column=self.column, sticky=W)
self.buttons[text_name]["state"] = DISABLED
self.column += 1
# ********** 创建清除接收到的CAN信号按钮 **********
text_name, show_name = CLEAR_STACK
# 创建Button对象
self.buttons[text_name] = Button(self, text=show_name,
command=lambda x=CLEAR_STACK: self.__special_button_event(x))
# 布局button
self.buttons[text_name].grid(row=self.row, column=self.column, sticky=W)
self.column += 1
# ********** 创建一个发送默认消息的按钮 button **********
text_name, show_name = DEFAULT_MESSAGE
# 创建Button对象
self.buttons[text_name] = Button(self, text=show_name,
command=lambda x=DEFAULT_MESSAGE: self.__special_button_event(x))
# 布局button
self.buttons[text_name].grid(row=self.row, column=self.column, sticky=W)
self.column += 1
# ********** 创建一个总线丢失的按钮 button **********
text_name, show_name = BUS_LOST
# 创建CheckButton对象并放到check_buttons中方便调用
self.buttons[text_name] = Button(self, text=show_name,
command=lambda x=BUS_LOST: self.__special_button_event(x))
# 布局checkbutton
self.buttons[text_name].grid(row=self.row, column=self.column, sticky=W)
self.column += 1
# ********** 创建一个信号丢失的输入框 entry **********
text_name, show_name = MESSAGE_LOST
# 获取输入框的名称
Label(self, text=show_name).grid(row=self.row, column=self.column, sticky=W)
self.column += 1
self.entries[text_name] = Entry(self, width=10)
self.entries[text_name].grid(row=self.row, column=self.column, sticky=W, columnspan=2)
self.entries[text_name].bind(self.support_event_keys[0],
lambda x, y=("", text_name): self.__entry_event(x, y))
self.row += 1
Separator(self, orient=HORIZONTAL).grid(row=self.row, column=0, pady=5, sticky=E + W,
columnspan=self.__max_line_count)
self.row += 1
# ********** 创建信号检查部分 **********
self.__create_message_check()
# ********** 创建检测信号是否之前发送值部分 *******
self.row += 1
Separator(self, orient=HORIZONTAL).grid(row=self.row, column=0, pady=5, sticky=E + W,
columnspan=self.__max_line_count)
self.row += 1
self.__create_message_signal_check()
def __create_message_check(self):
"""
创建信号检查部分
帧ID, 信号名称 信号值, 出现次数 精确查找等选中,用于在主机操作后的检查
"""
self.column = 0
text_name, show_name = SIGNAL_NAME
Label(self, text=show_name).grid(row=self.row, column=self.column, sticky=W)
self.column += 1
self.entries[text_name] = Entry(self, width=20) # 等同于signal_name = Entry
self.entries[text_name].grid(row=self.row, column=self.column, sticky=W, columnspan=2)
self.column += 2
text_name, show_name = SIGNAL_VALUE
Label(self, text=show_name).grid(row=self.row, column=self.column, sticky=W)
self.column += 1
self.entries[text_name] = Entry(self, width=8) # 等同于signal_value = Entry
self.entries[text_name].grid(row=self.row, column=self.column, sticky=W)
self.column += 1
text_name, show_name = SEARCH_COUNT
Label(self, text=show_name).grid(row=self.row, column=self.column, sticky=W)
self.column += 1
self.entries[text_name] = Entry(self, width=8)
self.entries[text_name].grid(row=self.row, column=self.column, sticky=W)
self.column += 1
text_name, show_name = EXACT_SEARCH
Label(self, text=show_name).grid(row=self.row, column=self.column, sticky=W)
self.column += 1
# 创建下拉框
self.comboxs[text_name] = Combobox(self, values=YES_OR_NO, state="readonly", width=5)
# 设置下拉框初始值为第一个值
self.comboxs[text_name].current(0)
# 布局下拉框
self.comboxs[text_name].grid(row=self.row, column=self.column, sticky=W)
self.column += 1
text_name, show_name = CHECK_MESSAGE
# 创建Button对象
self.buttons[text_name] = Button(self, text=show_name,
command=lambda x=CHECK_MESSAGE: self.__special_button_event(x))
# 布局button
self.buttons[text_name].grid(row=self.row, column=self.column, sticky=W)
self.buttons[text_name]["state"] = NORMAL
def __create_message_signal_check(self):
"""
创建信号之前发送过那些值检测
帧ID,信号名称 精确查找的等选择
:return:
"""
self.column = 0
text_name, show_name = CHECK_SIGNAL_NAME
Label(self, text=show_name).grid(row=self.row, column=self.column, sticky=W)
self.column += 1
self.entries[text_name] = Entry(self, width=20) # 等同于signal_name = Entry
self.entries[text_name].grid(row=self.row, column=self.column, sticky=W, columnspan=2)
self.column += 2
text_name, show_name = SIGNAL_VALUES
Label(self, text=show_name).grid(row=self.row, column=self.column, sticky=W)
self.column += 1
self.entries[text_name] = Entry(self, width=40, state=DISABLED) # 等同于signal_value = Entry
self.entries[text_name].grid(row=self.row, column=self.column, sticky=W, columnspan=5)
self.column += 5
text_name, show_name = CHECK_SIGNAL
# 创建Button对象
self.buttons[text_name] = Button(self, text=show_name,
command=lambda x=CHECK_SIGNAL: self.__special_button_event(x))
# 布局button
self.buttons[text_name].grid(row=self.row, column=self.column, sticky=W)
self.buttons[text_name]["state"] = NORMAL
logger.debug(f"entries are {entries}")
def __special_button_event(self, button_type: tuple):
text_name, show_name = button_type
self.buttons[text_name]["state"] = DISABLED
try:
self.__special_actions(button_type)
except RuntimeError as e:
messagebox.showerror("出错了", f"【{e}】")
logger.error(e)
self.buttons[text_name]["state"] = NORMAL
def __special_actions(self, button_type: tuple):
open_text_name = OPEN_DEVICE[0]
close_text_name = CLOSE_DEVICE[0]
signal_name_text_name = SIGNAL_NAME[0]
check_signal_name_text_name = CHECK_SIGNAL_NAME[0]
signal_value_text_name = SIGNAL_VALUE[0]
signal_values_text_name = SIGNAL_VALUES[0]
search_count_text_name = SEARCH_COUNT[0]
exact_search_text_name = EXACT_SEARCH[0]
text_name, show_name = button_type
if button_type == DEFAULT_MESSAGE:
self.can_service.send_default_messages(filter_sender=self.__filter_nodes)
self.buttons[text_name]["state"] = NORMAL
elif button_type == BUS_LOST:
self.can_service.stop_transmit()
self.buttons[text_name]["state"] = NORMAL
elif button_type == OPEN_DEVICE:
self.can_service.open_can()
self.buttons[open_text_name]["state"] = DISABLED
self.buttons[close_text_name]["state"] = NORMAL
elif button_type == CLOSE_DEVICE:
self.can_service.close_can()
self.buttons[open_text_name]["state"] = NORMAL
self.buttons[close_text_name]["state"] = DISABLED
elif button_type == CLEAR_STACK:
self.can_service.clear_stack_data()
self.buttons[text_name]["state"] = NORMAL
elif button_type == CHECK_MESSAGE:
            # get the signal name
            signal_name = self.entries[signal_name_text_name].get().strip()
            # get the signal value
            signal_value_text = self.entries[signal_value_text_name].get()
            if signal_value_text != "":
                signal_value = int(signal_value_text)
                # get the occurrence count
                search_count_text = self.entries[search_count_text_name].get()
                if search_count_text != "":
                    search_count = int(search_count_text)
                else:
                    search_count = None
                # get whether to match exactly
                index = self.comboxs[exact_search_text_name].current()
                # the first option selected means True
                exact_search = (index == 0)
                stack = self.can_service.get_stack()
                result = self.can_service.check_signal_value(stack=stack, signal_name=signal_name,
                                                             expect_value=signal_value, count=search_count,
                                                             exact=exact_search)
show_message = "成功" if result else "失败"
exact_message = "精确" if exact_search else "不精确"
message = f"检查信号【{signal_name}】值为【{signal_value}】收到次数" \
f"为【{search_count}】,匹配方式是【{exact_message}】检查结果是【{show_message}】"
if result:
messagebox.showinfo(title=show_message, message=message)
else:
messagebox.showerror(title=show_message, message=message)
self.buttons[text_name]["state"] = NORMAL
else:
messagebox.showerror(title="失败", message="请填写需要查询的信号值")
self.buttons[text_name]["state"] = NORMAL
elif button_type == CHECK_SIGNAL:
            # get the signal name
            signal_name = self.entries[check_signal_name_text_name].get().strip()
            # check which values of this signal have already been received and return them
            stack = self.can_service.get_stack()
            result = self.can_service.get_receive_signal_values(stack, signal_name)
            if len(result) > 0:
                self.entries[signal_values_text_name]["state"] = NORMAL
                # clear the previous value first
                self.entries[signal_values_text_name].delete(0, "end")
                # insert the returned values into the entry
self.entries[signal_values_text_name].insert(0, result)
self.entries[signal_values_text_name]["state"] = DISABLED
else:
messagebox.showerror(title="失败", message=f"{signal_name} is not received")
self.buttons[text_name]["state"] = NORMAL
def create_check_buttons(self):
"""
创建选中框,适用于单选发送消息的情况
"""
# 创建下拉框
if self.row != 0:
self.row += 1
# 创建单选框
index = 0
for key, value in self.__check_buttons.items():
function_name = key
text_name = value[TEXT]
if index == 0:
self.column = 0
elif index % self.__max_line_count == 0:
self.row += 1
self.column = 0
else:
self.column += 1
# 创建bool对象接收值
self.check_button_bool_vars[function_name] = BooleanVar()
# 创建CheckButton对象并放到check_buttons中方便调用
button = Checkbutton(self, text=text_name,
variable=self.check_button_bool_vars[function_name],
onvalue=True,
offvalue=False,
command=lambda x=function_name: self.__check_button_event(x),
width=self.__checkBut_width,
anchor="w",wraplength=150,justify="left"
)
self.check_buttons[function_name] = button
logger.debug(f"row = {self.row}, column = {self.column}, index = {index}")
# 布局checkbutton
self.check_buttons[function_name].grid(row=self.row, column=self.column, sticky=W)
index += 1
self.row += 1
if len(self.__check_buttons) != 0:
Separator(self, orient=HORIZONTAL).grid(row=self.row, column=0, pady=5, sticky=E + W,
columnspan=self.__max_line_count)
self.row += 1
def __check_button_event(self, function_name):
values = self.__check_buttons[function_name]
text_name = values[TEXT]
on_actions = values[ON]
off_actions = values[OFF]
if self.check_button_bool_vars[function_name].get():
logger.debug(f"{text_name} ON")
self.__send_actions(on_actions)
else:
logger.debug(f"{text_name} OFF")
self.__send_actions(off_actions)
def create_comboxs(self):
"""
创建下拉框,选中的时候触发事件, 适用于枚举类型的选中框
"""
# 创建下拉框
if self.row != 0:
self.row += 1
index = 0
for key, value in self.__comboxs.items():
function_name = key
text_name = value[TEXT]
if index == 0:
self.column = 0
elif index % self.__max_double_line_count == 0:
self.row += 1
self.column = 0
else:
self.column += 1
# 获取下拉框的名称
values = list(value[VALUES].keys())
logger.debug(f"row = {self.row}, column = {self.column}, index = {index}")
# 创建Label框
Label(self, text=text_name, width=self.__label_width, anchor="w",wraplength=180,justify="left").grid(row=self.row, column=self.column,
sticky=W)
# 创建下拉框
self.comboxs[function_name] = Combobox(self, values=values, state="readonly", width=self.__comboxs_width)
# 设置下拉框初始值为第一个值
self.comboxs[function_name].current(0)
logger.debug(f"row = {self.row}, column = {self.column}, index = {index}")
# 布局下拉框
self.comboxs[function_name].grid(row=self.row, column=self.column + 1, sticky=W)
# 绑定下拉框事件
self.comboxs[function_name].bind("<<ComboboxSelected>>",
lambda x, y=("", function_name): self.__combox_event(x, y))
logger.debug(f"row = {self.row}, column = {self.column}")
self.column += 1
index += 1
self.row += 1
if len(self.__comboxs) != 0:
Separator(self, orient=HORIZONTAL).grid(row=self.row, column=0, pady=5, sticky=E + W,
columnspan=self.__max_line_count)
self.row += 1
def __combox_event(self, event, function_name):
"""
能够找到下拉框,并根据下拉框的内容进行判断
后续能够根据内容进行消息的发送
"""
function_name = function_name[1]
combox_param = self.__comboxs[function_name]
# 字典中定义的值列表
values = combox_param[VALUES]
text_name = combox_param[TEXT]
actual_values = list(values.keys())
# 当前选中的是第几个
combox_index = self.comboxs[function_name].current()
select_name = actual_values[combox_index]
actions = values[select_name]
logger.debug(f"设置{text_name}为{select_name}")
self.__send_actions(actions)
logger.trace(event)
def create_entries(self):
"""
创建输入框,适用于车速类型的线性信号值
"""
# 创建输入框
if self.row != 0:
self.row += 1
index = 0
for key, value in self.__entries.items():
function_name = key
text_name = value[TEXT]
if index == 0:
self.column = 0
elif index % self.__max_double_line_count == 0:
self.row += 1
self.column = 0
else:
self.column += 1
logger.debug(f"row = {self.row}, column = {self.column}, index = {index}")
# 获取输入框的名称
Label(self, text=text_name, width=self.__label_width, anchor="w",wraplength=180,justify="left").grid(row=self.row, column=self.column,
sticky=W)
# 创建输入框
self.entries[function_name] = Entry(self, width=self.__entrie_width)
logger.debug(f"row = {self.row}, column = {self.column}, index = {index}")
self.entries[function_name].grid(row=self.row, column=self.column + 1, sticky=W)
# 绑定事件
for event_key in self.support_event_keys:
self.entries[function_name].bind(event_key,
lambda x, y=("", function_name): self.__entry_event(x, y))
self.column += 1
index += 1
self.row += 1
if len(self.__entries) != 0:
Separator(self, orient=HORIZONTAL).grid(row=self.row, column=0, pady=5, sticky=E + W,
columnspan=self.__max_line_count)
self.row += 1
def __entry_event(self, event, params):
message_lost = MESSAGE_LOST[0]
logger.trace(event)
function_name = params[1]
if function_name == message_lost:
value = self.entries[function_name].get()
if value != "":
# 0x152,0x153, 0x154
                value = value.replace("，", ",")  # normalize full-width commas (str.replace returns a new string, so the result must be assigned)
if "," in value:
values = value.split(",")
else:
# 0x164
values = [value]
for msg_id in values:
msg_id = msg_id.strip()
                # handle hexadecimal
                if "x" in msg_id or "X" in msg_id:
                    # convert the hex string to decimal
                    message_id = int(msg_id, 16)
else:
message_id = int(f"0x{msg_id}", 16)
logger.debug(f"message_id = {message_id}")
try:
self.can_service.stop_transmit(message_id)
except RuntimeError as e:
logger.error(e)
messagebox.showerror("出错了", f"【{e}】")
else:
entry_value = self.entries[function_name].get()
params = self.__entries[function_name]
actions = params[ACTIONS]
text_name = params[TEXT]
logger.debug(f"设置{text_name}值为{entry_value}")
new_actions = copy.deepcopy(actions)
for action in new_actions:
if len(action) == 2:
msg_id, signals = action
for name, value in signals.items():
if value is None:
logger.debug(f"change {name} value to {entry_value}")
signals[name] = float(entry_value)
self.__send_actions(new_actions)
def create_thread_buttons(self):
"""
创建周期交替变化或者有时间延迟的信号发送, 如双闪灯
选中会发送,不选中则不发送
名字上以【】区别
"""
# 创建事件单选框
if self.row != 0:
self.row += 1
index = 0
for key, value in self.__thread_buttons.items():
function_name = key
text_name = value[TEXT]
if index == 0:
self.column = 0
elif index % self.__max_line_count == 0:
self.row += 1
self.column = 0
else:
self.column += 1
# 创建bool对象接收值
self.thread_button_bool_vars[text_name] = BooleanVar()
# 创建CheckButton对象并放到thread_buttons中方便调用
button = Checkbutton(self, text=f"【{text_name}】",
variable=self.thread_button_bool_vars[text_name],
onvalue=True,
offvalue=False,
command=lambda x=function_name: self.__thread_check_button_event(x),
width=self.__thread_buttons_width,
anchor="w",wraplength=180,justify="left"
)
self.thread_buttons[function_name] = button
logger.debug(f"row = {self.row}, column = {self.column}, index = {index}")
self.thread_buttons[function_name].grid(row=self.row, column=self.column, sticky=W)
index += 1
self.row += 1
if len(self.__thread_buttons) != 0:
Separator(self, orient=HORIZONTAL).grid(row=self.row, column=0, pady=5, sticky=E + W,
columnspan=self.__max_line_count)
self.row += 1
def __thread_check_button_event(self, function_name):
if function_name == DEFAULT_MESSAGE:
logger.info(f"send default messages and filter nodes {self.__filter_nodes}")
if self.thread_button_bool_vars[DEFAULT_MESSAGE].get():
self.thread_pool.submit(self.__special_actions, 1)
elif function_name == BUS_LOST:
logger.info("can bus lost")
if self.thread_button_bool_vars[BUS_LOST].get():
self.thread_pool.submit(self.__special_actions, 2)
else:
param = self.__thread_buttons[function_name]
text_name = param[TEXT]
actions = param[ACTIONS]
if self.thread_button_bool_vars[text_name].get():
if function_name not in self.thread_task:
task = self.thread_pool.submit(self.__thread_method, text_name, actions)
self.thread_task[function_name] = task
else:
if function_name in self.thread_task:
self.thread_task.pop(function_name)
def __thread_method(self, name, actions):
logger.debug(actions)
while self.thread_button_bool_vars[name].get():
self.__send_actions(actions)
def __send_actions(self, actions: List):
for action in actions:
if len(action) == 2:
msg_id, signals = action
logger.info(f"{hex(msg_id)} = {signals}")
try:
self.can_service.send_can_signal_message(msg_id, signals)
except RuntimeError as e:
logger.error(e)
messagebox.showerror("出错了", f"【{e}】")
elif len(action) == 1:
logger.debug(f"sleep {action} seconds")
sleep_time = float(action[0])
sleep(sleep_time)
else:
raise RuntimeError(f"value[{action}] incorrect")
def create_buttons(self):
"""
创建事件信号按钮,主要用于有时间延迟的部分,如长按或者短按方向盘按键, press release两种状态切换需要时间等待
"""
if self.row != 0:
self.row += 1
index = 0
for key, value in self.__buttons.items():
function_name = key
text_name = value[TEXT]
if index == 0:
self.column = 0
elif index % self.__max_line_count == 0:
self.row += 1
self.column = 0
else:
self.column += 1
# 创建CheckButton对象并放到thread_buttons中方便调用
self.buttons[function_name] = Button(self, text=text_name,
command=lambda x=function_name: self.__thread_button_event(x),
width=self.__buttons_width,wraplength=170,justify="left",anchor="w")
logger.debug(f"row = {self.row}, column = {self.column}, index = {index}")
self.buttons[function_name].grid(row=self.row, column=self.column, sticky=W)
index += 1
self.row += 1
if len(self.__buttons) != 0:
Separator(self, orient=HORIZONTAL).grid(row=self.row, column=0, pady=5, sticky=E + W,
columnspan=self.__max_line_count)
self.row += 1
def __thread_button_event(self, function_name):
try:
self.buttons[function_name]["state"] = DISABLED
param = self.__buttons[function_name]
text_name = param[TEXT]
logger.debug(f"press {text_name} button")
actions = param[ACTIONS]
self.thread_pool.submit(self.__send_actions, actions)
except RuntimeError as e:
logger.error(e)
messagebox.showerror("出错了", f"【{e}】")
finally:
self.buttons[function_name]["state"] = NORMAL
def create_receive_buttons(self):
"""
创建接收检查按钮, 模拟其他ECU接收
"""
if self.row != 0:
self.row += 1
index = 0
for key, value in self.__receive_buttons.items():
function_name = key
text_name = value[TEXT]
if index == 0:
self.column = 0
elif index % self.__max_line_count == 0:
self.row += 1
self.column = 0
else:
self.column += 1
# 创建CheckButton对象并放到thread_buttons中方便调用
logger.debug(f"add button {function_name} in buttons")
self.buttons[function_name] = Button(self, text=f"【{text_name}】",
command=lambda x=function_name: self.__receive_button_event(x))
logger.debug(f"row = {self.row}, column = {self.column}, index = {index}")
self.buttons[function_name].grid(row=self.row, column=self.column, sticky=W)
index += 1
self.row += 1
if len(self.__receive_buttons) != 0:
Separator(self, orient=HORIZONTAL).grid(row=self.row, column=0, pady=5, sticky=E + W,
columnspan=self.__max_line_count)
self.row += 1
def __receive_button_event(self, function_name):
self.buttons[function_name]["state"] = DISABLED
param = self.__receive_buttons[function_name]
text_name = param[TEXT]
logger.debug(f"press {text_name} button")
check_msgs = param[CHECK_MSGS]
msg_id, signal_name, signal_value, count, expect_value = check_msgs
try:
stack = self.can_service.get_stack()
result = self.can_service.check_signal_value(stack=stack, msg_id=msg_id, signal_name=signal_name, expect_value=signal_value, count=count, exact=expect_value)
show_message = "成功" if result else "失败"
exact_message = "精确" if expect_value else "不精确"
message = f"检查【{hex(msg_id)}】中信号【{signal_name}】值为【{signal_value}】收到次数" \
f"为【{count}】,匹配方式为【{exact_message}】的检查结果是【{show_message}】"
if result:
messagebox.showinfo(title=show_message, message=message)
else:
messagebox.showerror(title=show_message, message=message)
except RuntimeError as e:
logger.error(e)
messagebox.showerror(title="出错了", message=f"【{e}】")
finally:
self.can_service.clear_stack_data()
self.buttons[function_name]["state"] = NORMAL
class Gui(object):
def __init__(self, excel_file: str, dbc: str, can_box_device: Union[CanBoxDeviceEnum, str, None] = None,
baud_rate: Union[BaudRateEnum, int] = BaudRateEnum.HIGH,
data_rate: Union[BaudRateEnum, int] = BaudRateEnum.DATA,
channel_index: int = 1,
filter_nodes: Optional[List[str]] = None, can_fd: bool = False,
excel_type: ExcelEnum = ExcelEnum.OPENPYXL,
max_workers: int = 500,
max_line_count: int = 8):
"""
        :param excel_file: path to the Excel file (required)
        :param dbc: path to the project dbc file (required)
        :param can_box_device: CAN box device (optional)
        :param filter_nodes: nodes filtered out when sending the default signals (optional)
        :param can_fd: whether CAN FD is used (optional)
        :param excel_type: Excel reader backend (optional)
        :param max_workers: the default value is fine (optional)
        :param max_line_count: maximum number of widgets shown per row in the panel; defaults to 8, adjust it if the layout does not fit
"""
self.tk = Tk()
self.tk.title("CAN面板")
        # Initialize the CANService
self.can_service = CANService(dbc, can_box_device=can_box_device, baud_rate=baud_rate, data_rate=data_rate,
channel_index=channel_index, can_fd=can_fd, max_workers=max_workers)
        # Nodes to filter out when sending the default messages
self.__filter_nodes = filter_nodes
        # Load the button configuration from the Excel file
        service = ConfigReader(can_service=self.can_service, type_=excel_type)
tab_configs = dict()
tab_configs[COMMON] = {check_buttons: {}, thread_buttons: {}, comboxs: {},
entries: {}, buttons: {}, receive_buttons: {}}
config = service.read_from_file(excel_file)
tab_configs.update(config)
self.tab_control = Notebook(self.tk)
        # Tab frame objects, stored in creation order
self.tabs = []
for key, value in tab_configs.items():
logger.info(f"handle tab {key}")
if key == COMMON:
common_panel = True
else:
common_panel = False
tab = TabFrame(self.tk, can_service=self.can_service, filter_nodes=filter_nodes,
config=value, common_panel=common_panel, max_line_count=max_line_count)
self.tab_control.add(tab, text=key)
self.tabs.append(tab)
self.tab_control.pack(expand=1, fill="both")
        # Select the first tab by default
self.tab_control.select(self.tabs[0])
self.tk.protocol('WM_DELETE_WINDOW', self.exit_root)
self.tk.mainloop()
def exit_root(self):
self.can_service.close_can()
self.tk.destroy()
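# Minimal usage sketch; the file names and node list below are illustrative
# assumptions, not values from the original project. Constructing Gui opens
# the CAN device and then blocks in the Tk main loop until the window closes.
if __name__ == "__main__":
    Gui(excel_file="can_panel.xlsx", dbc="project.dbc", filter_nodes=["HU"])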
|
[
"copy.deepcopy",
"tkinter.ttk.Separator",
"automotive.core.can.can_service.CANService",
"automotive.logger.logger.logger.info",
"automotive.logger.logger.logger.debug",
"tkinter.Entry",
"tkinter.messagebox.showerror",
"tkinter.messagebox.showinfo",
"time.sleep",
"tkinter.ttk.Combobox",
"tkinter.ttk.Notebook",
"tkinter.BooleanVar",
"automotive.logger.logger.logger.trace",
"tkinter.Label",
"tkinter.Tk",
"automotive.logger.logger.logger.error"
] |
[((1809, 1864), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""check_buttons = {self.__check_buttons}"""'], {}), "(f'check_buttons = {self.__check_buttons}')\n", (1821, 1864), False, 'from automotive.logger.logger import logger\n'), ((1989, 2046), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""thread_buttons = {self.__thread_buttons}"""'], {}), "(f'thread_buttons = {self.__thread_buttons}')\n", (2001, 2046), False, 'from automotive.logger.logger import logger\n'), ((2148, 2191), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""comboxs = {self.__comboxs}"""'], {}), "(f'comboxs = {self.__comboxs}')\n", (2160, 2191), False, 'from automotive.logger.logger import logger\n'), ((2293, 2336), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""entries = {self.__entries}"""'], {}), "(f'entries = {self.__entries}')\n", (2305, 2336), False, 'from automotive.logger.logger import logger\n'), ((2436, 2479), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""buttons = {self.__buttons}"""'], {}), "(f'buttons = {self.__buttons}')\n", (2448, 2479), False, 'from automotive.logger.logger import logger\n'), ((2605, 2664), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""receive_buttons = {self.__receive_buttons}"""'], {}), "(f'receive_buttons = {self.__receive_buttons}')\n", (2617, 2664), False, 'from automotive.logger.logger import logger\n'), ((7019, 7040), 'tkinter.Entry', 'Entry', (['self'], {'width': '(10)'}), '(self, width=10)\n', (7024, 7040), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((8267, 8288), 'tkinter.Entry', 'Entry', (['self'], {'width': '(20)'}), '(self, width=20)\n', (8272, 8288), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((8629, 8649), 'tkinter.Entry', 'Entry', (['self'], {'width': '(8)'}), '(self, width=8)\n', (8634, 8649), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((8977, 8997), 'tkinter.Entry', 'Entry', (['self'], {'width': '(8)'}), '(self, width=8)\n', (8982, 8997), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((9315, 9374), 'tkinter.ttk.Combobox', 'Combobox', (['self'], {'values': 'YES_OR_NO', 'state': '"""readonly"""', 'width': '(5)'}), "(self, values=YES_OR_NO, state='readonly', width=5)\n", (9323, 9374), False, 'from tkinter.ttk import Combobox, Notebook, Separator\n'), ((10325, 10346), 'tkinter.Entry', 'Entry', (['self'], {'width': '(20)'}), '(self, width=20)\n', (10330, 10346), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((10688, 10725), 'tkinter.Entry', 'Entry', (['self'], {'width': '(40)', 'state': 'DISABLED'}), '(self, width=40, state=DISABLED)\n', (10693, 10725), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((11273, 11311), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""entries are {entries}"""'], {}), "(f'entries are {entries}')\n", (11285, 11311), False, 'from automotive.logger.logger import logger\n'), 
((20583, 20627), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""设置{text_name}为{select_name}"""'], {}), "(f'设置{text_name}为{select_name}')\n", (20595, 20627), False, 'from automotive.logger.logger import logger\n'), ((20675, 20694), 'automotive.logger.logger.logger.trace', 'logger.trace', (['event'], {}), '(event)\n', (20687, 20694), False, 'from automotive.logger.logger import logger\n'), ((22541, 22560), 'automotive.logger.logger.logger.trace', 'logger.trace', (['event'], {}), '(event)\n', (22553, 22560), False, 'from automotive.logger.logger import logger\n'), ((27346, 27367), 'automotive.logger.logger.logger.debug', 'logger.debug', (['actions'], {}), '(actions)\n', (27358, 27367), False, 'from automotive.logger.logger import logger\n'), ((31691, 31732), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""press {text_name} button"""'], {}), "(f'press {text_name} button')\n", (31703, 31732), False, 'from automotive.logger.logger import logger\n'), ((33742, 33746), 'tkinter.Tk', 'Tk', ([], {}), '()\n', (33744, 33746), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((33833, 33998), 'automotive.core.can.can_service.CANService', 'CANService', (['dbc'], {'can_box_device': 'can_box_device', 'baud_rate': 'baud_rate', 'data_rate': 'data_rate', 'channel_index': 'channel_index', 'can_fd': 'can_fd', 'max_workers': 'max_workers'}), '(dbc, can_box_device=can_box_device, baud_rate=baud_rate,\n data_rate=data_rate, channel_index=channel_index, can_fd=can_fd,\n max_workers=max_workers)\n', (33843, 33998), False, 'from automotive.core.can.can_service import CANService\n'), ((34513, 34530), 'tkinter.ttk.Notebook', 'Notebook', (['self.tk'], {}), '(self.tk)\n', (34521, 34530), False, 'from tkinter.ttk import Combobox, Notebook, Separator\n'), ((16407, 16419), 'tkinter.BooleanVar', 'BooleanVar', ([], {}), '()\n', (16417, 16419), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((17054, 17128), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""row = {self.row}, column = {self.column}, index = {index}"""'], {}), "(f'row = {self.row}, column = {self.column}, index = {index}')\n", (17066, 17128), False, 'from automotive.logger.logger import logger\n'), ((17843, 17874), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""{text_name} ON"""'], {}), "(f'{text_name} ON')\n", (17855, 17874), False, 'from automotive.logger.logger import logger\n'), ((17948, 17980), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""{text_name} OFF"""'], {}), "(f'{text_name} OFF')\n", (17960, 17980), False, 'from automotive.logger.logger import logger\n'), ((18660, 18734), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""row = {self.row}, column = {self.column}, index = {index}"""'], {}), "(f'row = {self.row}, column = {self.column}, index = {index}')\n", (18672, 18734), False, 'from automotive.logger.logger import logger\n'), ((19065, 19140), 'tkinter.ttk.Combobox', 'Combobox', (['self'], {'values': 'values', 'state': '"""readonly"""', 'width': 'self.__comboxs_width'}), "(self, values=values, state='readonly', width=self.__comboxs_width)\n", (19073, 19140), False, 'from tkinter.ttk import Combobox, Notebook, Separator\n'), ((19235, 19309), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""row = {self.row}, column = {self.column}, 
index = {index}"""'], {}), "(f'row = {self.row}, column = {self.column}, index = {index}')\n", (19247, 19309), False, 'from automotive.logger.logger import logger\n'), ((19637, 19694), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""row = {self.row}, column = {self.column}"""'], {}), "(f'row = {self.row}, column = {self.column}')\n", (19649, 19694), False, 'from automotive.logger.logger import logger\n'), ((21246, 21320), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""row = {self.row}, column = {self.column}, index = {index}"""'], {}), "(f'row = {self.row}, column = {self.column}, index = {index}')\n", (21258, 21320), False, 'from automotive.logger.logger import logger\n'), ((21651, 21689), 'tkinter.Entry', 'Entry', (['self'], {'width': 'self.__entrie_width'}), '(self, width=self.__entrie_width)\n', (21656, 21689), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((21703, 21777), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""row = {self.row}, column = {self.column}, index = {index}"""'], {}), "(f'row = {self.row}, column = {self.column}, index = {index}')\n", (21715, 21777), False, 'from automotive.logger.logger import logger\n'), ((23852, 23897), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""设置{text_name}值为{entry_value}"""'], {}), "(f'设置{text_name}值为{entry_value}')\n", (23864, 23897), False, 'from automotive.logger.logger import logger\n'), ((23925, 23947), 'copy.deepcopy', 'copy.deepcopy', (['actions'], {}), '(actions)\n', (23938, 23947), False, 'import copy\n'), ((25044, 25056), 'tkinter.BooleanVar', 'BooleanVar', ([], {}), '()\n', (25054, 25056), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((25710, 25784), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""row = {self.row}, column = {self.column}, index = {index}"""'], {}), "(f'row = {self.row}, column = {self.column}, index = {index}')\n", (25722, 25784), False, 'from automotive.logger.logger import logger\n'), ((26307, 26383), 'automotive.logger.logger.logger.info', 'logger.info', (['f"""send default messages and filter nodes {self.__filter_nodes}"""'], {}), "(f'send default messages and filter nodes {self.__filter_nodes}')\n", (26318, 26383), False, 'from automotive.logger.logger import logger\n'), ((29120, 29194), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""row = {self.row}, column = {self.column}, index = {index}"""'], {}), "(f'row = {self.row}, column = {self.column}, index = {index}')\n", (29132, 29194), False, 'from automotive.logger.logger import logger\n'), ((29814, 29855), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""press {text_name} button"""'], {}), "(f'press {text_name} button')\n", (29826, 29855), False, 'from automotive.logger.logger import logger\n'), ((30749, 30803), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""add button {function_name} in buttons"""'], {}), "(f'add button {function_name} in buttons')\n", (30761, 30803), False, 'from automotive.logger.logger import logger\n'), ((31010, 31084), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""row = {self.row}, column = {self.column}, index = {index}"""'], {}), "(f'row = {self.row}, column = {self.column}, index = {index}')\n", (31022, 31084), False, 'from automotive.logger.logger import logger\n'), 
((34638, 34670), 'automotive.logger.logger.logger.info', 'logger.info', (['f"""handle tab {key}"""'], {}), "(f'handle tab {key}')\n", (34649, 34670), False, 'from automotive.logger.logger import logger\n'), ((6881, 6908), 'tkinter.Label', 'Label', (['self'], {'text': 'show_name'}), '(self, text=show_name)\n', (6886, 6908), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((7328, 7362), 'tkinter.ttk.Separator', 'Separator', (['self'], {'orient': 'HORIZONTAL'}), '(self, orient=HORIZONTAL)\n', (7337, 7362), False, 'from tkinter.ttk import Combobox, Notebook, Separator\n'), ((7679, 7713), 'tkinter.ttk.Separator', 'Separator', (['self'], {'orient': 'HORIZONTAL'}), '(self, orient=HORIZONTAL)\n', (7688, 7713), False, 'from tkinter.ttk import Combobox, Notebook, Separator\n'), ((8129, 8156), 'tkinter.Label', 'Label', (['self'], {'text': 'show_name'}), '(self, text=show_name)\n', (8134, 8156), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((8491, 8518), 'tkinter.Label', 'Label', (['self'], {'text': 'show_name'}), '(self, text=show_name)\n', (8496, 8518), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((8839, 8866), 'tkinter.Label', 'Label', (['self'], {'text': 'show_name'}), '(self, text=show_name)\n', (8844, 8866), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((9160, 9187), 'tkinter.Label', 'Label', (['self'], {'text': 'show_name'}), '(self, text=show_name)\n', (9165, 9187), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((10187, 10214), 'tkinter.Label', 'Label', (['self'], {'text': 'show_name'}), '(self, text=show_name)\n', (10192, 10214), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((10550, 10577), 'tkinter.Label', 'Label', (['self'], {'text': 'show_name'}), '(self, text=show_name)\n', (10555, 10577), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((11581, 11618), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""出错了"""', 'f"""【{e}】"""'], {}), "('出错了', f'【{e}】')\n", (11601, 11618), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((11632, 11647), 'automotive.logger.logger.logger.error', 'logger.error', (['e'], {}), '(e)\n', (11644, 11647), False, 'from automotive.logger.logger import logger\n'), ((26575, 26602), 'automotive.logger.logger.logger.info', 'logger.info', (['"""can bus lost"""'], {}), "('can bus lost')\n", (26586, 26602), False, 'from automotive.logger.logger import logger\n'), ((30009, 30024), 'automotive.logger.logger.logger.error', 'logger.error', (['e'], {}), '(e)\n', (30021, 30024), False, 'from automotive.logger.logger import logger\n'), ((30038, 30075), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""出错了"""', 'f"""【{e}】"""'], {}), "('出错了', f'【{e}】')\n", (30058, 30075), False, 'from tkinter 
import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((32408, 32464), 'tkinter.messagebox.showinfo', 'messagebox.showinfo', ([], {'title': 'show_message', 'message': 'message'}), '(title=show_message, message=message)\n', (32427, 32464), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((32501, 32558), 'tkinter.messagebox.showerror', 'messagebox.showerror', ([], {'title': 'show_message', 'message': 'message'}), '(title=show_message, message=message)\n', (32521, 32558), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((32607, 32622), 'automotive.logger.logger.logger.error', 'logger.error', (['e'], {}), '(e)\n', (32619, 32622), False, 'from automotive.logger.logger import logger\n'), ((32636, 32687), 'tkinter.messagebox.showerror', 'messagebox.showerror', ([], {'title': '"""出错了"""', 'message': 'f"""【{e}】"""'}), "(title='出错了', message=f'【{e}】')\n", (32656, 32687), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((17358, 17392), 'tkinter.ttk.Separator', 'Separator', (['self'], {'orient': 'HORIZONTAL'}), '(self, orient=HORIZONTAL)\n', (17367, 17392), False, 'from tkinter.ttk import Combobox, Notebook, Separator\n'), ((18772, 18873), 'tkinter.Label', 'Label', (['self'], {'text': 'text_name', 'width': 'self.__label_width', 'anchor': '"""w"""', 'wraplength': '(180)', 'justify': '"""left"""'}), "(self, text=text_name, width=self.__label_width, anchor='w',\n wraplength=180, justify='left')\n", (18777, 18873), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((19823, 19857), 'tkinter.ttk.Separator', 'Separator', (['self'], {'orient': 'HORIZONTAL'}), '(self, orient=HORIZONTAL)\n', (19832, 19857), False, 'from tkinter.ttk import Combobox, Notebook, Separator\n'), ((21358, 21459), 'tkinter.Label', 'Label', (['self'], {'text': 'text_name', 'width': 'self.__label_width', 'anchor': '"""w"""', 'wraplength': '(180)', 'justify': '"""left"""'}), "(self, text=text_name, width=self.__label_width, anchor='w',\n wraplength=180, justify='left')\n", (21363, 21459), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((22245, 22279), 'tkinter.ttk.Separator', 'Separator', (['self'], {'orient': 'HORIZONTAL'}), '(self, orient=HORIZONTAL)\n', (22254, 22279), False, 'from tkinter.ttk import Combobox, Notebook, Separator\n'), ((23346, 23388), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""message_id = {message_id}"""'], {}), "(f'message_id = {message_id}')\n", (23358, 23388), False, 'from automotive.logger.logger import logger\n'), ((25987, 26021), 'tkinter.ttk.Separator', 'Separator', (['self'], {'orient': 'HORIZONTAL'}), '(self, orient=HORIZONTAL)\n', (25996, 26021), False, 'from tkinter.ttk import Combobox, Notebook, Separator\n'), ((27987, 28026), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""sleep {action} seconds"""'], {}), "(f'sleep {action} seconds')\n", (27999, 28026), False, 'from automotive.logger.logger import logger\n'), ((28091, 28108), 'time.sleep', 'sleep', 
(['sleep_time'], {}), '(sleep_time)\n', (28096, 28108), False, 'from time import sleep\n'), ((29383, 29417), 'tkinter.ttk.Separator', 'Separator', (['self'], {'orient': 'HORIZONTAL'}), '(self, orient=HORIZONTAL)\n', (29392, 29417), False, 'from tkinter.ttk import Combobox, Notebook, Separator\n'), ((31281, 31315), 'tkinter.ttk.Separator', 'Separator', (['self'], {'orient': 'HORIZONTAL'}), '(self, orient=HORIZONTAL)\n', (31290, 31315), False, 'from tkinter.ttk import Combobox, Notebook, Separator\n'), ((27859, 27874), 'automotive.logger.logger.logger.error', 'logger.error', (['e'], {}), '(e)\n', (27871, 27874), False, 'from automotive.logger.logger import logger\n'), ((27896, 27933), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""出错了"""', 'f"""【{e}】"""'], {}), "('出错了', f'【{e}】')\n", (27916, 27933), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((23555, 23570), 'automotive.logger.logger.logger.error', 'logger.error', (['e'], {}), '(e)\n', (23567, 23570), False, 'from automotive.logger.logger import logger\n'), ((23596, 23633), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""出错了"""', 'f"""【{e}】"""'], {}), "('出错了', f'【{e}】')\n", (23616, 23633), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((24201, 24254), 'automotive.logger.logger.logger.debug', 'logger.debug', (['f"""change {name} value to {entry_value}"""'], {}), "(f'change {name} value to {entry_value}')\n", (24213, 24254), False, 'from automotive.logger.logger import logger\n'), ((14760, 14815), 'tkinter.messagebox.showerror', 'messagebox.showerror', ([], {'title': '"""失败"""', 'message': '"""请填写需要查询的信号值"""'}), "(title='失败', message='请填写需要查询的信号值')\n", (14780, 14815), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((14506, 14562), 'tkinter.messagebox.showinfo', 'messagebox.showinfo', ([], {'title': 'show_message', 'message': 'message'}), '(title=show_message, message=message)\n', (14525, 14562), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((14607, 14664), 'tkinter.messagebox.showerror', 'messagebox.showerror', ([], {'title': 'show_message', 'message': 'message'}), '(title=show_message, message=message)\n', (14627, 14664), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n'), ((15631, 15705), 'tkinter.messagebox.showerror', 'messagebox.showerror', ([], {'title': '"""失败"""', 'message': 'f"""{signal_name} is not received"""'}), "(title='失败', message=f'{signal_name} is not received')\n", (15651, 15705), False, 'from tkinter import Frame, Button, NORMAL, DISABLED, W, BooleanVar, Checkbutton, Entry, Label, Tk, messagebox, HORIZONTAL, E, PhotoImage, LEFT\n')]
|
# coding=utf-8
#
# pylint: disable = wildcard-import, unused-wildcard-import
# pylint: disable = missing-docstring, invalid-name
# pylint: disable = unused-argument, no-member, attribute-defined-outside-init
# pylint: disable = too-many-lines, too-many-branches, too-many-statements
"""
Copyright (c) 2020, <NAME>. All rights reserved.
license: BSD 3-Clause License, see LICENSE for more details.
"""
from zipfile import is_zipfile as iszip
import pytest
from zm import zipapp
from tests.func_utils import *
class TestIndyCmd(object):
    @pytest.fixture(params=getZmExecutables(), autouse=True)
def allZmExe(self, request):
self.zmExe = zmExes[request.param]
def teardown():
printErrorOnFailed(self, request)
request.addfinalizer(teardown)
def testZipAppCmd(self, tmpdir):
cmdLine = ['zipapp']
self.cwd = str(tmpdir.realpath())
exitcode = runZm(self, cmdLine)[0]
assert exitcode == 0
zipAppPath = joinpath(self.cwd, zipapp.ZIPAPP_NAME)
assert isfile(zipAppPath)
assert iszip(zipAppPath)
def testVersionCmd(self, tmpdir):
cmdLine = ['version']
self.cwd = str(tmpdir.realpath())
exitcode, stdout, _ = runZm(self, cmdLine)
assert exitcode == 0
assert 'version' in stdout
def testSysInfoCmd(self, tmpdir):
cmdLine = ['sysinfo']
self.cwd = str(tmpdir.realpath())
exitcode, stdout, _ = runZm(self, cmdLine)
assert exitcode == 0
assert 'information' in stdout
|
[
"zipfile.is_zipfile"
] |
[((1086, 1103), 'zipfile.is_zipfile', 'iszip', (['zipAppPath'], {}), '(zipAppPath)\n', (1091, 1103), True, 'from zipfile import is_zipfile as iszip\n')]
|
# -*- coding: utf-8 -*-
import json
from django.http import Http404, HttpResponse
from django.views.decorators.csrf import csrf_protect
from django.contrib.auth.decorators import login_required
from django.contrib.auth import logout as lgout, authenticate, login as lgin
from django.shortcuts import render, redirect
from datetime import datetime
from Forum.models import *
from Forum.settings import *
from Forum.forms import *
from Forum.lib import *
from Forum.getInstanceLib import *
from Forum.modelsLib import *
import Forum.signals as signals
from math import ceil
# Create your views here.
def login(request, forum_id, template="Forum/forms/login.html", template_ajax="Forum/forms/ajax/login.html"):
form = None
if request.method == 'POST':
form = FormUserLogin(request.POST)
if form.is_valid():
user = authenticate(username=form.data['username'], password=form.data['password'])
if user:
lgin(request, user)
forum = get_forum_instance(forum_id)
if forum:
return redirect('base_forum', forum_id=forum.local_id)
else:
raise Http404
if not form:
form = FormUserLogin()
c = {
'forum_id':forum_id,
'form': form,
}
if request.is_ajax():
return render(request, template_ajax, c)
else:
return render(request, template, c)
@login_required
def logout(request, forum_id):
lgout(request)
forum = get_forum_instance(forum_id)
if forum:
return redirect('base_forum', forum_id=forum.local_id)
raise Http404
def forum(request, forum_id, page=1, template=MAIN_FORUM_TEMPLATE):
forum = get_forum_instance(forum_id)
if forum:
subforum_slug = forum.main_forum.slug()
return subforum(request, forum_id, 0, subforum_slug, page, template=template)
raise Http404
def subforum(request, forum_id, subforum_id, subforum_slug, page=1, template=SUBFORUM_TEMPLATE):
forum = get_forum_instance(forum_id)
if forum:
subforum = get_subforum_instance(forum, subforum_id)
if subforum:
if not check_slug(subforum, subforum_slug):
if page == 1:
return redirect('Forum.views.subforum', forum_id=forum_id, subforum_id=subforum_id, subforum_slug=subforum.slug())
else:
return redirect('Forum.views.subforum', forum_id=forum_id, subforum_id=subforum_id, subforum_slug=subforum.slug(), page=page)
if subforum.canView(request.user):
is_mod = subforum.canModerate(request.user)
can_create_thread = subforum.canCreateThread(request.user)
subforum_list = []
for sf in subforum.child_set.order_by('local_id'):
if sf.canView(request.user):
sf.is_visited = sf.isVisited(request.user)
subforum_list.append(sf)
sf_th_set = subforum.thread_set.order_by('-pinned', '-last_publication_datetime', 'name')
if not subforum.canModerate(request.user):
sf_th_set = sf_th_set.exclude(hidden=True)
thread_list = []
for th in sf_th_set:
th.is_visited = th.isVisited(request.user)
thread_list.append(th)
                page = int(page) - 1
subforum_num_pages = int(ceil(float(len(thread_list))/float(forum.threads_per_page)))
if (subforum_num_pages > page and 0 <= page) or subforum_num_pages == 0:
c = {
'forum_id':forum_id,
'forum': subforum,
'subforum_list':subforum_list,
'thread_list':thread_list[(page*forum.threads_per_page):(page*forum.threads_per_page)+forum.threads_per_page],
'subforum_current_page':page+1,
'subforum_pages':range(max(page, 1), min(page+3, subforum_num_pages+1)),
'is_admin':user_has_permission(forum.admin_permission, request.user),
'is_moderator': is_mod,
'can_create_thread':can_create_thread and request.user.is_authenticated(),
}
return render(request, template, c)
else:
c = {
'forum_id':forum_id,
}
return render(request, CANT_VIEW_CONTENT, c)
raise Http404
@login_required
@csrf_protect
def newSubforum(request, forum_id, subforum_id, subforum_slug, template=FORM_TEMPLATE):
check_user_is_spamming(request.user)
forum = get_forum_instance(forum_id)
if forum:
subforum = get_subforum_instance(forum, subforum_id)
if subforum:
if not check_slug(subforum, subforum_slug):
return redirect('Forum.views.newSubforum', forum_id=forum_id, subforum_id=subforum_id, subforum_slug=subforum.slug())
if forum.canAdministrate(request.user):
if request.method == 'POST':
new_subforum_form = Subforum(forum=forum)
new_subforum_form = FormSubforum(request.POST, instance=new_subforum_form)
if new_subforum_form.is_valid():
new_subforum = new_subforum_form.save(commit=False)
new_subforum.local_id = forum.subforum_set.count()
new_subforum.parent = subforum
new_subforum.forum = forum
new_subforum.creator = request.user
new_subforum.save()
return redirect('subforum', forum_id=forum_id, subforum_id=new_subforum.local_id, subforum_slug=new_subforum.slug())
else:
new_subforum = Subforum(
forum = subforum.forum,
view_permission = subforum.view_permission,
mod_permission = subforum.mod_permission,
create_thread_permission = subforum.create_thread_permission,
reply_thread_permission = subforum.reply_thread_permission,
)
new_subforum_form = FormSubforum(instance=new_subforum)
c = {
'forum_id':forum_id,
'form': new_subforum_form,
'page_title': 'Create Subforum',
'title': 'Create Subforum',
'submit_btn_text': 'Create',
}
return render(request, template, c)
else:
c = {
'forum_id':forum_id,
}
return render(request, CANT_VIEW_CONTENT, c)
raise Http404
def thread(request, forum_id, thread_id, thread_slug, page=1, template=THREAD_TEMPLATE):
forum = get_forum_instance(forum_id)
if forum:
thread = get_thread_instance(forum, thread_id)
if thread:
if not check_slug(thread, thread_slug):
if page == 1:
return redirect('Forum.views.thread', forum_id=forum_id, thread_id=thread_id, thread_slug=thread.slug())
else:
return redirect('Forum.views.thread', forum_id=forum_id, thread_id=thread_id, thread_slug=thread.slug(), page=page)
subforum = thread.parent
is_mod = subforum.canModerate(request.user)
if subforum.canView(request.user) and (not thread.hidden or is_mod):
can_post = subforum.canReplyThread(request.user)
post_list = []
unfiltered_post_list = thread.post_set.order_by('local_id')
if not subforum.canModerate(request.user):
unfiltered_post_list = unfiltered_post_list.exclude(hidden=True)
for pt in unfiltered_post_list:
if request.user.is_authenticated():
pt.is_quoted = get_quote_instance(request.user, pt)
pt.vote = get_vote_instance(request.user, pt)
post_list.append(pt)
if request.user.is_authenticated() and thread.poll_set.count() and thread.poll_set.first().userCanVote(request.user):
poll = thread.poll_set.first()
else:
poll = None
                page = int(page) - 1
thread_num_pages = int(ceil(float(len(post_list))/float(forum.posts_per_page)))
if thread_num_pages > page and 0 <= page:
set_visit(thread, request.user)
thread.visit_counter += 1
thread.save()
c = {
'forum_id':forum_id,
'thread': thread,
'post_list':post_list[(page*forum.posts_per_page):(page*forum.posts_per_page)+forum.posts_per_page],
'thread_current_page':page+1,
'thread_pages':range(max(page, 1), min(page+3, thread_num_pages+1)),
'is_moderator': is_mod,
'is_admin':forum.canAdministrate(request.user),
'can_post':can_post and request.user.is_authenticated() and (not thread.closed or is_mod),
'poll': poll,
}
return render(request, template, c)
else:
c = {
'forum_id':forum_id,
}
return render(request, CANT_VIEW_CONTENT, c)
raise Http404
def threadLastPage(request, forum_id, thread_id, thread_slug):
forum = get_forum_instance(forum_id)
if forum:
thread = get_thread_instance(forum, thread_id)
if thread:
if not check_slug(thread, thread_slug):
return redirect('Forum.views.threadLastPage', forum_id=forum_id, thread_id=thread_id, thread_slug=thread.slug())
subforum = thread.parent
post_list = []
unfiltered_post_list = thread.post_set.order_by('local_id')
for pt in unfiltered_post_list:
if (not pt.hidden) or subforum.canModerate(request.user):
post_list.append(pt)
thread_num_pages = int(ceil(float(len(post_list))/float(forum.posts_per_page)))
page = thread_num_pages
return redirect('Forum.views.thread', forum_id=forum_id, thread_id=thread.local_id, thread_slug=thread.slug(), page=page)
raise Http404
@csrf_protect
def saveThreadSettings(request, forum_id, thread_id, thread_slug, template="Forum/forms/thread_settings.html"):
forum = get_forum_instance(forum_id)
if forum:
thread = get_thread_instance(forum, thread_id)
if thread:
if not check_slug(thread, thread_slug):
return redirect('Forum.views.saveThreadSettings', forum_id=forum_id, thread_id=thread_id, thread_slug=thread.slug())
if thread.parent.canModerate(request.user):
            if request.method == 'POST':
form = FormThreadSettings(request.POST, instance=thread)
if form.is_valid():
thread.save()
return redirect('Forum.views.thread', forum_id=forum_id, thread_id=thread_id, thread_slug=thread.slug())
else:
form = FormThreadSettings(instance=thread)
c = {
'forum_id':forum_id,
'form': form,
'thread': thread,
}
return render(request, template, c)
raise Http404
@login_required
def firstPostUnreadThread(request, forum_id, thread_id, thread_slug):
forum = get_forum_instance(forum_id)
if forum:
thread = get_thread_instance(forum, thread_id)
if thread:
if not check_slug(thread, thread_slug):
return redirect('Forum.views.firstPostUnreadThread', forum_id=forum_id, thread_id=thread_id, thread_slug=thread.slug())
last_visit = get_last_visit_instance(request.user, thread)
if last_visit:
last_post = Post.objects.order_by('publication_datetime').filter(thread=thread, publication_datetime__gt=last_visit.datetime).first()
if last_post:
return redirect('Forum.views.post', forum_id=forum_id, post_id=last_post.local_id)
print("shiet")
return redirect('Forum.views.post', forum_id=forum.local_id, post_id=thread.getLastPublishedPost().local_id)
raise Http404
@login_required
@csrf_protect
def newThread(request, forum_id, subforum_id, subforum_slug, template="Forum/forms/thread.html"):
check_user_is_spamming(request.user)
forum = get_forum_instance(forum_id)
if forum:
subforum = get_subforum_instance(forum, subforum_id)
if subforum:
if not check_slug(subforum, subforum_slug):
return redirect('Forum.views.newThread', forum_id=forum_id, subforum_id=subforum_id, subforum_slug=subforum.slug())
if subforum.canCreateThread(request.user):
if request.method == 'POST':
new_post = Post(publisher=request.user)
new_post_form = FormNewThread(request.POST, instance=new_post)
if new_post_form.is_valid():
new_post = new_post_form.save(commit=False)
new_post.local_id = forum.post_set.count()
new_thread = Thread(
local_id=forum.thread_set.count(),
name=new_post.title,
parent=subforum,
forum=forum,
creator=request.user,
last_publication_datetime=datetime.now(),
hidden=new_post.hidden,
)
new_thread.save()
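                        # Optionally attach a poll: collect the non-empty
                        # poll-option[i] fields from the form; the poll is only
                        # created when at least two options were provided.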
if request.POST.get("add_poll", "False") == "True" and request.POST.get("question", "") != "":
rang = range(0, int(request.POST.get("poll_option_count", "2")))
question = request.POST.get("question")
option_list = []
for i in rang:
opt = request.POST.get("poll-option["+str(i)+"]", "")
if opt != "":
option_list.append(opt)
if len(option_list) >= 2:
new_thread.setPoll(question, option_list)
new_post.hidden=False
new_post.forum=forum
new_post.thread=new_thread
new_post.save()
# Send new thread signal
signals.thread_published.send(sender=forum, thread=new_thread)
return redirect('Forum.views.thread', forum_id=forum_id, thread_id=new_thread.local_id, thread_slug=new_thread.slug())
else:
new_post = Post()
new_post_form = FormNewThread(instance=new_post)
c = {
'forum_id':forum_id,
'form': new_post_form,
'page_title': 'New Thread',
'title': 'New Thread',
'submit_btn_text': 'Create',
}
return render(request, template, c)
else:
c = {
'forum_id':forum_id,
}
return render(request, CANT_VIEW_CONTENT, c)
raise Http404
@login_required
@csrf_protect
def replyThread(request, forum_id, thread_id, thread_slug, template="Forum/forms/post.html", template_ajax="Forum/forms/ajax/post.html"):
check_user_is_spamming(request.user)
forum = get_forum_instance(forum_id)
if forum:
thread = get_thread_instance(forum, thread_id)
if thread and (not thread.closed or thread.parent.canModerate(request.user)):
if not check_slug(thread, thread_slug):
                return redirect('Forum.views.replyThread', forum_id=forum_id, thread_id=thread_id, thread_slug=thread.slug())
if thread.parent.canReplyThread(request.user) and request.user.is_authenticated():
if request.method == 'POST':
new_post = Post(publisher=request.user)
if thread.parent.canModerate(request.user):
new_post_form = FormPost_Mod(request.POST, instance=new_post)
else:
new_post_form = FormPost(request.POST, instance=new_post)
if new_post_form.is_valid():
new_post = new_post_form.save(commit=False)
new_post.local_id = forum.post_set.count()
new_post.forum=forum
new_post.thread=thread
new_post.save()
# Send signal new post published
signals.post_published.send(sender=forum, post=new_post)
thread.last_publication_datetime=new_post.publication_datetime
thread.save()
quote_list = Quote.objects.filter(user=request.user, thread=thread)
for quote in quote_list:
quote.delete()
return redirect('Forum.views.post', forum_id=forum_id, post_id=new_post.local_id)
else:
new_post = Post()
quotes_text = ""
quote_list = Quote.objects.filter(user=request.user, thread=thread)
for quote in quote_list:
quotes_text += "[quote="+quote.post.publisher.username+"]"+quote.post.content+"[/quote]\n\n"
new_post.content = quotes_text
if thread.parent.canModerate(request.user):
new_post_form = FormPost_Mod(instance=new_post)
else:
new_post_form = FormPost(instance=new_post)
if request.is_ajax():
template = template_ajax
c = {
'forum_id':forum_id,
'form': new_post_form,
'thread':thread,
}
else:
c = {
'forum_id':forum_id,
'form': new_post_form,
'page_title': 'Reply Thread',
'title': 'Reply Thread',
'submit_btn_text': 'Send',
}
return render(request, template, c)
else:
c = {
'forum_id':forum_id,
}
return render(request, CANT_VIEW_CONTENT, c)
raise Http404
@login_required
@csrf_protect
def voteThreadPoll(request, forum_id, thread_id, thread_slug):
forum = get_forum_instance(forum_id)
if forum:
thread = get_thread_instance(forum, thread_id)
if thread:
if not check_slug(thread, thread_slug):
return redirect('Forum.views.voteThreadPoll', forum_id=forum_id, thread_id=thread_id, thread_slug=thread.slug())
subforum = thread.parent
is_mod = subforum.canModerate(request.user)
if subforum.canView(request.user) and (not thread.hidden or is_mod):
if thread.poll:
if thread.poll.userCanVote(request.user) and request.method == 'POST':
answer = request.POST.get("poll_answer", False)
if answer:
thread.poll.vote(request.user, answer)
return redirect('Forum.views.thread', forum_id=forum_id, thread_id=thread_id, thread_slug=thread.slug())
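# Resolve a post's forum-wide id to the thread page that contains it and
# redirect there, so post permalinks stay valid as posts_per_page changes.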
def post(request, forum_id, post_id):
forum = get_forum_instance(forum_id)
if forum:
post = get_post_instance(forum, post_id)
if post:
thread = post.thread
post_list = thread.post_set.order_by('local_id')
num = 0
found = False
for pt in post_list:
if pt == post:
found = True
break
num += 1
if found:
                page = (num // forum.posts_per_page) + 1
return redirect('Forum.views.thread', forum_id=forum_id, thread_id=post.thread.local_id, thread_slug=post.thread.slug(), page=page, post_id=post_id)
raise Http404
@login_required
@csrf_protect
def editPost(request, forum_id, post_id, template="Forum/forms/edit_post.html", template_ajax="Forum/forms/ajax/edit_post.html"):
check_user_is_spamming(request.user)
forum = get_forum_instance(forum_id)
if forum:
post = get_post_instance(forum, post_id)
if post and post.thread.parent.canView(request.user):
post_old_title = post.title
post_old_content = post.content
if request.method == 'POST':
if post.thread.parent.canModerate(request.user):
edit_post_form = FormPost_Mod(request.POST, instance=post)
else:
edit_post_form = FormPost(request.POST, instance=post)
if edit_post_form.is_valid():
post_edited = PostEdited(
post=post,
user=request.user,
datetime=datetime.now(),
reason='',
old_title=post_old_title,
old_content=post_old_content,
user_is_moderator = post.thread.parent.canModerate(request.user),
user_is_administrator = forum.canAdministrate(request.user),
)
post = edit_post_form.save(commit=False)
if post.thread.post_set.first() == post:
if post.title == "":
post.title = post_old_title
post.thread.name = post.title
post.thread.save()
post_edited.save()
post.save()
return redirect('Forum.views.post', forum_id=forum_id, post_id=post.local_id)
else:
if post.thread.parent.canModerate(request.user):
edit_post_form = FormPost_Mod(instance=post)
elif post.publisher == request.user:
edit_post_form = FormPost(instance=post)
else:
c = {
'forum_id':forum_id,
}
return render(request, CANT_VIEW_CONTENT, c)
c = {
'forum_id':forum_id,
'form': edit_post_form,
'post':post,
'user_is_mod':user_has_permission(post.thread.parent.mod_permission, request.user),
}
if request.is_ajax():
return render(request, template_ajax, c)
else:
return render(request, template, c)
raise Http404
@login_required
@csrf_protect
def reportPost(request, forum_id, post_id, template="Forum/forms/report_post.html", template_ajax="Forum/forms/ajax/report_post.html"):
check_user_is_spamming(request.user)
forum = get_forum_instance(forum_id)
if forum:
post = get_post_instance(forum, post_id)
if post and post.thread.parent.canView(request.user):
if request.method == 'POST':
report_post_form = FormReportPost(request.POST)
if report_post_form.is_valid():
report_post = report_post_form.save(commit=False)
report_post.user = request.user
report_post.post = post
report_post.save()
return redirect('Forum.views.post', forum_id=forum_id, post_id=post.local_id)
else:
report_post_form = FormReportPost()
c = {
'forum_id':forum_id,
'form': report_post_form,
'post': post,
}
if request.is_ajax():
return render(request, template_ajax, c)
else:
return render(request, template, c)
raise Http404
@login_required
def quotePost(request, forum_id, post_id):
forum = get_forum_instance(forum_id)
if forum:
post = get_post_instance(forum, post_id)
if post and post.thread.parent.canView(request.user):
quote = get_quote_instance(request.user, post)
response_data = {}
if quote:
quote.delete()
response_data['action'] = 'removed'
else:
Quote(user=request.user, post=post, thread=post.thread).save()
response_data['action'] = 'added'
return HttpResponse(json.dumps(response_data), content_type="application/json")
raise Http404
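# The two vote views below toggle the requesting user's Up/Down vote on a post
# and emit the positive/negative score-event signals once a post's score first
# crosses the forum's configured threshold.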
@login_required
def votePostUp(request, forum_id, post_id):
forum = get_forum_instance(forum_id)
if forum and forum.allow_up_votes:
post = get_post_instance(forum, post_id)
if post and post.thread.parent.canView(request.user):
vote = get_vote_instance(request.user, post)
response_data = {}
if vote:
if vote.type == "Up":
vote.delete()
response_data['action'] = 'removed'
else:
vote.type = "Up"
vote.save()
response_data['action'] = 'added'
else:
Vote(user=request.user, post=post, type="Up").save()
response_data['action'] = 'added'
# Send signal
signals.upvote.send(sender=forum, user=request.user, post=post)
if not post.score_event_sent and post.score() >= forum.positive_score_event:
post.score_event_sent = True
post.save()
signals.positive_score_event.send(sender=forum, post=post)
response_data['score'] = post.score()
return HttpResponse(json.dumps(response_data), content_type="application/json")
raise Http404
@login_required
def votePostDown(request, forum_id, post_id):
forum = get_forum_instance(forum_id)
if forum and forum.allow_down_votes:
post = get_post_instance(forum, post_id)
if post and post.thread.parent.canView(request.user):
vote = get_vote_instance(request.user, post)
response_data = {}
if vote:
if vote.type == "Down":
vote.delete()
response_data['action'] = 'removed'
elif vote.type == "Up":
vote.type = "Down"
vote.save()
response_data['action'] = 'added'
else:
Vote(user=request.user, post=post, type="Down").save()
response_data['action'] = 'added'
# Send signal
signals.downvote.send(sender=forum, user=request.user, post=post)
if not post.score_event_sent and post.score() <= forum.negative_score_event:
post.score_event_sent = True
post.save()
signals.negative_score_event.send(sender=forum, post=post)
response_data['score'] = post.score()
return HttpResponse(json.dumps(response_data), content_type="application/json")
raise Http404
|
[
"Forum.signals.positive_score_event.send",
"Forum.signals.downvote.send",
"Forum.signals.upvote.send",
"Forum.signals.thread_published.send",
"django.shortcuts.redirect",
"Forum.signals.negative_score_event.send",
"json.dumps",
"datetime.datetime.now",
"django.contrib.auth.logout",
"Forum.signals.post_published.send",
"django.contrib.auth.authenticate",
"django.shortcuts.render",
"django.contrib.auth.login"
] |
[((1330, 1344), 'django.contrib.auth.logout', 'lgout', (['request'], {}), '(request)\n', (1335, 1344), True, 'from django.contrib.auth import logout as lgout, authenticate, login as lgin\n'), ((1201, 1234), 'django.shortcuts.render', 'render', (['request', 'template_ajax', 'c'], {}), '(request, template_ajax, c)\n', (1207, 1234), False, 'from django.shortcuts import render, redirect\n'), ((1251, 1279), 'django.shortcuts.render', 'render', (['request', 'template', 'c'], {}), '(request, template, c)\n', (1257, 1279), False, 'from django.shortcuts import render, redirect\n'), ((1403, 1450), 'django.shortcuts.redirect', 'redirect', (['"""base_forum"""'], {'forum_id': 'forum.local_id'}), "('base_forum', forum_id=forum.local_id)\n", (1411, 1450), False, 'from django.shortcuts import render, redirect\n'), ((821, 897), 'django.contrib.auth.authenticate', 'authenticate', ([], {'username': "form.data['username']", 'password': "form.data['password']"}), "(username=form.data['username'], password=form.data['password'])\n", (833, 897), False, 'from django.contrib.auth import logout as lgout, authenticate, login as lgin\n'), ((914, 933), 'django.contrib.auth.login', 'lgin', (['request', 'user'], {}), '(request, user)\n', (918, 933), True, 'from django.contrib.auth import logout as lgout, authenticate, login as lgin\n'), ((3750, 3787), 'django.shortcuts.render', 'render', (['request', 'CANT_VIEW_CONTENT', 'c'], {}), '(request, CANT_VIEW_CONTENT, c)\n', (3756, 3787), False, 'from django.shortcuts import render, redirect\n'), ((5439, 5467), 'django.shortcuts.render', 'render', (['request', 'template', 'c'], {}), '(request, template, c)\n', (5445, 5467), False, 'from django.shortcuts import render, redirect\n'), ((5532, 5569), 'django.shortcuts.render', 'render', (['request', 'CANT_VIEW_CONTENT', 'c'], {}), '(request, CANT_VIEW_CONTENT, c)\n', (5538, 5569), False, 'from django.shortcuts import render, redirect\n'), ((7732, 7769), 'django.shortcuts.render', 'render', (['request', 'CANT_VIEW_CONTENT', 'c'], {}), '(request, CANT_VIEW_CONTENT, c)\n', (7738, 7769), False, 'from django.shortcuts import render, redirect\n'), ((12478, 12506), 'django.shortcuts.render', 'render', (['request', 'template', 'c'], {}), '(request, template, c)\n', (12484, 12506), False, 'from django.shortcuts import render, redirect\n'), ((12571, 12608), 'django.shortcuts.render', 'render', (['request', 'CANT_VIEW_CONTENT', 'c'], {}), '(request, CANT_VIEW_CONTENT, c)\n', (12577, 12608), False, 'from django.shortcuts import render, redirect\n'), ((14956, 14984), 'django.shortcuts.render', 'render', (['request', 'template', 'c'], {}), '(request, template, c)\n', (14962, 14984), False, 'from django.shortcuts import render, redirect\n'), ((15056, 15093), 'django.shortcuts.render', 'render', (['request', 'CANT_VIEW_CONTENT', 'c'], {}), '(request, CANT_VIEW_CONTENT, c)\n', (15062, 15093), False, 'from django.shortcuts import render, redirect\n'), ((18352, 18385), 'django.shortcuts.render', 'render', (['request', 'template_ajax', 'c'], {}), '(request, template_ajax, c)\n', (18358, 18385), False, 'from django.shortcuts import render, redirect\n'), ((18406, 18434), 'django.shortcuts.render', 'render', (['request', 'template', 'c'], {}), '(request, template, c)\n', (18412, 18434), False, 'from django.shortcuts import render, redirect\n'), ((19327, 19360), 'django.shortcuts.render', 'render', (['request', 'template_ajax', 'c'], {}), '(request, template_ajax, c)\n', (19333, 19360), False, 'from django.shortcuts import render, redirect\n'), ((19381, 
19409), 'django.shortcuts.render', 'render', (['request', 'template', 'c'], {}), '(request, template, c)\n', (19387, 19409), False, 'from django.shortcuts import render, redirect\n'), ((19914, 19939), 'json.dumps', 'json.dumps', (['response_data'], {}), '(response_data)\n', (19924, 19939), False, 'import json\n'), ((20605, 20668), 'Forum.signals.upvote.send', 'signals.upvote.send', ([], {'sender': 'forum', 'user': 'request.user', 'post': 'post'}), '(sender=forum, user=request.user, post=post)\n', (20624, 20668), True, 'import Forum.signals as signals\n'), ((20929, 20954), 'json.dumps', 'json.dumps', (['response_data'], {}), '(response_data)\n', (20939, 20954), False, 'import json\n'), ((21648, 21713), 'Forum.signals.downvote.send', 'signals.downvote.send', ([], {'sender': 'forum', 'user': 'request.user', 'post': 'post'}), '(sender=forum, user=request.user, post=post)\n', (21669, 21713), True, 'import Forum.signals as signals\n'), ((21974, 21999), 'json.dumps', 'json.dumps', (['response_data'], {}), '(response_data)\n', (21984, 21999), False, 'import json\n'), ((1001, 1048), 'django.shortcuts.redirect', 'redirect', (['"""base_forum"""'], {'forum_id': 'forum.local_id'}), "('base_forum', forum_id=forum.local_id)\n", (1009, 1048), False, 'from django.shortcuts import render, redirect\n'), ((3657, 3685), 'django.shortcuts.render', 'render', (['request', 'template', 'c'], {}), '(request, template, c)\n', (3663, 3685), False, 'from django.shortcuts import render, redirect\n'), ((7639, 7667), 'django.shortcuts.render', 'render', (['request', 'template', 'c'], {}), '(request, template, c)\n', (7645, 7667), False, 'from django.shortcuts import render, redirect\n'), ((9468, 9496), 'django.shortcuts.render', 'render', (['request', 'template', 'c'], {}), '(request, template, c)\n', (9474, 9496), False, 'from django.shortcuts import render, redirect\n'), ((10125, 10200), 'django.shortcuts.redirect', 'redirect', (['"""Forum.views.post"""'], {'forum_id': 'forum_id', 'post_id': 'last_post.local_id'}), "('Forum.views.post', forum_id=forum_id, post_id=last_post.local_id)\n", (10133, 10200), False, 'from django.shortcuts import render, redirect\n'), ((17762, 17832), 'django.shortcuts.redirect', 'redirect', (['"""Forum.views.post"""'], {'forum_id': 'forum_id', 'post_id': 'post.local_id'}), "('Forum.views.post', forum_id=forum_id, post_id=post.local_id)\n", (17770, 17832), False, 'from django.shortcuts import render, redirect\n'), ((19080, 19150), 'django.shortcuts.redirect', 'redirect', (['"""Forum.views.post"""'], {'forum_id': 'forum_id', 'post_id': 'post.local_id'}), "('Forum.views.post', forum_id=forum_id, post_id=post.local_id)\n", (19088, 19150), False, 'from django.shortcuts import render, redirect\n'), ((20806, 20864), 'Forum.signals.positive_score_event.send', 'signals.positive_score_event.send', ([], {'sender': 'forum', 'post': 'post'}), '(sender=forum, post=post)\n', (20839, 20864), True, 'import Forum.signals as signals\n'), ((21851, 21909), 'Forum.signals.negative_score_event.send', 'signals.negative_score_event.send', ([], {'sender': 'forum', 'post': 'post'}), '(sender=forum, post=post)\n', (21884, 21909), True, 'import Forum.signals as signals\n'), ((12021, 12083), 'Forum.signals.thread_published.send', 'signals.thread_published.send', ([], {'sender': 'forum', 'thread': 'new_thread'}), '(sender=forum, thread=new_thread)\n', (12050, 12083), True, 'import Forum.signals as signals\n'), ((13780, 13836), 'Forum.signals.post_published.send', 'signals.post_published.send', ([], {'sender': 'forum', 
'post': 'new_post'}), '(sender=forum, post=new_post)\n', (13807, 13836), True, 'import Forum.signals as signals\n'), ((14066, 14140), 'django.shortcuts.redirect', 'redirect', (['"""Forum.views.post"""'], {'forum_id': 'forum_id', 'post_id': 'new_post.local_id'}), "('Forum.views.post', forum_id=forum_id, post_id=new_post.local_id)\n", (14074, 14140), False, 'from django.shortcuts import render, redirect\n'), ((18101, 18138), 'django.shortcuts.render', 'render', (['request', 'CANT_VIEW_CONTENT', 'c'], {}), '(request, CANT_VIEW_CONTENT, c)\n', (18107, 18138), False, 'from django.shortcuts import render, redirect\n'), ((17246, 17260), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (17258, 17260), False, 'from datetime import datetime\n'), ((11327, 11341), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (11339, 11341), False, 'from datetime import datetime\n')]
|
#!/usr/bin/env python
# coding: utf-8
# # Challenge 4
#
# In this challenge we practice hypothesis testing. We will use the [2016 Olympics in Rio de Janeiro](https://www.kaggle.com/rio2016/olympic-games/) _data set_, which contains data about the athletes of the 2016 Olympic Games in Rio de Janeiro.
#
# This _data set_ provides general information about 11538 athletes, such as name, nationality, height, weight and sport. We are especially interested in the numerical variables height (`height`) and weight (`weight`). The analyses done here are part of an Exploratory Data Analysis (EDA).
#
# > Note: please do not change the names of the answer functions.
# ## General _setup_
# In[1]:
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import scipy.stats as sct
import seaborn as sns
import statsmodels.api as sm
# In[2]:
#%matplotlib inline
from IPython.core.pylabtools import figsize
figsize(12, 8)
sns.set()
# In[3]:
athletes = pd.read_csv("athletes.csv")
# In[4]:
athletes.info()
# In[5]:
athletes.head()
# In[6]:
athletes[['height','weight']].describe()
# In[7]:
athletes[['height','weight']].hist()
# In[8]:
def get_sample(df, col_name, n=100, seed=42):
"""Get a sample from a column of a dataframe.
It drops any numpy.nan entries before sampling. The sampling
is performed without replacement.
Example of numpydoc for those who haven't seen yet.
Parameters
----------
df : pandas.DataFrame
Source dataframe.
col_name : str
Name of the column to be sampled.
n : int
Sample size. Default is 100.
seed : int
Random seed. Default is 42.
Returns
-------
pandas.Series
Sample of size n from dataframe's column.
"""
np.random.seed(seed)
    random_idx = np.random.choice(df[col_name].dropna().index, size=n, replace=False)  # array with the sampled row indices
    return df.loc[random_idx, col_name]  # Series with the index and values of the sampled column
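# For example, get_sample(athletes, 'height', n=3000) yields a pandas.Series
# with 3000 non-null heights sampled without replacement (seed=42 by default).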
# ## Start your analysis from here
# In[9]:
# Your analysis starts here.
# ## Question 1
#
# Considering a sample of size 3000 from the `height` column obtained with the `get_sample()` function, run the Shapiro-Wilk normality test with the `scipy.stats.shapiro()` function. Based on this test, can we state that the heights are normally distributed (at the 5% significance level)? Answer with a boolean (`True` or `False`).
# In[10]:
def q1():
amostra_q1 = get_sample(athletes,'height', n=3000, seed=42)
stat, p = sct.shapiro(amostra_q1)
    print('stat= {}, p={}'.format(stat, p))
    return bool(p > 0.05)
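# Decision rule used throughout q1-q4: fail to reject H0 (normality) when the
# test's p-value exceeds the 5% significance level, i.e. return p > 0.05.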
# In[11]:
q1()
# __Food for thought__:
#
# * Plot the histogram of this variable (with, for example, `bins=25`). Are the shape of the plot and the test result consistent? Why?
# * Plot the qq-plot for this variable and analyse it.
# * Is there any reasonable significance level that would give us a different test result? (Do not do this in practice. It is called _p-value hacking_, and it is not cool.)
# In[12]:
amostra_q1 = get_sample(athletes,'height', n=3000, seed=42)
# In[13]:
sns.distplot(amostra_q1, bins=25, hist_kws={"density": True})
plt.show()
# In[14]:
sm.qqplot(amostra_q1, fit=True, line="45")
plt.show()
# In[15]:
amostra_q1 = get_sample(athletes,'height', n=3000, seed=42)
stat, p = sct.shapiro(amostra_q1)
p > 0.0000001
# ## Question 2
#
# Repeat the procedure above, but now using the Jarque-Bera normality test via the `scipy.stats.jarque_bera()` function. Can we now state that the heights are normally distributed (at the 5% significance level)? Answer with a boolean (`True` or `False`).
# In[16]:
def q2():
amostra_q2 = get_sample(athletes,'height', n=3000, seed=42)
stat, p = sct.jarque_bera(amostra_q2)
    print('stat = {}, p = {}'.format(stat, p))
    return bool(p > 0.05)
# In[17]:
q2()
# __Food for thought__:
#
# * Does this result make sense?
# In[18]:
amostra_q2 = get_sample(athletes,'height', n=3000, seed=42)
sm.qqplot(amostra_q2, fit=True, line="45")
plt.show()
# ## Questão 3
#
# Now consider a sample of size 3000 from the `weight` column obtained with the `get_sample()` function. Run the D'Agostino-Pearson normality test using the `scipy.stats.normaltest()` function. Can we claim that the weights come from a normal distribution at a 5% significance level? Answer with a boolean (`True` or `False`).
# In[19]:
def q3():
amostra_q3 = get_sample(athletes,'weight', n=3000, seed=42)
stat, p = sct.normaltest(amostra_q3)
    print('stat = {}, p = {}'.format(stat, p))
    return bool(p > 0.05)
# In[20]:
q3()
# __Food for thought__:
#
# * Plot the histogram of this variable (with, for example, `bins=25`). Are the shape of the plot and the test result consistent? Why?
# * A _box plot_ could also help in understanding the answer.
# In[21]:
amostra_q3 = get_sample(athletes,'weight', n=3000, seed=42)
sns.distplot(amostra_q3, bins=25, hist_kws={"density": True})
plt.show()
# In[22]:
sns.boxplot(data=amostra_q3)
# ## Questão 4
#
# Apply a logarithmic transformation to the `weight` sample from question 3 and repeat the same procedure. Can we claim normality of the transformed variable at a 5% significance level? Answer with a boolean (`True` or `False`).
# In[23]:
def q4():
amostra_q4 = get_sample(athletes,'weight', n=3000, seed=42)
amostra_q4_transformada = np.log(amostra_q4)
stat, p = sct.normaltest(amostra_q4_transformada)
    print('stat = {}, p = {}'.format(stat, p))
    return bool(p > 0.05)
# In[24]:
q4()
# __Food for thought__:
#
# * Plot the histogram of this variable (with, for example, `bins=25`). Are the shape of the plot and the test result consistent? Why?
# * Were you expecting a different result this time?
# In[25]:
amostra_q4 = get_sample(athletes,'weight', n=3000, seed=42)
amostra_q4_transformada = np.log(amostra_q4)
sns.distplot(amostra_q4_transformada, bins=25, hist_kws={"density": True})
plt.show()
# In[26]:
sns.boxplot(data=amostra_q4_transformada)
# > __For questions 5, 6 and 7 below, consider all tests performed at a 5% significance level__.
# ## Questão 5
#
# Get all Brazilian, American and Canadian athletes in `DataFrame`s named `bra`, `usa` and `can`, respectively. Run a hypothesis test comparing the mean heights (`height`) for independent samples with unequal variances, using the `scipy.stats.ttest_ind()` function, between `bra` and `usa`. Can we claim that the means are statistically equal? Answer with a boolean (`True` or `False`).
# In[27]:
athletes.columns
# In[45]:
athletes[(athletes.nationality == 'BRA') | (athletes.nationality == 'USA') | (athletes.nationality == 'CAN')]
# In[28]:
bra = athletes[athletes.nationality == 'BRA']
usa = athletes[athletes.nationality == 'USA']
can = athletes[athletes.nationality == 'CAN']
# In[29]:
bra['height'].describe()
# In[30]:
bra.isna().sum()
# In[31]:
usa['height'].describe()
# In[32]:
usa.isna().sum()
# In[46]:
can['height'].describe()
# In[47]:
can.isna().sum()
# In[33]:
def q5():
    stat, p = sct.ttest_ind(bra['height'], usa['height'], equal_var=False, nan_policy='omit')  # equal_var=False runs Welch's t-test, which does not assume equal population variances
    print('stat = {}, p = {}'.format(stat, p))
    return bool(p > 0.05)
# In[34]:
q5()
# In[35]:
sns.distplot(bra['height'], bins=25, hist=False, rug=True, label='BRA')
sns.distplot(usa['height'], bins=25, hist=False, rug=True, label='USA')
# ## Questão 6
#
# Repeat the procedure from question 5, but now between the heights of `bra` and `can`. Can we now claim that the means are statistically equal? Answer with a boolean (`True` or `False`).
# In[48]:
def q6():
    stat, p = sct.ttest_ind(bra['height'], can['height'], equal_var=False, nan_policy='omit')  # equal_var=False runs Welch's t-test, which does not assume equal population variances
    print('stat = {}, p = {}'.format(stat, p))
    return bool(p > 0.05)
# In[49]:
q6()
# In[50]:
sns.distplot(bra['height'], bins=25, hist=False, rug=True, label='BRA')
sns.distplot(can['height'], bins=25, hist=False, rug=True, label='CAN')
# ## Questão 7
#
# Repeat the procedure from question 6, but now between the heights of `usa` and `can`. What is the returned p-value? Answer as a single scalar rounded to eight decimal places.
# In[87]:
def q7():
    stat, p = sct.ttest_ind(usa['height'], can['height'], equal_var=False, nan_policy='omit')  # equal_var=False runs Welch's t-test, which does not assume equal population variances
    print('stat = {}, p = {}'.format(stat, p))
if p > 0.05:
print('Probably the same distribution')
else:
print('Probably different distributions')
return float(np.round(p, 8))
# In[88]:
q7()
# __Food for thought__:
#
# * Does the result make sense?
# * Can you interpret this p-value?
# * Can you get to this p-value from the test statistic?
# In[72]:
stat, p = sct.ttest_ind(usa['height'], can['height'], equal_var=True, nan_policy='omit')
print('stat = {}, p = {}'.format(stat, p))
# In[69]:
# degrees of freedom for the independent t-test with similar variances: df = n1 + n2 - 2
gl = len(usa) + len(can) - 2
print(f"Degrees of freedom: {gl}")
q7_sf = sct.t.sf(stat, gl)*2  # two-tailed hypothesis
print(q7_sf)
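# Sanity check (an illustrative addition): for equal_var=True the two-tailed
# p-value equals 2 * sf(|t|, df), so this should print True.
print(np.isclose(sct.t.sf(abs(stat), gl) * 2, p))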
# In[77]:
sns.distplot(usa['height'], bins=25, hist=False, rug=True, label='USA')
sns.distplot(can['height'], bins=25, hist=False, rug=True, label='CAN')
|
[
"matplotlib.pyplot.show",
"numpy.log",
"numpy.random.seed",
"scipy.stats.shapiro",
"pandas.read_csv",
"scipy.stats.normaltest",
"scipy.stats.ttest_ind",
"IPython.core.pylabtools.figsize",
"seaborn.boxplot",
"seaborn.distplot",
"statsmodels.api.qqplot",
"scipy.stats.t.sf",
"scipy.stats.jarque_bera",
"numpy.round",
"seaborn.set"
] |
[((944, 958), 'IPython.core.pylabtools.figsize', 'figsize', (['(12)', '(8)'], {}), '(12, 8)\n', (951, 958), False, 'from IPython.core.pylabtools import figsize\n'), ((960, 969), 'seaborn.set', 'sns.set', ([], {}), '()\n', (967, 969), True, 'import seaborn as sns\n'), ((994, 1021), 'pandas.read_csv', 'pd.read_csv', (['"""athletes.csv"""'], {}), "('athletes.csv')\n", (1005, 1021), True, 'import pandas as pd\n'), ((3187, 3248), 'seaborn.distplot', 'sns.distplot', (['amostra_q1'], {'bins': '(25)', 'hist_kws': "{'density': True}"}), "(amostra_q1, bins=25, hist_kws={'density': True})\n", (3199, 3248), True, 'import seaborn as sns\n'), ((3249, 3259), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3257, 3259), True, 'import matplotlib.pyplot as plt\n'), ((3275, 3317), 'statsmodels.api.qqplot', 'sm.qqplot', (['amostra_q1'], {'fit': '(True)', 'line': '"""45"""'}), "(amostra_q1, fit=True, line='45')\n", (3284, 3317), True, 'import statsmodels.api as sm\n'), ((3318, 3328), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3326, 3328), True, 'import matplotlib.pyplot as plt\n'), ((3414, 3437), 'scipy.stats.shapiro', 'sct.shapiro', (['amostra_q1'], {}), '(amostra_q1)\n', (3425, 3437), True, 'import scipy.stats as sct\n'), ((4108, 4150), 'statsmodels.api.qqplot', 'sm.qqplot', (['amostra_q2'], {'fit': '(True)', 'line': '"""45"""'}), "(amostra_q2, fit=True, line='45')\n", (4117, 4150), True, 'import statsmodels.api as sm\n'), ((4151, 4161), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4159, 4161), True, 'import matplotlib.pyplot as plt\n'), ((5037, 5098), 'seaborn.distplot', 'sns.distplot', (['amostra_q3'], {'bins': '(25)', 'hist_kws': "{'density': True}"}), "(amostra_q3, bins=25, hist_kws={'density': True})\n", (5049, 5098), True, 'import seaborn as sns\n'), ((5099, 5109), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5107, 5109), True, 'import matplotlib.pyplot as plt\n'), ((5125, 5153), 'seaborn.boxplot', 'sns.boxplot', ([], {'data': 'amostra_q3'}), '(data=amostra_q3)\n', (5136, 5153), True, 'import seaborn as sns\n'), ((6010, 6028), 'numpy.log', 'np.log', (['amostra_q4'], {}), '(amostra_q4)\n', (6016, 6028), True, 'import numpy as np\n'), ((6029, 6103), 'seaborn.distplot', 'sns.distplot', (['amostra_q4_transformada'], {'bins': '(25)', 'hist_kws': "{'density': True}"}), "(amostra_q4_transformada, bins=25, hist_kws={'density': True})\n", (6041, 6103), True, 'import seaborn as sns\n'), ((6104, 6114), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6112, 6114), True, 'import matplotlib.pyplot as plt\n'), ((6130, 6171), 'seaborn.boxplot', 'sns.boxplot', ([], {'data': 'amostra_q4_transformada'}), '(data=amostra_q4_transformada)\n', (6141, 6171), True, 'import seaborn as sns\n'), ((7556, 7627), 'seaborn.distplot', 'sns.distplot', (["bra['height']"], {'bins': '(25)', 'hist': '(False)', 'rug': '(True)', 'label': '"""BRA"""'}), "(bra['height'], bins=25, hist=False, rug=True, label='BRA')\n", (7568, 7627), True, 'import seaborn as sns\n'), ((7628, 7699), 'seaborn.distplot', 'sns.distplot', (["usa['height']"], {'bins': '(25)', 'hist': '(False)', 'rug': '(True)', 'label': '"""USA"""'}), "(usa['height'], bins=25, hist=False, rug=True, label='USA')\n", (7640, 7699), True, 'import seaborn as sns\n'), ((8222, 8293), 'seaborn.distplot', 'sns.distplot', (["bra['height']"], {'bins': '(25)', 'hist': '(False)', 'rug': '(True)', 'label': '"""BRA"""'}), "(bra['height'], bins=25, hist=False, rug=True, label='BRA')\n", (8234, 8293), True, 'import seaborn as sns\n'), ((8294, 8365), 'seaborn.distplot', 'sns.distplot', (["can['height']"], {'bins': '(25)', 'hist': '(False)', 'rug': '(True)', 'label': '"""CAN"""'}), "(can['height'], bins=25, hist=False, rug=True, label='CAN')\n", (8306, 8365), True, 'import seaborn as sns\n'), ((9212, 9290), 'scipy.stats.ttest_ind', 'sct.ttest_ind', (["usa['height']", "can['height']"], {'equal_var': '(True)', 'nan_policy': '"""omit"""'}), "(usa['height'], can['height'], equal_var=True, nan_policy='omit')\n", (9225, 9290), True, 'import scipy.stats as sct\n'), ((9585, 9656), 'seaborn.distplot', 'sns.distplot', (["usa['height']"], {'bins': '(25)', 'hist': '(False)', 'rug': '(True)', 'label': '"""USA"""'}), "(usa['height'], bins=25, hist=False, rug=True, label='USA')\n", (9597, 9656), True, 'import seaborn as sns\n'), ((9657, 9728), 'seaborn.distplot', 'sns.distplot', (["can['height']"], {'bins': '(25)', 'hist': '(False)', 'rug': '(True)', 'label': '"""CAN"""'}), "(can['height'], bins=25, hist=False, rug=True, label='CAN')\n", (9669, 9728), True, 'import seaborn as sns\n'), ((1819, 1839), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (1833, 1839), True, 'import numpy as np\n'), ((2611, 2634), 'scipy.stats.shapiro', 'sct.shapiro', (['amostra_q1'], {}), '(amostra_q1)\n', (2622, 2634), True, 'import scipy.stats as sct\n'), ((3862, 3889), 'scipy.stats.jarque_bera', 'sct.jarque_bera', (['amostra_q2'], {}), '(amostra_q2)\n', (3877, 3889), True, 'import scipy.stats as sct\n'), ((4624, 4650), 'scipy.stats.normaltest', 'sct.normaltest', (['amostra_q3'], {}), '(amostra_q3)\n', (4638, 4650), True, 'import scipy.stats as sct\n'), ((5536, 5554), 'numpy.log', 'np.log', (['amostra_q4'], {}), '(amostra_q4)\n', (5542, 5554), True, 'import numpy as np\n'), ((5569, 5608), 'scipy.stats.normaltest', 'sct.normaltest', (['amostra_q4_transformada'], {}), '(amostra_q4_transformada)\n', (5583, 5608), True, 'import scipy.stats as sct\n'), ((7282, 7361), 'scipy.stats.ttest_ind', 'sct.ttest_ind', (["bra['height']", "usa['height']"], {'equal_var': '(False)', 'nan_policy': '"""omit"""'}), "(bra['height'], usa['height'], equal_var=False, nan_policy='omit')\n", (7295, 7361), True, 'import scipy.stats as sct\n'), ((7948, 8027), 'scipy.stats.ttest_ind', 'sct.ttest_ind', (["bra['height']", "can['height']"], {'equal_var': '(False)', 'nan_policy': '"""omit"""'}), "(bra['height'], can['height'], equal_var=False, nan_policy='omit')\n", (7961, 8027), True, 'import scipy.stats as sct\n'), ((8610, 8689), 'scipy.stats.ttest_ind', 'sct.ttest_ind', (["usa['height']", "can['height']"], {'equal_var': '(False)', 'nan_policy': '"""omit"""'}), "(usa['height'], can['height'], equal_var=False, nan_policy='omit')\n", (8623, 8689), True, 'import scipy.stats as sct\n'), ((9513, 9531), 'scipy.stats.t.sf', 'sct.t.sf', (['stat', 'gl'], {}), '(stat, gl)\n', (9521, 9531), True, 'import scipy.stats as sct\n'), ((8968, 8982), 'numpy.round', 'np.round', (['p', '(8)'], {}), '(p, 8)\n', (8976, 8982), True, 'import numpy as np\n')]
|
from __future__ import absolute_import
import itertools
import datetime
from django.utils import timezone
from django.test.client import Client
from django.template.defaultfilters import slugify
from bulbs.content.models import Content, Tag, FeatureType
from elastimorphic.tests.base import BaseIndexableTestCase
from tests.testcontent.models import TestContentObj, TestContentObjTwo
class PolyContentTestCase(BaseIndexableTestCase):
def setUp(self):
super(PolyContentTestCase, self).setUp()
"""
Normally, the "Content" class picks up available doctypes from installed apps, but
in this case, our test models don't exist in a real app, so we'll hack them on.
"""
# generate some data
one_hour_ago = timezone.now() - datetime.timedelta(hours=1)
two_days_ago = timezone.now() - datetime.timedelta(days=2)
words = ['spam', 'driver', 'dump truck', 'restaurant']
self.num_subclasses = 2
self.combos = list(itertools.combinations(words, 2))
self.all_tags = []
ft_one = FeatureType.objects.create(name="Obj one", slug="obj-one")
ft_two = FeatureType.objects.create(name="Obj two", slug="obj-two")
for i, combo in enumerate(self.combos):
tags = []
for atom in combo:
tag, created = Tag.objects.get_or_create(name=atom, slug=slugify(atom))
tags.append(tag)
self.all_tags.append(tag)
obj = TestContentObj.objects.create(
title=' '.join(combo),
description=' '.join(reversed(combo)),
foo=combo[0],
published=one_hour_ago,
feature_type=ft_one
)
obj.tags.add(*tags)
obj.index()
obj2 = TestContentObjTwo.objects.create(
title=' '.join(reversed(combo)),
description=' '.join(combo),
foo=combo[1],
bar=i,
published=two_days_ago,
feature_type=ft_two
)
obj2.tags.add(*tags)
obj2.index()
obj = TestContentObj.objects.create(
title="Unpublished draft",
description="Just to throw a wrench",
foo="bar",
feature_type=ft_one
)
# We need to let the index refresh
TestContentObj.search_objects.refresh()
TestContentObjTwo.search_objects.refresh()
def test_filter_search_content(self):
self.assertEqual(Content.objects.count(), 13) # The 12, plus the unpublished one
q = Content.search_objects.search()
self.assertEqual(q.count(), 12)
q = Content.search_objects.search(query="spam")
self.assertEqual(q.count(), 6)
q = Content.search_objects.search(tags=["spam"])
self.assertEqual(q.count(), 6)
for content in q.full():
self.assertTrue("spam" in content.tags.values_list("slug", flat=True))
q = Content.search_objects.search(feature_types=["obj-one"])
self.assertEqual(q.count(), 6)
for content in q.full():
self.assertEqual("Obj one", content.feature_type.name)
q = Content.search_objects.search(types=["testcontent_testcontentobj"])
self.assertEqual(q.count(), 6)
q = Content.search_objects.search(before=timezone.now())
self.assertEqual(q.count(), 12)
q = Content.search_objects.search(before=timezone.now() - datetime.timedelta(hours=4))
self.assertEqual(q.count(), 6)
q = Content.search_objects.search(after=timezone.now() - datetime.timedelta(hours=4))
self.assertEqual(q.count(), 6)
q = Content.search_objects.search(after=timezone.now() - datetime.timedelta(days=40))
self.assertEqual(q.count(), 12)
q = Content.search_objects.search(types=["testcontent_testcontentobjtwo"]).full()
self.assertEqual(q.count(), 6)
q = Content.search_objects.search(types=[
"testcontent_testcontentobjtwo", "testcontent_testcontentobj"])
self.assertEqual(q.count(), 12)
def test_status_filter(self):
q = Content.search_objects.search(status="final")
self.assertEqual(q.count(), 12)
q = Content.search_objects.search(status="draft")
self.assertEqual(q.count(), 1)
def test_negative_filters(self):
q = Content.search_objects.search(tags=["-spam"])
self.assertEqual(q.count(), 6)
q = Content.search_objects.search(feature_types=["-obj-one"])
self.assertEqual(q.count(), 6)
for content in q.full():
self.assertNotEqual("Obj one", content.feature_type.name)
def test_content_subclasses(self):
# We created one of each subclass per combination so the following should be true:
self.assertEqual(Content.objects.count(), (len(self.combos) * self.num_subclasses) + 1)
self.assertEqual(TestContentObj.objects.count(), len(self.combos) + 1)
self.assertEqual(TestContentObjTwo.objects.count(), len(self.combos))
def test_content_list_view(self):
client = Client()
response = client.get('/content_list_one.html')
self.assertEqual(response.status_code, 200)
self.assertEqual(
len(response.context['object_list']), len(self.combos) * self.num_subclasses)
def test_num_polymorphic_queries(self):
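        # one query for the shared Content table plus one per concrete subclass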
with self.assertNumQueries(1 + self.num_subclasses):
for content in Content.objects.all():
self.assertIsInstance(content, (TestContentObj, TestContentObjTwo))
def test_add_remove_tags(self):
content = Content.objects.all()[0]
original_tag_count = len(content.tags.all())
new_tag = Tag.objects.create(name='crankdat')
content.tags.add(new_tag)
self.assertEqual(len(content.tags.all()), original_tag_count + 1)
self.assertEqual(len(content.tags.all()), len(content.extract_document()['tags']))
def test_search_exact_name_tags(self):
Tag.objects.create(name='Beeftank')
Tag.search_objects.refresh()
results = Tag.search_objects.query(name__match='beeftank').full()
self.assertTrue(len(results) > 0)
tag_result = results[0]
self.assertIsInstance(tag_result, Tag)
def test_in_bulk_performs_polymorphic_query(self):
content_ids = [c.id for c in Content.objects.all()]
results = Content.objects.in_bulk(content_ids)
subclasses = tuple(Content.__subclasses__())
for result in results.values():
self.assertIsInstance(result, subclasses)
|
[
"bulbs.content.models.FeatureType.objects.create",
"bulbs.content.models.Tag.search_objects.query",
"tests.testcontent.models.TestContentObj.objects.count",
"django.utils.timezone.now",
"bulbs.content.models.Tag.search_objects.refresh",
"bulbs.content.models.Content.objects.count",
"datetime.timedelta",
"tests.testcontent.models.TestContentObjTwo.objects.count",
"bulbs.content.models.Content.objects.in_bulk",
"django.test.client.Client",
"itertools.combinations",
"django.template.defaultfilters.slugify",
"bulbs.content.models.Content.objects.all",
"bulbs.content.models.Tag.objects.create",
"bulbs.content.models.Content.search_objects.search",
"tests.testcontent.models.TestContentObj.objects.create",
"tests.testcontent.models.TestContentObj.search_objects.refresh",
"bulbs.content.models.Content.__subclasses__",
"tests.testcontent.models.TestContentObjTwo.search_objects.refresh"
] |
[((1078, 1136), 'bulbs.content.models.FeatureType.objects.create', 'FeatureType.objects.create', ([], {'name': '"""Obj one"""', 'slug': '"""obj-one"""'}), "(name='Obj one', slug='obj-one')\n", (1104, 1136), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((1154, 1212), 'bulbs.content.models.FeatureType.objects.create', 'FeatureType.objects.create', ([], {'name': '"""Obj two"""', 'slug': '"""obj-two"""'}), "(name='Obj two', slug='obj-two')\n", (1180, 1212), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((2159, 2290), 'tests.testcontent.models.TestContentObj.objects.create', 'TestContentObj.objects.create', ([], {'title': '"""Unpublished draft"""', 'description': '"""Just to throw a wrench"""', 'foo': '"""bar"""', 'feature_type': 'ft_one'}), "(title='Unpublished draft', description=\n 'Just to throw a wrench', foo='bar', feature_type=ft_one)\n", (2188, 2290), False, 'from tests.testcontent.models import TestContentObj, TestContentObjTwo\n'), ((2396, 2435), 'tests.testcontent.models.TestContentObj.search_objects.refresh', 'TestContentObj.search_objects.refresh', ([], {}), '()\n', (2433, 2435), False, 'from tests.testcontent.models import TestContentObj, TestContentObjTwo\n'), ((2444, 2486), 'tests.testcontent.models.TestContentObjTwo.search_objects.refresh', 'TestContentObjTwo.search_objects.refresh', ([], {}), '()\n', (2484, 2486), False, 'from tests.testcontent.models import TestContentObj, TestContentObjTwo\n'), ((2635, 2666), 'bulbs.content.models.Content.search_objects.search', 'Content.search_objects.search', ([], {}), '()\n', (2664, 2666), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((2720, 2763), 'bulbs.content.models.Content.search_objects.search', 'Content.search_objects.search', ([], {'query': '"""spam"""'}), "(query='spam')\n", (2749, 2763), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((2816, 2860), 'bulbs.content.models.Content.search_objects.search', 'Content.search_objects.search', ([], {'tags': "['spam']"}), "(tags=['spam'])\n", (2845, 2860), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((3029, 3085), 'bulbs.content.models.Content.search_objects.search', 'Content.search_objects.search', ([], {'feature_types': "['obj-one']"}), "(feature_types=['obj-one'])\n", (3058, 3085), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((3238, 3305), 'bulbs.content.models.Content.search_objects.search', 'Content.search_objects.search', ([], {'types': "['testcontent_testcontentobj']"}), "(types=['testcontent_testcontentobj'])\n", (3267, 3305), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((3998, 4102), 'bulbs.content.models.Content.search_objects.search', 'Content.search_objects.search', ([], {'types': "['testcontent_testcontentobjtwo', 'testcontent_testcontentobj']"}), "(types=['testcontent_testcontentobjtwo',\n 'testcontent_testcontentobj'])\n", (4027, 4102), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((4199, 4244), 'bulbs.content.models.Content.search_objects.search', 'Content.search_objects.search', ([], {'status': '"""final"""'}), "(status='final')\n", (4228, 4244), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((4298, 4343), 'bulbs.content.models.Content.search_objects.search', 'Content.search_objects.search', ([], {'status': '"""draft"""'}), "(status='draft')\n", (4327, 4343), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((4433, 4478), 'bulbs.content.models.Content.search_objects.search', 'Content.search_objects.search', ([], {'tags': "['-spam']"}), "(tags=['-spam'])\n", (4462, 4478), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((4531, 4588), 'bulbs.content.models.Content.search_objects.search', 'Content.search_objects.search', ([], {'feature_types': "['-obj-one']"}), "(feature_types=['-obj-one'])\n", (4560, 4588), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((5171, 5179), 'django.test.client.Client', 'Client', ([], {}), '()\n', (5177, 5179), False, 'from django.test.client import Client\n'), ((5795, 5830), 'bulbs.content.models.Tag.objects.create', 'Tag.objects.create', ([], {'name': '"""crankdat"""'}), "(name='crankdat')\n", (5813, 5830), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((6082, 6117), 'bulbs.content.models.Tag.objects.create', 'Tag.objects.create', ([], {'name': '"""Beeftank"""'}), "(name='Beeftank')\n", (6100, 6117), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((6126, 6154), 'bulbs.content.models.Tag.search_objects.refresh', 'Tag.search_objects.refresh', ([], {}), '()\n', (6152, 6154), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((6484, 6520), 'bulbs.content.models.Content.objects.in_bulk', 'Content.objects.in_bulk', (['content_ids'], {}), '(content_ids)\n', (6507, 6520), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((766, 780), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (778, 780), False, 'from django.utils import timezone\n'), ((783, 810), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (801, 810), False, 'import datetime\n'), ((834, 848), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (846, 848), False, 'from django.utils import timezone\n'), ((851, 877), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(2)'}), '(days=2)\n', (869, 877), False, 'import datetime\n'), ((1000, 1032), 'itertools.combinations', 'itertools.combinations', (['words', '(2)'], {}), '(words, 2)\n', (1022, 1032), False, 'import itertools\n'), ((2556, 2579), 'bulbs.content.models.Content.objects.count', 'Content.objects.count', ([], {}), '()\n', (2577, 2579), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((4887, 4910), 'bulbs.content.models.Content.objects.count', 'Content.objects.count', ([], {}), '()\n', (4908, 4910), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((4983, 5013), 'tests.testcontent.models.TestContentObj.objects.count', 'TestContentObj.objects.count', ([], {}), '()\n', (5011, 5013), False, 'from tests.testcontent.models import TestContentObj, TestContentObjTwo\n'), ((5062, 5095), 'tests.testcontent.models.TestContentObjTwo.objects.count', 'TestContentObjTwo.objects.count', ([], {}), '()\n', (5093, 5095), False, 'from tests.testcontent.models import TestContentObj, TestContentObjTwo\n'), ((5537, 5558), 'bulbs.content.models.Content.objects.all', 'Content.objects.all', ([], {}), '()\n', (5556, 5558), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((5699, 5720), 'bulbs.content.models.Content.objects.all', 'Content.objects.all', ([], {}), '()\n', (5718, 5720), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((6548, 6572), 'bulbs.content.models.Content.__subclasses__', 'Content.__subclasses__', ([], {}), '()\n', (6570, 6572), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((3395, 3409), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (3407, 3409), False, 'from django.utils import timezone\n'), ((3868, 3938), 'bulbs.content.models.Content.search_objects.search', 'Content.search_objects.search', ([], {'types': "['testcontent_testcontentobjtwo']"}), "(types=['testcontent_testcontentobjtwo'])\n", (3897, 3938), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((6173, 6221), 'bulbs.content.models.Tag.search_objects.query', 'Tag.search_objects.query', ([], {'name__match': '"""beeftank"""'}), "(name__match='beeftank')\n", (6197, 6221), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((6443, 6464), 'bulbs.content.models.Content.objects.all', 'Content.objects.all', ([], {}), '()\n', (6462, 6464), False, 'from bulbs.content.models import Content, Tag, FeatureType\n'), ((3501, 3515), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (3513, 3515), False, 'from django.utils import timezone\n'), ((3518, 3545), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(4)'}), '(hours=4)\n', (3536, 3545), False, 'import datetime\n'), ((3635, 3649), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (3647, 3649), False, 'from django.utils import timezone\n'), ((3652, 3679), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(4)'}), '(hours=4)\n', (3670, 3679), False, 'import datetime\n'), ((3769, 3783), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (3781, 3783), False, 'from django.utils import timezone\n'), ((3786, 3813), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(40)'}), '(days=40)\n', (3804, 3813), False, 'import datetime\n'), ((1387, 1400), 'django.template.defaultfilters.slugify', 'slugify', (['atom'], {}), '(atom)\n', (1394, 1400), False, 'from django.template.defaultfilters import slugify\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 24 16:03:28 2018
@author: dimitricoukos
Test: in command line:
python RetrieveUniProt.py 'Unit Tests/sample_brenda_parameters.json'
"""
import sys
import cobra_services as CS
from multiprocessing import Pool
from urllib.error import HTTPError
from DataTreatment import openJson, write
mammals = ['HSA', 'PTR', 'PPS', 'GGO', 'PON', 'NLE', 'MCC', 'MCF', 'RRO',
'RBB', 'CJC', 'SBQ', 'MMU', 'RNO', 'CGE', 'NGI', 'HGL', 'OCU',
'TUP', 'CFA', 'AML', 'UMR', 'ORO', 'FCA', 'PTG', 'AJU', 'BTA',
'BOM', 'BIU', 'PHD', 'CHX', 'OAS', 'SSC', 'CFR', 'CDK', 'LVE',
'OOR', 'ECB', 'EPZ', 'EAI', 'MYB', 'MYD', 'HAI', 'RSS', 'LAV',
'TMU', 'MDO', 'SHR', 'OAA']
animals = ['HSA', 'PTR', 'PPS', 'GGO', 'PON', 'NLE', 'MCC', 'MCF', 'RRO',
'RBB', 'CJC', 'SBQ', 'MMU', 'RNO', 'CGE', 'NGI', 'HGL', 'OCU',
'TUP', 'CFA', 'AML', 'UMR', 'ORO', 'FCA', 'PTG', 'AJU', 'BTA',
'BOM', 'BIU', 'PHD', 'CHX', 'OAS', 'SSC', 'CFR', 'CDK', 'LVE',
'OOR', 'ECB', 'EPZ', 'EAI', 'MYB', 'MYD', 'HAI', 'RSS', 'LAV',
'TMU', 'MDO', 'SHR', 'OAA', 'GGA', 'MGP', 'CJO', 'TGU', 'GFR',
'FAB', 'PHI', 'CCW', 'FPG', 'FCH', 'CLV', 'EGZ', 'AAM', 'ASN',
'AMJ', 'PSS', 'CMY', 'SEA', 'ACS', 'PVT', 'PBI', 'GJA', 'XLA',
'XTR', 'NPR', 'DRE', 'SRX', 'SGH', 'IPU', 'TRU', 'TNG', 'LCO',
'NCC', 'MZE', 'OLA', 'XMA', 'NFU', 'LCF', 'HCQ', 'ELS', 'SFM',
'LCM', 'CMK']
def returnBestAddress(genes, loop):
"""Searches for available genes matching kegg enzyme entry.
This function searches 'sequentially'. It returns the best available model
organism genes. Organisms phylogenetically closer to Cricetulus griseus are
preferred, but they are chosen by approximation. A detailed study of the
phylogenetic tree has not been done for this project. Hopefully going
sequentially increases both readability and efficiency.
Parameters
----------
genes : dict
key: value pair is organism: address
loop : string
Indicates the highest potential group of matching organisms to search
in.
Returns
-------
dict
key: kegg organism code. value: gene addresses for enzyme and organism
"""
    # every branch returns a dict keyed by kegg organism code, as documented above
    if loop == 'best':
        for org in ('CGE', 'MMU', 'RNO', 'HSA'):
            if org in genes:
                return {org: genes[org]}
        loop = 'mammals'
    if loop == 'mammals':
        mammal_match = set(genes.keys()).intersection(mammals)
        if mammal_match:
            return {org: genes[org] for org in mammal_match}
        loop = 'vertebrates'
    if loop == 'vertebrates':
        animal_match = set(genes.keys()).intersection(animals)
        if animal_match:
            return {org: genes[org] for org in animal_match}
        loop = 'csm'  # Stands for "common simple models"
    if loop == 'csm':
        for org in ('DME', 'SCE', 'ECO'):
            if org in genes:
                return {org: genes[org]}
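# A hypothetical illustration of the fallback order (the gene addresses below
# are made up, not real KEGG data): 'RNO' is matched inside the 'best' loop,
# while a dict containing only 'DME' falls all the way through to 'csm'.
assert returnBestAddress({'RNO': ['100360501']}, 'best') == {'RNO': ['100360501']}
assert returnBestAddress({'DME': ['Dmel_CG0000']}, 'best') == {'DME': ['Dmel_CG0000']}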
def loopHandler(mol_weights, ec_number, genes, loop):
    """Calls returnBestAddress starting from the given organism group and fills
    in the data for every gene it returns.
    Parameters
    ----------
    mol_weights : dict
        object containing all information collected by the program.
    ec_number : string
    genes : dict
        key: kegg organism code. value: gene addresses for the ec number.
    loop : string
        organism group to start the search from.
    """
    best = returnBestAddress(genes, loop)
    if not best:
        # returnBestAddress already falls through every organism group,
        # so nothing at all matched for this enzyme
        return None
    mol_weights[ec_number]['weights'] = []
    mol_weights[ec_number]['uniprot_ids'] = []
    for organism in best:
        for address in best[organism]:
            try:
                fillData(mol_weights, ec_number, organism, address)
            except HTTPError as err:
                if err.code == 404:
                    pass
                else:
                    raise
def fillData(mol_weights, ec_number, organism, address):
"""Searches kegg for enzyme uniprot id and AA sequence.
Parameters
----------
mol_weights : dict
object containing all information collected by program.
    ec_number : string
        enzyme classification number used to organize data.
    organism : string
        kegg organism code the gene belongs to.
    address : string
        gene address for sequence lookup.
    """
mol_weights[ec_number]['genes'].append(organism.lower() + ':' + address)
sequence = CS.kegggene_to_sequence(organism, address)
weight = CS.sequence_weight(sequence)
mol_weights[ec_number]['weights'].append(weight)
uniprot = CS.kegggene_to_uniprotid(organism, address)
if uniprot:
        mol_weights[ec_number]['uniprot_ids'].append(uniprot)
def mainSubprocess(bigg_ids, del_ec):
"""Main function called by each multiprocessing.process.
Parameters
----------
bigg_ids : dict
key: ec_number. value: corresponding bigg ids.
del_ec : list
        empty list which is appended to here, containing deprecated ec numbers
Returns
-------
dict
key: ec number. value: all collected data in program by this process.
"""
try:
mol_weights = {}
for ec_number in bigg_ids: # WARNING: joblib may require list
mol_weights[ec_number] = {}
print('Currently processing BiGG id: ' + ec_number)
mol_weights[ec_number]['bigg ids'] = bigg_ids[ec_number]
try:
genes = CS.ecnumber_to_genes(ec_number)
except HTTPError as err:
if err.code == 404:
                    print('Skipping: no entry for ec number: ' + ec_number)
continue
else:
raise
if genes:
loop = 'best'
searching = True
while searching:
try:
loopHandler(mol_weights, ec_number, genes, loop)
searching = False
except HTTPError as err:
if err.code == 404 and loop == 'csm':
searching = False
except TypeError as err:
if loop == 'best':
loop = 'mammals'
if loop == 'mammals':
loop = 'vertebrates'
if loop == 'vertebrates':
loop = 'csm'
if loop == 'csm':
searching = False
finally:
return mol_weights
if __name__ == '__main__':
sub_dict_1 = {}
sub_dict_2 = {}
sub_dict_3 = {}
sub_dict_4 = {}
mol_weights = {}
if len(sys.argv) == 1:
brenda_parameters = openJson('JSONs/brenda_parameters.json')
else:
brenda_parameters = openJson(sys.argv[1])
simplified_brenda = {}
for bigg_id in brenda_parameters:
simplified_brenda[bigg_id] = brenda_parameters[bigg_id][0]
optimized_bigg = {}
for k, v in simplified_brenda.items():
optimized_bigg[v] = optimized_bigg.get(v, [])
optimized_bigg[v].append(k)
counter = 0
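    # round-robin partition of the ec numbers across four worker sub-dicts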
for ec_number in optimized_bigg:
if counter % 4 == 0:
sub_dict_1[ec_number] = optimized_bigg[ec_number]
if counter % 4 == 1:
sub_dict_2[ec_number] = optimized_bigg[ec_number]
if counter % 4 == 2:
sub_dict_3[ec_number] = optimized_bigg[ec_number]
if counter % 4 == 3:
sub_dict_4[ec_number] = optimized_bigg[ec_number]
counter = counter + 1
try:
with Pool(processes=4) as pool:
del_ec1 = []
del_ec2 = []
del_ec3 = []
del_ec4 = []
mw_1 = pool.apply_async(mainSubprocess, (sub_dict_1, del_ec1,))
mw_2 = pool.apply_async(mainSubprocess, (sub_dict_2, del_ec2,))
mw_3 = pool.apply_async(mainSubprocess, (sub_dict_3, del_ec3,))
mw_4 = pool.apply_async(mainSubprocess, (sub_dict_4, del_ec4,))
pool.close()
pool.join()
            # each worker receives a pickled copy of its del_ec list, so the
            # lists here only change if populated in this parent process;
            # fetch the worker results before pruning deprecated ec numbers
            mw_1 = mw_1.get()
            mw_2 = mw_2.get()
            mw_3 = mw_3.get()
            mw_4 = mw_4.get()
            for ec in del_ec1:
                mw_1.pop(ec, None)
            for ec in del_ec2:
                mw_2.pop(ec, None)
            for ec in del_ec3:
                mw_3.pop(ec, None)
            for ec in del_ec4:
                mw_4.pop(ec, None)
finally:
        mol_weights.update(mw_1)
        mol_weights.update(mw_2)
        mol_weights.update(mw_3)
        mol_weights.update(mw_4)
        if len(sys.argv) > 1:
            write('Unit Tests/multiprocessing_sub_output1.json', mw_1)
            write('Unit Tests/multiprocessing_sub_output3.json', mw_3)
mol_weights_to_write = {}
for ec_number in mol_weights:
for bigg_id in mol_weights[ec_number]['bigg ids']:
mol_weights_to_write[bigg_id] = {}
mol_weights_to_write[bigg_id]['ec_number'] = ec_number
mol_weights_to_write[bigg_id].update(mol_weights[ec_number])
write('JSONs/molecular_weights.json', mol_weights_to_write)
|
[
"cobra_services.kegggene_to_uniprotid",
"cobra_services.sequence_weight",
"cobra_services.kegggene_to_sequence",
"cobra_services.ecnumber_to_genes",
"DataTreatment.openJson",
"multiprocessing.Pool",
"DataTreatment.write"
] |
[((5395, 5437), 'cobra_services.kegggene_to_sequence', 'CS.kegggene_to_sequence', (['organism', 'address'], {}), '(organism, address)\n', (5418, 5437), True, 'import cobra_services as CS\n'), ((5451, 5479), 'cobra_services.sequence_weight', 'CS.sequence_weight', (['sequence'], {}), '(sequence)\n', (5469, 5479), True, 'import cobra_services as CS\n'), ((5547, 5590), 'cobra_services.kegggene_to_uniprotid', 'CS.kegggene_to_uniprotid', (['organism', 'address'], {}), '(organism, address)\n', (5571, 5590), True, 'import cobra_services as CS\n'), ((7704, 7744), 'DataTreatment.openJson', 'openJson', (['"""JSONs/brenda_parameters.json"""'], {}), "('JSONs/brenda_parameters.json')\n", (7712, 7744), False, 'from DataTreatment import openJson, write\n'), ((7783, 7804), 'DataTreatment.openJson', 'openJson', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (7791, 7804), False, 'from DataTreatment import openJson, write\n'), ((10002, 10061), 'DataTreatment.write', 'write', (['"""JSONs/molecular_weights.json"""', 'mol_weights_to_write'], {}), "('JSONs/molecular_weights.json', mol_weights_to_write)\n", (10007, 10061), False, 'from DataTreatment import openJson, write\n'), ((8563, 8580), 'multiprocessing.Pool', 'Pool', ([], {'processes': '(4)'}), '(processes=4)\n', (8567, 8580), False, 'from multiprocessing import Pool\n'), ((6404, 6435), 'cobra_services.ecnumber_to_genes', 'CS.ecnumber_to_genes', (['ec_number'], {}), '(ec_number)\n', (6424, 6435), True, 'import cobra_services as CS\n')]
|
import argparse
import datetime
import json
import logging
import pickle
import time
import shutil
from kite.graph_data.data_feeder import EndpointDataFeeder
from kite.graph_data.session import RequestInit
from kite.graph_data.graph_feed import GraphFeedConfig
from kite.infer_expr.config import MetaInfo, Config
from kite.infer_call.request import Request as CallRequest, KwargRequest, ArgTypeRequest, ArgPlaceholderRequest
from kite.infer_expr.request import Request as ExprRequest
from kite.infer_expr.attr_base import Request as AttrBaseRequest
from kite.infer_attr.request import Request as AttrRequest
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)-8s %(message)s')
def get_filename(cur_sample: int, total: int, timestamp: int) -> str:
n_digits = len(str(total))
format_str = "{{:0{}d}}".format(n_digits) + "-of-{}-{}.pickle"
return format_str.format(cur_sample, total, timestamp)
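# Illustrative check (a sketch, not part of the original pipeline): the sample
# index is zero-padded to the width of the total sample count.
assert get_filename(7, 1000, 1518000000000) == "0007-of-1000-1518000000000.pickle"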
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--endpoint', type=str, default='http://localhost:3039')
parser.add_argument('--random_seed', type=int)
parser.add_argument('--batch', type=int, default=10)
parser.add_argument('--samples', type=int, default=1000, help='number of samples to generate')
parser.add_argument('--meta_info', type=str)
parser.add_argument('--out_dir', type=str, default='data')
parser.add_argument('--samples_per_file', type=int, default=500)
parser.add_argument('--max_samples', type=int)
parser.add_argument('--attr_base_proportion', type=float)
parser.add_argument('--attr_proportion', type=float)
parser.add_argument('--call_proportion', type=float)
parser.add_argument('--arg_type_proportion', type=float)
parser.add_argument('--kwarg_name_proportion', type=float)
parser.add_argument('--arg_placeholder_proportion', type=float)
args = parser.parse_args()
meta_info = MetaInfo.from_json(json.load(open(args.meta_info, 'r')))
config = Config()
req = RequestInit(
config=GraphFeedConfig(edge_set=config.ggnn.edge_set),
random_seed=args.random_seed,
num_batches=args.batch,
max_hops=config.max_hops,
name_subtoken_index=meta_info.name_subtoken_index,
type_subtoken_index=meta_info.type_subtoken_index,
production_index=meta_info.production,
expr=ExprRequest(
max_samples=args.max_samples,
call=CallRequest(
symbols=meta_info.call.dist,
batch_proportion=args.call_proportion,
),
attr=AttrRequest(
symbols=meta_info.attr.dist,
batch_proportion=args.attr_proportion,
parents=meta_info.attr.parents,
),
attr_base=AttrBaseRequest(
symbols=meta_info.attr_base.dist,
batch_proportion=args.attr_base_proportion,
),
arg_type=ArgTypeRequest(
symbols=meta_info.call.dist,
batch_proportion=args.arg_type_proportion,
),
kwarg_name=KwargRequest(
symbols=meta_info.call.dist,
keywords=meta_info.call.keywords,
batch_proportion=args.kwarg_name_proportion,
),
arg_placeholder=ArgPlaceholderRequest(
symbols=meta_info.call.dist,
batch_proportion=args.arg_placeholder_proportion,
)
),
)
logging.info("will write {} samples to {}, random seed = {}".format(
args.samples, args.out_dir, args.random_seed))
feeder = EndpointDataFeeder(args.endpoint, req)
try:
tmp_filename = None
filename = None
file = None
file_samples = 0
start = None
n_names = 0
n_production = 0
def finish_file():
file.close()
shutil.move(tmp_filename, filename)
end = datetime.datetime.now()
logging.info(
"sample {}: saved {} with {} samples ({} name, {} production), took {}".format(
i, filename, args.samples_per_file, n_names, n_production, end - start
))
for i in range(args.samples):
if not file or file_samples >= args.samples_per_file:
if file:
finish_file()
file_samples = 0
ts = int(time.time() * 1000)
filename = "{}/{}".format(args.out_dir, get_filename(i, args.samples, ts))
tmp_filename = "{}.part".format(filename)
file = open(tmp_filename, 'wb')
start = datetime.datetime.now()
logging.info("writing to {}".format(tmp_filename))
sample = feeder.next()
pickle.dump(sample, file)
n_names += len(sample.data.expr.infer_name.prediction_nodes)
n_production += len(sample.data.expr.infer_production.prediction_nodes)
file_samples += 1
if file_samples > 0:
finish_file()
finally:
feeder.stop()
if __name__ == "__main__":
main()
|
[
"kite.graph_data.data_feeder.EndpointDataFeeder",
"pickle.dump",
"argparse.ArgumentParser",
"logging.basicConfig",
"kite.infer_call.request.Request",
"kite.infer_call.request.KwargRequest",
"kite.infer_call.request.ArgPlaceholderRequest",
"kite.infer_expr.config.Config",
"kite.graph_data.graph_feed.GraphFeedConfig",
"kite.infer_expr.attr_base.Request",
"time.time",
"kite.infer_call.request.ArgTypeRequest",
"shutil.move",
"kite.infer_attr.request.Request",
"datetime.datetime.now"
] |
[((613, 708), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""%(asctime)s %(levelname)-8s %(message)s"""'}), "(level=logging.DEBUG, format=\n '%(asctime)s %(levelname)-8s %(message)s')\n", (632, 708), False, 'import logging\n'), ((960, 985), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (983, 985), False, 'import argparse\n'), ((1993, 2001), 'kite.infer_expr.config.Config', 'Config', ([], {}), '()\n', (1999, 2001), False, 'from kite.infer_expr.config import MetaInfo, Config\n'), ((3628, 3666), 'kite.graph_data.data_feeder.EndpointDataFeeder', 'EndpointDataFeeder', (['args.endpoint', 'req'], {}), '(args.endpoint, req)\n', (3646, 3666), False, 'from kite.graph_data.data_feeder import EndpointDataFeeder\n'), ((2041, 2087), 'kite.graph_data.graph_feed.GraphFeedConfig', 'GraphFeedConfig', ([], {'edge_set': 'config.ggnn.edge_set'}), '(edge_set=config.ggnn.edge_set)\n', (2056, 2087), False, 'from kite.graph_data.graph_feed import GraphFeedConfig\n'), ((3906, 3941), 'shutil.move', 'shutil.move', (['tmp_filename', 'filename'], {}), '(tmp_filename, filename)\n', (3917, 3941), False, 'import shutil\n'), ((3961, 3984), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3982, 3984), False, 'import datetime\n'), ((4825, 4850), 'pickle.dump', 'pickle.dump', (['sample', 'file'], {}), '(sample, file)\n', (4836, 4850), False, 'import pickle\n'), ((4686, 4709), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4707, 4709), False, 'import datetime\n'), ((2443, 2522), 'kite.infer_call.request.Request', 'CallRequest', ([], {'symbols': 'meta_info.call.dist', 'batch_proportion': 'args.call_proportion'}), '(symbols=meta_info.call.dist, batch_proportion=args.call_proportion)\n', (2454, 2522), True, 'from kite.infer_call.request import Request as CallRequest, KwargRequest, ArgTypeRequest, ArgPlaceholderRequest\n'), ((2588, 2704), 'kite.infer_attr.request.Request', 'AttrRequest', ([], {'symbols': 'meta_info.attr.dist', 'batch_proportion': 'args.attr_proportion', 'parents': 'meta_info.attr.parents'}), '(symbols=meta_info.attr.dist, batch_proportion=args.\n attr_proportion, parents=meta_info.attr.parents)\n', (2599, 2704), True, 'from kite.infer_attr.request import Request as AttrRequest\n'), ((2786, 2884), 'kite.infer_expr.attr_base.Request', 'AttrBaseRequest', ([], {'symbols': 'meta_info.attr_base.dist', 'batch_proportion': 'args.attr_base_proportion'}), '(symbols=meta_info.attr_base.dist, batch_proportion=args.\n attr_base_proportion)\n', (2801, 2884), True, 'from kite.infer_expr.attr_base import Request as AttrBaseRequest\n'), ((2949, 3040), 'kite.infer_call.request.ArgTypeRequest', 'ArgTypeRequest', ([], {'symbols': 'meta_info.call.dist', 'batch_proportion': 'args.arg_type_proportion'}), '(symbols=meta_info.call.dist, batch_proportion=args.\n arg_type_proportion)\n', (2963, 3040), False, 'from kite.infer_call.request import Request as CallRequest, KwargRequest, ArgTypeRequest, ArgPlaceholderRequest\n'), ((3107, 3231), 'kite.infer_call.request.KwargRequest', 'KwargRequest', ([], {'symbols': 'meta_info.call.dist', 'keywords': 'meta_info.call.keywords', 'batch_proportion': 'args.kwarg_name_proportion'}), '(symbols=meta_info.call.dist, keywords=meta_info.call.keywords,\n batch_proportion=args.kwarg_name_proportion)\n', (3119, 3231), False, 'from kite.infer_call.request import Request as CallRequest, KwargRequest, ArgTypeRequest, ArgPlaceholderRequest\n'), ((3320, 3425), 'kite.infer_call.request.ArgPlaceholderRequest', 'ArgPlaceholderRequest', ([], {'symbols': 'meta_info.call.dist', 'batch_proportion': 'args.arg_placeholder_proportion'}), '(symbols=meta_info.call.dist, batch_proportion=args.\n arg_placeholder_proportion)\n', (3341, 3425), False, 'from kite.infer_call.request import Request as CallRequest, KwargRequest, ArgTypeRequest, ArgPlaceholderRequest\n'), ((4444, 4455), 'time.time', 'time.time', ([], {}), '()\n', (4453, 4455), False, 'import time\n')]
|
from collections import OrderedDict
from django.utils.module_loading import import_string
from django.conf import settings
from django.urls.resolvers import URLResolver, URLPattern
import re
def check_url_exclude(url):
    for regex in settings.AUTO_DISCOVER_EXCLUDE:
        if re.match(regex, url):
            return True
    return False
def recursive_url(pre_namespace, pre_url, urlpattern, url_order_dict):
"""
递归发现url
:param pre_namespace: 根别名
:param pre_url: url前缀
:param urlpattern: 路由关系表
:param url_order_dict 有序url字典,用于保存递归中获取的所有路由
:return:
"""
for item in urlpattern:
        if isinstance(item, URLPattern):  # a plain route, not a nested include()
if not item.name:
continue
if pre_namespace:
name = '%s:%s' % (pre_namespace, item.name)
else:
name = item.name
url = pre_url + item.pattern.regex.pattern
            url = url.replace('^', '').replace('$', '')  # strip the regex anchors ^ and $
if check_url_exclude(url):
continue
url_order_dict[name] = {'name': name, 'url': url}
        elif isinstance(item, URLResolver):  # nested routing via include()
if pre_namespace:
if item.namespace:
namespace = '%s:%s' % (pre_namespace, item.namespace)
else:
                    # namespace = item.namespace  # an alternative way to write this
namespace = pre_namespace
else:
if item.namespace:
namespace = item.namespace
else:
namespace = None
# print(item.pattern.regex.pattern)
recursive_url(namespace, pre_url + item.pattern.regex.pattern, item.url_patterns, url_order_dict)
def get_all_url_dict():
url_order_dict = OrderedDict()
root = import_string(settings.ROOT_URLCONF)
recursive_url(None, '/', root.urlpatterns, url_order_dict)
return url_order_dict
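# Minimal usage sketch (an addition for illustration; assumes a configured
# Django project, i.e. settings defines ROOT_URLCONF and AUTO_DISCOVER_EXCLUDE):
if __name__ == '__main__':
    for name, item in get_all_url_dict().items():
        print(name, '->', item['url'])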
|
[
"collections.OrderedDict",
"django.utils.module_loading.import_string",
"re.match"
] |
[((1787, 1800), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1798, 1800), False, 'from collections import OrderedDict\n'), ((1812, 1848), 'django.utils.module_loading.import_string', 'import_string', (['settings.ROOT_URLCONF'], {}), '(settings.ROOT_URLCONF)\n', (1825, 1848), False, 'from django.utils.module_loading import import_string\n'), ((281, 301), 're.match', 're.match', (['regex', 'url'], {}), '(regex, url)\n', (289, 301), False, 'import re\n')]
|
# This file is part of spot_motion_monitor.
#
# Developed for LSST System Integration, Test and Commissioning.
#
# See the LICENSE file at the top-level directory of this distribution
# for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.
from PyQt5.QtCore import Qt
from spot_motion_monitor.views.camera_control_widget import CameraControlWidget
class TestCameraControl():
def setup_class(self):
self.fast_timeout = 250 # ms
def stateIsFalse(self, state):
return not state
def stateIsTrue(self, state):
print("A:", state)
return state
def test_startStopCameraButton(self, qtbot):
cc = CameraControlWidget()
cc.show()
qtbot.addWidget(cc)
assert not cc.startStopButton.isChecked()
assert cc.startStopButton.text() == "Start Camera"
assert cc.acquireRoiCheckBox.isEnabled() is False
assert cc.acquireFramesButton.isEnabled() is False
with qtbot.waitSignal(cc.cameraState, timeout=self.fast_timeout,
check_params_cb=self.stateIsTrue):
qtbot.mouseClick(cc.startStopButton, Qt.LeftButton)
assert cc.startStopButton.isChecked()
assert cc.startStopButton.text() == "Stop Camera"
assert cc.acquireRoiCheckBox.isEnabled() is True
assert cc.acquireFramesButton.isEnabled() is True
with qtbot.waitSignal(cc.cameraState, timeout=self.fast_timeout,
check_params_cb=self.stateIsFalse):
qtbot.mouseClick(cc.startStopButton, Qt.LeftButton)
assert not cc.startStopButton.isChecked()
assert cc.startStopButton.text() == "Start Camera"
def test_acquireFramesButton(self, qtbot):
cc = CameraControlWidget()
cc.show()
qtbot.addWidget(cc)
qtbot.mouseClick(cc.startStopButton, Qt.LeftButton)
assert not cc.acquireFramesButton.isChecked()
assert cc.acquireFramesButton.text() == "Start Acquire Frames"
with qtbot.waitSignal(cc.acquireFramesState, timeout=self.fast_timeout,
check_params_cb=self.stateIsTrue):
qtbot.mouseClick(cc.acquireFramesButton, Qt.LeftButton)
assert cc.acquireFramesButton.isChecked()
assert not cc.startStopButton.isEnabled()
assert cc.acquireFramesButton.text() == "Stop Acquire Frames"
with qtbot.waitSignal(cc.acquireFramesState, timeout=self.fast_timeout,
check_params_cb=self.stateIsFalse):
qtbot.mouseClick(cc.acquireFramesButton, Qt.LeftButton)
assert not cc.acquireFramesButton.isChecked()
assert cc.acquireFramesButton.text() == "Start Acquire Frames"
assert cc.startStopButton.isEnabled()
def test_acquireRoiCheckbox(self, qtbot):
cc = CameraControlWidget()
cc.show()
qtbot.addWidget(cc)
qtbot.mouseClick(cc.startStopButton, Qt.LeftButton)
assert not cc.acquireRoiCheckBox.isChecked()
with qtbot.waitSignal(cc.acquireRoiState, timeout=self.fast_timeout,
check_params_cb=self.stateIsTrue):
qtbot.mouseClick(cc.acquireRoiCheckBox, Qt.LeftButton)
assert cc.acquireRoiCheckBox.isChecked()
assert not cc.roiFpsSpinBox.isEnabled()
assert not cc.bufferSizeSpinBox.isEnabled()
with qtbot.waitSignal(cc.acquireRoiState, timeout=self.fast_timeout,
check_params_cb=self.stateIsFalse):
qtbot.mouseClick(cc.acquireRoiCheckBox, Qt.LeftButton)
assert not cc.acquireRoiCheckBox.isChecked()
assert cc.roiFpsSpinBox.isEnabled()
assert cc.bufferSizeSpinBox.isEnabled()
def test_roiFpsSpinBox(self, qtbot):
cc = CameraControlWidget()
cc.show()
qtbot.addWidget(cc)
assert cc.roiFpsSpinBox.value() == 40
cc.roiFpsSpinBox.setValue(0)
assert cc.roiFpsSpinBox.value() == 1
cc.roiFpsSpinBox.setValue(200)
assert cc.roiFpsSpinBox.value() == 150
cc.roiFpsSpinBox.stepUp()
assert cc.roiFpsSpinBox.value() == 150
cc.roiFpsSpinBox.stepDown()
assert cc.roiFpsSpinBox.value() == 149
def test_bufferSizeSpinBox(self, qtbot):
cc = CameraControlWidget()
cc.show()
qtbot.addWidget(cc)
assert cc.bufferSizeSpinBox.value() == 1024
cc.bufferSizeSpinBox.stepUp()
assert cc.bufferSizeSpinBox.value() == 2048
cc.bufferSizeSpinBox.setValue(1024)
cc.bufferSizeSpinBox.stepDown()
assert cc.bufferSizeSpinBox.value() == 512
def test_showFramesCheckBox(self, qtbot):
cc = CameraControlWidget()
cc.show()
qtbot.addWidget(cc)
assert cc.showFramesCheckBox.isChecked()
qtbot.mouseClick(cc.showFramesCheckBox, Qt.LeftButton)
assert not cc.showFramesCheckBox.isChecked()
def test_takeScreenshotButton(self, qtbot):
cc = CameraControlWidget()
cc.show()
qtbot.addWidget(cc)
assert cc.takeScreenshotButton.isEnabled() is False
qtbot.mouseClick(cc.startStopButton, Qt.LeftButton)
qtbot.mouseClick(cc.acquireFramesButton, Qt.LeftButton)
assert cc.takeScreenshotButton.isEnabled() is True
with qtbot.waitSignal(cc.takeScreenshotState, timeout=self.fast_timeout):
qtbot.mouseClick(cc.takeScreenshotButton, Qt.LeftButton)
|
[
"spot_motion_monitor.views.camera_control_widget.CameraControlWidget"
] |
[((741, 762), 'spot_motion_monitor.views.camera_control_widget.CameraControlWidget', 'CameraControlWidget', ([], {}), '()\n', (760, 762), False, 'from spot_motion_monitor.views.camera_control_widget import CameraControlWidget\n'), ((1829, 1850), 'spot_motion_monitor.views.camera_control_widget.CameraControlWidget', 'CameraControlWidget', ([], {}), '()\n', (1848, 1850), False, 'from spot_motion_monitor.views.camera_control_widget import CameraControlWidget\n'), ((2910, 2931), 'spot_motion_monitor.views.camera_control_widget.CameraControlWidget', 'CameraControlWidget', ([], {}), '()\n', (2929, 2931), False, 'from spot_motion_monitor.views.camera_control_widget import CameraControlWidget\n'), ((3859, 3880), 'spot_motion_monitor.views.camera_control_widget.CameraControlWidget', 'CameraControlWidget', ([], {}), '()\n', (3878, 3880), False, 'from spot_motion_monitor.views.camera_control_widget import CameraControlWidget\n'), ((4364, 4385), 'spot_motion_monitor.views.camera_control_widget.CameraControlWidget', 'CameraControlWidget', ([], {}), '()\n', (4383, 4385), False, 'from spot_motion_monitor.views.camera_control_widget import CameraControlWidget\n'), ((4769, 4790), 'spot_motion_monitor.views.camera_control_widget.CameraControlWidget', 'CameraControlWidget', ([], {}), '()\n', (4788, 4790), False, 'from spot_motion_monitor.views.camera_control_widget import CameraControlWidget\n'), ((5064, 5085), 'spot_motion_monitor.views.camera_control_widget.CameraControlWidget', 'CameraControlWidget', ([], {}), '()\n', (5083, 5085), False, 'from spot_motion_monitor.views.camera_control_widget import CameraControlWidget\n')]
|
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import print_function
import os
try:
from collections import OrderedDict
except ImportError:
# Python 2.6
from ordereddict import OrderedDict
from dock.core import DockerTasker
from dock.inner import DockerBuildWorkflow
from dock.plugin import PreBuildPluginsRunner, PostBuildPluginsRunner
from dock.plugins.pre_inject_yum_repo import InjectYumRepoPlugin, alter_yum_commands
from dock.util import ImageName
from tests.constants import DOCKERFILE_GIT
class X(object):
pass
def test_yuminject_plugin(tmpdir):
df = """\
FROM fedora
RUN yum install -y python-django
CMD blabla"""
tmp_df = os.path.join(str(tmpdir), 'Dockerfile')
with open(tmp_df, mode="w") as fd:
fd.write(df)
tasker = DockerTasker()
workflow = DockerBuildWorkflow(DOCKERFILE_GIT, "test-image")
setattr(workflow, 'builder', X)
metalink = 'https://mirrors.fedoraproject.org/metalink?repo=fedora-$releasever&arch=$basearch'
workflow.repos['yum'] = [OrderedDict(
(('name', 'my-repo'),
('metalink', metalink),
('enabled', 1),
('gpgcheck', 0)),
)]
setattr(workflow.builder, 'image_id', "asd123")
setattr(workflow.builder, 'df_path', tmp_df)
setattr(workflow.builder, 'base_image', ImageName(repo='Fedora', tag='21'))
setattr(workflow.builder, 'git_dockerfile_path', None)
setattr(workflow.builder, 'git_path', None)
runner = PreBuildPluginsRunner(tasker, workflow, [{
'name': InjectYumRepoPlugin.key,
'args': {}}])
runner.run()
assert InjectYumRepoPlugin.key is not None
with open(tmp_df, 'r') as fd:
altered_df = fd.read()
expected_output = r"""FROM fedora
RUN printf "[my-repo]\nname=my-repo\nmetalink=https://mirrors.fedoraproject.org/metalink?repo=fedora-\$releasever&arch=\$basearch\nenabled=1\ngpgcheck=0\n" >/etc/yum.repos.d/dock-injected.repo && yum install -y python-django && yum clean all && rm -f /etc/yum.repos.d/dock-injected.repo
CMD blabla"""
assert expected_output == altered_df
def test_yuminject_multiline(tmpdir):
df = """\
FROM fedora
RUN yum install -y httpd \
uwsgi
CMD blabla"""
tmp_df = os.path.join(str(tmpdir), 'Dockerfile')
with open(tmp_df, mode="w") as fd:
fd.write(df)
tasker = DockerTasker()
workflow = DockerBuildWorkflow(DOCKERFILE_GIT, "test-image")
setattr(workflow, 'builder', X)
metalink = r'https://mirrors.fedoraproject.org/metalink?repo=fedora-$releasever&arch=$basearch'
workflow.repos['yum'] = [OrderedDict(
(('name', 'my-repo'),
('metalink', metalink),
('enabled', 1),
('gpgcheck', 0)),
)]
setattr(workflow.builder, 'image_id', "asd123")
setattr(workflow.builder, 'df_path', tmp_df)
setattr(workflow.builder, 'base_image', ImageName(repo='Fedora', tag='21'))
setattr(workflow.builder, 'git_dockerfile_path', None)
setattr(workflow.builder, 'git_path', None)
runner = PreBuildPluginsRunner(tasker, workflow,
[{'name': InjectYumRepoPlugin.key, 'args': {}}])
runner.run()
assert InjectYumRepoPlugin.key is not None
with open(tmp_df, 'r') as fd:
altered_df = fd.read()
expected_output = r"""FROM fedora
RUN printf "[my-repo]\nname=my-repo\nmetalink=https://mirrors.fedoraproject.org/metalink?repo=fedora-\$releasever&arch=\$basearch\nenabled=1\ngpgcheck=0\n" >/etc/yum.repos.d/dock-injected.repo && yum install -y httpd uwsgi && yum clean all && rm -f /etc/yum.repos.d/dock-injected.repo
CMD blabla"""
assert altered_df == expected_output
def test_complex_df():
df = """\
FROM fedora
RUN asd
RUN yum install x
ENV x=y
RUN yum install \
x \
y \
&& something else
CMD asd"""
wrap_cmd = "RUN test && %(yum_command)s && asd"
out = alter_yum_commands(df, wrap_cmd)
expected_output = """\
FROM fedora
RUN asd
RUN test && yum install x && asd
ENV x=y
RUN test && yum install x y && something else && asd
CMD asd"""
assert out == expected_output
|
[
"dock.plugins.pre_inject_yum_repo.alter_yum_commands",
"dock.util.ImageName",
"dock.inner.DockerBuildWorkflow",
"dock.core.DockerTasker",
"dock.plugin.PreBuildPluginsRunner",
"ordereddict.OrderedDict"
] |
[((922, 936), 'dock.core.DockerTasker', 'DockerTasker', ([], {}), '()\n', (934, 936), False, 'from dock.core import DockerTasker\n'), ((952, 1001), 'dock.inner.DockerBuildWorkflow', 'DockerBuildWorkflow', (['DOCKERFILE_GIT', '"""test-image"""'], {}), "(DOCKERFILE_GIT, 'test-image')\n", (971, 1001), False, 'from dock.inner import DockerBuildWorkflow\n'), ((1602, 1694), 'dock.plugin.PreBuildPluginsRunner', 'PreBuildPluginsRunner', (['tasker', 'workflow', "[{'name': InjectYumRepoPlugin.key, 'args': {}}]"], {}), "(tasker, workflow, [{'name': InjectYumRepoPlugin.key,\n 'args': {}}])\n", (1623, 1694), False, 'from dock.plugin import PreBuildPluginsRunner, PostBuildPluginsRunner\n'), ((2538, 2552), 'dock.core.DockerTasker', 'DockerTasker', ([], {}), '()\n', (2550, 2552), False, 'from dock.core import DockerTasker\n'), ((2568, 2617), 'dock.inner.DockerBuildWorkflow', 'DockerBuildWorkflow', (['DOCKERFILE_GIT', '"""test-image"""'], {}), "(DOCKERFILE_GIT, 'test-image')\n", (2587, 2617), False, 'from dock.inner import DockerBuildWorkflow\n'), ((3218, 3310), 'dock.plugin.PreBuildPluginsRunner', 'PreBuildPluginsRunner', (['tasker', 'workflow', "[{'name': InjectYumRepoPlugin.key, 'args': {}}]"], {}), "(tasker, workflow, [{'name': InjectYumRepoPlugin.key,\n 'args': {}}])\n", (3239, 3310), False, 'from dock.plugin import PreBuildPluginsRunner, PostBuildPluginsRunner\n'), ((4083, 4115), 'dock.plugins.pre_inject_yum_repo.alter_yum_commands', 'alter_yum_commands', (['df', 'wrap_cmd'], {}), '(df, wrap_cmd)\n', (4101, 4115), False, 'from dock.plugins.pre_inject_yum_repo import InjectYumRepoPlugin, alter_yum_commands\n'), ((1168, 1264), 'ordereddict.OrderedDict', 'OrderedDict', (["(('name', 'my-repo'), ('metalink', metalink), ('enabled', 1), ('gpgcheck', 0))"], {}), "((('name', 'my-repo'), ('metalink', metalink), ('enabled', 1), (\n 'gpgcheck', 0)))\n", (1179, 1264), False, 'from ordereddict import OrderedDict\n'), ((1446, 1480), 'dock.util.ImageName', 'ImageName', ([], {'repo': '"""Fedora"""', 'tag': '"""21"""'}), "(repo='Fedora', tag='21')\n", (1455, 1480), False, 'from dock.util import ImageName\n'), ((2785, 2881), 'ordereddict.OrderedDict', 'OrderedDict', (["(('name', 'my-repo'), ('metalink', metalink), ('enabled', 1), ('gpgcheck', 0))"], {}), "((('name', 'my-repo'), ('metalink', metalink), ('enabled', 1), (\n 'gpgcheck', 0)))\n", (2796, 2881), False, 'from ordereddict import OrderedDict\n'), ((3062, 3096), 'dock.util.ImageName', 'ImageName', ([], {'repo': '"""Fedora"""', 'tag': '"""21"""'}), "(repo='Fedora', tag='21')\n", (3071, 3096), False, 'from dock.util import ImageName\n')]
|
# -*- coding: utf-8 -*-
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
from .hrv_time import hrv_time
from .hrv_frequency import hrv_frequency
from .hrv_frequency import _hrv_frequency_show
from .hrv_nonlinear import hrv_nonlinear
from .hrv_nonlinear import _hrv_nonlinear_show
from .hrv_utils import _hrv_get_rri
from .hrv_utils import _hrv_sanitize_input
from ..stats import summary_plot
def hrv(peaks, sampling_rate=1000, show=False):
""" Computes indices of Heart Rate Variability (HRV).
Computes HRV indices in the time-, frequency-, and nonlinear domain. Note
that a minimum duration of the signal containing the peaks is recommended
    for some HRV indices to be meaningful. For instance, 1, 2 and 5 minutes of
    high-quality signal are the recommended minima for HF, LF and LF/HF,
respectively. See references for details.
Parameters
----------
peaks : dict
Samples at which cardiac extrema (i.e., R-peaks, systolic peaks) occur.
Dictionary returned by ecg_findpeaks, ecg_peaks, ppg_findpeaks, or
ppg_peaks.
sampling_rate : int, optional
Sampling rate (Hz) of the continuous cardiac signal in which the peaks
occur. Should be at least twice as high as the highest frequency in vhf.
By default 1000.
show : bool, optional
        If True, returns the plots that are generated for each of the domains.
Returns
-------
DataFrame
Contains HRV metrics from three domains:
- frequency (for details see hrv_frequency)
- time (for details see hrv_time)
- non-linear (for details see hrv_nonlinear)
See Also
--------
ecg_peaks, ppg_peaks, hrv_time, hrv_frequency, hrv_nonlinear
Examples
--------
>>> import neurokit2 as nk
>>>
>>> # Download data
>>> data = nk.data("bio_resting_5min_100hz")
>>>
>>> # Find peaks
>>> peaks, info = nk.ecg_peaks(data["ECG"], sampling_rate=100)
>>>
>>> # Compute HRV indices
>>> nk.hrv(peaks, sampling_rate=100, show=True)
References
----------
- <NAME>. (2002). Assessing heart rate variability from real-world
Holter reports. Cardiac electrophysiology review, 6(3), 239-244.
- <NAME>., & <NAME>. (2017). An overview of heart rate
variability metrics and norms. Frontiers in public health, 5, 258.
"""
# Get indices
out = [] # initialize empty container
# Gather indices
out.append(hrv_time(peaks, sampling_rate=sampling_rate))
out.append(hrv_frequency(peaks, sampling_rate=sampling_rate))
out.append(hrv_nonlinear(peaks, sampling_rate=sampling_rate))
out = pd.concat(out, axis=1)
# Plot
if show:
_hrv_plot(peaks, out, sampling_rate)
return out
def _hrv_plot(peaks, hrv, sampling_rate=1000):
fig = plt.figure(constrained_layout=False)
spec = matplotlib.gridspec.GridSpec(ncols=2, nrows=2,
height_ratios=[1, 1], width_ratios=[1, 1])
# Arrange grids
ax_distrib = fig.add_subplot(spec[0, :-1])
ax_distrib.set_xlabel('R-R intervals (ms)')
ax_distrib.set_title("Distribution of R-R intervals")
ax_psd = fig.add_subplot(spec[1, :-1])
ax_poincare = fig.add_subplot(spec[:, -1])
# Distribution of RR intervals
peaks = _hrv_sanitize_input(peaks)
rri = _hrv_get_rri(peaks, sampling_rate=sampling_rate, interpolate=False)
ax_distrib = summary_plot(rri, ax=ax_distrib)
# Poincare plot
out_poincare = hrv.copy()
out_poincare.columns = [col.replace('HRV_', '') for col in out_poincare.columns]
ax_poincare = _hrv_nonlinear_show(rri, out_poincare, ax=ax_poincare)
# PSD plot
rri, sampling_rate = _hrv_get_rri(peaks,
sampling_rate=sampling_rate, interpolate=True)
_hrv_frequency_show(rri, out_poincare,
sampling_rate=sampling_rate, ax=ax_psd)
|
[
"matplotlib.gridspec.GridSpec",
"matplotlib.pyplot.figure",
"pandas.concat"
] |
[((2670, 2692), 'pandas.concat', 'pd.concat', (['out'], {'axis': '(1)'}), '(out, axis=1)\n', (2679, 2692), True, 'import pandas as pd\n'), ((2840, 2876), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'constrained_layout': '(False)'}), '(constrained_layout=False)\n', (2850, 2876), True, 'import matplotlib.pyplot as plt\n'), ((2888, 2981), 'matplotlib.gridspec.GridSpec', 'matplotlib.gridspec.GridSpec', ([], {'ncols': '(2)', 'nrows': '(2)', 'height_ratios': '[1, 1]', 'width_ratios': '[1, 1]'}), '(ncols=2, nrows=2, height_ratios=[1, 1],\n width_ratios=[1, 1])\n', (2916, 2981), False, 'import matplotlib\n')]
|
import math
import tensorflow as tf
import unittest
from tensorstream.helpers.any_nan import any_nan
def in_tf(x):
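  # Evaluates a TF1-style graph tensor to a concrete value in a throwaway session.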
with tf.Session() as sess:
return sess.run(x)
class AnyNanSpec(unittest.TestCase):
def test_any_nan_scalar(self):
x = any_nan(tf.constant(4.0))
self.assertEqual(in_tf(x), False)
y = any_nan(tf.constant(math.nan))
self.assertEqual(in_tf(y), True)
def test_any_nan_tensor(self):
x = any_nan(tf.constant([4.0, 3.0, 2.0]))
self.assertEqual(in_tf(x), False)
y = any_nan(tf.constant([math.nan, 3.0, 2.0]))
self.assertEqual(in_tf(y), True)
z = any_nan(tf.constant([math.nan, math.nan, math.nan]))
self.assertEqual(in_tf(z), True)
def test_any_nan_complex_type(self):
x = any_nan({
'a': tf.constant([3.0, 2.0]),
'b': [tf.constant(3.2), tf.constant([2.1, 2.3, 4.3])],
'c': {
'z': tf.constant([5.2, 5.2]),
'y': tf.constant([3.4, 5.2])
}
})
self.assertEqual(in_tf(x), False)
y = any_nan({
'a': tf.constant([3.0, 2.0]),
'b': [tf.constant(3.2), tf.constant([2.1, 2.3, math.nan])],
'c': {
'z': tf.constant([5.2, 5.2]),
'y': tf.constant([3.4, 5.2])
}
})
self.assertEqual(in_tf(y), True)
z = any_nan({
'a': tf.constant([math.nan, math.nan]),
'b': [tf.constant(math.nan), tf.constant([math.nan, math.nan, math.nan])],
'c': {
'z': tf.constant([math.nan, math.nan]),
'y': tf.constant([math.nan, math.nan])
}
})
self.assertEqual(in_tf(z), True)
|
[
"tensorflow.Session",
"tensorflow.constant"
] |
[((124, 136), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (134, 136), True, 'import tensorflow as tf\n'), ((256, 272), 'tensorflow.constant', 'tf.constant', (['(4.0)'], {}), '(4.0)\n', (267, 272), True, 'import tensorflow as tf\n'), ((329, 350), 'tensorflow.constant', 'tf.constant', (['math.nan'], {}), '(math.nan)\n', (340, 350), True, 'import tensorflow as tf\n'), ((439, 467), 'tensorflow.constant', 'tf.constant', (['[4.0, 3.0, 2.0]'], {}), '([4.0, 3.0, 2.0])\n', (450, 467), True, 'import tensorflow as tf\n'), ((524, 557), 'tensorflow.constant', 'tf.constant', (['[math.nan, 3.0, 2.0]'], {}), '([math.nan, 3.0, 2.0])\n', (535, 557), True, 'import tensorflow as tf\n'), ((613, 656), 'tensorflow.constant', 'tf.constant', (['[math.nan, math.nan, math.nan]'], {}), '([math.nan, math.nan, math.nan])\n', (624, 656), True, 'import tensorflow as tf\n'), ((764, 787), 'tensorflow.constant', 'tf.constant', (['[3.0, 2.0]'], {}), '([3.0, 2.0])\n', (775, 787), True, 'import tensorflow as tf\n'), ((1022, 1045), 'tensorflow.constant', 'tf.constant', (['[3.0, 2.0]'], {}), '([3.0, 2.0])\n', (1033, 1045), True, 'import tensorflow as tf\n'), ((1283, 1316), 'tensorflow.constant', 'tf.constant', (['[math.nan, math.nan]'], {}), '([math.nan, math.nan])\n', (1294, 1316), True, 'import tensorflow as tf\n'), ((801, 817), 'tensorflow.constant', 'tf.constant', (['(3.2)'], {}), '(3.2)\n', (812, 817), True, 'import tensorflow as tf\n'), ((819, 847), 'tensorflow.constant', 'tf.constant', (['[2.1, 2.3, 4.3]'], {}), '([2.1, 2.3, 4.3])\n', (830, 847), True, 'import tensorflow as tf\n'), ((876, 899), 'tensorflow.constant', 'tf.constant', (['[5.2, 5.2]'], {}), '([5.2, 5.2])\n', (887, 899), True, 'import tensorflow as tf\n'), ((914, 937), 'tensorflow.constant', 'tf.constant', (['[3.4, 5.2]'], {}), '([3.4, 5.2])\n', (925, 937), True, 'import tensorflow as tf\n'), ((1059, 1075), 'tensorflow.constant', 'tf.constant', (['(3.2)'], {}), '(3.2)\n', (1070, 1075), True, 'import tensorflow as tf\n'), ((1077, 1110), 'tensorflow.constant', 'tf.constant', (['[2.1, 2.3, math.nan]'], {}), '([2.1, 2.3, math.nan])\n', (1088, 1110), True, 'import tensorflow as tf\n'), ((1139, 1162), 'tensorflow.constant', 'tf.constant', (['[5.2, 5.2]'], {}), '([5.2, 5.2])\n', (1150, 1162), True, 'import tensorflow as tf\n'), ((1177, 1200), 'tensorflow.constant', 'tf.constant', (['[3.4, 5.2]'], {}), '([3.4, 5.2])\n', (1188, 1200), True, 'import tensorflow as tf\n'), ((1330, 1351), 'tensorflow.constant', 'tf.constant', (['math.nan'], {}), '(math.nan)\n', (1341, 1351), True, 'import tensorflow as tf\n'), ((1353, 1396), 'tensorflow.constant', 'tf.constant', (['[math.nan, math.nan, math.nan]'], {}), '([math.nan, math.nan, math.nan])\n', (1364, 1396), True, 'import tensorflow as tf\n'), ((1425, 1458), 'tensorflow.constant', 'tf.constant', (['[math.nan, math.nan]'], {}), '([math.nan, math.nan])\n', (1436, 1458), True, 'import tensorflow as tf\n'), ((1473, 1506), 'tensorflow.constant', 'tf.constant', (['[math.nan, math.nan]'], {}), '([math.nan, math.nan])\n', (1484, 1506), True, 'import tensorflow as tf\n')]
|
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.views.generic import ListView, CreateView, DetailView, FormView
from django.forms import ValidationError
from .models import Candidate, VoteBallot, CANDIDATE_POSITIONS, VoteService, VoteStatus, POSITION_NUMS
from .forms import StartElectionForm, CreateCandidateApplicationForm, VoteForm
from texaslan.utils.utils import PledgeOrActiveRequiredMixin, HasNotAppliedRequiredMixin, HasNotVotedRequiredMixin
from texaslan.site_settings.models import SiteSettingService
class CandidateListView(PledgeOrActiveRequiredMixin, FormView):
template_name = 'voting/candidate_list.html'
form_class = StartElectionForm
def get_context_data(self, **kwargs):
context = super(CandidateListView, self).get_context_data(**kwargs)
context['voting_closed'] = SiteSettingService.is_voting_closed()
if context['voting_closed']:
return context
context['voting_open'] = SiteSettingService.is_voting_currently()
context['has_not_voted'] = True
try:
vote_status = VoteStatus.objects.get(voter__username=self.request.user.username)
context['has_not_voted'] = not vote_status.has_voted
except VoteStatus.DoesNotExist:
pass
positions_list = []
for (position_code, position_name) in CANDIDATE_POSITIONS:
has_winner = False
has_applied = False
is_applying_open = SiteSettingService.is_voting_applications_open()
            try:
                candidates = Candidate.objects.filter(position=position_code)
                for cand in candidates:
                    if cand.user.pk == self.request.user.pk:
                        has_applied = True
                    if cand.has_won:
                        has_winner = True
            except Candidate.DoesNotExist:
                candidates = []
            positions_list.append((position_name, position_code, has_winner, has_applied, is_applying_open, candidates,))
context['positions'] = positions_list
return context
def get_success_url(self):
messages.add_message(self.request, messages.SUCCESS, 'Election was successful!')
return reverse('voting:list')
class CandidateApplyView(HasNotAppliedRequiredMixin, CreateView):
template_name = 'voting/candidate_apply.html'
model = Candidate
form_class = CreateCandidateApplicationForm
def get_context_data(self, **kwargs):
context = super(CandidateApplyView, self).get_context_data(**kwargs)
context['position_id'] = self.kwargs.get("position")
context['position'] = VoteService.get_position_str(context['position_id'])
return context
def form_valid(self, form):
# This method is called when valid form data has been POSTed.
# It should return an HttpResponse.
candidate = form.instance
candidate.position = form.data['position_id']
candidate.user = self.request.user
return super(CandidateApplyView, self).form_valid(form)
def get_success_url(self):
messages.add_message(self.request, messages.SUCCESS, 'Application was submitted!')
return reverse('voting:list')
class CandidateDetailsView(PledgeOrActiveRequiredMixin, DetailView):
template_name = 'voting/candidate_detail.html'
model = Candidate
def get_context_data(self, **kwargs):
context = super(CandidateDetailsView, self).get_context_data(**kwargs)
context['position_id'] = self.kwargs.get("position")
context['position'] = VoteService.get_position_str(context['position_id'])
return context
def get_object(self, queryset=None):
return get_object_or_404(Candidate,
position=self.kwargs.get('position'), user__username=self.kwargs.get('username'))
class VoteView(HasNotVotedRequiredMixin, FormView):
template_name = 'voting/vote.html'
form_class = VoteForm
def form_invalid(self, form):
messages.add_message(self.request, messages.ERROR, form.errors.as_data()['__all__'][0].message)
return super(VoteView, self).form_invalid(form)
def get_success_url(self):
messages.add_message(self.request, messages.SUCCESS, 'Successfully voted!')
return reverse('voting:list')
def form_valid(self, form):
# This method is called when valid form data has been POSTed.
# It should return an HttpResponse.
form.submit_ballot(self.request.user)
return super(VoteView, self).form_valid(form)
def get_form_kwargs(self):
kwargs = super(VoteView, self).get_form_kwargs()
extra = []
for (position_id, position) in CANDIDATE_POSITIONS:
# If we have all our winners, no need to fill this out.
if len(set(Candidate.objects.filter(position=position_id, has_won=True))) == POSITION_NUMS[position_id]:
continue
extra.append((position_id, position, set(Candidate.objects.filter(position=position_id)),))
kwargs['extra'] = extra
kwargs['user'] = self.request.user
return kwargs
|
[
"texaslan.site_settings.models.SiteSettingService.is_voting_applications_open",
"texaslan.site_settings.models.SiteSettingService.is_voting_closed",
"django.core.urlresolvers.reverse",
"texaslan.site_settings.models.SiteSettingService.is_voting_currently",
"django.contrib.messages.add_message"
] |
[((954, 991), 'texaslan.site_settings.models.SiteSettingService.is_voting_closed', 'SiteSettingService.is_voting_closed', ([], {}), '()\n', (989, 991), False, 'from texaslan.site_settings.models import SiteSettingService\n'), ((1090, 1130), 'texaslan.site_settings.models.SiteSettingService.is_voting_currently', 'SiteSettingService.is_voting_currently', ([], {}), '()\n', (1128, 1130), False, 'from texaslan.site_settings.models import SiteSettingService\n'), ((2239, 2324), 'django.contrib.messages.add_message', 'messages.add_message', (['self.request', 'messages.SUCCESS', '"""Election was successful!"""'], {}), "(self.request, messages.SUCCESS, 'Election was successful!'\n )\n", (2259, 2324), False, 'from django.contrib import messages\n'), ((2335, 2357), 'django.core.urlresolvers.reverse', 'reverse', (['"""voting:list"""'], {}), "('voting:list')\n", (2342, 2357), False, 'from django.core.urlresolvers import reverse\n'), ((3215, 3301), 'django.contrib.messages.add_message', 'messages.add_message', (['self.request', 'messages.SUCCESS', '"""Application was submitted!"""'], {}), "(self.request, messages.SUCCESS,\n 'Application was submitted!')\n", (3235, 3301), False, 'from django.contrib import messages\n'), ((3313, 3335), 'django.core.urlresolvers.reverse', 'reverse', (['"""voting:list"""'], {}), "('voting:list')\n", (3320, 3335), False, 'from django.core.urlresolvers import reverse\n'), ((4324, 4399), 'django.contrib.messages.add_message', 'messages.add_message', (['self.request', 'messages.SUCCESS', '"""Successfully voted!"""'], {}), "(self.request, messages.SUCCESS, 'Successfully voted!')\n", (4344, 4399), False, 'from django.contrib import messages\n'), ((4415, 4437), 'django.core.urlresolvers.reverse', 'reverse', (['"""voting:list"""'], {}), "('voting:list')\n", (4422, 4437), False, 'from django.core.urlresolvers import reverse\n'), ((1590, 1638), 'texaslan.site_settings.models.SiteSettingService.is_voting_applications_open', 'SiteSettingService.is_voting_applications_open', ([], {}), '()\n', (1636, 1638), False, 'from texaslan.site_settings.models import SiteSettingService\n')]
|
from multiprocessing import Process
import os
import time
def run_proc(process_name):
print('running subprocess %s(%s)......' % (process_name, os.getpid()))
count = 100
for i in range(count):
print("*** {} ***".format(i))
time.sleep(1)
os.mkdir(str(count))
print('sub process end')
if __name__ == '__main__':
print('Process %s' % os.getpid())
p = Process(target=run_proc, args=('test',))
print('sub process beginning')
p.start()
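    # start() launches the child asynchronously; uncommenting join() below
    # would block the parent until run_proc's ~100 s loop completes.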
# p.join()
# print('sub process end')
print('Process end')
|
[
"multiprocessing.Process",
"os.getpid",
"time.sleep"
] |
[((394, 434), 'multiprocessing.Process', 'Process', ([], {'target': 'run_proc', 'args': "('test',)"}), "(target=run_proc, args=('test',))\n", (401, 434), False, 'from multiprocessing import Process\n'), ((251, 264), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (261, 264), False, 'import time\n'), ((373, 384), 'os.getpid', 'os.getpid', ([], {}), '()\n', (382, 384), False, 'import os\n'), ((148, 159), 'os.getpid', 'os.getpid', ([], {}), '()\n', (157, 159), False, 'import os\n')]
|
from django.db import models
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext as _
__all__ = ['ModelWithDateRange', 'ModelWithDateTimeRange',]
class ModelWithDateRange(models.Model):
# Attributes
start_date = models.DateField()
end_date = models.DateField()
# Methods
def clean(self):
if self.start_date and self.end_date\
and self.start_date > self.end_date:
raise ValidationError(_('End date must be greater or ' \
'equal to start date.'))
# Meta-data
class Meta:
abstract = True
class ModelWithDateTimeRange(models.Model):
# Attributes
start_datetime = models.DateTimeField()
end_datetime = models.DateTimeField()
# Methods
def clean(self):
if self.start_datetime and self.end_datetime\
and self.start_datetime > self.end_datetime:
raise ValidationError(_('End datetime must be greater or equal' \
' to start datetime.'))
# Meta-data
class Meta:
abstract = True
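
# A minimal illustrative subclass (hypothetical "Event" model, not part of this
# module), showing how the abstract range validation is inherited:
#
#     class Event(ModelWithDateRange):
#         name = models.CharField(max_length=64)
#
# Event(start_date=..., end_date=...).full_clean() then raises ValidationError
# whenever start_date > end_date.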
|
[
"django.db.models.DateField",
"django.db.models.DateTimeField",
"django.utils.translation.ugettext"
] |
[((269, 287), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (285, 287), False, 'from django.db import models\n'), ((303, 321), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (319, 321), False, 'from django.db import models\n'), ((720, 742), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (740, 742), False, 'from django.db import models\n'), ((762, 784), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (782, 784), False, 'from django.db import models\n'), ((483, 536), 'django.utils.translation.ugettext', '_', (['"""End date must be greater or equal to start date."""'], {}), "('End date must be greater or equal to start date.')\n", (484, 536), True, 'from django.utils.translation import ugettext as _\n'), ((962, 1023), 'django.utils.translation.ugettext', '_', (['"""End datetime must be greater or equal to start datetime."""'], {}), "('End datetime must be greater or equal to start datetime.')\n", (963, 1023), True, 'from django.utils.translation import ugettext as _\n')]
|
from django.urls.conf import path, include
from rest_framework import routers
from . import views
router = routers.DefaultRouter()
router.register(
r"transactions",
views.TransactionViewSet,
)
user_transaction_urls = [
path(
"transactions/",
views.UserTransactionListCreateView.as_view(),
name="user-transactions",
),
path(
"transactions/<int:id>/",
views.UserTransactionView.as_view(),
name="user-transactions-detail",
),
]
urlpatterns = [
path(
"user/",
include(user_transaction_urls),
),
path(
"",
include(router.urls),
),
path(
"bank/transaction/",
views.ForeignTransactionView.as_view(),
name="bank-transaction",
),
]
|
[
"rest_framework.routers.DefaultRouter",
"django.urls.conf.include"
] |
[((109, 132), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (130, 132), False, 'from rest_framework import routers\n'), ((552, 582), 'django.urls.conf.include', 'include', (['user_transaction_urls'], {}), '(user_transaction_urls)\n', (559, 582), False, 'from django.urls.conf import path, include\n'), ((621, 641), 'django.urls.conf.include', 'include', (['router.urls'], {}), '(router.urls)\n', (628, 641), False, 'from django.urls.conf import path, include\n')]
|
#!/usr/bin/env python3
# cardinal_pythonlib/openxml/pause_process_by_disk_space.py
"""
===============================================================================
Original code copyright (C) 2009-2021 <NAME> (<EMAIL>).
This file is part of cardinal_pythonlib.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================================================================
**Pauses and resumes a process by disk space; LINUX ONLY.**
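
Example (illustrative values; the PID and thresholds are placeholders)::

    python pause_process_by_disk_space.py 12345 --path / --pause_when_free_below 50G --resume_when_free_above 70G --check_every 60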
"""
from argparse import ArgumentParser
import logging
import shutil
import subprocess
import sys
from time import sleep
from typing import NoReturn
from cardinal_pythonlib.logs import (
BraceStyleAdapter,
main_only_quicksetup_rootlogger,
)
from cardinal_pythonlib.sizeformatter import human2bytes, sizeof_fmt
log = BraceStyleAdapter(logging.getLogger(__name__))
def is_running(process_id: int) -> bool:
"""
Uses the Unix ``ps`` program to see if a process is running.
"""
pstr = str(process_id)
encoding = sys.getdefaultencoding()
s = subprocess.Popen(["ps", "-p", pstr], stdout=subprocess.PIPE)
for line in s.stdout:
strline = line.decode(encoding)
if pstr in strline:
return True
return False
def main() -> NoReturn:
"""
Command-line handler for the ``pause_process_by_disk_space`` tool.
Use the ``--help`` option for help.
"""
parser = ArgumentParser(
description="Pauses and resumes a process by disk space; LINUX ONLY."
)
parser.add_argument(
"process_id", type=int,
help="Process ID."
)
parser.add_argument(
"--path", required=True,
help="Path to check free space for (e.g. '/')"
)
parser.add_argument(
"--pause_when_free_below", type=str, required=True,
help="Pause process when free disk space below this value (in bytes "
"or as e.g. '50G')"
)
parser.add_argument(
"--resume_when_free_above", type=str, required=True,
help="Resume process when free disk space above this value (in bytes "
"or as e.g. '70G')"
)
parser.add_argument(
"--check_every", type=int, required=True,
help="Check every n seconds (where this is n)"
)
parser.add_argument(
"--verbose", action="store_true",
help="Verbose output"
)
args = parser.parse_args()
main_only_quicksetup_rootlogger(
level=logging.DEBUG if args.verbose else logging.INFO)
minimum = human2bytes(args.pause_when_free_below)
maximum = human2bytes(args.resume_when_free_above)
path = args.path
process_id = args.process_id
period = args.check_every
pause_args = ["kill", "-STOP", str(process_id)]
resume_args = ["kill", "-CONT", str(process_id)]
assert minimum < maximum, "Minimum must be less than maximum"
log.info(
f"Starting: controlling process {process_id}; "
f"checking disk space every {period} s; "
f"will pause when free space on {path} "
f"is less than {sizeof_fmt(minimum)} and "
f"resume when free space is at least {sizeof_fmt(maximum)}; "
f"pause command will be {pause_args}; "
f"resume command will be {resume_args}."
)
log.debug("Presuming that the process is RUNNING to begin with.")
paused = False
while True:
if not is_running(process_id):
log.info("Process {} is no longer running", process_id)
sys.exit(0)
space = shutil.disk_usage(path).free
log.debug("Disk space on {} is {}", path, sizeof_fmt(space))
if space < minimum and not paused:
log.info("Disk space down to {}: pausing process {}",
sizeof_fmt(space), process_id)
subprocess.check_call(pause_args)
paused = True
elif space >= maximum and paused:
log.info("Disk space up to {}: resuming process {}",
sizeof_fmt(space), process_id)
subprocess.check_call(resume_args)
paused = False
log.debug("Sleeping for {} seconds...", period)
sleep(period)
if __name__ == '__main__':
main()
|
[
"subprocess.Popen",
"subprocess.check_call",
"argparse.ArgumentParser",
"cardinal_pythonlib.sizeformatter.human2bytes",
"sys.getdefaultencoding",
"cardinal_pythonlib.sizeformatter.sizeof_fmt",
"time.sleep",
"shutil.disk_usage",
"cardinal_pythonlib.logs.main_only_quicksetup_rootlogger",
"sys.exit",
"logging.getLogger"
] |
[((1325, 1352), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1342, 1352), False, 'import logging\n'), ((1520, 1544), 'sys.getdefaultencoding', 'sys.getdefaultencoding', ([], {}), '()\n', (1542, 1544), False, 'import sys\n'), ((1553, 1613), 'subprocess.Popen', 'subprocess.Popen', (["['ps', '-p', pstr]"], {'stdout': 'subprocess.PIPE'}), "(['ps', '-p', pstr], stdout=subprocess.PIPE)\n", (1569, 1613), False, 'import subprocess\n'), ((1915, 2005), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Pauses and resumes a process by disk space; LINUX ONLY."""'}), "(description=\n 'Pauses and resumes a process by disk space; LINUX ONLY.')\n", (1929, 2005), False, 'from argparse import ArgumentParser\n'), ((2904, 2994), 'cardinal_pythonlib.logs.main_only_quicksetup_rootlogger', 'main_only_quicksetup_rootlogger', ([], {'level': '(logging.DEBUG if args.verbose else logging.INFO)'}), '(level=logging.DEBUG if args.verbose else\n logging.INFO)\n', (2935, 2994), False, 'from cardinal_pythonlib.logs import BraceStyleAdapter, main_only_quicksetup_rootlogger\n'), ((3015, 3054), 'cardinal_pythonlib.sizeformatter.human2bytes', 'human2bytes', (['args.pause_when_free_below'], {}), '(args.pause_when_free_below)\n', (3026, 3054), False, 'from cardinal_pythonlib.sizeformatter import human2bytes, sizeof_fmt\n'), ((3069, 3109), 'cardinal_pythonlib.sizeformatter.human2bytes', 'human2bytes', (['args.resume_when_free_above'], {}), '(args.resume_when_free_above)\n', (3080, 3109), False, 'from cardinal_pythonlib.sizeformatter import human2bytes, sizeof_fmt\n'), ((4641, 4654), 'time.sleep', 'sleep', (['period'], {}), '(period)\n', (4646, 4654), False, 'from time import sleep\n'), ((3985, 3996), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3993, 3996), False, 'import sys\n'), ((4013, 4036), 'shutil.disk_usage', 'shutil.disk_usage', (['path'], {}), '(path)\n', (4030, 4036), False, 'import shutil\n'), ((4092, 4109), 'cardinal_pythonlib.sizeformatter.sizeof_fmt', 'sizeof_fmt', (['space'], {}), '(space)\n', (4102, 4109), False, 'from cardinal_pythonlib.sizeformatter import human2bytes, sizeof_fmt\n'), ((4284, 4317), 'subprocess.check_call', 'subprocess.check_call', (['pause_args'], {}), '(pause_args)\n', (4305, 4317), False, 'import subprocess\n'), ((3560, 3579), 'cardinal_pythonlib.sizeformatter.sizeof_fmt', 'sizeof_fmt', (['minimum'], {}), '(minimum)\n', (3570, 3579), False, 'from cardinal_pythonlib.sizeformatter import human2bytes, sizeof_fmt\n'), ((3633, 3652), 'cardinal_pythonlib.sizeformatter.sizeof_fmt', 'sizeof_fmt', (['maximum'], {}), '(maximum)\n', (3643, 3652), False, 'from cardinal_pythonlib.sizeformatter import human2bytes, sizeof_fmt\n'), ((4241, 4258), 'cardinal_pythonlib.sizeformatter.sizeof_fmt', 'sizeof_fmt', (['space'], {}), '(space)\n', (4251, 4258), False, 'from cardinal_pythonlib.sizeformatter import human2bytes, sizeof_fmt\n'), ((4515, 4549), 'subprocess.check_call', 'subprocess.check_call', (['resume_args'], {}), '(resume_args)\n', (4536, 4549), False, 'import subprocess\n'), ((4472, 4489), 'cardinal_pythonlib.sizeformatter.sizeof_fmt', 'sizeof_fmt', (['space'], {}), '(space)\n', (4482, 4489), False, 'from cardinal_pythonlib.sizeformatter import human2bytes, sizeof_fmt\n')]
|
import torch
import torch.nn as nn
class Normalize(nn.Module):
def __init__(self, mean, std, *args, **kwargs):
super().__init__()
self.register_buffer('mean', torch.tensor(mean)[None, :, None, None])
self.register_buffer('std', torch.tensor(std)[None, :, None, None])
def forward(self, x):
return (x - self.mean) / self.std
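
# Minimal usage sketch (assumed values: ImageNet-style mean/std and a random
# input batch; these are illustrative, not part of the original module):
if __name__ == "__main__":
    norm = Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    x = torch.rand(2, 3, 32, 32)  # NCHW batch
    print(norm(x).shape)  # torch.Size([2, 3, 32, 32])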
# class ColorDetectorWrapper(nn.Module):
# def __init__(self, model):
# super().__init__()
# self.model = model
# self.color_dict = {
# 'circle-750.0': ['white', 'blue', 'red'], # (1) white+red, (2) blue+white
# 'triangle-900.0': ['white', 'yellow'], # (1) white, (2) yellow
# 'triangle_inverted-1220.0': [], # (1) white+red
# 'diamond-600.0': [], # (1) white+yellow
# 'diamond-915.0': [], # (1) yellow
# 'square-600.0': [], # (1) blue
# 'rect-458.0-610.0': ['white', 'other'], # (1) chevron (also multi-color), (2) white
# 'rect-762.0-915.0': [], # (1) white
# 'rect-915.0-1220.0': [], # (1) white
# 'pentagon-915.0': [], # (1) yellow
# 'octagon-915.0': [], # (1) red
# 'other': [],
# }
# self.class_list = list(self.color_dict.keys())
# self.class_idx = {
# 'circle-750.0': 0, # (1) white+red, (2) blue+white
# 'triangle-900.0': 3, # (1) white, (2) yellow
# 'triangle_inverted-1220.0': 5, # (1) white+red
# 'diamond-600.0': 6, # (1) white+yellow
# 'diamond-915.0': 7, # (1) yellow
# 'square-600.0': 8, # (1) blue
# 'rect-458.0-610.0': 9, # (1) chevron (also multi-color), (2) white
# 'rect-762.0-915.0': 11, # (1) white
# 'rect-915.0-1220.0': 12, # (1) white
# 'pentagon-915.0': 13, # (1) yellow
# 'octagon-915.0': 14, # (1) red
# 'other': 15,
# }
# # Define HSV range of the desired colors (H, S, L)
# WHITE = [[0, 0, 95], [360, 360, 100]]
# def forward(self, x):
# logits = self.model(x)
# y = logits.argmax(-1)
# # Change image to HSL color space
# # Count pixels that satisfy the color range
|
[
"torch.tensor"
] |
[((208, 226), 'torch.tensor', 'torch.tensor', (['mean'], {}), '(mean)\n', (220, 226), False, 'import torch\n'), ((285, 302), 'torch.tensor', 'torch.tensor', (['std'], {}), '(std)\n', (297, 302), False, 'import torch\n')]
|
#!/usr/bin/env python
# coding: utf-8
#########################################################################
#########################################################################
"""
File Name: level2hbase.py
Author: <NAME>
E-mail: <EMAIL>
Created on: Sat Jun 7 13:36:03 2014 CST
"""
DESCRIPTION = """
This program can transfer the data from LevelDB to HBase.
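
Example (illustrative paths and host):
    python level2hbase.py /data/leveldb mytable 127.0.0.1 9090 gen-py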
"""
import os
import sys
import argparse
import leveldb
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
def perr(msg):
""" Print error message.
"""
sys.stderr.write("%s" % msg)
sys.stderr.flush()
def pinfo(msg):
""" Print information message.
"""
sys.stdout.write("%s" % msg)
sys.stdout.flush()
def runcmd(cmd):
""" Run command.
"""
perr("%s\n" % cmd)
os.system(cmd)
def getargs():
""" Parse program arguments.
"""
parser = argparse.ArgumentParser(description=DESCRIPTION,
formatter_class=
argparse.RawTextHelpFormatter)
parser.add_argument('leveldb', type=str,
help='path to the LevelDB database')
parser.add_argument('table', type=str,
help='target table name in hbase')
parser.add_argument('host', type=str, nargs='?', default="127.0.0.1",
help='IP address / Host name of hbase server')
parser.add_argument('port', type=int, nargs='?', default=9090,
help='port number of hbase server')
parser.add_argument('pyhbase', type=str, nargs='?', default="gen-py",
help='python interface of hbase')
return parser.parse_args()
def main(args):
""" Main entry.
"""
transport = TSocket.TSocket(args.host, args.port)
transport = TTransport.TBufferedTransport(transport)
protocol = TBinaryProtocol.TBinaryProtocol(transport)
client = Hbase.Client(protocol)
transport.open()
contents = ColumnDescriptor(name='cf:', maxVersions=1)
ldb = leveldb.LevelDB(args.leveldb)
    it = ldb.RangeIter()
try:
client.createTable(args.table, [contents])
except AlreadyExists as err:
perr("ERROR: %s\n" % err.message)
sys.exit(1)
cnt = 0
pinfo("Processed image:\n")
pinfo("\r\t%d" % cnt)
while True:
try:
            item = next(it)
except StopIteration:
break
cnt += 1
if cnt % 100 == 0:
pinfo("\r\t%d" % cnt)
client.mutateRow(args.table, item[0],
[Mutation(column="cf:data", value=item[1])], None)
pinfo("\r\t%d\tDone!\n" % cnt)
if __name__ == '__main__':
args = getargs()
sys.path.append(args.pyhbase)
from hbase import Hbase
from hbase.ttypes import *
main(args)
|
[
"sys.stdout.write",
"thrift.transport.TSocket.TSocket",
"hbase.Hbase.Client",
"leveldb.LevelDB",
"sys.path.append",
"argparse.ArgumentParser",
"thrift.protocol.TBinaryProtocol.TBinaryProtocol",
"os.system",
"sys.stdout.flush",
"sys.stderr.write",
"sys.stderr.flush",
"sys.exit",
"thrift.transport.TTransport.TBufferedTransport"
] |
[((652, 680), 'sys.stderr.write', 'sys.stderr.write', (["('%s' % msg)"], {}), "('%s' % msg)\n", (668, 680), False, 'import sys\n'), ((685, 703), 'sys.stderr.flush', 'sys.stderr.flush', ([], {}), '()\n', (701, 703), False, 'import sys\n'), ((770, 798), 'sys.stdout.write', 'sys.stdout.write', (["('%s' % msg)"], {}), "('%s' % msg)\n", (786, 798), False, 'import sys\n'), ((803, 821), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (819, 821), False, 'import sys\n'), ((898, 912), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (907, 912), False, 'import os\n'), ((985, 1085), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'DESCRIPTION', 'formatter_class': 'argparse.RawTextHelpFormatter'}), '(description=DESCRIPTION, formatter_class=argparse.\n RawTextHelpFormatter)\n', (1008, 1085), False, 'import argparse\n'), ((1864, 1901), 'thrift.transport.TSocket.TSocket', 'TSocket.TSocket', (['args.host', 'args.port'], {}), '(args.host, args.port)\n', (1879, 1901), False, 'from thrift.transport import TSocket\n'), ((1918, 1958), 'thrift.transport.TTransport.TBufferedTransport', 'TTransport.TBufferedTransport', (['transport'], {}), '(transport)\n', (1947, 1958), False, 'from thrift.transport import TTransport\n'), ((1974, 2016), 'thrift.protocol.TBinaryProtocol.TBinaryProtocol', 'TBinaryProtocol.TBinaryProtocol', (['transport'], {}), '(transport)\n', (2005, 2016), False, 'from thrift.protocol import TBinaryProtocol\n'), ((2031, 2053), 'hbase.Hbase.Client', 'Hbase.Client', (['protocol'], {}), '(protocol)\n', (2043, 2053), False, 'from hbase import Hbase\n'), ((2145, 2174), 'leveldb.LevelDB', 'leveldb.LevelDB', (['args.leveldb'], {}), '(args.leveldb)\n', (2160, 2174), False, 'import leveldb\n'), ((2826, 2855), 'sys.path.append', 'sys.path.append', (['args.pyhbase'], {}), '(args.pyhbase)\n', (2841, 2855), False, 'import sys\n'), ((2345, 2356), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2353, 2356), False, 'import sys\n')]
|
import numpy as np
from tensorflow import keras
import pandas as pd
import os
class DcmDataGenerator(keras.utils.Sequence):
"""Generates data for Keras
Sequence based data generator. Suitable for building data generator for training and prediction.
"""
def __init__(self, images_path, dim=(15, 512, 512), window=None):
"""Initialization
:param images_path: path to images location
:param dim: tuple indicating image dimension in format CHW
"""
self.list_IDs = os.listdir(images_path)
self.images_path = images_path
self.dim = dim
self.indexes = np.arange(len(self.list_IDs))
self.on_epoch_end()
self.window = window
def __len__(self):
"""Denotes the number of batches per epoch
:return: number of batches per epoch
"""
return len(self.list_IDs)
def on_epoch_end(self):
"""Updates indexes after each epoch
"""
self.indexes = np.arange(len(self.list_IDs))
def flow(self, seed):
np.random.seed(seed)
i = int(np.random.randint(0, self.__len__(), size=(1,)))
while True:
yield self.__getitem__(i % self.__len__())
i += 1
def __getitem__(self, index):
"""Generate one patient's data
:param index: index of the patient
:return: X_dcm
"""
# Find list of IDs
patient_ID = self.list_IDs[index]
# Generate data
X_dcm = self._generate_X(patient_ID)
return X_dcm, np.array([1, ])
def _generate_X(self, patient_ID):
"""Generates data containing patient's images
:param patient_ID: ID of the patient
:return: patient's images
"""
# Initialization
X_dcm = np.empty((1, *self.dim), dtype=np.float32)
patient_path = os.path.join(self.images_path, patient_ID)
dcm_names = np.array([dcm_name[:-4] for dcm_name in os.listdir(patient_path)], dtype=int)
dcm_names = sorted(list(dcm_names))
patient_dcm_paths = [f'{self.images_path}/{patient_ID}/{dcm_num}.npy' for dcm_num in dcm_names]
# Generate data
for j, dcm_path in enumerate(patient_dcm_paths):
X_dcm[0, j] = self._load_dcm(dcm_path)
X_dcm = np.moveaxis(X_dcm, 1, -1)
return X_dcm
def _load_dcm(self, image_path):
"""Load grayscale image
:param image_path: path to image to load
:return: loaded image
"""
img = np.load(image_path, allow_pickle=True)
if self.window:
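            # Clip intensities to the window [lb, ub], then rescale to [0, 1].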
lb = self.window[0]
ub = self.window[1]
img[img < lb] = lb
img[img > ub] = ub
img = (img - lb) / (ub - lb)
return img
class CsvDataGenerator(keras.utils.Sequence):
"""Generates data for Keras
Sequence based data generator. Suitable for building data generator for training and prediction.
"""
def __init__(self, csv_path, to_fit=True, to_normalize=True):
"""Initialization
:param to_normalize: True to normalize, False otherwise
:param csv_path: path to csv file location
:param to_fit: True to return X and y, False to return X only
"""
self.to_normalize = to_normalize
self.list_IDs = os.listdir(csv_path[:-4])
self.csv_path = csv_path
self.to_fit = to_fit
self.indexes = np.arange(len(self.list_IDs))
self.on_epoch_end()
def __len__(self):
"""Denotes the number of batches per epoch
:return: number of batches per epoch
"""
return len(self.list_IDs)
def on_epoch_end(self):
"""Updates indexes after each epoch
"""
self.indexes = np.arange(len(self.list_IDs))
def flow(self, seed):
np.random.seed(seed)
i = int(np.random.randint(0, self.__len__(), size=(1,)))
while True:
yield self.__getitem__(i % self.__len__())
i += 1
def __getitem__(self, index):
"""Generate one patient's data
:param index: index of the patient
:return: X
"""
# Find list of IDs
patient_ID = self.list_IDs[index]
# Generate data
X = self._generate_X(patient_ID)
if self.to_fit:
y = self._generate_y(patient_ID)
return X, y
else:
return X
def _generate_X(self, patient_ID):
"""Generates data containing patient's first csv record
:param patient_ID: ID of the patient
:return: patient's first csv record
"""
X = np.empty(shape=(1, 7), dtype=np.float32)
# Generate data
X[0] = self._load_X(self.csv_path, patient_ID)
return X
def _load_X(self, csv_path, patient_ID):
"""Load csv with patient's weeks and corresponding FVC
:param csv_path: path to csv file with weeks and FVC file to load
:return: loaded csv file with weeks and FVC file to load
"""
patients_df = pd.read_csv(csv_path)
patient = patients_df[patients_df['Patient'] == patient_ID]
patient.reset_index(inplace=True)
X_columns = ['Weeks', 'FVC', 'Age', 'Ex-smoker', 'Never smoked', 'Currently smokes', 'Sex_n']
X_patient = patient.loc[0, X_columns]
if self.to_normalize:
X_patient['Age'] = (X_patient['Age'] - 67.18850871530019) / 7.055116199848975
X_patient['FVC'] = (X_patient['FVC'] - 2690.479018721756) / 832.5021066817238
X_patient['Weeks'] = (X_patient['Weeks'] - 31.861846352485475) / 23.265510111399017
X_patient = X_patient.to_numpy()
return X_patient
def _generate_y(self, patient_ID):
"""Generates data containing patient's [1:] csv records
:param patient_ID: ID of the patient
:return: patient's [1:] csv records
"""
y = np.empty(shape=(1, 146, 2), dtype=np.float32)
# Generate data
y[0] = self._load_y(self.csv_path, patient_ID)
return y
def _load_y(self, csv_path, patient_ID):
"""Load csv with patient's weeks and corresponding FVC
:param csv_path: path to csv file with weeks and FVC file to load
:return: loaded csv file with weeks and FVC file to load
"""
patients_df = pd.read_csv(csv_path)
patient = patients_df[patients_df['Patient'] == patient_ID]
patient.reset_index(inplace=True)
weeks_FVC = patient.loc[1:, ['Weeks', 'FVC']]
weeks_FVC = weeks_FVC[~weeks_FVC.duplicated(['Weeks'])]
weeks_FVC = self.pad_y(weeks_FVC)
weeks_FVC = weeks_FVC.to_numpy()
return weeks_FVC
def pad_y(self, csv_df):
csv_df['isRecord'] = 1
for i in range(-12, 134):
if not np.any(csv_df['Weeks'] == i):
csv_df = csv_df.append({'Weeks': i, 'FVC': 0, 'isRecord': 0}, ignore_index=True)
csv_df.sort_values('Weeks', inplace=True)
csv_df.drop(columns='Weeks', inplace=True)
if self.to_normalize:
csv_df.loc[:, 'FVC'] = (csv_df.loc[:, 'FVC'] - 2690.479018721756) / 832.5021066817238
csv_df.reset_index(drop=True, inplace=True)
return csv_df
# ==================================#
# Creating datagen
def _merge_datagens(csv_gen, dcm_gen, shuffle=True, is_patient_record=True):
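    # Both sub-generators are seeded identically, so the random patient index
    # drawn inside each flow() stays in sync: csv_data and dcm_data always
    # refer to the same patient.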
seed = 0
while True:
csv_flow = csv_gen.flow(seed)
dcm_flow = dcm_gen.flow(seed)
patient_num = 1
while True:
csv_data = next(csv_flow)
dcm_data = next(dcm_flow)
csv_X = csv_data[0]
dcm_X_img = dcm_data[0]
csv_y = csv_data[1][:, :, 0]
csv_is_patient_record = csv_data[1][:, :, 1]
if is_patient_record:
yield [csv_X, dcm_X_img], csv_y, csv_is_patient_record
else:
yield [csv_X, dcm_X_img], csv_y
patient_num += 1
if patient_num > 175:
break
if shuffle:
seed += 1
def create_datagen(shuffle=True, window=None, is_patient_record=True):
"""Returns generator that yields [csv_X, dcm_X_img], csv_y, csv_is_patient_record"""
csv_datagen = CsvDataGenerator('../../data/processed/train.csv', to_normalize=True)
dcm_datagen = DcmDataGenerator('../../data/processed/train', window=window)
merged_gen = _merge_datagens(csv_datagen, dcm_datagen, shuffle=shuffle, is_patient_record=is_patient_record)
return merged_gen
# def gen_train_test_split(datagen):
# datagen.
# gen = create_datagen(shuffle=True)
# x1, y1, is_p_r1 = next(gen)
|
[
"numpy.moveaxis",
"numpy.load",
"numpy.random.seed",
"pandas.read_csv",
"numpy.empty",
"numpy.any",
"numpy.array",
"os.path.join",
"os.listdir"
] |
[((519, 542), 'os.listdir', 'os.listdir', (['images_path'], {}), '(images_path)\n', (529, 542), False, 'import os\n'), ((1054, 1074), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (1068, 1074), True, 'import numpy as np\n'), ((1791, 1833), 'numpy.empty', 'np.empty', (['(1, *self.dim)'], {'dtype': 'np.float32'}), '((1, *self.dim), dtype=np.float32)\n', (1799, 1833), True, 'import numpy as np\n'), ((1858, 1900), 'os.path.join', 'os.path.join', (['self.images_path', 'patient_ID'], {}), '(self.images_path, patient_ID)\n', (1870, 1900), False, 'import os\n'), ((2298, 2323), 'numpy.moveaxis', 'np.moveaxis', (['X_dcm', '(1)', '(-1)'], {}), '(X_dcm, 1, -1)\n', (2309, 2323), True, 'import numpy as np\n'), ((2521, 2559), 'numpy.load', 'np.load', (['image_path'], {'allow_pickle': '(True)'}), '(image_path, allow_pickle=True)\n', (2528, 2559), True, 'import numpy as np\n'), ((3316, 3341), 'os.listdir', 'os.listdir', (['csv_path[:-4]'], {}), '(csv_path[:-4])\n', (3326, 3341), False, 'import os\n'), ((3824, 3844), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (3838, 3844), True, 'import numpy as np\n'), ((4634, 4674), 'numpy.empty', 'np.empty', ([], {'shape': '(1, 7)', 'dtype': 'np.float32'}), '(shape=(1, 7), dtype=np.float32)\n', (4642, 4674), True, 'import numpy as np\n'), ((5054, 5075), 'pandas.read_csv', 'pd.read_csv', (['csv_path'], {}), '(csv_path)\n', (5065, 5075), True, 'import pandas as pd\n'), ((5926, 5971), 'numpy.empty', 'np.empty', ([], {'shape': '(1, 146, 2)', 'dtype': 'np.float32'}), '(shape=(1, 146, 2), dtype=np.float32)\n', (5934, 5971), True, 'import numpy as np\n'), ((6351, 6372), 'pandas.read_csv', 'pd.read_csv', (['csv_path'], {}), '(csv_path)\n', (6362, 6372), True, 'import pandas as pd\n'), ((1549, 1562), 'numpy.array', 'np.array', (['[1]'], {}), '([1])\n', (1557, 1562), True, 'import numpy as np\n'), ((6826, 6854), 'numpy.any', 'np.any', (["(csv_df['Weeks'] == i)"], {}), "(csv_df['Weeks'] == i)\n", (6832, 6854), True, 'import numpy as np\n'), ((1961, 1985), 'os.listdir', 'os.listdir', (['patient_path'], {}), '(patient_path)\n', (1971, 1985), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-BASE 蓝鲸基础平台 is licensed under the MIT License.
License for BK-BASE 蓝鲸基础平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import time
from common.decorators import detail_route, list_route
from common.log import logger
from common.transaction import auto_meta_sync
from common.views import APIViewSet
from datahub.common.const import DEFAULT
from datahub.databus.exceptions import (
MigrationCannotOperatError,
MigrationNotFoundError,
)
from datahub.databus.task.task_utils import check_task_auth
from django.forms import model_to_dict
from rest_framework.response import Response
from datahub.databus import exceptions, migration, models, rt, serializers, settings
class MigrationViewset(APIViewSet):
"""
    REST operations on these resources are provided through the unified APIViewSet interface.
"""
serializer_class = serializers.MigrateCreateSerializer
    # Rename the instance ID variable in the URL; the default is "pk"
lookup_field = "id"
def create(self, request):
"""
@apiGroup migration
        @api {post} /databus/migrations/ Create a migration task
        @apiDescription Create a migration task
        @apiParam {string} result_table_id result_table_id
        @apiParam {string} source source storage
        @apiParam {string} dest destination storage
        @apiParam {string} start start time
        @apiParam {string} end end time
        @apiParam {int} parallelism [optional] processing parallelism, default 3
        @apiParam {boolean} overwrite whether to overwrite existing data
@apiSuccessExample {json} Success-Response:
HTTP/1.1 200 OK
{
"result": true,
"data": [{}],
"message": "ok",
"code": "1500200",
}
"""
args = self.params_valid(serializer=serializers.MigrateCreateSerializer)
rt_id = args["result_table_id"]
check_task_auth(rt_id)
rt_info = rt.get_databus_rt_info(rt_id)
if not rt_info:
raise exceptions.NotFoundRtError()
        # check the source storage configuration
if args["source"] not in rt_info["storages.list"]:
raise exceptions.TaskStorageNotFound(
message_kv={
"result_table_id": args["result_table_id"],
"storage": args["source"],
}
)
        # check the destination storage configuration
if args["dest"] not in rt_info["storages.list"]:
raise exceptions.TaskStorageNotFound(
message_kv={
"result_table_id": args["result_table_id"],
"storage": args["dest"],
}
)
task_label = migration.create_task(rt_info, args)
objs = models.DatabusMigrateTask.objects.filter(task_label=task_label).values()
return Response(objs)
def partial_update(self, request, id):
"""
@apiGroup migration
        @api {patch} /databus/migrations/:task_id/ Update a migration task
        @apiDescription Update a migration task
        @apiParam {string} status status
        @apiParam {int} parallelism processing parallelism
@apiSuccessExample {json} Success-Response:
HTTP/1.1 200 OK
{
"result": true,
"data": {},
"message": "ok",
"code": "1500200",
}
"""
args = self.params_valid(serializer=serializers.MigrateUpdateSerializer)
obj = models.DatabusMigrateTask.objects.get(id=id)
with auto_meta_sync(using=DEFAULT):
if args["status"] != "":
obj.status = args["status"]
if args["parallelism"] > 0:
obj.parallelism = args["parallelism"]
obj.save()
return Response(model_to_dict(models.DatabusMigrateTask.objects.get(id=id)))
def list(self, request):
"""
@apiGroup migration
        @api {get} /databus/migrations/ Query unfinished tasks
        @apiDescription Query unfinished tasks
@apiSuccessExample {json} Success-Response:
HTTP/1.1 200 OK
{
"result": true,
"data": [{}],
"message": "ok",
"code": "1500200",
}
"""
tasks = models.DatabusMigrateTask.objects.exclude(status__in=["finish"]).values(
"id",
"task_label",
"task_type",
"result_table_id",
"parallelism",
"dest",
"dest_config",
"overwrite",
"start",
"end",
"status",
)
for task_obj in tasks:
if task_obj["task_type"] != "overall":
task_obj["source_config"] = ""
task_obj["dest_config"] = ""
return Response(tasks)
def retrieve(self, request, id):
"""
@apiGroup migration
        @api {get} /databus/migrations/:task_id/ Query a task
        @apiDescription Query a task
@apiSuccessExample {json} Success-Response:
HTTP/1.1 200 OK
{
"result": true,
"data": [{}],
"message": "ok",
"code": "1500200",
}
"""
task_id = 0
try:
task_id = int(id)
except Exception:
pass
if task_id == 0:
return Response(
models.DatabusMigrateTask.objects.filter(result_table_id=id, task_type="overall")
.order_by("task_label")
.values(
"id",
"task_type",
"result_table_id",
"source",
"dest",
"start",
"end",
"created_at",
"created_by",
"updated_at",
"status",
)
)
else:
obj = models.DatabusMigrateTask.objects.get(id=task_id)
return Response(
models.DatabusMigrateTask.objects.filter(task_label=obj.task_label).values(
"id",
"task_type",
"result_table_id",
"source",
"dest",
"input",
"output",
"start",
"end",
"created_at",
"created_by",
"updated_at",
"status",
)
)
@detail_route(methods=["get"], url_path="start")
def start_task(self, request, id):
"""
@apiGroup migration
        @api {get} /databus/migrations/:task_id/start/ Start a task
        @apiDescription Start a task
@apiSuccessExample {json} Success-Response:
HTTP/1.1 200 OK
{
"result": true,
"data": True,
"message": "ok",
"code": "1500200",
}
"""
args = self.params_valid(serializer=serializers.MigrateTaskTypeSerializer)
task_obj = migration.get_task(id)
if task_obj.status in ["finish"]:
raise MigrationCannotOperatError(message_kv={"type": task_obj.status})
return Response(migration.start_task(task_obj, args["type"]))
@detail_route(methods=["get"], url_path="stop")
def stop_task(self, request, id):
"""
@apiGroup migration
        @api {get} /databus/migrations/:task_id/stop/ Stop a task
        @apiDescription Stop a task
@apiSuccessExample {json} Success-Response:
HTTP/1.1 200 OK
{
"result": true,
"data": True,
"message": "ok",
"code": "1500200",
}
"""
args = self.params_valid(serializer=serializers.MigrateTaskTypeSerializer)
task_obj = migration.get_task(id)
migration.stop_task(task_obj, args["type"])
return Response(True)
@detail_route(methods=["get"], url_path="status")
def get_status(self, request, id):
"""
@apiGroup migration
        @api {get} /databus/migrations/:task_id/status/ Query the task's Pulsar runtime status
        @apiDescription Query the task's Pulsar runtime status
        @apiParam {string} type task type to query; one of: all (all task types, default), source, sink
@apiSuccessExample {json} Success-Response:
HTTP/1.1 200 OK
{
"result": true,
"data": {"source":{},
"sink":{}},
"message": "ok",
"code": "1500200",
}
"""
args = self.params_valid(serializer=serializers.MigrateTaskTypeSerializer)
task_obj = migration.get_task(id)
result = migration.get_task_status(task_obj, args["type"])
return Response(result)
@list_route(methods=["get"], url_path="get_clusters")
def get_clusters(self, request):
"""
@apiGroup migration
        @api {get} /databus/migrations/get_clusters/ Get the clusters supporting migration for a result_table_id
        @apiDescription Get the clusters supporting migration for a result_table_id
        @apiParam {string} result_table_id result_table_id
        @apiParam {string} type task type to query; one of: all (all task types, default), source, sink
@apiSuccessExample {json} Success-Response:
HTTP/1.1 200 OK
{
"result": true,
"data": {"source":[],
"sink":[]},
"message": "ok",
"code": "1500200",
}
"""
args = self.params_valid(serializer=serializers.TasksRtIdSerializer)
result = {"source": [], "dest": []}
        # query the storage clusters already configured for this result table
rt_info = rt.get_rt_fields_storages(args["result_table_id"])
if not rt_info or not rt_info.get("storages"):
return Response(result)
storages = rt_info.get("storages")
for storage_type in storages.keys():
if storage_type in settings.migration_source_supported:
result["source"].append(storage_type)
elif storage_type in settings.migration_dest_supported:
result["dest"].append(storage_type)
return Response(result)
@list_route(methods=["get"], url_path="get_tasks")
def get_tasks(self, request):
"""
@apiGroup migration
        @api {get} /databus/migrations/get_tasks/ Get the migration task list for a data id
        @apiDescription Get the migration task list for a data id
        @apiParam {string} raw_data_id data id
        @apiParam {string} result_table_id result_table_id; ignored when raw_data_id is present
@apiSuccessExample {json} Success-Response:
HTTP/1.1 200 OK
{
"result": true,
"data": {{}},
"message": "ok",
"code": "1500200",
}
"""
args = self.params_valid(serializer=serializers.MigrationGetTasksVerifySerializer)
if "raw_data_id" in args:
# query by raw_data_id
raw_data_id = args["raw_data_id"]
objects = models.DatabusClean.objects.filter(raw_data_id=raw_data_id)
rts = [obj.processing_id for obj in objects]
elif "result_table_id" in args:
rts = [args["result_table_id"]]
else:
return Response([])
result = models.DatabusMigrateTask.objects.filter(result_table_id__in=rts, task_type="overall").values(
"id",
"task_type",
"result_table_id",
"source",
"dest",
"start",
"end",
"created_at",
"created_by",
"updated_at",
"status",
)
return Response(result)
@list_route(methods=["post"], url_path="update_task_status")
def update_task_status(self, request):
"""
@apiGroup migration
        @api {post} /databus/migrations/update_task_status/ Update the task status
        @apiDescription Update the task status
        @apiParam {int} task_id the task id
        @apiParam {string} status the new status
        @apiParam {int} input [optional] number of records processed by the source task, default 0
        @apiParam {int} output [optional] number of records processed by the sink task, default 0
@apiSuccessExample {json} Success-Response:
HTTP/1.1 200 OK
{
"result": true,
"data": {{}},
"message": "ok",
"code": "1500200",
}
"""
args = self.params_valid(serializer=serializers.MigrateUpdateStateSerializer)
try:
obj = models.DatabusMigrateTask.objects.get(id=args["task_id"])
except models.DatabusMigrateTask.DoesNotExist:
raise MigrationNotFoundError()
with auto_meta_sync(using=DEFAULT):
obj.status = args["status"]
obj.input = args["input"]
obj.output = args["output"]
obj.updated_at = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
logger.info("update task:{} status:{} input:{} output:{}".format(obj.id, obj.status, obj.input, obj.output))
obj.save()
return Response("ok")
@list_route(methods=["get"], url_path="get_support_clusters")
def get_support_clusters(self, request):
"""
@apiGroup migration
        @api {get} /databus/migrations/get_support_clusters/ Get the currently supported migration clusters
        @apiDescription Get the currently supported migration clusters
@apiSuccessExample {json} Success-Response:
HTTP/1.1 200 OK
{
"result": true,
"data": {"source" : ["tspider"],
"dest": ["hdfs"]},
"message": "ok",
"code": "1500200",
}
"""
return Response(
{
"source": settings.migration_source_supported,
"dest": settings.migration_dest_supported,
}
)
|
[
"common.transaction.auto_meta_sync",
"datahub.databus.task.task_utils.check_task_auth",
"datahub.databus.migration.create_task",
"datahub.databus.migration.stop_task",
"datahub.databus.models.DatabusMigrateTask.objects.exclude",
"rest_framework.response.Response",
"datahub.databus.migration.get_task_status",
"datahub.databus.rt.get_rt_fields_storages",
"datahub.databus.exceptions.MigrationNotFoundError",
"datahub.databus.models.DatabusClean.objects.filter",
"common.decorators.detail_route",
"datahub.databus.migration.start_task",
"common.decorators.list_route",
"datahub.databus.migration.get_task",
"datahub.databus.models.DatabusMigrateTask.objects.filter",
"datahub.databus.exceptions.NotFoundRtError",
"time.localtime",
"datahub.databus.exceptions.MigrationCannotOperatError",
"datahub.databus.exceptions.TaskStorageNotFound",
"datahub.databus.models.DatabusMigrateTask.objects.get",
"datahub.databus.rt.get_databus_rt_info"
] |
[((7680, 7727), 'common.decorators.detail_route', 'detail_route', ([], {'methods': "['get']", 'url_path': '"""start"""'}), "(methods=['get'], url_path='start')\n", (7692, 7727), False, 'from common.decorators import detail_route, list_route\n'), ((8499, 8545), 'common.decorators.detail_route', 'detail_route', ([], {'methods': "['get']", 'url_path': '"""stop"""'}), "(methods=['get'], url_path='stop')\n", (8511, 8545), False, 'from common.decorators import detail_route, list_route\n'), ((9201, 9249), 'common.decorators.detail_route', 'detail_route', ([], {'methods': "['get']", 'url_path': '"""status"""'}), "(methods=['get'], url_path='status')\n", (9213, 9249), False, 'from common.decorators import detail_route, list_route\n'), ((10062, 10114), 'common.decorators.list_route', 'list_route', ([], {'methods': "['get']", 'url_path': '"""get_clusters"""'}), "(methods=['get'], url_path='get_clusters')\n", (10072, 10114), False, 'from common.decorators import detail_route, list_route\n'), ((11445, 11494), 'common.decorators.list_route', 'list_route', ([], {'methods': "['get']", 'url_path': '"""get_tasks"""'}), "(methods=['get'], url_path='get_tasks')\n", (11455, 11494), False, 'from common.decorators import detail_route, list_route\n'), ((12964, 13023), 'common.decorators.list_route', 'list_route', ([], {'methods': "['post']", 'url_path': '"""update_task_status"""'}), "(methods=['post'], url_path='update_task_status')\n", (12974, 13023), False, 'from common.decorators import detail_route, list_route\n'), ((14354, 14414), 'common.decorators.list_route', 'list_route', ([], {'methods': "['get']", 'url_path': '"""get_support_clusters"""'}), "(methods=['get'], url_path='get_support_clusters')\n", (14364, 14414), False, 'from common.decorators import detail_route, list_route\n'), ((2999, 3021), 'datahub.databus.task.task_utils.check_task_auth', 'check_task_auth', (['rt_id'], {}), '(rt_id)\n', (3014, 3021), False, 'from datahub.databus.task.task_utils import check_task_auth\n'), ((3041, 3070), 'datahub.databus.rt.get_databus_rt_info', 'rt.get_databus_rt_info', (['rt_id'], {}), '(rt_id)\n', (3063, 3070), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((3762, 3798), 'datahub.databus.migration.create_task', 'migration.create_task', (['rt_info', 'args'], {}), '(rt_info, args)\n', (3783, 3798), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((3902, 3916), 'rest_framework.response.Response', 'Response', (['objs'], {}), '(objs)\n', (3910, 3916), False, 'from rest_framework.response import Response\n'), ((4536, 4580), 'datahub.databus.models.DatabusMigrateTask.objects.get', 'models.DatabusMigrateTask.objects.get', ([], {'id': 'id'}), '(id=id)\n', (4573, 4580), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((5883, 5898), 'rest_framework.response.Response', 'Response', (['tasks'], {}), '(tasks)\n', (5891, 5898), False, 'from rest_framework.response import Response\n'), ((8273, 8295), 'datahub.databus.migration.get_task', 'migration.get_task', (['id'], {}), '(id)\n', (8291, 8295), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((9089, 9111), 'datahub.databus.migration.get_task', 'migration.get_task', (['id'], {}), '(id)\n', (9107, 9111), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((9120, 9163), 'datahub.databus.migration.stop_task', 'migration.stop_task', 
(['task_obj', "args['type']"], {}), "(task_obj, args['type'])\n", (9139, 9163), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((9180, 9194), 'rest_framework.response.Response', 'Response', (['(True)'], {}), '(True)\n', (9188, 9194), False, 'from rest_framework.response import Response\n'), ((9933, 9955), 'datahub.databus.migration.get_task', 'migration.get_task', (['id'], {}), '(id)\n', (9951, 9955), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((9973, 10022), 'datahub.databus.migration.get_task_status', 'migration.get_task_status', (['task_obj', "args['type']"], {}), "(task_obj, args['type'])\n", (9998, 10022), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((10039, 10055), 'rest_framework.response.Response', 'Response', (['result'], {}), '(result)\n', (10047, 10055), False, 'from rest_framework.response import Response\n'), ((10933, 10983), 'datahub.databus.rt.get_rt_fields_storages', 'rt.get_rt_fields_storages', (["args['result_table_id']"], {}), "(args['result_table_id'])\n", (10958, 10983), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((11422, 11438), 'rest_framework.response.Response', 'Response', (['result'], {}), '(result)\n', (11430, 11438), False, 'from rest_framework.response import Response\n'), ((12941, 12957), 'rest_framework.response.Response', 'Response', (['result'], {}), '(result)\n', (12949, 12957), False, 'from rest_framework.response import Response\n'), ((14333, 14347), 'rest_framework.response.Response', 'Response', (['"""ok"""'], {}), "('ok')\n", (14341, 14347), False, 'from rest_framework.response import Response\n'), ((14960, 15065), 'rest_framework.response.Response', 'Response', (["{'source': settings.migration_source_supported, 'dest': settings.\n migration_dest_supported}"], {}), "({'source': settings.migration_source_supported, 'dest': settings.\n migration_dest_supported})\n", (14968, 15065), False, 'from rest_framework.response import Response\n'), ((3114, 3142), 'datahub.databus.exceptions.NotFoundRtError', 'exceptions.NotFoundRtError', ([], {}), '()\n', (3140, 3142), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((3239, 3358), 'datahub.databus.exceptions.TaskStorageNotFound', 'exceptions.TaskStorageNotFound', ([], {'message_kv': "{'result_table_id': args['result_table_id'], 'storage': args['source']}"}), "(message_kv={'result_table_id': args[\n 'result_table_id'], 'storage': args['source']})\n", (3269, 3358), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((3538, 3655), 'datahub.databus.exceptions.TaskStorageNotFound', 'exceptions.TaskStorageNotFound', ([], {'message_kv': "{'result_table_id': args['result_table_id'], 'storage': args['dest']}"}), "(message_kv={'result_table_id': args[\n 'result_table_id'], 'storage': args['dest']})\n", (3568, 3655), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((4594, 4623), 'common.transaction.auto_meta_sync', 'auto_meta_sync', ([], {'using': 'DEFAULT'}), '(using=DEFAULT)\n', (4608, 4623), False, 'from common.transaction import auto_meta_sync\n'), ((7068, 7117), 'datahub.databus.models.DatabusMigrateTask.objects.get', 'models.DatabusMigrateTask.objects.get', ([], {'id': 'task_id'}), '(id=task_id)\n', (7105, 7117), False, 'from datahub.databus import exceptions, 
migration, models, rt, serializers, settings\n'), ((8357, 8421), 'datahub.databus.exceptions.MigrationCannotOperatError', 'MigrationCannotOperatError', ([], {'message_kv': "{'type': task_obj.status}"}), "(message_kv={'type': task_obj.status})\n", (8383, 8421), False, 'from datahub.databus.exceptions import MigrationCannotOperatError, MigrationNotFoundError\n'), ((8447, 8491), 'datahub.databus.migration.start_task', 'migration.start_task', (['task_obj', "args['type']"], {}), "(task_obj, args['type'])\n", (8467, 8491), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((11058, 11074), 'rest_framework.response.Response', 'Response', (['result'], {}), '(result)\n', (11066, 11074), False, 'from rest_framework.response import Response\n'), ((12299, 12358), 'datahub.databus.models.DatabusClean.objects.filter', 'models.DatabusClean.objects.filter', ([], {'raw_data_id': 'raw_data_id'}), '(raw_data_id=raw_data_id)\n', (12333, 12358), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((13772, 13829), 'datahub.databus.models.DatabusMigrateTask.objects.get', 'models.DatabusMigrateTask.objects.get', ([], {'id': "args['task_id']"}), "(id=args['task_id'])\n", (13809, 13829), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((13942, 13971), 'common.transaction.auto_meta_sync', 'auto_meta_sync', ([], {'using': 'DEFAULT'}), '(using=DEFAULT)\n', (13956, 13971), False, 'from common.transaction import auto_meta_sync\n'), ((3814, 3877), 'datahub.databus.models.DatabusMigrateTask.objects.filter', 'models.DatabusMigrateTask.objects.filter', ([], {'task_label': 'task_label'}), '(task_label=task_label)\n', (3854, 3877), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((4861, 4905), 'datahub.databus.models.DatabusMigrateTask.objects.get', 'models.DatabusMigrateTask.objects.get', ([], {'id': 'id'}), '(id=id)\n', (4898, 4905), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((5350, 5414), 'datahub.databus.models.DatabusMigrateTask.objects.exclude', 'models.DatabusMigrateTask.objects.exclude', ([], {'status__in': "['finish']"}), "(status__in=['finish'])\n", (5391, 5414), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((12533, 12545), 'rest_framework.response.Response', 'Response', (['[]'], {}), '([])\n', (12541, 12545), False, 'from rest_framework.response import Response\n'), ((12564, 12655), 'datahub.databus.models.DatabusMigrateTask.objects.filter', 'models.DatabusMigrateTask.objects.filter', ([], {'result_table_id__in': 'rts', 'task_type': '"""overall"""'}), "(result_table_id__in=rts, task_type\n ='overall')\n", (12604, 12655), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n'), ((13903, 13927), 'datahub.databus.exceptions.MigrationNotFoundError', 'MigrationNotFoundError', ([], {}), '()\n', (13925, 13927), False, 'from datahub.databus.exceptions import MigrationCannotOperatError, MigrationNotFoundError\n'), ((14155, 14171), 'time.localtime', 'time.localtime', ([], {}), '()\n', (14169, 14171), False, 'import time\n'), ((7163, 7230), 'datahub.databus.models.DatabusMigrateTask.objects.filter', 'models.DatabusMigrateTask.objects.filter', ([], {'task_label': 'obj.task_label'}), '(task_label=obj.task_label)\n', (7203, 7230), False, 'from datahub.databus import exceptions, migration, 
models, rt, serializers, settings\n'), ((6513, 6599), 'datahub.databus.models.DatabusMigrateTask.objects.filter', 'models.DatabusMigrateTask.objects.filter', ([], {'result_table_id': 'id', 'task_type': '"""overall"""'}), "(result_table_id=id, task_type=\n 'overall')\n", (6553, 6599), False, 'from datahub.databus import exceptions, migration, models, rt, serializers, settings\n')]
|
from fastkml import kml
from .utils import haversine
class GraphFromKmlDoc:
def __init__(self, filename='pracas'):
self._filename = filename
def _get_document(self):
doc = open("pracas.kml", "r").read().encode('utf-8')
self._document = kml.KML()
self._document.from_string(doc)
return self._document
def get_pracas(self):
self._pracas = dict()
for locais in self._get_document().features():
for idx, marcadores in enumerate(locais.features()):
lng, lat, *args = marcadores.geometry._coordinates
self._pracas[marcadores.name] = {
'id': idx,
'lat': lat,
'lng': lng,
}
return self._pracas
def get_matriz_adjacencias(self):
        self._distancias = dict()
pracas = self.get_pracas()
for praca, coordenadas in pracas.items():
self._distancias[praca] = {}
for _praca, _coordenadas in pracas.items():
self._distancias[praca][_praca] = haversine(
lat1=coordenadas['lat'],
lon1=coordenadas['lng'],
lat2=_coordenadas['lat'],
lon2=_coordenadas['lng'],
)
return self._distancias
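# A minimal sketch of the `haversine` helper imported from .utils above; this
# is an illustrative assumption, not the project's actual implementation.
# It returns the great-circle distance in kilometres between two lat/lng points.
from math import radians, sin, cos, asin, sqrt

def haversine_sketch(lat1, lon1, lat2, lon2, radius_km=6371.0):
    """Great-circle distance between two points given in decimal degrees."""
    phi1, phi2 = radians(lat1), radians(lat2)
    dphi = radians(lat2 - lat1)
    dlmb = radians(lon2 - lon1)
    a = sin(dphi / 2) ** 2 + cos(phi1) * cos(phi2) * sin(dlmb / 2) ** 2
    return 2 * radius_km * asin(sqrt(a))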
|
[
"fastkml.kml.KML"
] |
[((273, 282), 'fastkml.kml.KML', 'kml.KML', ([], {}), '()\n', (280, 282), False, 'from fastkml import kml\n')]
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from pyuploadcare.dj.models import ImageField
from taggit_autosuggest.managers import TaggableManager
@python_2_unicode_compatible
class Word(models.Model):
title = models.CharField(max_length=255)
image = ImageField(blank=True, manual_crop="")
tags = TaggableManager()
created_date = models.DateTimeField(default=timezone.now)
published_date = models.DateTimeField(blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
def __str__(self):
return self.title
|
[
"pyuploadcare.dj.models.ImageField",
"django.db.models.CharField",
"django.utils.timezone.now",
"taggit_autosuggest.managers.TaggableManager",
"django.db.models.DateTimeField"
] |
[((399, 431), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (415, 431), False, 'from django.db import models\n'), ((444, 482), 'pyuploadcare.dj.models.ImageField', 'ImageField', ([], {'blank': '(True)', 'manual_crop': '""""""'}), "(blank=True, manual_crop='')\n", (454, 482), False, 'from pyuploadcare.dj.models import ImageField\n'), ((494, 511), 'taggit_autosuggest.managers.TaggableManager', 'TaggableManager', ([], {}), '()\n', (509, 511), False, 'from taggit_autosuggest.managers import TaggableManager\n'), ((532, 574), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now'}), '(default=timezone.now)\n', (552, 574), False, 'from django.db import models\n'), ((596, 639), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (616, 639), False, 'from django.db import models\n'), ((694, 708), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (706, 708), False, 'from django.utils import timezone\n')]
|
from typing import Callable
import sys
import os
from copy import deepcopy
from io import StringIO
import click
from click.core import MultiCommand, _check_multicommand
from colorama import Style
from . import globals as globs
from . import _colors as colors
from .chars import IGNORE_LINE
from .pretty import PrettyGroup, PrettyCommand
from .multicommand import CUSTOM_COMMAND_PROPS, CustomCommandPropsParser
from .utils import HasKey
from ._cmd_factories import ClickCmdShell
class Shell(PrettyGroup):
"""A :class:`Click Group` implementation with an (optionally) attatched shell.
Otherwise functions as a :class:`PrettyGroup`
Constructor Kwargs:
- :param:`isShell`: Attach a new shell instance?
- :param:`prompt`: Prompt Text
- :param:`intro`: Shell Intro Text
- :param:`hist_file`: Full Path & Filename to History File
- :param:`on_finished`: Callback function when shell closes
- :param:`add_command_callback`: Callback for extending command kwargs. See :func:`multicommand.CustomCommandPropsParser()`
- :param:`before_start`: os.system() command to execute prior to starting the shell
- :param:`readline`: If True, use pyreadline instead of any prompt_toolkit features
- :param:`complete_while_typing`: If True, prompt_toolkit suggestions will be live (on a separate thread)
- :param:`fuzzy_completion`: If True, use fuzzy completion for prompt_toolkit suggestions
- :param:`mouse_support`: If True, enables mouse support for prompt_toolkit
- :param:`lexer`: If True, enables the prompt_toolkit lexer
"""
def __init__(self,
isShell=False,
prompt=None,
intro=None,
hist_file=None,
on_finished=None,
add_command_callback: Callable[[ClickCmdShell, object, str], None] = None,
before_start=None,
readline=None,
complete_while_typing=True,
fuzzy_completion=True,
mouse_support=False,
lexer=True,
**attrs):
# Allows this class to be used as a subclass without a new shell instance attached
self.isShell = isShell
if isShell:
attrs['invoke_without_command'] = True
super(Shell, self).__init__(**attrs)
if not globs.__MASTER_SHELL__:
globs.__MASTER_SHELL__ = self.name
def on_shell_closed(ctx):
if len(globs.__SHELL_PATH__):
try: globs.__SHELL_PATH__.remove(self.name)
except: pass
if on_finished and callable(on_finished): on_finished(ctx)
def on_shell_start():
if before_start and callable(before_start): before_start()
if not self.name == globs.__MASTER_SHELL__:
globs.__SHELL_PATH__.append(self.name)
# Create the shell
self.shell = ClickCmdShell(hist_file=hist_file, on_finished=on_shell_closed,
add_command_callback=add_command_callback, before_start=on_shell_start, readline=readline,
complete_while_typing=complete_while_typing, fuzzy_completion=fuzzy_completion, mouse_support=mouse_support,
lexer=lexer
)
if prompt:
self.shell.prompt = prompt
self.shell.intro = intro
else:
super(Shell, self).__init__(**attrs)
def add_command(self, cmd: click.Command, name=None):
name = name or cmd.name
if name is None: raise TypeError("Command has no name.")
_check_multicommand(self, name, cmd, register=True)
if type(name) is str:
self.commands[name] = cmd
else:
for _name_ in name:
self.commands[_name_] = cmd
if self.isShell: self.shell.add_command(cmd, name)
def invoke(self, ctx: click.Context):
if self.isShell:
ret = super(Shell, self).invoke(ctx)
if not ctx.protected_args and not ctx.invoked_subcommand:
ctx.info_name = None
self.shell.ctx = ctx
return self.shell.cmdloop()
return ret
else:
return MultiCommand.invoke(self, ctx)
def new_shell(self, cls=None, **kwargs):
"""A shortcut decorator that instantiates a new Shell instance and attaches it to the existing Command
"""
from .pretty import prettyGroup
def decorator(f):
cmd = prettyGroup(cls=Shell if not cls else cls, isShell=True, **kwargs)(f)
self.add_command(cmd)
return cmd
return decorator
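# Hypothetical usage sketch (names are illustrative, not from the source):
# attach a shell to a root group via click's `cls=` hook; invoking the group
# with no subcommand then drops into the attached REPL.
#
#     @click.group(cls=Shell, isShell=True, prompt="app > ")
#     def root():
#         pass
#
#     root()  # no arguments -> starts the shell's cmdloop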
class MultiCommandShell(Shell):
""" A :class:`Click Group` implementation with an (optionally) attached shell, that also:
- Allows defining commands with multiple aliases
    - Allows for additional command options (hidden, exit, etc.)
- Implements pre-defined base shell commands
- Implements all pretty formatting features
If not attached to a shell, functions as a :class:`PrettyGroup` with the non-shell-related features listed above
Constructor Kwargs:
- :param:`isShell`: Attach a new shell instance?
- :param:`prompt`: Prompt Text
- :param:`intro`: Shell Intro Text
- :param:`hist_file`: Full Path & Filename to History File
- :param:`on_finished`: Callback function when shell closes
- :param:`add_command_callback`: Callback for extending command kwargs. See :func:`multicommand.CustomCommandPropsParser()`
- :param:`before_start`: os.system() command to execute prior to starting the shell
- :param:`readline`: If True, use pyreadline instead of any prompt_toolkit features
- :param:`complete_while_typing`: If True, prompt_toolkit suggestions will be live (on a separate thread)
- :param:`fuzzy_completion`: If True, use fuzzy completion for prompt_toolkit suggestions
- :param:`mouse_support`: If True, enables mouse support for prompt_toolkit
- :param:`lexer`: If True, enables the prompt_toolkit lexer
"""
def __init__(self, isShell=None, **attrs):
self.isShell = isShell
attrs['isShell'] = isShell
if self.isShell:
if not HasKey('add_command_callback', attrs) or not attrs['add_command_callback']:
attrs['add_command_callback'] = CustomCommandPropsParser
super(MultiCommandShell, self).__init__(**attrs)
if self.isShell: BaseShellCommands.addBasics(self)
if globs.__IsShell__ and self.isShell:
if globs.__MASTER_SHELL__ == self.name: BaseShellCommands.addMasters(self)
BaseShellCommands.addAll(self)
@staticmethod
def __strip_invalidKeys(kwargs):
for _kwarg_ in CUSTOM_COMMAND_PROPS:
if HasKey(_kwarg_, kwargs):
kwargs.pop(_kwarg_, None)
@staticmethod
def __assign_invalidKeys(kwargs, cmd):
for _kwarg_ in CUSTOM_COMMAND_PROPS:
if HasKey(_kwarg_, kwargs):
setattr(cmd, _kwarg_, kwargs[_kwarg_])
def group(self, *args, **kwargs):
"""A shortcut decorator for declaring and attaching a group to
the group. This takes the same arguments as :func:`group` but
immediately registers the created command with this instance by
calling into :meth:`add_command`.
"""
from .pretty import prettyGroup
def decorator(f):
cmd = prettyGroup(*args, **kwargs)(f)
cmd.alias = False
self.add_command(cmd)
return cmd
return decorator
def new_shell(self, cls=None, **kwargs):
"""A shortcut decorator that instantiates a new Shell instance and attaches it to the existing Command
"""
from .pretty import prettyGroup
def decorator(f):
cmd = prettyGroup(cls=MultiCommandShell if not cls else cls, isShell=True, **kwargs)(f)
cmd.alias = False
self.add_command(cmd)
return cmd
return decorator
def command(self, *args, **kwargs):
"""Behaves the same as `click.Group.command()` except if passed
a list of names, all after the first will be aliases for the first.
Also allows for use of custom kwargs defined in multicommand.py.
"""
def decorator(f):
old_kwargs = kwargs.copy()
self.__strip_invalidKeys(kwargs)
from .pretty import prettyCommand
tmpCommand = None
origHelpTxt = None
aliases = []
try:
if isinstance(args[0], list):
_args = [args[0][0]] + list(args[1:])
for alias in args[0][1:]:
if tmpCommand is None:
cmd: PrettyCommand = prettyCommand(alias, None, **kwargs)(f)
origHelpTxt = cmd.help
cmd.alias = True
cmd.aliases = []
cmd.help = "(Alias for '{c}') {h}".format(c = _args[0], h = cmd.help)
cmd.short_help = "Alias for '{}'".format(_args[0])
cmd.true_hidden = cmd.hidden
cmd.hidden = True
self.__assign_invalidKeys(old_kwargs, cmd)
super(MultiCommandShell, self).add_command(cmd)
tmpCommand = cmd
else:
cmd = deepcopy(tmpCommand)
cmd.alias = True
cmd.aliases = []
cmd.name = alias
cmd.help = "(Alias for '{c}') {h}".format(c = _args[0], h = origHelpTxt)
cmd.short_help = "Alias for '{}'".format(_args[0])
cmd.hidden = True
self.__assign_invalidKeys(old_kwargs, cmd)
super(MultiCommandShell, self).add_command(cmd)
aliases.append(alias)
else:
_args = args
if tmpCommand is None:
cmd: PrettyCommand = prettyCommand(*_args, **kwargs)(f)
cmd.alias = False
cmd.aliases = aliases
self.__assign_invalidKeys(old_kwargs, cmd)
super(MultiCommandShell, self).add_command(cmd)
return cmd
else:
cmd = deepcopy(tmpCommand)
cmd.alias = False
cmd.aliases = aliases
cmd.name = _args[0]
cmd.help = origHelpTxt
cmd.short_help = ''
cmd.hidden = cmd.true_hidden
self.__assign_invalidKeys(old_kwargs, cmd)
super(MultiCommandShell, self).add_command(cmd)
return cmd
except:
cmd: PrettyCommand = prettyCommand(*args, **kwargs)(f)
cmd.alias = False
cmd.aliases = aliases
self.__assign_invalidKeys(old_kwargs, cmd)
super(MultiCommandShell, self).add_command(cmd)
return cmd
return decorator
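    # Hypothetical usage sketch of the alias behavior described above (names
    # are illustrative, not taken from the source): every name after the first
    # in the list becomes a hidden alias of the first command.
    #
    #     shell = MultiCommandShell(isShell=True, name="app")
    #
    #     @shell.command(["status", "st", "stat"])
    #     def status():
    #         """'st' and 'stat' invoke this same command, hidden from help."""
    #         pass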
class BaseShellCommands:
@staticmethod
def addMasters(shell: MultiCommandShell):
@shell.command(globs.MASTERSHELL_COMMAND_ALIAS_RESTART, hidden=True)
def __restart_shell__():
"""Restarts the application"""
# Spawns a new shell within the current session by launching the python app again
os.system('python "%s"' % sys.argv[0].replace('\\', '/'))
# Exits the current shell once it's child has closed
globs.__IS_REPEAT__ = True
globs.__IS_EXITING__ = True
if shell.shell.readline:
globs.__PREV_STDIN__ = sys.stdin
sys.stdin = StringIO(globs.__LAST_COMMAND__)
else:
shell.shell._pipe_input.send_text('exit\r')
click.echo(IGNORE_LINE)
@staticmethod
def addBasics(shell: MultiCommandShell):
@shell.command(globs.BASIC_COMMAND_ALIAS_HELP, hidden=True)
def __get_help__():
with click.Context(shell) as ctx:
click.echo(shell.get_help(ctx))
@shell.command(globs.BASIC_COMMAND_ALIAS_CLEARHISTORY, hidden=True)
def __clear_history__():
"""Clears the CLI history for this terminal for the current user"""
result = shell.shell.clear_history()
print()
click.echo('\t{}{} {}{}{}'.format(
colors.SHELL_HISTORY_CLEARED_STYLE, 'History cleared' if result else 'Clear History',
colors.SHELL_HISTORY_CLEARED_TRUE if result else colors.SHELL_HISTORY_CLEARED_FALSE,
'successfully' if result else 'failed',
Style.RESET_ALL
))
@staticmethod
def addAll(shell: MultiCommandShell):
@shell.command(globs.SHELL_COMMAND_ALIAS_CLEAR, hidden=True)
def cls():
"""Clears the Terminal"""
click.clear()
@shell.command(globs.SHELL_COMMAND_ALIAS_QUIT, hidden=True, exit=True)
def _exit_():
"""Exits the Shell"""
pass
@shell.command(globs.SHELL_COMMAND_ALIAS_EXIT, exit=True)
def __exit__():
"""Exits the Shell"""
pass
@shell.command(globs.SHELL_COMMAND_ALIAS_REPEAT, hidden=True)
def __repeat_command__():
"""Repeats the last valid command with all previous parameters"""
if globs.__LAST_COMMAND__:
globs.__IS_REPEAT__ = True
if shell.shell.readline:
globs.__PREV_STDIN__ = sys.stdin
sys.stdin = StringIO(globs.__LAST_COMMAND__)
|
[
"io.StringIO",
"copy.deepcopy",
"click.core._check_multicommand",
"click.echo",
"click.Context",
"click.clear",
"click.core.MultiCommand.invoke"
] |
[((3549, 3600), 'click.core._check_multicommand', '_check_multicommand', (['self', 'name', 'cmd'], {'register': '(True)'}), '(self, name, cmd, register=True)\n', (3568, 3600), False, 'from click.core import MultiCommand, _check_multicommand\n'), ((4182, 4212), 'click.core.MultiCommand.invoke', 'MultiCommand.invoke', (['self', 'ctx'], {}), '(self, ctx)\n', (4201, 4212), False, 'from click.core import MultiCommand, _check_multicommand\n'), ((13203, 13216), 'click.clear', 'click.clear', ([], {}), '()\n', (13214, 13216), False, 'import click\n'), ((11983, 12015), 'io.StringIO', 'StringIO', (['globs.__LAST_COMMAND__'], {}), '(globs.__LAST_COMMAND__)\n', (11991, 12015), False, 'from io import StringIO\n'), ((12110, 12133), 'click.echo', 'click.echo', (['IGNORE_LINE'], {}), '(IGNORE_LINE)\n', (12120, 12133), False, 'import click\n'), ((12313, 12333), 'click.Context', 'click.Context', (['shell'], {}), '(shell)\n', (12326, 12333), False, 'import click\n'), ((10539, 10559), 'copy.deepcopy', 'deepcopy', (['tmpCommand'], {}), '(tmpCommand)\n', (10547, 10559), False, 'from copy import deepcopy\n'), ((13903, 13935), 'io.StringIO', 'StringIO', (['globs.__LAST_COMMAND__'], {}), '(globs.__LAST_COMMAND__)\n', (13911, 13935), False, 'from io import StringIO\n'), ((9500, 9520), 'copy.deepcopy', 'deepcopy', (['tmpCommand'], {}), '(tmpCommand)\n', (9508, 9520), False, 'from copy import deepcopy\n')]
|
from django.http import HttpRequest, HttpResponse
from main.util import render_template
TEMPLATE = "tasks/lesson03/task301.html"
def handler(request: HttpRequest) -> HttpResponse:
name = request.GET.get("name")
context = {
"input_name": name,
"greeting_name": name or "anonymous",
}
document = render_template(TEMPLATE, context)
response = HttpResponse(content=document)
return response
if __name__ == '__main__':
x = render_template(TEMPLATE, {'input_name': 1, 'greeting_name': 2})
print(x)
|
[
"main.util.render_template",
"django.http.HttpResponse"
] |
[((331, 365), 'main.util.render_template', 'render_template', (['TEMPLATE', 'context'], {}), '(TEMPLATE, context)\n', (346, 365), False, 'from main.util import render_template\n'), ((382, 412), 'django.http.HttpResponse', 'HttpResponse', ([], {'content': 'document'}), '(content=document)\n', (394, 412), False, 'from django.http import HttpRequest, HttpResponse\n'), ((471, 535), 'main.util.render_template', 'render_template', (['TEMPLATE', "{'input_name': 1, 'greeting_name': 2}"], {}), "(TEMPLATE, {'input_name': 1, 'greeting_name': 2})\n", (486, 535), False, 'from main.util import render_template\n')]
|
import unittest
from config import Config as cfg
import requests
class Camera(unittest.TestCase):
routeUrl = cfg.serverUrl + "gallery/camera"
camerasList = [1,2,3]
def test_IsAllCamerasAvailable(self):
for camera in self.camerasList:
r = requests.get(f"{self.routeUrl}/{camera}")
self.assertEqual(200, r.status_code)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"requests.get"
] |
[((398, 413), 'unittest.main', 'unittest.main', ([], {}), '()\n', (411, 413), False, 'import unittest\n'), ((274, 315), 'requests.get', 'requests.get', (['f"""{self.routeUrl}/{camera}"""'], {}), "(f'{self.routeUrl}/{camera}')\n", (286, 315), False, 'import requests\n')]
|
from typing import Any, Dict, Optional, Union, List
from fastapi.encoders import jsonable_encoder
from sqlalchemy.orm import Session
from app.crud.base import CRUDBase
from app.models.debit import Debit
from app.schemas.debit import DebitCreate, DebitUpdate
class CRUDDebit(CRUDBase[Debit, DebitCreate, DebitUpdate]):
def create_with_owner(self, db: Session, *, obj_in: DebitCreate,
owner_id: int) -> Debit:
obj_in_data = jsonable_encoder(obj_in)
db_obj = self.model(**obj_in_data, owner_id=owner_id)
db.add(db_obj)
db.commit()
db.refresh(db_obj)
return db_obj
def get_by_owner(self, db: Session, *, owner_id: int) -> Optional[Debit]:
return db.query(Debit).filter(Debit.owner_id == owner_id).first()
def update_status(self, db: Session, *,
db_obj: Debit,
obj_in: Union[DebitUpdate, Dict[str, Any]]) -> Debit:
if isinstance(obj_in, dict):
update_data = obj_in
else:
update_data = obj_in.dict(exclude_unset=True)
        return super().update(db, db_obj=db_obj, obj_in=update_data)
# def get_multi(self, db: Session, *,
# skip: int = 0, limit: int = 100) -> List[Dict]:
# return (db.query(self.model).offset(skip).limit(limit).all())
debit = CRUDDebit(Debit)
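# Hypothetical usage sketch (session and schema values are illustrative only):
#     new_debit = debit.create_with_owner(db, obj_in=DebitCreate(...), owner_id=42)
#     debit.update_status(db, db_obj=new_debit, obj_in={"status": "paid"})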
|
[
"fastapi.encoders.jsonable_encoder"
] |
[((464, 488), 'fastapi.encoders.jsonable_encoder', 'jsonable_encoder', (['obj_in'], {}), '(obj_in)\n', (480, 488), False, 'from fastapi.encoders import jsonable_encoder\n')]
|
import argparse
import json
import os
from scipy.sparse import csr_matrix
from tqdm import tqdm
import numpy as np
from multiprocessing import Pool, Manager
def token_dict_to_sparse_vector(token_dict, token2id):
matrix_row, matrix_col, matrix_data = [], [], []
tokens = token_dict.keys()
col = []
data = []
for tok in tokens:
if tok in token2id:
col.append(token2id[tok])
data.append(token_dict[tok])
matrix_row.extend([0] * len(col))
matrix_col.extend(col)
matrix_data.extend(data)
vector = csr_matrix((matrix_data, (matrix_row, matrix_col)), shape=(1, len(token2id)))
return vector
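# Example (hypothetical vocabulary): with token2id = {"hello": 0, "world": 1},
# token_dict_to_sparse_vector({"hello": 2.0, "oov": 1.0}, token2id) yields a
# 1x2 CSR row equal to [[2.0, 0.0]]; out-of-vocabulary tokens are dropped.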
parser = argparse.ArgumentParser()
parser.add_argument('--corpus', type=str, help='path to corpus with vectors', required=True)
parser.add_argument('--topics', type=str, help='path to topics with vectors', required=True)
parser.add_argument('--tokens', type=str, help='path to token list', required=True)
parser.add_argument('--run', type=str, help='path to run file', required=True)
parser.add_argument('--threads', type=int, help='threads for hnsw', required=False, default=12)
args = parser.parse_args()
token2id = {}
with open(args.tokens) as tok_f:
for idx, line in enumerate(tok_f):
tok = line.rstrip()
token2id[tok] = idx
corpus = []
for file in sorted(os.listdir(args.corpus)):
file = os.path.join(args.corpus, file)
if file.endswith('json') or file.endswith('jsonl'):
print(f'Loading {file}')
with open(file, 'r') as f:
for idx, line in enumerate(tqdm(f.readlines())):
info = json.loads(line)
corpus.append(info)
ids = []
vectors = []
matrix_row, matrix_col, matrix_data = [], [], []
for i, d in enumerate(tqdm(corpus)):
weight_dict = d['vector']
tokens = weight_dict.keys()
col = [token2id[tok] for tok in tokens]
data = weight_dict.values()
matrix_row.extend([i] * len(weight_dict))
matrix_col.extend(col)
matrix_data.extend(data)
ids.append(d['id'])
vectors = csr_matrix((matrix_data, (matrix_row, matrix_col)), shape=(len(corpus), len(token2id)))
topic_ids = []
topic_vectors = []
with open(args.topics) as topic_f:
for line in topic_f:
info = json.loads(line)
topic_ids.append(info['id'])
topic_vectors.append(token_dict_to_sparse_vector(info['vector'], token2id))
vectors_T = vectors.T
manager = Manager()
results = manager.dict()
def run_search(idx):
global results
qid = topic_ids[idx]
t_vec = topic_vectors[idx]
scores = np.array(t_vec.dot(vectors_T).todense())[0]
top_idx = sorted(range(len(scores)), key=lambda x: scores[x], reverse=True)[:1000]
result = [(ids[x], scores[x]) for x in top_idx]
results[qid] = result
with Pool(args.threads) as p:
for _ in tqdm(p.imap_unordered(run_search, list(range(len(topic_ids)))), total=len(topic_ids)):
pass
with open(args.run, 'w') as f:
for qid in results:
for idx, item in enumerate(results[qid]):
did = item[0]
score = item[1]
f.write(f'{qid} Q0 {did} {idx+1} {score} bf\n')
|
[
"tqdm.tqdm",
"argparse.ArgumentParser",
"json.loads",
"multiprocessing.Manager",
"multiprocessing.Pool",
"os.path.join",
"os.listdir"
] |
[((670, 695), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (693, 695), False, 'import argparse\n'), ((2427, 2436), 'multiprocessing.Manager', 'Manager', ([], {}), '()\n', (2434, 2436), False, 'from multiprocessing import Pool, Manager\n'), ((1344, 1367), 'os.listdir', 'os.listdir', (['args.corpus'], {}), '(args.corpus)\n', (1354, 1367), False, 'import os\n'), ((1381, 1412), 'os.path.join', 'os.path.join', (['args.corpus', 'file'], {}), '(args.corpus, file)\n', (1393, 1412), False, 'import os\n'), ((1768, 1780), 'tqdm.tqdm', 'tqdm', (['corpus'], {}), '(corpus)\n', (1772, 1780), False, 'from tqdm import tqdm\n'), ((2789, 2807), 'multiprocessing.Pool', 'Pool', (['args.threads'], {}), '(args.threads)\n', (2793, 2807), False, 'from multiprocessing import Pool, Manager\n'), ((2255, 2271), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (2265, 2271), False, 'import json\n'), ((1621, 1637), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (1631, 1637), False, 'import json\n')]
|
from googlesearch import search
import requests
import re
import csv
data=[]
a=[]
student="Mam"
def swap(text,j):
temp1=text[j]
temp2=text[j+1]
temp3=text[j+2]
temp4=text[j+3]
text[j]=text[j+4]
text[j+1]=text[j+5]
text[j+2]=text[j+6]
text[j+3]=text[j+7]
text[j+4]=temp1
text[j+5]=temp2
text[j+6]=temp3
text[j+7]=temp4
def sort_urls(data):
newdata=[]
for word,text in zip(a,data):
text=text.split()
i=0
while(i<len(text)-4):
j=0
while(j<len(text)-5):
if( int(text[j+1])<int(text[j+5]) ):
swap(text,j)
elif(int(text[j+1])==int(text[j+5])):
if( min(int(text[j+3]), min(int(text[j+1]),int(text[j+2]))) < min(int(text[j+7]), min(int(text[j+6]),int(text[j+5])) )):
swap(text,j)
                elif( (int(text[j+3])==0 or int(text[j+1])==0 or int(text[j+2])==0) and (int(text[j+7])!=0 and int(text[j+6])!=0 and int(text[j+5])!=0 ) ):
swap(text,j)
                elif( int(text[j+3]) + int(text[j+1]) + int(text[j+2]) < int(text[j+7]) + int(text[j+6]) + int(text[j+5]) ):
swap(text,j)
j=j+4
i=i+4
strtemp=""
k=0
while(k<len(text)-3):
strtemp+=text[k]+"\n"+text[k+1]+" "+text[k+2]+" "+text[k+3]+"\n"
k=k+4
strtemp=strtemp+"-1\n"
newdata.append(strtemp)
#for x in newdata:
# print (x)
return newdata
def read_from_file():
try:
fin = open("Experiment2/urlw8"+student+"new.txt")
except :
return 0
query=fin.readline()
strtemp=""
query=query.replace("\n","")
var=query
while(query):
while(query and query!="-1"):
query=fin.readline()
strtemp+=query
query=query.replace("\n","")
query=fin.readline()
query=query.replace("\n","")
if(query):
a.append(var)
data.append(strtemp)
strtemp=""
var=query
fin.close()
return 1
read_from_file()
data=sort_urls(data)
open("Experiment2/urlw8"+student+"new.txt","w").close()
fout= open("Experiment2/urlw8"+student+"new.txt","w")
list_1=[]
for x,y in zip(a,data):
fout.write(x+"\n")
fout.write(y)
#----------------------------------
temp12=y
temp12=temp12.splitlines()
i=0
while( i < len(temp12)-1):
templist=[]
if ( temp12[i]=="-1" or temp12[i+1]=="-1" ) :
i=i+2
else:
w8=temp12[i+1].split()
templist.append(x)
templist.append(temp12[i])
templist.append(w8[0])
templist.append(w8[1])
templist.append(w8[2])
#print(templist)
if (int(w8[0])!=0):
# print(w8[0])
list_1.append(templist)
i=i+2
#print("\n")
#-------------------------------------------------------------------
header = ['Label', 'URL', ' Label Weight', 'Esemble Weight','ML Weight']
with open('Experiment2/'+student+'new.csv', 'wt') as f:
csv_writer = csv.writer(f, quoting=csv.QUOTE_ALL)
csv_writer.writerow(header) # write header
csv_writer.writerows(list_1)
#fin1 = open("Experiment2/Results(Mam).txt","r")
fin2 = open("Experiment2/Results("+student+").txt","r")
#set1=fin1.readline()
set2=fin2.readline()
#set1=set1.split(" ,")
set2=set2.split(" ,")
words=list(set2)
#word1=word1.replace("\n","")
i=0
#print(a)
while(i<len(words) ) :
word1=words[i]
i=i+1
if word1 not in a:
        regex1 = r'\W' + word1 + r'\W'
        regex2 = r'\Wensemble\W'
        regex3 = r'\Wmachine learning\W'
query='"'+word1+'" + "ensemble" + "machine learning" '
fout.write(word1+"\n")
print(word1)
for url in search(query, tld='com', stop=10):
if(url.find(".pdf",len(url)-5)==-1):
test=1
try:
page=requests.get(url).text
except :
test=0
if test!=0 :
print(url)
fout.write(url)
fout.write("\n")
fout.write(str(len(re.findall(regex1, page , re.IGNORECASE) ) ) )
fout.write(" ")
fout.write(str(len(re.findall(regex2, page , re.IGNORECASE) ) ) )
fout.write(" ")
fout.write(str(len(re.findall(regex3, page , re.IGNORECASE) ) ) )
fout.write("\n")
fout.write("-1\n")
fout.write("-2\n")
|
[
"requests.get",
"re.findall",
"csv.writer",
"googlesearch.search"
] |
[((3261, 3297), 'csv.writer', 'csv.writer', (['f'], {'quoting': 'csv.QUOTE_ALL'}), '(f, quoting=csv.QUOTE_ALL)\n', (3271, 3297), False, 'import csv\n'), ((3964, 3997), 'googlesearch.search', 'search', (['query'], {'tld': '"""com"""', 'stop': '(10)'}), "(query, tld='com', stop=10)\n", (3970, 3997), False, 'from googlesearch import search\n'), ((4117, 4134), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (4129, 4134), False, 'import requests\n'), ((4368, 4407), 're.findall', 're.findall', (['regex1', 'page', 're.IGNORECASE'], {}), '(regex1, page, re.IGNORECASE)\n', (4378, 4407), False, 'import re\n'), ((4496, 4535), 're.findall', 're.findall', (['regex2', 'page', 're.IGNORECASE'], {}), '(regex2, page, re.IGNORECASE)\n', (4506, 4535), False, 'import re\n'), ((4624, 4663), 're.findall', 're.findall', (['regex3', 'page', 're.IGNORECASE'], {}), '(regex3, page, re.IGNORECASE)\n', (4634, 4663), False, 'import re\n')]
|
"""
generate periodic boundary condition (PBC).
Two methods to detect and partition the surface-nodes:
1. graph-method: (recommended, can deal with arbitrary deformed shape):
use dictionary-data-structure to map facet-nodes to element-number,
where the surface-facet is shared by only one element.
Construct the node-linking graph of surface, and the node-linking graph of the outlines.
Using outlines as boundaries,
partition the graph into different faces (left-, right-, down-, up-, back-, front- surfaces) by union-find algorithm.
2. method of xMin, xMax, yMin, yMax, zMin, zMax:
detect the surface simply by coordinates of all nodes.
This method can only be applied to the object with cuboid shape.
Two methods match nodes on opposites of the surface:
1. BFS method to match the nodes (time complexity of O(V + E), V and E are number of nodes and edges respectively):
Matching nodes during traversing of surface-node-graphs of opposite faces.
Given a matched node-pair, use similar vectors (pointed from current node to neighbors) to match their neighbors.
2. nearest-coordinates method: Could be very slow when there are many many nodes on a surface (with time complexity of O(V^2)).
"""
import torch as tch
import numpy as np
from elementsBody import *
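# A minimal sketch (an illustration, not part of the original module) of the
# graph-method surface detection described in the module docstring: map each
# sorted facet (tuple of node ids) to the elements sharing it; a facet that
# belongs to exactly one element lies on the surface.
def surface_facets_sketch(elements):
    """elements: iterable of hexahedra, each a sequence of 8 node ids.
    Returns the list of boundary facets (as sorted node-id tuples)."""
    # local corner indices of the 6 facets of a hexahedron (assumed ordering)
    facet_ids = [(0, 1, 2, 3), (4, 5, 6, 7), (0, 1, 5, 4),
                 (3, 2, 6, 7), (0, 3, 7, 4), (1, 2, 6, 5)]
    count = {}
    for ele in elements:
        for f in facet_ids:
            key = tuple(sorted(ele[i] for i in f))
            count[key] = count.get(key, 0) + 1
    return [facet for facet, n in count.items() if n == 1]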
def write_PBC_equation(file, obj, instance):
"""
    write the PBC for the 8 outer vertexes, the 12 edges, and the 6 faces, in three steps:
        1. make the 8 outer vertexes form a parallel hexahedron (parallelepiped)
        2. make the 12 edges satisfy PBC
        3. make the inside nodes of each face-pair coincide
"""
if not isinstance(obj, ElementsBody):
raise ValueError("error, not isinstance(obj, ElementsBody)")
if not hasattr(obj, 'v_x0y0z0'):
obj.getEdgeVertexForPBC()
    ## 1.1 make the y0face to be a parallelogram
    file.write('************************** make the y0face to be a parallelogram \n')
for dm in [1, 2, 3]:
file.write('*Equation\n4 \n')
file.write('{}.N{}, {}, 1 \n'.format(instance, obj.v_x1y0z0, dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, obj.v_x0y0z0, dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, obj.v_x1y0z1, dm))
file.write('{}.N{}, {}, 1 \n'.format(instance, obj.v_x0y0z1, dm))
## 1.2 make vertexes of ylines to form parallel hexahedron
file.write('************************** make vertexes of 4 ylines to coincide \n')
for yline in obj.ylines[1:]:
for dm in [1, 2, 3]:
file.write('*Equation\n4 \n')
file.write('{}.N{}, {}, 1 \n'.format(instance, yline['end'], dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, yline['beg'], dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, obj.ylines[0]['end'], dm))
file.write('{}.N{}, {}, 1 \n'.format(instance, obj.ylines[0]['beg'], dm))
# 2. make all outer edges to coincide
file.write('************************** make all outer edges to coincide \n')
xyzEdges = [obj.xlines, obj.ylines, obj.zlines]
for edges in xyzEdges:
for edge in edges[1:]:
for node in range(len(edge['inside'])):
for dm in [1, 2, 3]:
file.write('*Equation\n4 \n')
file.write('{}.N{}, {}, 1 \n'.format(instance, edge['inside'][node], dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, edge['beg'], dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, edges[0]['inside'][node], dm))
file.write('{}.N{}, {}, 1 \n'.format(instance, edges[0]['beg'], dm))
# 3. make all corresponding face-pairs to coincide
file.write('************************** make all corresponding face-pairs to coincide \n')
edgeNodes = set()
for edges in [obj.xlines, obj.ylines, obj.zlines]:
for edge in edges:
edgeNodes |= ({edge['beg']} | {edge['end']} | set(edge['inside']))
for iface, face in enumerate(obj.faceMatch):
for node in face:
for dm in [1, 2, 3]:
if node not in edgeNodes:
file.write('*Equation\n4 \n')
file.write('{}.N{}, {}, 1 \n'.format(instance, node, dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, obj.baseNodes[iface][0], dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, face[node], dm))
file.write('{}.N{}, {}, 1 \n'.format(instance, obj.baseNodes[iface][1], dm))
def write_PBC_equation_byGraph(file, obj, instance):
"""
use graph-method to get the PBC info,
    write the PBC for the 8 outer vertexes, the 12 edges, and the 6 faces, in three steps:
        1. make the 8 outer vertexes form a parallel hexahedron (parallelepiped)
        2. make the 12 edges satisfy PBC
        3. make the inside nodes of each face-pair coincide
    the node-numbering of the megaElement
    (composed of the vertexes of the outer surface) is shown as follows:
v3------v7
/| /|
v0------v4|
| | | |
| v2----|-v6
y ^ |/ |/
| v1------v5
--->
/ x
z
"""
if not isinstance(obj, ElementsBody):
raise ValueError("error, not isinstance(obj, ElementsBody)")
obj.getFaceForPBC_byGraph()
obj.getEdgeForPBC_byGraph()
    ## 1.1 make the y0face to be a parallelogram
    file.write('************************** make the y0face to be a parallelogram \n')
for dm in [1, 2, 3]:
file.write('*Equation\n4 \n')
file.write('{}.N{}, {}, 1 \n'.format(instance, obj.megaElement[6], dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, obj.megaElement[2], dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, obj.megaElement[5], dm))
file.write('{}.N{}, {}, 1 \n'.format(instance, obj.megaElement[1], dm))
## 1.2 make vertexes of ylines to form parallel hexahedron
file.write('************************** make vertexes of 4 ylines to coincide \n')
for i, j in [[7, 6], [3, 2], [0, 1]]:
for dm in [1, 2, 3]:
file.write('*Equation\n4 \n')
file.write('{}.N{}, {}, 1 \n'.format(instance, obj.megaElement[i], dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, obj.megaElement[j], dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, obj.megaElement[4], dm))
file.write('{}.N{}, {}, 1 \n'.format(instance, obj.megaElement[5], dm))
# 2. make all outer edges to coincide
file.write('************************** make all outer edges to coincide \n')
edgeId = [
[[0, 4], [3, 7], [2, 6], [1, 5]], # xEdges
[[1, 0], [5, 4], [6, 7], [2, 3]], # yEdges
[[2, 1], [6, 5], [7, 4], [3, 0]] # zEdges
]
for edges in edgeId: # edges = xEdges or yEdges or zEdges
edge0 = (obj.megaElement[edges[0][0]], obj.megaElement[edges[0][1]])
if edge0 in obj.outlines:
for edge in edges[1:]:
edge1 = (obj.megaElement[edge[0]], obj.megaElement[edge[1]])
for node in range(len(obj.outlines[edge0])):
for dm in [1, 2, 3]:
file.write('*Equation\n4 \n')
file.write('{}.N{}, {}, 1 \n'.format(instance, obj.outlines[edge1][node], dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, edge1[0], dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, obj.outlines[edge0][node], dm))
file.write('{}.N{}, {}, 1 \n'.format(instance, edge0[0], dm))
# 3. make all corresponding face-pairs to coincide
file.write('************************** make all corresponding face-pairs to coincide \n')
for twoFacets in obj.faceMatch:
faceMatch = obj.faceMatch[twoFacets]
for node in faceMatch:
for dm in [1, 2, 3]:
file.write('*Equation\n4 \n')
file.write('{}.N{}, {}, 1 \n'.format(instance, node, dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, twoFacets[0], dm))
file.write('{}.N{}, {}, -1 \n'.format(instance, faceMatch[node], dm))
file.write('{}.N{}, {}, 1 \n'.format(instance, twoFacets[4], dm))
def write_PBC_Nset(file, obj):
if not isinstance(obj, ElementsBody):
raise ValueError("error, not isinstance(obj, ElementsBody)")
if not hasattr(obj, 'faceNode'):
obj.getFaceNode()
for node in obj.getFaceNode():
file.write('*Nset, nset=N{} \n'.format(node))
file.write('{}, \n'.format(node))
def write_nodes(file, obj):
nodes = obj.nodes
for node in nodes:
file.write(' {}, {}, {}, {} \n'.format(
node, nodes[node][0], nodes[node][1], nodes[node][2]
))
def adjustCoordinatesForPBC_byGraph(obj):
"""
use graph method to get the node-relation,
    adjust the nodal coordinates for the periodic boundary condition (PBC),
    so that the nodes of each face-pair strictly coincide in the initial state
"""
if not isinstance(obj, ElementsBody):
raise ValueError("error, not isinstance(obj, ElementsBody)")
obj.getFaceForPBC_byGraph()
obj.getEdgeForPBC_byGraph()
makenp = False
for node in obj.nodes:
if type(obj.nodes[node]) == type([]):
makenp = True
break
if makenp:
for node in obj.nodes:
obj.nodes[node] = np.array(obj.nodes[node])
    ## 1.1 make the y0face to be a parallelogram
obj.nodes[obj.megaElement[6]] = \
obj.nodes[obj.megaElement[2]] + \
(obj.nodes[obj.megaElement[5]] - obj.nodes[obj.megaElement[1]])
## 1.2 make vertexes of ylines to form parallel hexahedron
for i, j in [[7, 6], [3, 2], [0, 1]]:
obj.nodes[obj.megaElement[i]] = \
obj.nodes[obj.megaElement[j]] + \
obj.nodes[obj.megaElement[4]] - obj.nodes[obj.megaElement[5]]
# 2. make all outer edges to coincide
edgeId = [
[[0, 4], [3, 7], [2, 6], [1, 5]], # xEdges
[[1, 0], [5, 4], [6, 7], [2, 3]], # yEdges
[[2, 1], [6, 5], [7, 4], [3, 0]] # zEdges
]
for edges in edgeId: # edges = xEdges or yEdges or zEdges
edge0 = (obj.megaElement[edges[0][0]], obj.megaElement[edges[0][1]])
if edge0 in obj.outlines:
for edge in edges[1:]:
edge1 = (obj.megaElement[edge[0]], obj.megaElement[edge[1]])
for node in range(len(obj.outlines[edge0])):
obj.nodes[obj.outlines[edge1][node]] = \
obj.nodes[edge1[0]] + \
obj.nodes[obj.outlines[edge0][node]] - obj.nodes[edge0[0]]
# 3. make all corresponding face-pairs to coincide
for twoFacets in obj.faceMatch:
faceMatch = obj.faceMatch[twoFacets]
for node in faceMatch:
obj.nodes[faceMatch[node]] = \
obj.nodes[twoFacets[4]] + \
obj.nodes[node] - obj.nodes[twoFacets[0]]
obj.nodesAdjusted = True
def adjustCoordinatesForPBC(obj):
"""
    adjust the nodal coordinates for the periodic boundary condition (PBC),
    so that the nodes of each face-pair strictly coincide in the initial state
"""
if not isinstance(obj, ElementsBody):
raise ValueError("error, not isinstance(obj, ElementsBody)")
if not hasattr(obj, 'v_x0y0z0'):
obj.getEdgeVertexForPBC()
makenp = False
for node in obj.nodes:
if type(obj.nodes[node]) == type([]):
makenp = True
break
if makenp:
for node in obj.nodes:
obj.nodes[node] = np.array(obj.nodes[node])
    ## 1.1 make the y0face to be a parallelogram
obj.nodes[obj.v_x1y0z0] = \
obj.nodes[obj.v_x0y0z0] + \
(obj.nodes[obj.v_x1y0z1] - obj.nodes[obj.v_x0y0z1])
## 1.2 make vertexes of ylines to form parallel hexahedron
for yline in obj.ylines[1:]:
obj.nodes[yline['end']] = \
obj.nodes[yline['beg']] + \
obj.nodes[obj.ylines[0]['end']] - obj.nodes[obj.ylines[0]['beg']]
# 2. make all outer edges to coincide
xyzEdges = [obj.xlines, obj.ylines, obj.zlines]
for edges in xyzEdges:
for edge in edges[1:]:
for node in range(len(edge['inside'])):
obj.nodes[edge['inside'][node]] = \
obj.nodes[edge['beg']] + \
obj.nodes[edges[0]['inside'][node]] - obj.nodes[edges[0]['beg']]
# 3. make all corresponding face-pairs to coincide
edgeNodes = set()
for edges in [obj.xlines, obj.ylines, obj.zlines]:
for edge in edges:
edgeNodes |= ({edge['beg']} | {edge['end']} | set(edge['inside']))
for iface, face in enumerate(obj.faceMatch):
for node in face:
if node not in edgeNodes:
obj.nodes[node] = \
obj.nodes[obj.baseNodes[iface][0]] + \
obj.nodes[face[node]] - obj.nodes[obj.baseNodes[iface][1]]
obj.nodesAdjusted = True
if __name__ == "__main__":
testState = False
# get the inp file and the object
    inpFile = input("\033[0;33;40m{}\033[0m".format("please enter the .inp file name (including the path): "))
job = inpFile.split("/")[-1].split(".inp")[0] if "/" in inpFile else inpFile.split("\\")[-1].split(".inp")[0]
path = inpFile.split(job + ".inp")[0]
obj = ElementsBody(*readInp(inpFile))
key = input("\033[35;1m{}\033[0m".format(
"which method do you want to use? \n"
"1: graph-method (recomended); \n"
"2: xMin, xMax, yMin, yMax, zMin, zMax; \n(insert 1 or 2): "
))
if key == "1":
getFaceForPBC = obj.getFaceForPBC_byGraph
writeEquations = write_PBC_equation_byGraph
adjustCoordinate = adjustCoordinatesForPBC_byGraph
elif key == "2":
getFaceForPBC = obj.getFaceForPBC
writeEquations = write_PBC_equation
adjustCoordinate = adjustCoordinatesForPBC
getFaceForPBC()
adjustCoor = input("do you want to adjust the coordinates for PBC? "
"(not recommended)\n\033[33m{}\033[0m".format('(y/n): '))
while adjustCoor not in ['y', 'n']:
        adjustCoor = input('\033[33m{}\033[0m'.format('please enter "y" or "n": '))
if adjustCoor == 'y':
adjustCoordinate(obj)
if testState:
del obj.faceMatch
getFaceForPBC()
# find the instance name
instance = 'Part-1'
with open(inpFile, 'r') as file:
for line in file:
if '*Instance' in line and 'name=' in line:
instance = line.split(',')
instance = instance[1].split('=')
instance = instance[-1]
print('instance =', instance)
break
writeInp = input(
        'ok to write the .inp file with the PBC included? \033[36m{}\033[0m'.format('(y/n): ')
)
while writeInp not in ['y', 'n']:
writeInp = input('\033[31m{}\033[0m'.format(
            'please enter "y" or "n": '
))
if writeInp == 'y':
newFileName = path + job + "_PBC.inp"
with open(newFileName, 'w') as newFile, open(inpFile, 'r') as oldFile:
clone = True
for line in oldFile:
if "Section:" in line and "**" in line:
write_PBC_Nset(newFile, obj)
elif '*End Assembly' in line:
writeEquations(newFile, obj, instance)
if clone == False and '*' in line:
clone = True
if clone:
newFile.write(line) # write the line from old file to new file
if "*Node\n" in line:
if hasattr(obj, 'nodesAdjusted'):
clone = False
print("\033[35;1m{}\033[0m".format("write new nodes for obj"))
write_nodes(newFile, obj)
print("\033[40;36;1m {} {} \033[35;1m {} \033[0m".format(
"file", newFileName, "has been written. "
))
elif input(
"\033[32;1m write nset- and equations- files for PBC? (y/n): \033[0m"
) in ["y", ""]:
# write the Nset
with open(path + '{}_nset.txt'.format(job), 'w') as file:
for node in obj.getFaceNode():
file.write('*Nset, nset=N{} \n'.format(node))
file.write('{}, \n'.format(node))
print("\033[40;36;1m {} {} \033[35;1m {} \033[0m".format(
"file", path + '{}_nset.txt'.format(job), "has been written. "
))
# write the equation for PBC
with open(path + '{}_equation.txt'.format(job), 'w') as file:
writeEquations(file, obj, instance)
print("\033[40;36;1m {} {} \033[35;1m {} \033[0m".format(
"file", path + '{}_equation.txt'.format(job), "has been written. "
))
|
[
"numpy.array"
] |
[((9765, 9790), 'numpy.array', 'np.array', (['obj.nodes[node]'], {}), '(obj.nodes[node])\n', (9773, 9790), True, 'import numpy as np\n'), ((11978, 12003), 'numpy.array', 'np.array', (['obj.nodes[node]'], {}), '(obj.nodes[node])\n', (11986, 12003), True, 'import numpy as np\n')]
|
# Copyright 2019 Adobe
# All Rights Reserved.
#
# NOTICE: Adobe permits you to use, modify, and distribute this file in
# accordance with the terms of the Adobe license agreement accompanying
# it. If you have received this file from a source other than Adobe,
# then your use, modification, or distribution of it requires the prior
# written permission of Adobe.
#
import unittest
import json
from protector.query.query import OpenTSDBQuery, OpenTSDBResponse
import time
class TestQuery(unittest.TestCase):
def setUp(self):
self.response1 = "[]"
self.response2 = """
[
{
"metric": "this.metric",
"tags": {
"env": "prod",
"recipientDomain": "gmail.com",
"channel": "email"
},
"aggregateTags": [
"hostname"
],
"dps": {
"1623619500": 0,
"1623619560": 0,
"1623619620": 0
}
},
{
"metric": "this.metric",
"tags": {
"env": "prod",
"recipientDomain": "gmail.com",
"channel": "email"
},
"aggregateTags": [
"hostname"
],
"dps": {
"1623619500": 0,
"1623619560": 0,
"1623619620": 0
}
},
{
"statsSummary": {
"avgAggregationTime": 0.806912,
"avgHBaseTime": 3.874463,
"avgQueryScanTime": 5.436076,
"avgScannerTime": 3.888163,
"avgScannerUidToStringTime": 0,
"avgSerializationTime": 0.808312,
"dpsPostFilter": 145,
"dpsPreFilter": 145,
"emittedDPs": 1440,
"maxAggregationTime": 0.806912,
"maxHBaseTime": 5.170471,
"maxQueryScanTime": 5.436076,
"maxScannerUidToStringTime": 0,
"maxSerializationTime": 0.808312,
"maxUidToStringTime": 0.0255,
"processingPreWriteTime": 8.480518,
"queryIdx_00": {
"aggregationTime": 0.806912,
"avgHBaseTime": 3.874463,
"avgScannerTime": 3.888163,
"avgScannerUidToStringTime": 0,
"dpsPostFilter": 145,
"dpsPreFilter": 145,
"emittedDPs": 1440,
"groupByTime": 0,
"maxHBaseTime": 5.170471,
"maxScannerUidToStringTime": 0,
"queryIndex": 0,
"queryScanTime": 5.436076,
"rowsPostFilter": 129,
"rowsPreFilter": 129,
"saltScannerMergeTime": 0.163702,
"serializationTime": 0.808312,
"successfulScan": 20,
"uidPairsResolved": 0,
"uidToStringTime": 0.0255
},
"rowsPostFilter": 129,
"rowsPreFilter": 129,
"successfulScan": 20,
"uidPairsResolved": 0
}
}
]
"""
self.response2_ret = [
{
"metric": "this.metric",
"tags": {
"env": "prod",
"recipientDomain": "gmail.com",
"channel": "email"
},
"aggregateTags": [
"hostname"
],
"dps": {
"1623619500": 0,
"1623619560": 0,
"1623619620": 0
}
},
{
"metric": "this.metric",
"tags": {
"env": "prod",
"recipientDomain": "gmail.com",
"channel": "email"
},
"aggregateTags": [
"hostname"
],
"dps": {
"1623619500": 0,
"1623619560": 0,
"1623619620": 0
}
}
]
self.stats2 = {
"avgAggregationTime": 0.806912,
"avgHBaseTime": 3.874463,
"avgQueryScanTime": 5.436076,
"avgScannerTime": 3.888163,
"avgScannerUidToStringTime": 0,
"avgSerializationTime": 0.808312,
"dpsPostFilter": 145,
"dpsPreFilter": 145,
"emittedDPs": 1440,
"maxAggregationTime": 0.806912,
"maxHBaseTime": 5.170471,
"maxQueryScanTime": 5.436076,
"maxScannerUidToStringTime": 0,
"maxSerializationTime": 0.808312,
"maxUidToStringTime": 0.0255,
"processingPreWriteTime": 8.480518,
"rowsPostFilter": 129,
"rowsPreFilter": 129,
"successfulScan": 20,
"uidPairsResolved": 0
}
self.response3 = """
[
{
"metric": "this.metric",
"tags": {
"env": "prod",
"recipientDomain": "gmail.com",
"channel": "email"
},
"aggregateTags": [
"hostname"
],
"dps": {
"1623619500": 0,
"1623619560": 0,
"1623619620": 0
}
},
{
"metric": "this.metric",
"tags": {
"env": "prod",
"recipientDomain": "gmail.com",
"channel": "email"
},
"aggregateTags": [
"hostname"
],
"dps": {
"1623619500": 0,
"1623619560": 0,
"1623619620": 0
}
}
]
"""
def test_ok_empty_response(self):
r = OpenTSDBResponse(self.response1)
        self.assertFalse(r.get_stats())
def test_ok_normal_response(self):
r = OpenTSDBResponse(self.response2)
# expected response with summary stripped
p = json.dumps(self.response2_ret, sort_keys=True)
# test that response summary is correctly stripped
self.assertEqual(p, r.to_json(True))
# test that stats are properly collected
self.assertDictEqual(self.stats2, r.get_stats())
def test_missing_stats_response(self):
r = OpenTSDBResponse(self.response3)
# no error is raised, just logged
        self.assertFalse(r.get_stats())
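# Usage sketch, per the tests above: OpenTSDBResponse strips a trailing
# statsSummary object from the datapoint list, so
#   r = OpenTSDBResponse(raw_json)
#   r.to_json(True)   # JSON of the datapoints only, summary stripped
#   r.get_stats()     # flat summary dict (per-query 'queryIdx_*' keys dropped)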
|
[
"protector.query.query.OpenTSDBResponse",
"json.dumps"
] |
[((6536, 6568), 'protector.query.query.OpenTSDBResponse', 'OpenTSDBResponse', (['self.response1'], {}), '(self.response1)\n', (6552, 6568), False, 'from protector.query.query import OpenTSDBQuery, OpenTSDBResponse\n'), ((6665, 6697), 'protector.query.query.OpenTSDBResponse', 'OpenTSDBResponse', (['self.response2'], {}), '(self.response2)\n', (6681, 6697), False, 'from protector.query.query import OpenTSDBQuery, OpenTSDBResponse\n'), ((6761, 6807), 'json.dumps', 'json.dumps', (['self.response2_ret'], {'sort_keys': '(True)'}), '(self.response2_ret, sort_keys=True)\n', (6771, 6807), False, 'import json\n'), ((7077, 7109), 'protector.query.query.OpenTSDBResponse', 'OpenTSDBResponse', (['self.response3'], {}), '(self.response3)\n', (7093, 7109), False, 'from protector.query.query import OpenTSDBQuery, OpenTSDBResponse\n')]
|
import morepath
from .app import App
def run():
print('Running app...')
morepath.autoscan()
App.commit()
morepath.run(App())
if __name__ == '__main__':
run()
|
[
"morepath.autoscan"
] |
[((81, 100), 'morepath.autoscan', 'morepath.autoscan', ([], {}), '()\n', (98, 100), False, 'import morepath\n')]
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Utility functions shared between the file and sqlite datastore stubs."""
import md5
from google.appengine.api import datastore_types
from google.appengine.api.datastore_errors import BadRequestError
from google.appengine.datastore import datastore_index
from google.appengine.datastore import datastore_pb
from google.appengine.runtime import apiproxy_errors
def ValidateQuery(query, filters, orders, max_query_components):
"""Validate a datastore query with normalized filters, orders.
Raises an ApplicationError when any of the following conditions are violated:
- transactional queries have an ancestor
- queries that are not too large
(sum of filters, orders, ancestor <= max_query_components)
- ancestor (if any) app and namespace match query app and namespace
- kindless queries only filter on __key__ and only sort on __key__ ascending
- multiple inequality (<, <=, >, >=) filters all applied to the same property
- filters on __key__ compare to a reference in the same app and namespace as
the query
- if an inequality filter on prop X is used, the first order (if any) must
be on X
Args:
query: query to validate
filters: normalized (by datastore_index.Normalize) filters from query
orders: normalized (by datastore_index.Normalize) orders from query
max_query_components: limit on query complexity
"""
def BadRequest(message):
raise apiproxy_errors.ApplicationError(
datastore_pb.Error.BAD_REQUEST, message)
key_prop_name = datastore_types._KEY_SPECIAL_PROPERTY
unapplied_log_timestamp_us_name = (
datastore_types._UNAPPLIED_LOG_TIMESTAMP_SPECIAL_PROPERTY)
if query.has_transaction():
if not query.has_ancestor():
BadRequest('Only ancestor queries are allowed inside transactions.')
num_components = len(filters) + len(orders)
if query.has_ancestor():
num_components += 1
if num_components > max_query_components:
BadRequest('query is too large. may not have more than %s filters'
               ' + sort orders + ancestor total' % max_query_components)
if query.has_ancestor():
ancestor = query.ancestor()
if query.app() != ancestor.app():
BadRequest('query app is %s but ancestor app is %s' %
(query.app(), ancestor.app()))
if query.name_space() != ancestor.name_space():
BadRequest('query namespace is %s but ancestor namespace is %s' %
(query.name_space(), ancestor.name_space()))
ineq_prop_name = None
for filter in filters:
if filter.property_size() != 1:
BadRequest('Filter has %d properties, expected 1' %
filter.property_size())
prop = filter.property(0)
prop_name = prop.name().decode('utf-8')
if prop_name == key_prop_name:
if not prop.value().has_referencevalue():
BadRequest('%s filter value must be a Key' % key_prop_name)
ref_val = prop.value().referencevalue()
if ref_val.app() != query.app():
BadRequest('%s filter app is %s but query app is %s' %
(key_prop_name, ref_val.app(), query.app()))
if ref_val.name_space() != query.name_space():
BadRequest('%s filter namespace is %s but query namespace is %s' %
(key_prop_name, ref_val.name_space(), query.name_space()))
if (filter.op() in datastore_index.INEQUALITY_OPERATORS and
prop_name != unapplied_log_timestamp_us_name):
if ineq_prop_name is None:
ineq_prop_name = prop_name
elif ineq_prop_name != prop_name:
BadRequest(('Only one inequality filter per query is supported. '
'Encountered both %s and %s') % (ineq_prop_name, prop_name))
if ineq_prop_name is not None and orders:
first_order_prop = orders[0].property().decode('utf-8')
if first_order_prop != ineq_prop_name:
BadRequest('The first sort property must be the same as the property '
'to which the inequality filter is applied. In your query '
'the first sort property is %s but the inequality filter '
'is on %s' % (first_order_prop, ineq_prop_name))
if not query.has_kind():
for filter in filters:
prop_name = filter.property(0).name().decode('utf-8')
if (prop_name != key_prop_name and
prop_name != unapplied_log_timestamp_us_name):
BadRequest('kind is required for non-__key__ filters')
for order in orders:
prop_name = order.property().decode('utf-8')
if not (prop_name == key_prop_name and
order.direction() is datastore_pb.Query_Order.ASCENDING):
BadRequest('kind is required for all orders except __key__ ascending')
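# Illustration of the rules above (hypothetical property names): filters
# [age > 18, height < 180] fail validation because the two inequality
# filters name different properties, and [age > 18] with a first sort order
# on 'name' fails because the first sort property must be 'age'.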
def ParseKeyFilteredQuery(filters, orders):
"""Parse queries which only allow filters and ascending-orders on __key__.
Raises exceptions for illegal queries.
Args:
filters: the normalized filters of a query.
orders: the normalized orders of a query.
Returns:
The key range (start, start_inclusive, end, end_inclusive) requested
in the query.
"""
remaining_filters = []
start_key = None
start_inclusive = False
end_key = None
end_inclusive = False
key_prop = datastore_types._KEY_SPECIAL_PROPERTY
for f in filters:
op = f.op()
if not (f.property_size() == 1 and
f.property(0).name() == key_prop and
not (op == datastore_pb.Query_Filter.IN or
op == datastore_pb.Query_Filter.EXISTS)):
remaining_filters.append(f)
continue
val = f.property(0).value()
if not val.has_referencevalue():
raise BadRequestError('__key__ kind must be compared to a key')
limit = datastore_types.FromReferenceProperty(val)
if op == datastore_pb.Query_Filter.LESS_THAN:
if end_key is None or limit <= end_key:
end_key = limit
end_inclusive = False
elif (op == datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL or
op == datastore_pb.Query_Filter.EQUAL):
if end_key is None or limit < end_key:
end_key = limit
end_inclusive = True
if op == datastore_pb.Query_Filter.GREATER_THAN:
if start_key is None or limit >= start_key:
start_key = limit
start_inclusive = False
elif (op == datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL or
op == datastore_pb.Query_Filter.EQUAL):
if start_key is None or limit > start_key:
start_key = limit
start_inclusive = True
remaining_orders = []
for o in orders:
if not (o.direction() == datastore_pb.Query_Order.ASCENDING and
o.property() == datastore_types._KEY_SPECIAL_PROPERTY):
remaining_orders.append(o)
else:
break
if remaining_filters:
raise BadRequestError(
'Only comparison filters on ' + key_prop + ' supported')
if remaining_orders:
raise BadRequestError('Only ascending order on ' + key_prop + ' supported')
return (start_key, start_inclusive, end_key, end_inclusive)
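# For example, filters [__key__ >= k1, __key__ < k2] (with no other filters
# or orders) yield the range (k1, True, k2, False).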
def ParseKindQuery(query, filters, orders):
"""Parse __kind__ (schema) queries.
Raises exceptions for illegal queries.
Args:
query: A Query PB.
filters: the normalized filters from query.
orders: the normalized orders from query.
Returns:
The kind range (start, start_inclusive, end, end_inclusive) requested
in the query.
"""
if query.has_ancestor():
raise BadRequestError('ancestor queries not allowed')
start_kind, start_inclusive, end_kind, end_inclusive = ParseKeyFilteredQuery(
filters, orders)
return (_KindKeyToString(start_kind), start_inclusive,
_KindKeyToString(end_kind), end_inclusive)
def _KindKeyToString(key):
"""Extract kind name from __kind__ key.
Raises an ApplicationError if the key is not of the form '__kind__'/name.
Args:
key: a key for a __kind__ instance, or a false value.
Returns:
kind specified by key, or key if key is a false value.
"""
if not key:
return key
key_path = key.to_path()
if (len(key_path) == 2 and key_path[0] == '__kind__' and
isinstance(key_path[1], basestring)):
return key_path[1]
raise BadRequestError('invalid Key for __kind__ table')
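# e.g. a key with path ['__kind__', 'Person'] yields 'Person'.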
def ParseNamespaceQuery(query, filters, orders):
"""Parse __namespace__ queries.
Raises exceptions for illegal queries.
Args:
query: A Query PB.
filters: the normalized filters from query.
orders: the normalized orders from query.
Returns:
The kind range (start, start_inclusive, end, end_inclusive) requested
in the query.
"""
if query.has_ancestor():
raise BadRequestError('ancestor queries not allowed')
start_kind, start_inclusive, end_kind, end_inclusive = ParseKeyFilteredQuery(
filters, orders)
return (_NamespaceKeyToString(start_kind), start_inclusive,
_NamespaceKeyToString(end_kind), end_inclusive)
def _NamespaceKeyToString(key):
"""Extract namespace name from __namespace__ key.
Raises an ApplicationError if the key is not of the form '__namespace__'/name
or '__namespace__'/_EMPTY_NAMESPACE_ID.
Args:
key: a key for a __namespace__ instance, or a false value.
Returns:
namespace specified by key, or key if key is a false value.
"""
if not key:
return key
key_path = key.to_path()
if len(key_path) == 2 and key_path[0] == '__namespace__':
if key_path[1] == datastore_types._EMPTY_NAMESPACE_ID:
return ''
if isinstance(key_path[1], basestring):
return key_path[1]
raise BadRequestError('invalid Key for __namespace__ table')
def SynthesizeUserId(email):
"""Return a synthetic user ID from an email address.
Note that this is not the same user ID found in the production system.
Args:
email: An email address.
Returns:
A string userid derived from the email address.
"""
user_id_digest = md5.new(email.lower()).digest()
user_id = '1' + ''.join(['%02d' % ord(x) for x in user_id_digest])[:20]
return user_id
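# Note the email is lower-cased before hashing, so e.g. (sketch):
#   SynthesizeUserId('Alice@Example.com') == SynthesizeUserId('alice@example.com')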
def FillUsersInQuery(filters):
"""Fill in a synthetic user ID for all user properties in a set of filters.
Args:
filters: The normalized filters from query.
"""
for filter in filters:
for property in filter.property_list():
FillUser(property)
def FillUser(property):
"""Fill in a synthetic user ID for a user properties.
Args:
property: A Property which may have a user value.
"""
if property.value().has_uservalue():
uid = SynthesizeUserId(property.value().uservalue().email())
if uid:
property.mutable_value().mutable_uservalue().set_obfuscated_gaiaid(uid)
|
[
"google.appengine.runtime.apiproxy_errors.ApplicationError",
"google.appengine.api.datastore_errors.BadRequestError",
"google.appengine.api.datastore_types.FromReferenceProperty"
] |
[((8759, 8808), 'google.appengine.api.datastore_errors.BadRequestError', 'BadRequestError', (['"""invalid Key for __kind__ table"""'], {}), "('invalid Key for __kind__ table')\n", (8774, 8808), False, 'from google.appengine.api.datastore_errors import BadRequestError\n'), ((10112, 10166), 'google.appengine.api.datastore_errors.BadRequestError', 'BadRequestError', (['"""invalid Key for __namespace__ table"""'], {}), "('invalid Key for __namespace__ table')\n", (10127, 10166), False, 'from google.appengine.api.datastore_errors import BadRequestError\n'), ((2064, 2137), 'google.appengine.runtime.apiproxy_errors.ApplicationError', 'apiproxy_errors.ApplicationError', (['datastore_pb.Error.BAD_REQUEST', 'message'], {}), '(datastore_pb.Error.BAD_REQUEST, message)\n', (2096, 2137), False, 'from google.appengine.runtime import apiproxy_errors\n'), ((6307, 6349), 'google.appengine.api.datastore_types.FromReferenceProperty', 'datastore_types.FromReferenceProperty', (['val'], {}), '(val)\n', (6344, 6349), False, 'from google.appengine.api import datastore_types\n'), ((7368, 7440), 'google.appengine.api.datastore_errors.BadRequestError', 'BadRequestError', (["('Only comparison filters on ' + key_prop + ' supported')"], {}), "('Only comparison filters on ' + key_prop + ' supported')\n", (7383, 7440), False, 'from google.appengine.api.datastore_errors import BadRequestError\n'), ((7483, 7552), 'google.appengine.api.datastore_errors.BadRequestError', 'BadRequestError', (["('Only ascending order on ' + key_prop + ' supported')"], {}), "('Only ascending order on ' + key_prop + ' supported')\n", (7498, 7552), False, 'from google.appengine.api.datastore_errors import BadRequestError\n'), ((8015, 8062), 'google.appengine.api.datastore_errors.BadRequestError', 'BadRequestError', (['"""ancestor queries not allowed"""'], {}), "('ancestor queries not allowed')\n", (8030, 8062), False, 'from google.appengine.api.datastore_errors import BadRequestError\n'), ((9210, 9257), 'google.appengine.api.datastore_errors.BadRequestError', 'BadRequestError', (['"""ancestor queries not allowed"""'], {}), "('ancestor queries not allowed')\n", (9225, 9257), False, 'from google.appengine.api.datastore_errors import BadRequestError\n'), ((6237, 6294), 'google.appengine.api.datastore_errors.BadRequestError', 'BadRequestError', (['"""__key__ kind must be compared to a key"""'], {}), "('__key__ kind must be compared to a key')\n", (6252, 6294), False, 'from google.appengine.api.datastore_errors import BadRequestError\n')]
|
import json
import logging
from datetime import datetime
from typing import Any, Dict
from flask import current_app, g, jsonify, request
from flask_cors import cross_origin
from alerta.auth.decorators import permission
from alerta.exceptions import ApiError, RejectException
from alerta.models.alert import Alert
from alerta.models.enums import Scope
from alerta.utils.api import add_remote_ip, assign_customer, process_alert
from alerta.utils.audit import write_audit_trail
from . import webhooks
LOG = logging.getLogger(__name__)
JSON = Dict[str, Any]
def parse_stackdriver(notification: JSON) -> Alert:
incident = notification['incident']
state = incident['state']
    # 'documentation' is an optional field that can be used to customize
    # the alert by sending JSON content
if 'documentation' in incident:
try:
content = json.loads(incident['documentation']['content'])
incident.update(content)
except Exception as e:
            LOG.warning("Invalid documentation content '{}': {}".format(incident['documentation'], e))
service = []
status = None
create_time = None # type: ignore
severity = incident.get('severity', 'critical')
if incident['policy_name']:
service.append(incident['policy_name'])
if state == 'open':
create_time = datetime.utcfromtimestamp(incident['started_at'])
elif state == 'acknowledged':
status = 'ack'
elif state == 'closed':
severity = 'ok'
create_time = datetime.utcfromtimestamp(incident['ended_at'])
else:
severity = 'indeterminate'
return Alert(
resource=incident['resource_name'],
event=incident['condition_name'],
environment=incident.get('environment', 'Production'),
severity=severity,
status=status,
service=service,
group=incident.get('group', 'Cloud'),
text=incident['summary'],
attributes={
'incidentId': incident['incident_id'],
'resourceId': incident['resource_id'],
'moreInfo': '<a href="%s" target="_blank">Stackdriver Console</a>' % incident['url']
},
customer=incident.get('customer'),
origin=incident.get('origin', 'Stackdriver'),
event_type='stackdriverAlert',
create_time=create_time,
raw_data=notification
)
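# Minimal sketch of the Stackdriver notification this parser consumes
# (field names taken from the code above; values are illustrative only):
# {
#     "incident": {
#         "incident_id": "abc123",
#         "resource_id": "res-1",
#         "resource_name": "my-vm",
#         "policy_name": "High CPU",
#         "condition_name": "CPU > 90%",
#         "state": "open",
#         "started_at": 1623619500,
#         "summary": "CPU for my-vm is above 90%",
#         "url": "https://example.com/incident/abc123"
#     }
# }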
@webhooks.route('/webhooks/stackdriver', methods=['OPTIONS', 'POST'])
@cross_origin()
@permission(Scope.write_webhooks)
def stackdriver():
try:
incomingAlert = parse_stackdriver(request.get_json(force=True))
except ValueError as e:
raise ApiError(str(e), 400)
incomingAlert.customer = assign_customer(wanted=incomingAlert.customer)
add_remote_ip(request, incomingAlert)
try:
alert = process_alert(incomingAlert)
except RejectException as e:
raise ApiError(str(e), 403)
except Exception as e:
raise ApiError(str(e), 500)
text = 'stackdriver alert received via webhook'
write_audit_trail.send(current_app._get_current_object(), event='webhook-received', message=text, user=g.user,
customers=g.customers, scopes=g.scopes, resource_id=alert.id, type='alert', request=request)
if alert:
return jsonify(status='ok', id=alert.id, alert=alert.serialize), 201
else:
raise ApiError('insert or update of StackDriver notification failed', 500)
|
[
"alerta.utils.api.process_alert",
"alerta.utils.api.add_remote_ip",
"json.loads",
"flask_cors.cross_origin",
"datetime.datetime.utcfromtimestamp",
"flask.current_app._get_current_object",
"flask.jsonify",
"alerta.utils.api.assign_customer",
"alerta.auth.decorators.permission",
"alerta.exceptions.ApiError",
"flask.request.get_json",
"logging.getLogger"
] |
[((510, 537), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (527, 537), False, 'import logging\n'), ((2438, 2452), 'flask_cors.cross_origin', 'cross_origin', ([], {}), '()\n', (2450, 2452), False, 'from flask_cors import cross_origin\n'), ((2454, 2486), 'alerta.auth.decorators.permission', 'permission', (['Scope.write_webhooks'], {}), '(Scope.write_webhooks)\n', (2464, 2486), False, 'from alerta.auth.decorators import permission\n'), ((2682, 2728), 'alerta.utils.api.assign_customer', 'assign_customer', ([], {'wanted': 'incomingAlert.customer'}), '(wanted=incomingAlert.customer)\n', (2697, 2728), False, 'from alerta.utils.api import add_remote_ip, assign_customer, process_alert\n'), ((2733, 2770), 'alerta.utils.api.add_remote_ip', 'add_remote_ip', (['request', 'incomingAlert'], {}), '(request, incomingAlert)\n', (2746, 2770), False, 'from alerta.utils.api import add_remote_ip, assign_customer, process_alert\n'), ((1332, 1381), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (["incident['started_at']"], {}), "(incident['started_at'])\n", (1357, 1381), False, 'from datetime import datetime\n'), ((2797, 2825), 'alerta.utils.api.process_alert', 'process_alert', (['incomingAlert'], {}), '(incomingAlert)\n', (2810, 2825), False, 'from alerta.utils.api import add_remote_ip, assign_customer, process_alert\n'), ((3038, 3071), 'flask.current_app._get_current_object', 'current_app._get_current_object', ([], {}), '()\n', (3069, 3071), False, 'from flask import current_app, g, jsonify, request\n'), ((3362, 3430), 'alerta.exceptions.ApiError', 'ApiError', (['"""insert or update of StackDriver notification failed"""', '(500)'], {}), "('insert or update of StackDriver notification failed', 500)\n", (3370, 3430), False, 'from alerta.exceptions import ApiError, RejectException\n'), ((863, 911), 'json.loads', 'json.loads', (["incident['documentation']['content']"], {}), "(incident['documentation']['content'])\n", (873, 911), False, 'import json\n'), ((2558, 2586), 'flask.request.get_json', 'request.get_json', ([], {'force': '(True)'}), '(force=True)\n', (2574, 2586), False, 'from flask import current_app, g, jsonify, request\n'), ((3276, 3332), 'flask.jsonify', 'jsonify', ([], {'status': '"""ok"""', 'id': 'alert.id', 'alert': 'alert.serialize'}), "(status='ok', id=alert.id, alert=alert.serialize)\n", (3283, 3332), False, 'from flask import current_app, g, jsonify, request\n'), ((1513, 1560), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (["incident['ended_at']"], {}), "(incident['ended_at'])\n", (1538, 1560), False, 'from datetime import datetime\n')]
|
import pytest
from wemake_python_styleguide.logic.tree import functions
@pytest.mark.parametrize(('function_call', 'function_name'), [
# Simple builtin functions
('print("Hello world!")', 'print'),
('int("10")', 'int'),
('bool(1)', 'bool'),
('open("/tmp/file.txt", "r")', 'open'),
('str(10)', 'str'),
# Functions in modules
('datetime.timedelta(days=1)', 'datetime.timedelta'),
('cmath.sqrt(100)', 'cmath.sqrt'),
# Functions in (made up) objects
('dt.strftime("%H:%M")', 'dt.strftime'),
('obj.funct()', 'obj.funct'),
])
def test_given_function_called_no_split(
parse_ast_tree, function_call: str, function_name: str,
) -> None:
"""Test given_function_called without splitting the modules."""
tree = parse_ast_tree(function_call)
node = tree.body[0].value
called_function = functions.given_function_called(node, [function_name])
assert called_function == function_name
@pytest.mark.parametrize(('function_call', 'function_name'), [
# Simple builtin functions
('print("Hello world!")', 'print'),
('int("10")', 'int'),
('bool(1)', 'bool'),
('open("/tmp/file.txt", "r")', 'open'),
('str(10)', 'str'),
# Functions in modules
('datetime.timedelta(days=1)', 'timedelta'),
('cmath.sqrt(100)', 'sqrt'),
# Functions in (made up) objects
('dt.strftime("%H:%M")', 'strftime'),
('obj.funct()', 'funct'),
])
def test_given_function_called_with_split(
parse_ast_tree, function_call: str, function_name: str,
) -> None:
"""Test given_function_called splitting the modules."""
tree = parse_ast_tree(function_call)
node = tree.body[0].value
called_function = functions.given_function_called(
node,
[function_name],
split_modules=True,
)
assert called_function == function_name
|
[
"pytest.mark.parametrize",
"wemake_python_styleguide.logic.tree.functions.given_function_called"
] |
[((76, 458), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('function_call', 'function_name')", '[(\'print("Hello world!")\', \'print\'), (\'int("10")\', \'int\'), (\'bool(1)\',\n \'bool\'), (\'open("/tmp/file.txt", "r")\', \'open\'), (\'str(10)\', \'str\'), (\n \'datetime.timedelta(days=1)\', \'datetime.timedelta\'), (\'cmath.sqrt(100)\',\n \'cmath.sqrt\'), (\'dt.strftime("%H:%M")\', \'dt.strftime\'), (\'obj.funct()\',\n \'obj.funct\')]'], {}), '((\'function_call\', \'function_name\'), [(\n \'print("Hello world!")\', \'print\'), (\'int("10")\', \'int\'), (\'bool(1)\',\n \'bool\'), (\'open("/tmp/file.txt", "r")\', \'open\'), (\'str(10)\', \'str\'), (\n \'datetime.timedelta(days=1)\', \'datetime.timedelta\'), (\'cmath.sqrt(100)\',\n \'cmath.sqrt\'), (\'dt.strftime("%H:%M")\', \'dt.strftime\'), (\'obj.funct()\',\n \'obj.funct\')])\n', (99, 458), False, 'import pytest\n'), ((948, 1304), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('function_call', 'function_name')", '[(\'print("Hello world!")\', \'print\'), (\'int("10")\', \'int\'), (\'bool(1)\',\n \'bool\'), (\'open("/tmp/file.txt", "r")\', \'open\'), (\'str(10)\', \'str\'), (\n \'datetime.timedelta(days=1)\', \'timedelta\'), (\'cmath.sqrt(100)\', \'sqrt\'),\n (\'dt.strftime("%H:%M")\', \'strftime\'), (\'obj.funct()\', \'funct\')]'], {}), '((\'function_call\', \'function_name\'), [(\n \'print("Hello world!")\', \'print\'), (\'int("10")\', \'int\'), (\'bool(1)\',\n \'bool\'), (\'open("/tmp/file.txt", "r")\', \'open\'), (\'str(10)\', \'str\'), (\n \'datetime.timedelta(days=1)\', \'timedelta\'), (\'cmath.sqrt(100)\', \'sqrt\'),\n (\'dt.strftime("%H:%M")\', \'strftime\'), (\'obj.funct()\', \'funct\')])\n', (971, 1304), False, 'import pytest\n'), ((846, 900), 'wemake_python_styleguide.logic.tree.functions.given_function_called', 'functions.given_function_called', (['node', '[function_name]'], {}), '(node, [function_name])\n', (877, 900), False, 'from wemake_python_styleguide.logic.tree import functions\n'), ((1690, 1764), 'wemake_python_styleguide.logic.tree.functions.given_function_called', 'functions.given_function_called', (['node', '[function_name]'], {'split_modules': '(True)'}), '(node, [function_name], split_modules=True)\n', (1721, 1764), False, 'from wemake_python_styleguide.logic.tree import functions\n')]
|
from django.shortcuts import render
# Create your views here.
from django.shortcuts import render, HttpResponse
from utils.tools.tools import unique
from devops_backend.settings import TMP_DIR
from rest_framework import viewsets, filters, mixins, status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from servers.models import Server
import os
# def index(request):
# return render(request, 'index.html')
class IndexViewSet(viewsets.ViewSet, mixins.ListModelMixin):
permission_classes = (IsAuthenticated, )
def list(self, request, *args, **kwargs):
        remote_addr = self.request.META.get('HTTP_X_FORWARDED_FOR') or self.request.META.get('REMOTE_ADDR')
try:
ip_addr = self.request.query_params['ip']
        except KeyError:
return Response({"permission": False}, status=status.HTTP_403_FORBIDDEN)
permission_str = 'servers.login_' + ip_addr
if not (self.request.user.has_perm('servers.login_server') or self.request.user.has_perm(permission_str)):
return Response({"permission": False}, status=status.HTTP_403_FORBIDDEN)
        # reject requests for hosts that are not registered
        if not Server.objects.filter(ip=ip_addr).exists():
            return Response({"permission": False}, status=status.HTTP_400_BAD_REQUEST)
        port = self.request.query_params.get('port', '22')
        user = self.request.query_params.get('user', 'root')
content = {
'host': ip_addr,
'port': port,
'user': user,
'current_user': self.request.user,
'remote_addr': remote_addr
}
return render(request, 'index.html', content)
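# Example request handled by IndexViewSet.list (hypothetical URL routing):
#   GET /?ip=10.0.0.5&port=22&user=deploy
# renders index.html with the connection context, provided the user holds
# 'servers.login_server' or the per-host 'servers.login_10.0.0.5' permission.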
def upload_ssh_key(request):
if request.method == 'POST':
pkey = request.FILES.get('pkey')
ssh_key = pkey.read().decode('utf-8')
while True:
filename = unique()
ssh_key_path = os.path.join(TMP_DIR, filename)
if not os.path.isfile(ssh_key_path):
with open(ssh_key_path, 'w+') as f:
f.write(ssh_key)
break
else:
continue
            return HttpResponse(filename)
    # only POST uploads are supported
    return HttpResponse(status=405)
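# Example upload (hypothetical URL routing):
#   curl -F 'pkey=@~/.ssh/id_rsa' http://host/upload_ssh_key/
# responds with the unique temp filename written under TMP_DIR.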
|
[
"utils.tools.tools.unique",
"django.shortcuts.HttpResponse",
"os.path.isfile",
"rest_framework.response.Response",
"django.shortcuts.render",
"servers.models.Server.objects.filter",
"os.path.join"
] |
[((1830, 1868), 'django.shortcuts.render', 'render', (['request', '"""index.html"""', 'content'], {}), "(request, 'index.html', content)\n", (1836, 1868), False, 'from django.shortcuts import render, HttpResponse\n'), ((2349, 2371), 'django.shortcuts.HttpResponse', 'HttpResponse', (['filename'], {}), '(filename)\n', (2361, 2371), False, 'from django.shortcuts import render, HttpResponse\n'), ((1131, 1196), 'rest_framework.response.Response', 'Response', (["{'permission': False}"], {'status': 'status.HTTP_403_FORBIDDEN'}), "({'permission': False}, status=status.HTTP_403_FORBIDDEN)\n", (1139, 1196), False, 'from rest_framework.response import Response\n'), ((1222, 1255), 'servers.models.Server.objects.filter', 'Server.objects.filter', ([], {'ip': 'ip_addr'}), '(ip=ip_addr)\n', (1243, 1255), False, 'from servers.models import Server\n'), ((2063, 2071), 'utils.tools.tools.unique', 'unique', ([], {}), '()\n', (2069, 2071), False, 'from utils.tools.tools import unique\n'), ((2099, 2130), 'os.path.join', 'os.path.join', (['TMP_DIR', 'filename'], {}), '(TMP_DIR, filename)\n', (2111, 2130), False, 'import os\n'), ((879, 944), 'rest_framework.response.Response', 'Response', (["{'permission': False}"], {'status': 'status.HTTP_403_FORBIDDEN'}), "({'permission': False}, status=status.HTTP_403_FORBIDDEN)\n", (887, 944), False, 'from rest_framework.response import Response\n'), ((1306, 1373), 'rest_framework.response.Response', 'Response', (["{'permission': False}"], {'status': 'status.HTTP_400_BAD_REQUEST'}), "({'permission': False}, status=status.HTTP_400_BAD_REQUEST)\n", (1314, 1373), False, 'from rest_framework.response import Response\n'), ((2150, 2178), 'os.path.isfile', 'os.path.isfile', (['ssh_key_path'], {}), '(ssh_key_path)\n', (2164, 2178), False, 'import os\n')]
|
from collections import OrderedDict as OD
from lxml import etree
# parser = etree.XMLParser(remove_blank_text=True)
import db.objects
import db.api
db_session = db.api.start_db_session() # need independent connection for reading
import os.path
import __main__
schema_path = os.path.join(os.path.dirname(__main__.__file__), 'schemas')
xsd_parser = etree.XMLParser(
schema=etree.XMLSchema(file=os.path.join(schema_path, 'form.xsd')),
attribute_defaults=True, remove_comments=True, remove_blank_text=True)
from common import AibError
from common import log, debug
async def init_xml(caller, xml):
# called from setup_form after form_name if form does not exist
form_defn = caller.data_objects['form']
form_xml = etree.Element('form')
form_xml.set('name', await form_defn.getval('form_name'))
etree.SubElement(form_xml, 'db_objects')
etree.SubElement(form_xml, 'mem_objects')
etree.SubElement(form_xml, 'input_params')
etree.SubElement(form_xml, 'output_params')
frame = etree.SubElement(form_xml, 'frame')
etree.SubElement(frame, 'toolbar')
etree.SubElement(frame, 'body')
etree.SubElement(frame, 'button_row')
etree.SubElement(frame, 'frame_methods')
await form_defn.setval('form_xml', form_xml)
await load_form_xml(caller, xml)
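# For reference, init_xml above produces this skeleton (serialised):
# <form name="...">
#   <db_objects/><mem_objects/><input_params/><output_params/>
#   <frame><toolbar/><body/><button_row/><frame_methods/></frame>
# </form>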
#-----------------------------------------------------------------------------
# form_funcs
#-----------------------------------------------------------------------------
async def load_form_xml(caller, xml):
# called from setup_form 'on_start_frame'
form_defn = caller.data_objects['form']
form_vars = caller.data_objects['form_vars']
frame_vars = caller.data_objects['frame_vars']
inline_vars = caller.data_objects['inline_vars']
await inline_vars.delete_all()
form_xml = await form_defn.getval('form_xml')
if form_xml is None:
await form_vars.init()
await frame_vars.init()
return
init_vals={}
init_vals['dbobj_xml'] = form_xml.find('db_objects')
init_vals['memobj_xml'] = form_xml.find('mem_objects')
init_vals['inputs_xml'] = form_xml.find('input_params')
init_vals['outputs_xml'] = form_xml.find('output_params')
init_vals['before_start_form'] = await form_vars.get_val_from_xml(
'before_start_form', form_xml.get('before_start_form'))
init_vals['after_start_form'] = await form_vars.get_val_from_xml(
'after_start_form', form_xml.get('after_start_form'))
init_vals['on_close_form'] = await form_vars.get_val_from_xml(
'on_close_form', form_xml.get('on_close_form'))
await form_vars.init(init_vals=init_vals)
obj_names = caller.data_objects['obj_names']
await obj_names.delete_all()
col_names = caller.data_objects['col_names']
await col_names.delete_all()
dbobj_xml = await form_vars.getval('dbobj_xml')
for dbobj_elem in dbobj_xml.iter('db_obj'):
"""
async with db_session.get_connection() as db_mem_conn:
conn = db_mem_conn.db
sql = (
"SELECT row_id, short_descr FROM {}.db_tables WHERE table_name = '{}'"
.format(caller.company, dbobj_elem.get('table_name'))
)
cur = await conn.exec_sql(sql)
table_id, descr = await cur.__anext__()
await obj_names.init(init_vals={
'name': dbobj_elem.get('name'), 'descr': descr})
await obj_names.save()
sql = (
"SELECT col_name, short_descr FROM {}.db_columns "
"WHERE table_id = {} "
"AND col_name NOT IN ('row_id', 'created_id', 'deleted_id') "
"ORDER BY col_type, seq"
.format(caller.company, table_id)
)
async for col_name, descr in await conn.exec_sql(sql):
await col_names.init(init_vals={ #'obj_id': obj_row_id,
'name': col_name, 'descr': descr})
await col_names.save()
"""
# """
obj_name = dbobj_elem.get('name')
table_name = dbobj_elem.get('table_name')
db_table = await db.objects.get_db_table(
form_defn.context, caller.company, table_name)
await obj_names.init(init_vals={
'name': obj_name, 'descr': db_table.short_descr})
await obj_names.save()
for seq, col_defn in enumerate(db_table.col_list):
await col_names.init(init_vals={'name': col_defn.col_name,
'descr': col_defn.short_descr, 'seq': seq})
await col_names.save()
# """
memobj_xml = await form_vars.getval('memobj_xml')
for memobj in memobj_xml.iter('mem_obj'):
await obj_names.init(init_vals={
'name': memobj.get('name'), 'descr': memobj.get('descr')})
await obj_names.save()
obj_row_id = await obj_names.getval('row_id')
for seq, memcol in enumerate(memobj.iter('mem_col')):
await col_names.init(init_vals={'name': memcol.get('col_name'),
'descr': memcol.get('short_descr'), 'seq': seq})
await col_names.save()
frame_xml = form_xml.find('frame')
init_vals={}
init_vals['toolbar_xml'] = frame_xml.find('toolbar')
init_vals['body_xml'] = frame_xml.find('body')
init_vals['buttonrow_xml'] = frame_xml.find('button_row')
init_vals['methods_xml'] = frame_xml.find('frame_methods')
init_vals['main_object'] = frame_xml.get('main_object')
init_vals['obj_descr'] = frame_xml.get('obj_descr')
await frame_vars.init(init_vals=init_vals)
for inline_xml in form_xml.iterchildren('inline_form'): # do not descend
init_vals = {
'name': inline_xml.get('name'),
'title': inline_xml.get('title'),
'frame_xml': inline_xml.find('frame'),
}
await inline_vars.init(init_vals=init_vals)
await inline_vars.save()
async def dump_form_xml(caller, xml):
# called from setup_form 'before_save'
form_defn = caller.data_objects['form']
form_vars = caller.data_objects['form_vars']
frame_vars = caller.data_objects['frame_vars']
form_xml = etree.Element('form')
form_xml.set('name', await form_defn.getval('form_name'))
form_xml.set('title', await form_defn.getval('title'))
await set_if_not_none(form_xml, form_vars, 'before_start_form')
await set_if_not_none(form_xml, form_vars, 'after_start_form')
await set_if_not_none(form_xml, form_vars, 'on_close_form')
form_xml.append(await form_vars.getval('dbobj_xml'))
form_xml.append(await form_vars.getval('memobj_xml'))
form_xml.append(await form_vars.getval('inputs_xml'))
form_xml.append(await form_vars.getval('outputs_xml'))
frame_xml = etree.SubElement(form_xml, 'frame')
await set_if_not_none(frame_xml, frame_vars, 'main_object')
await set_if_not_none(frame_xml, frame_vars, 'obj_descr')
frame_xml.append(await frame_vars.getval('toolbar_xml'))
frame_xml.append(await frame_vars.getval('body_xml'))
frame_xml.append(await frame_vars.getval('buttonrow_xml'))
frame_xml.append(await frame_vars.getval('methods_xml'))
inline_vars = caller.data_objects['inline_vars']
all_inline = inline_vars.select_many(where=[], order=[])
async for _ in all_inline:
inline_xml = etree.SubElement(form_xml, 'inline_form')
inline_xml.set('name', await inline_vars.getval('name'))
inline_xml.set('title', await inline_vars.getval('title'))
inline_xml.append(await inline_vars.getval('frame_xml'))
# inline_params = await form_vars.getval('inline_xml')
# for name, frame_xml in inline_params:
# inline_xml = etree.SubElement(form_xml, 'inline_form')
# inline_xml.set('name', name)
# inline_xml.append(frame_xml)
# validate result using schema
try:
etree.fromstring(etree.tostring(form_xml), parser=xsd_parser)
except (etree.XMLSyntaxError, ValueError, TypeError) as e:
raise AibError(head='XmlError', body=e.args[0])
# update form_definition with new form_xml
await form_defn.setval('form_xml', form_xml)
"""
# the next bit is a trick
# we want to 'save' form_vars, to trigger on_clean()
# however, inline_xml is a 'list' which includes etree Elements
# this cannot be serialised to JSON, so the save fails
# the trick is as follows -
# save all values in init_vals
# call form_vars.restore(), which triggers on_clean()
# call form_vars.init() with init_vals, which puts back the values
init_vals = {}
for col_defn in form_vars.db_table.col_list[1:]: # exclude 'row_id'
col_name = col_defn.col_name
init_vals[col_name] = await form_vars.getval(col_name)
await form_vars.restore()
await form_vars.init(init_vals=init_vals, display=False)
form_vars.init_vals = {}
"""
#-----------------------------------------------------------------------------
# db_obj
#-----------------------------------------------------------------------------
dbobj_cols = ('name', 'table_name', 'parent', 'fkey', 'cursor', 'is_formview_obj')
async def load_db_obj(caller, xml):
# called from setup_form_dbobj 'on_start_frame'
form_vars = caller.data_objects['form_vars']
dbobj_xml = await form_vars.getval('dbobj_xml')
dbobj = caller.data_objects['dbobj']
await dbobj.delete_all()
for seq, obj_xml in enumerate(dbobj_xml):
# init_vals = {col: await dbobj.get_val_from_xml(col, obj_xml.get(col))
# for col in dbobj_cols}
init_vals = {}
for col in dbobj_cols:
init_vals[col] = await dbobj.get_val_from_xml(col, obj_xml.get(col))
init_vals['seq'] = seq
await dbobj.init(display=False, init_vals=init_vals)
await dbobj.save()
await dbobj.init()
async def dump_db_obj(caller, xml):
# called from setup_form_dbobj 'do_save'
form_vars = caller.data_objects['form_vars']
dbobj_xml = await form_vars.getval('dbobj_xml')
orig_dbobj = set((dbobj.get('name') for dbobj in dbobj_xml))
obj_names = caller.data_objects['obj_names']
col_names = caller.data_objects['col_names']
dbobj = caller.data_objects['dbobj']
dbobjs_xml = etree.Element('db_objects')
all_dbobj = dbobj.select_many(where=[], order=[('seq', False)])
async for _ in all_dbobj:
dbobj_xml = etree.SubElement(dbobjs_xml, 'db_obj')
for col in dbobj_cols:
await set_if_not_none(dbobj_xml, dbobj, col)
obj_name = await dbobj.getval('name')
if obj_name in orig_dbobj:
orig_dbobj.remove(obj_name)
else:
"""
async with db_session.get_connection() as db_mem_conn:
conn = db_mem_conn.db
sql = (
"SELECT row_id, short_descr FROM {}.db_tables WHERE table_name = '{}'"
.format(caller.company, await dbobj.getval('table_name'))
)
cur = await conn.exec_sql(sql)
table_id, descr = await cur.__anext__()
await obj_names.init(init_vals={
'name': obj_name, 'descr': descr})
await obj_names.save()
sql = (
"SELECT col_name, short_descr FROM {}.db_columns "
"WHERE table_id = {} "
"AND col_name NOT IN ('row_id', 'created_id', 'deleted_id') "
"ORDER BY col_type, seq"
.format(caller.company, table_id)
)
async for col_name, descr in await conn.exec_sql(sql):
await col_names.init(init_vals={ #'obj_id': obj_row_id,
'name': col_name, 'descr': descr})
await col_names.save()
"""
# """
table_name = await dbobj.getval('table_name')
db_table = await db.objects.get_db_table(
form_vars.context, caller.company, table_name)
await obj_names.init(init_vals={
'name': obj_name, 'descr': db_table.short_descr})
await obj_names.save()
for col_defn in db_table.col_list:
await col_names.init(init_vals={
'name': col_defn.col_name, 'descr': col_defn.short_descr})
await col_names.save()
# """
for deleted_obj in orig_dbobj: # anything left has been deleted
await obj_names.init(init_vals={'name': deleted_obj})
await obj_names.delete()
await form_vars.setval('dbobj_xml', dbobjs_xml)
#-----------------------------------------------------------------------------
# mem_obj
#-----------------------------------------------------------------------------
memobj_cols = ('name', 'descr', 'parent', 'sequence', 'sub_types', 'tree_params',
'actions', 'clone_from')
memcol_cols = ('col_name', 'col_type', 'data_type', 'short_descr', 'long_descr',
'col_head', 'key_field', 'data_source', 'condition', 'allow_null', 'allow_amend', 'max_len',
'db_scale', 'scale_ptr', 'dflt_val', 'dflt_rule', 'col_checks', 'fkey', 'choices', 'sql')
async def load_mem_obj(caller, xml):
# called from setup_form_memobj 'on_start_frame'
form_vars = caller.data_objects['form_vars']
memobj_xml = await form_vars.getval('memobj_xml')
memobj = caller.data_objects['memobj']
memcol = caller.data_objects['memcol']
await memcol.delete_all()
await memobj.delete_all()
for seq, obj_xml in enumerate(memobj_xml):
# init_vals = {col: memobj.get_val_from_xml(col, obj_xml.get(col))
# for col in memobj_cols}
init_vals = {}
for col in memobj_cols:
init_vals[col] = await memobj.get_val_from_xml(col, obj_xml.get(col))
init_vals['seq'] = seq
await memobj.init(display=False, init_vals=init_vals)
await memobj.save()
#set up memcols for this memobj
for seq, memcol_xml in enumerate(obj_xml.iter('mem_col')):
# init_vals = {col: memcol.get_val_from_xml(col, memcol_xml.get(col))
# for col in memcol_cols}
init_vals = {}
for col in memcol_cols:
init_vals[col] = await memcol.get_val_from_xml(col, memcol_xml.get(col))
init_vals['seq'] = seq
await memcol.init(display=False, init_vals=init_vals)
await memcol.save()
    await memobj.init()
async def dump_mem_obj(caller, xml):
# called from setup_form_memobj 'before_save'
form_vars = caller.data_objects['form_vars']
memobj_xml = await form_vars.getval('memobj_xml')
orig_memobj = set((memobj.get('name') for memobj in memobj_xml))
obj_names = caller.data_objects['obj_names']
col_names = caller.data_objects['col_names']
memobj = caller.data_objects['memobj']
memcol = caller.data_objects['memcol']
memobjs_xml = etree.Element('mem_objects')
all_memobj = memobj.select_many(where=[], order=[('seq', False)])
async for _ in all_memobj:
memobj_xml = etree.SubElement(memobjs_xml, 'mem_obj')
for col in memobj_cols:
await set_if_not_none(memobj_xml, memobj, col)
all_memcol = memcol.select_many(where=[], order=[('seq', False)])
async for _ in all_memcol:
memcol_xml = etree.SubElement(memobj_xml, 'mem_col')
for col in memcol_cols:
await set_if_not_none(memcol_xml, memcol, col)
obj_name = await memobj.getval('name')
if obj_name in orig_memobj:
await obj_names.init(init_vals={'name': obj_name})
orig_memobj.remove(obj_name)
else:
await obj_names.init(init_vals={
'name': obj_name,
'descr': await memobj.getval('descr'),
# 'seq': await memobj.getval('seq'), # seq is db_obj then mem_obj, so n/a
})
await obj_names.save()
all_cols = memcol.select_many(where=[], order=[])
async for _ in all_cols:
await col_names.init(init_vals={
'name': await memcol.getval('col_name'),
'descr': await memcol.getval('short_descr'),
'seq': await memcol.getval('seq')})
await col_names.save()
for deleted_obj in orig_memobj: # anything left has been deleted
await obj_names.init(init_vals={'name': deleted_obj})
await obj_names.delete()
await form_vars.setval('memobj_xml', memobjs_xml)
#-----------------------------------------------------------------------------
# io_parms
#-----------------------------------------------------------------------------
input_cols = ('name', 'type', 'target', 'required')
output_cols = ('name', 'type', 'source')
async def load_ioparms(caller, xml):
# called from setup_form_ioparams 'on_start_frame'
form_vars = caller.data_objects['form_vars']
inputs_xml = await form_vars.getval('inputs_xml')
outputs_xml = await form_vars.getval('outputs_xml')
inputs = caller.data_objects['inputs']
await inputs.delete_all()
for seq, input_xml in enumerate(inputs_xml):
# init_vals = {col: inputs.get_val_from_xml(col, input_xml.get(col))
# for col in input_cols}
init_vals = {}
for col in input_cols:
init_vals[col] = await inputs.get_val_from_xml(col, input_xml.get(col))
init_vals['seq'] = seq
await inputs.init(display=False, init_vals=init_vals)
await inputs.save()
outputs = caller.data_objects['outputs']
await outputs.delete_all()
for seq, output_xml in enumerate(outputs_xml):
# init_vals = {col: outputs.get_val_from_xml(col, output_xml.get(col))
# for col in output_cols}
init_vals = {}
for col in output_cols:
init_vals[col] = await outputs.get_val_from_xml(col, output_xml.get(col))
init_vals['seq'] = seq
await outputs.init(display=False, init_vals=init_vals)
await outputs.save()
async def dump_ioparms(caller, xml):
# called from setup_form_ioparams 'do_save'
form_vars = caller.data_objects['form_vars']
inputs_xml = etree.Element('input_params')
inputs = caller.data_objects['inputs']
all_inputs = inputs.select_many(where=[], order=[('seq', False)])
async for _ in all_inputs:
input_xml = etree.SubElement(inputs_xml, 'input_param')
for col in input_cols:
await set_if_not_none(input_xml, inputs, col)
await form_vars.setval('inputs_xml', inputs_xml)
outputs_xml = etree.Element('output_params')
outputs = caller.data_objects['outputs']
all_outputs = outputs.select_many(where=[], order=[('seq', False)])
async for _ in all_outputs:
output_xml = etree.SubElement(outputs_xml, 'output_param')
for col in output_cols:
await set_if_not_none(output_xml, outputs, col)
await form_vars.setval('outputs_xml', outputs_xml)
#-----------------------------------------------------------------------------
# inline forms
#-----------------------------------------------------------------------------
async def load_inline(caller, xml):
# called from setup_form_inline grid_frame 'on_start_frame'
inline_vars = caller.data_objects['inline_vars']
frame_vars = caller.data_objects['frame_vars']
if inline_vars.exists:
frame_xml = await inline_vars.getval('frame_xml')
init_vals={}
init_vals['toolbar_xml'] = frame_xml.find('toolbar')
init_vals['body_xml'] = frame_xml.find('body')
init_vals['buttonrow_xml'] = frame_xml.find('button_row')
init_vals['methods_xml'] = frame_xml.find('frame_methods')
init_vals['main_object'] = frame_xml.get('main_object')
init_vals['obj_descr'] = frame_xml.get('obj_descr')
else:
frame_xml = etree.Element('frame')
init_vals={}
init_vals['toolbar_xml'] = etree.SubElement(frame_xml, 'toolbar')
init_vals['body_xml'] = etree.SubElement(frame_xml, 'body')
init_vals['buttonrow_xml'] = etree.SubElement(frame_xml, 'button_row')
init_vals['methods_xml'] = etree.SubElement(frame_xml, 'frame_methods')
await frame_vars.init(init_vals=init_vals)
async def dump_inline(caller, xml):
    # called from setup_form_inline grid_frame 'before_save'
inline_vars = caller.data_objects['inline_vars']
frame_vars = caller.data_objects['frame_vars']
frame_xml = etree.Element('frame')
await set_if_not_none(frame_xml, frame_vars, 'main_object')
await set_if_not_none(frame_xml, frame_vars, 'obj_descr')
frame_xml.append(await frame_vars.getval('toolbar_xml'))
frame_xml.append(await frame_vars.getval('body_xml'))
frame_xml.append(await frame_vars.getval('buttonrow_xml'))
frame_xml.append(await frame_vars.getval('methods_xml'))
await inline_vars.setval('frame_xml', frame_xml)
#-----------------------------------------------------------------------------
# toolbar
#-----------------------------------------------------------------------------
tool_cols = ('type', 'label', 'tip', 'lng', 'name', 'obj_name', 'col_name', 'shortcut', 'action')
async def before_start_toolbar(caller, xml):
# called from setup_form_toolbar 'before_start_form'
# parent = caller.parent
# while True:
# if 'obj_names' in parent.data_objects:
# caller.data_objects['obj_names'] = parent.data_objects['obj_names']
# caller.data_objects['col_names'] = parent.data_objects['col_names']
# break
# parent = parent.parent
pass
async def load_toolbar(caller, xml):
# called from setup_form_frame.toolbar 'on_start_frame'
form_vars = caller.data_objects['form_vars']
toolbar_xml = await form_vars.getval('toolbar_xml')
if toolbar_xml is None:
toolbar_xml = etree.Element('toolbar')
await form_vars.setval('toolbar_xml', toolbar_xml)
await form_vars.setval('tb_template',
await form_vars.get_val_from_xml('tb_template', toolbar_xml.get('template')))
await form_vars.setval('tb_title',
await form_vars.get_val_from_xml('tb_title', toolbar_xml.get('title')))
await form_vars.save()
tool = caller.data_objects['tool']
await tool.delete_all()
for seq, tool_xml in enumerate(toolbar_xml):
# init_vals = {col: tool.get_val_from_xml(col, tool_xml.get(col))
# for col in tool_cols}
init_vals = {}
for col in tool_cols:
init_vals[col] = await tool.get_val_from_xml(col, tool_xml.get(col))
init_vals['seq'] = seq
await tool.init(display=False, init_vals=init_vals)
await tool.save()
async def dump_toolbar(caller, xml):
# called from setup_form_frame.toolbar 'before_save'
form_vars = caller.data_objects['form_vars']
tool = caller.data_objects['tool']
toolbar_xml = etree.Element('toolbar')
await set_if_not_none(toolbar_xml, form_vars, 'tb_template', 'template')
await set_if_not_none(toolbar_xml, form_vars, 'tb_title', 'title')
all_tools = tool.select_many(where=[], order=[('seq', False)])
async for _ in all_tools:
tool_xml = etree.SubElement(toolbar_xml, 'tool')
for col in tool_cols:
await set_if_not_none(tool_xml, tool, col)
await form_vars.setval('toolbar_xml', toolbar_xml)
#-----------------------------------------------------------------------------
# buttonrow
#-----------------------------------------------------------------------------
button_cols = ('btn_id', 'btn_label', 'lng', 'btn_default',
'btn_enabled', 'btn_validate', 'action', 'validation', 'help_msg')
async def load_buttonrow(caller, xml):
# called from setup_form_buttonrow 'on_start_frame'
form_vars = caller.data_objects['form_vars']
buttonrow_xml = await form_vars.getval('buttonrow_xml')
if buttonrow_xml is None:
buttonrow_xml = etree.Element('button_row')
await form_vars.setval('buttonrow_xml', buttonrow_xml)
await form_vars.setval('btn_template',
await form_vars.get_val_from_xml('btn_template', buttonrow_xml.get('template')))
await form_vars.save()
button = caller.data_objects['button']
await button.delete_all()
for seq, button_xml in enumerate(buttonrow_xml):
# init_vals = {col: button.get_val_from_xml(col, button_xml.get(col))
# for col in button_cols}
init_vals = {}
for col in button_cols:
init_vals[col] = await button.get_val_from_xml(col, button_xml.get(col))
init_vals['seq'] = seq
await button.init(display=False, init_vals=init_vals)
await button.save()
async def dump_buttonrow(caller, xml):
# called from setup_form_buttonrow 'before_save'
form_vars = caller.data_objects['form_vars']
button = caller.data_objects['button']
buttonrow_xml = etree.Element('button_row')
await set_if_not_none(buttonrow_xml, form_vars, 'btn_template', 'template')
all_buttons = button.select_many(where=[], order=[('seq', False)])
async for _ in all_buttons:
button_xml = etree.SubElement(buttonrow_xml, 'button')
for col in button_cols:
await set_if_not_none(button_xml, button, col)
await form_vars.setval('buttonrow_xml', buttonrow_xml)
#-----------------------------------------------------------------------------
# methods
#-----------------------------------------------------------------------------
method_cols = ('name', 'obj_name', 'action')
async def load_methods(caller, xml):
# called from setup_form_methods 'on_start_frame'
form_vars = caller.data_objects['form_vars']
method = caller.data_objects['method']
await method.delete_all()
methods_xml = await form_vars.getval('methods_xml')
if methods_xml is None:
methods_xml = etree.Element('frame_methods')
await form_vars.setval('methods_xml', methods_xml)
await form_vars.setval('method_template',
await form_vars.get_val_from_xml('method_template', methods_xml.get('template')))
await form_vars.save()
for seq, method_xml in enumerate(methods_xml):
# init_vals = {col: method.get_val_from_xml(col, method_xml.get(col))
# for col in method_cols}
init_vals = {}
for col in method_cols:
init_vals[col] = await method.get_val_from_xml(col, method_xml.get(col))
init_vals['seq'] = seq
await method.init(display=False, init_vals=init_vals)
await method.save()
async def dump_methods(caller, xml):
# called from setup_form_methods 'before_save'
form_vars = caller.data_objects['form_vars']
method = caller.data_objects['method']
methods_xml = etree.Element('frame_methods')
await set_if_not_none(methods_xml, form_vars, 'method_template', 'template')
all_methods = method.select_many(where=[], order=[('seq', False)])
async for _ in all_methods:
method_xml = etree.SubElement(methods_xml, 'method')
for col in method_cols:
await set_if_not_none(method_xml, method, col)
await form_vars.setval('methods_xml', methods_xml)
#-----------------------------------------------------------------------------
# body
#-----------------------------------------------------------------------------
body_cols = ('main_object', 'obj_descr', 'rowspan', 'colspan', 'value', 'obj_name', 'col_name',
'lng', 'height', 'pwd', 'readonly', 'choice', 'lookup', 'radio', 'before',
'form_dflt', 'validation', 'after', 'btn_id', 'btn_label', 'btn_enabled', 'btn_validate',
'action', 'help_msg', 'nb_label', 'subtype_obj', 'subtype_col', 'data_object', 'growable',
'num_grid_rows', 'cursor_name', 'form_name', 'auto_start', 'auto_startrow',
'toolbar', 'combo_type', 'group_name', 'member_name', 'pyfunc', 'prev', 'align', 'src', 'op', 'tgt')
async def before_start_body(caller, xml):
# called from setup_form_body 'before_start_form'
# parent = caller.parent
# while True:
# if 'obj_names' in parent.data_objects:
# caller.data_objects['obj_names'] = parent.data_objects['obj_names']
# caller.data_objects['col_names'] = parent.data_objects['col_names']
# break
# parent = parent.parent
pass
async def load_body(caller, xml):
# called from setup_form_body 'on_start_frame'
form_vars = caller.data_objects['form_vars']
body = caller.data_objects['body']
"""
obj_names = caller.data_objects['obj_names']
col_names = caller.data_objects['col_names']
all_obj = obj_names.select_many(where=[], order=[])
async for _ in all_obj:
print(obj_names)
all_col = col_names.select_many(where=[], order=[])
async for _ in all_col:
print(col_names)
print()
"""
"""
obj_name_fld = await body.getfld('obj_name')
obj_names = obj_name_fld.foreign_key['tgt_field'].db_obj
await obj_names.delete_all()
col_name_fld = await body.getfld('col_name')
col_names = col_name_fld.foreign_key['tgt_field'].db_obj
await col_names.delete_all()
dbobj_xml = await form_vars.getval('dbobj_xml')
for dbobj in dbobj_xml.iter('db_obj'):
async with db_session.get_connection() as db_mem_conn:
conn = db_mem_conn.db
sql = (
"SELECT row_id, short_descr FROM {}.db_tables WHERE table_name = '{}'"
.format(caller.company, dbobj.get('table_name'))
)
cur = await conn.exec_sql(sql)
table_id, descr = await cur.__anext__()
await obj_names.init(init_vals={
'name': dbobj.get('name'), 'descr': descr})
await obj_names.save()
sql = (
"SELECT col_name, short_descr FROM {}.db_columns "
"WHERE table_id = {} AND col_type != 'virt' "
"AND col_name NOT IN ('row_id', 'created_id', 'deleted_id') "
.format(caller.company, table_id)
)
async for col_name, descr in await conn.exec_sql(sql):
await col_names.init(init_vals={ #'obj_id': obj_row_id,
'name': col_name, 'descr': descr})
await col_names.save()
memobj_xml = await form_vars.getval('memobj_xml')
for memobj in memobj_xml.iter('mem_obj'):
await obj_names.init(init_vals={
'name': memobj.get('name'), 'descr': memobj.get('descr')})
await obj_names.save()
obj_row_id = await obj_names.getval('row_id')
for memcol in memobj.iter('mem_col'):
await col_names.init(init_vals={'obj_id': obj_row_id,
'name': memcol.get('col_name'), 'descr': memcol.get('short_descr')})
await col_names.save()
"""
body_xml = await form_vars.getval('body_xml')
if body_xml is None:
body_xml = etree.Element('body')
await form_vars.setval('body_xml', body_xml)
await body.delete_all(from_upd_on_save=True) # a trick to prevent running 'on_clean'
for seq, elem_xml in enumerate(body_xml):
init_vals = {}
init_vals['elem'] = elem_xml
init_vals['type'] = elem_xml.tag
init_vals['seq'] = seq
for fld in body.sub_types['type'][elem_xml.tag]:
val = await body.get_val_from_xml(fld.col_name, elem_xml.get(fld.col_name))
if val is not None:
init_vals[fld.col_name] = val
await body.init(display=False, init_vals=init_vals)
await body.save(from_upd_on_save=True) # a trick to prevent running 'on_clean'
# could make an alias, without gui link (cleaner?)
async def dump_body(caller, xml):
# called from setup_form_body 'before_save'
body = caller.data_objects['body']
body_xml = etree.Element('body')
all_body = body.select_many(where=[], order=[('seq', False)])
async for _ in all_body:
elem_xml = etree.SubElement(body_xml, await body.getval('type'))
for col in body_cols:
await set_if_not_none(elem_xml, body, col)
elem_xml[:] = (await body.getval('elem'))[:]
form_vars = caller.data_objects['form_vars']
await form_vars.setval('body_xml', body_xml)
#-----------------------------------------------------------------------------
# body_elem
#-----------------------------------------------------------------------------
async def load_body_elem(caller, xml):
# called from setup_form_body.grid_frame 'on_start_frame'
body = caller.data_objects['body']
# N.B. do not use this to store attributes - use sub_type columns instead
# only use it to store sub_elements
# P.S. it is ok to store copies of attributes in separate mem_objects,
# get the values from 'body' on loading, and replace the values
# in 'body' on dumping
elem_type = await body.getval('type')
elem_xml = await body.getval('elem')
if elem_type == 'grid':
grid_vars = caller.data_objects['grid_vars']
        init_vals = {}
init_vals['toolbar_xml'] = elem_xml.find('toolbar')
init_vals['columns_xml'] = elem_xml.find('cur_columns')
init_vals['filter_xml'] = elem_xml.find('cur_filter')
init_vals['sequence_xml'] = elem_xml.find('cur_sequence')
init_vals['methods_xml'] = elem_xml.find('grid_methods')
await grid_vars.init(init_vals=init_vals)
elif elem_type == 'grid_frame':
gridframe_vars = caller.data_objects['gridframe_vars']
        init_vals = {}
init_vals['main_object'] = await body.getval('main_object')
init_vals['obj_descr'] = await body.getval('obj_descr')
init_vals['toolbar_xml'] = elem_xml.find('toolbar')
init_vals['body_xml'] = elem_xml.find('body')
init_vals['buttonrow_xml'] = elem_xml.find('button_row')
init_vals['methods_xml'] = elem_xml.find('frame_methods')
await gridframe_vars.init(init_vals=init_vals)
elif elem_type == 'tree_frame':
treeframe_vars = caller.data_objects['treeframe_vars']
        init_vals = {}
init_vals['main_object'] = await body.getval('main_object')
init_vals['obj_descr'] = await body.getval('obj_descr')
init_vals['combo_type'] = await body.getval('combo_type')
init_vals['toolbar_xml'] = elem_xml.find('toolbar')
init_vals['body_xml'] = elem_xml.find('body')
init_vals['buttonrow_xml'] = elem_xml.find('button_row')
init_vals['methods_xml'] = elem_xml.find('frame_methods')
await treeframe_vars.init(init_vals=init_vals)
elif elem_type == 'subtype_frame':
subtype_vars = caller.data_objects['subtype_vars']
await subtype_vars.init(init_vals={
'subtype_obj': await body.getval('subtype_obj'),
'subtype_col': await body.getval('subtype_col'),
'lng': await body.getval('lng'),
})
subtypes = caller.data_objects['subtypes']
await subtypes.delete_all()
for subtype in elem_xml.iter('subtype_body'):
await subtypes.init(init_vals={
'subtype_id': subtype.get('subtype_id'),
'body_xml': subtype,
})
await subtypes.save()
async def dump_body_elem(caller, xml):
# called from setup_form_body.grid_frame 'before_save'
body = caller.data_objects['body']
elem_type = await body.getval('type')
elem_xml = await body.getval('elem')
if elem_type == 'grid':
grid_vars = caller.data_objects['grid_vars']
# await body.setval('data_object', await grid_vars.getval('data_object'))
# await body.setval('obj_descr', await grid_vars.getval('obj_descr'))
# await body.setval('growable', await grid_vars.getval('growable'))
# await body.setval('num_grid_rows', await grid_vars.getval('num_grid_rows'))
# await body.setval('cursor_name', await grid_vars.getval('cursor_name'))
# await body.setval('form_name', await grid_vars.getval('form_name'))
# await body.setval('auto_start', await grid_vars.getval('auto_start'))
# await body.setval('auto_startrow', await grid_vars.getval('auto_startrow'))
if elem_xml is None:
elem_xml = etree.Element(elem_type)
etree.SubElement(elem_xml, 'toolbar')
etree.SubElement(elem_xml, 'cur_columns')
etree.SubElement(elem_xml, 'cur_filter')
etree.SubElement(elem_xml, 'cur_sequence')
etree.SubElement(elem_xml, 'grid_methods')
elem_xml.find('toolbar')[:] = (await grid_vars.getval('toolbar_xml'))[:]
elem_xml.find('cur_columns')[:] = (await grid_vars.getval('columns_xml'))[:]
elem_xml.find('cur_filter')[:] = (await grid_vars.getval('filter_xml'))[:]
elem_xml.find('cur_sequence')[:] = (await grid_vars.getval('sequence_xml'))[:]
elem_xml.find('grid_methods')[:] = (await grid_vars.getval('methods_xml'))[:]
elif elem_type == 'grid_frame':
gridframe_vars = caller.data_objects['gridframe_vars']
# await body.setval('main_object', await gridframe_vars.getval('main_object'))
# await body.setval('obj_descr', await gridframe_vars.getval('obj_descr'))
if elem_xml is None:
elem_xml = etree.Element(elem_type)
etree.SubElement(elem_xml, 'toolbar')
etree.SubElement(elem_xml, 'body')
etree.SubElement(elem_xml, 'button_row')
etree.SubElement(elem_xml, 'frame_methods')
elem_xml.find('toolbar')[:] = (await gridframe_vars.getval('toolbar_xml'))[:]
elem_xml.find('body')[:] = (await gridframe_vars.getval('body_xml'))[:]
elem_xml.find('button_row')[:] = (await gridframe_vars.getval('buttonrow_xml'))[:]
elem_xml.find('frame_methods')[:] = (await gridframe_vars.getval('methods_xml'))[:]
elif elem_type == 'tree_frame':
treeframe_vars = caller.data_objects['treeframe_vars']
await body.setval('main_object', await treeframe_vars.getval('main_object'))
await body.setval('obj_descr', await treeframe_vars.getval('obj_descr'))
await body.setval('combo_type', await treeframe_vars.getval('combo_type'))
if elem_xml is None:
elem_xml = etree.Element(elem_type)
etree.SubElement(elem_xml, 'toolbar')
etree.SubElement(elem_xml, 'body')
etree.SubElement(elem_xml, 'button_row')
etree.SubElement(elem_xml, 'frame_methods')
elem_xml.find('toolbar')[:] = (await treeframe_vars.getval('toolbar_xml'))[:]
elem_xml.find('body')[:] = (await treeframe_vars.getval('body_xml'))[:]
elem_xml.find('button_row')[:] = (await treeframe_vars.getval('buttonrow_xml'))[:]
elem_xml.find('frame_methods')[:] = (await treeframe_vars.getval('methods_xml'))[:]
elif elem_type == 'subtype_frame':
subtype_vars = caller.data_objects['subtype_vars']
await body.setval('subtype_obj', await subtype_vars.getval('subtype_obj'))
await body.setval('subtype_col', await subtype_vars.getval('subtype_col'))
await body.setval('lng', await subtype_vars.getval('lng'))
if elem_xml is None:
elem_xml = etree.Element(elem_type)
subtypes = caller.data_objects['subtypes']
subtypes_xml = etree.Element('subtypes_temp')
all_subtypes = subtypes.select_many(where=[], order=[('subtype_id', False)])
async for _ in all_subtypes:
subtype_xml = etree.SubElement(subtypes_xml, 'subtype_body')
await set_if_not_none(subtype_xml, subtypes, 'subtype_id')
subtype_xml[:] = (await subtypes.getval('body_xml'))[:]
elem_xml[:] = subtypes_xml[:]
elif elem_xml is None:
elem_xml = etree.Element(elem_type)
await body.setval('elem', elem_xml)
async def set_if_not_none(elem_xml, db_obj, col_name, attr_name=None):
# create attribute on xml element, but only if not None or default
xml_val = await db_obj.get_val_for_xml(col_name) # returns None if None or equal to default
if xml_val is not None:
if attr_name is None: # if not specified, use col_name
attr_name = col_name
elem_xml.set(attr_name, xml_val)
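# Illustrative sketch (not part of the original module): set_if_not_none keeps
# the dumped XML minimal by only writing attributes that differ from the
# column default, i.e. the same pattern as above applied to a single column:
#
#   elem_xml = etree.Element('grid')
#   xml_val = await body.get_val_for_xml('num_grid_rows')  # None if default
#   if xml_val is not None:
#       elem_xml.set('num_grid_rows', xml_val)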
|
[
"common.AibError",
"lxml.etree.tostring",
"lxml.etree.Element",
"lxml.etree.SubElement"
] |
[((735, 756), 'lxml.etree.Element', 'etree.Element', (['"""form"""'], {}), "('form')\n", (748, 756), False, 'from lxml import etree\n'), ((823, 863), 'lxml.etree.SubElement', 'etree.SubElement', (['form_xml', '"""db_objects"""'], {}), "(form_xml, 'db_objects')\n", (839, 863), False, 'from lxml import etree\n'), ((868, 909), 'lxml.etree.SubElement', 'etree.SubElement', (['form_xml', '"""mem_objects"""'], {}), "(form_xml, 'mem_objects')\n", (884, 909), False, 'from lxml import etree\n'), ((914, 956), 'lxml.etree.SubElement', 'etree.SubElement', (['form_xml', '"""input_params"""'], {}), "(form_xml, 'input_params')\n", (930, 956), False, 'from lxml import etree\n'), ((961, 1004), 'lxml.etree.SubElement', 'etree.SubElement', (['form_xml', '"""output_params"""'], {}), "(form_xml, 'output_params')\n", (977, 1004), False, 'from lxml import etree\n'), ((1017, 1052), 'lxml.etree.SubElement', 'etree.SubElement', (['form_xml', '"""frame"""'], {}), "(form_xml, 'frame')\n", (1033, 1052), False, 'from lxml import etree\n'), ((1057, 1091), 'lxml.etree.SubElement', 'etree.SubElement', (['frame', '"""toolbar"""'], {}), "(frame, 'toolbar')\n", (1073, 1091), False, 'from lxml import etree\n'), ((1096, 1127), 'lxml.etree.SubElement', 'etree.SubElement', (['frame', '"""body"""'], {}), "(frame, 'body')\n", (1112, 1127), False, 'from lxml import etree\n'), ((1132, 1169), 'lxml.etree.SubElement', 'etree.SubElement', (['frame', '"""button_row"""'], {}), "(frame, 'button_row')\n", (1148, 1169), False, 'from lxml import etree\n'), ((1174, 1214), 'lxml.etree.SubElement', 'etree.SubElement', (['frame', '"""frame_methods"""'], {}), "(frame, 'frame_methods')\n", (1190, 1214), False, 'from lxml import etree\n'), ((6172, 6193), 'lxml.etree.Element', 'etree.Element', (['"""form"""'], {}), "('form')\n", (6185, 6193), False, 'from lxml import etree\n'), ((6765, 6800), 'lxml.etree.SubElement', 'etree.SubElement', (['form_xml', '"""frame"""'], {}), "(form_xml, 'frame')\n", (6781, 6800), False, 'from lxml import etree\n'), ((10263, 10290), 'lxml.etree.Element', 'etree.Element', (['"""db_objects"""'], {}), "('db_objects')\n", (10276, 10290), False, 'from lxml import etree\n'), ((14999, 15027), 'lxml.etree.Element', 'etree.Element', (['"""mem_objects"""'], {}), "('mem_objects')\n", (15012, 15027), False, 'from lxml import etree\n'), ((18273, 18302), 'lxml.etree.Element', 'etree.Element', (['"""input_params"""'], {}), "('input_params')\n", (18286, 18302), False, 'from lxml import etree\n'), ((18672, 18702), 'lxml.etree.Element', 'etree.Element', (['"""output_params"""'], {}), "('output_params')\n", (18685, 18702), False, 'from lxml import etree\n'), ((20567, 20589), 'lxml.etree.Element', 'etree.Element', (['"""frame"""'], {}), "('frame')\n", (20580, 20589), False, 'from lxml import etree\n'), ((23004, 23028), 'lxml.etree.Element', 'etree.Element', (['"""toolbar"""'], {}), "('toolbar')\n", (23017, 23028), False, 'from lxml import etree\n'), ((24995, 25022), 'lxml.etree.Element', 'etree.Element', (['"""button_row"""'], {}), "('button_row')\n", (25008, 25022), False, 'from lxml import etree\n'), ((26840, 26870), 'lxml.etree.Element', 'etree.Element', (['"""frame_methods"""'], {}), "('frame_methods')\n", (26853, 26870), False, 'from lxml import etree\n'), ((31966, 31987), 'lxml.etree.Element', 'etree.Element', (['"""body"""'], {}), "('body')\n", (31979, 31987), False, 'from lxml import etree\n'), ((7339, 7380), 'lxml.etree.SubElement', 'etree.SubElement', (['form_xml', '"""inline_form"""'], {}), "(form_xml, 'inline_form')\n", (7355, 
7380), False, 'from lxml import etree\n'), ((10409, 10447), 'lxml.etree.SubElement', 'etree.SubElement', (['dbobjs_xml', '"""db_obj"""'], {}), "(dbobjs_xml, 'db_obj')\n", (10425, 10447), False, 'from lxml import etree\n'), ((15150, 15190), 'lxml.etree.SubElement', 'etree.SubElement', (['memobjs_xml', '"""mem_obj"""'], {}), "(memobjs_xml, 'mem_obj')\n", (15166, 15190), False, 'from lxml import etree\n'), ((18467, 18510), 'lxml.etree.SubElement', 'etree.SubElement', (['inputs_xml', '"""input_param"""'], {}), "(inputs_xml, 'input_param')\n", (18483, 18510), False, 'from lxml import etree\n'), ((18873, 18918), 'lxml.etree.SubElement', 'etree.SubElement', (['outputs_xml', '"""output_param"""'], {}), "(outputs_xml, 'output_param')\n", (18889, 18918), False, 'from lxml import etree\n'), ((19955, 19977), 'lxml.etree.Element', 'etree.Element', (['"""frame"""'], {}), "('frame')\n", (19968, 19977), False, 'from lxml import etree\n'), ((20034, 20072), 'lxml.etree.SubElement', 'etree.SubElement', (['frame_xml', '"""toolbar"""'], {}), "(frame_xml, 'toolbar')\n", (20050, 20072), False, 'from lxml import etree\n'), ((20105, 20140), 'lxml.etree.SubElement', 'etree.SubElement', (['frame_xml', '"""body"""'], {}), "(frame_xml, 'body')\n", (20121, 20140), False, 'from lxml import etree\n'), ((20178, 20219), 'lxml.etree.SubElement', 'etree.SubElement', (['frame_xml', '"""button_row"""'], {}), "(frame_xml, 'button_row')\n", (20194, 20219), False, 'from lxml import etree\n'), ((20255, 20299), 'lxml.etree.SubElement', 'etree.SubElement', (['frame_xml', '"""frame_methods"""'], {}), "(frame_xml, 'frame_methods')\n", (20271, 20299), False, 'from lxml import etree\n'), ((21965, 21989), 'lxml.etree.Element', 'etree.Element', (['"""toolbar"""'], {}), "('toolbar')\n", (21978, 21989), False, 'from lxml import etree\n'), ((23293, 23330), 'lxml.etree.SubElement', 'etree.SubElement', (['toolbar_xml', '"""tool"""'], {}), "(toolbar_xml, 'tool')\n", (23309, 23330), False, 'from lxml import etree\n'), ((24034, 24061), 'lxml.etree.Element', 'etree.Element', (['"""button_row"""'], {}), "('button_row')\n", (24047, 24061), False, 'from lxml import etree\n'), ((25227, 25268), 'lxml.etree.SubElement', 'etree.SubElement', (['buttonrow_xml', '"""button"""'], {}), "(buttonrow_xml, 'button')\n", (25243, 25268), False, 'from lxml import etree\n'), ((25957, 25987), 'lxml.etree.Element', 'etree.Element', (['"""frame_methods"""'], {}), "('frame_methods')\n", (25970, 25987), False, 'from lxml import etree\n'), ((27076, 27115), 'lxml.etree.SubElement', 'etree.SubElement', (['methods_xml', '"""method"""'], {}), "(methods_xml, 'method')\n", (27092, 27115), False, 'from lxml import etree\n'), ((31014, 31035), 'lxml.etree.Element', 'etree.Element', (['"""body"""'], {}), "('body')\n", (31027, 31035), False, 'from lxml import etree\n'), ((7895, 7919), 'lxml.etree.tostring', 'etree.tostring', (['form_xml'], {}), '(form_xml)\n', (7909, 7919), False, 'from lxml import etree\n'), ((8017, 8058), 'common.AibError', 'AibError', ([], {'head': '"""XmlError"""', 'body': 'e.args[0]'}), "(head='XmlError', body=e.args[0])\n", (8025, 8058), False, 'from common import AibError\n'), ((15417, 15456), 'lxml.etree.SubElement', 'etree.SubElement', (['memobj_xml', '"""mem_col"""'], {}), "(memobj_xml, 'mem_col')\n", (15433, 15456), False, 'from lxml import etree\n'), ((36392, 36416), 'lxml.etree.Element', 'etree.Element', (['elem_type'], {}), '(elem_type)\n', (36405, 36416), False, 'from lxml import etree\n'), ((36429, 36466), 'lxml.etree.SubElement', 'etree.SubElement', 
(['elem_xml', '"""toolbar"""'], {}), "(elem_xml, 'toolbar')\n", (36445, 36466), False, 'from lxml import etree\n'), ((36479, 36520), 'lxml.etree.SubElement', 'etree.SubElement', (['elem_xml', '"""cur_columns"""'], {}), "(elem_xml, 'cur_columns')\n", (36495, 36520), False, 'from lxml import etree\n'), ((36533, 36573), 'lxml.etree.SubElement', 'etree.SubElement', (['elem_xml', '"""cur_filter"""'], {}), "(elem_xml, 'cur_filter')\n", (36549, 36573), False, 'from lxml import etree\n'), ((36586, 36628), 'lxml.etree.SubElement', 'etree.SubElement', (['elem_xml', '"""cur_sequence"""'], {}), "(elem_xml, 'cur_sequence')\n", (36602, 36628), False, 'from lxml import etree\n'), ((36641, 36683), 'lxml.etree.SubElement', 'etree.SubElement', (['elem_xml', '"""grid_methods"""'], {}), "(elem_xml, 'grid_methods')\n", (36657, 36683), False, 'from lxml import etree\n'), ((37427, 37451), 'lxml.etree.Element', 'etree.Element', (['elem_type'], {}), '(elem_type)\n', (37440, 37451), False, 'from lxml import etree\n'), ((37464, 37501), 'lxml.etree.SubElement', 'etree.SubElement', (['elem_xml', '"""toolbar"""'], {}), "(elem_xml, 'toolbar')\n", (37480, 37501), False, 'from lxml import etree\n'), ((37514, 37548), 'lxml.etree.SubElement', 'etree.SubElement', (['elem_xml', '"""body"""'], {}), "(elem_xml, 'body')\n", (37530, 37548), False, 'from lxml import etree\n'), ((37561, 37601), 'lxml.etree.SubElement', 'etree.SubElement', (['elem_xml', '"""button_row"""'], {}), "(elem_xml, 'button_row')\n", (37577, 37601), False, 'from lxml import etree\n'), ((37614, 37657), 'lxml.etree.SubElement', 'etree.SubElement', (['elem_xml', '"""frame_methods"""'], {}), "(elem_xml, 'frame_methods')\n", (37630, 37657), False, 'from lxml import etree\n'), ((38407, 38431), 'lxml.etree.Element', 'etree.Element', (['elem_type'], {}), '(elem_type)\n', (38420, 38431), False, 'from lxml import etree\n'), ((38444, 38481), 'lxml.etree.SubElement', 'etree.SubElement', (['elem_xml', '"""toolbar"""'], {}), "(elem_xml, 'toolbar')\n", (38460, 38481), False, 'from lxml import etree\n'), ((38494, 38528), 'lxml.etree.SubElement', 'etree.SubElement', (['elem_xml', '"""body"""'], {}), "(elem_xml, 'body')\n", (38510, 38528), False, 'from lxml import etree\n'), ((38541, 38581), 'lxml.etree.SubElement', 'etree.SubElement', (['elem_xml', '"""button_row"""'], {}), "(elem_xml, 'button_row')\n", (38557, 38581), False, 'from lxml import etree\n'), ((38594, 38637), 'lxml.etree.SubElement', 'etree.SubElement', (['elem_xml', '"""frame_methods"""'], {}), "(elem_xml, 'frame_methods')\n", (38610, 38637), False, 'from lxml import etree\n'), ((39469, 39499), 'lxml.etree.Element', 'etree.Element', (['"""subtypes_temp"""'], {}), "('subtypes_temp')\n", (39482, 39499), False, 'from lxml import etree\n'), ((39370, 39394), 'lxml.etree.Element', 'etree.Element', (['elem_type'], {}), '(elem_type)\n', (39383, 39394), False, 'from lxml import etree\n'), ((39648, 39694), 'lxml.etree.SubElement', 'etree.SubElement', (['subtypes_xml', '"""subtype_body"""'], {}), "(subtypes_xml, 'subtype_body')\n", (39664, 39694), False, 'from lxml import etree\n'), ((39918, 39942), 'lxml.etree.Element', 'etree.Element', (['elem_type'], {}), '(elem_type)\n', (39931, 39942), False, 'from lxml import etree\n')]
|
from .command import Command
from .connection import Connection
from .client_base import ClientBase
from amcp_pylib.response import ResponseBase, ResponseFactory
class Client(ClientBase):
""" Simple connection client class. """
def connect(self, host: str = "127.0.0.1", port: int = 5250):
if not self.connection:
self.connection = Connection(host, port)
def send(self, command: Command) -> ResponseBase:
return self.send_raw(bytes(command))
def send_raw(self, data: bytes) -> ResponseBase:
self.connection.send(data)
return self.process_response()
def process_response(self) -> ResponseBase:
data = self.connection.receive()
return ResponseFactory.create_from_bytes(data)
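# Minimal usage sketch (assumes an AMCP-compatible server on the default
# 127.0.0.1:5250; the raw byte payload is illustrative, not part of this API):
#
#   client = Client()
#   client.connect()
#   response = client.send_raw(b"VERSION\r\n")
#   print(response)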
|
[
"amcp_pylib.response.ResponseFactory.create_from_bytes"
] |
[((724, 763), 'amcp_pylib.response.ResponseFactory.create_from_bytes', 'ResponseFactory.create_from_bytes', (['data'], {}), '(data)\n', (757, 763), False, 'from amcp_pylib.response import ResponseBase, ResponseFactory\n')]
|
from lebanese_channels.channel import Channel
# noinspection PyUnresolvedReferences
from lebanese_channels.services import *
CHANNEL_LIST = []
for cls in Channel.__subclasses__():
CHANNEL_LIST.append(cls())
CHANNEL_LIST = sorted(CHANNEL_LIST, key=lambda x: x.get_name())
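# Usage sketch: every Channel subclass pulled in by the wildcard import above
# is auto-registered, so consumers can simply iterate the sorted list:
#
#   for channel in CHANNEL_LIST:
#       print(channel.get_name())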
|
[
"lebanese_channels.channel.Channel.__subclasses__"
] |
[((156, 180), 'lebanese_channels.channel.Channel.__subclasses__', 'Channel.__subclasses__', ([], {}), '()\n', (178, 180), False, 'from lebanese_channels.channel import Channel\n')]
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from layer import GATLayer
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
class GAT(nn.Module):
def __init__(self, num_features, hidden_size, embedding_size, alpha):
super(GAT, self).__init__()
self.hidden_size = hidden_size
self.embedding_size = embedding_size
self.alpha = alpha
self.conv1 = GATLayer(num_features, hidden_size, alpha)
self.conv2 = GATLayer(hidden_size, embedding_size, alpha)
def forward(self, x, adj, M):
h = self.conv1(x, adj, M)
h = self.conv2(h, adj, M)
z = F.normalize(h, p=2, dim=1)
A_pred = self.dot_product_decode(z)
return A_pred, z
def dot_product_decode(self, Z):
A_pred = torch.sigmoid(torch.matmul(Z, Z.t()))
return A_pred
class pseudo_gat(nn.Module):
    def __init__(self, num_features, hidden_size, num_points):
        super(pseudo_gat, self).__init__()
        self.w1 = nn.Linear(num_features, hidden_size)
        # fixed: the original referenced the undefined globals num_points and
        # hidden_dims; num_points is now a constructor argument and the
        # identity embedding is sized to hidden_size
        self.iden = nn.Parameter(data=torch.randn((num_points, hidden_size), dtype=torch.float).to(device), requires_grad=True)
    def forward(self, x, adj, M):
        z = self.w1(x) + self.iden  # fixed: was a bare `iden`
A_pred = torch.sigmoid(torch.matmul(z, z.t()))
return A_pred, z
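# Shape sketch for the GAT autoencoder above (illustrative dense inputs; the
# exact tensors GATLayer expects for adj/M are an assumption here):
if __name__ == "__main__":
    N, F_in = 8, 16
    x = torch.randn(N, F_in, device=device)
    adj = torch.ones(N, N, device=device)
    M = torch.ones(N, N, device=device)
    model = GAT(num_features=F_in, hidden_size=32, embedding_size=8, alpha=0.2).to(device)
    A_pred, z = model(x, adj, M)
    print(A_pred.shape, z.shape)  # expected: torch.Size([8, 8]) twice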
|
[
"torch.randn",
"torch.cuda.is_available",
"torch.nn.Linear",
"layer.GATLayer",
"torch.nn.functional.normalize"
] |
[((127, 152), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (150, 152), False, 'import torch\n'), ((431, 473), 'layer.GATLayer', 'GATLayer', (['num_features', 'hidden_size', 'alpha'], {}), '(num_features, hidden_size, alpha)\n', (439, 473), False, 'from layer import GATLayer\n'), ((495, 539), 'layer.GATLayer', 'GATLayer', (['hidden_size', 'embedding_size', 'alpha'], {}), '(hidden_size, embedding_size, alpha)\n', (503, 539), False, 'from layer import GATLayer\n'), ((655, 681), 'torch.nn.functional.normalize', 'F.normalize', (['h'], {'p': '(2)', 'dim': '(1)'}), '(h, p=2, dim=1)\n', (666, 681), True, 'import torch.nn.functional as F\n'), ((1009, 1045), 'torch.nn.Linear', 'nn.Linear', (['num_features', 'hidden_size'], {}), '(num_features, hidden_size)\n', (1018, 1045), True, 'import torch.nn as nn\n'), ((1086, 1143), 'torch.randn', 'torch.randn', (['(num_points, hidden_dims)'], {'dtype': 'torch.float'}), '((num_points, hidden_dims), dtype=torch.float)\n', (1097, 1143), False, 'import torch\n')]
|
# Generated by Django 2.1.12 on 2019-10-14 14:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('frontend', '0002_auto_20191010_0025'),
]
operations = [
migrations.AddField(
model_name='page',
name='js',
            field=models.TextField(blank=True, help_text='JS that will be placed inside a <code>script</code> tag'),
),
]
|
[
"django.db.models.TextField"
] |
[((330, 401), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'help_text': '"""JS that will be placed inside a <code>script</code> tag"""'}), "(blank=True, help_text='JS that will be placed inside a <code>script</code> tag')\n", (346, 401), False, 'from django.db import migrations, models\n')]
|
import time
from functools import partial
from pathlib import Path
import toml
import torch
import colorful
import numpy as np
import matplotlib.pyplot as plt
from joblib import Parallel, delayed
from torch.cuda.amp import GradScaler
from sklearn.metrics import DetCurveDisplay
import src.util.metrics as metrics
from src.util import visualization
from src.util.acoustic_utils import stft, istft
from src.util.utils import prepare_empty_dir, ExecutionTime
plt.switch_backend('agg')
class BaseTrainer:
def __init__(self,
dist,
rank,
config,
resume: bool,
model,
loss_function,
optimizer,
scheduler):
self.color_tool = colorful
self.color_tool.use_style("solarized")
self.model = model
self.optimizer = optimizer
self.scheduler = scheduler
self.loss_function = loss_function
# DistributedDataParallel (DDP)
self.rank = rank
self.dist = dist
# Automatic mixed precision (AMP)
self.use_amp = config["meta"]["use_amp"]
self.scaler = GradScaler(enabled=self.use_amp)
# Acoustics
self.acoustic_config = config["acoustic"]
# Supported STFT
n_fft = self.acoustic_config["n_fft"]
hop_length = self.acoustic_config["hop_length"]
win_length = self.acoustic_config["win_length"]
center = self.acoustic_config["center"]
self.torch_stft = partial(stft, n_fft=n_fft, hop_length=hop_length, win_length=win_length,
device=self.rank, center=center)
self.istft = partial(istft, n_fft=n_fft, hop_length=hop_length, win_length=win_length,
device=self.rank, center=center)
# Trainer.train in config
self.train_config = config["trainer"]["train"]
self.epochs = self.train_config["epochs"]
self.save_checkpoint_interval = self.train_config["save_checkpoint_interval"]
self.clip_grad_norm_value = self.train_config["clip_grad_norm_value"]
assert self.save_checkpoint_interval >= 1
# Trainer.validation in config
self.validation_config = config["trainer"]["validation"]
self.validation_interval = self.validation_config["validation_interval"]
self.save_max_metric_score = self.validation_config["save_max_metric_score"]
assert self.validation_interval >= 1
# Trainer.visualization in config
self.visualization_config = config["trainer"]["visualization"]
# In the 'train.py' file, if the 'resume' item is True, we will update the following args:
self.start_epoch = 1
self.best_score = -np.inf if self.save_max_metric_score else np.inf
self.save_dir = Path(config["meta"]["save_dir"]).expanduser().absolute() / config["meta"]["experiment_name"]
self.checkpoints_dir = self.save_dir / "checkpoints"
self.logs_dir = self.save_dir / "logs"
self.thresholds = {'eer': 0,
'fpr_1': 0,
'fnr_1': 0}
if resume:
self._resume_checkpoint()
if config["meta"]["preloaded_model_path"]:
            self._preload_model(Path(config["meta"]["preloaded_model_path"]))
if self.rank == 0:
prepare_empty_dir([self.checkpoints_dir, self.logs_dir], resume=resume)
self.writer = visualization.writer(self.logs_dir.as_posix())
self.writer.add_text(
tag="Configuration",
text_string=f"<pre> \n{toml.dumps(config)} \n</pre>",
global_step=1
)
print(self.color_tool.cyan("The configurations are as follows: "))
print(self.color_tool.cyan("=" * 40))
print(self.color_tool.cyan(toml.dumps(config)[:-1])) # except "\n"
print(self.color_tool.cyan("=" * 40))
with open((self.save_dir / f"{time.strftime('%Y-%m-%d %H:%M:%S')}.toml").as_posix(), "w") as handle:
toml.dump(config, handle)
self._print_networks([self.model])
def _preload_model(self, model_path):
"""
        Preload model parameters (in "*.tar" format) at the start of the experiment.
Args:
model_path (Path): The file path of the *.tar file
"""
model_path = model_path.expanduser().absolute()
        assert model_path.exists(), f"The file {model_path.as_posix()} does not exist, please check the path."
map_location = {'cuda:%d' % 0: 'cuda:%d' % self.rank}
model_checkpoint = torch.load(model_path.as_posix(), map_location=map_location)
self.model.load_state_dict(model_checkpoint["model"], strict=False)
if self.rank == 0:
print(f"Model preloaded successfully from {model_path.as_posix()}.")
def _resume_checkpoint(self):
"""
Resume experiment from the latest checkpoint.
"""
latest_model_path = self.checkpoints_dir.expanduser().absolute() / "latest_model.tar"
        assert latest_model_path.exists(), f"{latest_model_path} does not exist, cannot load the latest checkpoint."
self.dist.barrier() # see https://stackoverflow.com/questions/59760328/how-does-torch-distributed-barrier-work
map_location = {'cuda:%d' % 0: 'cuda:%d' % self.rank}
checkpoint = torch.load(latest_model_path.as_posix(), map_location=map_location)
self.start_epoch = checkpoint["epoch"] + 1
self.best_score = checkpoint["best_score"]
self.optimizer.load_state_dict(checkpoint["optimizer"])
self.scheduler.load_state_dict(checkpoint["scheduler"])
self.scaler.load_state_dict(checkpoint["scaler"])
self.model.load_state_dict(checkpoint["model"])
self.thresholds = checkpoint["thresholds"]
if self.rank == 0:
print(f"Model checkpoint loaded. Training will begin at {self.start_epoch} epoch.")
def _save_checkpoint(self, epoch, is_best_epoch=False):
"""
Save checkpoint to "<save_dir>/<config name>/checkpoints" directory, which consists of:
- the epoch number
- the best metric score in history
- the optimizer parameters
- the model parameters
Args:
            is_best_epoch (bool): If the model achieved the best metric score in the current
                epoch (is_best_epoch=True), the checkpoint will be saved as "<save_dir>/checkpoints/best_model.tar".
"""
print(f"\t Saving {epoch} epoch model checkpoint...")
state_dict = {
"epoch": epoch,
"best_score": self.best_score,
"optimizer": self.optimizer.state_dict(),
"scheduler": self.scheduler.state_dict(),
"scaler": self.scaler.state_dict(),
"model": self.model.state_dict(),
"thresholds": self.thresholds
}
# "latest_model.tar"
# Contains all checkpoint information, including the optimizer parameters, the model parameters, etc.
# New checkpoint will overwrite the older one.
torch.save(state_dict, (self.checkpoints_dir / "latest_model.tar").as_posix())
# "model_{epoch_number}.tar"
        # Contains all checkpoint information, like "latest_model.tar". However, newer checkpoints will not overwrite older ones.
torch.save(state_dict, (self.checkpoints_dir / f"model_{str(epoch).zfill(4)}.tar").as_posix())
        # If the model gets the best metric score (is_best_epoch=True) in the current epoch,
# the model checkpoint will be saved as "best_model.tar."
# The newer best-scored checkpoint will overwrite the older one.
if is_best_epoch:
print(self.color_tool.red(f"\t Found a best score in the {epoch} epoch, saving..."))
torch.save(state_dict, (self.checkpoints_dir / "best_model.tar").as_posix())
def _is_best_epoch(self, score, save_max_metric_score=True):
"""
Check if the current model got the best metric score
"""
if save_max_metric_score and score >= self.best_score:
self.best_score = score
return True
elif not save_max_metric_score and score <= self.best_score:
self.best_score = score
return True
else:
return False
@staticmethod
def _print_networks(models: list):
print(f"This project contains {len(models)} models, the number of the parameters is: ")
params_of_all_networks = 0
for idx, model in enumerate(models, start=1):
params_of_network = 0
for param in model.parameters():
params_of_network += param.numel()
print(f"\tNetwork {idx}: {params_of_network / 1e6} million.")
params_of_all_networks += params_of_network
print(f"The amount of parameters in the project is {params_of_all_networks / 1e6} million.")
def _set_models_to_train_mode(self):
self.model.train()
def _set_models_to_eval_mode(self):
self.model.eval()
@staticmethod
def get_thresholds(labels, scores):
eer_t, eer, fpr_1_t, fpr_1_fnr, fnr_1_t, fnr_1_fpr = metrics.compute_thresholds(labels, scores)
return eer_t, fpr_1_t, fnr_1_t, eer, fpr_1_fnr, fnr_1_fpr
def metrics_visualization(self, labels, predicted, metrics_list, epoch):
"""
        Compute metrics on the validation dataset in parallel.
"""
assert "ROC_AUC" in metrics_list
# Check if the metric is registered in "util.metrics" file.
for i in metrics_list:
assert i in metrics.REGISTERED_METRICS.keys(), f"{i} is not registered, please check 'util.metrics' file."
fpr, tpr, thresholds = metrics.roc_curve(labels.reshape(-1), predicted.reshape(-1))
roc_auc_mean = 0
for metric_name in metrics_list:
mean_score = metrics.REGISTERED_METRICS[metric_name](fpr, tpr)
# Add the mean value of the metric to tensorboard
self.writer.add_scalar(f"Validation/{metric_name}", mean_score, epoch)
if metric_name == "ROC_AUC":
roc_auc_mean = mean_score
fig, axes = plt.subplots(1, 1, figsize=(6, 6))
display = DetCurveDisplay(fpr=fpr, fnr=1 - tpr, estimator_name=f'ROC_AUC = {roc_auc_mean}')
display.plot(axes)
self.writer.add_figure(f"DetCurve", fig, epoch)
eer_t, fpr_1_t, fnr_1_t, _, _, _ = self.get_thresholds(labels.reshape(-1), predicted.reshape(-1))
f1, _, _, precision, recall = metrics.get_f1((predicted.reshape(-1) > eer_t).int(), labels.reshape(-1))
self.writer.add_scalar(f"Validation/F1", f1, epoch)
self.writer.add_scalar(f"Validation/Precision", precision, epoch)
self.writer.add_scalar(f"Validation/recall", recall, epoch)
self.thresholds = {'eer': eer_t,
'fpr_1': fpr_1_t,
'fnr_1': fnr_1_t}
return roc_auc_mean
def train(self):
for epoch in range(self.start_epoch, self.epochs + 1):
if self.rank == 0:
print(self.color_tool.yellow(f"{'=' * 15} {epoch} epoch {'=' * 15}"))
print("[0 seconds] Begin training...")
timer = ExecutionTime()
self._set_models_to_train_mode()
self._train_epoch(epoch)
# Only use the first GPU (process) to the validation.
if self.rank == 0:
if epoch % self.validation_interval == 0:
print(f"[{timer.duration()} seconds] Training has finished, validation is in progress...")
if self.save_checkpoint_interval != 0 and (epoch % self.save_checkpoint_interval == 0):
self._save_checkpoint(epoch)
self._set_models_to_eval_mode()
metric_score = self._validation_epoch(epoch)
if self.save_checkpoint_interval != 0 and (epoch % self.save_checkpoint_interval == 0):
self._save_checkpoint(epoch)
if self._is_best_epoch(metric_score, save_max_metric_score=self.save_max_metric_score):
self._save_checkpoint(epoch, is_best_epoch=True)
print(f"[{timer.duration()} seconds] This epoch has finished.")
def _train_epoch(self, epoch):
raise NotImplementedError
def _validation_epoch(self, epoch):
raise NotImplementedError
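# Subclassing sketch (hedged; the dataloader attribute and metric collection
# below are illustrative, not defined by BaseTrainer itself):
#
#   class Trainer(BaseTrainer):
#       def _train_epoch(self, epoch):
#           for batch in self.train_dataloader:      # assumed attribute
#               ...                                  # forward/backward/step
#       def _validation_epoch(self, epoch):
#           ...                                      # collect labels/scores
#           return self.metrics_visualization(labels, scores,
#                                             ["ROC_AUC"], epoch)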
|
[
"matplotlib.pyplot.switch_backend",
"functools.partial",
"src.util.metrics.REGISTERED_METRICS.keys",
"toml.dumps",
"src.util.utils.ExecutionTime",
"time.strftime",
"src.util.metrics.compute_thresholds",
"pathlib.Path",
"torch.cuda.amp.GradScaler",
"sklearn.metrics.DetCurveDisplay",
"matplotlib.pyplot.subplots",
"toml.dump",
"src.util.utils.prepare_empty_dir"
] |
[((460, 485), 'matplotlib.pyplot.switch_backend', 'plt.switch_backend', (['"""agg"""'], {}), "('agg')\n", (478, 485), True, 'import matplotlib.pyplot as plt\n'), ((1174, 1206), 'torch.cuda.amp.GradScaler', 'GradScaler', ([], {'enabled': 'self.use_amp'}), '(enabled=self.use_amp)\n', (1184, 1206), False, 'from torch.cuda.amp import GradScaler\n'), ((1537, 1646), 'functools.partial', 'partial', (['stft'], {'n_fft': 'n_fft', 'hop_length': 'hop_length', 'win_length': 'win_length', 'device': 'self.rank', 'center': 'center'}), '(stft, n_fft=n_fft, hop_length=hop_length, win_length=win_length,\n device=self.rank, center=center)\n', (1544, 1646), False, 'from functools import partial\n'), ((1698, 1808), 'functools.partial', 'partial', (['istft'], {'n_fft': 'n_fft', 'hop_length': 'hop_length', 'win_length': 'win_length', 'device': 'self.rank', 'center': 'center'}), '(istft, n_fft=n_fft, hop_length=hop_length, win_length=win_length,\n device=self.rank, center=center)\n', (1705, 1808), False, 'from functools import partial\n'), ((9307, 9349), 'src.util.metrics.compute_thresholds', 'metrics.compute_thresholds', (['labels', 'scores'], {}), '(labels, scores)\n', (9333, 9349), True, 'import src.util.metrics as metrics\n'), ((10322, 10356), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(6, 6)'}), '(1, 1, figsize=(6, 6))\n', (10334, 10356), True, 'import matplotlib.pyplot as plt\n'), ((10375, 10461), 'sklearn.metrics.DetCurveDisplay', 'DetCurveDisplay', ([], {'fpr': 'fpr', 'fnr': '(1 - tpr)', 'estimator_name': 'f"""ROC_AUC = {roc_auc_mean}"""'}), "(fpr=fpr, fnr=1 - tpr, estimator_name=\n f'ROC_AUC = {roc_auc_mean}')\n", (10390, 10461), False, 'from sklearn.metrics import DetCurveDisplay\n'), ((3383, 3454), 'src.util.utils.prepare_empty_dir', 'prepare_empty_dir', (['[self.checkpoints_dir, self.logs_dir]'], {'resume': 'resume'}), '([self.checkpoints_dir, self.logs_dir], resume=resume)\n', (3400, 3454), False, 'from src.util.utils import prepare_empty_dir, ExecutionTime\n'), ((11402, 11417), 'src.util.utils.ExecutionTime', 'ExecutionTime', ([], {}), '()\n', (11415, 11417), False, 'from src.util.utils import prepare_empty_dir, ExecutionTime\n'), ((3305, 3341), 'pathlib.Path', 'Path', (["config['preloaded_model_path']"], {}), "(config['preloaded_model_path'])\n", (3309, 3341), False, 'from pathlib import Path\n'), ((4106, 4131), 'toml.dump', 'toml.dump', (['config', 'handle'], {}), '(config, handle)\n', (4115, 4131), False, 'import toml\n'), ((9741, 9774), 'src.util.metrics.REGISTERED_METRICS.keys', 'metrics.REGISTERED_METRICS.keys', ([], {}), '()\n', (9772, 9774), True, 'import src.util.metrics as metrics\n'), ((3885, 3903), 'toml.dumps', 'toml.dumps', (['config'], {}), '(config)\n', (3895, 3903), False, 'import toml\n'), ((2847, 2879), 'pathlib.Path', 'Path', (["config['meta']['save_dir']"], {}), "(config['meta']['save_dir'])\n", (2851, 2879), False, 'from pathlib import Path\n'), ((3640, 3658), 'toml.dumps', 'toml.dumps', (['config'], {}), '(config)\n', (3650, 3658), False, 'import toml\n'), ((4019, 4053), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d %H:%M:%S"""'], {}), "('%Y-%m-%d %H:%M:%S')\n", (4032, 4053), False, 'import time\n')]
|
import numpy as np
import torch
from torchvision import datasets, transforms
DEFAULT_DATA_DIR = "/is/rg/al/Projects/prob-models/data/"
class ReconstructionDataset(torch.utils.data.Dataset):
def __init__(
self, name, split="train", flatten=True, train_split=0.8, data_dir=None
):
assert split in ("train", "val", "test")
if data_dir is None:
data_dir = DEFAULT_DATA_DIR
load_train = split == "train" or split == "val"
if name == "mnist":
dataset = datasets.MNIST(
data_dir,
train=load_train,
download=True,
transform=transforms.ToTensor(),
)
elif name == "fashion-mnist":
dataset = datasets.FashionMNIST(
data_dir,
train=load_train,
download=True,
transform=transforms.ToTensor(),
)
else:
            raise ValueError(f"Unknown dataset name {name}")
        self.images = torch.stack([x[0] for x in dataset], dim=0)
if split == "train" or split == "val":
train_samples = int(train_split * len(self.images))
rng = np.random.RandomState(45)
idxs = rng.permutation(len(self.images))
if split == "train":
train_idxs = idxs[:train_samples]
self.images = self.images[train_idxs]
else:
val_idxs = idxs[train_samples:]
self.images = self.images[val_idxs]
self._shape = self.images.shape[1:]
if flatten:
self.images = self.images.reshape(len(self.images), -1)
example = self[0]
if flatten:
self.input_dim = example[0].shape[0]
self.target_dim = example[1].shape[0]
        else:
            # unflattened examples: record the full tensor shapes
            self.input_dim = example[0].shape
            self.target_dim = example[1].shape
@property
def shape(self):
return self._shape
def to_tensors(self):
return self.images, self.images
def __len__(self):
return len(self.images)
def __getitem__(self, idx):
img = self.images[idx]
return img, img
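# Usage sketch (downloads MNIST into data_dir on first use):
#
#   ds = ReconstructionDataset("mnist", split="train", data_dir="./data")
#   loader = torch.utils.data.DataLoader(ds, batch_size=64, shuffle=True)
#   x, y = next(iter(loader))   # x equals y: images reconstruct themselves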
|
[
"torch.stack",
"numpy.random.RandomState",
"torchvision.transforms.ToTensor"
] |
[((1027, 1071), 'torch.stack', 'torch.stack', (['[x[0] for x in dataset]'], {'axis': '(0)'}), '([x[0] for x in dataset], axis=0)\n', (1038, 1071), False, 'import torch\n'), ((1201, 1226), 'numpy.random.RandomState', 'np.random.RandomState', (['(45)'], {}), '(45)\n', (1222, 1226), True, 'import numpy as np\n'), ((656, 677), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (675, 677), False, 'from torchvision import datasets, transforms\n'), ((893, 914), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (912, 914), False, 'from torchvision import datasets, transforms\n')]
|
from typing import Iterator
from _pytest.fixtures import FixtureRequest
from pytest import fixture
from nextcloud_notes_api import Note
def _example_note() -> Note:
return Note(
title='Spam',
content='Bacon',
category='Todo',
favorite=True,
id=1337,
# https://stackoverflow.com/questions/59199985/why-is-datetimes-timestamp-method-returning-oserror-errno-22-invalid-a
modified=100_000,
)
@fixture
def example_note() -> Note:
"""
Returns:
Note: Note with all attributes set
"""
return _example_note()
@fixture
def example_note_gen(request: FixtureRequest) -> Iterator[Note]:
"""
Args:
request (FixtureRequest): `request.param` is the length of the generator
Yields:
Note: Example note, see `example_note()`
"""
return (_example_note() for _ in range(request.param))
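# Usage sketch: the generator fixture is parametrized indirectly, so a test
# sets its length via pytest.mark.parametrize:
#
#   import pytest
#
#   @pytest.mark.parametrize('example_note_gen', [3], indirect=True)
#   def test_notes(example_note_gen):
#       assert sum(1 for _ in example_note_gen) == 3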
|
[
"nextcloud_notes_api.Note"
] |
[((180, 277), 'nextcloud_notes_api.Note', 'Note', ([], {'title': '"""Spam"""', 'content': '"""Bacon"""', 'category': '"""Todo"""', 'favorite': '(True)', 'id': '(1337)', 'modified': '(100000)'}), "(title='Spam', content='Bacon', category='Todo', favorite=True, id=1337,\n modified=100000)\n", (184, 277), False, 'from nextcloud_notes_api import Note\n')]
|
import sqlite3
from sqlite3 import Error
import time
import datetime
db = "/home/pi/projects/pigrow/db.sqlite3"
def create_table(conn):
create_table_query = """ CREATE TABLE IF NOT EXISTS dht_data (
id integer PRIMARY KEY,
humidity real ,
temperature real,
ts text
);"""
try:
c = conn.cursor()
c.execute(create_table_query)
except Error as e:
print(e)
def create_connection(db):
conn = None
try:
conn = sqlite3.connect(db)
print(sqlite3.version)
except Error as e:
print(e)
return conn
def insertMeasure(conn, measure):
insert_query = ''' INSERT INTO dht_data(humidity, temperature, ts)
VALUES(?, ?, ?) '''
cur = conn.cursor()
cur.execute(insert_query, measure)
conn.commit()
def work():
import Adafruit_DHT
sensor = Adafruit_DHT.DHT22
sensor_pin = 18
while True:
conn = create_connection(db)
create_table(conn)
humidity, temperature = Adafruit_DHT.read_retry(sensor, sensor_pin)
ts = datetime.datetime.now().timestamp()
measure = (humidity, temperature, ts)
insertMeasure(conn, measure)
print("inserted {}".format(measure))
conn.close()
time.sleep(20)
print("Database connection does not exist")
if __name__ == '__main__':
work()
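# Read-back sketch (run separately from the logging loop above):
#
#   conn = create_connection(db)
#   cur = conn.cursor()
#   cur.execute("SELECT humidity, temperature, ts FROM dht_data ORDER BY id DESC LIMIT 5")
#   print(cur.fetchall())
#   conn.close()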
|
[
"Adafruit_DHT.read_retry",
"sqlite3.connect",
"datetime.datetime.now",
"time.sleep"
] |
[((594, 613), 'sqlite3.connect', 'sqlite3.connect', (['db'], {}), '(db)\n', (609, 613), False, 'import sqlite3\n'), ((1123, 1166), 'Adafruit_DHT.read_retry', 'Adafruit_DHT.read_retry', (['sensor', 'sensor_pin'], {}), '(sensor, sensor_pin)\n', (1146, 1166), False, 'import Adafruit_DHT\n'), ((1373, 1387), 'time.sleep', 'time.sleep', (['(20)'], {}), '(20)\n', (1383, 1387), False, 'import time\n'), ((1180, 1203), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1201, 1203), False, 'import datetime\n')]
|
import sys
sys.path.append("../")
from VideoTools import *
import subprocess
import os
import scipy.misc
MAXHEIGHT = 160
MINWIDTH = 120
def saveVideoID(I, IDims, fileprefix, ID, FrameRate = 30, NumberFrames = 30):
N = I.shape[0]
print(I.shape)
if I.shape[0] > FrameRate*5:
I = I[0:FrameRate*5, :]
N = I.shape[0]
frame = np.array([])
print("IDims = ", IDims)
for i in range(N):
frame = np.reshape(I[i, :], IDims)
frame[frame < 0] = 0
frame[frame > 1] = 1
if IDims[0] > MAXHEIGHT:
fac1 = MAXHEIGHT/float(IDims[0])
fac2 = MINWIDTH/float(IDims[1])
fac = max(fac1, fac2)
if i == 0:
print("Resizing by %g"%fac)
frame = scipy.misc.imresize(frame, fac)
mpimage.imsave("%s%i.png"%(TEMP_STR, i+1), frame)
PS = 60
if frame.shape[1] > MINWIDTH*1.5:
PS = int(30.0*frame.shape[1]/MINWIDTH)
for i in range(NumberFrames):
command = ["convert", "%s%i.png"%(TEMP_STR, N), "-fill", "red", "-pointsize", "%i"%PS, "-draw", 'text 20,60 %s%.3i%s'%("'", ID, "'"), "%s%i.png"%(TEMP_STR, N+i+1)]
print(command)
subprocess.call(command)
print(N + i + 1)
#Convert to video using avconv
for t in ["avi", "webm", "ogg"]:
filename = "%s.%s"%(fileprefix, t)
#Overwrite by default
if os.path.exists(filename):
os.remove(filename)
command = [AVCONV_BIN,
'-r', "%i"%FrameRate,
'-i', TEMP_STR + '%d.png',
'-r', "%i"%FrameRate,
'-b', '30000k',
filename]
subprocess.call(command)
#Clean up
for i in range(N+NumberFrames):
os.remove("%s%i.png"%(TEMP_STR, i+1))
np.random.seed(100)
IDs = np.random.permutation(999)
i = 0
Videos = ["OrigVideos/%s"%v for v in os.listdir("OrigVideos")]
for V in Videos:
print("Saving %s..."%V)
(I, IDims) = loadVideo(V)
saveVideoID(I, IDims, "NumberedVideos/%i"%i, IDs[i])
i = i + 1
|
[
"sys.path.append",
"os.remove",
"os.path.exists",
"subprocess.call",
"os.listdir"
] |
[((11, 33), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (26, 33), False, 'import sys\n'), ((1187, 1211), 'subprocess.call', 'subprocess.call', (['command'], {}), '(command)\n', (1202, 1211), False, 'import subprocess\n'), ((1393, 1417), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (1407, 1417), False, 'import os\n'), ((1687, 1711), 'subprocess.call', 'subprocess.call', (['command'], {}), '(command)\n', (1702, 1711), False, 'import subprocess\n'), ((1769, 1810), 'os.remove', 'os.remove', (["('%s%i.png' % (TEMP_STR, i + 1))"], {}), "('%s%i.png' % (TEMP_STR, i + 1))\n", (1778, 1810), False, 'import os\n'), ((1906, 1930), 'os.listdir', 'os.listdir', (['"""OrigVideos"""'], {}), "('OrigVideos')\n", (1916, 1930), False, 'import os\n'), ((1431, 1450), 'os.remove', 'os.remove', (['filename'], {}), '(filename)\n', (1440, 1450), False, 'import os\n')]
|
from flask import Blueprint, render_template
site_admin = Blueprint("admin_site", __name__, url_prefix="/admin", template_folder="template", static_folder="static", static_url_path="/admin/static")
@site_admin.route("/")
def admin():
return render_template("index.html")
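# Registration sketch (the application object is assumed to live elsewhere):
#
#   from flask import Flask
#   app = Flask(__name__)
#   app.register_blueprint(site_admin)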
|
[
"flask.Blueprint",
"flask.render_template"
] |
[((59, 203), 'flask.Blueprint', 'Blueprint', (['"""admin_site"""', '__name__'], {'url_prefix': '"""/admin"""', 'template_folder': '"""template"""', 'static_folder': '"""static"""', 'static_url_path': '"""/admin/static"""'}), "('admin_site', __name__, url_prefix='/admin', template_folder=\n 'template', static_folder='static', static_url_path='/admin/static')\n", (68, 203), False, 'from flask import Blueprint, render_template\n'), ((246, 275), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (261, 275), False, 'from flask import Blueprint, render_template\n')]
|
import time
from threading import Timer
from typing import Callable
from pydispatch import dispatcher
from zacoby import global_logger
from zacoby.exceptions import ElementDoesNotExist, MethodError
# from zacoby.pipeline import Pipeline
from zacoby.settings import settings
from zacoby.signals import signal
class DriverMixin:
def __init__(self, driver: Callable, timeout: int):
self.driver = driver
self.timeout = timeout
# signal.send(dispatcher.Any, self, timeout=timeout)
class Wait(DriverMixin):
def __init__(self, name: str, driver: Callable, timeout: int=10):
super().__init__(driver, timeout)
self.name = name
self.exceptions = []
self.results = []
def _start_polling(self, func, **kwargs):
# result = None
# results = []
        end_time = time.time() + self.timeout
global_logger.info(f'Waiting for element [{self.name}] - ({self.timeout}s)...')
while True:
try:
result = func(driver=self.driver, **kwargs)
except Exception:
raise
else:
# return result
self.results.append(result)
time.sleep(self.timeout)
if time.time() > end_time:
break
# raise TimeoutError()
def until(self, func: Callable, **kwargs):
self._start_polling(func, **kwargs)
return self
def until_not(self, func: Callable, **kwargs):
self._start_polling(func, **kwargs)
return self
def chains(self, *funcs: Callable, method='until'):
authorized_methods = ['until', 'until_not']
if method not in authorized_methods:
raise MethodError()
for func in funcs:
pass
def logical_map(self, methods: dict):
container = []
for key, method in methods.items():
container.append(method())
return self
class Pause(DriverMixin):
def _start_pause(self, callback = None):
result = []
if callback is not None:
if not callable(callback):
raise TypeError('Callback should be a callable')
timer = Timer(self.timeout, function=callback, kwargs={'driver': self.driver, 'result': result})
else:
timer = Timer(self.timeout, function=lambda: True)
timer.start()
global_logger.info(f'Entering sleep mode ({self.timeout}s)')
timer.join()
if not timer.is_alive():
timer.cancel()
return result if result else None
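# Usage sketch (hedged: the condition callable and its keyword arguments are
# illustrative; _start_polling passes the driver in as `driver=`):
#
#   def element_present(driver=None, selector=None):
#       return driver.find(selector)          # hypothetical driver method
#
#   wait = Wait('login-button', driver, timeout=5)
#   wait.until(element_present, selector='#login')
#   print(wait.results)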
|
[
"threading.Timer",
"time.time",
"time.sleep",
"zacoby.exceptions.MethodError",
"zacoby.global_logger.info"
] |
[((890, 969), 'zacoby.global_logger.info', 'global_logger.info', (['f"""Waiting for element [{self.name}] - ({self.timeout}s)..."""'], {}), "(f'Waiting for element [{self.name}] - ({self.timeout}s)...')\n", (908, 969), False, 'from zacoby import global_logger\n'), ((2432, 2492), 'zacoby.global_logger.info', 'global_logger.info', (['f"""Entering sleep mode ({self.timeout}s)"""'], {}), "(f'Entering sleep mode ({self.timeout}s)')\n", (2450, 2492), False, 'from zacoby import global_logger\n'), ((1238, 1262), 'time.sleep', 'time.sleep', (['self.timeout'], {}), '(self.timeout)\n', (1248, 1262), False, 'import time\n'), ((1759, 1772), 'zacoby.exceptions.MethodError', 'MethodError', ([], {}), '()\n', (1770, 1772), False, 'from zacoby.exceptions import ElementDoesNotExist, MethodError\n'), ((2236, 2328), 'threading.Timer', 'Timer', (['self.timeout'], {'function': 'callback', 'kwargs': "{'driver': self.driver, 'result': result}"}), "(self.timeout, function=callback, kwargs={'driver': self.driver,\n 'result': result})\n", (2241, 2328), False, 'from threading import Timer\n'), ((2359, 2402), 'threading.Timer', 'Timer', (['self.timeout'], {'function': '(lambda : True)'}), '(self.timeout, function=lambda : True)\n', (2364, 2402), False, 'from threading import Timer\n'), ((845, 856), 'time.time', 'time.time', ([], {}), '()\n', (854, 856), False, 'import time\n'), ((1278, 1289), 'time.time', 'time.time', ([], {}), '()\n', (1287, 1289), False, 'import time\n')]
|
# -*- coding: utf-8 -*-
import sys
from redis import Redis
from rq import Queue, Connection, Worker
from mailhook.config import config
# Preload libraries
import twilio
# Provide queue names to listen to as arguments to this script,
# similar to rqworker
redis_conn = Redis(config.REDIS_HOST)
with Connection(redis_conn):
    qs = list(map(Queue, sys.argv[1:])) or [Queue()]  # map() is always truthy in Py3; materialize so the fallback can trigger
w = Worker(qs)
w.work()
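# Invocation sketch: queue names are passed as CLI arguments, mirroring
# rqworker, e.g.
#
#   python worker.py high default low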
|
[
"redis.Redis",
"rq.Worker",
"rq.Connection",
"rq.Queue"
] |
[((272, 296), 'redis.Redis', 'Redis', (['config.REDIS_HOST'], {}), '(config.REDIS_HOST)\n', (277, 296), False, 'from redis import Redis\n'), ((303, 325), 'rq.Connection', 'Connection', (['redis_conn'], {}), '(redis_conn)\n', (313, 325), False, 'from rq import Queue, Connection, Worker\n'), ((382, 392), 'rq.Worker', 'Worker', (['qs'], {}), '(qs)\n', (388, 392), False, 'from rq import Queue, Connection, Worker\n'), ((365, 372), 'rq.Queue', 'Queue', ([], {}), '()\n', (370, 372), False, 'from rq import Queue, Connection, Worker\n')]
|
'''
Created on 5 janv. 2022
@author: slinux
'''
import sys
# The answer is that the module xmlrpc is part of python3
import xmlrpc.client
import os
import logging
class IPFS_RPC_Client(object):
#Put your server IP here
_ip='0.0.0.0'
_port=1234
_url = ""
_client = None
def __init__(self, ip="127.0.0.1", port=9000, useHTTPS=False):
self._ip = ip
self._port = port
self.logger = logging.getLogger('wxRaven')
self.url = 'http://{}:{}'.format(ip, port)
if useHTTPS:
self.url = 'https://{}'.format(ip)
        if 'http' in ip:
self.url = '{}'.format(ip)
self.logger.info(f'Creating a new IPFS RPC Client at {self.url}')
self._client = xmlrpc.client.ServerProxy(self.url)
def sendFile(self, filename):
curDir = os.path.dirname(os.path.realpath(__file__))
#filename = sys.argv[1]
#fpn = curDir + '/' + filename
fpn = filename
        local_dir, remote_fname = os.path.split(filename)
self.logger.info(' filename -> ({})'.format(filename))
        self.logger.info(' fpn -> ({})'.format(remote_fname))
if not os.path.exists(fpn):
self.logger.info('Missing file -> ({})'.format(fpn))
#sys.exit(1)
_resultUpload = None
with open(fpn, "rb") as handle:
binary_data = xmlrpc.client.Binary(handle.read())
            _resultUpload = self._client.server_receive_file(binary_data, remote_fname)
self.logger.info(f'_resultUpload = {_resultUpload}')
return _resultUpload
def sendJSON(self, JSON):
self.logger.info(f'JSON = {JSON}')
_resultUpload = self._client.server_receive_json(JSON)
#self.logger.info(f'_resultUpload = {_resultUpload}')
return _resultUpload
#.add_json(self.compile_message(message))
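# Usage sketch (assumes a matching XML-RPC server that exposes
# server_receive_file/server_receive_json, as the proxy calls above imply):
#
#   client = IPFS_RPC_Client(ip='127.0.0.1', port=9000)
#   client.sendJSON({'op': 'pin', 'cid': 'Qm...'})
#   client.sendFile('/tmp/example.bin')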
|
[
"os.path.realpath",
"os.path.split",
"os.path.exists",
"logging.getLogger"
] |
[((448, 476), 'logging.getLogger', 'logging.getLogger', (['"""wxRaven"""'], {}), "('wxRaven')\n", (465, 476), False, 'import logging\n'), ((1080, 1103), 'os.path.split', 'os.path.split', (['filename'], {}), '(filename)\n', (1093, 1103), False, 'import os\n'), ((927, 953), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (943, 953), False, 'import os\n'), ((1242, 1261), 'os.path.exists', 'os.path.exists', (['fpn'], {}), '(fpn)\n', (1256, 1261), False, 'import os\n')]
|
import datetime
import inspect
import os
import sys
class Puzzle:
# The delimiter to use to separate the input data into a list for subsequent
# processing. E.g. '\n', ',', etc. Delimited items can be processed prior to
# being added to the input list by overriding _process_input_item().
# Set to None to read the data in whole. In this case, data can be processed
# by overriding _process_input_data().
input_delimiter = '\n'
def __init__(self, sample=False, verbosity=2):
self.sample = sample
self.verbosity = verbosity
def get_input_file_name(self):
path = os.path.dirname(os.path.abspath(inspect.getfile(self.__class__)))
filename = 'sample' if self.sample else 'input'
return os.path.join(path, filename)
def process_input_item(self, input_line):
return input_line
def process_input_data(self, input_data):
return input_data
def get_input(self):
input_file = self.get_input_file_name()
delimiter = self.input_delimiter
process_item = self.process_input_item
with open(input_file, 'r') as f:
if delimiter == '\n':
# Trim whitespace from and process each line in the input file,
# skipping any blank lines
input_data = []
for line in f.readlines():
line = line.strip()
if line:
input_data.append(process_item(line))
else:
raw_input = f.read().strip() # trim whitespace (e.g. newlines)
if delimiter:
# Trim whitespace from and process each item in the raw
# input data after applying the configured delimiter
input_data = [process_item(item.strip()) for item in raw_input.split(delimiter)]
else:
# Process the raw input data directly
input_data = self.process_input_data(raw_input)
return input_data
def _do_solve(self, solvers):
v = self.verbosity
max_v = v > 1
line_endings = '\n' if max_v else ''
# Get input
if max_v:
sample = '**SAMPLE** ' if self.sample else ''
print('=' * 50, f'\n\nProcessing {sample}', end='')
print('Input... ', end=line_endings)
start = datetime.datetime.now()
try:
input_data = self.get_input()
except FileNotFoundError:
print(f'No input data file found (looked in {self.get_input_file_name()}).')
return
t = (datetime.datetime.now() - start).total_seconds()
if self.input_delimiter == '\n':
input_desc = f'has {len(input_data)} lines'
elif self.input_delimiter:
input_desc = f'has {len(input_data)} items'
else:
size = sys.getsizeof(input_data)
input_desc = f'is {size} bytes'
if max_v:
print('Input ', end='')
print(f'{input_desc} ({type(input_data)}) [{t}s]')
# Run solvers
for part, solver in solvers:
if self.input_delimiter:
# Copy the data so each part is free to manipulate it without
# affecting subsequent parts
part_input_data = input_data[:]
else:
part_input_data = input_data
if max_v:
print('\nSolving ', end='')
print('Part {}... '.format(part), end=line_endings)
start = datetime.datetime.now()
solution = solver(part_input_data)
t = (datetime.datetime.now() - start).total_seconds()
if max_v:
print('Solution: ', end='')
print('{} [{}s]'.format(solution, t))
if max_v:
print('\n', '=' * 50, sep='')
def _part1(self, input_data):
raise NotImplementedError()
def _part2(self, input_data):
raise NotImplementedError()
def solve_part1(self):
self._do_solve([(1, self._part1)])
def solve_part2(self):
self._do_solve([(2, self._part2)])
def solve(self):
self._do_solve([(1, self._part1), (2, self._part2)])
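# Subclassing sketch (illustrative puzzle that sums the input lines):
#
#   class Day1(Puzzle):
#       def process_input_item(self, input_line):
#           return int(input_line)
#       def _part1(self, input_data):
#           return sum(input_data)
#
#   Day1(sample=True).solve_part1()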
|
[
"inspect.getfile",
"os.path.join",
"datetime.datetime.now",
"sys.getsizeof"
] |
[((798, 826), 'os.path.join', 'os.path.join', (['path', 'filename'], {}), '(path, filename)\n', (810, 826), False, 'import os\n'), ((2573, 2596), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2594, 2596), False, 'import datetime\n'), ((3824, 3847), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3845, 3847), False, 'import datetime\n'), ((684, 715), 'inspect.getfile', 'inspect.getfile', (['self.__class__'], {}), '(self.__class__)\n', (699, 715), False, 'import inspect\n'), ((3095, 3120), 'sys.getsizeof', 'sys.getsizeof', (['input_data'], {}), '(input_data)\n', (3108, 3120), False, 'import sys\n'), ((2816, 2839), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2837, 2839), False, 'import datetime\n'), ((3912, 3935), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3933, 3935), False, 'import datetime\n')]
|
# create a credentials.py file with the following keys
from credentials import ckey, csecret, atoken, asecret
from tweepy import Stream, OAuthHandler
from tweepy.streaming import StreamListener
class listener(StreamListener):
def on_data(self, data):
print(data)
return True
def on_error(self, status):
print(status)
auth = OAuthHandler(ckey, csecret)
auth.set_access_token(atoken, asecret)
twitterStream = Stream(auth, listener())
twitterStream.filter(track=['python'])
|
[
"tweepy.OAuthHandler"
] |
[((360, 387), 'tweepy.OAuthHandler', 'OAuthHandler', (['ckey', 'csecret'], {}), '(ckey, csecret)\n', (372, 387), False, 'from tweepy import Stream, OAuthHandler\n')]
|
## l2_attack.py -- attack a network optimizing for l_2 distance
##
## Copyright (C) 2016, <NAME> <<EMAIL>>.
##
## This program is licenced under the BSD 2-Clause licence,
## contained in the LICENCE file in this directory.
## Modified by <NAME> 2017
import tensorflow as tf
import numpy as np
BINARY_SEARCH_STEPS = 9 # number of times to adjust the constant with binary search
MAX_ITERATIONS = 10000 # number of iterations to perform gradient descent
ABORT_EARLY = True # if we stop improving, abort gradient descent early
LEARNING_RATE = 1e-2 # larger values converge faster to less accurate results, default 1e-2
TARGETED = False # should we target one specific class? or just be wrong?
CONFIDENCE = 0 # how strong the adversarial example should be
INITIAL_CONST = 1e-3 # the initial constant c to pick as a first guess
class CarliniL2:
def __init__(self, sess, models, batch_size=1, confidence = CONFIDENCE,
targeted = TARGETED, learning_rate = LEARNING_RATE,
binary_search_steps = BINARY_SEARCH_STEPS, max_iterations = MAX_ITERATIONS,
abort_early = ABORT_EARLY,
initial_const = INITIAL_CONST,
boxmin = -0.5, boxmax = 0.5):
"""
The L_2 optimized attack.
This attack is the most efficient and should be used as the primary
attack to evaluate potential defenses.
Returns adversarial examples for the supplied model.
confidence: Confidence of adversarial examples: higher produces examples
that are farther away, but more strongly classified as adversarial.
batch_size: Number of attacks to run simultaneously.
targeted: True if we should perform a targetted attack, False otherwise.
learning_rate: The learning rate for the attack algorithm. Smaller values
produce better results but are slower to converge.
binary_search_steps: The number of times we perform binary search to
find the optimal tradeoff-constant between distance and confidence.
max_iterations: The maximum number of iterations. Larger values are more
accurate; setting too small will require a large learning rate and will
produce poor results.
abort_early: If true, allows early aborts if gradient descent gets stuck.
initial_const: The initial tradeoff-constant to use to tune the relative
importance of distance and confidence. If binary_search_steps is large,
the initial constant is not important.
boxmin: Minimum pixel value (default -0.5).
boxmax: Maximum pixel value (default 0.5).
"""
image_size, num_channels, num_labels = models[0].image_size, models[0].num_channels, models[0].num_labels
self.sess = sess
self.TARGETED = targeted
self.LEARNING_RATE = learning_rate
self.MAX_ITERATIONS = max_iterations
self.BINARY_SEARCH_STEPS = binary_search_steps
self.ABORT_EARLY = abort_early
self.CONFIDENCE = confidence
self.initial_const = initial_const
self.batch_size = batch_size
self.num_models = len(models)
self.num_labels = num_labels
shape = (batch_size,image_size,image_size,num_channels)
# the variable we're going to optimize over
modifier = tf.Variable(np.zeros(shape,dtype=np.float32))
# these are variables to be more efficient in sending data to tf
self.timg = tf.Variable(np.zeros(shape), dtype=tf.float32)
self.tlab = tf.Variable(np.zeros((batch_size,num_labels)), dtype=tf.float32)
self.const = tf.Variable(np.zeros(batch_size), dtype=tf.float32)
self.weights = tf.Variable(np.zeros(self.num_models), dtype=tf.float32)
# and here's what we use to assign them
self.assign_timg = tf.placeholder(tf.float32, shape)
self.assign_tlab = tf.placeholder(tf.float32, (batch_size, num_labels))
self.assign_const = tf.placeholder(tf.float32, [batch_size])
self.assign_weights = tf.placeholder(tf.float32, [self.num_models])
# the resulting image, tanh'd to keep bounded from boxmin to boxmax
self.boxmul = (boxmax - boxmin) / 2.
self.boxplus = (boxmin + boxmax) / 2.
self.newimg = tf.tanh(modifier + self.timg) * self.boxmul + self.boxplus
# prediction BEFORE-SOFTMAX of the model
self.outputs = [model.predict(self.newimg) for model in models]
# distance to the input data
self.l2dist = tf.reduce_sum(tf.square(self.newimg-(tf.tanh(self.timg) * self.boxmul + self.boxplus)),[1,2,3])
# compute the probability of the label class versus the maximum other
reals = []
others = []
        for i in range(self.num_models):
real = tf.reduce_sum((self.tlab) * self.outputs[i], 1)
other = tf.reduce_max((1 - self.tlab)*self.outputs[i] - (self.tlab*10000), 1)
reals.append(real)
others.append(other)
self.reals, self.others = reals, others
loss1list = []
if self.TARGETED:
# if targetted, optimize for making the other class most likely
            for i in range(self.num_models):
loss1list.append(tf.maximum(0.0, self.weights[i] * (others[i] - reals[i] + self.CONFIDENCE)))
else:
# if untargeted, optimize for making this class least likely.
            for i in range(self.num_models):
loss1list.append(tf.maximum(0.0, self.weights[i] * (reals[i] - others[i] + self.CONFIDENCE)))
self.loss1list = loss1list # TODO: remove
# sum up the losses
self.loss2 = tf.reduce_sum(self.l2dist)
self.loss1 = tf.reduce_sum(self.const * tf.add_n(self.loss1list))
self.loss = self.loss1 + self.loss2
self.reals = reals
self.others = others
# Setup the adam optimizer and keep track of variables we're creating
start_vars = set(x.name for x in tf.global_variables())
optimizer = tf.train.AdamOptimizer(self.LEARNING_RATE)
self.train = optimizer.minimize(self.loss, var_list=[modifier])
end_vars = tf.global_variables()
new_vars = [x for x in end_vars if x.name not in start_vars]
# these are the variables to initialize when we run
self.setup = []
self.setup.append(self.timg.assign(self.assign_timg))
self.setup.append(self.tlab.assign(self.assign_tlab))
self.setup.append(self.const.assign(self.assign_const))
self.setup.append(self.weights.assign(self.assign_weights))
self.init = tf.variables_initializer(var_list=[modifier]+new_vars)
def attack(self, imgs, targets, weights):
"""
Perform the L_2 attack on the given images for the given targets.
If self.targeted is true, then the targets represents the target labels.
If self.targeted is false, then targets are the original class labels.
"""
r = []
# print('go up to',len(imgs))
for i in range(0,len(imgs),self.batch_size):
# print('tick',i)
r.extend(self.attack_batch(imgs[i:i+self.batch_size], targets[i:i+self.batch_size], weights))
return np.array(r)
def attack_batch(self, imgs, labs, weights):
"""
Run the attack on a batch of images and labels.
"""
def compareLoss(x, y):
"""
x is an np array of shape num_models x num_classes
y is the true label or target label of the class
returns a number in [0,1] indicating the expected loss of the learner
"""
if not isinstance(x, (float, int, np.int64)):
x = np.copy(x)
for v in x: # update the target scores for each individual prediction
if self.TARGETED:
v[y] -= self.CONFIDENCE
else:
v[y] += self.CONFIDENCE
x = np.argmax(x, 1) # these are the predictions of each hypothesis
if self.TARGETED:
return np.dot(x == y, weights)
else:
return np.dot(x != y, weights)
batch_size = self.batch_size
# convert to tanh-space
imgs = np.arctanh((imgs - self.boxplus) / self.boxmul * 0.999999)
# set the lower and upper bounds accordingly
lower_bound = np.zeros(batch_size)
CONST = np.ones(batch_size)*self.initial_const
upper_bound = np.ones(batch_size)*1e10
# the best l2, score, and image attack
o_bestl2 = [1e10]*batch_size
o_bestscore = [-1]*batch_size
o_bestattack = [np.zeros(imgs[0].shape)]*batch_size
for outer_step in range(self.BINARY_SEARCH_STEPS):
# completely reset adam's internal state.
self.sess.run(self.init)
batch = imgs[:batch_size]
batchlab = labs[:batch_size]
bestl2 = [1e10]*batch_size
bestscore = [0.0]*batch_size
# set the variables so that we don't have to send them over again
self.sess.run(self.setup, {self.assign_timg: batch,
self.assign_tlab: batchlab,
self.assign_const: CONST,
self.assign_weights: weights})
# print "Outer Step ", outer_step, "Current C ", CONST, lower_bound, upper_bound
prev = 1e10 # used to be e6
for iteration in range(self.MAX_ITERATIONS):
# perform the attack
_, l, l2s, scores, nimg = self.sess.run([self.train, self.loss,
self.l2dist, self.outputs,
self.newimg])
scores = np.array(scores).reshape(self.batch_size, self.num_models, self.num_labels)
# if iteration % 200 == 0:
# print(iteration, self.sess.run((self.loss, self.loss1, self.loss2)))
# check if we should abort search if we're getting nowhere. (check every 10%)
if self.ABORT_EARLY and iteration%(self.MAX_ITERATIONS * .10) == 0:
if l > prev*.9999:
break
prev = l
for e,(l2,sc,ii) in enumerate(zip(l2s,scores,nimg)):
currLoss = compareLoss(sc, np.argmax(batchlab[e])) # expected loss of the learner
if currLoss > bestscore[e]: # we've found a clear improvement for this value of c
bestl2[e] = l2
bestscore[e] = currLoss
if currLoss == bestscore[e] and l2 < bestl2[e]:
bestl2[e] = l2
if currLoss > o_bestscore[e]:
o_bestl2[e] = l2
o_bestscore[e] = currLoss
o_bestattack[e] = ii
if currLoss == o_bestscore[e] and l2 < o_bestl2[e]:
o_bestl2[e] = l2
o_bestattack[e] = ii
# finished trying out the adam optimizer for a particular c, now need to decide on the next value
# adjust the constant as needed
for e in range(batch_size):
if bestscore[e] == 1.0:
upper_bound[e] = min(upper_bound[e], CONST[e])
if upper_bound[e] < 1e9:
CONST[e] = (lower_bound[e] + upper_bound[e])/2
else:
lower_bound[e] = max(lower_bound[e],CONST[e])
if upper_bound[e] < 1e9:
CONST[e] = (lower_bound[e] + upper_bound[e])/2
else:
CONST[e] *= 100
# return the best solution found
return o_bestattack
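# A hedged usage sketch (model and data names are hypothetical; assumes each
# model exposes image_size, num_channels, num_labels and a predict() method):
# with tf.Session() as sess:
#     attack = CarliniL2(sess, [model], batch_size=1, max_iterations=1000)
#     adv_imgs = attack.attack(imgs, targets, weights=np.ones(1))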
|
[
"numpy.arctanh",
"tensorflow.reduce_sum",
"tensorflow.add_n",
"numpy.copy",
"numpy.argmax",
"tensorflow.maximum",
"tensorflow.variables_initializer",
"numpy.zeros",
"numpy.ones",
"tensorflow.placeholder",
"tensorflow.global_variables",
"numpy.array",
"tensorflow.tanh",
"numpy.dot",
"tensorflow.reduce_max",
"tensorflow.train.AdamOptimizer"
] |
[((3887, 3920), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', 'shape'], {}), '(tf.float32, shape)\n', (3901, 3920), True, 'import tensorflow as tf\n'), ((3948, 4000), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '(batch_size, num_labels)'], {}), '(tf.float32, (batch_size, num_labels))\n', (3962, 4000), True, 'import tensorflow as tf\n'), ((4029, 4069), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[batch_size]'], {}), '(tf.float32, [batch_size])\n', (4043, 4069), True, 'import tensorflow as tf\n'), ((4100, 4145), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[self.num_models]'], {}), '(tf.float32, [self.num_models])\n', (4114, 4145), True, 'import tensorflow as tf\n'), ((5756, 5782), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['self.l2dist'], {}), '(self.l2dist)\n', (5769, 5782), True, 'import tensorflow as tf\n'), ((6120, 6162), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['self.LEARNING_RATE'], {}), '(self.LEARNING_RATE)\n', (6142, 6162), True, 'import tensorflow as tf\n'), ((6254, 6275), 'tensorflow.global_variables', 'tf.global_variables', ([], {}), '()\n', (6273, 6275), True, 'import tensorflow as tf\n'), ((6707, 6763), 'tensorflow.variables_initializer', 'tf.variables_initializer', ([], {'var_list': '([modifier] + new_vars)'}), '(var_list=[modifier] + new_vars)\n', (6731, 6763), True, 'import tensorflow as tf\n'), ((7325, 7336), 'numpy.array', 'np.array', (['r'], {}), '(r)\n', (7333, 7336), True, 'import numpy as np\n'), ((8384, 8442), 'numpy.arctanh', 'np.arctanh', (['((imgs - self.boxplus) / self.boxmul * 0.999999)'], {}), '((imgs - self.boxplus) / self.boxmul * 0.999999)\n', (8394, 8442), True, 'import numpy as np\n'), ((8519, 8539), 'numpy.zeros', 'np.zeros', (['batch_size'], {}), '(batch_size)\n', (8527, 8539), True, 'import numpy as np\n'), ((3398, 3431), 'numpy.zeros', 'np.zeros', (['shape'], {'dtype': 'np.float32'}), '(shape, dtype=np.float32)\n', (3406, 3431), True, 'import numpy as np\n'), ((3538, 3553), 'numpy.zeros', 'np.zeros', (['shape'], {}), '(shape)\n', (3546, 3553), True, 'import numpy as np\n'), ((3605, 3639), 'numpy.zeros', 'np.zeros', (['(batch_size, num_labels)'], {}), '((batch_size, num_labels))\n', (3613, 3639), True, 'import numpy as np\n'), ((3691, 3711), 'numpy.zeros', 'np.zeros', (['batch_size'], {}), '(batch_size)\n', (3699, 3711), True, 'import numpy as np\n'), ((3766, 3791), 'numpy.zeros', 'np.zeros', (['self.num_models'], {}), '(self.num_models)\n', (3774, 3791), True, 'import numpy as np\n'), ((4876, 4921), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(self.tlab * self.outputs[i])', '(1)'], {}), '(self.tlab * self.outputs[i], 1)\n', (4889, 4921), True, 'import tensorflow as tf\n'), ((4944, 5015), 'tensorflow.reduce_max', 'tf.reduce_max', (['((1 - self.tlab) * self.outputs[i] - self.tlab * 10000)', '(1)'], {}), '((1 - self.tlab) * self.outputs[i] - self.tlab * 10000, 1)\n', (4957, 5015), True, 'import tensorflow as tf\n'), ((8556, 8575), 'numpy.ones', 'np.ones', (['batch_size'], {}), '(batch_size)\n', (8563, 8575), True, 'import numpy as np\n'), ((8617, 8636), 'numpy.ones', 'np.ones', (['batch_size'], {}), '(batch_size)\n', (8624, 8636), True, 'import numpy as np\n'), ((4336, 4365), 'tensorflow.tanh', 'tf.tanh', (['(modifier + self.timg)'], {}), '(modifier + self.timg)\n', (4343, 4365), True, 'import tensorflow as tf\n'), ((5831, 5855), 'tensorflow.add_n', 'tf.add_n', (['self.loss1list'], {}), '(self.loss1list)\n', (5839, 5855), True, 'import tensorflow as tf\n'), ((7815, 7825), 'numpy.copy', 'np.copy', (['x'], {}), '(x)\n', (7822, 7825), True, 'import numpy as np\n'), ((8092, 8107), 'numpy.argmax', 'np.argmax', (['x', '(1)'], {}), '(x, 1)\n', (8101, 8107), True, 'import numpy as np\n'), ((8209, 8232), 'numpy.dot', 'np.dot', (['(x == y)', 'weights'], {}), '(x == y, weights)\n', (8215, 8232), True, 'import numpy as np\n'), ((8274, 8297), 'numpy.dot', 'np.dot', (['(x != y)', 'weights'], {}), '(x != y, weights)\n', (8280, 8297), True, 'import numpy as np\n'), ((8789, 8812), 'numpy.zeros', 'np.zeros', (['imgs[0].shape'], {}), '(imgs[0].shape)\n', (8797, 8812), True, 'import numpy as np\n'), ((5332, 5407), 'tensorflow.maximum', 'tf.maximum', (['(0.0)', '(self.weights[i] * (others[i] - reals[i] + self.CONFIDENCE))'], {}), '(0.0, self.weights[i] * (others[i] - reals[i] + self.CONFIDENCE))\n', (5342, 5407), True, 'import tensorflow as tf\n'), ((5577, 5652), 'tensorflow.maximum', 'tf.maximum', (['(0.0)', '(self.weights[i] * (reals[i] - others[i] + self.CONFIDENCE))'], {}), '(0.0, self.weights[i] * (reals[i] - others[i] + self.CONFIDENCE))\n', (5587, 5652), True, 'import tensorflow as tf\n'), ((6077, 6098), 'tensorflow.global_variables', 'tf.global_variables', ([], {}), '()\n', (6096, 6098), True, 'import tensorflow as tf\n'), ((9988, 10004), 'numpy.array', 'np.array', (['scores'], {}), '(scores)\n', (9996, 10004), True, 'import numpy as np\n'), ((10595, 10617), 'numpy.argmax', 'np.argmax', (['batchlab[e]'], {}), '(batchlab[e])\n', (10604, 10617), True, 'import numpy as np\n'), ((4630, 4648), 'tensorflow.tanh', 'tf.tanh', (['self.timg'], {}), '(self.timg)\n', (4637, 4648), True, 'import tensorflow as tf\n')]
|
# =========================================
# IMPORTS
# --------------------------------------
import rootpath
rootpath.append()
# =========================================
# EXPORTS
# --------------------------------------
from inspecta.inspector import *
|
[
"rootpath.append"
] |
[((120, 137), 'rootpath.append', 'rootpath.append', ([], {}), '()\n', (135, 137), False, 'import rootpath\n')]
|
from django.shortcuts import render
# Create your views here.
from django.http import HttpResponse
def index(request):
return HttpResponse("Desde la vista App")
def sumar(request, numero1, numero2):
sum = numero1 + numero2
return HttpResponse("La suma de %s + %s = %s" % (numero1, numero2, sum))
### !! %s = means it will be of type string !! %f = will be of type float
def restar(request, numero1, numero2):
res = numero1 - numero2
return HttpResponse("La resta de %s - %s = %s" % (numero1, numero2, res))
### !! %s = means it will be of type string !! %f = will be of type float
def multiplicar(request, numero1, numero2):
mul = numero1 * numero2
return HttpResponse("La multiplicacion de %s * %s = %s" % (numero1, numero2, mul))
### !! %s = means it will be of type string !! %f = will be of type float
def dividir(request, numero1, numero2):
div = numero1 / numero2
return HttpResponse("La division de %s / %s = %f" % (numero1, numero2, div))
### !! %s = means it will be of type string !! %f = will be of type float
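# A hypothetical urls.py sketch wiring these views (route names assumed; the
# <int:...> converters ensure numero1/numero2 arrive as integers):
# from django.urls import path
# from . import views
# urlpatterns = [
#     path('sumar/<int:numero1>/<int:numero2>/', views.sumar),
#     path('restar/<int:numero1>/<int:numero2>/', views.restar),
# ]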
|
[
"django.http.HttpResponse"
] |
[((132, 166), 'django.http.HttpResponse', 'HttpResponse', (['"""Desde la vista App"""'], {}), "('Desde la vista App')\n", (144, 166), False, 'from django.http import HttpResponse\n'), ((245, 310), 'django.http.HttpResponse', 'HttpResponse', (["('La suma de %s + %s = %s' % (numero1, numero2, sum))"], {}), "('La suma de %s + %s = %s' % (numero1, numero2, sum))\n", (257, 310), False, 'from django.http import HttpResponse\n'), ((477, 543), 'django.http.HttpResponse', 'HttpResponse', (["('La resta de %s - %s = %s' % (numero1, numero2, res))"], {}), "('La resta de %s - %s = %s' % (numero1, numero2, res))\n", (489, 543), False, 'from django.http import HttpResponse\n'), ((709, 784), 'django.http.HttpResponse', 'HttpResponse', (["('La multiplicacion de %s * %s = %s' % (numero1, numero2, mul))"], {}), "('La multiplicacion de %s * %s = %s' % (numero1, numero2, mul))\n", (721, 784), False, 'from django.http import HttpResponse\n'), ((946, 1015), 'django.http.HttpResponse', 'HttpResponse', (["('La division de %s / %s = %f' % (numero1, numero2, div))"], {}), "('La division de %s / %s = %f' % (numero1, numero2, div))\n", (958, 1015), False, 'from django.http import HttpResponse\n')]
|
import django.core.validators
import django.db.models.deletion
import django.utils.timezone
from django.db import migrations, models
import model_utils.fields
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="Car",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"created",
model_utils.fields.AutoCreatedField(
default=django.utils.timezone.now,
editable=False,
verbose_name="created",
),
),
(
"modified",
model_utils.fields.AutoLastModifiedField(
default=django.utils.timezone.now,
editable=False,
verbose_name="modified",
),
),
("maker", models.CharField(max_length=50)),
("model", models.CharField(max_length=50)),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="Rate",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"created",
model_utils.fields.AutoCreatedField(
default=django.utils.timezone.now,
editable=False,
verbose_name="created",
),
),
(
"modified",
model_utils.fields.AutoLastModifiedField(
default=django.utils.timezone.now,
editable=False,
verbose_name="modified",
),
),
(
"rate",
models.PositiveSmallIntegerField(
validators=[
django.core.validators.MinValueValidator(1),
django.core.validators.MaxValueValidator(5),
]
),
),
(
"car",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="rates",
to="cars.car",
),
),
],
options={
"abstract": False,
},
),
]
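# A hedged sketch of the models this migration implies (field names read off the
# operations above; the TimeStampedModel base is an assumption suggested by the
# model_utils created/modified fields):
# from model_utils.models import TimeStampedModel
# class Car(TimeStampedModel):
#     maker = models.CharField(max_length=50)
#     model = models.CharField(max_length=50)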
|
[
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.AutoField"
] |
[((405, 498), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (421, 498), False, 'from django.db import migrations, models\n'), ((1257, 1288), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (1273, 1288), False, 'from django.db import migrations, models\n'), ((1317, 1348), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (1333, 1348), False, 'from django.db import migrations, models\n'), ((1591, 1684), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1607, 1684), False, 'from django.db import migrations, models\n'), ((2833, 2937), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""rates"""', 'to': '"""cars.car"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='rates', to='cars.car')\n", (2850, 2937), False, 'from django.db import migrations, models\n')]
|
import os
import json
from .exceptions import DataLoadException
class JsonDataLoader:
"""
Load base calculation data
from provided JSON file.
"""
def __init__(self, path):
self._path = path
def load(self):
if not os.path.exists(self._path):
raise DataLoadException("Data file does not exist.")
with open(self._path, "r") as f:
data = json.load(f)
return data
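# A minimal usage sketch (the JSON path is hypothetical):
# loader = JsonDataLoader("data/base_calculation.json")
# data = loader.load()  # raises DataLoadException if the file does not exist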
|
[
"json.load",
"os.path.exists"
] |
[((258, 284), 'os.path.exists', 'os.path.exists', (['self._path'], {}), '(self._path)\n', (272, 284), False, 'import os\n'), ((412, 424), 'json.load', 'json.load', (['f'], {}), '(f)\n', (421, 424), False, 'import json\n')]
|
from rest_framework import viewsets, permissions
import ippon.cup_fight.permissions as cfp
import ippon.cup_fight.serializers as cfs
import ippon.models.cup_fight as cfm
class CupFightViewSet(viewsets.ModelViewSet):
queryset = cfm.CupFight.objects.all()
serializer_class = cfs.CupFightSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,
cfp.IsCupFightOwnerOrReadOnly)
|
[
"ippon.models.cup_fight.CupFight.objects.all"
] |
[((234, 260), 'ippon.models.cup_fight.CupFight.objects.all', 'cfm.CupFight.objects.all', ([], {}), '()\n', (258, 260), True, 'import ippon.models.cup_fight as cfm\n')]
|
"""
.. module:: evaluator
:synopsis: Holding all evaluator classes!
.. moduleauthor:: <NAME>
"""
from typing import List, Union, Dict
from abc import ABC, abstractmethod
import numpy as np
import pandas as pd
from scipy.stats import pearsonr
from sklearn.metrics import classification_report
from mtc.core.sentence import Sentence
def Evaluator(name, *args, **kwargs):
"""
All evaluator classes should be called via this method
"""
for cls in EvaluatorBaseClass.__subclasses__():
if cls.__name__ == name:
return cls(*args, **kwargs)
    raise ValueError('No evaluator named %s' % name)
class EvaluatorBaseClass(ABC):
"""
Any evaluator class must inherit from this class
"""
@property
def key_name(self):
"""Name must be unique!"""
return self.__class__.__name__
    def evaluate(self, *args, **kwargs) -> None:
        """Run the evaluator on the given predictions by dispatching to the
        concrete evaluator's _evaluate_internal implementation."""
self._evaluate_internal(*args, **kwargs)
@abstractmethod
    def _evaluate_internal(self, *args, **kwargs) -> None:
        """Private method implementing the actual evaluation logic."""
pass
@abstractmethod
def get_params(self) -> Dict:
pass
class PearsonCorrelationCoefficientEvaluator(EvaluatorBaseClass):
def __init__(self):
super().__init__()
self.results = dict()
@property
def key_name(self):
"""Name must be unique!"""
return f"{self.__class__.__name__}"
def _evaluate_internal(self, y_eval, y_eval_predicted, *args, **kwargs):
# y_train = np.take(exp_data['y'], exp_data['idx_train'], axis=0)
# y_pred_train = np.take(exp_data['y_pred'], exp_data['idx_train'], axis=0)
# y_test = np.take(exp_data['y'], exp_data['idx_dev'], axis=0)
# y_pred_test = np.take(exp_data['y_pred'], exp_data['idx_dev'], axis=0)
self.results['pearson'] = [pearsonr(y_eval_predicted, y_eval)[0]]
# self.results['pearson_test_set'] = [pearsonr(y_pred_test, y_test)[0]]
# print('on training set with pcc: %f' % self.results['pearson'][0])
print('PCC: %f' % self.results['pearson'][0])
def get_params(self) -> Dict:
params = dict()
params['name'] = self.key_name
params['append'] = True
params.update(self.results)
return params
class PredictionAccuracyBySentence(EvaluatorBaseClass):
def __init__(self):
super().__init__()
self.results = None
self.diff_dict = {}
@property
def key_name(self):
"""Name must be unique!"""
return f"{self.__class__.__name__}"
def _evaluate_internal(self, y_eval, y_eval_predicted, test_index, rsa_a_eval, rsa_b_eval):
self.diff_dict = {
'diff': list(abs(y_eval-y_eval_predicted)),
'sen_idx': test_index,
'gold_standard': y_eval,
'pred': y_eval_predicted,
'raw_sentences_a': rsa_a_eval,
'raw_sentences_b': rsa_b_eval
}
def get_params(self) -> Dict:
params = dict()
params['name'] = self.key_name
params['append'] = False
params['diff_dict'] = self.diff_dict
return params
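# A minimal usage sketch (arrays are illustrative; the factory dispatches on the
# subclass name):
# import numpy as np
# evaluator = Evaluator('PearsonCorrelationCoefficientEvaluator')
# evaluator.evaluate(np.array([1.0, 2.0, 3.0]), np.array([1.1, 1.9, 3.2]))
# params = evaluator.get_params()  # {'name': ..., 'append': True, 'pearson': [...]}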
if __name__ == '__main__':
from mtc.core.preprocessor import Preprocessor
# from mtc.core.sentence import Sentence
# from sklearn import linear_model, ensemble
#
# preprocessor = Preprocessor('DefaultPreprocessor')
#
# sentence_a = [
# Sentence('Hallo du, wie geht es dir?', preprocessor, {'ground_truth': 3}),
# Sentence('Mein Name ist Tina.', preprocessor, {'ground_truth':2})
# ]
# sentence_b = [
# Sentence('Hi du, wie geht\'s?', preprocessor),
# Sentence('Mein Name ist Paul', preprocessor),
# ]
#
# clf = linear_model.Lasso(alpha=0.1)
# classifier = Classifier('SelectiveClassifier', clf=clf, classifier_methods=[{'method': 'sequence_matcher_similarity'}])
# evaluator = Evaluator('PCCE')
#
# classifier.fit(sentence_a, sentence_b)
# classifier.predict(sentence_a, sentence_b)
# evaluator.evaluate(sentence_a[0])
|
[
"scipy.stats.pearsonr"
] |
[((2137, 2171), 'scipy.stats.pearsonr', 'pearsonr', (['y_eval_predicted', 'y_eval'], {}), '(y_eval_predicted, y_eval)\n', (2145, 2171), False, 'from scipy.stats import pearsonr\n')]
|
from django.contrib import admin
from django.urls import path, include
from .views import documentation
from .router import router
from rest_framework_jwt.views import obtain_jwt_token
urlpatterns = [
path('tokenAuth/', obtain_jwt_token),
path('', documentation, name='documentation'),
path('', include('main.urls')),
path('', include(router.urls)),
path('admin/', admin.site.urls),
]
|
[
"django.urls.path",
"django.urls.include"
] |
[((208, 244), 'django.urls.path', 'path', (['"""tokenAuth/"""', 'obtain_jwt_token'], {}), "('tokenAuth/', obtain_jwt_token)\n", (212, 244), False, 'from django.urls import path, include\n'), ((250, 295), 'django.urls.path', 'path', (['""""""', 'documentation'], {'name': '"""documentation"""'}), "('', documentation, name='documentation')\n", (254, 295), False, 'from django.urls import path, include\n'), ((373, 404), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (377, 404), False, 'from django.urls import path, include\n'), ((310, 330), 'django.urls.include', 'include', (['"""main.urls"""'], {}), "('main.urls')\n", (317, 330), False, 'from django.urls import path, include\n'), ((346, 366), 'django.urls.include', 'include', (['router.urls'], {}), '(router.urls)\n', (353, 366), False, 'from django.urls import path, include\n')]
|
from queue import Queue
class TreeNode:
def __init__(self, val):
self.val = val
self.left = None
self.right = None
@staticmethod
def from_array(array):
# bfs construct binary tree
root = TreeNode(array[0])
q = Queue()
q.put(root)
i = 1
while not q.empty() and i != len(array):
node = q.get()
node.left = TreeNode(array[i])
q.put(node.left)
if i + 1 != len(array):
node.right = TreeNode(array[i + 1])
q.put(node.right)
i += 2
return root
def ldr(root: TreeNode):
stack = []
node = root
result = []
while node or stack:
# go to the most left node
while node:
stack.append(node)
node = node.left
node = stack.pop()
result.append(node.val)
node = node.right
return ' '.join(list(map(str, result)))
def dlr(root: TreeNode):
stack = []
node = root
result = []
while node or stack:
while node:
result.append(node.val)
stack.append(node)
node = node.left
node = stack.pop()
node = node.right
return ' '.join(list(map(str, result)))
def lrd(root: TreeNode):
"""
    Traverse in DRL order (DLR with left/right swapped), then reverse the output to obtain LRD (post-order).
"""
stack = []
node = root
result = []
while node or stack:
while node:
result.append(node.val)
stack.append(node)
node = node.right
node = stack.pop()
node = node.left
return ' '.join(list(map(str, reversed(result))))
if __name__ == '__main__':
root = TreeNode(1)
root.right = TreeNode(2)
root.right.left = TreeNode(3)
print(ldr(root))
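    # Hedged additions: the other traversals and from_array on the same small
    # trees; expected outputs verified by hand.
    print(dlr(root))  # 1 2 3
    print(lrd(root))  # 3 2 1
    root2 = TreeNode.from_array([1, 2, 3])
    print(ldr(root2))  # 2 1 3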
|
[
"queue.Queue"
] |
[((273, 280), 'queue.Queue', 'Queue', ([], {}), '()\n', (278, 280), False, 'from queue import Queue\n')]
|
from beerbackend.user.models import Beer, families, User
from flask_restful import Resource, Api, reqparse, fields, marshal_with
from flask.json import jsonify
import os
import json
beer_get_parse = reqparse.RequestParser()
beer_get_parse.add_argument('beer_name', dest='beer_name',
type=str, required=True,
                             help='The name of the beer')
PBR = {
"sour": 1,
"malty": 1,
"family": "pale-lager",
"hoppy": 1,
"name": "PBR",
"abv": 1,
"wood": 1,
"bitter": 1,
"color": 1,
"roasty": 1,
"spice": 1,
"sweet": 1,
"fruit": 1
}
class BeerApi(Resource):
def get(self):
args = beer_get_parse.parse_args()
name = args.beer_name
beer = Beer.query.filter(Beer.beer_name == name).first()
print(name)
print(beer)
if beer:
return beer.to_data()
else:
return None
class BeersApi(Resource):
def get(self):
beers = Beer.query.all()
if beers:
return{"beers": [beer.to_data() for beer in beers]}
else:
return {"beers": []}
def put(self):
print(os.getcwd())
with open('beers.json','r') as fin:
beers = json.load(fin)
for beer in beers["beers"]:
family = None
if beer.get("family").lower() in families.values():
family = list(families.values()).index(beer.get("family").lower()) + 1
else:
family = 1 #default to 1 if not a family we know
Beer.create(beer_name=beer["name"], abv=beer["abv"], bitter=beer["bitter"],
color=beer["color"], fruit=beer["fruit"], hoppy=beer["hoppy"],
malty=beer["malty"], roasty=beer["roasty"], sweet=beer["sweet"],
spice=beer["spice"], wood=beer["wood"], family=family,
smoke=beer["smoke"], sour=beer["sour"])
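# A minimal beers.json sketch (field set inferred from the put() handler above;
# values are placeholders):
# {"beers": [{"name": "PBR", "family": "pale-lager", "abv": 1, "bitter": 1,
#             "color": 1, "fruit": 1, "hoppy": 1, "malty": 1, "roasty": 1,
#             "sweet": 1, "spice": 1, "wood": 1, "smoke": 1, "sour": 1}]}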
|
[
"json.load",
"beerbackend.user.models.Beer.create",
"beerbackend.user.models.Beer.query.all",
"os.getcwd",
"flask_restful.reqparse.RequestParser",
"beerbackend.user.models.families.values",
"beerbackend.user.models.Beer.query.filter"
] |
[((201, 225), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {}), '()\n', (223, 225), False, 'from flask_restful import Resource, Api, reqparse, fields, marshal_with\n'), ((965, 981), 'beerbackend.user.models.Beer.query.all', 'Beer.query.all', ([], {}), '()\n', (979, 981), False, 'from beerbackend.user.models import Beer, families, User\n'), ((1145, 1156), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1154, 1156), False, 'import os\n'), ((1222, 1236), 'json.load', 'json.load', (['fin'], {}), '(fin)\n', (1231, 1236), False, 'import json\n'), ((724, 765), 'beerbackend.user.models.Beer.query.filter', 'Beer.query.filter', (['(Beer.beer_name == name)'], {}), '(Beer.beer_name == name)\n', (741, 765), False, 'from beerbackend.user.models import Beer, families, User\n'), ((1573, 1890), 'beerbackend.user.models.Beer.create', 'Beer.create', ([], {'beer_name': "beer['name']", 'abv': "beer['abv']", 'bitter': "beer['bitter']", 'color': "beer['color']", 'fruit': "beer['fruit']", 'hoppy': "beer['hoppy']", 'malty': "beer['malty']", 'roasty': "beer['roasty']", 'sweet': "beer['sweet']", 'spice': "beer['spice']", 'wood': "beer['wood']", 'family': 'family', 'smoke': "beer['smoke']", 'sour': "beer['sour']"}), "(beer_name=beer['name'], abv=beer['abv'], bitter=beer['bitter'],\n color=beer['color'], fruit=beer['fruit'], hoppy=beer['hoppy'], malty=\n beer['malty'], roasty=beer['roasty'], sweet=beer['sweet'], spice=beer[\n 'spice'], wood=beer['wood'], family=family, smoke=beer['smoke'], sour=\n beer['sour'])\n", (1584, 1890), False, 'from beerbackend.user.models import Beer, families, User\n'), ((1356, 1373), 'beerbackend.user.models.families.values', 'families.values', ([], {}), '()\n', (1371, 1373), False, 'from beerbackend.user.models import Beer, families, User\n'), ((1409, 1426), 'beerbackend.user.models.families.values', 'families.values', ([], {}), '()\n', (1424, 1426), False, 'from beerbackend.user.models import Beer, families, User\n')]
|
from vanilla import *
from mojo.extensions import getExtensionDefault, setExtensionDefault
class DrawBotSettingsController(object):
def __init__(self):
self.w = Window((250, 45), "DrawBot Settings")
self.w.openPythonFilesInDrawBot = CheckBox((10, 10, -10, 22),
"Open .py files directly in DrawBot.",
value=getExtensionDefault("com.drawBot.openPyFileDirectly", False),
callback=self.openPythonFilesInDrawBotCallback)
self.w.open()
def openPythonFilesInDrawBotCallback(self, sender):
setExtensionDefault("com.drawBot.openPyFileDirectly", sender.get())
DrawBotSettingsController()
|
[
"mojo.extensions.getExtensionDefault"
] |
[((355, 415), 'mojo.extensions.getExtensionDefault', 'getExtensionDefault', (['"""com.drawBot.openPyFileDirectly"""', '(False)'], {}), "('com.drawBot.openPyFileDirectly', False)\n", (374, 415), False, 'from mojo.extensions import getExtensionDefault, setExtensionDefault\n')]
|
# -*- encoding:utf-8 -*-
"""
Example buy-timing factor: a dynamically adaptive dual moving average strategy
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import math
import numpy as np
from .ABuFactorBuyBase import AbuFactorBuyXD, BuyCallMixin
from ..IndicatorBu.ABuNDMa import calc_ma_from_prices
from ..CoreBu.ABuPdHelper import pd_resample
from ..TLineBu.ABuTL import AbuTLine
__author__ = '阿布'
__weixin__ = 'abu_quant'
# noinspection PyAttributeOutsideInit
class AbuDoubleMaBuy(AbuFactorBuyXD, BuyCallMixin):
"""示例买入动态自适应双均线策略"""
def _init_self(self, **kwargs):
"""
        Optional kwargs: fast: fast MA period; unset by default, in which case an adaptive dynamic fast line is used
        Optional kwargs: slow: slow MA period; unset by default, in which case an adaptive dynamic slow line is used
        Optional kwargs: resample_max: maximum resampling period for the dynamic slow line, default 100, i.e. the dynamic slow line is at most 100
        Optional kwargs: resample_min: minimum resampling period for the dynamic slow line, default 10, i.e. the dynamic slow line is at least 10
        Optional kwargs: change_threshold: selection threshold for the dynamic slow line, default 0.12
"""
        # fast MA period; the default falls back to a 5-day moving average
self.ma_fast = kwargs.pop('fast', -1)
self.dynamic_fast = False
if self.ma_fast == -1:
self.ma_fast = 5
self.dynamic_fast = True
        # slow MA period; the default falls back to a 60-day moving average
self.ma_slow = kwargs.pop('slow', -1)
self.dynamic_slow = False
if self.ma_slow == -1:
self.ma_slow = 60
self.dynamic_slow = True
        # maximum resampling period for the dynamic slow line, default 100
self.resample_max = kwargs.pop('resample_max', 100)
        # minimum resampling period for the dynamic slow line, default 10
self.resample_min = kwargs.pop('resample_min', 10)
        # selection threshold for the dynamic slow line, default 0.12
self.change_threshold = kwargs.pop('change_threshold', 0.12)
if self.ma_fast >= self.ma_slow:
            # the slow period must be greater than the fast period
raise ValueError('ma_fast >= self.ma_slow !')
        # the xd window must be one day longer than ma_slow so the MA calculation yields both today's and yesterday's values, used to detect golden/death crosses
kwargs['xd'] = self.ma_slow + 1
        # once xd is set, the base class initialization for xd can be used directly
super(AbuDoubleMaBuy, self)._init_self(**kwargs)
        # the name shown in the generated orders_pd output
self.factor_name = '{}:fast={},slow={}'.format(self.__class__.__name__, self.ma_fast, self.ma_slow)
def _dynamic_calc_fast(self, today):
"""
        Dynamically decide the fast line value from how volatile the benchmark's
        last month of movement is, using these rules:
        If the benchmark's last month of movement can be expressed by:
        a 1st-degree fit:           fast=slow * 0.05 eg: slow=60->fast=60*0.05=3
        a 2nd-degree fit:           fast=slow * 0.15 eg: slow=60->fast=60*0.15=9
        a 3rd-degree fit:           fast=slow * 0.3  eg: slow=60->fast=60*0.3=18
        a 4th-degree or higher fit: fast=slow * 0.5  eg: slow=60->fast=60*0.5=30
"""
        # the strategy holds self.benchmark, an AbuBenchmark instance (the trading benchmark); benchmark.kl_pd is the corresponding market movement
benchmark_df = self.benchmark.kl_pd
        # get today's row of the benchmark
benchmark_today = benchmark_df[benchmark_df.date == today.date]
if benchmark_today.empty:
            # default to 0.15 of the slow line
return math.ceil(self.ma_slow * 0.15)
        # prepare slice start/end keys for the benchmark's last month of movement
end_key = int(benchmark_today.iloc[0].key)
start_key = end_key - 20
if start_key < 0:
            # default to 0.15 of the slow line
return math.ceil(self.ma_slow * 0.15)
        # slice out the 20 days of data leading up to today
benchmark_month = benchmark_df[start_key:end_key + 1]
        # build an AbuTLine object from the benchmark's last month of closing prices
benchmark_month_line = AbuTLine(benchmark_month.close, 'benchmark month line')
        # compute the minimum polynomial degree needed to represent this month's trend curve
least = benchmark_month_line.show_least_valid_poly(show=False)
        if least == 1:
            # a 1st-degree fit: fast=slow * 0.05 eg: slow=60->fast=60*0.05=3
            return math.ceil(self.ma_slow * 0.05)
        elif least == 2:
            # a 2nd-degree fit: fast=slow * 0.15 eg: slow=60->fast=60*0.15=9
            return math.ceil(self.ma_slow * 0.15)
        elif least == 3:
            # a 3rd-degree fit: fast=slow * 0.3 eg: slow=60->fast=60*0.3=18
            return math.ceil(self.ma_slow * 0.3)
        else:
            # a 4th-degree or higher fit: fast=slow * 0.5 eg: slow=60->fast=60*0.5=30
            return math.ceil(self.ma_slow * 0.5)
def _dynamic_calc_slow(self, today):
"""
        Dynamically decide the slow line value, using these rules:
        Slice the recent financial time series and resample it at a trial period;
        apply pct_change to the resampled result, take the absolute value of the
        pct_change series, then average it, i.e. compute the mean change magnitude
        within the resampling period. The trial period iterates over 10, 15, 20,
        30, ...; the first period whose mean change magnitude > 0.12 becomes slow.
"""
last_kl = self.past_today_kl(today, self.resample_max)
if last_kl.empty:
            # return the slow line default of 60
return 60
for slow in np.arange(self.resample_min, self.resample_max, 5):
rule = '{}D'.format(slow)
change = abs(pd_resample(last_kl.close, rule, how='mean').pct_change()).mean()
"""
eg: pd_resample(last_kl.close, rule, how='mean')
2014-07-23 249.0728
2014-09-03 258.3640
2014-10-15 240.8663
2014-11-26 220.1552
2015-01-07 206.0070
2015-02-18 198.0932
2015-04-01 217.9791
2015-05-13 251.3640
2015-06-24 266.4511
2015-08-05 244.3334
2015-09-16 236.2250
2015-10-28 222.0441
2015-12-09 222.0574
2016-01-20 177.2303
2016-03-02 226.8766
2016-04-13 230.6000
2016-05-25 216.7596
2016-07-06 222.6420
abs(pd_resample(last_kl.close, rule, how='mean').pct_change())
2014-09-03 0.037
2014-10-15 0.068
2014-11-26 0.086
2015-01-07 0.064
2015-02-18 0.038
2015-04-01 0.100
2015-05-13 0.153
2015-06-24 0.060
2015-08-05 0.083
2015-09-16 0.033
2015-10-28 0.060
2015-12-09 0.000
2016-01-20 0.202
2016-03-02 0.280
2016-04-13 0.016
2016-05-25 0.060
2016-07-06 0.027
abs(pd_resample(last_kl.close, rule, how='mean').pct_change()).mean():
0.080
"""
if change > self.change_threshold:
"""
                Return the first slow whose mean change exceeds change_threshold.
                change_threshold defaults to 0.12; period-breakout strategies generally
                need at least 0.08, and 0.12 leaves arbitrage room for the fast line.
"""
return slow
        # if no period in np.arange(min, max, 5) qualifies, return max
return self.resample_max
def fit_month(self, today):
        # fit_month is executed once per month during the backtest
        if self.dynamic_slow:
            # ma_slow must be computed first: the dynamic fast value depends on slow
            self.ma_slow = self._dynamic_calc_slow(today)
        if self.dynamic_fast:
            # dynamically compute the fast line
            self.ma_fast = self._dynamic_calc_fast(today)
        # after the dynamic recalculation, update the name shown in the generated orders_pd
self.factor_name = '{}:fast={},slow={}'.format(self.__class__.__name__, self.ma_fast, self.ma_slow)
# import logging
# logging.debug('{}:{}-fast={}|slow={}'.format(self.kl_pd.name, today.date, self.ma_fast, self.ma_slow))
def fit_day(self, today):
"""双均线买入择时因子,信号快线上穿慢行形成金叉做为买入信号"""
# 计算快线
fast_line = calc_ma_from_prices(self.xd_kl.close, int(self.ma_fast), min_periods=1)
# 计算慢线
slow_line = calc_ma_from_prices(self.xd_kl.close, int(self.ma_slow), min_periods=1)
if len(fast_line) >= 2 and len(slow_line) >= 2:
            # today's fast line value
            fast_today = fast_line[-1]
            # yesterday's fast line value
            fast_yesterday = fast_line[-2]
            # today's slow line value
            slow_today = slow_line[-1]
            # yesterday's slow line value
            slow_yesterday = slow_line[-2]
            if slow_yesterday >= fast_yesterday and fast_today > slow_today:
                # fast line crosses above the slow line, a buy golden cross; today's close was used, so buy tomorrow
return self.buy_tomorrow()
"""可以选择是否覆盖AbuFactorBuyXD中的buy_tomorrow来增大交易频率,默认基类中self.skip_days = self.xd降低了频率"""
# def buy_tomorrow(self):
# return self.make_buy_order(self.today_ind)
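# A hedged usage sketch following the abu framework's buy_factors convention
# (parameter values are illustrative, not recommendations):
# buy_factors = [{'class': AbuDoubleMaBuy, 'fast': 5, 'slow': 60}]
# buy_factors_dynamic = [{'class': AbuDoubleMaBuy}]  # fully adaptive fast/slow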
|
[
"numpy.arange",
"math.ceil"
] |
[((4463, 4513), 'numpy.arange', 'np.arange', (['self.resample_min', 'self.resample_max', '(5)'], {}), '(self.resample_min, self.resample_max, 5)\n', (4472, 4513), True, 'import numpy as np\n'), ((2821, 2851), 'math.ceil', 'math.ceil', (['(self.ma_slow * 0.15)'], {}), '(self.ma_slow * 0.15)\n', (2830, 2851), False, 'import math\n'), ((3046, 3076), 'math.ceil', 'math.ceil', (['(self.ma_slow * 0.15)'], {}), '(self.ma_slow * 0.15)\n', (3055, 3076), False, 'import math\n'), ((3510, 3540), 'math.ceil', 'math.ceil', (['(self.ma_slow * 0.05)'], {}), '(self.ma_slow * 0.05)\n', (3519, 3540), False, 'import math\n'), ((3653, 3683), 'math.ceil', 'math.ceil', (['(self.ma_slow * 0.15)'], {}), '(self.ma_slow * 0.15)\n', (3662, 3683), False, 'import math\n'), ((3795, 3824), 'math.ceil', 'math.ceil', (['(self.ma_slow * 0.3)'], {}), '(self.ma_slow * 0.3)\n', (3804, 3824), False, 'import math\n'), ((3928, 3957), 'math.ceil', 'math.ceil', (['(self.ma_slow * 0.5)'], {}), '(self.ma_slow * 0.5)\n', (3937, 3957), False, 'import math\n')]
|