code | apis | extract_api
---|---|---
from flask import json, jsonify
from datetime import datetime
LOGFAILS = "chats.txt"
def lasi(adresats):
chata_rindas = []
with open(LOGFAILS, "r", encoding="utf-8") as f:
for rinda in f:
r = json.loads(rinda)
if "adresats" in r:
if r["adresats"] == adresats or r["adresats"] == "visi" or r["vards"] == adresats:
chata_rindas.append(r)
return jsonify({"chats": chata_rindas})
LABAIS_VARDS = "vau"
SLIKTIE_VARDI = ["ņau", "kaķis"]
def pieraksti_zinju(dati):
    # limit the message length
dati["chats"]["zinja"] = dati["chats"]["zinja"][0:140]
now = datetime.now()
laiks = now.strftime("%Y/%m/%d, %H:%M:%S")
    # Censorship
chata_rindas = []
zinjas_vardi = dati["chats"]["zinja"].split()
for vards in zinjas_vardi:
if vards in SLIKTIE_VARDI:
chata_rindas.append(LABAIS_VARDS)
else:
chata_rindas.append(vards)
dati["chats"]["zinja"]=" ".join(chata_rindas)
with open(LOGFAILS, "a", newline="", encoding="utf-8") as f:
dati["chats"]["laiks"] = laiks
f.write(json.dumps(dati["chats"]) + "\n")
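# --- Hedged usage sketch (added; not part of the original file) ---
# Minimal Flask wiring for the two helpers above. The app object, route
# names, and request handling are illustrative assumptions; the original
# file only defines lasi() and pieraksti_zinju().
from flask import Flask, request
app = Flask(__name__)

@app.route("/chats/<adresats>")
def get_chats(adresats):
    # lasi() already returns a jsonify() response
    return lasi(adresats)

@app.route("/chats", methods=["POST"])
def post_chat():
    # assumes a JSON body shaped like {"chats": {"vards": ..., "adresats": ..., "zinja": ...}}
    pieraksti_zinju(request.get_json())
    return jsonify({"ok": True})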
|
[
"flask.json.dumps",
"flask.jsonify",
"datetime.datetime.now",
"flask.json.loads"
] |
[((426, 458), 'flask.jsonify', 'jsonify', (["{'chats': chata_rindas}"], {}), "({'chats': chata_rindas})\n", (433, 458), False, 'from flask import json, jsonify\n'), ((640, 654), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (652, 654), False, 'from datetime import datetime\n'), ((222, 239), 'flask.json.loads', 'json.loads', (['rinda'], {}), '(rinda)\n', (232, 239), False, 'from flask import json, jsonify\n'), ((1125, 1150), 'flask.json.dumps', 'json.dumps', (["dati['chats']"], {}), "(dati['chats'])\n", (1135, 1150), False, 'from flask import json, jsonify\n')]
|
import sys
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import (QApplication, QMainWindow, QCheckBox, QVBoxLayout,
QLineEdit, QPushButton, QWidget, QHBoxLayout)
class SuperCheckbox(QHBoxLayout):
def __init__(self, parent=None):
super(SuperCheckbox, self).__init__(parent)
self.checkbox = QCheckBox()
self.addWidget(self.checkbox)
self.edit = QLineEdit()
self.edit.setText('Placeholder')
self.addWidget(self.edit)
button = QPushButton()
button.setIcon(QIcon('icons/plus.svg'))
button.clicked.connect(self.removeThis)
button.resize(self.sizeHint())
self.addWidget(button)
def checkState(self):
return self.checkbox.checkState()
def removeThis(self):
print('Removed !')
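# --- Hedged sketch (an assumption about the intended behaviour) ---
# removeThis() above only prints. Actually deleting the row could look like
# the helper below, which empties a layout and detaches it from its parent;
# wiring it into removeThis is left as an illustrative choice.
def remove_layout_row(layout):
    """Delete every widget in `layout` and detach the layout itself."""
    while layout.count():
        item = layout.takeAt(0)
        widget = item.widget()
        if widget is not None:
            widget.deleteLater()
    layout.setParent(None)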
class CentralWidget(QWidget):
items = []
def __init__(self, parent):
super(CentralWidget, self).__init__(parent)
self.container = QVBoxLayout(self)
btn = QPushButton('Dodaj')
btn.clicked.connect(self.addSuperCheckbox)
btn.resize(btn.sizeHint())
self.container.addWidget(btn)
def addSuperCheckbox(self):
item = SuperCheckbox()
self.container.addLayout(item)
self.items.append(item)
class GlowneOkienko(QMainWindow):
def __init__(self):
super(GlowneOkienko, self).__init__(None)
self.setWindowTitle('Dynamic layout & typ złożony')
self.setWindowIcon(QIcon('icons/exit.svg'))
self.setGeometry(500, 400, 600, 400)
self.widget = CentralWidget(self)
self.setCentralWidget(self.widget)
if __name__ == '__main__':
app = QApplication([])
window = GlowneOkienko()
window.show()
sys.exit(app.exec_())
|
[
"PyQt5.QtGui.QIcon",
"PyQt5.QtWidgets.QLineEdit",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtWidgets.QCheckBox",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtWidgets.QApplication"
] |
[((1690, 1706), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['[]'], {}), '([])\n', (1702, 1706), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QCheckBox, QVBoxLayout, QLineEdit, QPushButton, QWidget, QHBoxLayout\n'), ((347, 358), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', ([], {}), '()\n', (356, 358), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QCheckBox, QVBoxLayout, QLineEdit, QPushButton, QWidget, QHBoxLayout\n'), ((418, 429), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (427, 429), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QCheckBox, QVBoxLayout, QLineEdit, QPushButton, QWidget, QHBoxLayout\n'), ((523, 536), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ([], {}), '()\n', (534, 536), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QCheckBox, QVBoxLayout, QLineEdit, QPushButton, QWidget, QHBoxLayout\n'), ((983, 1000), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', (['self'], {}), '(self)\n', (994, 1000), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QCheckBox, QVBoxLayout, QLineEdit, QPushButton, QWidget, QHBoxLayout\n'), ((1016, 1036), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""Dodaj"""'], {}), "('Dodaj')\n", (1027, 1036), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QCheckBox, QVBoxLayout, QLineEdit, QPushButton, QWidget, QHBoxLayout\n'), ((560, 583), 'PyQt5.QtGui.QIcon', 'QIcon', (['"""icons/plus.svg"""'], {}), "('icons/plus.svg')\n", (565, 583), False, 'from PyQt5.QtGui import QIcon\n'), ((1494, 1517), 'PyQt5.QtGui.QIcon', 'QIcon', (['"""icons/exit.svg"""'], {}), "('icons/exit.svg')\n", (1499, 1517), False, 'from PyQt5.QtGui import QIcon\n')]
|
"""
Burp utility module for Python Burp extensions. Author: <NAME>
License: MIT
# Usage
1. Add it as a Python Burp module and use `from burputils import BurpUtils`.
For more info see:
https://parsiya.net/blog/2018-12-19-python-utility-modules-for-burp-extensions/
2. Copy the files to the same path as your extension and use `from burputils
import BurpUtils`.
* These extra files do not have to be loaded in Burp, they just need to
be in the same path.
3. Copy/paste used code into your extension.
Please see README for details.
"""
class BurpUtils:
"""Helpers for Burp Python extensions"""
def __init__(self, callbacks):
"""Set IExtensionHelpers
Set with callbacks.getHelpers() in registerExtenderCallbacks.
"""
self.helpers = callbacks.getHelpers()
self.callbacks = callbacks
def getInfoFromBytes(self, isRequest, rawBytes):
"""Process request or response from raw bytes.
Returns IRequestInfo or IResponseInfo respectively.
Use getInfo instead if you have access to an IHttpRequestResponse
object. It allows you to use all methods like IRequestInfo.getUrl()
later.
Args:
* isRequest (bool): Set to true if rawBytes is a request. false if it's a
response.
* rawBytes (byte[]): Raw bytes containing the request or response.
"""
if isRequest:
return self.helpers.analyzeRequest(rawBytes)
else:
return self.helpers.analyzeResponse(rawBytes)
def getInfo(self, isRequest, requestResponse):
"""Process request or response from IHttpRequestResponse.
Returns IRequestInfo or IResponseInfo respectively.
This method is preferable to getInfoFromBytes.
Args:
        * isRequest (bool): Set to true if requestResponse contains a request.
          false if it's a response.
* requestResponse (IHttpRequestResponse): Object containing the request
or the response.
"""
if isRequest:
return self.helpers.analyzeRequest(requestResponse)
else:
return self.helpers.analyzeResponse(requestResponse.getResponse())
def getBodyFromBytes(self, isRequest, rawBytes):
"""Extracts the body bytes from a request or response raw bytes.
Returns a byte[] containing the body of the request or response.
Args:
* isRequest (bool): Set to true if rawBytes is a request. false if it's a
response.
* rawBytes (byte[]): Raw bytes containing the request or response.
"""
info = self.getInfoFromBytes(isRequest, rawBytes)
        return rawBytes[info.getBodyOffset():]
def getBody(self, isRequest, requestResponse):
"""Extracts the body bytes of an IHttpRequestResponse object.
Returns a byte[] containing the body of the request or response.
Args:
        * isRequest (bool): Set to true if requestResponse contains a request.
          false if it's a response.
* requestResponse (IHttpRequestResponse): Object containing the request
or the response.
"""
info = self.getInfo(isRequest, requestResponse)
if isRequest:
return requestResponse.getRequest()[info.getBodyOffset():]
else:
return requestResponse.getResponse()[info.getBodyOffset():]
def getHeaders(self, info):
"""Extract the headers from an IRequestInfo or IResponseInfo object.
Returns a Headers object with the headers.
Args:
* info (IRequestInfo or IResponseInfo): Request info. Use the output
from getInfo or getInfoFromBytes.
"""
from headers import Headers
hdr = Headers()
# this is IRequestInfo.getHeaders() or IResponseInfo.getHeaders() from Burp
rawHdr = info.getHeaders()
hdr.importRaw(rawHdr)
return hdr
def setRequestResponse(self, isRequest, message, requestResponse):
"""Set the request or response for an IHttpRequestResponse object.
Returns the modified requestResponse.
Args:
* isRequest (bool): True if message is a request. False for response.
* message (byte[]): Raw bytes of the request or response. Usually comes
from buildHttpMessage.
* requestResponse (IHttpRequestResponse): RequestResponse to be
modified.
"""
# if isRequest is True, use setRequest. Otherwise, setResponse.
if isRequest:
requestResponse.setRequest(message)
else:
requestResponse.setResponse(message)
        return requestResponse
def runExternal(self, command, args):
"""Runs command with args via the command line.
For the sake of simplicity, everything after the first item will be in a
list of strings.
Executes "command args[0] args[1] ...".
Security implication: This is code-execution-as-a-service.
Args:
* command (string): Name of the command.
* args (list of strings): Arguments in a Python list.
"""
# alternatively, we could accept a string containing all the commands,
# then run shlex.split and pass the result to popen.
from subprocess import Popen, PIPE
import sys
# insert the command at the start of the list, everything gets shifted.
        args.insert(0, command)
# run everything
proc = Popen(args, stdout=PIPE, stderr=PIPE)
output = proc.stdout.read()
proc.stdout.close()
err = proc.stderr.read()
proc.stderr.close()
sys.stdout.write(err)
return output
def setHighlight(self, color, requestResponse):
"""Set the highlight color for requestResponse in Burp's HTTP History.
Returns the modified requestResponse.
Args:
* color (string): Highlight color.
One of: red, orange, yellow, green, cyan, blue, pink, magenta, gray
        * requestResponse (IHttpRequestResponse): RequestResponse to be
modified.
"""
validColors = ["red","orange","yellow","green","cyan","blue","pink","magenta","gray"]
# convert the input to lowercase.
color = color.lower()
        if color not in validColors:
            return None
requestResponse.setHighlight(color)
return requestResponse
def bytesToString(self, data):
# type: (bytearray) -> (str)
"""Converts a byte[] to string.
Args:
* data (bytearray): Byte array to be converted to string."""
return self.helpers.bytesToString(data)
def getPath(self, reqResp):
# type: (IHttpRequestResponse) -> (str)
"""Analyzes a byte[] of a request and returns the path.
Args:
* reqResp (IHttpRequestResponse): The RequestResponse with the path."""
if reqResp is None:
return ""
info = self.helpers.analyzeRequest(reqResp)
return info.getUrl().getFile()
def burpToolName(self, toolFlag):
# type: (int) -> (str)
"""Returns the descriptive name for the Burp tool identified by
toolFlag.
Args:
* toolFlag (int): The flag representing the Burp tool."""
return self.callbacks.getToolName(toolFlag)
|
[
"headers.Headers",
"sys.stdout.write",
"subprocess.Popen"
] |
[((3942, 3951), 'headers.Headers', 'Headers', ([], {}), '()\n', (3949, 3951), False, 'from headers import Headers\n'), ((5729, 5766), 'subprocess.Popen', 'Popen', (['args'], {'stdout': 'PIPE', 'stderr': 'PIPE'}), '(args, stdout=PIPE, stderr=PIPE)\n', (5734, 5766), False, 'from subprocess import Popen, PIPE\n'), ((5905, 5926), 'sys.stdout.write', 'sys.stdout.write', (['err'], {}), '(err)\n', (5921, 5926), False, 'import sys\n')]
|
"""Utilities for querying the Windows registry.
@see: Cake Build System (http://sourceforge.net/projects/cake-build)
@copyright: Copyright (c) 2010 <NAME>, <NAME>.
@license: Licensed under the MIT license.
"""
import _winreg as winreg # Do this so Python 2to3 conversion works.
import sys
import cake.system
_shownWow64Warning = False
# Define locally here since some versions of the winreg module don't have them
KEY_WOW64_64KEY = 0x0100
KEY_WOW64_32KEY = 0x0200
if cake.system.isWindows64():
_readAccessModes = (winreg.KEY_READ | KEY_WOW64_64KEY, winreg.KEY_READ | KEY_WOW64_32KEY)
else:
_readAccessModes = (winreg.KEY_READ,)
def queryString(key, subKey, name):
"""Queries a string value from the Windows registry.
On 64-bit Windows this function will first try to query the value from
the 64-bit registry. If the value doesn't exist there it will then try to
find the value in the 32-bit registry.
@param key: The key to query, eg: winreg.HKEY_LOCAL_MACHINE
@type key: string
@param subKey: The subkey to query, eg: r"SOFTWARE\Microsoft"
@type subKey: string
@param name: The name to query, eg: "InstallDir"
@type name: string
@return: The value queried.
@rtype: string
@raise WindowsError: If the value could not be found/read.
"""
for sam in _readAccessModes:
try:
keyHandle = winreg.OpenKey(key, subKey, 0, sam)
try:
return str(winreg.QueryValueEx(keyHandle, name)[0])
finally:
winreg.CloseKey(keyHandle)
except WindowsError:
if sam is _readAccessModes[-1]:
raise
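# --- Hedged usage sketch (illustrative values, not from the source) ---
# Querying a value with the 64-bit-then-32-bit fallback described above:
if __name__ == "__main__":
    try:
        programFiles = queryString(
            winreg.HKEY_LOCAL_MACHINE,
            r"SOFTWARE\Microsoft\Windows\CurrentVersion",
            "ProgramFilesDir")
        sys.stdout.write(programFiles + "\n")
    except WindowsError:
        sys.stdout.write("value not found\n")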
|
[
"_winreg.QueryValueEx",
"_winreg.CloseKey",
"_winreg.OpenKey"
] |
[((1348, 1383), '_winreg.OpenKey', 'winreg.OpenKey', (['key', 'subKey', '(0)', 'sam'], {}), '(key, subKey, 0, sam)\n', (1362, 1383), True, 'import _winreg as winreg\n'), ((1478, 1504), '_winreg.CloseKey', 'winreg.CloseKey', (['keyHandle'], {}), '(keyHandle)\n', (1493, 1504), True, 'import _winreg as winreg\n'), ((1414, 1450), '_winreg.QueryValueEx', 'winreg.QueryValueEx', (['keyHandle', 'name'], {}), '(keyHandle, name)\n', (1433, 1450), True, 'import _winreg as winreg\n')]
|
import random
lives = 5  # starting number of lives
words = ['antelope', 'planet', 'science', 'measurement',
'africa', 'space', 'systems', 'continental']
secret_word = random.choice(words)
clue = ['?'] * len(secret_word)  # one placeholder per letter of the secret word
heart_symbol = u'\u2764'
print(heart_symbol * 5)
guessed_word_correctly = False
def update_clue(guessed_letter, secret_word, clue):
index = 0
while index < len(secret_word):
if guessed_letter == secret_word[index]:
clue[index] = guessed_letter
index = index + 1
while lives > 0:
print(clue)
print('Lives left: ' + heart_symbol * lives)
guess = input('Guess a letter or the whole word: ')
if guess == secret_word:
guessed_word_correctly = True
break
if guess in secret_word:
update_clue(guess, secret_word, clue)
else:
print('Incorrect. You lose a life')
lives = lives - 1
if guessed_word_correctly:
print('You won! The secret word was ' + secret_word)
else:
print('You lost! The secret word was ' + secret_word)
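# --- Hedged note (an assumption about the intended rules) ---
# As written, only guessing the whole word sets guessed_word_correctly;
# revealing every letter one by one never ends the loop. One possible fix,
# placed right after update_clue(guess, secret_word, clue) inside the loop:
#
#     if '?' not in clue:
#         guessed_word_correctly = True
#         break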
|
[
"random.choice"
] |
[((180, 200), 'random.choice', 'random.choice', (['words'], {}), '(words)\n', (193, 200), False, 'import random\n')]
|
import numpy as np
import mpnum as mp
import tmps
from tmps.utils import state_reduction_as_ndarray, convert, broadcast_number_ground_state, get_thermal_state
import time
from scipy.special import factorial
import math
def get_spin_initial_state(theta, mpa_type='mps'):
"""
Returns the initial state for the spin impurity:
psi_0 = cos(theta) |1> + sin(theta) |0>
in the desired tensor network form (mps, mpo, pmps)
"""
ground = np.array([0.0, np.sin(theta)])
excited = np.array([np.cos(theta), 0.0])
return convert.to_mparray(ground + excited, mpa_type)
def get_spin_boson_0T_chain_initial_state(theta, bath_local_dim, nof_coefficients):
"""
Returns the full initial state (vacuum state) for 0T chain with nof_coefficients sites and a local dimension of
bath_local_dim.
"""
sys_psi_0 = get_spin_initial_state(theta)
bath_psi_0 = broadcast_number_ground_state(bath_local_dim, nof_coefficients)
return mp.chain([sys_psi_0, bath_psi_0])
def get_spin_boson_0T_star_initial_state(theta, system_index, bath_local_dim, nof_coefficients):
"""
Returns the full initial state (vacuum state) for 0T star with nof_coefficients sites and a local dimension of
bath_local_dim. The impurity is located at system_index.
"""
sys_psi_0 = get_spin_initial_state(theta)
# Initial states of the bath sites left and right of the system:
left_bath_psi_0, right_bath_psi_0 = tmps.utils.broadcast_number_ground_state(bath_local_dim, system_index), \
tmps.utils.broadcast_number_ground_state(bath_local_dim,
nof_coefficients - system_index)
return mp.chain([left_bath_psi_0, sys_psi_0, right_bath_psi_0]
if left_bath_psi_0 is not None else [sys_psi_0, right_bath_psi_0])
def _compute_finiteT_chain_residual(psi_0, mpa_type, dims):
"""
Returns residual of the finite-temperature initial state of the bath. List of populations in
the highest energy state of each mode
"""
res = []
for index, dim in enumerate(dims):
res.append(np.real(state_reduction_as_ndarray(psi_0, mpa_type, startsite=index)[dim - 1, dim - 1]))
return res
def get_spin_boson_finiteT_chain_initial_state(theta, beta, h_site, h_bond, bath_local_dim, nof_coefficients,
mpa_type='pmps',
nof_steps=None, state_compression_kwargs=None,
op_compression_kwargs=None, second_order_trotter=False,
psi_0_compression_kwargs=None, residual=True,
force_pmps_evolution=True, verbose=True):
"""
Computes the initial state for the finite temperature spin_boson model in chain geometry.
The bath state is computed via imaginary time evolution.
:param theta: Spin parameter for psi_0 = cos(theta) |1> + sin(theta) |0>
:param beta: Inverse temperature of the bath
:param h_site: Bath local Hamiltonian list
:param h_bond: Bath nearest neighbor coupling Hamiltonian list
:param bath_local_dim: Local dimension of the bath
:param nof_coefficients: Number of bath sites
:param mpa_type: MPS type of the chain (mps, mpo, pmps)
:param nof_steps: Number of steps for the imaginary time evolution
:param state_compression_kwargs: Keyword args for the imaginary time evolution compression
:param op_compression_kwargs: Keyword args for the imaginary time evolution operator pre-compression
:param second_order_trotter: Set True for second order trotter based imaginary time evolution
:param psi_0_compression_kwargs: Keyword args for the imaginary time evolution initial state compression
:param residual: Set True to compute List of populations in the highest energy state of each bath mode.
:param force_pmps_evolution: Set True to always use pmps for the imaginary time evolution
:param verbose: Set true to make imaginary time evolution verbose
:return: Initial state of system and bath as mps, mpo or pmps, info dict
"""
assert mpa_type == 'mpo' or mpa_type == 'pmps'
if nof_steps is None:
nof_steps = int(beta*100)
t0_wall = time.clock()
t0_proc = time.perf_counter()
if isinstance(bath_local_dim, int):
dims = [bath_local_dim] * nof_coefficients
else:
raise AssertionError('Unsupported data type for fixed_dim')
psi_0, info = tmps.chain.thermal.from_hamiltonian(beta, mpa_type, h_site, h_bond,
nof_steps=nof_steps,
state_compression_kwargs=state_compression_kwargs,
op_compression_kwargs=op_compression_kwargs,
second_order_trotter=second_order_trotter,
psi_0_compression_kwargs=psi_0_compression_kwargs,
force_pmps_evolution=force_pmps_evolution,
verbose=verbose)
tf_proc = time.perf_counter() - t0_proc
tf_wall = time.clock() - t0_wall
info['walltime'] = tf_wall
info['cpu_time'] = tf_proc
info['bath_dims'] = dims
if residual:
res = _compute_finiteT_chain_residual(psi_0, mpa_type, dims)
max_res = np.max(res)
info['res'] = res
info['max_res'] = max_res
else:
info['res'] = None
info['max_res'] = None
print('Finite T ground state residual ', info['res'])
print('Finite T ground state max. residual: ', info['max_res'])
sys_psi_0 = get_spin_initial_state(theta, mpa_type=mpa_type)
return mp.chain([sys_psi_0, psi_0]), info
def get_star_local_dims(beta, xi, fixed_dim=None, high_energy_pop=1e-20, sitewise=False):
"""
Computes the local dimension for the finite temperature star bath for the spin_boson model.
:param beta: Inverse temperature of the bath
:param xi: Star geometry bath energies
:param fixed_dim: Uses this fixed dimension for the star evolution
:param high_energy_pop: Chooses local dimension, such that the population in the highest energy of each bath mode
stays below this threshold
:param sitewise: If set False the local dimension is chosen uniformly for all sites to be the
highest local dimension from the high_energy_pop calculation.
:returns: List of bath dimensions
"""
if fixed_dim is None:
dims = []
for xi_i in xi:
a = 1 / (np.exp(beta * xi_i) - 1)
dims.append(math.ceil(1 / (beta * xi_i) * np.log(1 + 1 / (high_energy_pop * a))))
if sitewise:
return dims
else:
return [np.max(dims)]*len(xi)
else:
if isinstance(fixed_dim, (list, tuple)):
assert len(fixed_dim) == len(xi)
return fixed_dim
elif isinstance(fixed_dim, int):
return [fixed_dim]*len(xi)
else:
raise AssertionError('Unsupported data type for fixed_dim')
def _compute_finite_T_star_residual(beta, xi, dims):
"""
Returns residual of the finite-temperature initial state of the bath. List of populations in
the highest energy state of each mode
"""
res = []
for xi_i, dim in zip(xi, dims):
res.append((np.exp(beta*xi_i) - 1)/(np.exp(beta*xi_i * dim)))
return res
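# Hedged consistency check (my derivation, not from the source): for a thermal
# oscillator with level spacing xi_i, the population of level n is
# (1 - exp(-beta*xi_i)) * exp(-n*beta*xi_i); at the truncation level
# n = dim - 1 this equals (exp(beta*xi_i) - 1) / exp(beta*xi_i*dim),
# which is exactly the expression returned above.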
def get_spin_boson_finiteT_star_initial_state(theta, beta, system_index, xi, mpa_type='pmps', fixed_dim=None,
high_energy_pop=1e-20, sitewise=False, residual=True):
"""
Computes the initial state for the finite temperature spin_boson model in star geometry.
The bath state is computed via imaginary time evolution.
:param theta: Spin parameter for psi_0 = cos(theta) |1> + sin(theta) |0>
:param beta: Inverse temperature of the bath
:param system_index: Impurity position in the auxiliary chain
:param xi: Star geometry bath energies
:param mpa_type: Type: mps, mpo or pmps of the initial state
:param fixed_dim: Uses this fixed dimension for the star evolution
:param high_energy_pop: Chooses local dimension, such that the population in the highest energy of each bath mode
stays below this threshold
:param sitewise: If set False the local dimension is chosen uniformly for all sites to be the
highest local dimension from the high_energy_pop calculation.
:param residual: Computes list of populations in the highest energy state of each mode
:return: Initial state of system and bath as mps, mpo or pmps, info dict
"""
assert mpa_type == 'mpo' or mpa_type == 'pmps'
t0_wall = time.clock()
t0_proc = time.perf_counter()
dims = get_star_local_dims(beta, xi, fixed_dim=fixed_dim, high_energy_pop=high_energy_pop, sitewise=sitewise)
ops = [xi[i] * np.arange(dim) for i, dim in enumerate(dims)]
if system_index > 0:
left_state = get_thermal_state(beta, mpa_type, ops[:system_index], to_cform=None)
right_state = get_thermal_state(beta, mpa_type, ops[system_index:], to_cform=None)
else:
left_state = None
right_state = get_thermal_state(beta, mpa_type, ops, to_cform=None)
tf_proc = time.perf_counter() - t0_proc
tf_wall = time.clock() - t0_wall
info = dict()
info['walltime'] = tf_wall
info['cpu_time'] = tf_proc
info['bath_dims'] = dims
if residual:
info['res'] = _compute_finite_T_star_residual(beta, xi, dims)
info['max_res'] = np.max(info['res'])
else:
info['res'] = None
info['max_res'] = None
sys_psi_0 = get_spin_initial_state(theta, mpa_type=mpa_type)
return mp.chain([left_state, sys_psi_0, right_state]) if left_state is not None else \
mp.chain([sys_psi_0, right_state]), info
def get_boson_boson_0T_chain_initial_state(alpha, nof_coefficients, cutoff_dim):
"""
Initial state for the Boson-Boson model in chain geometry (see Sec. 4.4.3 of the thesis)
:param alpha: accuracy alpha for the impurity coherent state
:param nof_coefficients: Number of bath sites
:param cutoff_dim: Local dimension of the system and impurity
:return: Initial state in MPS form
"""
pop = lambda x: np.exp(-np.abs(alpha) ** 2 / 2) * alpha ** x / np.sqrt(factorial(x))
sys_psi_0 = convert.to_mparray(pop(np.arange(cutoff_dim)), 'mps')
bath_psi_0 = broadcast_number_ground_state(cutoff_dim, nof_coefficients)
return mp.chain([sys_psi_0, bath_psi_0])
def get_boson_boson_0T_star_initial_state(alpha, system_index, nof_coefficients, cutoff_dim):
"""
Initial state for the Boson-Boson model in star geometry (see Sec. 4.4.3 of the thesis)
:param alpha: accuracy alpha for the impurity coherent state
:param system_index: Index of the impurity in the auxiliary chain
:param nof_coefficients: Number of bath sites
:param cutoff_dim: Local dimension of the system and impurity
:return: Initial state in MPS form
"""
pop = lambda x: np.exp(-np.abs(alpha) ** 2 / 2) * alpha ** x / np.sqrt(factorial(x, exact=True))
sys_psi_0 = convert.to_mparray(pop(np.arange(cutoff_dim)), 'mps')
# Initial states of the bath sites left and right of the system:
left_bath_psi_0, right_bath_psi_0 = tmps.utils.broadcast_number_ground_state(cutoff_dim, system_index), \
tmps.utils.broadcast_number_ground_state(cutoff_dim,
nof_coefficients - system_index)
return mp.chain([left_bath_psi_0, sys_psi_0, right_bath_psi_0]
if left_bath_psi_0 is not None else [sys_psi_0, right_bath_psi_0])
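# --- Hedged usage sketch (illustrative parameter values, not from the source) ---
if __name__ == '__main__':
    # Build a small zero-temperature chain initial state with the helpers above.
    psi_0 = get_spin_boson_0T_chain_initial_state(theta=np.pi / 4,
                                                  bath_local_dim=5,
                                                  nof_coefficients=10)
    print(psi_0)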
|
[
"scipy.special.factorial",
"numpy.abs",
"numpy.log",
"tmps.utils.state_reduction_as_ndarray",
"mpnum.chain",
"tmps.utils.convert.to_mparray",
"time.perf_counter",
"time.clock",
"numpy.max",
"tmps.chain.thermal.from_hamiltonian",
"numpy.sin",
"numpy.arange",
"numpy.cos",
"numpy.exp",
"tmps.utils.get_thermal_state",
"tmps.utils.broadcast_number_ground_state"
] |
[((551, 597), 'tmps.utils.convert.to_mparray', 'convert.to_mparray', (['(ground + excited)', 'mpa_type'], {}), '(ground + excited, mpa_type)\n', (569, 597), False, 'from tmps.utils import state_reduction_as_ndarray, convert, broadcast_number_ground_state, get_thermal_state\n'), ((905, 968), 'tmps.utils.broadcast_number_ground_state', 'broadcast_number_ground_state', (['bath_local_dim', 'nof_coefficients'], {}), '(bath_local_dim, nof_coefficients)\n', (934, 968), False, 'from tmps.utils import state_reduction_as_ndarray, convert, broadcast_number_ground_state, get_thermal_state\n'), ((980, 1013), 'mpnum.chain', 'mp.chain', (['[sys_psi_0, bath_psi_0]'], {}), '([sys_psi_0, bath_psi_0])\n', (988, 1013), True, 'import mpnum as mp\n'), ((1762, 1889), 'mpnum.chain', 'mp.chain', (['([left_bath_psi_0, sys_psi_0, right_bath_psi_0] if left_bath_psi_0 is not\n None else [sys_psi_0, right_bath_psi_0])'], {}), '([left_bath_psi_0, sys_psi_0, right_bath_psi_0] if left_bath_psi_0\n is not None else [sys_psi_0, right_bath_psi_0])\n', (1770, 1889), True, 'import mpnum as mp\n'), ((4398, 4410), 'time.clock', 'time.clock', ([], {}), '()\n', (4408, 4410), False, 'import time\n'), ((4425, 4444), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (4442, 4444), False, 'import time\n'), ((4632, 4987), 'tmps.chain.thermal.from_hamiltonian', 'tmps.chain.thermal.from_hamiltonian', (['beta', 'mpa_type', 'h_site', 'h_bond'], {'nof_steps': 'nof_steps', 'state_compression_kwargs': 'state_compression_kwargs', 'op_compression_kwargs': 'op_compression_kwargs', 'second_order_trotter': 'second_order_trotter', 'psi_0_compression_kwargs': 'psi_0_compression_kwargs', 'force_pmps_evolution': 'force_pmps_evolution', 'verbose': 'verbose'}), '(beta, mpa_type, h_site, h_bond,\n nof_steps=nof_steps, state_compression_kwargs=state_compression_kwargs,\n op_compression_kwargs=op_compression_kwargs, second_order_trotter=\n second_order_trotter, psi_0_compression_kwargs=psi_0_compression_kwargs,\n force_pmps_evolution=force_pmps_evolution, verbose=verbose)\n', (4667, 4987), False, 'import tmps\n'), ((9080, 9092), 'time.clock', 'time.clock', ([], {}), '()\n', (9090, 9092), False, 'import time\n'), ((9107, 9126), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (9124, 9126), False, 'import time\n'), ((10812, 10871), 'tmps.utils.broadcast_number_ground_state', 'broadcast_number_ground_state', (['cutoff_dim', 'nof_coefficients'], {}), '(cutoff_dim, nof_coefficients)\n', (10841, 10871), False, 'from tmps.utils import state_reduction_as_ndarray, convert, broadcast_number_ground_state, get_thermal_state\n'), ((10883, 10916), 'mpnum.chain', 'mp.chain', (['[sys_psi_0, bath_psi_0]'], {}), '([sys_psi_0, bath_psi_0])\n', (10891, 10916), True, 'import mpnum as mp\n'), ((11983, 12110), 'mpnum.chain', 'mp.chain', (['([left_bath_psi_0, sys_psi_0, right_bath_psi_0] if left_bath_psi_0 is not\n None else [sys_psi_0, right_bath_psi_0])'], {}), '([left_bath_psi_0, sys_psi_0, right_bath_psi_0] if left_bath_psi_0\n is not None else [sys_psi_0, right_bath_psi_0])\n', (11991, 12110), True, 'import mpnum as mp\n'), ((1466, 1536), 'tmps.utils.broadcast_number_ground_state', 'tmps.utils.broadcast_number_ground_state', (['bath_local_dim', 'system_index'], {}), '(bath_local_dim, system_index)\n', (1506, 1536), False, 'import tmps\n'), ((1580, 1673), 'tmps.utils.broadcast_number_ground_state', 'tmps.utils.broadcast_number_ground_state', (['bath_local_dim', '(nof_coefficients - system_index)'], {}), '(bath_local_dim, nof_coefficients -\n system_index)\n', (1620, 1673), False, 'import tmps\n'), ((5363, 5382), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (5380, 5382), False, 'import time\n'), ((5407, 5419), 'time.clock', 'time.clock', ([], {}), '()\n', (5417, 5419), False, 'import time\n'), ((5625, 5636), 'numpy.max', 'np.max', (['res'], {}), '(res)\n', (5631, 5636), True, 'import numpy as np\n'), ((5967, 5995), 'mpnum.chain', 'mp.chain', (['[sys_psi_0, psi_0]'], {}), '([sys_psi_0, psi_0])\n', (5975, 5995), True, 'import mpnum as mp\n'), ((9352, 9420), 'tmps.utils.get_thermal_state', 'get_thermal_state', (['beta', 'mpa_type', 'ops[:system_index]'], {'to_cform': 'None'}), '(beta, mpa_type, ops[:system_index], to_cform=None)\n', (9369, 9420), False, 'from tmps.utils import state_reduction_as_ndarray, convert, broadcast_number_ground_state, get_thermal_state\n'), ((9443, 9511), 'tmps.utils.get_thermal_state', 'get_thermal_state', (['beta', 'mpa_type', 'ops[system_index:]'], {'to_cform': 'None'}), '(beta, mpa_type, ops[system_index:], to_cform=None)\n', (9460, 9511), False, 'from tmps.utils import state_reduction_as_ndarray, convert, broadcast_number_ground_state, get_thermal_state\n'), ((9570, 9623), 'tmps.utils.get_thermal_state', 'get_thermal_state', (['beta', 'mpa_type', 'ops'], {'to_cform': 'None'}), '(beta, mpa_type, ops, to_cform=None)\n', (9587, 9623), False, 'from tmps.utils import state_reduction_as_ndarray, convert, broadcast_number_ground_state, get_thermal_state\n'), ((9638, 9657), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (9655, 9657), False, 'import time\n'), ((9682, 9694), 'time.clock', 'time.clock', ([], {}), '()\n', (9692, 9694), False, 'import time\n'), ((9927, 9946), 'numpy.max', 'np.max', (["info['res']"], {}), "(info['res'])\n", (9933, 9946), True, 'import numpy as np\n'), ((11695, 11761), 'tmps.utils.broadcast_number_ground_state', 'tmps.utils.broadcast_number_ground_state', (['cutoff_dim', 'system_index'], {}), '(cutoff_dim, system_index)\n', (11735, 11761), False, 'import tmps\n'), ((11805, 11894), 'tmps.utils.broadcast_number_ground_state', 'tmps.utils.broadcast_number_ground_state', (['cutoff_dim', '(nof_coefficients - system_index)'], {}), '(cutoff_dim, nof_coefficients -\n system_index)\n', (11845, 11894), False, 'import tmps\n'), ((479, 492), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (485, 492), True, 'import numpy as np\n'), ((519, 532), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (525, 532), True, 'import numpy as np\n'), ((9260, 9274), 'numpy.arange', 'np.arange', (['dim'], {}), '(dim)\n', (9269, 9274), True, 'import numpy as np\n'), ((10091, 10137), 'mpnum.chain', 'mp.chain', (['[left_state, sys_psi_0, right_state]'], {}), '([left_state, sys_psi_0, right_state])\n', (10099, 10137), True, 'import mpnum as mp\n'), ((10179, 10213), 'mpnum.chain', 'mp.chain', (['[sys_psi_0, right_state]'], {}), '([sys_psi_0, right_state])\n', (10187, 10213), True, 'import mpnum as mp\n'), ((10764, 10785), 'numpy.arange', 'np.arange', (['cutoff_dim'], {}), '(cutoff_dim)\n', (10773, 10785), True, 'import numpy as np\n'), ((11555, 11576), 'numpy.arange', 'np.arange', (['cutoff_dim'], {}), '(cutoff_dim)\n', (11564, 11576), True, 'import numpy as np\n'), ((7684, 7709), 'numpy.exp', 'np.exp', (['(beta * xi_i * dim)'], {}), '(beta * xi_i * dim)\n', (7690, 7709), True, 'import numpy as np\n'), ((10711, 10723), 'scipy.special.factorial', 'factorial', (['x'], {}), '(x)\n', (10720, 10723), False, 'from scipy.special import factorial\n'), ((11490, 11514), 'scipy.special.factorial', 'factorial', (['x'], {'exact': '(True)'}), '(x, exact=True)\n', (11499, 11514), False, 'from scipy.special import factorial\n'), ((2209, 2269), 'tmps.utils.state_reduction_as_ndarray', 'state_reduction_as_ndarray', (['psi_0', 'mpa_type'], {'startsite': 'index'}), '(psi_0, mpa_type, startsite=index)\n', (2235, 2269), False, 'from tmps.utils import state_reduction_as_ndarray, convert, broadcast_number_ground_state, get_thermal_state\n'), ((6854, 6873), 'numpy.exp', 'np.exp', (['(beta * xi_i)'], {}), '(beta * xi_i)\n', (6860, 6873), True, 'import numpy as np\n'), ((7052, 7064), 'numpy.max', 'np.max', (['dims'], {}), '(dims)\n', (7058, 7064), True, 'import numpy as np\n'), ((7660, 7679), 'numpy.exp', 'np.exp', (['(beta * xi_i)'], {}), '(beta * xi_i)\n', (7666, 7679), True, 'import numpy as np\n'), ((6933, 6970), 'numpy.log', 'np.log', (['(1 + 1 / (high_energy_pop * a))'], {}), '(1 + 1 / (high_energy_pop * a))\n', (6939, 6970), True, 'import numpy as np\n'), ((10664, 10677), 'numpy.abs', 'np.abs', (['alpha'], {}), '(alpha)\n', (10670, 10677), True, 'import numpy as np\n'), ((11443, 11456), 'numpy.abs', 'np.abs', (['alpha'], {}), '(alpha)\n', (11449, 11456), True, 'import numpy as np\n')]
|
# Generated by Django 3.0.4 on 2020-04-13 21:48
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('profiles', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='profile',
name='active',
field=models.BooleanField(default=True, verbose_name='Activo/Inactivo'),
),
]
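# --- Hedged sketch (assumed shape of the model after this migration) ---
# Applying the migration would leave the Profile model with a field like:
#
#     class Profile(models.Model):
#         ...
#         active = models.BooleanField(default=True, verbose_name='Activo/Inactivo')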
|
[
"django.db.models.BooleanField"
] |
[((325, 390), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'verbose_name': '"""Activo/Inactivo"""'}), "(default=True, verbose_name='Activo/Inactivo')\n", (344, 390), False, 'from django.db import migrations, models\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
import os
from dataclasses import dataclass, field
from typing import List, Tuple
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from fairseq import utils
from fairseq.data.data_utils import compute_mask_indices
from fairseq.dataclass import ChoiceEnum, FairseqDataclass
from fairseq.models import BaseFairseqModel, register_model
from fairseq.modules import (
Fp32GroupNorm,
Fp32LayerNorm,
GradMultiply,
GumbelVectorQuantizer,
LayerNorm,
MultiheadAttention,
SamePad,
TransposeLast,
)
from fairseq.modules.transformer_sentence_encoder import init_bert_params
from fairseq.utils import buffered_arange, index_put, is_xla_tensor
from torchvision.models.resnet import resnet50
EXTRACTOR_MODE_CHOICES = ChoiceEnum(["default", "layer_norm"])
MASKING_DISTRIBUTION_CHOICES = ChoiceEnum(["static", "uniform", "normal", "poisson"])
@dataclass
class MM2VecConfig(FairseqDataclass):
model_stage: int = field(
default=1,
metadata={"help": "model_stage=1 for training visual feature extractor only,"
"model_stage=2 for pretrain on all subnet"
"model_stage=? for fine-tune"},
)
extractor_mode: EXTRACTOR_MODE_CHOICES = field(
default="default",
metadata={
"help": "mode for feature extractor. default has a single group norm with d "
"groups in the first conv block, whereas layer_norm has layer norms in "
"every block (meant to use with normalize=True)"
},
)
encoder_layers: int = field(
default=12, metadata={"help": "num encoder layers in the transformer"}
)
encoder_embed_dim: int = field(
default=768, metadata={"help": "encoder embedding dimension"}
)
encoder_ffn_embed_dim: int = field(
default=3072, metadata={"help": "encoder embedding dimension for FFN"}
)
encoder_attention_heads: int = field(
default=12, metadata={"help": "num encoder attention heads"}
)
activation_fn: ChoiceEnum(utils.get_available_activation_fns()) = field(
default="gelu", metadata={"help": "activation function to use"}
)
# dropouts
dropout: float = field(
default=0.1, metadata={"help": "dropout probability for the transformer"}
)
attention_dropout: float = field(
default=0.1, metadata={"help": "dropout probability for attention weights"}
)
activation_dropout: float = field(
default=0.0, metadata={"help": "dropout probability after activation in FFN"}
)
encoder_layerdrop: float = field(
default=0.0, metadata={"help": "probability of dropping a tarnsformer layer"}
)
dropout_input: float = field(
default=0.0,
metadata={"help": "dropout to apply to the input (after feat extr)"},
)
dropout_features: float = field(
default=0.0,
metadata={"help": "dropout to apply to the features (after feat extr)"},
)
final_dim: int = field(
default=0,
metadata={
"help": "project final representations and targets to this many dimensions."
"set to encoder_embed_dim is <= 0"
},
)
layer_norm_first: bool = field(
default=False, metadata={"help": "apply layernorm first in the transformer"}
)
audio_conv_feature_layers: str = field(
default="[(512, 10, 5, 0)] + [(512, 3, 2, 0)] * 4 + [(512, 2, 2, 0)] + [(512, 2, 2, 0)]",
metadata={
"help": "string describing convolutional feature extraction layers in form of a python list that contains "
"[(dim, kernel_size, stride), ...]"
},
)
conv_bias: bool = field(
default=False, metadata={"help": "include bias in conv encoder"}
)
logit_temp: float = field(
default=0.1, metadata={"help": "temperature to divide logits by"}
)
quantize_targets: bool = field(
default=False, metadata={"help": "use quantized targets"}
)
quantize_input: bool = field(
default=False, metadata={"help": "use quantized inputs"}
)
same_quantizer: bool = field(
default=False, metadata={"help": "use same quantizer for inputs and targets"}
)
target_glu: bool = field(
default=False, metadata={"help": "adds projection + glu to targets"}
)
feature_grad_mult: float = field(
default=1.0, metadata={"help": "multiply feature extractor var grads by this"}
)
quantizer_depth: int = field(
default=1,
metadata={"help": "number of quantizer layers"},
)
quantizer_factor: int = field(
default=3,
metadata={
"help": "dimensionality increase for inner quantizer layers (if depth > 1)"
},
)
latent_vars: int = field(
default=320,
metadata={"help": "number of latent variables V in each group of the codebook"},
)
latent_groups: int = field(
default=2,
metadata={"help": "number of groups G of latent variables in the codebook"},
)
latent_dim: int = field(
default=0,
metadata={
"help": "if > 0, uses this dimensionality for latent variables. "
"otherwise uses final_dim / latent_groups"
},
)
# masking
mask_length: int = field(default=10, metadata={"help": "mask length"})
mask_prob: float = field(
default=0.65, metadata={"help": "probability of replacing a token with mask"}
)
mask_selection: MASKING_DISTRIBUTION_CHOICES = field(
default="static", metadata={"help": "how to choose mask length"}
)
mask_other: float = field(
default=0,
metadata={
"help": "secondary mask argument (used for more complex distributions), "
"see help in compute_mask_indices"
},
)
no_mask_overlap: bool = field(
default=False, metadata={"help": "whether to allow masks to overlap"}
)
mask_min_space: int = field(
default=1,
metadata={"help": "min space between spans (if no overlap is enabled)"},
)
# channel masking
mask_channel_length: int = field(
default=10, metadata={"help": "length of the mask for features (channels)"}
)
mask_channel_prob: float = field(
default=0.0, metadata={"help": "probability of replacing a feature with 0"}
)
mask_channel_before: bool = False
mask_channel_selection: MASKING_DISTRIBUTION_CHOICES = field(
default="static",
metadata={"help": "how to choose mask length for channel masking"},
)
mask_channel_other: float = field(
default=0,
metadata={
"help": "secondary mask argument (used for more complex distributions), "
"see help in compute_mask_indicesh"
},
)
no_mask_channel_overlap: bool = field(
default=False, metadata={"help": "whether to allow channel masks to overlap"}
)
mask_channel_min_space: int = field(
default=1,
metadata={"help": "min space between spans (if no overlap is enabled)"},
)
# negative selection
num_negatives: int = field(
default=100,
metadata={"help": "number of negative examples from the same sample"},
)
negatives_from_everywhere: bool = field(
default=False,
metadata={"help": "sample negatives from everywhere, not just masked states"},
)
cross_sample_negatives: int = field(
default=0, metadata={"help": "number of negative examples from the any sample"}
)
codebook_negatives: int = field(
default=0, metadata={"help": "number of negative examples codebook"}
)
# positional embeddings
conv_pos: int = field(
default=128,
metadata={"help": "number of filters for convolutional positional embeddings"},
)
conv_pos_groups: int = field(
default=16,
metadata={"help": "number of groups for convolutional positional embedding"},
)
latent_temp: Tuple[float, float, float] = field(
default=(2, 0.5, 0.999995),
metadata={
"help": "temperature for latent variable sampling. "
"can be tuple of 3 values (start, end, decay)"
},
)
# Visual Part
visual_conv_feature_layers: str = field(
default="[(512, 11, 1, 5)] * 3 + [(1024, 11, 1, 5)]",
metadata={
"help": "string describing visual-subnet convolutional feature extraction layers in form of a python list that contains "
"[(dim, kernel_size, stride, padding), ...]"
},
)
visual_input_dim: int = field(
default=112,
metadata={"help": "number of dims of visual pictures"},
)
visual_encoder_dim: int = field(
default=2048,
metadata={"help": "number of dims after MoCo"},
)
projection_dim: int = field(
default=512,
metadata={"help": "output dimension of projection head"},
)
# checkpoint part
m2v_path : str = field(
default="./checkpoints-mm-2/",
metadata={
"help": "path to mm2vec stage 1 last model or stage 2 process model"
},
)
# aggregation part
audio_weight: float = field(
default=0.5,
metadata={
"help":"weight for audio_features"
}
)
visual_weight: float = field(
default=0.5,
metadata={
"help":"weight for audio_features"
}
)
remove_quantizer_weight: bool = field(
default=False,
metadata={
"help": "remove quantizer pretrain params"
}
)
unfreeze_quantizer_weight:bool = field(
default=False,
metadata={
"help": "freeze quantizer pretrain params"
}
)
# MoCo
MoCo_replace:bool = field(
default=False,
metadata={"help":"replace first conv2d in MoCo with conv3d"}
)
@register_model("mm2vec", dataclass=MM2VecConfig)
class MM2VecModel(BaseFairseqModel):
def __init__(self, cfg: MM2VecConfig):
super().__init__()
self.cfg = cfg
audio_feature_enc_layers = eval(cfg.audio_conv_feature_layers)
visual_feature_enc_layers = eval(cfg.visual_conv_feature_layers)
self.audio_embed_dim = audio_feature_enc_layers[-1][0] # 512
self.visual_embed_dim = visual_feature_enc_layers[-1][0] # 1024
self.projection_dim = cfg.projection_dim # 512
self.audio_feature_extractor = ConvFeatureExtractionModel(
conv_layers=audio_feature_enc_layers,
dropout=0.0,
mode=cfg.extractor_mode,
conv_bias=cfg.conv_bias,
input_dim=1,
)
self.visual_input_dim = cfg.visual_input_dim # 112
self.MoCo_replace = cfg.MoCo_replace
self.MoCo_extractor = MoCo(replace=self.MoCo_replace)
self.visual_encoder_dim = cfg.visual_encoder_dim # 2048
self.visual_feature_extractor = ConvFeatureExtractionModel(
conv_layers=visual_feature_enc_layers,
dropout=0.0,
mode=cfg.extractor_mode,
conv_bias=cfg.conv_bias,
input_dim=2048,
)
self.post_extract_proj = (
# 512 -> 768
nn.Linear(self.audio_embed_dim, cfg.encoder_embed_dim)
if self.audio_embed_dim != cfg.encoder_embed_dim and not cfg.quantize_input
else None
)
self.projection_head = nn.Sequential(
# 512 -> 512
nn.Linear(int(self.visual_embed_dim / 2), int(self.visual_embed_dim / 2), bias=False),
nn.ReLU(),
# 512 -> 768
nn.Linear(int(self.visual_embed_dim / 2), cfg.encoder_embed_dim, bias=False),
)
""" mask part """
self.mask_prob = cfg.mask_prob
self.mask_selection = cfg.mask_selection
self.mask_other = cfg.mask_other
self.mask_length = cfg.mask_length
self.no_mask_overlap = cfg.no_mask_overlap
self.mask_min_space = cfg.mask_min_space
self.mask_channel_prob = cfg.mask_channel_prob
self.mask_channel_before = cfg.mask_channel_before
self.mask_channel_selection = cfg.mask_channel_selection
self.mask_channel_other = cfg.mask_channel_other
self.mask_channel_length = cfg.mask_channel_length
self.no_mask_channel_overlap = cfg.no_mask_channel_overlap
self.mask_channel_min_space = cfg.mask_channel_min_space
""" mask part """
self.dropout_input = nn.Dropout(cfg.dropout_input)
self.dropout_features = nn.Dropout(cfg.dropout_features)
self.feature_grad_mult = cfg.feature_grad_mult
self.quantizer = None
self.input_quantizer = None
self.n_negatives = cfg.num_negatives
self.cross_sample_negatives = cfg.cross_sample_negatives
self.codebook_negatives = cfg.codebook_negatives
self.negatives_from_everywhere = cfg.negatives_from_everywhere
self.logit_temp = cfg.logit_temp
final_dim = cfg.final_dim if cfg.final_dim > 0 else cfg.encoder_embed_dim
if cfg.quantize_targets:
vq_dim = cfg.latent_dim if cfg.latent_dim > 0 else final_dim
self.quantizer = GumbelVectorQuantizer(
dim=self.audio_embed_dim, # 512
num_vars=cfg.latent_vars, # 320
temp=cfg.latent_temp,
groups=cfg.latent_groups, # 2
combine_groups=False,
vq_dim=vq_dim,
time_first=True,
weight_proj_depth=cfg.quantizer_depth,
weight_proj_factor=cfg.quantizer_factor,
)
self.project_q = nn.Linear(vq_dim, final_dim)
else:
            self.project_q = nn.Linear(self.audio_embed_dim, final_dim)
# if cfg.quantize_input:
# if cfg.same_quantizer and self.quantizer is not None:
# vq_dim = final_dim
# self.input_quantizer = self.quantizer
# else:
# vq_dim = cfg.latent_dim if cfg.latent_dim > 0 else cfg.encoder_embed_dim
# self.input_quantizer = GumbelVectorQuantizer(
# dim=self.embed,
# num_vars=cfg.latent_vars,
# temp=cfg.latent_temp,
# groups=cfg.latent_groups,
# combine_groups=False,
# vq_dim=vq_dim,
# time_first=True,
# weight_proj_depth=cfg.quantizer_depth,
# weight_proj_factor=cfg.quantizer_factor,
# )
# self.project_inp = nn.Linear(vq_dim, cfg.encoder_embed_dim)
self.mask_emb = nn.Parameter(
torch.FloatTensor(cfg.encoder_embed_dim).uniform_()
)
self.encoder = TransformerEncoder(cfg)
self.layer_norm = LayerNorm(self.audio_embed_dim)
self.visual_layer_norm = LayerNorm(int(self.visual_embed_dim / 2))
self.target_glu = None
if cfg.target_glu:
self.target_glu = nn.Sequential(
nn.Linear(final_dim, final_dim * 2), nn.GLU()
)
self.final_proj = nn.Linear(cfg.encoder_embed_dim, final_dim)
self.model_stage = cfg.model_stage
self.audio_weight = cfg.audio_weight
self.visual_weight = cfg.visual_weight
def upgrade_state_dict_named(self, state_dict, name):
super().upgrade_state_dict_named(state_dict, name)
"""Upgrade a (possibly old) state dict for new versions of fairseq."""
return state_dict
@classmethod
def build_model(cls, cfg: MM2VecConfig, task=None):
"""Build a new model instance."""
model = cls(cfg)
if cfg.model_stage == 1:
model_dict = model.state_dict()
wav2vec_dict = {k.replace('feature', 'audio_feature'): v for k, v in
torch.load('../pretrain/wav2vec_small.pt')["model"].items()}
moco_dict = {k.replace('module.encoder_q', 'MoCo_extractor.encoder'): v for k, v in
torch.load('../pretrain/moco_v2_800ep_pretrain.pth.tar')["state_dict"].items()}
if cfg.remove_quantizer_weight:
popKeys = ['quantizer.vars', 'quantizer.weight_proj.weight', 'quantizer.weight_proj.bias']
for k in popKeys:
wav2vec_dict.pop(k)
popKeys = ['MoCo_extractor.encoder.fc.0.bias', 'MoCo_extractor.encoder.fc.2.bias',
'MoCo_extractor.encoder.fc.0.weight', 'MoCo_extractor.encoder.fc.2.weight']
if cfg.MoCo_replace:
popKeys.append('MoCo_extractor.encoder.conv1.weight')
for k in popKeys:
moco_dict.pop(k)
model_dict.update(wav2vec_dict)
model_dict.update(moco_dict)
model.load_state_dict(model_dict)
popKeys = ['quantizer.vars', 'quantizer.weight_proj.weight', 'quantizer.weight_proj.bias']
for name, param in model.named_parameters():
# print(name)
if name in wav2vec_dict.keys() or name in moco_dict.keys():
param.requires_grad = False
if name in popKeys and cfg.unfreeze_quantizer_weight:
param.requires_grad = True
elif cfg.model_stage == 2:
model_dict = model.state_dict()
checkpoint_path = os.path.join(cfg.m2v_path, 'checkpoint_last.pt')
checkpoints_dict = torch.load(checkpoint_path)['model']
model_dict.update(checkpoints_dict)
model.load_state_dict(model_dict)
else:
return model
print('num_total_param: {},num_trainable_param: {},num_freezed_param: {}'.format(
sum([params.numel() for params in model.parameters()]),
sum([params.numel() for params in model.parameters() if params.requires_grad]),
sum([params.numel() for params in model.parameters() if not params.requires_grad])))
return model
def apply_mask(
self,
x_audio,
x_visual,
padding_mask,
mask_indices=None,
mask_channel_indices=None,
):
B, T, C = x_audio.shape
# FIXME INFERENCE
if self.mask_channel_prob > 0 and self.mask_channel_before:
mask_channel_indices = compute_mask_indices(
(B, C),
None,
self.mask_channel_prob,
self.mask_channel_length,
self.mask_channel_selection,
self.mask_channel_other,
no_overlap=self.no_mask_channel_overlap,
min_space=self.mask_channel_min_space,
)
            mask_channel_indices = (
                torch.from_numpy(mask_channel_indices)
                .to(x_audio.device)
                .unsqueeze(1)
                .expand(-1, T, -1)
            )
            x_audio[mask_channel_indices] = 0
            x_visual[mask_channel_indices] = 0
if self.mask_prob > 0:
if mask_indices is None:
mask_indices = compute_mask_indices(
(B, T),
padding_mask,
self.mask_prob,
self.mask_length,
self.mask_selection,
self.mask_other,
min_masks=2,
no_overlap=self.no_mask_overlap,
min_space=self.mask_min_space,
)
mask_indices = torch.from_numpy(mask_indices).to(x_audio.device)
x_audio = index_put(x_audio, mask_indices, self.mask_emb)
x_visual = index_put(x_visual, mask_indices, self.mask_emb)
else:
mask_indices = None
# FIXME INFERENCE
if self.mask_channel_prob > 0 and not self.mask_channel_before:
if mask_channel_indices is None:
mask_channel_indices = compute_mask_indices(
(B, C),
None,
self.mask_channel_prob,
self.mask_channel_length,
self.mask_channel_selection,
self.mask_channel_other,
no_overlap=self.no_mask_channel_overlap,
min_space=self.mask_channel_min_space,
)
mask_channel_indices = (
torch.from_numpy(mask_channel_indices)
.to(x_audio.device)
.unsqueeze(1)
.expand(-1, T, -1)
)
x_audio = index_put(x_audio, mask_channel_indices, 0)
x_visual = index_put(x_visual, mask_channel_indices, 0)
return x_audio, x_visual, mask_indices
def sample_negatives(self, y_audio, y_visual, num, padding_count=None):
#ignore
if self.n_negatives == 0 and self.cross_sample_negatives == 0:
            return y_audio.new(0)
bsz, tsz, fsz = y_audio.shape
y_audio = y_audio.view(-1, fsz) # BTC => (BxT)C
y_visual = y_visual.view(-1, fsz)
# FIXME: what happens if padding_count is specified?
cross_high = tsz * bsz
high = tsz - (padding_count or 0)
with torch.no_grad():
assert high > 1, f"{bsz,tsz,fsz}"
if self.n_negatives > 0:
tszs = (
buffered_arange(num)
.unsqueeze(-1)
.expand(-1, self.n_negatives)
.flatten()
)
neg_idxs = torch.randint(
low=0, high=high - 1, size=(bsz, self.n_negatives * num)
)
neg_idxs[neg_idxs >= tszs] += 1
if self.cross_sample_negatives > 0:
tszs = (
buffered_arange(num)
.unsqueeze(-1)
.expand(-1, self.cross_sample_negatives)
.flatten()
)
cross_neg_idxs = torch.randint(
low=0,
high=cross_high - 1,
size=(bsz, self.cross_sample_negatives * num),
)
cross_neg_idxs[cross_neg_idxs >= tszs] += 1
if self.n_negatives > 0:
for i in range(1, bsz):
neg_idxs[i] += i * high
else:
neg_idxs = cross_neg_idxs
if self.cross_sample_negatives > 0 and self.n_negatives > 0:
neg_idxs = torch.cat([neg_idxs, cross_neg_idxs], dim=1)
negs_audio = y_audio[neg_idxs.view(-1)]
negs_audio = negs_audio.view(
bsz, num, self.n_negatives + self.cross_sample_negatives, fsz
).permute(
2, 0, 1, 3
) # to NxBxTxC
negs_visual = y_visual[neg_idxs.view(-1)]
negs_visual = negs_visual.view(
bsz, num, self.n_negatives + self.cross_sample_negatives, fsz
).permute(
2, 0, 1, 3
) # to NxBxTxC
return negs_audio, negs_visual, neg_idxs
def compute_preds(self, x, y, negatives):
neg_is_pos = (y == negatives).all(-1)
y = y.unsqueeze(0)
targets = torch.cat([y, negatives], dim=0)
logits = torch.cosine_similarity(x.float(), targets.float(), dim=-1).type_as(x)
logits = logits / self.logit_temp
if is_xla_tensor(logits) or neg_is_pos.any():
fillval = -float(2 ** 30)
if not hasattr(self, "_inftensor"):
self._inftensor = (
torch.tensor(fillval).to(x.device)
if is_xla_tensor(logits)
else float("-inf")
)
logits[1:] = index_put(logits[1:], neg_is_pos, self._inftensor)
return logits
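    # Hedged shape note (my reading of the code above, not from the source):
    # with N negatives, targets is (1 + N) x B x T x C, so logits comes out as
    # (1 + N) x B x T with row 0 holding the positive's similarity; negatives
    # that coincide with the positive are pushed to -inf so they cannot win
    # the contrastive softmax.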
def _get_feat_extract_output_lengths(self, input_lengths: torch.LongTensor):
"""
Computes the output length of the convolutional layers
"""
def _conv_out_length(input_length, kernel_size, stride):
return torch.floor((input_length - kernel_size) / stride + 1)
conv_cfg_list = eval(self.cfg.audio_conv_feature_layers)
for i in range(len(conv_cfg_list)):
input_lengths = _conv_out_length(
input_lengths, conv_cfg_list[i][1], conv_cfg_list[i][2]
)
return input_lengths.to(torch.long)
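    # Hedged worked example (my numbers, not from the source): with the default
    # audio_conv_feature_layers [(512, 10, 5, 0)] + [(512, 3, 2, 0)] * 4 +
    # [(512, 2, 2, 0)] * 2, an input of 16000 samples yields
    # 3199 -> 1599 -> 799 -> 399 -> 199 -> 99 -> 49 frames,
    # i.e. roughly one frame per 320 samples.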
def compute_visual_length(self,visual_source):
visual_length = list()
max_visual_length = -1
for i in range(len(visual_source)):
length = int(visual_source[i].size(1) / self.visual_input_dim)
if length > max_visual_length:
max_visual_length = length
visual_length.append(length)
return max_visual_length,visual_length
def visual_padding(self,visual_features,visual_length,max_visual_length):
visual_source_new = torch.tensor([], dtype=visual_features.dtype, device=visual_features.device)
start = 0
        # Split the MoCo input according to the visual_length list and pad
        # each sample up to the maximum length.
        visual_source_len = max_visual_length
        for l in visual_length:
            visual_source_new = torch.cat((visual_source_new, torch.cat(
                (visual_features[start:start + l],
                 torch.zeros((visual_source_len - l, 3, 112, 112), dtype=visual_features.dtype,
                              device=visual_features.device)))))
            start += l
        return visual_source_new
def forward(
self,
audio_source,
visual_source,
padding_mask=None,
mask=True,
features_only=False,
layer=None,
mask_indices=None,
mask_channel_indices=None,
padding_count=None,
):
"""
        For now only handle the cropping training mode: in stage 1 and stage 2
        the visual and audio lengths are already matched, every sample in a
        batch has the same visual/audio length, and no length sequence is
        needed.
        Inference: when the dataset's pad flag is set and a padding_mask must
        be passed in, the audio source is the one that gets padded; only then
        do we need to record the visual lengths and pad after MoCo.
"""
result = {}
# FIXME INFERENCE
if padding_mask is not None:
# compute visual length
max_visual_length, visual_length = self.compute_visual_length(visual_source)
visual_source = torch.cat(visual_source,1)
visual_source = torch.split(visual_source, self.visual_input_dim, 1)
visual_source = torch.cat(visual_source)
visual_source = visual_source.view(-1, self.visual_input_dim, self.visual_input_dim)
visual_source = visual_source.unsqueeze(1).repeat(1, 3, 1, 1)
if self.MoCo_replace:
visual_source = self.visual_padding(visual_source,visual_length,max_visual_length)
visual_source = visual_source.view(len(visual_length),max_visual_length,3,112,112)
visual_source = visual_source.transpose(1,2)
else:
"""
        Cropping training: every visual input in the batch has the same length.
"""
visual_batch_size = len(visual_source)
max_visual_length = int(visual_source[0].size(1)/112)
visual_source = torch.stack(visual_source)
visual_source = torch.split(visual_source, self.visual_input_dim, 1)
visual_source = torch.cat(visual_source)
visual_source = visual_source.view(-1, self.visual_input_dim, self.visual_input_dim)
visual_source = visual_source.unsqueeze(1).repeat(1, 3, 1, 1)
if self.MoCo_replace:
visual_source = visual_source.view(visual_batch_size, max_visual_length, 3, self.visual_input_dim, self.visual_input_dim)
visual_source = visual_source.transpose(1, 2)
"""MoCo input dim:[n_frames,3,112,112]"""
visual_features = self.MoCo_extractor(visual_source)
visual_features = visual_features.view(-1,max_visual_length,self.visual_encoder_dim)
visual_features = visual_features.transpose(1,2)
"""
        Length handling should be finished at this point; from here on, both
        padded and cropped inputs are aligned.
"""
if self.feature_grad_mult > 0:
# audio: (bsz*sample_length) --> (bsz * feature_dim * frames)
# visual: (bsz*feature_dim * frames) --> (bsz * feature_dim_new * frames)
af_beforeGELU, audio_features = self.audio_feature_extractor(audio_source)
vf_beforeGELU, visual_features = self.visual_feature_extractor(visual_features)
if self.feature_grad_mult != 1.0:
audio_features = GradMultiply.apply(audio_features, self.feature_grad_mult)
visual_features = GradMultiply.apply(visual_features, self.feature_grad_mult)
else:
with torch.no_grad():
af_beforeGELU, audio_features = self.audio_feature_extractor(audio_source)
vf_beforeGELU, visual_features = self.visual_feature_extractor(visual_features)
features_pen = 0 # penalty loss
        af_beforeGELU = af_beforeGELU.transpose(1, 2)
        vf_beforeGELU = vf_beforeGELU.transpose(1, 2)
        vf_beforeGELU = vf_beforeGELU.reshape(vf_beforeGELU.size(0), -1, int(vf_beforeGELU.size(2) / 2))
        vf_beforeGELU = vf_beforeGELU[:, :af_beforeGELU.size(1), :]
af_beforeGELU = self.layer_norm(af_beforeGELU)
vf_beforeGELU = self.visual_layer_norm(vf_beforeGELU)
result["pre_gelu_audio"] = af_beforeGELU
result["pre_gelu_visual"] = vf_beforeGELU
        # FIXME: how do the transpose and layer_norm affect the MSE, and what
        # difference does taking features before vs. after the GELU make?
audio_features = audio_features.transpose(1, 2)
visual_features = visual_features.transpose(1, 2)
visual_features = visual_features.reshape(visual_features.size(0), -1, int(visual_features.size(2) / 2))
visual_features = visual_features[:, :audio_features.size(1), :]
        audio_features = self.layer_norm(audio_features)  # LayerNorm over the 512-dim feature axis
visual_features = self.visual_layer_norm(visual_features)
result["post_gelu_audio"] = audio_features
result["post_gelu_visual"] = visual_features
unmasked_audio_features = audio_features.clone()
unmasked_visual_features = visual_features.clone()
# FIXME INFERENCE
"""sample维度的padding mask到frame维度的padding mask"""
if padding_mask is not None and padding_mask.any():
input_lengths = (1 - padding_mask.long()).sum(-1)
# apply conv formula to get real output_lengths
output_lengths = self._get_feat_extract_output_lengths(input_lengths)
padding_mask = torch.zeros(
audio_features.shape[:2], dtype=audio_features.dtype, device=audio_features.device
)
# these two operations makes sure that all values
# before the output lengths indices are attended to
padding_mask[
(
torch.arange(padding_mask.shape[0], device=padding_mask.device),
output_lengths - 1,
)
] = 1
padding_mask = (1 - padding_mask.flip([-1]).cumsum(-1).flip([-1])).bool()
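            # Worked example (hypothetical T=5, output_length=4):
            #   one-hot at index 3 -> [0, 0, 0, 1, 0]
            #   flip               -> [0, 1, 0, 0, 0]
            #   cumsum             -> [0, 1, 1, 1, 1]
            #   flip               -> [1, 1, 1, 1, 0]
            #   1 - (...) as bool  -> [F, F, F, F, T]  (True marks padded frames)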
else:
padding_mask = None
# 512 -> 768
if self.post_extract_proj is not None:
audio_features = self.post_extract_proj(audio_features)
visual_features = self.post_extract_proj(visual_features)
# if self.projection_head is not None:
# visual_features = self.projection_head(visual_features)
result["features_pen"] = features_pen
audio_features = self.dropout_input(audio_features)
visual_features = self.dropout_input(visual_features)
unmasked_audio_features = self.dropout_features(unmasked_audio_features)
unmasked_visual_features = self.dropout_features(unmasked_visual_features)
num_vars = None
code_ppl = None
prob_ppl = None
curr_temp = None
# if self.input_quantizer:
# q = self.input_quantizer(features, produce_targets=False)
# features = q["x"]
# num_vars = q["num_vars"]
# code_ppl = q["code_perplexity"]
# prob_ppl = q["prob_perplexity"]
# curr_temp = q["temp"]
# features = self.project_inp(features)
if mask:
            # No mask is computed at inference time; otherwise compute mask indices
            # and set the (indices == True) positions to self.mask_emb
x_audio, x_visual, mask_indices = self.apply_mask(
audio_features,
visual_features,
padding_mask,
mask_indices=mask_indices,
mask_channel_indices=mask_channel_indices,
)
            if not is_xla_tensor(x_audio) and not is_xla_tensor(x_visual) and mask_indices is not None:
# tpu-comment: reducing the size in a dynamic way causes
# too many recompilations on xla.
y_audio = unmasked_audio_features[mask_indices].view(
unmasked_audio_features.size(0), -1, unmasked_audio_features.size(-1)
)
y_visual = unmasked_visual_features[mask_indices].view(
unmasked_visual_features.size(0), -1, unmasked_visual_features.size(-1)
)
else:
# ignore
y_audio = unmasked_audio_features
y_visual = unmasked_visual_features
else:
x_audio = audio_features
x_visual = visual_features
y_audio = unmasked_audio_features
y_visual = unmasked_visual_features
mask_indices = None
"""
mask之后的过transformer
stage 1: 两个模态分别过
stage 2: 两个模态取平均后过
"""
if self.model_stage == 1:
"""
x_audio:Batch * n_frames(with mask_emb) * feature_dim(512)
x_visual:Batch * n_frames(with mask_emb) * feature_dim(512)
x_audio.shape == x_visual.shape
"""
x_audio, layer_results_audio = self.encoder(x_audio, padding_mask=padding_mask, layer=layer)
x_visual, layer_results_visual = self.encoder(x_visual, padding_mask=padding_mask, layer=layer)
elif self.model_stage == 2:
x_cat = (self.audio_weight * x_audio + self.visual_weight * x_visual)
            x_cat, layer_results_cat = self.encoder(x_cat, padding_mask=padding_mask, layer=layer)
else:
x_cat = (0.0 * x_audio + 1.0 * x_visual)
x_cat, _ = self.encoder(x_cat, padding_mask=padding_mask, layer=layer)
# FIXME INFERENCE
if features_only:
return {
"x": x_cat,
"padding_mask": padding_mask,
"audio_features": unmasked_audio_features,
"visual_features": unmasked_visual_features,
}
"""
inference时到这儿就结束了
"""
if self.quantizer:
q_visual = self.quantizer(y_visual, produce_targets=False)
y_visual = q_visual["x"]
q_audio = self.quantizer(y_audio, produce_targets=False)
y_audio = q_audio["x"]
if self.model_stage == 1:
"""
只管visual这边的diversity loss
"""
num_vars = q_visual["num_vars"]
code_ppl = [q_visual["code_perplexity"], q_audio["code_perplexity"]]
                # fraction of the codebook in use = code_ppl / (num_vars * num_latent_groups)
# print("visual_num_vars:",num_vars)
# print("audio_num_vars:", q_audio["num_vars"])
# print("visual_code_ppl:", code_ppl)
# print("audio_code_ppl:", q_audio["code_perplexity"])
prob_ppl = q_visual["prob_perplexity"]
curr_temp = q_visual["temp"]
elif self.model_stage == 2:
num_vars = q_visual["num_vars"]
code_ppl = [q_visual["code_perplexity"], q_audio["code_perplexity"]]
# print("num_vars_va:", num_vars)
# print("code_ppl_va:", code_ppl)
prob_ppl = [q_visual["prob_perplexity"], q_audio["prob_perplexity"]]
curr_temp = [q_visual["temp"], q_audio["temp"]]
y_audio = self.project_q(y_audio)
y_visual = self.project_q(y_visual)
# ignore
if self.negatives_from_everywhere:
# ignore
neg_cands = self.quantizer(unmasked_features, produce_targets=False)[
"x"
]
negs, _ = self.sample_negatives(
neg_cands,
y.size(1),
padding_count=padding_count,
)
negs = self.project_q(negs)
else:
negs_audio,negs_visual, negs_indices = self.sample_negatives(
y_audio,
y_visual,
y_audio.size(1),
padding_count=padding_count,
)
# ignore
if self.codebook_negatives > 0:
cb_negs = self.quantizer.sample_from_codebook(
y.size(0) * y.size(1), self.codebook_negatives
)
cb_negs = cb_negs.view(
self.codebook_negatives, y.size(0), y.size(1), -1
                )  # order doesn't matter
cb_negs = self.project_q(cb_negs)
negs = torch.cat([negs, cb_negs], dim=0)
else:
y_audio = self.project_q(y_audio)
y_visual = self.project_q(y_visual)
            # ignore
if self.negatives_from_everywhere:
negs, _ = self.sample_negatives(
unmasked_features,
y.size(1),
padding_count=padding_count,
)
negs = self.project_q(negs)
else:
negs, _ = self.sample_negatives(
y,
y.size(1),
padding_count=padding_count,
)
if not is_xla_tensor(x_audio) and not is_xla_tensor(x_visual) and self.model_stage == 1:
# tpu-comment: reducing the size in a dynamic way causes
# too many recompilations on xla.
x_audio = x_audio[mask_indices].view(x_audio.size(0), -1, x_audio.size(-1))
x_visual = x_visual[mask_indices].view(x_visual.size(0), -1, x_visual.size(-1))
elif not is_xla_tensor(x_cat) and self.model_stage == 2:
x_cat = x_cat[mask_indices].view(x_cat.size(0), -1, x_cat.size(-1))
# ignore
if self.target_glu:
y = self.target_glu(y)
negs = self.target_glu(negs)
if self.model_stage == 1:
x_audio = self.final_proj(x_audio)
x_audio = self.compute_preds(x_audio, y_audio, negs_audio)
x_visual = self.final_proj(x_visual)
x_visual = self.compute_preds(x_visual, y_visual, negs_visual)
result["x_audio"] = x_audio
result["x_visual"] = x_visual
result["padding_mask"] = padding_mask
elif self.model_stage == 2:
x_cat = self.final_proj(x_cat)
x_audio = self.compute_preds(x_cat, y_audio, negs_audio)
x_visual = self.compute_preds(x_cat, y_visual, negs_visual)
result["x_audio"] =x_audio
result["x_visual"] = x_visual
result["padding_mask"] = padding_mask
if prob_ppl is not None:
result["prob_perplexity"] = prob_ppl
result["code_perplexity"] = code_ppl
result["num_vars"] = num_vars
result["temp"] = curr_temp
result["stage"] = self.model_stage
return result
def quantize(self, x):
assert self.quantizer is not None
x = self.feature_extractor(x)
x = x.transpose(1, 2)
x = self.layer_norm(x)
return self.quantizer.forward_idx(x)
def extract_features(self, audio_source, visual_source, padding_mask, mask=False, layer=None):
res = self.forward(
            audio_source, visual_source, padding_mask, mask=mask, features_only=True, layer=layer
)
return res
def get_logits(self, net_output):
logits_audio = net_output["x_audio"]
logits_visual = net_output["x_visual"]
logits_audio = logits_audio.transpose(0, 2)
logits_visual = logits_visual.transpose(0, 2)
logits_audio = logits_audio.reshape(-1, logits_audio.size(-1))
        logits_visual = logits_visual.reshape(-1, logits_visual.size(-1))
        return logits_audio, logits_visual
def get_targets(self, sample, net_output, expand_steps=True):
x_audio = net_output["x_audio"]
x_visual = net_output["x_visual"]
return x_audio.new_zeros(x_audio.size(1) * x_audio.size(2), dtype=torch.long), x_visual.new_zeros(x_visual.size(1) * x_visual.size(2), dtype=torch.long)
def get_extra_losses(self, net_output):
pen = []
if "prob_perplexity" in net_output:
if self.model_stage == 1:
pen.append(
(net_output["num_vars"] - net_output["prob_perplexity"])
/ net_output["num_vars"]
)
else:
for i in range(2):
# visual audio
pen.append(
(net_output["num_vars"] - net_output["prob_perplexity"][i])
/ net_output["num_vars"]
)
if "features_pen" in net_output:
pen.append(net_output["features_pen"])
return pen
def remove_pretraining_modules(self):
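        # Pretraining-only heads; fine-tuning keeps just the feature extractors
        # and the transformer encoder.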
self.quantizer = None
self.project_q = None
self.target_glu = None
self.final_proj = None
class ConvFeatureExtractionModel(nn.Module):
def __init__(
self,
conv_layers: List[Tuple[int, int, int, int]],
dropout: float = 0.0,
mode: str = "default",
conv_bias: bool = False,
input_dim=1,
):
super().__init__()
assert mode in {"default", "layer_norm"}
def block(
n_in,
n_out,
kernel_size,
stride,
padding,
is_layer_norm=False,
is_group_norm=False,
conv_bias=False,
):
def make_conv():
conv = nn.Conv1d(n_in, n_out, kernel_size=kernel_size, stride=stride, padding=padding, bias=conv_bias)
nn.init.kaiming_normal_(conv.weight)
return conv
assert (
is_layer_norm and is_group_norm
) == False, "layer norm and group norm are exclusive"
if is_layer_norm:
return nn.Sequential(
make_conv(),
nn.Dropout(p=dropout),
nn.Sequential(
TransposeLast(),
Fp32LayerNorm(dim, elementwise_affine=True),
TransposeLast(),
),
nn.GELU(),
)
elif is_group_norm:
return nn.Sequential(
make_conv(),
nn.Dropout(p=dropout),
Fp32GroupNorm(dim, dim, affine=True),
nn.GELU(),
)
else:
return nn.Sequential(make_conv(), nn.Dropout(p=dropout), nn.GELU())
in_d = input_dim
self.conv_layers = nn.ModuleList()
for i, cl in enumerate(conv_layers):
assert len(cl) == 4, "invalid conv definition: " + str(cl)
(dim, kernel_size, stride, padding) = cl
self.conv_layers.append(
block(
in_d,
dim,
kernel_size,
stride,
padding,
is_layer_norm=mode == "layer_norm",
is_group_norm=mode == "default" and i == 0,
conv_bias=conv_bias,
)
)
in_d = dim
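        # Output-length sketch (assuming the default audio stack
        # [(512,10,5)] + [(512,3,2)]*4 + [(512,2,2)]*2 with no padding):
        # L_out = floor((L_in - kernel) / stride + 1) per layer, so 16000 input
        # samples give 3199 -> 1599 -> 799 -> 399 -> 199 -> 99 -> 49 frames,
        # i.e. a total stride of 320 samples (~20 ms per frame at 16 kHz).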
def forward(self, x):
# BxT -> BxCxT
if len(x.shape) == 2:
x = x.unsqueeze(1)
for conv in self.conv_layers:
if conv == self.conv_layers[-1]:
for name, module in conv.named_children():
if name =="2":
"""
0 Conv1d
1 Dropout
2 GELU
2 means GELU()
"""
before_GELU = x
x = module(x)
else:
x = conv(x)
return before_GELU, x
class TransformerEncoder(nn.Module):
def __init__(self, args):
super().__init__()
self.dropout = args.dropout
self.embedding_dim = args.encoder_embed_dim
self.pos_conv = nn.Conv1d(
self.embedding_dim,
self.embedding_dim,
kernel_size=args.conv_pos,
padding=args.conv_pos // 2,
groups=args.conv_pos_groups,
)
dropout = 0
std = math.sqrt((4 * (1.0 - dropout)) / (args.conv_pos * self.embedding_dim))
nn.init.normal_(self.pos_conv.weight, mean=0, std=std)
nn.init.constant_(self.pos_conv.bias, 0)
self.pos_conv = nn.utils.weight_norm(self.pos_conv, name="weight", dim=2)
self.pos_conv = nn.Sequential(self.pos_conv, SamePad(args.conv_pos), nn.GELU())
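        # The grouped 1-D convolution acts as a relative positional embedding
        # (wav2vec 2.0 style); SamePad trims the extra frame an even kernel produces.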
self.layers = nn.ModuleList(
[
TransformerSentenceEncoderLayer(
embedding_dim=self.embedding_dim,
ffn_embedding_dim=args.encoder_ffn_embed_dim,
num_attention_heads=args.encoder_attention_heads,
dropout=self.dropout,
attention_dropout=args.attention_dropout,
activation_dropout=args.activation_dropout,
activation_fn=args.activation_fn,
layer_norm_first=args.layer_norm_first,
)
for _ in range(args.encoder_layers)
]
)
self.layer_norm_first = args.layer_norm_first
self.layer_norm = LayerNorm(self.embedding_dim)
self.layerdrop = args.encoder_layerdrop
self.apply(init_bert_params)
def forward(self, x, padding_mask=None, layer=None):
x, layer_results = self.extract_features(x, padding_mask, layer)
if self.layer_norm_first and layer is None:
x = self.layer_norm(x)
return x, layer_results
def extract_features(self, x, padding_mask=None, tgt_layer=None):
if padding_mask is not None:
x = index_put(x, padding_mask, 0)
x_conv = self.pos_conv(x.transpose(1, 2))
x_conv = x_conv.transpose(1, 2)
x = x + x_conv
if not self.layer_norm_first:
x = self.layer_norm(x)
x = F.dropout(x, p=self.dropout, training=self.training)
# B x T x C -> T x B x C
x = x.transpose(0, 1)
layer_results = []
r = None
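        # LayerDrop: during training each transformer layer is skipped
        # independently with probability self.layerdrop.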
for i, layer in enumerate(self.layers):
dropout_probability = np.random.random()
if not self.training or (dropout_probability > self.layerdrop):
x, z = layer(x, self_attn_padding_mask=padding_mask, need_weights=False)
if tgt_layer is not None:
layer_results.append((x, z))
if i == tgt_layer:
r = x
break
if r is not None:
x = r
# T x B x C -> B x T x C
x = x.transpose(0, 1)
return x, layer_results
def max_positions(self):
"""Maximum output length supported by the encoder."""
return self.args.max_positions
def upgrade_state_dict_named(self, state_dict, name):
"""Upgrade a (possibly old) state dict for new versions of fairseq."""
return state_dict
class TransformerSentenceEncoderLayer(nn.Module):
"""
Implements a Transformer Encoder Layer used in BERT/XLM style pre-trained
models.
"""
def __init__(
self,
embedding_dim: float = 768,
ffn_embedding_dim: float = 3072,
num_attention_heads: float = 8,
dropout: float = 0.1,
attention_dropout: float = 0.1,
activation_dropout: float = 0.1,
activation_fn: str = "relu",
layer_norm_first: bool = False,
) -> None:
super().__init__()
# Initialize parameters
self.embedding_dim = embedding_dim
self.dropout = dropout
self.activation_dropout = activation_dropout
# Initialize blocks
self.activation_fn = utils.get_activation_fn(activation_fn)
self.self_attn = MultiheadAttention(
self.embedding_dim,
num_attention_heads,
dropout=attention_dropout,
self_attention=True,
)
self.dropout1 = nn.Dropout(dropout)
self.dropout2 = nn.Dropout(self.activation_dropout)
self.dropout3 = nn.Dropout(dropout)
self.layer_norm_first = layer_norm_first
# layer norm associated with the self attention layer
self.self_attn_layer_norm = LayerNorm(self.embedding_dim)
self.fc1 = nn.Linear(self.embedding_dim, ffn_embedding_dim)
self.fc2 = nn.Linear(ffn_embedding_dim, self.embedding_dim)
# layer norm associated with the position wise feed-forward NN
self.final_layer_norm = LayerNorm(self.embedding_dim)
def forward(
self,
x: torch.Tensor,
self_attn_mask: torch.Tensor = None,
self_attn_padding_mask: torch.Tensor = None,
need_weights: bool = False,
att_args=None,
):
"""
LayerNorm is applied either before or after the self-attention/ffn
        modules similar to the original Transformer implementation.
"""
residual = x
if self.layer_norm_first:
x = self.self_attn_layer_norm(x)
x, attn = self.self_attn(
query=x,
key=x,
value=x,
key_padding_mask=self_attn_padding_mask,
attn_mask=self_attn_mask,
)
x = self.dropout1(x)
x = residual + x
residual = x
x = self.final_layer_norm(x)
x = self.activation_fn(self.fc1(x))
x = self.dropout2(x)
x = self.fc2(x)
x = self.dropout3(x)
x = residual + x
else:
x, attn = self.self_attn(
query=x,
key=x,
value=x,
key_padding_mask=self_attn_padding_mask,
)
x = self.dropout1(x)
x = residual + x
x = self.self_attn_layer_norm(x)
residual = x
x = self.activation_fn(self.fc1(x))
x = self.dropout2(x)
x = self.fc2(x)
x = self.dropout3(x)
x = residual + x
x = self.final_layer_norm(x)
return x, attn
class MoCo(nn.Module):
def __init__(self, replace=False):
super(MoCo, self).__init__()
self.encoder = nn.Sequential()
self.replace = replace
for name, module in resnet50().named_children():
"""
name:conv1
name:bn1
name:relu
name:maxpool
name:layer1
name:layer2
name:layer3
name:layer4
name:avgpool
name:fc
"""
if name == 'conv1':
if self.replace:
                    module = nn.Conv3d(3, 64, kernel_size=(7, 7, 7), stride=(1, 2, 2), padding=(3, 3, 3), bias=False)
self.encoder.add_module(name, module)
elif name != 'fc':
self.encoder.add_module(name, module)
# else:
# self.ResNet.append(nn.Linear(in_features=2048, out_features=128, bias=True))
def forward(self, x):
x = self.encoder.conv1(x)
if self.replace:
            x = x.transpose(1, 2)
            x = x.reshape(-1, x.size(2), x.size(3), x.size(4))
x = self.encoder.bn1(x)
x = self.encoder.relu(x)
x = self.encoder.maxpool(x)
x = self.encoder.layer1(x)
x = self.encoder.layer2(x)
x = self.encoder.layer3(x)
x = self.encoder.layer4(x)
x = self.encoder.avgpool(x)
feature = torch.flatten(x, start_dim=1)
return F.normalize(feature, dim=-1)
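# Minimal usage sketch (hypothetical shapes): with replace=True the first layer is a
# Conv3d, so the expected input is (B, 3, T, 112, 112); time is folded into the batch
# after conv1 and the model returns L2-normalized (B * T, 2048) features, e.g.
#   m = MoCo(replace=True)
#   feats = m(torch.randn(2, 3, 4, 112, 112))  # -> shape (8, 2048)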
|
[
"torch.nn.Dropout",
"fairseq.modules.SamePad",
"torch.nn.GLU",
"torch.nn.functional.dropout",
"torch.cat",
"fairseq.modules.Fp32GroupNorm",
"torch.nn.init.constant_",
"torch.arange",
"torch.nn.utils.weight_norm",
"torch.nn.functional.normalize",
"torch.no_grad",
"os.path.join",
"torch.flatten",
"torch.nn.init.kaiming_normal_",
"fairseq.utils.index_put",
"torch.nn.Conv3d",
"torch.load",
"torch.nn.Conv1d",
"fairseq.utils.get_activation_fn",
"torch.FloatTensor",
"fairseq.modules.TransposeLast",
"fairseq.modules.Fp32LayerNorm",
"fairseq.modules.LayerNorm",
"torch.nn.Linear",
"torch.zeros",
"fairseq.data.data_utils.compute_mask_indices",
"torch.randint",
"math.sqrt",
"torch.nn.ModuleList",
"torch.split",
"fairseq.utils.get_available_activation_fns",
"dataclasses.field",
"fairseq.models.register_model",
"torch.nn.GELU",
"torch.floor",
"torchvision.models.resnet.resnet50",
"fairseq.dataclass.ChoiceEnum",
"torch.from_numpy",
"torch.nn.ReLU",
"torch.stack",
"torch.nn.Sequential",
"fairseq.modules.GumbelVectorQuantizer",
"fairseq.modules.MultiheadAttention",
"fairseq.modules.GradMultiply.apply",
"torch.nn.init.normal_",
"numpy.random.random",
"fairseq.utils.is_xla_tensor",
"fairseq.utils.buffered_arange",
"torch.tensor"
] |
[((963, 1000), 'fairseq.dataclass.ChoiceEnum', 'ChoiceEnum', (["['default', 'layer_norm']"], {}), "(['default', 'layer_norm'])\n", (973, 1000), False, 'from fairseq.dataclass import ChoiceEnum, FairseqDataclass\n'), ((1032, 1086), 'fairseq.dataclass.ChoiceEnum', 'ChoiceEnum', (["['static', 'uniform', 'normal', 'poisson']"], {}), "(['static', 'uniform', 'normal', 'poisson'])\n", (1042, 1086), False, 'from fairseq.dataclass import ChoiceEnum, FairseqDataclass\n'), ((10150, 10198), 'fairseq.models.register_model', 'register_model', (['"""mm2vec"""'], {'dataclass': 'MM2VecConfig'}), "('mm2vec', dataclass=MM2VecConfig)\n", (10164, 10198), False, 'from fairseq.models import BaseFairseqModel, register_model\n'), ((1161, 1333), 'dataclasses.field', 'field', ([], {'default': '(1)', 'metadata': "{'help':\n 'model_stage=1 for training visual feature extractor only,model_stage=2 for pretrain on all subnetmodel_stage=? for fine-tune'\n }"}), "(default=1, metadata={'help':\n 'model_stage=1 for training visual feature extractor only,model_stage=2 for pretrain on all subnetmodel_stage=? for fine-tune'\n })\n", (1166, 1333), False, 'from dataclasses import dataclass, field\n'), ((1452, 1691), 'dataclasses.field', 'field', ([], {'default': '"""default"""', 'metadata': "{'help':\n 'mode for feature extractor. default has a single group norm with d groups in the first conv block, whereas layer_norm has layer norms in every block (meant to use with normalize=True)'\n }"}), "(default='default', metadata={'help':\n 'mode for feature extractor. default has a single group norm with d groups in the first conv block, whereas layer_norm has layer norms in every block (meant to use with normalize=True)'\n })\n", (1457, 1691), False, 'from dataclasses import dataclass, field\n'), ((1784, 1861), 'dataclasses.field', 'field', ([], {'default': '(12)', 'metadata': "{'help': 'num encoder layers in the transformer'}"}), "(default=12, metadata={'help': 'num encoder layers in the transformer'})\n", (1789, 1861), False, 'from dataclasses import dataclass, field\n'), ((1905, 1973), 'dataclasses.field', 'field', ([], {'default': '(768)', 'metadata': "{'help': 'encoder embedding dimension'}"}), "(default=768, metadata={'help': 'encoder embedding dimension'})\n", (1910, 1973), False, 'from dataclasses import dataclass, field\n'), ((2021, 2098), 'dataclasses.field', 'field', ([], {'default': '(3072)', 'metadata': "{'help': 'encoder embedding dimension for FFN'}"}), "(default=3072, metadata={'help': 'encoder embedding dimension for FFN'})\n", (2026, 2098), False, 'from dataclasses import dataclass, field\n'), ((2148, 2215), 'dataclasses.field', 'field', ([], {'default': '(12)', 'metadata': "{'help': 'num encoder attention heads'}"}), "(default=12, metadata={'help': 'num encoder attention heads'})\n", (2153, 2215), False, 'from dataclasses import dataclass, field\n'), ((2300, 2370), 'dataclasses.field', 'field', ([], {'default': '"""gelu"""', 'metadata': "{'help': 'activation function to use'}"}), "(default='gelu', metadata={'help': 'activation function to use'})\n", (2305, 2370), False, 'from dataclasses import dataclass, field\n'), ((2422, 2507), 'dataclasses.field', 'field', ([], {'default': '(0.1)', 'metadata': "{'help': 'dropout probability for the transformer'}"}), "(default=0.1, metadata={'help': 'dropout probability for the transformer'}\n )\n", (2427, 2507), False, 'from dataclasses import dataclass, field\n'), ((2548, 2634), 'dataclasses.field', 'field', ([], {'default': '(0.1)', 'metadata': "{'help': 'dropout probability 
for attention weights'}"}), "(default=0.1, metadata={'help':\n 'dropout probability for attention weights'})\n", (2553, 2634), False, 'from dataclasses import dataclass, field\n'), ((2677, 2765), 'dataclasses.field', 'field', ([], {'default': '(0.0)', 'metadata': "{'help': 'dropout probability after activation in FFN'}"}), "(default=0.0, metadata={'help':\n 'dropout probability after activation in FFN'})\n", (2682, 2765), False, 'from dataclasses import dataclass, field\n'), ((2807, 2895), 'dataclasses.field', 'field', ([], {'default': '(0.0)', 'metadata': "{'help': 'probability of dropping a tarnsformer layer'}"}), "(default=0.0, metadata={'help':\n 'probability of dropping a tarnsformer layer'})\n", (2812, 2895), False, 'from dataclasses import dataclass, field\n'), ((2933, 3025), 'dataclasses.field', 'field', ([], {'default': '(0.0)', 'metadata': "{'help': 'dropout to apply to the input (after feat extr)'}"}), "(default=0.0, metadata={'help':\n 'dropout to apply to the input (after feat extr)'})\n", (2938, 3025), False, 'from dataclasses import dataclass, field\n'), ((3075, 3170), 'dataclasses.field', 'field', ([], {'default': '(0.0)', 'metadata': "{'help': 'dropout to apply to the features (after feat extr)'}"}), "(default=0.0, metadata={'help':\n 'dropout to apply to the features (after feat extr)'})\n", (3080, 3170), False, 'from dataclasses import dataclass, field\n'), ((3212, 3358), 'dataclasses.field', 'field', ([], {'default': '(0)', 'metadata': "{'help':\n 'project final representations and targets to this many dimensions.set to encoder_embed_dim is <= 0'\n }"}), "(default=0, metadata={'help':\n 'project final representations and targets to this many dimensions.set to encoder_embed_dim is <= 0'\n })\n", (3217, 3358), False, 'from dataclasses import dataclass, field\n'), ((3439, 3526), 'dataclasses.field', 'field', ([], {'default': '(False)', 'metadata': "{'help': 'apply layernorm first in the transformer'}"}), "(default=False, metadata={'help':\n 'apply layernorm first in the transformer'})\n", (3444, 3526), False, 'from dataclasses import dataclass, field\n'), ((3574, 3841), 'dataclasses.field', 'field', ([], {'default': '"""[(512, 10, 5, 0)] + [(512, 3, 2, 0)] * 4 + [(512, 2, 2, 0)] + [(512, 2, 2, 0)]"""', 'metadata': "{'help':\n 'string describing convolutional feature extraction layers in form of a python list that contains [(dim, kernel_size, stride), ...]'\n }"}), "(default=\n '[(512, 10, 5, 0)] + [(512, 3, 2, 0)] * 4 + [(512, 2, 2, 0)] + [(512, 2, 2, 0)]'\n , metadata={'help':\n 'string describing convolutional feature extraction layers in form of a python list that contains [(dim, kernel_size, stride), ...]'\n })\n", (3579, 3841), False, 'from dataclasses import dataclass, field\n'), ((3905, 3976), 'dataclasses.field', 'field', ([], {'default': '(False)', 'metadata': "{'help': 'include bias in conv encoder'}"}), "(default=False, metadata={'help': 'include bias in conv encoder'})\n", (3910, 3976), False, 'from dataclasses import dataclass, field\n'), ((4015, 4087), 'dataclasses.field', 'field', ([], {'default': '(0.1)', 'metadata': "{'help': 'temperature to divide logits by'}"}), "(default=0.1, metadata={'help': 'temperature to divide logits by'})\n", (4020, 4087), False, 'from dataclasses import dataclass, field\n'), ((4131, 4195), 'dataclasses.field', 'field', ([], {'default': '(False)', 'metadata': "{'help': 'use quantized targets'}"}), "(default=False, metadata={'help': 'use quantized targets'})\n", (4136, 4195), False, 'from dataclasses import dataclass, field\n'), 
((4237, 4300), 'dataclasses.field', 'field', ([], {'default': '(False)', 'metadata': "{'help': 'use quantized inputs'}"}), "(default=False, metadata={'help': 'use quantized inputs'})\n", (4242, 4300), False, 'from dataclasses import dataclass, field\n'), ((4342, 4430), 'dataclasses.field', 'field', ([], {'default': '(False)', 'metadata': "{'help': 'use same quantizer for inputs and targets'}"}), "(default=False, metadata={'help':\n 'use same quantizer for inputs and targets'})\n", (4347, 4430), False, 'from dataclasses import dataclass, field\n'), ((4464, 4539), 'dataclasses.field', 'field', ([], {'default': '(False)', 'metadata': "{'help': 'adds projection + glu to targets'}"}), "(default=False, metadata={'help': 'adds projection + glu to targets'})\n", (4469, 4539), False, 'from dataclasses import dataclass, field\n'), ((4585, 4674), 'dataclasses.field', 'field', ([], {'default': '(1.0)', 'metadata': "{'help': 'multiply feature extractor var grads by this'}"}), "(default=1.0, metadata={'help':\n 'multiply feature extractor var grads by this'})\n", (4590, 4674), False, 'from dataclasses import dataclass, field\n'), ((4712, 4777), 'dataclasses.field', 'field', ([], {'default': '(1)', 'metadata': "{'help': 'number of quantizer layers'}"}), "(default=1, metadata={'help': 'number of quantizer layers'})\n", (4717, 4777), False, 'from dataclasses import dataclass, field\n'), ((4829, 4937), 'dataclasses.field', 'field', ([], {'default': '(3)', 'metadata': "{'help': 'dimensionality increase for inner quantizer layers (if depth > 1)'}"}), "(default=3, metadata={'help':\n 'dimensionality increase for inner quantizer layers (if depth > 1)'})\n", (4834, 4937), False, 'from dataclasses import dataclass, field\n'), ((5002, 5105), 'dataclasses.field', 'field', ([], {'default': '(320)', 'metadata': "{'help': 'number of latent variables V in each group of the codebook'}"}), "(default=320, metadata={'help':\n 'number of latent variables V in each group of the codebook'})\n", (5007, 5105), False, 'from dataclasses import dataclass, field\n'), ((5150, 5247), 'dataclasses.field', 'field', ([], {'default': '(2)', 'metadata': "{'help': 'number of groups G of latent variables in the codebook'}"}), "(default=2, metadata={'help':\n 'number of groups G of latent variables in the codebook'})\n", (5155, 5247), False, 'from dataclasses import dataclass, field\n'), ((5289, 5432), 'dataclasses.field', 'field', ([], {'default': '(0)', 'metadata': "{'help':\n 'if > 0, uses this dimensionality for latent variables. otherwise uses final_dim / latent_groups'\n }"}), "(default=0, metadata={'help':\n 'if > 0, uses this dimensionality for latent variables. 
otherwise uses final_dim / latent_groups'\n })\n", (5294, 5432), False, 'from dataclasses import dataclass, field\n'), ((5522, 5573), 'dataclasses.field', 'field', ([], {'default': '(10)', 'metadata': "{'help': 'mask length'}"}), "(default=10, metadata={'help': 'mask length'})\n", (5527, 5573), False, 'from dataclasses import dataclass, field\n'), ((5597, 5685), 'dataclasses.field', 'field', ([], {'default': '(0.65)', 'metadata': "{'help': 'probability of replacing a token with mask'}"}), "(default=0.65, metadata={'help':\n 'probability of replacing a token with mask'})\n", (5602, 5685), False, 'from dataclasses import dataclass, field\n'), ((5747, 5818), 'dataclasses.field', 'field', ([], {'default': '"""static"""', 'metadata': "{'help': 'how to choose mask length'}"}), "(default='static', metadata={'help': 'how to choose mask length'})\n", (5752, 5818), False, 'from dataclasses import dataclass, field\n'), ((5857, 6000), 'dataclasses.field', 'field', ([], {'default': '(0)', 'metadata': "{'help':\n 'secondary mask argument (used for more complex distributions), see help in compute_mask_indices'\n }"}), "(default=0, metadata={'help':\n 'secondary mask argument (used for more complex distributions), see help in compute_mask_indices'\n })\n", (5862, 6000), False, 'from dataclasses import dataclass, field\n'), ((6080, 6156), 'dataclasses.field', 'field', ([], {'default': '(False)', 'metadata': "{'help': 'whether to allow masks to overlap'}"}), "(default=False, metadata={'help': 'whether to allow masks to overlap'})\n", (6085, 6156), False, 'from dataclasses import dataclass, field\n'), ((6197, 6290), 'dataclasses.field', 'field', ([], {'default': '(1)', 'metadata': "{'help': 'min space between spans (if no overlap is enabled)'}"}), "(default=1, metadata={'help':\n 'min space between spans (if no overlap is enabled)'})\n", (6202, 6290), False, 'from dataclasses import dataclass, field\n'), ((6364, 6450), 'dataclasses.field', 'field', ([], {'default': '(10)', 'metadata': "{'help': 'length of the mask for features (channels)'}"}), "(default=10, metadata={'help':\n 'length of the mask for features (channels)'})\n", (6369, 6450), False, 'from dataclasses import dataclass, field\n'), ((6492, 6578), 'dataclasses.field', 'field', ([], {'default': '(0.0)', 'metadata': "{'help': 'probability of replacing a feature with 0'}"}), "(default=0.0, metadata={'help':\n 'probability of replacing a feature with 0'})\n", (6497, 6578), False, 'from dataclasses import dataclass, field\n'), ((6686, 6781), 'dataclasses.field', 'field', ([], {'default': '"""static"""', 'metadata': "{'help': 'how to choose mask length for channel masking'}"}), "(default='static', metadata={'help':\n 'how to choose mask length for channel masking'})\n", (6691, 6781), False, 'from dataclasses import dataclass, field\n'), ((6833, 6977), 'dataclasses.field', 'field', ([], {'default': '(0)', 'metadata': "{'help':\n 'secondary mask argument (used for more complex distributions), see help in compute_mask_indicesh'\n }"}), "(default=0, metadata={'help':\n 'secondary mask argument (used for more complex distributions), see help in compute_mask_indicesh'\n })\n", (6838, 6977), False, 'from dataclasses import dataclass, field\n'), ((7065, 7153), 'dataclasses.field', 'field', ([], {'default': '(False)', 'metadata': "{'help': 'whether to allow channel masks to overlap'}"}), "(default=False, metadata={'help':\n 'whether to allow channel masks to overlap'})\n", (7070, 7153), False, 'from dataclasses import dataclass, field\n'), ((7198, 7291), 
'dataclasses.field', 'field', ([], {'default': '(1)', 'metadata': "{'help': 'min space between spans (if no overlap is enabled)'}"}), "(default=1, metadata={'help':\n 'min space between spans (if no overlap is enabled)'})\n", (7203, 7291), False, 'from dataclasses import dataclass, field\n'), ((7362, 7455), 'dataclasses.field', 'field', ([], {'default': '(100)', 'metadata': "{'help': 'number of negative examples from the same sample'}"}), "(default=100, metadata={'help':\n 'number of negative examples from the same sample'})\n", (7367, 7455), False, 'from dataclasses import dataclass, field\n'), ((7513, 7616), 'dataclasses.field', 'field', ([], {'default': '(False)', 'metadata': "{'help': 'sample negatives from everywhere, not just masked states'}"}), "(default=False, metadata={'help':\n 'sample negatives from everywhere, not just masked states'})\n", (7518, 7616), False, 'from dataclasses import dataclass, field\n'), ((7670, 7760), 'dataclasses.field', 'field', ([], {'default': '(0)', 'metadata': "{'help': 'number of negative examples from the any sample'}"}), "(default=0, metadata={'help':\n 'number of negative examples from the any sample'})\n", (7675, 7760), False, 'from dataclasses import dataclass, field\n'), ((7801, 7876), 'dataclasses.field', 'field', ([], {'default': '(0)', 'metadata': "{'help': 'number of negative examples codebook'}"}), "(default=0, metadata={'help': 'number of negative examples codebook'})\n", (7806, 7876), False, 'from dataclasses import dataclass, field\n'), ((7940, 8042), 'dataclasses.field', 'field', ([], {'default': '(128)', 'metadata': "{'help': 'number of filters for convolutional positional embeddings'}"}), "(default=128, metadata={'help':\n 'number of filters for convolutional positional embeddings'})\n", (7945, 8042), False, 'from dataclasses import dataclass, field\n'), ((8089, 8188), 'dataclasses.field', 'field', ([], {'default': '(16)', 'metadata': "{'help': 'number of groups for convolutional positional embedding'}"}), "(default=16, metadata={'help':\n 'number of groups for convolutional positional embedding'})\n", (8094, 8188), False, 'from dataclasses import dataclass, field\n'), ((8255, 8406), 'dataclasses.field', 'field', ([], {'default': '(2, 0.5, 0.999995)', 'metadata': "{'help':\n 'temperature for latent variable sampling. can be tuple of 3 values (start, end, decay)'\n }"}), "(default=(2, 0.5, 0.999995), metadata={'help':\n 'temperature for latent variable sampling. 
can be tuple of 3 values (start, end, decay)'\n })\n", (8260, 8406), False, 'from dataclasses import dataclass, field\n'), ((8515, 8764), 'dataclasses.field', 'field', ([], {'default': '"""[(512, 11, 1, 5)] * 3 + [(1024, 11, 1, 5)]"""', 'metadata': "{'help':\n 'string describing visual-subnet convolutional feature extraction layers in form of a python list that contains [(dim, kernel_size, stride, padding), ...]'\n }"}), "(default='[(512, 11, 1, 5)] * 3 + [(1024, 11, 1, 5)]', metadata={\n 'help':\n 'string describing visual-subnet convolutional feature extraction layers in form of a python list that contains [(dim, kernel_size, stride, padding), ...]'\n })\n", (8520, 8764), False, 'from dataclasses import dataclass, field\n'), ((8847, 8921), 'dataclasses.field', 'field', ([], {'default': '(112)', 'metadata': "{'help': 'number of dims of visual pictures'}"}), "(default=112, metadata={'help': 'number of dims of visual pictures'})\n", (8852, 8921), False, 'from dataclasses import dataclass, field\n'), ((8975, 9042), 'dataclasses.field', 'field', ([], {'default': '(2048)', 'metadata': "{'help': 'number of dims after MoCo'}"}), "(default=2048, metadata={'help': 'number of dims after MoCo'})\n", (8980, 9042), False, 'from dataclasses import dataclass, field\n'), ((9092, 9168), 'dataclasses.field', 'field', ([], {'default': '(512)', 'metadata': "{'help': 'output dimension of projection head'}"}), "(default=512, metadata={'help': 'output dimension of projection head'})\n", (9097, 9168), False, 'from dataclasses import dataclass, field\n'), ((9236, 9357), 'dataclasses.field', 'field', ([], {'default': '"""./checkpoints-mm-2/"""', 'metadata': "{'help': 'path to mm2vec stage 1 last model or stage 2 process model'}"}), "(default='./checkpoints-mm-2/', metadata={'help':\n 'path to mm2vec stage 1 last model or stage 2 process model'})\n", (9241, 9357), False, 'from dataclasses import dataclass, field\n'), ((9448, 9514), 'dataclasses.field', 'field', ([], {'default': '(0.5)', 'metadata': "{'help': 'weight for audio_features'}"}), "(default=0.5, metadata={'help': 'weight for audio_features'})\n", (9453, 9514), False, 'from dataclasses import dataclass, field\n'), ((9585, 9651), 'dataclasses.field', 'field', ([], {'default': '(0.5)', 'metadata': "{'help': 'weight for audio_features'}"}), "(default=0.5, metadata={'help': 'weight for audio_features'})\n", (9590, 9651), False, 'from dataclasses import dataclass, field\n'), ((9731, 9806), 'dataclasses.field', 'field', ([], {'default': '(False)', 'metadata': "{'help': 'remove quantizer pretrain params'}"}), "(default=False, metadata={'help': 'remove quantizer pretrain params'})\n", (9736, 9806), False, 'from dataclasses import dataclass, field\n'), ((9888, 9963), 'dataclasses.field', 'field', ([], {'default': '(False)', 'metadata': "{'help': 'freeze quantizer pretrain params'}"}), "(default=False, metadata={'help': 'freeze quantizer pretrain params'})\n", (9893, 9963), False, 'from dataclasses import dataclass, field\n'), ((10043, 10130), 'dataclasses.field', 'field', ([], {'default': '(False)', 'metadata': "{'help': 'replace first conv2d in MoCo with conv3d'}"}), "(default=False, metadata={'help':\n 'replace first conv2d in MoCo with conv3d'})\n", (10048, 10130), False, 'from dataclasses import dataclass, field\n'), ((2260, 2296), 'fairseq.utils.get_available_activation_fns', 'utils.get_available_activation_fns', ([], {}), '()\n', (2294, 2296), False, 'from fairseq import utils\n'), ((12796, 12825), 'torch.nn.Dropout', 'nn.Dropout', (['cfg.dropout_input'], 
{}), '(cfg.dropout_input)\n', (12806, 12825), True, 'import torch.nn as nn\n'), ((12858, 12890), 'torch.nn.Dropout', 'nn.Dropout', (['cfg.dropout_features'], {}), '(cfg.dropout_features)\n', (12868, 12890), True, 'import torch.nn as nn\n'), ((15158, 15189), 'fairseq.modules.LayerNorm', 'LayerNorm', (['self.audio_embed_dim'], {}), '(self.audio_embed_dim)\n', (15167, 15189), False, 'from fairseq.modules import Fp32GroupNorm, Fp32LayerNorm, GradMultiply, GumbelVectorQuantizer, LayerNorm, MultiheadAttention, SamePad, TransposeLast\n'), ((15472, 15515), 'torch.nn.Linear', 'nn.Linear', (['cfg.encoder_embed_dim', 'final_dim'], {}), '(cfg.encoder_embed_dim, final_dim)\n', (15481, 15515), True, 'import torch.nn as nn\n'), ((23459, 23491), 'torch.cat', 'torch.cat', (['[y, negatives]'], {'dim': '(0)'}), '([y, negatives], dim=0)\n', (23468, 23491), False, 'import torch\n'), ((25168, 25244), 'torch.tensor', 'torch.tensor', (['[]'], {'dtype': 'visual_features.dtype', 'device': 'visual_features.device'}), '([], dtype=visual_features.dtype, device=visual_features.device)\n', (25180, 25244), False, 'import torch\n'), ((43875, 43890), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (43888, 43890), True, 'import torch.nn as nn\n'), ((45337, 45474), 'torch.nn.Conv1d', 'nn.Conv1d', (['self.embedding_dim', 'self.embedding_dim'], {'kernel_size': 'args.conv_pos', 'padding': '(args.conv_pos // 2)', 'groups': 'args.conv_pos_groups'}), '(self.embedding_dim, self.embedding_dim, kernel_size=args.conv_pos,\n padding=args.conv_pos // 2, groups=args.conv_pos_groups)\n', (45346, 45474), True, 'import torch.nn as nn\n'), ((45576, 45645), 'math.sqrt', 'math.sqrt', (['(4 * (1.0 - dropout) / (args.conv_pos * self.embedding_dim))'], {}), '(4 * (1.0 - dropout) / (args.conv_pos * self.embedding_dim))\n', (45585, 45645), False, 'import math\n'), ((45656, 45710), 'torch.nn.init.normal_', 'nn.init.normal_', (['self.pos_conv.weight'], {'mean': '(0)', 'std': 'std'}), '(self.pos_conv.weight, mean=0, std=std)\n', (45671, 45710), True, 'import torch.nn as nn\n'), ((45719, 45759), 'torch.nn.init.constant_', 'nn.init.constant_', (['self.pos_conv.bias', '(0)'], {}), '(self.pos_conv.bias, 0)\n', (45736, 45759), True, 'import torch.nn as nn\n'), ((45785, 45842), 'torch.nn.utils.weight_norm', 'nn.utils.weight_norm', (['self.pos_conv'], {'name': '"""weight"""', 'dim': '(2)'}), "(self.pos_conv, name='weight', dim=2)\n", (45805, 45842), True, 'import torch.nn as nn\n'), ((46679, 46708), 'fairseq.modules.LayerNorm', 'LayerNorm', (['self.embedding_dim'], {}), '(self.embedding_dim)\n', (46688, 46708), False, 'from fairseq.modules import Fp32GroupNorm, Fp32LayerNorm, GradMultiply, GumbelVectorQuantizer, LayerNorm, MultiheadAttention, SamePad, TransposeLast\n'), ((47403, 47455), 'torch.nn.functional.dropout', 'F.dropout', (['x'], {'p': 'self.dropout', 'training': 'self.training'}), '(x, p=self.dropout, training=self.training)\n', (47412, 47455), True, 'import torch.nn.functional as F\n'), ((49190, 49228), 'fairseq.utils.get_activation_fn', 'utils.get_activation_fn', (['activation_fn'], {}), '(activation_fn)\n', (49213, 49228), False, 'from fairseq import utils\n'), ((49254, 49366), 'fairseq.modules.MultiheadAttention', 'MultiheadAttention', (['self.embedding_dim', 'num_attention_heads'], {'dropout': 'attention_dropout', 'self_attention': '(True)'}), '(self.embedding_dim, num_attention_heads, dropout=\n attention_dropout, self_attention=True)\n', (49272, 49366), False, 'from fairseq.modules import Fp32GroupNorm, Fp32LayerNorm, GradMultiply, 
GumbelVectorQuantizer, LayerNorm, MultiheadAttention, SamePad, TransposeLast\n'), ((49446, 49465), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (49456, 49465), True, 'import torch.nn as nn\n'), ((49490, 49525), 'torch.nn.Dropout', 'nn.Dropout', (['self.activation_dropout'], {}), '(self.activation_dropout)\n', (49500, 49525), True, 'import torch.nn as nn\n'), ((49550, 49569), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (49560, 49569), True, 'import torch.nn as nn\n'), ((49719, 49748), 'fairseq.modules.LayerNorm', 'LayerNorm', (['self.embedding_dim'], {}), '(self.embedding_dim)\n', (49728, 49748), False, 'from fairseq.modules import Fp32GroupNorm, Fp32LayerNorm, GradMultiply, GumbelVectorQuantizer, LayerNorm, MultiheadAttention, SamePad, TransposeLast\n'), ((49768, 49816), 'torch.nn.Linear', 'nn.Linear', (['self.embedding_dim', 'ffn_embedding_dim'], {}), '(self.embedding_dim, ffn_embedding_dim)\n', (49777, 49816), True, 'import torch.nn as nn\n'), ((49836, 49884), 'torch.nn.Linear', 'nn.Linear', (['ffn_embedding_dim', 'self.embedding_dim'], {}), '(ffn_embedding_dim, self.embedding_dim)\n', (49845, 49884), True, 'import torch.nn as nn\n'), ((49989, 50018), 'fairseq.modules.LayerNorm', 'LayerNorm', (['self.embedding_dim'], {}), '(self.embedding_dim)\n', (49998, 50018), False, 'from fairseq.modules import Fp32GroupNorm, Fp32LayerNorm, GradMultiply, GumbelVectorQuantizer, LayerNorm, MultiheadAttention, SamePad, TransposeLast\n'), ((51722, 51737), 'torch.nn.Sequential', 'nn.Sequential', ([], {}), '()\n', (51735, 51737), True, 'import torch.nn as nn\n'), ((53000, 53029), 'torch.flatten', 'torch.flatten', (['x'], {'start_dim': '(1)'}), '(x, start_dim=1)\n', (53013, 53029), False, 'import torch\n'), ((53045, 53073), 'torch.nn.functional.normalize', 'F.normalize', (['feature'], {'dim': '(-1)'}), '(feature, dim=-1)\n', (53056, 53073), True, 'import torch.nn.functional as F\n'), ((11519, 11573), 'torch.nn.Linear', 'nn.Linear', (['self.audio_embed_dim', 'cfg.encoder_embed_dim'], {}), '(self.audio_embed_dim, cfg.encoder_embed_dim)\n', (11528, 11573), True, 'import torch.nn as nn\n'), ((11877, 11886), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (11884, 11886), True, 'import torch.nn as nn\n'), ((13514, 13781), 'fairseq.modules.GumbelVectorQuantizer', 'GumbelVectorQuantizer', ([], {'dim': 'self.audio_embed_dim', 'num_vars': 'cfg.latent_vars', 'temp': 'cfg.latent_temp', 'groups': 'cfg.latent_groups', 'combine_groups': '(False)', 'vq_dim': 'vq_dim', 'time_first': '(True)', 'weight_proj_depth': 'cfg.quantizer_depth', 'weight_proj_factor': 'cfg.quantizer_factor'}), '(dim=self.audio_embed_dim, num_vars=cfg.latent_vars,\n temp=cfg.latent_temp, groups=cfg.latent_groups, combine_groups=False,\n vq_dim=vq_dim, time_first=True, weight_proj_depth=cfg.quantizer_depth,\n weight_proj_factor=cfg.quantizer_factor)\n', (13535, 13781), False, 'from fairseq.modules import Fp32GroupNorm, Fp32LayerNorm, GradMultiply, GumbelVectorQuantizer, LayerNorm, MultiheadAttention, SamePad, TransposeLast\n'), ((13980, 14008), 'torch.nn.Linear', 'nn.Linear', (['vq_dim', 'final_dim'], {}), '(vq_dim, final_dim)\n', (13989, 14008), True, 'import torch.nn as nn\n'), ((14052, 14084), 'torch.nn.Linear', 'nn.Linear', (['self.embed', 'final_dim'], {}), '(self.embed, final_dim)\n', (14061, 14084), True, 'import torch.nn as nn\n'), ((18668, 18901), 'fairseq.data.data_utils.compute_mask_indices', 'compute_mask_indices', (['(B, C)', 'None', 'self.mask_channel_prob', 'self.mask_channel_length', 
'self.mask_channel_selection', 'self.mask_channel_other'], {'no_overlap': 'self.no_mask_channel_overlap', 'min_space': 'self.mask_channel_min_space'}), '((B, C), None, self.mask_channel_prob, self.\n mask_channel_length, self.mask_channel_selection, self.\n mask_channel_other, no_overlap=self.no_mask_channel_overlap, min_space=\n self.mask_channel_min_space)\n', (18688, 18901), False, 'from fairseq.data.data_utils import compute_mask_indices\n'), ((19865, 19912), 'fairseq.utils.index_put', 'index_put', (['x_audio', 'mask_indices', 'self.mask_emb'], {}), '(x_audio, mask_indices, self.mask_emb)\n', (19874, 19912), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n'), ((19936, 19984), 'fairseq.utils.index_put', 'index_put', (['x_visual', 'mask_indices', 'self.mask_emb'], {}), '(x_visual, mask_indices, self.mask_emb)\n', (19945, 19984), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n'), ((20865, 20908), 'fairseq.utils.index_put', 'index_put', (['x_audio', 'mask_channel_indices', '(0)'], {}), '(x_audio, mask_channel_indices, 0)\n', (20874, 20908), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n'), ((20932, 20976), 'fairseq.utils.index_put', 'index_put', (['x_visual', 'mask_channel_indices', '(0)'], {}), '(x_visual, mask_channel_indices, 0)\n', (20941, 20976), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n'), ((21504, 21519), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (21517, 21519), False, 'import torch\n'), ((22768, 22812), 'torch.cat', 'torch.cat', (['[neg_idxs, cross_neg_idxs]'], {'dim': '(1)'}), '([neg_idxs, cross_neg_idxs], dim=1)\n', (22777, 22812), False, 'import torch\n'), ((23636, 23657), 'fairseq.utils.is_xla_tensor', 'is_xla_tensor', (['logits'], {}), '(logits)\n', (23649, 23657), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n'), ((23983, 24033), 'fairseq.utils.index_put', 'index_put', (['logits[1:]', 'neg_is_pos', 'self._inftensor'], {}), '(logits[1:], neg_is_pos, self._inftensor)\n', (23992, 24033), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n'), ((24311, 24365), 'torch.floor', 'torch.floor', (['((input_length - kernel_size) / stride + 1)'], {}), '((input_length - kernel_size) / stride + 1)\n', (24322, 24365), False, 'import torch\n'), ((26504, 26531), 'torch.cat', 'torch.cat', (['visual_source', '(1)'], {}), '(visual_source, 1)\n', (26513, 26531), False, 'import torch\n'), ((26559, 26611), 'torch.split', 'torch.split', (['visual_source', 'self.visual_input_dim', '(1)'], {}), '(visual_source, self.visual_input_dim, 1)\n', (26570, 26611), False, 'import torch\n'), ((26640, 26664), 'torch.cat', 'torch.cat', (['visual_source'], {}), '(visual_source)\n', (26649, 26664), False, 'import torch\n'), ((27368, 27394), 'torch.stack', 'torch.stack', (['visual_source'], {}), '(visual_source)\n', (27379, 27394), False, 'import torch\n'), ((27423, 27475), 'torch.split', 'torch.split', (['visual_source', 'self.visual_input_dim', '(1)'], {}), '(visual_source, self.visual_input_dim, 1)\n', (27434, 27475), False, 'import torch\n'), ((27504, 27528), 'torch.cat', 'torch.cat', (['visual_source'], {}), '(visual_source)\n', (27513, 27528), False, 'import torch\n'), ((30776, 30876), 'torch.zeros', 'torch.zeros', (['audio_features.shape[:2]'], {'dtype': 'audio_features.dtype', 'device': 'audio_features.device'}), '(audio_features.shape[:2], dtype=audio_features.dtype, device=\n audio_features.device)\n', (30787, 30876), 
False, 'import torch\n'), ((45896, 45918), 'fairseq.modules.SamePad', 'SamePad', (['args.conv_pos'], {}), '(args.conv_pos)\n', (45903, 45918), False, 'from fairseq.modules import Fp32GroupNorm, Fp32LayerNorm, GradMultiply, GumbelVectorQuantizer, LayerNorm, MultiheadAttention, SamePad, TransposeLast\n'), ((45920, 45929), 'torch.nn.GELU', 'nn.GELU', ([], {}), '()\n', (45927, 45929), True, 'import torch.nn as nn\n'), ((47172, 47201), 'fairseq.utils.index_put', 'index_put', (['x', 'padding_mask', '(0)'], {}), '(x, padding_mask, 0)\n', (47181, 47201), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n'), ((47647, 47665), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (47663, 47665), True, 'import numpy as np\n'), ((15385, 15420), 'torch.nn.Linear', 'nn.Linear', (['final_dim', '(final_dim * 2)'], {}), '(final_dim, final_dim * 2)\n', (15394, 15420), True, 'import torch.nn as nn\n'), ((15422, 15430), 'torch.nn.GLU', 'nn.GLU', ([], {}), '()\n', (15428, 15430), True, 'import torch.nn as nn\n'), ((17727, 17775), 'os.path.join', 'os.path.join', (['cfg.m2v_path', '"""checkpoint_last.pt"""'], {}), "(cfg.m2v_path, 'checkpoint_last.pt')\n", (17739, 17775), False, 'import os\n'), ((19371, 19571), 'fairseq.data.data_utils.compute_mask_indices', 'compute_mask_indices', (['(B, T)', 'padding_mask', 'self.mask_prob', 'self.mask_length', 'self.mask_selection', 'self.mask_other'], {'min_masks': '(2)', 'no_overlap': 'self.no_mask_overlap', 'min_space': 'self.mask_min_space'}), '((B, T), padding_mask, self.mask_prob, self.mask_length,\n self.mask_selection, self.mask_other, min_masks=2, no_overlap=self.\n no_mask_overlap, min_space=self.mask_min_space)\n', (19391, 19571), False, 'from fairseq.data.data_utils import compute_mask_indices\n'), ((20214, 20447), 'fairseq.data.data_utils.compute_mask_indices', 'compute_mask_indices', (['(B, C)', 'None', 'self.mask_channel_prob', 'self.mask_channel_length', 'self.mask_channel_selection', 'self.mask_channel_other'], {'no_overlap': 'self.no_mask_channel_overlap', 'min_space': 'self.mask_channel_min_space'}), '((B, C), None, self.mask_channel_prob, self.\n mask_channel_length, self.mask_channel_selection, self.\n mask_channel_other, no_overlap=self.no_mask_channel_overlap, min_space=\n self.mask_channel_min_space)\n', (20234, 20447), False, 'from fairseq.data.data_utils import compute_mask_indices\n'), ((21833, 21904), 'torch.randint', 'torch.randint', ([], {'low': '(0)', 'high': '(high - 1)', 'size': '(bsz, self.n_negatives * num)'}), '(low=0, high=high - 1, size=(bsz, self.n_negatives * num))\n', (21846, 21904), False, 'import torch\n'), ((22285, 22378), 'torch.randint', 'torch.randint', ([], {'low': '(0)', 'high': '(cross_high - 1)', 'size': '(bsz, self.cross_sample_negatives * num)'}), '(low=0, high=cross_high - 1, size=(bsz, self.\n cross_sample_negatives * num))\n', (22298, 22378), False, 'import torch\n'), ((28734, 28792), 'fairseq.modules.GradMultiply.apply', 'GradMultiply.apply', (['audio_features', 'self.feature_grad_mult'], {}), '(audio_features, self.feature_grad_mult)\n', (28752, 28792), False, 'from fairseq.modules import Fp32GroupNorm, Fp32LayerNorm, GradMultiply, GumbelVectorQuantizer, LayerNorm, MultiheadAttention, SamePad, TransposeLast\n'), ((28827, 28886), 'fairseq.modules.GradMultiply.apply', 'GradMultiply.apply', (['visual_features', 'self.feature_grad_mult'], {}), '(visual_features, self.feature_grad_mult)\n', (28845, 28886), False, 'from fairseq.modules import Fp32GroupNorm, Fp32LayerNorm, GradMultiply, 
GumbelVectorQuantizer, LayerNorm, MultiheadAttention, SamePad, TransposeLast\n'), ((28918, 28933), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (28931, 28933), False, 'import torch\n'), ((37669, 37702), 'torch.cat', 'torch.cat', (['[negs, cb_negs]'], {'dim': '(0)'}), '([negs, cb_negs], dim=0)\n', (37678, 37702), False, 'import torch\n'), ((38313, 38335), 'fairseq.utils.is_xla_tensor', 'is_xla_tensor', (['x_audio'], {}), '(x_audio)\n', (38326, 38335), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n'), ((38344, 38367), 'fairseq.utils.is_xla_tensor', 'is_xla_tensor', (['x_visual'], {}), '(x_visual)\n', (38357, 38367), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n'), ((42733, 42833), 'torch.nn.Conv1d', 'nn.Conv1d', (['n_in', 'n_out'], {'kernel_size': 'kernel_size', 'stride': 'stride', 'padding': 'padding', 'bias': 'conv_bias'}), '(n_in, n_out, kernel_size=kernel_size, stride=stride, padding=\n padding, bias=conv_bias)\n', (42742, 42833), True, 'import torch.nn as nn\n'), ((42845, 42881), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['conv.weight'], {}), '(conv.weight)\n', (42868, 42881), True, 'import torch.nn as nn\n'), ((51797, 51807), 'torchvision.models.resnet.resnet50', 'resnet50', ([], {}), '()\n', (51805, 51807), False, 'from torchvision.models.resnet import resnet50\n'), ((15022, 15062), 'torch.FloatTensor', 'torch.FloatTensor', (['cfg.encoder_embed_dim'], {}), '(cfg.encoder_embed_dim)\n', (15039, 15062), False, 'import torch\n'), ((17807, 17834), 'torch.load', 'torch.load', (['checkpoint_path'], {}), '(checkpoint_path)\n', (17817, 17834), False, 'import torch\n'), ((23879, 23900), 'fairseq.utils.is_xla_tensor', 'is_xla_tensor', (['logits'], {}), '(logits)\n', (23892, 23900), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n'), ((31093, 31156), 'torch.arange', 'torch.arange', (['padding_mask.shape[0]'], {'device': 'padding_mask.device'}), '(padding_mask.shape[0], device=padding_mask.device)\n', (31105, 31156), False, 'import torch\n'), ((32901, 32923), 'fairseq.utils.is_xla_tensor', 'is_xla_tensor', (['x_audio'], {}), '(x_audio)\n', (32914, 32923), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n'), ((32932, 32954), 'fairseq.utils.is_xla_tensor', 'is_xla_tensor', (['x_audio'], {}), '(x_audio)\n', (32945, 32954), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n'), ((38707, 38727), 'fairseq.utils.is_xla_tensor', 'is_xla_tensor', (['x_cat'], {}), '(x_cat)\n', (38720, 38727), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n'), ((43186, 43207), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': 'dropout'}), '(p=dropout)\n', (43196, 43207), True, 'import torch.nn as nn\n'), ((43438, 43447), 'torch.nn.GELU', 'nn.GELU', ([], {}), '()\n', (43445, 43447), True, 'import torch.nn as nn\n'), ((52184, 52276), 'torch.nn.Conv3d', 'nn.Conv3d', (['(3)', '(64)'], {'kernel_size': '(7, 7, 7)', 'stride': '(1, 2, 2)', 'padding': '(3, 3, 3)', 'bias': '(False)'}), '(3, 64, kernel_size=(7, 7, 7), stride=(1, 2, 2), padding=(3, 3, 3),\n bias=False)\n', (52193, 52276), True, 'import torch.nn as nn\n'), ((19793, 19823), 'torch.from_numpy', 'torch.from_numpy', (['mask_indices'], {}), '(mask_indices)\n', (19809, 19823), False, 'import torch\n'), ((43268, 43283), 'fairseq.modules.TransposeLast', 'TransposeLast', ([], {}), '()\n', (43281, 43283), False, 'from fairseq.modules import Fp32GroupNorm, Fp32LayerNorm, GradMultiply, 
GumbelVectorQuantizer, LayerNorm, MultiheadAttention, SamePad, TransposeLast\n'), ((43309, 43352), 'fairseq.modules.Fp32LayerNorm', 'Fp32LayerNorm', (['dim'], {'elementwise_affine': '(True)'}), '(dim, elementwise_affine=True)\n', (43322, 43352), False, 'from fairseq.modules import Fp32GroupNorm, Fp32LayerNorm, GradMultiply, GumbelVectorQuantizer, LayerNorm, MultiheadAttention, SamePad, TransposeLast\n'), ((43378, 43393), 'fairseq.modules.TransposeLast', 'TransposeLast', ([], {}), '()\n', (43391, 43393), False, 'from fairseq.modules import Fp32GroupNorm, Fp32LayerNorm, GradMultiply, GumbelVectorQuantizer, LayerNorm, MultiheadAttention, SamePad, TransposeLast\n'), ((43590, 43611), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': 'dropout'}), '(p=dropout)\n', (43600, 43611), True, 'import torch.nn as nn\n'), ((43633, 43669), 'fairseq.modules.Fp32GroupNorm', 'Fp32GroupNorm', (['dim', 'dim'], {'affine': '(True)'}), '(dim, dim, affine=True)\n', (43646, 43669), False, 'from fairseq.modules import Fp32GroupNorm, Fp32LayerNorm, GradMultiply, GumbelVectorQuantizer, LayerNorm, MultiheadAttention, SamePad, TransposeLast\n'), ((43691, 43700), 'torch.nn.GELU', 'nn.GELU', ([], {}), '()\n', (43698, 43700), True, 'import torch.nn as nn\n'), ((43788, 43809), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': 'dropout'}), '(p=dropout)\n', (43798, 43809), True, 'import torch.nn as nn\n'), ((43811, 43820), 'torch.nn.GELU', 'nn.GELU', ([], {}), '()\n', (43818, 43820), True, 'import torch.nn as nn\n'), ((23821, 23842), 'torch.tensor', 'torch.tensor', (['fillval'], {}), '(fillval)\n', (23833, 23842), False, 'import torch\n'), ((25533, 25647), 'torch.zeros', 'torch.zeros', (['(visual_source_len - l, 3, 112, 112)'], {'dtype': 'visual_features.dtype', 'device': 'visual_features.device'}), '((visual_source_len - l, 3, 112, 112), dtype=visual_features.\n dtype, device=visual_features.device)\n', (25544, 25647), False, 'import torch\n'), ((16201, 16243), 'torch.load', 'torch.load', (['"""../pretrain/wav2vec_small.pt"""'], {}), "('../pretrain/wav2vec_small.pt')\n", (16211, 16243), False, 'import torch\n'), ((16383, 16439), 'torch.load', 'torch.load', (['"""../pretrain/moco_v2_800ep_pretrain.pth.tar"""'], {}), "('../pretrain/moco_v2_800ep_pretrain.pth.tar')\n", (16393, 16439), False, 'import torch\n'), ((19083, 19121), 'torch.from_numpy', 'torch.from_numpy', (['mask_channel_indices'], {}), '(mask_channel_indices)\n', (19099, 19121), False, 'import torch\n'), ((20673, 20711), 'torch.from_numpy', 'torch.from_numpy', (['mask_channel_indices'], {}), '(mask_channel_indices)\n', (20689, 20711), False, 'import torch\n'), ((21650, 21670), 'fairseq.utils.buffered_arange', 'buffered_arange', (['num'], {}), '(num)\n', (21665, 21670), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n'), ((22085, 22105), 'fairseq.utils.buffered_arange', 'buffered_arange', (['num'], {}), '(num)\n', (22100, 22105), False, 'from fairseq.utils import buffered_arange, index_put, is_xla_tensor\n')]
|
from keras.models import Sequential
from keras.models import Model
from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda
from keras.layers import concatenate
import numpy as np
import tensorflow as tf
def to_yuv(img, in_cspace='RGB'):
img_float = tf.cast(img, dtype=tf.float32) / 255.
if (in_cspace == 'RGB'):
        img_yuv = tf.image.rgb_to_yuv(img_float)
    elif (in_cspace == 'BGR'):
        img_yuv = tf.image.bgr_to_yuv(img_float)
    else:
        raise ValueError(f"Unknown value {in_cspace!r} for parameter 'in_cspace'.")
    return img_yuv
def nvidia_model(img, crops=((0, 0), (0, 0)) ):
"""
    A CNN model based on the NVIDIA paper, implemented with the Keras
    Functional API.
:rtype: keras.models.Model
"""
x = Lambda(to_yuv, name='to_yuv')(img)
    x = Lambda(lambda x: x * 2 - 1, name='normalization')(x)
# Add crop layer if crops are specified
if (np.asarray(crops).flatten() > 0).any():
# Crop the input image to the ROI
x = Cropping2D(cropping=crops)(x)
    # Convolutional Layers
# Conv 1: 24@30x62 [kernel = 5x5; strides = 2x2]
x = Conv2D(filters=24, kernel_size=5, name='L1_conv')(x)
x = ELU()(x)
x = MaxPool2D(strides=(2,2), name='L1_pool')(x)
x = BatchNormalization()(x)
# Conv 2: 36@13x29 [kernel = 5x5; strides = 2x2]
x = Conv2D(filters=36, kernel_size=5, name='L2_conv')(x)
x = ELU()(x)
x = MaxPool2D(strides=(2,2), name='L2_pool')(x)
x = BatchNormalization()(x)
# Conv 3: 48@5x13 [kernel = 5x5; strides = 2x2]
x = Conv2D(filters=48, kernel_size=5, name='L3_conv')(x)
x = ELU()(x)
x = MaxPool2D(strides=(2,2), name='L3_pool')(x)
x = BatchNormalization()(x)
# Conv 4: 64@3x11 [kernel = 3x3; strides = 1x1]
x = Conv2D(filters=64, kernel_size=3, name='L4_conv')(x)
x = ELU()(x)
x = BatchNormalization()(x)
# Conv 5: 64@1x9 [kernel = 3x3; strides = 1x1]
x = Conv2D(filters=64, kernel_size=3, name='L5_conv')(x)
x = ELU()(x)
x = BatchNormalization()(x)
# 2D -> 1D Flatten to feed into FC layers
flattened = Flatten()(x)
xst = Dense(128, name='FC1_steer')(flattened)
xst = ELU()(xst)
xst = Dropout(rate=0.5)(xst)
xst = Dense(64, name='FC2_steer')(xst)
xst = ELU()(xst)
xst = Dropout(rate=0.5)(xst)
xst = Dense(16, name='FC3_steer')(xst)
xst = ELU()(xst)
xst = Dropout(rate=0.5)(xst)
    # Output layer
out_steer = Dense(1, name='OUT_steer')(xst)
model = Model(inputs=img, outputs=out_steer)
return model
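# Illustrative usage sketch (the input shape and crop values below are assumptions, not from the original source):
#   from keras.layers import Input
#   image_input = Input(shape=(160, 320, 3))
#   model = nvidia_model(image_input, crops=((60, 20), (0, 0)))
#   model.compile(optimizer='adam', loss='mse')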
|
[
"tensorflow.image.rgb_to_yuv",
"tensorflow.image.bgr_to_yuv",
"keras.layers.Cropping2D",
"keras.layers.Dropout",
"numpy.asarray",
"keras.layers.MaxPool2D",
"keras.layers.Flatten",
"keras.models.Model",
"keras.layers.ELU",
"tensorflow.cast",
"keras.layers.Dense",
"keras.layers.Lambda",
"keras.layers.Conv2D",
"keras.layers.BatchNormalization"
] |
[((2546, 2582), 'keras.models.Model', 'Model', ([], {'inputs': 'img', 'outputs': 'out_steer'}), '(inputs=img, outputs=out_steer)\n', (2551, 2582), False, 'from keras.models import Model\n'), ((312, 342), 'tensorflow.cast', 'tf.cast', (['img'], {'dtype': 'tf.float32'}), '(img, dtype=tf.float32)\n', (319, 342), True, 'import tensorflow as tf\n'), ((398, 428), 'tensorflow.image.rgb_to_yuv', 'tf.image.rgb_to_yuv', (['img_float'], {}), '(img_float)\n', (417, 428), True, 'import tensorflow as tf\n'), ((812, 841), 'keras.layers.Lambda', 'Lambda', (['to_yuv'], {'name': '"""to_yuv"""'}), "(to_yuv, name='to_yuv')\n", (818, 841), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((855, 904), 'keras.layers.Lambda', 'Lambda', (['(lambda x: x * 2 - 1)'], {'name': '"""normalization"""'}), "(lambda x: x * 2 - 1, name='normalization')\n", (861, 904), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1174, 1223), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(24)', 'kernel_size': '(5)', 'name': '"""L1_conv"""'}), "(filters=24, kernel_size=5, name='L1_conv')\n", (1180, 1223), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1235, 1240), 'keras.layers.ELU', 'ELU', ([], {}), '()\n', (1238, 1240), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1252, 1293), 'keras.layers.MaxPool2D', 'MaxPool2D', ([], {'strides': '(2, 2)', 'name': '"""L1_pool"""'}), "(strides=(2, 2), name='L1_pool')\n", (1261, 1293), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1304, 1324), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (1322, 1324), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1390, 1439), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(36)', 'kernel_size': '(5)', 'name': '"""L2_conv"""'}), "(filters=36, kernel_size=5, name='L2_conv')\n", (1396, 1439), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1451, 1456), 'keras.layers.ELU', 'ELU', ([], {}), '()\n', (1454, 1456), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1468, 1509), 'keras.layers.MaxPool2D', 'MaxPool2D', ([], {'strides': '(2, 2)', 'name': '"""L2_pool"""'}), "(strides=(2, 2), name='L2_pool')\n", (1477, 1509), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1520, 1540), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (1538, 1540), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1606, 1655), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(48)', 'kernel_size': '(5)', 'name': '"""L3_conv"""'}), "(filters=48, kernel_size=5, name='L3_conv')\n", (1612, 1655), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1667, 1672), 'keras.layers.ELU', 'ELU', ([], {}), '()\n', (1670, 1672), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1684, 1725), 'keras.layers.MaxPool2D', 'MaxPool2D', ([], {'strides': '(2, 2)', 'name': '"""L3_pool"""'}), "(strides=(2, 2), name='L3_pool')\n", (1693, 1725), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1736, 1756), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (1754, 1756), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1822, 1871), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(64)', 'kernel_size': '(3)', 'name': '"""L4_conv"""'}), "(filters=64, kernel_size=3, name='L4_conv')\n", (1828, 1871), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1883, 1888), 'keras.layers.ELU', 'ELU', ([], {}), '()\n', (1886, 1888), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1900, 1920), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (1918, 1920), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((1984, 2033), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(64)', 'kernel_size': '(3)', 'name': '"""L5_conv"""'}), "(filters=64, kernel_size=3, name='L5_conv')\n", (1990, 2033), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((2045, 2050), 'keras.layers.ELU', 'ELU', ([], {}), '()\n', (2048, 2050), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((2062, 2082), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (2080, 2082), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((2149, 2158), 'keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (2156, 2158), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((2172, 2200), 'keras.layers.Dense', 'Dense', (['(128)'], {'name': '"""FC1_steer"""'}), "(128, name='FC1_steer')\n", (2177, 2200), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((2222, 2227), 'keras.layers.ELU', 'ELU', ([], {}), '()\n', (2225, 2227), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((2243, 2260), 'keras.layers.Dropout', 'Dropout', ([], {'rate': '(0.5)'}), '(rate=0.5)\n', (2250, 2260), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((2278, 2305), 'keras.layers.Dense', 'Dense', (['(64)'], {'name': '"""FC2_steer"""'}), "(64, name='FC2_steer')\n", (2283, 2305), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((2321, 2326), 'keras.layers.ELU', 'ELU', ([], {}), '()\n', (2324, 2326), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((2342, 2359), 'keras.layers.Dropout', 'Dropout', ([], {'rate': '(0.5)'}), '(rate=0.5)\n', (2349, 2359), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((2376, 2403), 'keras.layers.Dense', 'Dense', (['(16)'], {'name': '"""FC3_steer"""'}), "(16, name='FC3_steer')\n", (2381, 2403), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((2419, 2424), 'keras.layers.ELU', 'ELU', ([], {}), '()\n', (2422, 2424), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((2440, 2457), 'keras.layers.Dropout', 'Dropout', ([], {'rate': '(0.5)'}), '(rate=0.5)\n', (2447, 2457), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((2500, 2526), 'keras.layers.Dense', 'Dense', (['(1)'], {'name': '"""OUT_steer"""'}), "(1, name='OUT_steer')\n", (2505, 2526), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((478, 508), 'tensorflow.image.bgr_to_yuv', 'tf.image.bgr_to_yuv', (['img_float'], {}), '(img_float)\n', (497, 508), True, 'import tensorflow as tf\n'), ((1056, 1082), 'keras.layers.Cropping2D', 'Cropping2D', ([], {'cropping': 'crops'}), '(cropping=crops)\n', (1066, 1082), False, 'from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda\n'), ((962, 979), 'numpy.asarray', 'np.asarray', (['crops'], {}), '(crops)\n', (972, 979), True, 'import numpy as np\n')]
|
import os
import pickle
import cv2
import numpy as np
import streamlit as st
import tensorflow as tf
import grpc
from tensorflow_serving.apis import (
prediction_service_pb2_grpc,
predict_pb2
)
from consts import (
TRAIN_FD,
TRAIN_PKL_FP,
TRAIN_LABEL_FP
)
@st.cache
def load_prec_embs():
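    # Streamlit caches this call, so the precomputed embeddings and labels are read from disk only once.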
with open(TRAIN_PKL_FP, "rb") as f:
train_embs = pickle.load(f)
with open(TRAIN_LABEL_FP, "rb") as f:
train_labels = pickle.load(f)
train_img_fps = wfile(TRAIN_FD)
assert len(train_img_fps) == train_embs.shape[0]
return train_img_fps, train_embs, train_labels
def wfile(root):
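    # Recursively collect every file path under root; sorting keeps the ordering deterministic so it can line up
    # with the precomputed embeddings.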
img_fps = []
for path, subdirs, files in os.walk(root):
for name in files:
img_fps.append(os.path.join(path, name))
return sorted(img_fps)
class FlowerArc:
def __init__(self,
host="localhost",
port=8500,
model_name="flower",
model_signature="flower_signature",
input_name="input_image",
output_name="emb_pred"):
self.host = host
self.port = port
self.channel = grpc.insecure_channel("{}:{}".format(
self.host, self.port
))
self.stub = prediction_service_pb2_grpc.PredictionServiceStub(
self.channel
)
self.input_name = input_name
self.output_name = output_name
self.request = predict_pb2.PredictRequest()
self.request.model_spec.name = model_name
self.request.model_spec.signature_name = model_signature
def norm_mean_std(self,
img):
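        # Per-image standardization: scale to [0, 1], then subtract this image's
        # mean and divide by its standard deviation.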
img = img / 255
img = img.astype('float32')
mean = np.mean(img, axis=(0, 1, 2))
std = np.std(img, axis=(0, 1, 2))
img = (img - mean) / std
return img
def test_preprocess(self,
img,
img_size=(384, 384),
expand=True):
img = cv2.resize(img, img_size)
# normalize image
img = self.norm_mean_std(img)
if expand:
img = np.expand_dims(img, axis=0)
return img
def predict(self, img):
assert img.ndim == 3
img = self.test_preprocess(img)
self.request.inputs[self.input_name].CopyFrom(
tf.contrib.util.make_tensor_proto(
img,
dtype=tf.float32,
shape=img.shape
)
)
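        # The second positional argument to Predict is the gRPC deadline in seconds.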
result = self.stub.Predict(self.request, 10.0)
emb_pred = tf.contrib.util.make_ndarray(
result.outputs[self.output_name]
)
return emb_pred
class Saliency:
def __init__(self,
host="localhost",
port=8500,
model_name="saliency",
model_signature="serving_default",
input_name="input_image",
output_name="pred_mask"):
self.host = host
self.port = port
self.channel = grpc.insecure_channel("{}:{}".format(
self.host, self.port
))
self.stub = prediction_service_pb2_grpc.PredictionServiceStub(
self.channel
)
self.input_name = input_name
self.output_name = output_name
self.request = predict_pb2.PredictRequest()
self.request.model_spec.name = model_name
self.request.model_spec.signature_name = model_signature
def test_preprocess(self,
img,
img_size=(320, 240),
expand=True):
img = cv2.resize(img, img_size)
if expand:
img = np.expand_dims(img, axis=0)
return img
def predict(self, img):
assert img.ndim == 3
img = self.test_preprocess(img)
self.request.inputs[self.input_name].CopyFrom(
tf.contrib.util.make_tensor_proto(
img,
dtype=tf.float32,
shape=img.shape
)
)
result = self.stub.Predict(self.request, 10.0)
pred_mask = tf.contrib.util.make_ndarray(
result.outputs[self.output_name]
)
return pred_mask
|
[
"os.path.join",
"tensorflow_serving.apis.predict_pb2.PredictRequest",
"numpy.std",
"os.walk",
"numpy.expand_dims",
"tensorflow_serving.apis.prediction_service_pb2_grpc.PredictionServiceStub",
"pickle.load",
"numpy.mean",
"tensorflow.contrib.util.make_ndarray",
"tensorflow.contrib.util.make_tensor_proto",
"cv2.resize"
] |
[((679, 692), 'os.walk', 'os.walk', (['root'], {}), '(root)\n', (686, 692), False, 'import os\n'), ((373, 387), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (384, 387), False, 'import pickle\n'), ((454, 468), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (465, 468), False, 'import pickle\n'), ((1261, 1324), 'tensorflow_serving.apis.prediction_service_pb2_grpc.PredictionServiceStub', 'prediction_service_pb2_grpc.PredictionServiceStub', (['self.channel'], {}), '(self.channel)\n', (1310, 1324), False, 'from tensorflow_serving.apis import prediction_service_pb2_grpc, predict_pb2\n'), ((1447, 1475), 'tensorflow_serving.apis.predict_pb2.PredictRequest', 'predict_pb2.PredictRequest', ([], {}), '()\n', (1473, 1475), False, 'from tensorflow_serving.apis import prediction_service_pb2_grpc, predict_pb2\n'), ((1725, 1753), 'numpy.mean', 'np.mean', (['img'], {'axis': '(0, 1, 2)'}), '(img, axis=(0, 1, 2))\n', (1732, 1753), True, 'import numpy as np\n'), ((1768, 1795), 'numpy.std', 'np.std', (['img'], {'axis': '(0, 1, 2)'}), '(img, axis=(0, 1, 2))\n', (1774, 1795), True, 'import numpy as np\n'), ((2007, 2032), 'cv2.resize', 'cv2.resize', (['img', 'img_size'], {}), '(img, img_size)\n', (2017, 2032), False, 'import cv2\n'), ((2574, 2636), 'tensorflow.contrib.util.make_ndarray', 'tf.contrib.util.make_ndarray', (['result.outputs[self.output_name]'], {}), '(result.outputs[self.output_name])\n', (2602, 2636), True, 'import tensorflow as tf\n'), ((3143, 3206), 'tensorflow_serving.apis.prediction_service_pb2_grpc.PredictionServiceStub', 'prediction_service_pb2_grpc.PredictionServiceStub', (['self.channel'], {}), '(self.channel)\n', (3192, 3206), False, 'from tensorflow_serving.apis import prediction_service_pb2_grpc, predict_pb2\n'), ((3329, 3357), 'tensorflow_serving.apis.predict_pb2.PredictRequest', 'predict_pb2.PredictRequest', ([], {}), '()\n', (3355, 3357), False, 'from tensorflow_serving.apis import prediction_service_pb2_grpc, predict_pb2\n'), ((3631, 3656), 'cv2.resize', 'cv2.resize', (['img', 'img_size'], {}), '(img, img_size)\n', (3641, 3656), False, 'import cv2\n'), ((4134, 4196), 'tensorflow.contrib.util.make_ndarray', 'tf.contrib.util.make_ndarray', (['result.outputs[self.output_name]'], {}), '(result.outputs[self.output_name])\n', (4162, 4196), True, 'import tensorflow as tf\n'), ((2136, 2163), 'numpy.expand_dims', 'np.expand_dims', (['img'], {'axis': '(0)'}), '(img, axis=0)\n', (2150, 2163), True, 'import numpy as np\n'), ((2352, 2425), 'tensorflow.contrib.util.make_tensor_proto', 'tf.contrib.util.make_tensor_proto', (['img'], {'dtype': 'tf.float32', 'shape': 'img.shape'}), '(img, dtype=tf.float32, shape=img.shape)\n', (2385, 2425), True, 'import tensorflow as tf\n'), ((3695, 3722), 'numpy.expand_dims', 'np.expand_dims', (['img'], {'axis': '(0)'}), '(img, axis=0)\n', (3709, 3722), True, 'import numpy as np\n'), ((3911, 3984), 'tensorflow.contrib.util.make_tensor_proto', 'tf.contrib.util.make_tensor_proto', (['img'], {'dtype': 'tf.float32', 'shape': 'img.shape'}), '(img, dtype=tf.float32, shape=img.shape)\n', (3944, 3984), True, 'import tensorflow as tf\n'), ((748, 772), 'os.path.join', 'os.path.join', (['path', 'name'], {}), '(path, name)\n', (760, 772), False, 'import os\n')]
|
"""
This module contains EveryAction objects, such as :class:`.Person` or :class:`.CanvassResponse`, which represent
structured EveryAction data directly corresponding to objects in the
`EveryAction 8 VAN API docs <https://developers.everyaction.com/van-api>`__.
"""
from datetime import datetime
from typing import Any, ClassVar, Dict, Iterable, List, Optional, Union
from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue
from everyaction.exception import EAException
__all__ = [
'ActivistCode',
'ActivistCodeData',
'ActivistCodeResponse',
'Address',
'AddRegistrantsResponse',
'Adjustment',
'AdjustmentResponse',
'Attribution',
'AvailableValue',
'APIKeyProfile',
'AVEVDataFileAction',
'BallotRequestType',
'BallotReturnStatus',
'BallotType',
'BankAccount',
'BargainingUnit',
'BargainingUnitJobClass',
'BatchForm',
'BatchProgram',
'BulkImportAction',
'BulkImportField',
'BulkImportJob',
'BulkImportJobData',
'Canvasser',
'CanvassContext',
'CanvassFileRequest',
'CanvassResponse',
'ChangedEntityBulkImportField',
'ChangedEntityExportJob',
'ChangedEntityExportRequest',
'ChangedEntityField',
'ChangeType',
'Code',
'CodeResult',
'Column',
'Commitment',
'ConfirmationEmailData',
'Constraints',
'ContactHistory',
'ContactType',
'Contribution',
'Currency',
'CustomField',
'CustomFieldValue',
'Department',
'Designation',
'DisclosureFieldValue',
'Disbursement',
'DistrictField',
'DistrictFieldValue',
'Email',
'EmailMessage',
'EmailMessageContent',
'EmailMessageContentDistributions',
'Employer',
'EmployerBargainingUnit',
'Error',
'Event',
'EventRole',
'EventShift',
'EventType',
'ExportJob',
'ExportJobType',
'ExtendedSourceCode',
'FieldValueMapping',
'File',
'FileLoadingJob',
'FinancialBatch',
'Folder',
'GeoCoordinate',
'Identifier',
'InputType',
'IsCellStatus',
'JobActionType',
'JobClass',
'JobFile',
'JobNotification',
'KeyValuePair',
'Listener',
'ListLoadCallbackData',
'Location',
'MappingParent',
'MappingType',
'MappingTypeData',
'Membership',
'MembershipSourceCode',
'MemberStatus',
'MiniVANExport',
'Note',
'NoteCategory',
'OnlineActionsForm',
'Organization',
'OrganizationPhone',
'Person',
'Phone',
'Pledge',
'PreferredPronoun',
'PrintedList',
'ProgramType',
'Registrant',
'RegistrationForm',
'RelationalMapping',
'Relationship',
'ReportedEthnicity',
'ReportedGender',
'ReportedLanguagePreference',
'ReportedRace',
'ReportedSexualOrientation',
'ResultCode',
'SavedList',
'SavedListData',
'SavedListLoadAction',
'ScheduleType',
'Score',
'ScoreApprovalCriteria',
'ScoreLoadAction',
'ScoreUpdate',
'ScriptResponse',
'ShiftType',
'Signup',
'Status',
'Story',
'StoryStatus',
'Subgroup',
'SupportedEntity',
'SupporterGroup',
'SupportField',
'Suppression',
'SurveyQuestion',
'SurveyCanvassResponse',
'SurveyResponse',
'Target',
'TargetExportJob',
'UpdateStatistics',
'User',
'ValueMapping',
'ValueMappingData',
'VolunteerActivityResponse',
'VoterRegistrationBatch',
'WorkArea',
'Worksite'
]
# Class definitions and additions to shared properties are organized by their "orders".
# A property has order n > 1 when its factory depends on at least one class of order n - 1 and it depends on classes of
# no higher order than n - 1. A property has order 1 when its factory does not depend on an EAObject child definition.
# Similarly, a Class has order n > 1 when it has properties of order n or is a subclass of a class of order n - 1 and
# has no higher order properties/base classes. A Class has order 1 when it has only properties of order 1 or no
# properties at all and does not inherit except from EAObject, EAObjectWithID, or EAObjectWithIDAndName.
# The organization style is the following, with each component in alphabetical order: 1st order properties, then 1st
# order classes which may depend on 1st order properties, then 2nd order properties whose factories depend on a 1st
# order class, then 2nd order classes which may depend on 1st or 2nd order properties or a 1st order class, and so on.
# This organizational structure allows for a consistent way to specify entities after their dependencies, in
# alphabetical order, and independent of how their dependencies are named.
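# For example (illustrative): amount=EAProperty() below is first order because its factory depends on no EAObject
# child, while employer=EAProperty(factory=_employer_factory) has order one greater than the order of the Employer
# class its factory constructs.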
# Expand is handled specially
def _expand_factory(arg: Union[str, Iterable[str]]) -> str:
if not isinstance(arg, str):
# comma-delimited str or Iterable[str] allowed for expand.
# Note: str is Iterable, be careful when modifying this code.
if isinstance(arg, Iterable):
return ','.join(arg)
else:
raise TypeError(
f'Expected str or Iterable for expand, found {type(arg).__name__}: {arg}'
)
return arg
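# e.g. _expand_factory(['phones', 'emails']) and _expand_factory('phones,emails') both yield 'phones,emails'.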
# --- Circular Reference Factories ---
# The following functions are factories for objects which have circular references.
# For example, Organizations have a field which is another Organization, and Departments have employers and vice-versa.
def _employer_factory(*args: Any, **kwargs: Any) -> 'Employer':
return Employer(*args, **kwargs)
def _organization_factory(*args: Any, **kwargs: Any) -> 'Organization':
return Organization(*args, **kwargs)
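# This indirection lets a shared property such as employer=EAProperty(factory=_employer_factory) be declared before
# the Employer class exists: the factory body is only evaluated at call time, by which point Employer is defined.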
# --- First Order Properties and Objects ---
EAProperty.share(
acceptedOneTimeAmount=EAProperty('accepted_one_time'),
acceptedRecurringAmount=EAProperty('accepted_recurring', 'recurring'),
action=EAProperty(),
actionType=EAProperty('type'),
added=EAProperty(),
additionalEnvelopeName=EAProperty('additional_envelope'),
additionalSalutation=EAProperty(),
adjustmentType=EAProperty('type'),
allowMultipleMode=EAProperty('multiple_mode', 'mode'),
alternateId=EAProperty('alternate', 'alt'),
amount=EAProperty(),
amountAttributed=EAProperty('amount'),
apiKeyTypeName=EAProperty('type_name', 'type'),
areSubgroupsSticky=EAProperty('sticky_subgroups', 'sticky_groups'),
assignableTypes=EAProperty(singular_alias='assignable_type'),
assignedValue=EAProperty('value'),
attributionType=EAProperty('type'),
average=EAProperty(),
averageValue=EAProperty('average'),
badValues=EAProperty('bad'),
bankAccount=EAProperty('account'),
bankAccountId=EAProperty('bank_account', 'account'),
batchCode=EAProperty('batch'),
biographyImageUrl=EAProperty('biography_image', 'bio_image_url', 'bio_image'),
bounceCount=EAProperty('bounces'),
campaignId=EAProperty('campaign'),
canBeMappedToColumn=EAProperty('column_mappable', 'mappable'),
canBeRepeatable=EAProperty('allows_repeats'),
canHaveGoals=EAProperty('allows_goals'),
canHaveMultipleLocations=EAProperty('allows_multiple_locations'),
canHaveMultipleShifts=EAProperty('allows_multiple_shifts'),
canHaveRoleMaximums=EAProperty('allows_role_maximums'),
canHaveRoleMinimums=EAProperty('allows_role_minimums'),
canvassedBy=EAProperty('canvasser'),
canvassFileRequestId=EAProperty('canvass_id'),
canvassFileRequestGuid=EAProperty('canvass_guid'),
caseworkCases=EAProperty('cases', singular_alias='case'),
caseworkIssues=EAProperty('issues', singular_alias='issue'),
caseworkStories=EAProperty('stories', singular_alias='story'),
ccExpirationMonth=EAProperty('cc_exp_month'),
ccExpirationYear=EAProperty('cc_exp_year'),
changeTypeName=EAProperty('change_type', 'change'),
channelTypeName=EAProperty('channel_type', 'channel'),
checkDate=EAProperty(),
checkNumber=EAProperty(),
city=EAProperty(),
code=EAProperty(),
codeId=EAProperty('code'),
codeIds=EAProperty('codes'),
collectedLocationId=EAProperty('collected_location', 'location'),
color=EAProperty(),
columnDelimiter=EAProperty('delimiter'),
columnName=EAProperty('column'),
committeeName=EAProperty('committee'),
confidenceLevel=EAProperty('confidence'),
contact=EAProperty(),
contactMethodPreferenceCode=EAProperty('contact_preference_code', 'preference_code', 'contact_preference'),
contactMode=EAProperty(),
contactModeId=EAProperty('contact_mode'),
contactTypeId=EAProperty('contact_type'),
contributionCount=EAProperty('contributions'),
contributionId=EAProperty('contribution'),
contributionSummary=EAProperty(),
contributionTotal=EAProperty(),
copyToEmails=EAProperty('copy_to', is_array=True),
countryCode=EAProperty('country'),
coverCostsAmount=EAProperty('cover_costs'),
createdAfter=EAProperty('after'),
createdBefore=EAProperty('before'),
createdBy=EAProperty('creator'),
createdByCommitteeId=EAProperty('committee'),
createdByEmail=EAProperty('created_by', 'creator_email', 'creator'),
createdDate=EAProperty('created'),
creditCardLast4=EAProperty('cc_last4', 'last4'),
currency=EAProperty(),
currencyType=EAProperty('type'),
custom=EAProperty(),
customFieldGroupId=EAProperty('group'),
customFieldId=EAProperty('field'),
customFieldsGroupType=EAProperty('group_type', 'type'),
customPropertyKey=EAProperty('property_key', 'custom_key', 'key'),
cycle=EAProperty(),
databaseMode=EAProperty('mode'),
databaseName=EAProperty(),
dateAdjusted=EAProperty('adjusted', 'date'),
dateCanvassed=EAProperty('canvassed'),
dateCardsSent=EAProperty('cards_sent'),
dateChangedFrom=EAProperty('changed_from'),
dateChangedTo=EAProperty('changed_to'),
dateClosed=EAProperty('closed'),
dateCreated=EAProperty('created'),
dateDeposited=EAProperty('deposited'),
dateExpired=EAProperty('expired'),
dateExpireMembership=EAProperty('expiration_date', 'expiration', 'expires'),
dateIssued=EAProperty('issued'),
dateLastRenewed=EAProperty('last_renewed', 'renewed'),
dateModified=EAProperty('modified'),
dateOfBirth=EAProperty('birthday'),
dateOpened=EAProperty('opened'),
datePosted=EAProperty('posted'),
dateProcessed=EAProperty('processed'),
dateReceived=EAProperty('received'),
dateScheduled=EAProperty('scheduled'),
dateSent=EAProperty('sent'),
dateStartMembership=EAProperty('start_date', 'started'),
dateThanked=EAProperty('thanked'),
decreasedBy=EAProperty('decrease'),
defaultEndTime=EAProperty('default_end'),
defaultStartTime=EAProperty('default_start'),
depositDate=EAProperty(),
depositNumber=EAProperty(),
detailedCode=EAProperty(),
description=EAProperty('desc'),
designationId=EAProperty('designation'),
dialingPrefix=EAProperty('prefix'),
directMarketingCode=EAProperty('marketing_code'),
disclosureFieldValue=EAProperty('field_value', 'disclosure_value', 'value'),
displayMode=EAProperty(),
displayName=EAProperty('display'),
doorCount=EAProperty('door'),
dotNetTimeZoneId=EAProperty('dot_net_time_zone', 'time_zone'),
downloadUrl=EAProperty('download'),
duesAttributionTypeName=EAProperty('dues_attribution_type', 'dues_attribution'),
duesEntityTypeName=EAProperty('dues_entity_type', 'dues_entity'),
duplicateRows=EAProperty('duplicates'),
electionRecords=EAProperty(singular_alias='election_record'),
electionType=EAProperty(),
email=EAProperty(),
employer=EAProperty(factory=_employer_factory),
employerBargainingUnitId=EAProperty('employer_bargaining_unit'),
employerId=EAProperty('employer'),
endDate=EAProperty('end'),
endTime=EAProperty('end'),
endTimeOverride=EAProperty('end_override', 'end'),
enrollmentTypeName=EAProperty('enrollment_type', 'enrollment'),
envelopeName=EAProperty('envelope'),
errorCode=EAProperty('error'),
eventId=EAProperty('event'),
eventTypeId=EAProperty('event_type', 'type'),
eventTypeIds=EAProperty('event_types'),
excludeChangesFromSelf=EAProperty('exclude_self'),
expand=EAProperty(factory=_expand_factory),
expectedContributionCount=EAProperty('expected_count'),
expectedContributionTotalAmount=EAProperty('expected_total', 'expected_amount'),
exportedRecordCount=EAProperty('exported_records', 'record_count', 'records', 'count'),
ext=EAProperty(),
externalId=EAProperty('external'),
fieldName=EAProperty('field'),
fieldType=EAProperty('field', 'type'),
fileSizeKbLimit=EAProperty('size_kb_limit', 'kb_limit'),
financialBatchId=EAProperty('financial_batch'),
finderNumber=EAProperty('finder'),
firstName=EAProperty('first'),
folderId=EAProperty('folder'),
folderName=EAProperty('folder'),
formalEnvelopeName=EAProperty('formal_envelope'),
formalSalutation=EAProperty(),
formSubmissionCount=EAProperty('form_submissions', 'forms', 'submissions'),
frequency=EAProperty(),
fromEmail=EAProperty(),
fromName=EAProperty('sender'),
fromSubject=EAProperty('subject'),
fullName=EAProperty(),
generatedAfter=EAProperty('after'),
generatedBefore=EAProperty('before'),
goal=EAProperty(),
groupId=EAProperty(),
groupName=EAProperty(),
groupType=EAProperty(),
guid=EAProperty(),
hasHeader=EAProperty(),
hasMyCampaign=EAProperty('my_campaign'),
hasMyVoters=EAProperty('my_voters'),
hasPredefinedValues=EAProperty('has_predefined'),
hasQuotes=EAProperty(),
hint=EAProperty(),
increasedBy=EAProperty('increase'),
includeAllAutoGenerated=EAProperty('include_auto_generated', 'include_generated'),
includeAllStatuses=EAProperty('include_statuses', 'include_closed'),
includeInactive=EAProperty(),
includeUnassigned=EAProperty(),
inputTypeId=EAProperty('input_type'),
interventionCallbackUrl=EAProperty('intervention_url', 'callback_url'),
invalidCharacters=EAProperty('invalid_chars'),
invalidRowsFileUrl=EAProperty('invalid_rows_url', 'invalid_url'),
inRepetitionWithEventId=EAProperty('repeat_of'),
isActive=EAProperty('active'),
isApplicable=EAProperty('applicable'),
isAssociatedWithBadges=EAProperty('associated_with_badges'),
isAtLeastOneLocationRequired=EAProperty('needs_location', 'location_required', 'requires_location'),
isAutoGenerated=EAProperty('auto_generated', 'generated'),
isConfirmationEmailEnabled=EAProperty('confirmation_email_enabled', 'confirmation_enabled', 'confirmation'),
isConfirmedOptInEnabled=EAProperty('confirmed_opt_in_enabled', 'opt_in_enabled', 'opt_in'),
isCoreField=EAProperty('is_core', 'core_field', 'core'),
isCustomDistrict=EAProperty('custom_district', 'is_custom', 'custom'),
isEditable=EAProperty('editable'),
isEventLead=EAProperty('event_lead', 'lead'),
isExportable=EAProperty('exportable'),
isMember=EAProperty('member'),
isMultiAssign=EAProperty('multi_assign'),
isMyOrganization=EAProperty('my_organization', 'my_org'),
isOfflineSignup=EAProperty('offline_property', 'offline'),
isOnlineActionsAvailable=EAProperty('online_actions_available', 'actions_available'),
isOnlyEditableByCreatingUser=EAProperty(
'only_editable_by_creating_user',
'only_editable_by_creator',
'only_creator_may_edit'
),
isOpen=EAProperty('open'),
isPreferred=EAProperty('preferred'),
isPubliclyViewable=EAProperty('publicly_viewable', 'public'),
isRecurringEmailEnabled=EAProperty('recurring_email_enabled', 'recurring_enabled', 'recurring'),
isRequired=EAProperty('required'),
isSearchable=EAProperty('searchable'),
isSharedWithChildCommitteesByDefault=EAProperty('default_share_child'),
isSharedWithMasterCommitteeByDefault=EAProperty('default_share_master'),
isSubscribed=EAProperty('subscribed'),
isUpsellAccepted=EAProperty('upsell_accepted'),
isUpsellShown=EAProperty('upsell_shown'),
isViewRestricted=EAProperty('view_restricted'),
jobStatus=EAProperty('status'),
key=EAProperty(),
keyReference=EAProperty('reference'),
lastName=EAProperty('last'),
lat=EAProperty(),
levelId=EAProperty(),
levelName=EAProperty(),
line1=EAProperty(),
line2=EAProperty(),
line3=EAProperty(),
linkedCreditCardPaymentDisbursementId=EAProperty('credit_card_payment'),
linkedJointFundraisingContributionId=EAProperty(
'joint_fundraising_contribution', 'fundraising_contribution', 'fundraising'
),
linkedPartnershipContributionId=EAProperty('partnership_contribution', 'partnership'),
linkedReimbursementDisbursementId=EAProperty('reimbursement'),
linksClickedCount=EAProperty('links_clicked'),
listCount=EAProperty('list'),
listDescription=EAProperty('description', 'desc'),
listName=EAProperty('list', 'name'),
loadStatus=EAProperty('status'),
lon=EAProperty(),
mappingTypeName=EAProperty('mapping_type', 'mapping'),
matchedRows=EAProperty('matched'),
matchedRowsCount=EAProperty('matched_count', 'matched'),
matchPercent=EAProperty('match', 'percent'),
max=EAProperty(),
maxDoorCount=EAProperty('max_door'),
maxFieldLength=EAProperty('max_length', 'max_len'),
maxLength=EAProperty(),
maxPeopleCount=EAProperty('max_people'),
maxTextboxCharacters=EAProperty('max_box_chars'),
maxValue=EAProperty('max'),
medianValue=EAProperty('median'),
mediumName=EAProperty('medium'),
message=EAProperty(),
middleName=EAProperty('middle'),
min=EAProperty(),
minValue=EAProperty('min'),
modifiedBy=EAProperty('modifier'),
modifiedByEmail=EAProperty('modified_by', 'modifier_email', 'modifier'),
nextTransactionDate=EAProperty('next_transaction', 'next'),
nickname=EAProperty(),
notes=EAProperty(),
nulledOut=EAProperty('nulled'),
number=EAProperty(),
numberOfCards=EAProperty('num_cards', 'cards'),
numberTimesRenewed=EAProperty('times_renewed', 'renewals'),
occupation=EAProperty(),
onlineReferenceNumber=EAProperty('reference_number', 'ref_number'),
onlyMyBatches=EAProperty('only_mine'),
openCount=EAProperty('opens'),
optInStatus=EAProperty('opt_in'),
orderby=EAProperty('order_by'),
organizationContactName=EAProperty('organization_contact', 'org_contact'),
organizationContactOfficialName=EAProperty('organization_contact_official', 'org_contact_official'),
organizationId=EAProperty('organization', 'org'),
organizationRoles=EAProperty('org_roles', singular_alias='org_role'),
organizeAt=EAProperty(),
originalAmount=EAProperty('original'),
originalRowCount=EAProperty('original_count', 'original'),
outOfRange=EAProperty('OOR'),
overwriteExistingListId=EAProperty('overwrite_existing_id', 'overwrite_id', 'overwrite'),
parentCodeId=EAProperty('parent_code'),
parentDepartmentId=EAProperty('parent_department', 'parent'),
parentFieldId=EAProperty('parent_field', 'parent'),
parentFieldName=EAProperty('parent_field', 'parent'),
parentId=EAProperty('parent'),
parentOrganization=EAProperty('parent', factory=_organization_factory),
parentValueId=EAProperty('parent_value'),
party=EAProperty(),
paymentType=EAProperty(),
personIdColumn=EAProperty('id_column', 'id_col'),
personIdType=EAProperty('person_type'),
personType=EAProperty(),
phone=EAProperty(),
phoneId=EAProperty('phone'),
phoneNumber=EAProperty('number'),
points=EAProperty(),
preview=EAProperty(),
primaryContact=EAProperty(),
primaryCustomField=EAProperty('primary_custom'),
processedAmount=EAProperty(),
processedCurrency=EAProperty(),
professionalSuffix=EAProperty(),
properties=EAProperty(singular_alias='property'),
question=EAProperty(),
questionId=EAProperty('question'),
recipientCount=EAProperty('recipients'),
recordCount=EAProperty('records'),
recurrenceType=EAProperty('recurrence'),
referenceCode=EAProperty('reference'),
relationshipId=EAProperty('relationship'),
remainingAmount=EAProperty('remaining'),
replyToEmail=EAProperty('reply_to'),
requestedCustomFieldIds=EAProperty('custom_field_ids', 'custom_fields', singular_alias='custom_field'),
requestedFields=EAProperty('fields', singular_alias='field'),
requestedIds=EAProperty('ids', singular_alias='requested_id'),
resourceType=EAProperty('resource'),
resourceTypes=EAProperty('resources', singular_alias='resource'),
resourceUrl=EAProperty('url'),
responseId=EAProperty('response'),
result=EAProperty(),
resultCodeId=EAProperty('result_code'),
resultFileColumnName=EAProperty('result_column_name', 'result_column', 'column_name', 'column'),
resultFileSizeKbLimit=EAProperty('size_kb_limit', 'kb_limit'),
resultFileSizeLimitKb=EAProperty('size_kb_limit', 'kb_limit'),
resultOutcomeGroup=EAProperty('outcome_group'),
salutation=EAProperty(),
savedListId=EAProperty('saved_list', 'list'),
scoreColumn=EAProperty('score_col'),
scoreId=EAProperty('score'),
scriptQuestion=EAProperty('question'),
searchKeyword=EAProperty('search', 'keyword'),
selectedOneTimeAmount=EAProperty('selected_one_time'),
selfReportedEthnicities=EAProperty('ethnicities', is_array=True),
selfReportedEthnicity=EAProperty('ethnicity'),
selfReportedGenders=EAProperty('genders', singular_alias='gender'),
selfReportedLanguagePreference=EAProperty('language_preference', 'language'),
selfReportedRace=EAProperty('race'),
selfReportedRaces=EAProperty('races', is_array=True),
selfReportedSexualOrientations=EAProperty('sexual_orientations', singular_alias='sexual_orientation'),
senderDisplayName=EAProperty('sender_display', 'sender_name'),
senderEmailAddress=EAProperty('sender_email'),
sex=EAProperty(),
shortName=EAProperty('short'),
smsOptInStatus=EAProperty('sms_opt_in'),
sourceUrl=EAProperty('source', 'url'),
sourceValue=EAProperty('source'),
startingAfter=EAProperty('after'),
startingBefore=EAProperty('before'),
startDate=EAProperty('start'),
startTime=EAProperty('start'),
startTimeOverride=EAProperty('start_override', 'start'),
stateCode=EAProperty('state'),
stateOrProvince=EAProperty('state', 'province'),
staticValue=EAProperty('static'),
status=EAProperty(),
statuses=EAProperty(),
statusName=EAProperty('status'),
subscriptionStatus=EAProperty('status'),
supporterGroupId=EAProperty('supporter_group', 'group'),
suffix=EAProperty(),
surveyQuestionId=EAProperty('question'),
surveyResponseId=EAProperty('response'),
syncPeriodEnd=EAProperty('sync_end', 'end'),
syncPeriodStart=EAProperty('sync_start', 'start'),
targetId=EAProperty('target'),
targetValue=EAProperty('target'),
text=EAProperty(),
title=EAProperty(),
tolerance=EAProperty('tolerance'),
totalDuesPaid=EAProperty('total_paid'),
totalRows=EAProperty('total'),
turfName=EAProperty('turf'),
type=EAProperty(),
typeAndName=EAProperty(),
typeId=EAProperty('type'),
unitNo=EAProperty('unit'),
unmatchedRowsCount=EAProperty('unmatched_count', 'unmatched'),
unsubscribeCount=EAProperty('unsubscribes'),
upsellType=EAProperty('upsell'),
url=EAProperty(),
username=EAProperty('user'),
userFirstName=EAProperty('first_name', 'first'),
userLastName=EAProperty('last_name', 'last'),
value=EAProperty(),
vanId=EAProperty('van'),
webhookUrl=EAProperty('webhook'),
website=EAProperty(),
zipOrPostalCode=EAProperty('zip_code', 'zip', 'postal_code', 'postal'),
ID=EAProperty()
)
class ActivistCode(
EAObjectWithIDAndName,
_prefix='activistCode',
_keys={'description', 'isMultiAssign', 'mediumName', 'scriptQuestion', 'shortName', 'status', 'type'}
):
"""Represents an `Activist Code
<https://docs.everyaction.com/reference/activist-codes#common-models-1>`__.
"""
class ActivistCodeData(
EAObjectWithIDAndName,
_prefix='activistCode',
_prefixed={'name', 'typeAndName'},
_keys={'canvassedBy', 'dateCanvassed', 'dateCreated'}
):
"""Represents the data associated with responses to `getting Activist Codes
<https://docs.everyaction.com/reference/people#peoplevanidactivistcodes>`__.
"""
class Adjustment(EAObject, _keys={'adjustmentType', 'amount', 'datePosted'}):
"""Represents the data associated with responses to `adjusting a Contribution
<https://docs.everyaction.com/reference/contributions#contributionscontributionidadjustments>`__.
"""
class AdjustmentResponse(EAObject, _keys={'contributionId', 'dateAdjusted', 'originalAmount', 'remainingAmount'}):
"""Represents the data associated with a response to a `Contribution adjustment
<https://docs.everyaction.com/reference/contributions#contributionscontributionidadjustments>`__.
"""
class APIKeyProfile(
EAObject,
_keys={
'apiKeyTypeName',
'committeeName',
'databaseName',
'hasMyCampaign',
'hasMyVoters',
'keyReference',
'username',
'userFirstName',
'userLastName'
}
):
"""Represents an `API key profile
<https://docs.everyaction.com/reference/overview#introspection>`__.
"""
class Attribution(EAObject, _keys={'amountAttributed', 'attributionType', 'dateThanked', 'notes', 'vanId'}):
"""Represents an `Attribution object
<https://docs.everyaction.com/reference/contributions#common-models-8>`__.
"""
class AvailableValue(EAObjectWithIDAndName, _keys={'parentValueId'}):
"""Represents
`AvailableValues <https://docs.everyaction.com/reference/custom-fields#common-models-9>`__.
for a Custom Field.
"""
class BallotRequestType(EAObjectWithIDAndName, _prefix='ballotRequestType'):
"""Represents a `Ballot Request Type
<https://docs.everyaction.com/reference/ballots#common-models-2>`__.
"""
class BallotReturnStatus(EAObjectWithIDAndName, _prefix='ballotReturnStatus'):
"""Represents a `Ballot Return Status
<https://docs.everyaction.com/reference/ballots#common-models-2>`__.
"""
class BallotType(EAObjectWithIDAndName, _prefix='ballotType'):
"""Represents a `Ballot Type
<https://docs.everyaction.com/reference/ballots#common-models-2>`__.
"""
class BankAccount(EAObjectWithIDAndName, _prefix='bankAccount'):
"""Represents a `Bank Account object
<https://docs.everyaction.com/reference/contributions#common-models-8>`__.
"""
class BargainingUnit(EAObjectWithIDAndName, _prefix='bargainingUnit', _keys={'employerBargainingUnitId', 'shortName'}):
"""Represents a `Bargaining Unit
<https://docs.everyaction.com/reference/bargaining-units#common-models-3>`__.
"""
class BatchForm(EAObjectWithIDAndName, _prefix='form'):
"""Represents a form for `Voter Registration Batches
<https://docs.everyaction.com/reference/voter-registration-batches#common-models-39>`__.
"""
class BatchProgram(EAObjectWithID, _prefix='programType'):
"""Represents a program for `Voter Registration Batches
<https://docs.everyaction.com/reference/voter-registration-batches#common-models-39>`__.
"""
class Canvasser(EAObjectWithID, _prefix='canvasser'):
"""Represents a `Canvasser
<https://docs.everyaction.com/reference/minivan-exports#common-models-25>`__.
"""
class CanvassContext(EAObject, _keys={'contactTypeId', 'dateCanvassed', 'inputTypeId', 'phoneId'}):
"""Represents a `Canvass Context
<https://docs.everyaction.com/reference/people#peoplevanidcanvassresponses>`__.
"""
class CanvassFileRequest(
EAObjectWithID,
_keys={'dateExpired', 'downloadUrl', 'errorCode', 'guid', 'savedListId', 'status', 'type', 'webhookUrl'},
):
"""Represents a `Canvass File Request
<https://docs.everyaction.com/reference/canvass-file-requests>`__.
"""
class ChangedEntityExportRequest(
EAObjectWithID,
_prefix='exportJob',
_keys={
'dateChangedFrom',
'dateChangedTo',
'excludeChangesFromSelf',
'includeInactive',
'requestedCustomFieldIds',
'requestedFields',
'requestedIds',
'resourceType'
}
):
"""Represents data associated with a request to `create a Changed Entity Export Job
<https://docs.everyaction.com/reference/changed-entities#changedentityexportjobs>`__.
"""
class ChangeType(EAObjectWithIDAndName, _prefix='changeType', _prefixed={'name'}, _keys={'description'}):
"""Represents a `changeType
<https://docs.everyaction.com/reference/changed-entity-export-jobs#changedentityexportjobschangetypesresourcetype>`__.
"""
@classmethod
def _id_key(cls) -> Optional[str]:
return 'ID'
class CodeResult(EAObjectWithID, _prefix='code', _keys={'message'}):
"""Represents the data associated with a response to a code batch request. See `POST /codes/batch
<https://docs.everyaction.com/reference/codes#codesbatch>`__
for an example.
"""
class Column(EAObjectWithName):
"""Represents a `Column
<https://docs.everyaction.com/reference/bulk-import#column>`__.
"""
class Commitment(
EAObjectWithID,
_prefix='commitment',
_keys={
'amount',
'ccExpirationMonth',
'ccExpirationYear',
'creditCardLast4',
'currency',
'designationId',
'endDate',
'frequency',
'nextTransactionDate',
'paymentType',
'startDate',
'status'
}
):
"""Represents a `Commitment
<https://docs.everyaction.com/reference/commitments#common-models-6>`__.
"""
class ConfirmationEmailData(
EAObject,
_keys={
'copyToEmails',
'fromEmail',
'fromName',
'fromSubject',
'isConfirmationEmailEnabled',
'isRecurringEmailEnabled',
'replyToEmail'
}
):
"""Represents `Confirmation Email Data
<https://docs.everyaction.com/reference/online-actions-forms#confirmation-email-data>`__.
"""
class ContactType(EAObjectWithIDAndName, _prefix='contactType', _keys={'channelTypeName'}):
"""Represents a `Contact Type
<https://docs.everyaction.com/reference/canvass-responses#canvassresponsescontacttypes>`__.
"""
class Constraints(EAObject, _keys={'invalidCharacters', 'maxLength'}):
"""Represents a description of the violated constraints for :class:`.Error` objects."""
class ContactHistory(EAObject, _keys={'contactTypeId', 'dateCanvassed', 'inputTypeId', 'resultCodeId'}):
"""Represents a `Contact History object
<https://docs.everyaction.com/reference/people#peoplevanidnotes-1>`__.
"""
class Currency(EAObject, _keys={'amount', 'currencyType'}):
"""Represents the type and the amount of a currency. Found, for instance, in the response of
`GET /people/{vanId}/membership
<https://docs.everyaction.com/reference/people#peoplevanidmembership>`__.
"""
class CustomFieldValue(EAObject, _keys={'assignedValue', 'customFieldGroupId', 'customFieldId'}):
"""Represents a `CustomFieldValue
<https://docs.everyaction.com/reference/people#common-models>`__.
"""
def __init__(
self,
customFieldId: Optional[int] = None,
customFieldGroupId: Optional[int] = None,
assignedValue: Optional[str] = None,
**kwargs: EAValue
) -> None:
"""
Initialize by setting the specified property names and aliases. Note that values will automatically be converted
to API objects when appropriate.
:param customFieldId: ID of the custom field.
:param customFieldGroupId: ID of the group of the custom field.
:param assignedValue: Value assigned to the custom field.
:param kwargs: Mapping of (alias or name) -> value.
"""
super().__init__(
customFieldId=customFieldId,
customFieldGroupId=customFieldGroupId,
assignedValue=assignedValue,
**kwargs
)
class Department(EAObjectWithIDAndName, _prefix='department', _keys={'employer', 'parentDepartmentId'}):
"""Represents a `Department
<https://docs.everyaction.com/reference/departments#common-models-10>`__.
"""
class Designation(EAObjectWithIDAndName, _prefix='designation'):
"""Represents a `Designation
<https://docs.everyaction.com/reference/designations#common-models-11>`__.
"""
class DisclosureFieldValue(EAObjectWithID, _prefix='disclosureField', _prefixed={'value'}, _keys={'designationId'}):
"""Represents a `Disclosure Field Value
<https://docs.everyaction.com/reference/people#common-models>`__.
"""
def __init__(
self,
disclosureFieldId: Optional[int] = None,
disclosureFieldValue: Optional[str] = None,
designationId: Optional[int] = None,
**kwargs: EAValue
) -> None:
"""
Initialize by setting the specified property names and aliases. Note that values will automatically be converted
to API objects when appropriate.
:param disclosureFieldId: ID of the disclosure field.
:param disclosureFieldValue: Value for the disclosure field.
:param designationId: ID of designation.
:param kwargs: Mapping of (alias or name) -> value.
"""
super().__init__(
disclosureFieldId=disclosureFieldId,
disclosureFieldValue=disclosureFieldValue,
designationId=designationId,
**kwargs
)
class DistrictFieldValue(EAObjectWithIDAndName, _keys={'parentId'}):
"""Represents a `District Field Value
<https://docs.everyaction.com/reference/district-fields#common-models-13>`__.
"""
class Email(EAObject, _keys={'dateCreated', 'email', 'isPreferred', 'isSubscribed', 'subscriptionStatus', 'type'}):
"""Represents an `Email
<https://docs.everyaction.com/reference/people#common-models>`__.
"""
def __init__(self, email: Optional[str] = None, **kwargs: EAValue) -> None:
"""
Initialize by setting the specified property names and aliases. Note that values will automatically be converted
to API objects when appropriate.
:param email: The email address.
:param kwargs: Mapping of (alias or name) -> value.
"""
super().__init__(email=email, **kwargs)
class EmailMessageContentDistributions(
EAObject,
_keys={
'bounceCount',
'contributionCount',
'contributionTotal',
'dateSent',
'formSubmissionCount',
'linksClickedCount',
'openCount',
'recipientCount',
'unsubscribeCount'
}
):
"""Represents an `Email Message Content Distributions object
<https://docs.everyaction.com/reference/email#common-models-14>`__.
"""
class EventRole(EAObjectWithIDAndName, _prefix='role', _keys={'goal', 'isEventLead', 'max', 'min'}):
"""Represents a `Role
<https://docs.everyaction.com/reference/events#common-models-18>`__
for an Event Type.
"""
class EventShift(EAObjectWithIDAndName, _prefix='eventShift', _keys={'endTime', 'startTime'}):
"""Represents a `Shift
<https://docs.everyaction.com/reference/events#common-models-18>`__.
"""
class ExportJobType(EAObjectWithIDAndName, _prefix='exportJobType'):
"""Represents an `Export Job Type
<https://docs.everyaction.com/reference/export-jobs#exportjobtypes>`__.
"""
class File(EAObject, _keys={'dateExpired', 'downloadUrl', 'recordCount'}):
"""Represents a `File object
<https://docs.everyaction.com/reference/bulk-import#common-models-4>`__
in EveryAction. Used in many contexts.
"""
class FinancialBatch(
EAObjectWithIDAndName,
_prefix='financialBatch',
_prefixed={'name', 'number'},
_keys={
'bankAccountId',
'checkDate',
'checkNumber',
'dateClosed',
'dateDeposited',
'dateOpened',
'depositNumber',
'designationId',
'expectedContributionCount',
'expectedContributionTotalAmount',
'isAutoGenerated',
'isOpen'
}
):
"""Represents a `Financial Batch
<https://docs.everyaction.com/reference/financial-batches#common-models-21>`__.
"""
class Folder(EAObjectWithIDAndName, _prefix='folder'):
"""Represents a `folder
<https://docs.everyaction.com/reference/folders>`__.
"""
class GeoCoordinate(EAObject, _keys={'lat', 'lon'}):
"""Represents a `Geographic Coordinate
<https://docs.everyaction.com/reference/locations>`__.
"""
class Identifier(EAObject, _keys={'externalId', 'type'}):
"""Represents an `Identifier
<https://docs.everyaction.com/reference/people#common-models>`__.
"""
class IsCellStatus(EAObjectWithIDAndName, _prefix='status', _prefixed={'name'}):
"""Represents an `Phone Is a Cell Status
<https://docs.everyaction.com/reference/phones#phones-iscellstatuses>`__.
"""
class JobActionType(EAObject, _keys={'actionType'}):
"""Represents a `Job Action Type
<https://docs.everyaction.com/reference/file-loading-jobs#action>`__.
"""
@staticmethod
def make(**kwargs: EAValue) -> 'JobActionType':
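        """Create the concrete Job Action Type named by the ``actionType`` property or an alias:
        a :class:`.ScoreLoadAction`, :class:`.AVEVDataFileAction`, or :class:`.SavedListLoadAction`.
        :param kwargs: Mapping of (alias or name) -> value.
        :raise EAException: If ``actionType`` is missing or unrecognized.
        """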
action_type = EAProperty.shared('actionType').find('actionType', kwargs, pop=True)
if not action_type:
raise EAException('Expected actionType property or alias to be specified for JobActionType')
lower = action_type.lower()
if lower == 'score':
return ScoreLoadAction(**kwargs)
if lower == 'avevdatafile':
return AVEVDataFileAction(**kwargs)
if lower == 'loadsavedlistfile':
return SavedListLoadAction(**kwargs)
raise EAException(f'Unrecognized Job Action Type {action_type}')
class JobClass(EAObjectWithIDAndName, _prefix='jobClass', _keys={'shortName'}):
"""Represents a `Job Class
<https://docs.everyaction.com/reference/job-classes#common-models-22>`__.
"""
class JobNotification(EAObject, _keys={'description', 'message', 'status'}):
"""Represents a `Notification
<https://docs.everyaction.com/reference/file-loading-jobs#notification>`__
for File Loading Jobs.
"""
class InputType(EAObjectWithIDAndName, _prefix='inputType'):
"""Represents an `Input Type
<https://docs.everyaction.com/reference/canvass-responses#canvassresponsesinputtypes>`__.
"""
class KeyValuePair(EAObject, _keys={'key', 'value'}):
"""Represents a key value pair for possible values of a `Support Field
<https://docs.everyaction.com/reference/voter-registration-batches#voterregistrationbatchesstatesstatesupportedfields>`__.
"""
class Listener(EAObject, _keys={'type', 'value'}):
"""Represents a `Listener
<https://docs.everyaction.com/reference/file-loading-jobs#overview-22>`__.
"""
class MembershipSourceCode(EAObjectWithIDAndName, _prefix='code', _prefixed={'name'}):
"""Represents a `Membership Source Code
<https://docs.everyaction.com/reference/people#peoplevanidmembership>`__.
"""
class MemberStatus(EAObjectWithIDAndName, _prefix='memberStatus', _keys={'isMember'}):
"""Represents a `Member Status
<https://docs.everyaction.com/reference/member-statuses#common-models-24>`__.
"""
class NoteCategory(EAObjectWithIDAndName, _prefix='noteCategory', _keys={'assignableTypes'}):
"""Represents a `Note Category
<https://docs.everyaction.com/reference/notes#common-models-26>`__.
"""
class Organization(
EAObjectWithIDAndName,
_prefix='organization',
_prefixed={'type'},
_keys={'parentOrganization', 'shortName', 'website'},
):
"""Represents an `Organization
<https://docs.everyaction.com/reference/employers#common-models-15>`__.
"""
class OrganizationPhone(
EAObjectWithID,
_prefix='organizationPhone',
_keys={
'confidenceLevel',
'countryCode',
'dialingPrefix',
'organizationId',
'phone',
},
phoneType=EAProperty('type')
):
"""Represents a `Phone for an organization
<https://docs.everyaction.com/reference/employers#common-models-15>`__.
"""
class Pledge(EAObjectWithID, _prefix='pledge'):
"""Represents a `Pledge object
<https://docs.everyaction.com/reference/contributions#common-models-8>`__.
"""
class PreferredPronoun(EAObjectWithIDAndName, _prefix='preferredPronoun', _prefixed={'name'}):
"""Represents a `preferred pronoun
<https://docs.everyaction.com/reference/reported-demographics#pronouns>`__.
"""
class PrintedList(EAObjectWithName, _keys={'number'}):
"""Represents a `Printed List
<https://docs.everyaction.com/reference/printed-lists#common-models-28>`__.
"""
class ProgramType(EAObjectWithIDAndName, _prefix='programType'):
"""Represents a `Program Type
<https://docs.everyaction.com/reference/voter-registration-batches#voterregistrationbatchesprogramtypes>`__.
"""
class RegistrationForm(EAObjectWithIDAndName, _prefix='form'):
"""Represents a `Registration Form
<https://docs.everyaction.com/reference/voter-registration-batches#voterregistrationbatchesregistrationforms>`__.
"""
class RelationalMapping(EAObject, _keys={'fieldName', 'value'}):
"""Represents a `Relational Mapping
<https://docs.everyaction.com/reference/changed-entities#changedentityexportjobsfieldsresourcetype>`__.
"""
class Relationship(EAObjectWithIDAndName):
"""Represents a `Relationship
<https://docs.everyaction.com/reference/relationships#relationships>`__.
"""
class ReportedEthnicity(EAObjectWithIDAndName, _prefix='reportedEthnicity', _prefixed={'name'}):
"""Represents a `Reported Ethnicity
<https://docs.everyaction.com/reference/reported-demographics#reportedethnicities>`__.
"""
class ReportedGender(EAObjectWithIDAndName, _prefix='reportedGender', _prefixed={'name'}):
"""Represents a `Reported Gender
<https://docs.everyaction.com/reference/reported-demographics#reportedgenders>`__.
"""
class ReportedLanguagePreference(EAObjectWithIDAndName, _prefix='reportedLanguagePreference', _prefixed={'name'}):
"""Represents a `Reported Language Preference
<https://docs.everyaction.com/reference/reported-demographics#reportedlanguagepreferences>`__.
"""
class ReportedRace(EAObjectWithIDAndName, _prefix='reportedRace', _prefixed={'name'}):
"""Represents a `Reported Race
<https://docs.everyaction.com/reference/reported-demographics#reportedraces>`__.
"""
class ReportedSexualOrientation(EAObjectWithIDAndName, _prefix='reportedSexualOrientation', _prefixed={'name'}):
"""Represents a `Reported Sexual Orientation
<https://docs.everyaction.com/reference/reported-demographics#reportedsexualorientations>`__.
"""
class ResultCode(EAObjectWithIDAndName, _prefix='resultCode', _keys={'mediumName', 'resultOutcomeGroup', 'shortName'}):
"""Represents a `Result Code
<https://docs.everyaction.com/reference/canvass-responses#canvassresponsesresultcodes>`__.
"""
class SavedList(EAObjectWithIDAndName, _prefix='savedList', _keys={'description', 'doorCount', 'listCount'}):
"""Represents a `Saved List
<https://docs.everyaction.com/reference/saved-lists#common-models-29>`__.
"""
class SavedListData(
EAObjectWithID,
_prefix='savedList',
_keys={'matchedRowsCount', 'originalRowCount', 'unmatchedRowsCount'}
):
"""Represents `Saved List Data
<https://docs.everyaction.com/reference/file-loading-jobs#saved-list-load>`__
for Saved List Load actions.
"""
class ScheduleType(EAObjectWithIDAndName, _prefix='scheduleType'):
"""Represents a `Schedule Type
<https://docs.everyaction.com/reference/schedule-types#common-models-30>`__.
"""
class Score(EAObjectWithIDAndName, _prefix='score', _keys={'description', 'maxValue', 'minValue', 'shortName'}):
"""Represents a `Score
<https://docs.everyaction.com/reference/scores#overview-37>`__.
"""
class ScoreApprovalCriteria(EAObject, _keys={'average', 'tolerance'}):
"""Represents `Score Approval Criteria
<https://docs.everyaction.com/reference/file-loading-jobs#score-load-action>`__
"""
class ScriptResponse(EAObject, _keys={'type'}):
"""Represents a `Script Response
<https://docs.everyaction.com/reference/people#peoplevanidcanvassresponses>`__.
"""
_PROPERTIES = {
'type': EAProperty()
}
@staticmethod
def make(**kwargs: EAValue) -> 'ScriptResponse':
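        """Construct the appropriate :class:`ScriptResponse` subclass instance for the given keyword arguments.
        :param kwargs: Mapping of (alias or name) -> value, which must include a recognized 'type'.
        :returns: The resulting :class:`ScriptResponse` subclass instance.
        """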
typ = kwargs.pop('type', None)
if typ is None:
raise EAException('Expected type for ScriptResponse')
lower = typ.lower()
if lower == 'activistcode':
return ActivistCodeResponse(**kwargs)
if lower == 'surveyresponse':
return SurveyCanvassResponse(**kwargs)
if lower == 'volunteeractivity':
return VolunteerActivityResponse(**kwargs)
raise EAException(f'Unrecognized Script Response type: {typ}')
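# Illustrative sketch (not part of the original source): `ScriptResponse.make` dispatches on the
# case-insensitive 'type' key and forwards the remaining kwargs to the matching subclass:
#
#     >>> ScriptResponse.make(type='SurveyResponse', surveyQuestionId=1, surveyResponseId=2)
#     ... # -> SurveyCanvassResponse (IDs hypothetical)
#
# Omitting 'type', or passing an unrecognized value, raises EAException.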
class ShiftType(EAObjectWithIDAndName, _prefix='shiftType', _keys={'defaultEndTime', 'defaultStartTime'}):
"""Represents a `Shift Type
<https://docs.everyaction.com/reference/employers#common-models-15>`__.
"""
class Status(EAObjectWithIDAndName, _prefix='status'):
"""Represents a `Status
<https://docs.everyaction.com/reference/event-types#common-models-17>`__
in EveryAction. Used in multiple contexts.
"""
class StoryStatus(EAObjectWithIDAndName, _prefix='storyStatus'):
"""Represents a `StoryStatus
<https://docs.everyaction.com/reference/stories#common-models-34>`__.
"""
@classmethod
def _name_key(cls) -> Optional[str]:
return 'statusName'
class Subgroup(EAObjectWithIDAndName, _prefix='subgroup', _keys={'fullName', 'isAssociatedWithBadges'}):
"""Represents a `Subgroup
<https://docs.everyaction.com/reference/targets#common-models-37>`__
for a Target.
"""
class SupportedEntity(EAObjectWithName, _keys={'isApplicable', 'isSearchable'}):
"""Represents a `Supported Entity
<https://docs.everyaction.com/reference/codes#common-models-7>`__
in the context of codes.
"""
class SupporterGroup(EAObjectWithIDAndName, _keys={'description'}):
"""Represents a `Supporter Group
<https://docs.everyaction.com/reference/supporter-groups#common-models-35>`__.
"""
class Suppression(EAObjectWithName, _prefix='suppression', _prefixed={'code', 'name'}):
"""Represents a `Suppression
<https://docs.everyaction.com/reference/people#common-models>`__.
"""
_CODE_TO_NAME: ClassVar[Dict[str, str]] = {
'NC': 'do not call',
'NE': 'do not email',
'NM': 'do not mail',
'NW': 'do not walk'
}
_NAME_TO_CODE: ClassVar[Dict[str, str]] = {n: c for c, n in _CODE_TO_NAME.items()}
DO_NOT_CALL: ClassVar['Suppression'] = None
DO_NOT_EMAIL: ClassVar['Suppression'] = None
DO_NOT_MAIL: ClassVar['Suppression'] = None
DO_NOT_WALK: ClassVar['Suppression'] = None
def __init__(
self,
code_or_name: Optional[str] = None,
**kwargs: EAValue
) -> None:
"""
Initialize by setting the specified property names and aliases. Note that values will automatically be converted
to API objects when appropriate. When the positional argument `code_or_name` is given, it is assumed to be a
code (e.g., "NC" for "Do not call") when it has length at most 2, and otherwise it is assumed to be a name.
        :param code_or_name: When given, it is assumed to be a code (e.g., "NC" for "Do not call") when it has
length at most 2, and otherwise it is assumed to be a name.
:param kwargs: Mapping of (alias or name) -> value.
"""
        code = None
        name = None
        if code_or_name:
            # Infer from str length whether it is a name or a code.
            if len(code_or_name) > 2:
                name = code_or_name
            else:
                code = code_or_name
        # Continue trying to infer the name or code if they are not yet determined.
        code = code or self._NAME_TO_CODE.get((name or '').lower())
        name = name or self._CODE_TO_NAME.get((code or '').upper())
        super().__init__(suppressionCode=code, suppressionName=name, **kwargs)
def __eq__(self, other: Any) -> bool:
if not isinstance(other, Suppression):
return False
if self.code and other.code:
return self.code.upper() == other.code.upper()
if self.name and other.name:
return self.name.lower() == other.name.lower()
# "Null" suppressions where name and code are both None are equal to each other.
return not (self.name or other.name or self.code or other.code)
@property
def no_call(self) -> bool:
"""Indicates whether this is a "Do Not Call" suppression.
:returns: :code:`True` if this is a "Do Not Call" suppression, :code:`False` otherwise.
"""
return (self.code or '').upper() == 'NC' or (self.name or '').lower() == 'do not call'
@property
def no_email(self) -> bool:
"""Indicates whether this is a "Do Not Email" suppression.
:returns: :code:`True` if this is a "Do Not Email" suppression, :code:`False` otherwise.
"""
return (self.code or '').upper() == 'NE' or (self.name or '').lower() == 'do not email'
@property
def no_mail(self) -> bool:
"""Indicates whether this is a "Do Not Mail" suppression.
:returns: :code:`True` if this is a "Do Not Mail" suppression, :code:`False` otherwise.
"""
return (self.code or '').upper() == 'NM' or (self.name or '').lower() == 'do not mail'
@property
def no_walk(self) -> bool:
"""Indicate whether this is a "Do Not Walk" suppression.
:returns: :code:`True` if this is a "Do Not Walk" suppression, :code:`False` otherwise.
"""
return (self.code or '').upper() == 'NW' or (self.name or '').lower() == 'do not walk'
Suppression.DO_NOT_CALL = Suppression('NC')
Suppression.DO_NOT_EMAIL = Suppression('NE')
Suppression.DO_NOT_MAIL = Suppression('NM')
Suppression.DO_NOT_WALK = Suppression('NW')
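# Illustrative sketch (not part of the original source): a Suppression may be built from either its
# two-letter code or its name; the other field is inferred from the maps above, and equality
# compares codes (or names) case-insensitively:
#
#     >>> Suppression('NC') == Suppression('do not call')
#     True
#     >>> Suppression('do not email').code
#     'NE'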
class SurveyResponse(EAObjectWithIDAndName, _prefix='surveyResponse', _keys={'mediumName', 'shortName'}):
"""
Represents a `Survey Response
<https://docs.everyaction.com/reference/survey-questions#common-models-36>`__.
"""
class UpdateStatistics(EAObject):
"""Represents an `Update Statistics
<https://docs.everyaction.com/reference/score-updates>`__.
"""
class User(EAObjectWithID, _prefix='user', _keys={'firstName', 'lastName'}):
"""Represents a `VAN User
<https://docs.everyaction.com/reference/extended-source-codes>`__.
"""
class ValueMapping(EAObjectWithIDAndName, _keys={'parentId', 'sourceValue', 'targetValue'}):
"""Represents a `value
<https://docs.everyaction.com/reference/bulk-import#bulkimportjobs>`__
in the context of bulk import jobs.
"""
class WorkArea(EAObjectWithIDAndName, _prefix='workArea'):
"""Represents a `Work Area
<https://docs.everyaction.com/reference/worksites#common-models-16>`__.
"""
# --- Second Order Properties and Objects ---
EAProperty.share(
activistCodes=EAProperty(singular_alias='activist_code', factory=ActivistCode),
approvalCriteria=EAProperty('criteria', factory=ScoreApprovalCriteria),
availableValues=EAProperty('available', 'values', singular_alias='value', factory=AvailableValue),
bargainingUnit=EAProperty(factory=BargainingUnit),
bargainingUnits=EAProperty(singular_alias='bargaining_unit', factory=BargainingUnit),
canvassers=EAProperty(singular_alias='canvasser', factory=Canvasser),
canvassContext=EAProperty('context', factory=CanvassContext),
category=EAProperty(factory=NoteCategory),
columns=EAProperty(singular_alias='column', factory=Column),
columnsToIncludeInResultsFile=EAProperty(
'include_columns',
'include',
singular_alias='include_column',
factory=Column
),
confirmationEmailData=EAProperty(
'confirmation_email',
'confirmation_data',
'confirmation',
factory=ConfirmationEmailData
),
contactAttributions=EAProperty('attributions', factory=Attribution),
contactHistory=EAProperty('history', factory=ContactHistory),
contributionBankAccount=EAProperty('contribution_account', 'account_obj', factory=BankAccount),
customFieldValues=EAProperty('custom_values', singular_alias='custom_value', factory=CustomFieldValue),
customProperties=EAProperty('properties', singular_alias='property', factory=KeyValuePair),
departments=EAProperty(singular_alias='department', factory=Department),
designation=EAProperty(factory=Designation),
detailedConstraints=EAProperty('constraints', factory=Constraints),
disclosureFieldValues=EAProperty(
'disclosures',
'field_values',
'values',
singular_alias='disclosure',
factory=DisclosureFieldValue
),
districtFieldValue=EAProperty(factory=DistrictFieldValue),
districtFieldValues=EAProperty('values', singular_alias='value', factory=DistrictFieldValue),
duesPaid=EAProperty(factory=Currency),
emailMessageContentDistributions=EAProperty('distributions', factory=EmailMessageContentDistributions),
file=EAProperty(factory=File),
files=EAProperty(singular_alias='file', factory=File),
firstMembershipSourceCode=EAProperty('first_source_code', 'source_code', factory=MembershipSourceCode),
form=EAProperty(factory=BatchForm),
geoLocation=EAProperty('geo', 'location', factory=GeoCoordinate),
identifiers=EAProperty(singular_alias='identifier', factory=Identifier),
isCellStatus=EAProperty('cell_status', 'is_cell', factory=IsCellStatus),
jobClass=EAProperty(factory=JobClass),
limitedToParentValues=EAProperty('limited_to', is_array=True, factory=AvailableValue),
listeners=EAProperty(singular_alias='listener', factory=Listener),
pledge=EAProperty(factory=Pledge),
possibleValues=EAProperty('possible', singular_alias='possible_value', factory=KeyValuePair),
preferredPronoun=EAProperty(factory=PreferredPronoun),
programType=EAProperty('program', factory=BatchProgram),
relationalMappings=EAProperty('relations', singular_alias='relation', factory=RelationalMapping),
resultFiles=EAProperty('files', singular_alias='file', factory=File),
role=EAProperty(factory=EventRole),
roles=EAProperty(singular_alias='role', factory=EventRole),
savedList=EAProperty('list', factory=SavedListData),
score=EAProperty(factory=Score),
scores=EAProperty(singular_alias='score', factory=Score),
shift=EAProperty(factory=EventShift),
shifts=EAProperty(singular_alias='shift', factory=EventShift),
storyStatus=EAProperty('status', factory=StoryStatus),
subgroups=EAProperty(singular_alias='subgroup', factory=Subgroup),
suppressions=EAProperty(singular_alias='suppression', factory=Suppression),
supportedEntities=EAProperty('entities', singular_alias='entity', factory=SupportedEntity),
updateStatistics=EAProperty('update_stats', 'statistics', 'stats', factory=UpdateStatistics),
values=EAProperty(singular_alias='value', factory=ValueMapping)
)
class ActivistCodeResponse(ScriptResponse, EAObjectWithID, _prefix='activistCode', _keys={'action'}):
"""Represents an `Activist Code Response
<https://docs.everyaction.com/reference/people#peoplevanidcanvassresponses>`__.
"""
def __init__(self, id: Optional[int] = None, **kwargs: EAValue) -> None:
"""
Initialize by setting the specified property names and aliases. Note that values will automatically be converted
to API objects when appropriate.
:param id: ID to initialize with. When given alone, a simple object results (see
`A Note About Simple Objects <https://docs.everyaction.com/reference/events-overview>`__).
:param kwargs: Mapping of (alias or name) -> value.
"""
super().__init__(type='ActivistCode', activistCodeId=id, **kwargs)
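# Illustrative sketch (not part of the original source): giving only the positional ID yields a
# simple object carrying just the response type and the activist code ID (attribute names assumed
# to follow this module's prefix conventions):
#
#     >>> response = ActivistCodeResponse(42)
#     >>> response.type, response.activistCodeId
#     ('ActivistCode', 42)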
class Address(
EAObjectWithID,
_prefix='address',
_prefixed={'line1', 'line2', 'line3'},
_keys={
'city',
'countryCode',
'displayMode',
'geoLocation',
'isPreferred',
'preview',
'stateOrProvince',
'type',
'zipOrPostalCode'
}
):
"""Represents an `Address
<https://docs.everyaction.com/reference/people#common-models>`__.
"""
class AVEVDataFileAction(JobActionType):
"""Represents an `AVEV Data File Action
<https://docs.everyaction.com/reference/file-loading-jobs#avev-data-file>`__.
"""
def __init__(self, **kwargs: EAValue) -> None:
"""
Initialize by setting the specified property names and aliases. Note that values will automatically be converted
to API objects when appropriate.
:param kwargs: Mapping of (alias or name) -> value.
"""
super().__init__(actionType='AVEVDataFile', **kwargs)
class BargainingUnitJobClass(
EAObjectWithID,
_prefix='employerBargainingUnitJobClass',
_keys={'bargainingUnit', 'employerBargainingUnitId', 'jobClass'}
):
"""Represents an `Employer Bargaining Unit Job Class
<https://docs.everyaction.com/reference/employers#common-models-15>`__.
"""
class ChangedEntityBulkImportField(EAObject, _keys={'fieldName', 'mappingTypeName', 'relationalMappings'}):
"""Represents a `bulk import field
<https://docs.everyaction.com/reference/changed-entities#changedentityexportjobsfieldsresourcetype>`__
in the context of changed entities.
"""
class ChangedEntityExportJob(
EAObjectWithID,
_prefix='exportJob',
_keys={
'dateChangedFrom',
'dateChangedTo',
'exportedRecordCount',
'files',
'jobStatus',
'message'
}
):
"""Represents data for an existing `ChangedEntityExportJob
<https://docs.everyaction.com/reference/changed-entities#common-models-5>`__.
"""
class Code(
EAObjectWithIDAndName,
_prefix='code',
_prefixed={'type'},
_keys={'dateCreated', 'dateModified', 'description', 'parentCodeId', 'supportedEntities'}
):
"""Represents a `Code object
<https://docs.everyaction.com/reference/codes#common-models-7>`__.
"""
class CustomField(
EAObjectWithIDAndName,
_prefix='customField',
_prefixed={'groupId', 'groupName', 'groupType', 'name', 'parentId', 'typeId'},
_keys={'availableValues', 'isEditable', 'isExportable', 'maxTextboxCharacters'}
):
"""Represents a `Custom Field
<https://docs.everyaction.com/reference/custom-fields#common-models-9>`__.
"""
class DistrictField(
EAObjectWithIDAndName,
_prefix='districtField',
_prefixed={'values'},
_keys={'isCustomDistrict', 'parentFieldId'}
):
"""Represents a `District Field
<https://docs.everyaction.com/reference/district-fields#common-models-13>`__.
"""
class EmailMessageContent(
EAObject,
_keys={'createdBy', 'dateCreated', 'emailMessageContentDistributions', 'senderDisplayName', 'senderEmailAddress'}
):
"""Represents an `email message content object
<https://docs.everyaction.com/reference/email#common-models-14>`__.
"""
class EmployerBargainingUnit(EAObjectWithID, _prefix='employerBargainingUnit', _keys={'bargainingUnit'}):
"""Represents an `Employer Bargaining Unit
<https://docs.everyaction.com/reference/employers#employersemployeridbargainingunitsbargainingunitid>`__.
"""
class Error(
EAObject,
_keys={'code', 'detailedConstraints', 'detailedCode', 'hint', 'properties', 'referenceCode', 'resourceUrl', 'text'}
):
"""Represents an `Error object
<https://docs.everyaction.com/reference/bulk-import#common-models-4>`__.
"""
class ExtendedSourceCode(
EAObjectWithIDAndName,
_prefix='extendedSourceCode',
_prefixed={'name'},
_keys={'dateCreated', 'dateModified', 'modifiedBy'},
createdBy=EAProperty('creator', factory=User)
):
"""Represents an `Extended Source Code
<https://docs.everyaction.com/reference/extended-source-codes#common-models-20>`__.
"""
class FieldValueMapping(EAObject, _keys={'columnName', 'fieldName', 'staticValue', 'values'}):
"""Represents a `fieldValueMapping
<https://docs.everyaction.com/reference/bulk-import#bulkimportjobs>`__.
"""
class JobFile(
EAObject,
_prefix='file',
_prefixed={'name'},
_keys={'columns', 'columnDelimiter', 'hasHeader', 'hasQuotes', 'sourceUrl'}
):
"""Represents a `file object for a job
<https://docs.everyaction.com/reference/file-loading-jobs#overview-22>`__.
"""
class ListLoadCallbackData(JobNotification, _keys={'description', 'message', 'savedList', 'status'}):
"""Represents `Callback Data
<https://docs.everyaction.com/reference/file-loading-jobs#saved-list-load>`__
for a Saved List Load action.
"""
class MappingParent(EAObject, _keys={'limitedToParentValues', 'parentFieldName'}):
"""Represents prerequisites for mapping a field as described `here
<https://docs.everyaction.com/reference/bulk-import#bulkimportmappingtypes>`__.
"""
class Membership(
EAObject,
_keys={
'changeTypeName',
'dateCardsSent',
'dateExpireMembership',
'dateLastRenewed',
'dateStartMembership',
'duesAttributionTypeName',
'duesEntityTypeName',
'duesPaid',
'enrollmentTypeName',
'firstMembershipSourceCode',
'levelId',
'levelName',
'numberOfCards',
'numberTimesRenewed',
'statusName',
'totalDuesPaid'
}
):
"""Contains `membership information
<https://docs.everyaction.com/reference/people#peoplevanidmembership>`__
for a person.
"""
class MiniVANExport(
EAObjectWithIDAndName,
_prefix='minivanExport',
_keys={
'canvassers',
'databaseMode',
'dateCreated'
},
createdBy=EAProperty('creator', factory=User)
):
"""Represents a `MiniVAN Export
<https://docs.everyaction.com/reference/minivan-exports#common-models-25>`__.
"""
class Note(
EAObjectWithID,
_prefix='note',
_keys={'category', 'contactHistory', 'createdDate', 'isViewRestricted', 'text'}
):
"""Represents a `Note
<https://docs.everyaction.com/reference/people#peoplevanidnotes>`__.
"""
class OnlineActionsForm(
EAObjectWithIDAndName,
_prefix='formTracking',
_keys={
'activistCodes',
'campaignId',
'codeId',
'confirmationEmailData',
'createdByEmail',
'dateCreated',
'dateModified',
'designation',
'eventId',
'isActive',
'isConfirmedOptInEnabled',
'modifiedByEmail'
},
formType=EAProperty('type'),
formTypeId=EAProperty()
):
"""Represents an `Online Action Form
<https://docs.everyaction.com/reference/online-actions-forms#common-models-27>`__.
"""
@classmethod
def _name_key(cls) -> Optional[str]:
return 'formName'
class Phone(
EAObjectWithID,
_prefix='phone',
_prefixed={'number', 'optInStatus', 'type'},
_keys={'countryCode', 'dateCreated', 'dialingPrefix', 'ext', 'isCellStatus', 'isPreferred', 'smsOptInStatus'}
):
"""Represents a `Phone
<https://docs.everyaction.com/reference/people#common-models>`__.
"""
def __init__(self, id_or_number: Optional[Union[int, str]] = None, **kwargs: EAValue) -> None:
"""
Initialize by setting the specified property names and aliases. Note that values will automatically be converted
to API objects when appropriate.
:param id_or_number: Either the phone ID (if an integer), or the phone number (if a string). A simple object
will result when an integer is given for the `id_or_number` positional parameter
(see `A Note About Simple Objects <https://docs.everyaction.com/reference/events#overview-19>`__).
When a string is given instead, it is assumed to correspond to the phone number, accessible via
instance.number.
:param kwargs: Mapping of (alias or name) -> value.
"""
if id_or_number is not None:
if isinstance(id_or_number, int):
# Assume id for int.
super().__init__(id=id_or_number, **kwargs)
elif isinstance(id_or_number, str):
# Assume phone number for str.
super().__init__(number=id_or_number, **kwargs)
else:
raise ValueError(f'Expected int or str for id_or_number, got {type(id_or_number)}: {id_or_number}')
else:
super().__init__(**kwargs)
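# Illustrative sketch (not part of the original source): the positional argument to Phone is
# interpreted by type:
#
#     >>> Phone(123)         # int -> treated as the phone ID (a simple object)
#     >>> Phone('555-0100')  # str -> treated as the phone number (hypothetical number)
#     >>> Phone(3.14)        # any other type raises ValueError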
class SavedListLoadAction(
JobActionType,
_keys={'folderId', 'listDescription', 'listName', 'overwriteExistingListId', 'personIdColumn', 'personIdType'}
):
"""Represents a `Saved List Load action
<https://docs.everyaction.com/reference/file-loading-jobs#saved-list-load>`__.
"""
def __init__(self, **kwargs: EAValue) -> None:
"""
Initialize by setting the specified property names and aliases. Note that values will automatically be converted
to API objects when appropriate.
:param kwargs: Mapping of (alias or name) -> value.
"""
super().__init__(actionType='LoadSavedListFile', **kwargs)
class ScoreLoadAction(
JobActionType,
_keys={'approvalCriteria', 'personIdColumn', 'personIdType', 'scoreColumn', 'scoreId'}
):
"""Represents a `Score Load Action
<https://docs.everyaction.com/reference/file-loading-jobs#score-load-action>`__.
"""
def __init__(self, **kwargs: EAValue) -> None:
"""
Initialize by setting the specified property names and aliases. Note that values will automatically be converted
to API objects when appropriate.
:param kwargs: Mapping of (alias or name) -> value.
"""
super().__init__(actionType='Score', **kwargs)
class ScoreUpdate(
EAObjectWithID,
_prefix='scoreUpdate',
_keys={'dateProcessed', 'loadStatus', 'score', 'updateStatistics'}
):
"""Represents a `Score Update
<https://docs.everyaction.com/reference/score-updates#scoreupdatesscoreupdateid>`__.
"""
class SupportField(
EAObject,
_keys={'customPropertyKey', 'displayName', 'fieldType', 'maxFieldLength', 'possibleValues'}
):
"""Represents a `Support Field
<https://docs.everyaction.com/reference/voter-registration-batches#voterregistrationbatchesregistrationforms>`__
for a Voter Registration Batch.
"""
class SurveyCanvassResponse(
ScriptResponse,
_keys={'mediumName', 'name', 'shortName', 'surveyQuestionId', 'surveyResponseId'}
):
"""Represents a `Survey Response
<https://docs.everyaction.com/reference/people#peoplevanidcanvassresponses>`__
in the context of a canvass response.
"""
def __init__(
self,
surveyQuestionId: Optional[int] = None,
surveyResponseId: Optional[int] = None,
**kwargs: EAValue
) -> None:
"""
Initialize by setting the specified property names and aliases. Note that values will automatically be converted
to API objects when appropriate.
:param surveyQuestionId: ID of the survey question.
:param surveyResponseId: ID of the survey response.
:param kwargs: Mapping of (alias or name) -> value.
"""
super().__init__(
type='SurveyResponse',
surveyQuestionId=surveyQuestionId,
surveyResponseId=surveyResponseId,
**kwargs
)
class Target(
EAObjectWithIDAndName,
_prefix='target',
_keys={'areSubgroupsSticky', 'description', 'points', 'status', 'subgroups', 'type'}
):
"""Represents a `Target
<https://docs.everyaction.com/reference/targets#common-models-37>`__.
"""
class TargetExportJob(
EAObjectWithID,
_prefix='exportJob',
_keys={'file', 'jobStatus', 'targetId', 'webhookUrl'},
):
"""Represents a `Target Export Job
<https://docs.everyaction.com/reference/target-export-jobs#targetexportjobsexportjobid>`__.
"""
class VolunteerActivityResponse(ScriptResponse, _prefix='volunteerActivity', _keys={'action'}):
"""Represents a `Volunteer Activity
<https://docs.everyaction.com/reference/people#peoplevanidcanvassresponses>`__.
"""
def __init__(self, id: Optional[int] = None, **kwargs: EAValue) -> None:
"""
Initialize by setting the specified property names and aliases. Note that values will automatically be converted
to API objects when appropriate.
:param id: ID to initialize with. When given alone, a simple object results (see
`A Note About Simple Objects <https://docs.everyaction.com/reference/events-overview>`__).
:param kwargs: Mapping of (alias or name) -> value.
"""
super().__init__(type='VolunteerActivity', volunteerActivityId=id, **kwargs)
class VoterRegistrationBatch(
EAObjectWithIDAndName,
_prefix='voterRegistrationBatch',
_keys={'dateCreated', 'description', 'form', 'personType', 'programType', 'stateCode', 'status'}
):
"""Represents a `Voter Registration Batch
<https://docs.everyaction.com/reference/voter-registration-batches#common-models-39>`__.
"""
# --- Third Order Properties and Objects ---
EAProperty.share(
address=EAProperty(factory=Address),
addresses=EAProperty(singular_alias='address', factory=Address),
bulkImportFields=EAProperty(singular_alias='bulk_import_field', factory=ChangedEntityBulkImportField),
codes=EAProperty(singular_alias='code', factory=Code),
customFields=EAProperty(singular_alias='custom_field', factory=CustomField),
districts=EAProperty(singular_alias='district', factory=DistrictField),
districtFields=EAProperty(singular_alias='district_field', factory=DistrictField),
emails=EAProperty(singular_alias='email', factory=Email),
emailMessageContent=EAProperty(singular_alias='content', factory=EmailMessageContent),
errors=EAProperty(singular_alias='error', factory=Error),
extendedSourceCode=EAProperty('extended_source', factory=ExtendedSourceCode),
fieldValueMappings=EAProperty(
'field_mappings',
'value_mappings',
'mappings',
singular_alias='mapping',
factory=FieldValueMapping
),
jobClasses=EAProperty(singular_alias='job_class', factory=BargainingUnitJobClass),
parents=EAProperty(singular_alias='parent', factory=MappingParent),
phones=EAProperty(singular_alias='phone', factory=Phone),
recordedAddresses=EAProperty(singular_alias='recorded_address', factory=Address),
responses=EAProperty(singular_alias='response', factory=ScriptResponse.make),
surveyQuestionResponses=EAProperty('responses', singular_alias='response', factory=SurveyResponse),
tags=EAProperty(singular_alias='tag', factory=Code),
voterRegistrationBatches=EAProperty(
'registration_batches',
'batches',
singular_alias='batch',
factory=VoterRegistrationBatch
),
workAreas=EAProperty(singular_alias='work_area')
)
class AddRegistrantsResponse(EAObject, _keys={'alternateId', 'errors', 'result', 'vanId'}):
"""Represents the data associated with a response to `adding registrants
<https://docs.everyaction.com/reference/voter-registration-batches#voterregistrationbatchesbatchidpeople>`__
to a Voter Registration Batch.
"""
class BulkImportField(
EAObjectWithName,
_keys={'canBeMappedToColumn', 'description', 'hasPredefinedValues', 'isRequired', 'parents'}
):
"""Represents a `mapping type field
<https://docs.everyaction.com/reference/bulk-import#bulkimportmappingtypes>`__.
"""
class BulkImportJobData(
EAObjectWithID,
_prefix='job',
_keys={'errors', 'resourceType', 'resultFileSizeLimitKb', 'resultFiles', 'status'}
):
"""Represents data for an existing `Bulk Import Job
<https://docs.everyaction.com/reference/bulk-import#common-models-4>`__.
"""
class CanvassResponse(EAObject, _keys={'canvassContext', 'responses', 'resultCodeId'}):
"""Represents a `Canvass Response
<https://docs.everyaction.com/reference/people#peoplevanidcanvassresponses>`__.
"""
class ChangedEntityField(
EAObjectWithName,
_keys={'availableValues', 'bulkImportFields', 'isCoreField', 'maxTextboxCharacters'},
_prefix='field',
_prefixed={'name', 'type'},
):
"""Represents a `changed entity field
<https://docs.everyaction.com/reference/changed-entities#changedentityexportjobsfieldsresourcetype>`__.
"""
_TYPE_TO_FACTORY = {}
ValueType = Union[bool, int, str, datetime]
@staticmethod
def _parse_bool(s: str) -> bool:
if s.lower() == 'true':
return True
if s.lower() == 'false':
return False
raise ValueError(f'Could not parse "{s}" to a boolean.')
def parse(self, value: str) -> ValueType:
"""Parse the raw string value of a field into a typed result.
The below table gives the behavior of this function for each `field type
<https://docs.everyaction.com/reference/changed-entities#changedentityexportjobsfieldsresourcetype>`__.
+------------+--------------------------------------------------------------------------------------------+
| Field Type | Behavior |
+============+============================================================================================+
| B | Parses "true" to :code:`True` and "false" to :code:`False`. |
+------------+--------------------------------------------------------------------------------------------+
| D | Parses into a naive `datetime object <https://docs.python.org/3/library/datetime.html>`__. |
+------------+--------------------------------------------------------------------------------------------+
| M | Keeps the original string value. |
+------------+--------------------------------------------------------------------------------------------+
| N | Parses into an :code:`int`. |
+------------+--------------------------------------------------------------------------------------------+
| T | Keeps the original string value. |
+------------+--------------------------------------------------------------------------------------------+
:param value: The value to parse.
:returns: The parsed value.
"""
return self._TYPE_TO_FACTORY[self.type](value) if value else None
# References inner staticmethod so needs to be defined here.
ChangedEntityField._TYPE_TO_FACTORY = {
'B': ChangedEntityField._parse_bool,
'D': datetime.fromisoformat,
'M': lambda s: s,
'N': int,
'T': lambda s: s,
}
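# Illustrative sketch (not part of the original source): with the factories above, `parse` turns
# raw export strings into typed values (keyword construction assumed, as elsewhere in this module):
#
#     >>> field = ChangedEntityField(name='DateCreated', type='D')
#     >>> field.parse('2021-06-01T12:30:00')
#     datetime.datetime(2021, 6, 1, 12, 30)
#     >>> ChangedEntityField(name='IsDeleted', type='B').parse('true')
#     True
#     >>> field.parse('')  # empty raw values parse to None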
class Contribution(
EAObject,
_keys={
'acceptedOneTimeAmount',
'acceptedRecurringAmount',
'amount',
'bankAccount',
'checkDate',
'checkNumber',
'codes',
'contact',
'contactAttributions',
'contributionBankAccount',
'contributionId',
'coverCostsAmount',
'dateReceived',
'dateThanked',
'depositDate',
'depositNumber',
'designation',
'directMarketingCode',
'disclosureFieldValues',
'extendedSourceCode',
'identifiers',
'isUpsellAccepted',
'isUpsellShown',
'linkedJointFundraisingContributionId',
'linkedPartnershipContributionId',
'notes',
'onlineReferenceNumber',
'paymentType',
'pledge',
'processedAmount',
'processedCurrency',
'selectedOneTimeAmount',
'status',
'upsellType'
}
):
"""Represents a `Contribution
<https://docs.everyaction.com/reference/contributions#common-models-8>`__.
"""
class Disbursement(
EAObjectWithID,
_prefix='disbursement',
_keys={
'amount',
'batchCode',
'checkDate',
'checkNumber',
'codes',
'contact',
'dateIssued',
'designation',
'disclosureFieldValues',
'linkedCreditCardPaymentDisbursementId',
'linkedReimbursementDisbursementId',
'notes'
}
):
"""Represents a `Disbursement
<https://docs.everyaction.com/reference/disbursements#common-models-12>`__.
"""
class EmailMessage(
EAObjectWithIDAndName,
_prefix='foreignMessage',
_keys={'createdBy', 'dateCreated', 'dateModified', 'dateScheduled', 'emailMessageContent'},
campaignID=EAProperty('campaign')
):
"""Represents an `email message
<https://docs.everyaction.com/reference/email#common-models-14>`__.
"""
# TODO: Is emailMessageContent really an array? If so, can it actually contain multiple entities?
class FileLoadingJob(
EAObjectWithID,
_prefix='job',
_keys={'description', 'interventionCallbackUrl', 'invalidRowsFileUrl', 'listeners'},
actions=EAProperty(singular_alias='action', factory=JobActionType.make),
file=EAProperty(factory=JobFile)
):
"""Represents a `File Loading Job
<https://docs.everyaction.com/reference/file-loading-jobs>`__.
"""
class Location(EAObjectWithIDAndName, _prefix='location', _keys={'address', 'displayName'}):
"""Represents a `Location
<https://docs.everyaction.com/reference/locations>`__.
"""
class MappingType(EAObjectWithName, _keys={'fieldValueMappings', 'resultFileColumnName'}):
"""Represents a `bulk import mapping type
<https://docs.everyaction.com/reference/bulk-import#mapping-types>`__.
"""
class Person(
EAObjectWithID,
_prefix='van',
_keys={
'additionalEnvelopeName',
'additionalSalutation',
'addresses',
'biographyImageUrl',
'caseworkCases',
'caseworkIssues',
'caseworkStories',
'collectedLocationId',
'contactMethodPreferenceCode',
'contactMode',
'contactModeId',
'customFieldValues',
'customProperties',
'cycle',
'dateOfBirth',
'disclosureFieldValues',
'districts',
'electionRecords',
'electionType',
'emails',
'envelopeName',
'finderNumber',
'firstName',
'formalEnvelopeName',
'formalSalutation',
'identifiers',
'lastName',
'middleName',
'nickname',
'occupation',
'organizationContactOfficialName',
'organizationRoles',
'party',
'phones',
'preferredPronoun',
'primaryContact',
'recordedAddresses',
'salutation',
'scores',
'selfReportedEthnicities',
'selfReportedEthnicity',
'selfReportedGenders',
'selfReportedLanguagePreference',
'selfReportedRace',
'selfReportedRaces',
'selfReportedSexualOrientations',
'sex',
'suppressions',
'surveyQuestionResponses',
'suffix',
'title',
'website'
},
employer=EAProperty()
):
"""Represents a `Person
<https://docs.everyaction.com/reference/people#common-models>`__.
"""
@staticmethod
def _find_factory(**kwargs: EAValue) -> Optional['Person']:
status = kwargs.get('status')
if status is not None:
if status != 'Unmatched':
raise AssertionError(f'Only expected Unmatched status, found "{status}"')
return None
return Person(**kwargs)
@staticmethod
def _get_preferred(of: List[Any], attr: Optional[str] = None) -> Optional[Any]:
# Get a preferred entity from a list of entities by checking the "preferred" attribute.
if of:
result_list = [o for o in of if o.preferred]
if result_list:
# Multiple preferred entities should be impossible without bad modifications.
assert len(result_list) == 1
if attr:
return getattr(result_list[0], attr)
return result_list[0]
return None
def add_suppression(self, suppression: Suppression) -> bool:
"""Adds the given suppression to this person if it is not already present.
:param suppression: The suppression to add.
:returns: :code:`True` if the suppression was added, :code:`False` if it was already present.
"""
self.suppressions = self.suppressions or []
if suppression not in self.suppressions:
self.suppressions.append(suppression)
return True
return False
def has_suppression(self, suppression: Suppression) -> Optional[bool]:
"""Determines whether this contact has the given suppression.
:param suppression: The suppression to check for.
:returns: :code:`True` if this contact has the suppression, :code:`False` if suppression information is
available (when :code:`suppressions` attribute is not :code:`None`) and the suppression was not found, or
:code:`None` if no suppression information is available.
"""
if self.suppressions is not None:
return suppression in self.suppressions
return None
def remove_suppression(self, suppression: Suppression) -> bool:
"""Removes the given suppression from this person if it is present.
:param suppression: The suppression to remove.
:returns: :code:`True` if the suppression was removed, :code:`False` if the suppression was not found.
"""
if self.suppressions:
try:
self.suppressions.remove(suppression)
return True
except ValueError:
return False
return False
def set_suppression(self, suppression: Suppression, value: bool) -> bool:
"""Add or remove the given suppression.
:param suppression: Suppression to add or remove.
:param value: :code:`True` to add the suppression, :code:`False` to remove it.
:returns: :code:`True` if suppressions were changed, :code:`False` otherwise.
"""
if value:
return self.add_suppression(suppression)
else:
return self.remove_suppression(suppression)
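    # Illustrative sketch (not part of the original source): the add/has/set helpers keep the
    # suppressions list consistent:
    #
    #     >>> person = Person(firstName='Ada', suppressions=[])
    #     >>> person.set_suppression(Suppression.DO_NOT_CALL, True)
    #     True
    #     >>> person.has_suppression(Suppression.DO_NOT_CALL)
    #     True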
@property
def do_not_call(self) -> Optional[bool]:
"""Determine if this contact is marked as "Do Not Call".
:returns: :code:`True` is this contact is marked as "Do Not Call", :code:`False` is suppressions are present
and do not contain "Do Not Call", or :code:`None` if no suppression information is available.
"""
return self.has_suppression(Suppression.DO_NOT_CALL)
@do_not_call.setter
def do_not_call(self, value: bool) -> None:
"""Sets the "Do Not Call" status of this contact.
:param value: Value to set to.
"""
self.set_suppression(Suppression.DO_NOT_CALL, value)
@property
def do_not_email(self) -> Optional[bool]:
"""Determine if this contact is marked as "Do Not Email".
:returns: :code:`True` is this contact is marked as "Do Not Email", :code:`False` is suppressions are present
and do not contain "Do Not Email", or :code:`None` if no suppression information is available.
"""
return self.has_suppression(Suppression.DO_NOT_EMAIL)
@do_not_email.setter
def do_not_email(self, value: bool) -> None:
"""Sets the "Do Not Call" status of this contact.
:param value: Value to set to.
"""
self.set_suppression(Suppression.DO_NOT_EMAIL, value)
@property
def do_not_mail(self) -> Optional[bool]:
"""Determine if this contact is marked as "Do Not Mail".
:returns: :code:`True` is this contact is marked as "Do Not Mail", :code:`False` is suppressions are present
and do not contain "Do Not Mail", or :code:`None` if no suppression information is available.
"""
return self.has_suppression(Suppression.DO_NOT_MAIL)
@do_not_mail.setter
def do_not_mail(self, value: bool) -> None:
"""Sets the "Do Not Call" status of this contact.
:param value: Value to set to.
"""
self.set_suppression(Suppression.DO_NOT_MAIL, value)
@property
def do_not_walk(self) -> Optional[bool]:
"""Determine if this contact is marked as "Do Not Mail".
:returns: :code:`True` is this contact is marked as "Do Not Walk", :code:`False` is suppressions are present
and do not contain "Do Not Walk", or :code:`None` if no suppression information is available.
"""
return self.has_suppression(Suppression.DO_NOT_WALK)
@do_not_walk.setter
def do_not_walk(self, value: bool) -> None:
"""Sets the "Do Not Call" status of this contact.
:param value: Value to set to.
"""
self.set_suppression(Suppression.DO_NOT_WALK, value)
@property
def preferred_address(self) -> Optional[Address]:
"""Get this contact's preferred mailing address as an :class:`.Address` object if it exists, or :code:`None`
if this contact has no addresses or if information on what address is preferred is unavailable.
:returns: The preferred mailing address object, or :code:`None` if no preferred mailing address could be
determined.
"""
return self._get_preferred(self.addresses)
@property
def preferred_email(self) -> Optional[str]:
"""Get the address of this contact's preferred email if it exists, or :code:`None` if this contact has no email
addresses or if information on what address is preferred is unavailable.
        :returns: The preferred email address, or :code:`None` if no preferred email address could be determined.
"""
return self._get_preferred(self.emails, "email")
@property
def preferred_phone(self) -> Optional[str]:
"""Get the number of this contact's preferred phone if it exists, or :code:`None` if this contact has no phone
numbers or if information on what number is preferred is unavailable.
        :returns: The preferred phone number, or :code:`None` if no preferred phone number could be determined.
"""
return self._get_preferred(self.phones, "number")
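    # Illustrative sketch (not part of the original source): the preferred_* helpers pick the
    # entity flagged as preferred, assuming the usual 'isPreferred' alias resolves to `preferred`:
    #
    #     >>> person = Person(emails=[Email(email='ada@example.com', isPreferred=True)])
    #     >>> person.preferred_email
    #     'ada@example.com'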
class Story(
EAObjectWithID,
_prefix='story',
_prefixed={'text'},
_keys={'campaignId', 'storyStatus', 'tags', 'title', 'vanId'}
):
"""Represents a `Story
<https://docs.everyaction.com/reference/stories#common-models-34>`__.
"""
class SurveyQuestion(
EAObjectWithIDAndName,
_prefix='surveyQuestion',
_keys={'cycle', 'mediumName', 'scriptQuestion', 'shortName', 'status', 'type'},
responses=EAProperty(singular_alias='response', factory=SurveyCanvassResponse)
):
"""Represents a `Survey Question
<https://docs.everyaction.com/reference/survey-questions#common-models-36>`__.
"""
class ValueMappingData(EAObjectWithIDAndName, _keys={'parents'}):
"""Represents data for an existing `value mapping
<https://docs.everyaction.com/reference/bulk-import#bulkimportmappingtypesmappingtypenamefieldnamevalues>`__
in the context of bulk import jobs.
"""
class Worksite(EAObjectWithIDAndName, _prefix='worksite', _keys={'address', 'employer', 'isPreferred', 'workAreas'}):
"""Represents a `Worksite
<https://docs.everyaction.com/reference/worksites#common-models-16>`__.
"""
# --- Fourth Order Properties and Objects ---
EAProperty.share(
defaultLocation=EAProperty(factory=Location),
fields=EAProperty(singular_alias='field', factory=BulkImportField),
location=EAProperty(factory=Location),
locations=EAProperty(singular_alias='location', factory=Location),
mappingTypes=EAProperty('mappings', singular_alias='mapping', factory=MappingType),
person=EAProperty(factory=Person),
surveyQuestions=EAProperty('questions', singular_alias='question', factory=SurveyQuestion),
worksites=EAProperty(singular_alias='worksite', factory=Worksite)
)
class BulkImportAction(
EAObject,
_keys={'actionType', 'columnsToIncludeInResultsFile', 'mappingTypes', 'resultFileSizeKbLimit', 'resourceType'}
):
"""Represents a `bulk import action
<https://docs.everyaction.com/reference/bulk-import#action>`__.
"""
class Employer(
EAObjectWithIDAndName,
_prefix='employer',
_keys={
'bargainingUnits',
'departments',
'isMyOrganization',
'jobClasses',
'parentOrganization',
'shortName',
'website',
'worksites'
},
phones=EAProperty(singular_alias='phone', factory=OrganizationPhone),
shifts=EAProperty(singular_alias='shift', factory=ShiftType)
):
"""Represents an `Employer
<https://docs.everyaction.com/reference/employers#common-models-15>`__.
"""
class EventType(
EAObjectWithIDAndName,
_prefix='eventType',
_keys={
'canBeRepeatable',
'canHaveGoals',
'canHaveMultipleLocations',
'canHaveMultipleShifts',
'canHaveRoleMaximums',
'canHaveRoleMinimums',
'color',
'defaultLocation',
'isAtLeastOneLocationRequired',
'isOnlineActionsAvailable',
'isSharedWithChildCommitteesByDefault',
'isSharedWithMasterCommitteeByDefault',
'roles',
},
statuses=EAProperty(is_array=True, factory=Status)
):
"""Represents an `Event Type
<https://docs.everyaction.com/reference/event-types#common-models-17>`__.
"""
class ExportJob(
EAObjectWithID,
_prefix='exportJob',
_prefixed={'guid'},
_keys={
'activistCodes',
'canvassFileRequestId',
'canvassFileRequestGuid',
'customFields',
'dateExpired',
'districtFields',
'downloadUrl',
'errorCode',
'savedListId',
'status',
'surveyQuestions',
'type',
'webhookUrl'
}
):
"""Represents an `Export Job
<https://docs.everyaction.com/reference/export-jobs#common-models-19>`__.
"""
class MappingTypeData(EAObjectWithName, _keys={'allowMultipleMode', 'displayName', 'fields', 'resourceTypes'}):
"""Represents data for an existing `bulk import mapping type
<https://docs.everyaction.com/reference/bulk-import#bulkimportmappingtypes>`__.
"""
class Registrant(EAObject, _keys={'alternateId', 'customProperties', 'person'}):
"""Represents a `Registrant
<https://docs.everyaction.com/reference/voter-registration-batches#voterregistrationbatchesbatchidpeople>`__
for a Voter Registration Batch.
"""
# --- Fifth Order Properties and Objects ---
EAProperty.share(
actions=EAProperty(singular_alias='action', factory=BulkImportAction),
eventType=EAProperty('type', factory=EventType)
)
class BulkImportJob(EAObject, _keys={'actions', 'description'}, file=EAProperty(factory=JobFile)):
"""Represents a `Bulk Import Job
<https://docs.everyaction.com/reference/bulk-import#bulkimportjobs>`__.
"""
class Event(
EAObjectWithIDAndName,
_prefix='event',
_keys={
'codes',
'createdDate',
'description',
'districtFieldValue',
'dotNetTimeZoneId',
'endDate',
'eventType',
'isActive',
'isOnlyEditableByCreatingUser',
'isPubliclyViewable',
'locations',
'roles',
'shifts',
'shortName',
'startDate',
'voterRegistrationBatches'
},
notes=EAProperty(singular_alias='note', factory=Note)
):
"""Represents an `Event
<https://docs.everyaction.com/reference/events#common-models-18>`__.
"""
# --- Sixth Order Properties and Objects ---
EAProperty.share(
event=EAProperty(factory=Event)
)
class Signup(
EAObjectWithID,
_prefix='eventSignup',
_keys={
'dateModified',
'endTimeOverride',
'event',
'isOfflineSignup',
'location',
'modifiedBy',
'notes',
'person',
'shift',
'startTimeOverride',
'supporterGroupId',
'role'
},
status=EAProperty(factory=Status)
):
"""Represents a `Signup
<https://docs.everyaction.com/reference/signups#common-models-33>`__.
"""
|
[
"everyaction.core.EAProperty",
"everyaction.exception.EAException",
"everyaction.core.EAProperty.shared"
] |
[((40152, 40170), 'everyaction.core.EAProperty', 'EAProperty', (['"""type"""'], {}), "('type')\n", (40162, 40170), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((60390, 60425), 'everyaction.core.EAProperty', 'EAProperty', (['"""creator"""'], {'factory': 'User'}), "('creator', factory=User)\n", (60400, 60425), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((62397, 62432), 'everyaction.core.EAProperty', 'EAProperty', (['"""creator"""'], {'factory': 'User'}), "('creator', factory=User)\n", (62407, 62432), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((63218, 63236), 'everyaction.core.EAProperty', 'EAProperty', (['"""type"""'], {}), "('type')\n", (63228, 63236), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((63253, 63265), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (63263, 63265), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((77443, 77465), 'everyaction.core.EAProperty', 'EAProperty', (['"""campaign"""'], {}), "('campaign')\n", (77453, 77465), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((77852, 77915), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""action"""', 'factory': 'JobActionType.make'}), "(singular_alias='action', factory=JobActionType.make)\n", (77862, 77915), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((77926, 77953), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'JobFile'}), '(factory=JobFile)\n', (77936, 77953), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((79934, 79946), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (79944, 79946), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((87647, 87715), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""response"""', 'factory': 'SurveyCanvassResponse'}), "(singular_alias='response', factory=SurveyCanvassResponse)\n", (87657, 87715), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((89524, 89585), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""phone"""', 'factory': 'OrganizationPhone'}), "(singular_alias='phone', factory=OrganizationPhone)\n", (89534, 89585), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((89598, 89651), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""shift"""', 'factory': 'ShiftType'}), "(singular_alias='shift', factory=ShiftType)\n", (89608, 89651), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((90288, 90329), 'everyaction.core.EAProperty', 'EAProperty', ([], {'is_array': '(True)', 'factory': 'Status'}), '(is_array=True, factory=Status)\n', (90298, 90329), False, 'from everyaction.core 
import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((91801, 91828), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'JobFile'}), '(factory=JobFile)\n', (91811, 91828), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((92428, 92475), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""note"""', 'factory': 'Note'}), "(singular_alias='note', factory=Note)\n", (92438, 92475), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((93045, 93071), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'Status'}), '(factory=Status)\n', (93055, 93071), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((5753, 5784), 'everyaction.core.EAProperty', 'EAProperty', (['"""accepted_one_time"""'], {}), "('accepted_one_time')\n", (5763, 5784), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((5814, 5859), 'everyaction.core.EAProperty', 'EAProperty', (['"""accepted_recurring"""', '"""recurring"""'], {}), "('accepted_recurring', 'recurring')\n", (5824, 5859), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((5872, 5884), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (5882, 5884), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((5901, 5919), 'everyaction.core.EAProperty', 'EAProperty', (['"""type"""'], {}), "('type')\n", (5911, 5919), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((5931, 5943), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (5941, 5943), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((5972, 6005), 'everyaction.core.EAProperty', 'EAProperty', (['"""additional_envelope"""'], {}), "('additional_envelope')\n", (5982, 6005), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((6032, 6044), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (6042, 6044), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((6065, 6083), 'everyaction.core.EAProperty', 'EAProperty', (['"""type"""'], {}), "('type')\n", (6075, 6083), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((6107, 6142), 'everyaction.core.EAProperty', 'EAProperty', (['"""multiple_mode"""', '"""mode"""'], {}), "('multiple_mode', 'mode')\n", (6117, 6142), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((6160, 6190), 'everyaction.core.EAProperty', 'EAProperty', (['"""alternate"""', '"""alt"""'], {}), "('alternate', 'alt')\n", (6170, 6190), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((6203, 6215), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (6213, 6215), False, 'from 
everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'),

Records 6238–19771 of this extract_api value are EAProperty calls that all share one shape:
((start, end), 'everyaction.core.EAProperty', 'EAProperty', (<positional arg source>, <keyword arg source>), "(<arguments>)\n", (start + 10, end), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n')
Only the call span and the arguments vary; they are listed one record per line below (argument lists that wrap in the source file are shown on a single line):

(6238, 6258)  EAProperty('amount')
(6279, 6310)  EAProperty('type_name', 'type')
(6335, 6382)  EAProperty('sticky_subgroups', 'sticky_groups')
(6404, 6448)  EAProperty(singular_alias='assignable_type')
(6468, 6487)  EAProperty('value')
(6509, 6527)  EAProperty('type')
(6541, 6553)  EAProperty()
(6572, 6593)  EAProperty('average')
(6609, 6626)  EAProperty('bad')
(6644, 6665)  EAProperty('account')
(6685, 6722)  EAProperty('bank_account', 'account')
(6738, 6757)  EAProperty('batch')
(6781, 6840)  EAProperty('biography_image', 'bio_image_url', 'bio_image')
(6858, 6879)  EAProperty('bounces')
(6896, 6918)  EAProperty('campaign')
(6944, 6985)  EAProperty('column_mappable', 'mappable')
(7007, 7035)  EAProperty('allows_repeats')
(7054, 7080)  EAProperty('allows_goals')
(7111, 7150)  EAProperty('allows_multiple_locations')
(7178, 7214)  EAProperty('allows_multiple_shifts')
(7240, 7274)  EAProperty('allows_role_maximums')
(7300, 7334)  EAProperty('allows_role_minimums')
(7352, 7375)  EAProperty('canvasser')
(7402, 7426)  EAProperty('canvass_id')
(7455, 7481)  EAProperty('canvass_guid')
(7501, 7543)  EAProperty('cases', singular_alias='case')
(7564, 7608)  EAProperty('issues', singular_alias='issue')
(7630, 7675)  EAProperty('stories', singular_alias='story')
(7699, 7725)  EAProperty('cc_exp_month')
(7748, 7773)  EAProperty('cc_exp_year')
(7794, 7829)  EAProperty('change_type', 'change')
(7851, 7888)  EAProperty('channel_type', 'channel')
(7904, 7916)  EAProperty()
(7934, 7946)  EAProperty()
(7957, 7969)  EAProperty()
(7980, 7992)  EAProperty()
(8005, 8023)  EAProperty('code')
(8037, 8056)  EAProperty('codes')
(8082, 8126)  EAProperty('collected_location', 'location')
(8138, 8150)  EAProperty()
(8172, 8195)  EAProperty('delimiter')
(8212, 8232)  EAProperty('column')
(8252, 8275)  EAProperty('committee')
(8297, 8321)  EAProperty('confidence')
(8335, 8347)  EAProperty()
(8381, 8459)  EAProperty('contact_preference_code', 'preference_code', 'contact_preference')
(8477, 8489)  EAProperty()
(8509, 8535)  EAProperty('contact_mode')
(8555, 8581)  EAProperty('contact_type')
(8605, 8632)  EAProperty('contributions')
(8653, 8679)  EAProperty('contribution')
(8705, 8717)  EAProperty()
(8741, 8753)  EAProperty()
(8772, 8808)  EAProperty('copy_to', is_array=True)
(8826, 8847)  EAProperty('country')
(8870, 8895)  EAProperty('cover_costs')
(8914, 8933)  EAProperty('after')
(8953, 8973)  EAProperty('before')
(8989, 9010)  EAProperty('creator')
(9037, 9060)  EAProperty('committee')
(9081, 9133)  EAProperty('created_by', 'creator_email', 'creator')
(9151, 9172)  EAProperty('created')
(9194, 9225)  EAProperty('cc_last4', 'last4')
(9240, 9252)  EAProperty()
(9271, 9289)  EAProperty('type')
(9302, 9314)  EAProperty()
(9339, 9358)  EAProperty('group')
(9378, 9397)  EAProperty('field')
(9425, 9457)  EAProperty('group_type', 'type')
(9481, 9528)  EAProperty('property_key', 'custom_key', 'key')
(9540, 9552)  EAProperty()
(9571, 9589)  EAProperty('mode')
(9608, 9620)  EAProperty()
(9639, 9669)  EAProperty('adjusted', 'date')
(9689, 9712)  EAProperty('canvassed')
(9732, 9756)  EAProperty('cards_sent')
(9778, 9804)  EAProperty('changed_from')
(9824, 9848)  EAProperty('changed_to')
(9865, 9885)  EAProperty('closed')
(9903, 9924)  EAProperty('created')
(9944, 9967)  EAProperty('deposited')
(9985, 10006)  EAProperty('expired')
(10033, 10087)  EAProperty('expiration_date', 'expiration', 'expires')
(10104, 10124)  EAProperty('issued')
(10146, 10183)  EAProperty('last_renewed', 'renewed')
(10202, 10224)  EAProperty('modified')
(10242, 10264)  EAProperty('birthday')
(10281, 10301)  EAProperty('opened')
(10318, 10338)  EAProperty('posted')
(10358, 10381)  EAProperty('processed')
(10400, 10422)  EAProperty('received')
(10442, 10465)  EAProperty('scheduled')
(10480, 10498)  EAProperty('sent')
(10524, 10559)  EAProperty('start_date', 'started')
(10577, 10598)  EAProperty('thanked')
(10616, 10638)  EAProperty('decrease')
(10659, 10684)  EAProperty('default_end')
(10707, 10734)  EAProperty('default_start')
(10752, 10764)  EAProperty()
(10784, 10796)  EAProperty()
(10815, 10827)  EAProperty()
(10845, 10863)  EAProperty('desc')
(10883, 10908)  EAProperty('designation')
(10928, 10948)  EAProperty('prefix')
(10974, 11002)  EAProperty('marketing_code')
(11029, 11083)  EAProperty('field_value', 'disclosure_value', 'value')
(11101, 11113)  EAProperty()
(11131, 11152)  EAProperty('display')
(11168, 11186)  EAProperty('door')
(11209, 11253)  EAProperty('dot_net_time_zone', 'time_zone')
(11271, 11293)  EAProperty('download')
(11323, 11378)  EAProperty('dues_attribution_type', 'dues_attribution')
(11403, 11448)  EAProperty('dues_entity_type', 'dues_entity')
(11468, 11492)  EAProperty('duplicates')
(11514, 11558)  EAProperty(singular_alias='election_record')
(11577, 11589)  EAProperty()
(11601, 11613)  EAProperty()
(11628, 11665)  EAProperty(factory=_employer_factory)
(11696, 11734)  EAProperty('employer_bargaining_unit')
(11751, 11773)  EAProperty('employer')
(11787, 11804)  EAProperty('end')
(11818, 11835)  EAProperty('end')
(11857, 11890)  EAProperty('end_override', 'end')
(11915, 11958)  EAProperty('enrollment_type', 'enrollment')
(11977, 11999)  EAProperty('envelope')
(12015, 12034)  EAProperty('error')
(12048, 12067)  EAProperty('event')
(12085, 12117)  EAProperty('event_type', 'type')
(12136, 12161)  EAProperty('event_types')
(12190, 12216)  EAProperty('exclude_self')
(12229, 12264)  EAProperty(factory=_expand_factory)
(12296, 12324)  EAProperty('expected_count')
(12362, 12409)  EAProperty('expected_total', 'expected_amount')
(12435, 12501)  EAProperty('exported_records', 'record_count', 'records', 'count')
(12511, 12523)  EAProperty()
(12540, 12562)  EAProperty('external')
(12578, 12597)  EAProperty('field')
(12613, 12640)  EAProperty('field', 'type')
(12662, 12701)  EAProperty('size_kb_limit', 'kb_limit')
(12724, 12753)  EAProperty('financial_batch')
(12772, 12792)  EAProperty('finder')
(12808, 12827)  EAProperty('first')
(12842, 12862)  EAProperty('folder')
(12879, 12899)  EAProperty('folder')
(12924, 12953)  EAProperty('formal_envelope')
(12976, 12988)  EAProperty()
(13014, 13068)  EAProperty('form_submissions', 'forms', 'submissions')
(13084, 13096)  EAProperty()
(13112, 13124)  EAProperty()
(13139, 13159)  EAProperty('sender')
(13177, 13198)  EAProperty('subject')
(13213, 13225)  EAProperty()
(13246, 13265)  EAProperty('after')
(13287, 13307)  EAProperty('before')
(13318, 13330)  EAProperty()
(13344, 13356)  EAProperty()
(13372, 13384)  EAProperty()
(13400, 13412)  EAProperty()
(13423, 13435)  EAProperty()
(13451, 13463)  EAProperty()
(13483, 13508)  EAProperty('my_campaign')
(13526, 13549)  EAProperty('my_voters')
(13575, 13603)  EAProperty('has_predefined')
(13619, 13631)  EAProperty()
(13642, 13654)  EAProperty()
(13672, 13694)  EAProperty('increase')
(13724, 13781)  EAProperty('include_auto_generated', 'include_generated')
(13806, 13854)  EAProperty('include_statuses', 'include_closed')
(13876, 13888)  EAProperty()
(13912, 13924)  EAProperty()
(13942, 13966)  EAProperty('input_type')
(13996, 14042)  EAProperty('intervention_url', 'callback_url')
(14066, 14093)  EAProperty('invalid_chars')
(14118, 14163)  EAProperty('invalid_rows_url', 'invalid_url')
(14193, 14216)  EAProperty('repeat_of')
(14231, 14251)  EAProperty('active')
(14270, 14294)  EAProperty('applicable')
(14323, 14359)  EAProperty('associated_with_badges')
(14394, 14464)  EAProperty('needs_location', 'location_required', 'requires_location')
(14486, 14527)  EAProperty('auto_generated', 'generated')
(14560, 14645)  EAProperty('confirmation_email_enabled', 'confirmation_enabled', 'confirmation')
(14670, 14736)  EAProperty('confirmed_opt_in_enabled', 'opt_in_enabled', 'opt_in')
(14754, 14797)  EAProperty('is_core', 'core_field', 'core')
(14820, 14872)  EAProperty('custom_district', 'is_custom', 'custom')
(14889, 14911)  EAProperty('editable')
(14929, 14961)  EAProperty('event_lead', 'lead')
(14980, 15004)  EAProperty('exportable')
(15019, 15039)  EAProperty('member')
(15059, 15085)  EAProperty('multi_assign')
(15108, 15147)  EAProperty('my_organization', 'my_org')
(15169, 15210)  EAProperty('offline_property', 'offline')
(15241, 15300)  EAProperty('online_actions_available', 'actions_available')
(15335, 15436)  EAProperty('only_editable_by_creating_user', 'only_editable_by_creator', 'only_creator_may_edit')
(15475, 15493)  EAProperty('open')
(15511, 15534)  EAProperty('preferred')
(15559, 15600)  EAProperty('publicly_viewable', 'public')
(15630, 15701)  EAProperty('recurring_email_enabled', 'recurring_enabled', 'recurring')
(15718, 15740)  EAProperty('required')
(15759, 15783)  EAProperty('searchable')
(15826, 15859)  EAProperty('default_share_child')
(15902, 15936)  EAProperty('default_share_master')
(15955, 15979)  EAProperty('subscribed')
(16002, 16031)  EAProperty('upsell_accepted')
(16051, 16077)  EAProperty('upsell_shown')
(16100, 16129)  EAProperty('view_restricted')
(16145, 16165)  EAProperty('status')
(16175, 16187)  EAProperty()
(16206, 16229)  EAProperty('reference')
(16244, 16262)  EAProperty('last')
(16272, 16284)  EAProperty()
(16298, 16310)  EAProperty()
(16326, 16338)  EAProperty()
(16350, 16362)  EAProperty()
(16374, 16386)  EAProperty()
(16398, 16410)  EAProperty()
(16454, 16487)  EAProperty('credit_card_payment')
(16530, 16621)  EAProperty('joint_fundraising_contribution', 'fundraising_contribution', 'fundraising')
(16669, 16722)  EAProperty('partnership_contribution', 'partnership')
(16762, 16789)  EAProperty('reimbursement')
(16813, 16840)  EAProperty('links_clicked')
(16856, 16874)  EAProperty('list')
(16896, 16929)  EAProperty('description', 'desc')
(16944, 16970)  EAProperty('list', 'name')
(16987, 17007)  EAProperty('status')
(17017, 17029)  EAProperty()
(17051, 17088)  EAProperty('mapping_type', 'mapping')
(17106, 17127)  EAProperty('matched')
(17150, 17188)  EAProperty('matched_count', 'matched')
(17207, 17237)  EAProperty('match', 'percent')
(17247, 17259)  EAProperty()
(17278, 17300)  EAProperty('max_door')
(17321, 17356)  EAProperty('max_length', 'max_len')
(17372, 17384)  EAProperty()
(17405, 17429)  EAProperty('max_people')
(17456, 17483)  EAProperty('max_box_chars')
(17498, 17515)  EAProperty('max')
(17533, 17553)  EAProperty('median')
(17570, 17590)  EAProperty('medium')
(17604, 17616)  EAProperty()
(17633, 17653)  EAProperty('middle')
(17663, 17675)  EAProperty()
(17690, 17707)  EAProperty('min')
(17724, 17746)  EAProperty('modifier')
(17768, 17823)  EAProperty('modified_by', 'modifier_email', 'modifier')
(17849, 17887)  EAProperty('next_transaction', 'next')
(17902, 17914)  EAProperty()
(17926, 17938)  EAProperty()
(17954, 17974)  EAProperty('nulled')
(17987, 17999)  EAProperty()
(18019, 18051)  EAProperty('num_cards', 'cards')
(18076, 18115)  EAProperty('times_renewed', 'renewals')
(18132, 18144)  EAProperty()
(18172, 18216)  EAProperty('reference_number', 'ref_number')
(18236, 18259)  EAProperty('only_mine')
(18275, 18294)  EAProperty('opens')
(18312, 18332)  EAProperty('opt_in')
(18346, 18368)  EAProperty('order_by')
(18398, 18447)  EAProperty('organization_contact', 'org_contact')
(18485, 18552)  EAProperty('organization_contact_official', 'org_contact_official')
(18573, 18606)  EAProperty('organization', 'org')
(18630, 18680)  EAProperty('org_roles', singular_alias='org_role')
(18697, 18709)  EAProperty()
(18730, 18752)  EAProperty('original')
(18775, 18815)  EAProperty('original_count', 'original')
(18832, 18849)  EAProperty('OOR')
(18879, 18943)  EAProperty('overwrite_existing_id', 'overwrite_id', 'overwrite')
(18962, 18987)  EAProperty('parent_code')
(19012, 19053)  EAProperty('parent_department', 'parent')
(19073, 19109)  EAProperty('parent_field', 'parent')
(19131, 19167)  EAProperty('parent_field', 'parent')
(19182, 19202)  EAProperty('parent')
(19227, 19278)  EAProperty('parent', factory=_organization_factory)
(19298, 19324)  EAProperty('parent_value')
(19336, 19348)  EAProperty()
(19366, 19378)  EAProperty()
(19399, 19432)  EAProperty('id_column', 'id_col')
(19451, 19476)  EAProperty('person_type')
(19493, 19505)  EAProperty()
(19517, 19529)  EAProperty()
(19543, 19562)  EAProperty('phone')
(19580, 19600)  EAProperty('number')
(19613, 19625)  EAProperty()
(19639, 19651)  EAProperty()
(19672, 19684)  EAProperty()
(19709, 19737)  EAProperty('primary_custom')

((19759, 19771), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (19769, 19771), False, 'from everyaction.core import 
EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((19795, 19807), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (19805, 19807), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((19832, 19844), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (19842, 19844), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((19861, 19898), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""property"""'}), "(singular_alias='property')\n", (19871, 19898), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((19913, 19925), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (19923, 19925), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((19942, 19964), 'everyaction.core.EAProperty', 'EAProperty', (['"""question"""'], {}), "('question')\n", (19952, 19964), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((19985, 20009), 'everyaction.core.EAProperty', 'EAProperty', (['"""recipients"""'], {}), "('recipients')\n", (19995, 20009), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20027, 20048), 'everyaction.core.EAProperty', 'EAProperty', (['"""records"""'], {}), "('records')\n", (20037, 20048), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20069, 20093), 'everyaction.core.EAProperty', 'EAProperty', (['"""recurrence"""'], {}), "('recurrence')\n", (20079, 20093), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20113, 20136), 'everyaction.core.EAProperty', 'EAProperty', (['"""reference"""'], {}), "('reference')\n", (20123, 20136), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20157, 20183), 'everyaction.core.EAProperty', 'EAProperty', (['"""relationship"""'], {}), "('relationship')\n", (20167, 20183), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20205, 20228), 'everyaction.core.EAProperty', 'EAProperty', (['"""remaining"""'], {}), "('remaining')\n", (20215, 20228), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20247, 20269), 'everyaction.core.EAProperty', 'EAProperty', (['"""reply_to"""'], {}), "('reply_to')\n", (20257, 20269), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20299, 20377), 'everyaction.core.EAProperty', 'EAProperty', (['"""custom_field_ids"""', '"""custom_fields"""'], {'singular_alias': '"""custom_field"""'}), "('custom_field_ids', 'custom_fields', singular_alias='custom_field')\n", (20309, 20377), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20399, 20443), 'everyaction.core.EAProperty', 'EAProperty', (['"""fields"""'], {'singular_alias': 
'"""field"""'}), "('fields', singular_alias='field')\n", (20409, 20443), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20462, 20510), 'everyaction.core.EAProperty', 'EAProperty', (['"""ids"""'], {'singular_alias': '"""requested_id"""'}), "('ids', singular_alias='requested_id')\n", (20472, 20510), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20529, 20551), 'everyaction.core.EAProperty', 'EAProperty', (['"""resource"""'], {}), "('resource')\n", (20539, 20551), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20571, 20621), 'everyaction.core.EAProperty', 'EAProperty', (['"""resources"""'], {'singular_alias': '"""resource"""'}), "('resources', singular_alias='resource')\n", (20581, 20621), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20639, 20656), 'everyaction.core.EAProperty', 'EAProperty', (['"""url"""'], {}), "('url')\n", (20649, 20656), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20673, 20695), 'everyaction.core.EAProperty', 'EAProperty', (['"""response"""'], {}), "('response')\n", (20683, 20695), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20708, 20720), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (20718, 20720), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20739, 20764), 'everyaction.core.EAProperty', 'EAProperty', (['"""result_code"""'], {}), "('result_code')\n", (20749, 20764), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20791, 20865), 'everyaction.core.EAProperty', 'EAProperty', (['"""result_column_name"""', '"""result_column"""', '"""column_name"""', '"""column"""'], {}), "('result_column_name', 'result_column', 'column_name', 'column')\n", (20801, 20865), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20893, 20932), 'everyaction.core.EAProperty', 'EAProperty', (['"""size_kb_limit"""', '"""kb_limit"""'], {}), "('size_kb_limit', 'kb_limit')\n", (20903, 20932), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((20960, 20999), 'everyaction.core.EAProperty', 'EAProperty', (['"""size_kb_limit"""', '"""kb_limit"""'], {}), "('size_kb_limit', 'kb_limit')\n", (20970, 20999), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21024, 21051), 'everyaction.core.EAProperty', 'EAProperty', (['"""outcome_group"""'], {}), "('outcome_group')\n", (21034, 21051), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21068, 21080), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (21078, 21080), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21098, 21130), 'everyaction.core.EAProperty', 'EAProperty', 
(['"""saved_list"""', '"""list"""'], {}), "('saved_list', 'list')\n", (21108, 21130), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21148, 21171), 'everyaction.core.EAProperty', 'EAProperty', (['"""score_col"""'], {}), "('score_col')\n", (21158, 21171), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21185, 21204), 'everyaction.core.EAProperty', 'EAProperty', (['"""score"""'], {}), "('score')\n", (21195, 21204), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21225, 21247), 'everyaction.core.EAProperty', 'EAProperty', (['"""question"""'], {}), "('question')\n", (21235, 21247), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21267, 21298), 'everyaction.core.EAProperty', 'EAProperty', (['"""search"""', '"""keyword"""'], {}), "('search', 'keyword')\n", (21277, 21298), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21326, 21357), 'everyaction.core.EAProperty', 'EAProperty', (['"""selected_one_time"""'], {}), "('selected_one_time')\n", (21336, 21357), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21387, 21427), 'everyaction.core.EAProperty', 'EAProperty', (['"""ethnicities"""'], {'is_array': '(True)'}), "('ethnicities', is_array=True)\n", (21397, 21427), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21455, 21478), 'everyaction.core.EAProperty', 'EAProperty', (['"""ethnicity"""'], {}), "('ethnicity')\n", (21465, 21478), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21504, 21550), 'everyaction.core.EAProperty', 'EAProperty', (['"""genders"""'], {'singular_alias': '"""gender"""'}), "('genders', singular_alias='gender')\n", (21514, 21550), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21587, 21632), 'everyaction.core.EAProperty', 'EAProperty', (['"""language_preference"""', '"""language"""'], {}), "('language_preference', 'language')\n", (21597, 21632), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21655, 21673), 'everyaction.core.EAProperty', 'EAProperty', (['"""race"""'], {}), "('race')\n", (21665, 21673), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21697, 21731), 'everyaction.core.EAProperty', 'EAProperty', (['"""races"""'], {'is_array': '(True)'}), "('races', is_array=True)\n", (21707, 21731), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21768, 21838), 'everyaction.core.EAProperty', 'EAProperty', (['"""sexual_orientations"""'], {'singular_alias': '"""sexual_orientation"""'}), "('sexual_orientations', singular_alias='sexual_orientation')\n", (21778, 21838), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21862, 
21905), 'everyaction.core.EAProperty', 'EAProperty', (['"""sender_display"""', '"""sender_name"""'], {}), "('sender_display', 'sender_name')\n", (21872, 21905), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21930, 21956), 'everyaction.core.EAProperty', 'EAProperty', (['"""sender_email"""'], {}), "('sender_email')\n", (21940, 21956), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21966, 21978), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (21976, 21978), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((21994, 22013), 'everyaction.core.EAProperty', 'EAProperty', (['"""short"""'], {}), "('short')\n", (22004, 22013), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22034, 22058), 'everyaction.core.EAProperty', 'EAProperty', (['"""sms_opt_in"""'], {}), "('sms_opt_in')\n", (22044, 22058), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22074, 22101), 'everyaction.core.EAProperty', 'EAProperty', (['"""source"""', '"""url"""'], {}), "('source', 'url')\n", (22084, 22101), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22119, 22139), 'everyaction.core.EAProperty', 'EAProperty', (['"""source"""'], {}), "('source')\n", (22129, 22139), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22159, 22178), 'everyaction.core.EAProperty', 'EAProperty', (['"""after"""'], {}), "('after')\n", (22169, 22178), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22199, 22219), 'everyaction.core.EAProperty', 'EAProperty', (['"""before"""'], {}), "('before')\n", (22209, 22219), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22235, 22254), 'everyaction.core.EAProperty', 'EAProperty', (['"""start"""'], {}), "('start')\n", (22245, 22254), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22270, 22289), 'everyaction.core.EAProperty', 'EAProperty', (['"""start"""'], {}), "('start')\n", (22280, 22289), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22313, 22350), 'everyaction.core.EAProperty', 'EAProperty', (['"""start_override"""', '"""start"""'], {}), "('start_override', 'start')\n", (22323, 22350), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22366, 22385), 'everyaction.core.EAProperty', 'EAProperty', (['"""state"""'], {}), "('state')\n", (22376, 22385), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22407, 22438), 'everyaction.core.EAProperty', 'EAProperty', (['"""state"""', '"""province"""'], {}), "('state', 'province')\n", (22417, 22438), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, 
EAProperty, EAValue\n'), ((22456, 22476), 'everyaction.core.EAProperty', 'EAProperty', (['"""static"""'], {}), "('static')\n", (22466, 22476), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22489, 22501), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (22499, 22501), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22516, 22528), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (22526, 22528), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22545, 22565), 'everyaction.core.EAProperty', 'EAProperty', (['"""status"""'], {}), "('status')\n", (22555, 22565), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22590, 22610), 'everyaction.core.EAProperty', 'EAProperty', (['"""status"""'], {}), "('status')\n", (22600, 22610), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22633, 22671), 'everyaction.core.EAProperty', 'EAProperty', (['"""supporter_group"""', '"""group"""'], {}), "('supporter_group', 'group')\n", (22643, 22671), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22684, 22696), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (22694, 22696), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22719, 22741), 'everyaction.core.EAProperty', 'EAProperty', (['"""question"""'], {}), "('question')\n", (22729, 22741), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22764, 22786), 'everyaction.core.EAProperty', 'EAProperty', (['"""response"""'], {}), "('response')\n", (22774, 22786), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22806, 22835), 'everyaction.core.EAProperty', 'EAProperty', (['"""sync_end"""', '"""end"""'], {}), "('sync_end', 'end')\n", (22816, 22835), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22857, 22890), 'everyaction.core.EAProperty', 'EAProperty', (['"""sync_start"""', '"""start"""'], {}), "('sync_start', 'start')\n", (22867, 22890), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22905, 22925), 'everyaction.core.EAProperty', 'EAProperty', (['"""target"""'], {}), "('target')\n", (22915, 22925), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22943, 22963), 'everyaction.core.EAProperty', 'EAProperty', (['"""target"""'], {}), "('target')\n", (22953, 22963), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22974, 22986), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (22984, 22986), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((22998, 23010), 'everyaction.core.EAProperty', 
'EAProperty', ([], {}), '()\n', (23008, 23010), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23026, 23049), 'everyaction.core.EAProperty', 'EAProperty', (['"""tolerance"""'], {}), "('tolerance')\n", (23036, 23049), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23069, 23093), 'everyaction.core.EAProperty', 'EAProperty', (['"""total_paid"""'], {}), "('total_paid')\n", (23079, 23093), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23109, 23128), 'everyaction.core.EAProperty', 'EAProperty', (['"""total"""'], {}), "('total')\n", (23119, 23128), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23143, 23161), 'everyaction.core.EAProperty', 'EAProperty', (['"""turf"""'], {}), "('turf')\n", (23153, 23161), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23172, 23184), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (23182, 23184), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23202, 23214), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (23212, 23214), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23227, 23245), 'everyaction.core.EAProperty', 'EAProperty', (['"""type"""'], {}), "('type')\n", (23237, 23245), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23258, 23276), 'everyaction.core.EAProperty', 'EAProperty', (['"""unit"""'], {}), "('unit')\n", (23268, 23276), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23301, 23343), 'everyaction.core.EAProperty', 'EAProperty', (['"""unmatched_count"""', '"""unmatched"""'], {}), "('unmatched_count', 'unmatched')\n", (23311, 23343), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23366, 23392), 'everyaction.core.EAProperty', 'EAProperty', (['"""unsubscribes"""'], {}), "('unsubscribes')\n", (23376, 23392), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23409, 23429), 'everyaction.core.EAProperty', 'EAProperty', (['"""upsell"""'], {}), "('upsell')\n", (23419, 23429), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23439, 23451), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (23449, 23451), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23466, 23484), 'everyaction.core.EAProperty', 'EAProperty', (['"""user"""'], {}), "('user')\n", (23476, 23484), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23504, 23537), 'everyaction.core.EAProperty', 'EAProperty', (['"""first_name"""', '"""first"""'], {}), "('first_name', 'first')\n", (23514, 23537), False, 'from 
everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23556, 23587), 'everyaction.core.EAProperty', 'EAProperty', (['"""last_name"""', '"""last"""'], {}), "('last_name', 'last')\n", (23566, 23587), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23599, 23611), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (23609, 23611), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23623, 23640), 'everyaction.core.EAProperty', 'EAProperty', (['"""van"""'], {}), "('van')\n", (23633, 23640), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23657, 23678), 'everyaction.core.EAProperty', 'EAProperty', (['"""webhook"""'], {}), "('webhook')\n", (23667, 23678), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23692, 23704), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (23702, 23704), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23726, 23780), 'everyaction.core.EAProperty', 'EAProperty', (['"""zip_code"""', '"""zip"""', '"""postal_code"""', '"""postal"""'], {}), "('zip_code', 'zip', 'postal_code', 'postal')\n", (23736, 23780), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((23789, 23801), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (23799, 23801), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((37877, 37935), 'everyaction.exception.EAException', 'EAException', (['f"""Unrecognized Job Action Type {action_type}"""'], {}), "(f'Unrecognized Job Action Type {action_type}')\n", (37888, 37935), False, 'from everyaction.exception import EAException\n'), ((44568, 44580), 'everyaction.core.EAProperty', 'EAProperty', ([], {}), '()\n', (44578, 44580), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((45102, 45158), 'everyaction.exception.EAException', 'EAException', (['f"""Unrecognized Script Response type: {typ}"""'], {}), "(f'Unrecognized Script Response type: {typ}')\n", (45113, 45158), False, 'from everyaction.exception import EAException\n'), ((51564, 51628), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""activist_code"""', 'factory': 'ActivistCode'}), "(singular_alias='activist_code', factory=ActivistCode)\n", (51574, 51628), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((51651, 51704), 'everyaction.core.EAProperty', 'EAProperty', (['"""criteria"""'], {'factory': 'ScoreApprovalCriteria'}), "('criteria', factory=ScoreApprovalCriteria)\n", (51661, 51704), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((51726, 51812), 'everyaction.core.EAProperty', 'EAProperty', (['"""available"""', '"""values"""'], {'singular_alias': '"""value"""', 'factory': 'AvailableValue'}), "('available', 'values', singular_alias='value', factory=\n AvailableValue)\n", (51736, 51812), False, 
'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((51828, 51862), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'BargainingUnit'}), '(factory=BargainingUnit)\n', (51838, 51862), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((51884, 51952), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""bargaining_unit"""', 'factory': 'BargainingUnit'}), "(singular_alias='bargaining_unit', factory=BargainingUnit)\n", (51894, 51952), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((51969, 52026), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""canvasser"""', 'factory': 'Canvasser'}), "(singular_alias='canvasser', factory=Canvasser)\n", (51979, 52026), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((52047, 52092), 'everyaction.core.EAProperty', 'EAProperty', (['"""context"""'], {'factory': 'CanvassContext'}), "('context', factory=CanvassContext)\n", (52057, 52092), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((52107, 52139), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'NoteCategory'}), '(factory=NoteCategory)\n', (52117, 52139), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((52153, 52204), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""column"""', 'factory': 'Column'}), "(singular_alias='column', factory=Column)\n", (52163, 52204), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((52240, 52333), 'everyaction.core.EAProperty', 'EAProperty', (['"""include_columns"""', '"""include"""'], {'singular_alias': '"""include_column"""', 'factory': 'Column'}), "('include_columns', 'include', singular_alias='include_column',\n factory=Column)\n", (52250, 52333), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((52395, 52499), 'everyaction.core.EAProperty', 'EAProperty', (['"""confirmation_email"""', '"""confirmation_data"""', '"""confirmation"""'], {'factory': 'ConfirmationEmailData'}), "('confirmation_email', 'confirmation_data', 'confirmation',\n factory=ConfirmationEmailData)\n", (52405, 52499), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((52559, 52606), 'everyaction.core.EAProperty', 'EAProperty', (['"""attributions"""'], {'factory': 'Attribution'}), "('attributions', factory=Attribution)\n", (52569, 52606), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((52627, 52672), 'everyaction.core.EAProperty', 'EAProperty', (['"""history"""'], {'factory': 'ContactHistory'}), "('history', factory=ContactHistory)\n", (52637, 52672), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((52702, 52772), 'everyaction.core.EAProperty', 'EAProperty', (['"""contribution_account"""', '"""account_obj"""'], {'factory': 'BankAccount'}), 
"('contribution_account', 'account_obj', factory=BankAccount)\n", (52712, 52772), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((52796, 52885), 'everyaction.core.EAProperty', 'EAProperty', (['"""custom_values"""'], {'singular_alias': '"""custom_value"""', 'factory': 'CustomFieldValue'}), "('custom_values', singular_alias='custom_value', factory=\n CustomFieldValue)\n", (52806, 52885), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((52903, 52976), 'everyaction.core.EAProperty', 'EAProperty', (['"""properties"""'], {'singular_alias': '"""property"""', 'factory': 'KeyValuePair'}), "('properties', singular_alias='property', factory=KeyValuePair)\n", (52913, 52976), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((52994, 53053), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""department"""', 'factory': 'Department'}), "(singular_alias='department', factory=Department)\n", (53004, 53053), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((53071, 53102), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'Designation'}), '(factory=Designation)\n', (53081, 53102), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((53128, 53174), 'everyaction.core.EAProperty', 'EAProperty', (['"""constraints"""'], {'factory': 'Constraints'}), "('constraints', factory=Constraints)\n", (53138, 53174), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((53202, 53317), 'everyaction.core.EAProperty', 'EAProperty', (['"""disclosures"""', '"""field_values"""', '"""values"""'], {'singular_alias': '"""disclosure"""', 'factory': 'DisclosureFieldValue'}), "('disclosures', 'field_values', 'values', singular_alias=\n 'disclosure', factory=DisclosureFieldValue)\n", (53212, 53317), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((53383, 53421), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'DistrictFieldValue'}), '(factory=DistrictFieldValue)\n', (53393, 53421), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((53447, 53519), 'everyaction.core.EAProperty', 'EAProperty', (['"""values"""'], {'singular_alias': '"""value"""', 'factory': 'DistrictFieldValue'}), "('values', singular_alias='value', factory=DistrictFieldValue)\n", (53457, 53519), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((53534, 53562), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'Currency'}), '(factory=Currency)\n', (53544, 53562), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((53601, 53670), 'everyaction.core.EAProperty', 'EAProperty', (['"""distributions"""'], {'factory': 'EmailMessageContentDistributions'}), "('distributions', factory=EmailMessageContentDistributions)\n", (53611, 53670), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, 
EAProperty, EAValue\n'), ((53681, 53705), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'File'}), '(factory=File)\n', (53691, 53705), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((53717, 53764), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""file"""', 'factory': 'File'}), "(singular_alias='file', factory=File)\n", (53727, 53764), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((53796, 53872), 'everyaction.core.EAProperty', 'EAProperty', (['"""first_source_code"""', '"""source_code"""'], {'factory': 'MembershipSourceCode'}), "('first_source_code', 'source_code', factory=MembershipSourceCode)\n", (53806, 53872), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((53883, 53912), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'BatchForm'}), '(factory=BatchForm)\n', (53893, 53912), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((53930, 53982), 'everyaction.core.EAProperty', 'EAProperty', (['"""geo"""', '"""location"""'], {'factory': 'GeoCoordinate'}), "('geo', 'location', factory=GeoCoordinate)\n", (53940, 53982), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54001, 54060), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""identifier"""', 'factory': 'Identifier'}), "(singular_alias='identifier', factory=Identifier)\n", (54011, 54060), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54079, 54137), 'everyaction.core.EAProperty', 'EAProperty', (['"""cell_status"""', '"""is_cell"""'], {'factory': 'IsCellStatus'}), "('cell_status', 'is_cell', factory=IsCellStatus)\n", (54089, 54137), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54152, 54180), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'JobClass'}), '(factory=JobClass)\n', (54162, 54180), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54208, 54271), 'everyaction.core.EAProperty', 'EAProperty', (['"""limited_to"""'], {'is_array': '(True)', 'factory': 'AvailableValue'}), "('limited_to', is_array=True, factory=AvailableValue)\n", (54218, 54271), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54287, 54342), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""listener"""', 'factory': 'Listener'}), "(singular_alias='listener', factory=Listener)\n", (54297, 54342), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54355, 54381), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'Pledge'}), '(factory=Pledge)\n', (54365, 54381), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54402, 54479), 'everyaction.core.EAProperty', 'EAProperty', (['"""possible"""'], {'singular_alias': '"""possible_value"""', 'factory': 'KeyValuePair'}), 
"('possible', singular_alias='possible_value', factory=KeyValuePair)\n", (54412, 54479), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54502, 54538), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'PreferredPronoun'}), '(factory=PreferredPronoun)\n', (54512, 54538), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54556, 54599), 'everyaction.core.EAProperty', 'EAProperty', (['"""program"""'], {'factory': 'BatchProgram'}), "('program', factory=BatchProgram)\n", (54566, 54599), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54624, 54701), 'everyaction.core.EAProperty', 'EAProperty', (['"""relations"""'], {'singular_alias': '"""relation"""', 'factory': 'RelationalMapping'}), "('relations', singular_alias='relation', factory=RelationalMapping)\n", (54634, 54701), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54719, 54775), 'everyaction.core.EAProperty', 'EAProperty', (['"""files"""'], {'singular_alias': '"""file"""', 'factory': 'File'}), "('files', singular_alias='file', factory=File)\n", (54729, 54775), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54786, 54815), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'EventRole'}), '(factory=EventRole)\n', (54796, 54815), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54827, 54879), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""role"""', 'factory': 'EventRole'}), "(singular_alias='role', factory=EventRole)\n", (54837, 54879), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54895, 54936), 'everyaction.core.EAProperty', 'EAProperty', (['"""list"""'], {'factory': 'SavedListData'}), "('list', factory=SavedListData)\n", (54905, 54936), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54948, 54973), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'Score'}), '(factory=Score)\n', (54958, 54973), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((54986, 55035), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""score"""', 'factory': 'Score'}), "(singular_alias='score', factory=Score)\n", (54996, 55035), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((55047, 55077), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'EventShift'}), '(factory=EventShift)\n', (55057, 55077), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((55090, 55144), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""shift"""', 'factory': 'EventShift'}), "(singular_alias='shift', factory=EventShift)\n", (55100, 55144), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((55162, 55203), 
'everyaction.core.EAProperty', 'EAProperty', (['"""status"""'], {'factory': 'StoryStatus'}), "('status', factory=StoryStatus)\n", (55172, 55203), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((55219, 55274), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""subgroup"""', 'factory': 'Subgroup'}), "(singular_alias='subgroup', factory=Subgroup)\n", (55229, 55274), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((55293, 55354), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""suppression"""', 'factory': 'Suppression'}), "(singular_alias='suppression', factory=Suppression)\n", (55303, 55354), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((55378, 55450), 'everyaction.core.EAProperty', 'EAProperty', (['"""entities"""'], {'singular_alias': '"""entity"""', 'factory': 'SupportedEntity'}), "('entities', singular_alias='entity', factory=SupportedEntity)\n", (55388, 55450), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((55473, 55548), 'everyaction.core.EAProperty', 'EAProperty', (['"""update_stats"""', '"""statistics"""', '"""stats"""'], {'factory': 'UpdateStatistics'}), "('update_stats', 'statistics', 'stats', factory=UpdateStatistics)\n", (55483, 55548), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((55561, 55617), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""value"""', 'factory': 'ValueMapping'}), "(singular_alias='value', factory=ValueMapping)\n", (55571, 55617), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((69876, 69903), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'Address'}), '(factory=Address)\n', (69886, 69903), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((69919, 69972), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""address"""', 'factory': 'Address'}), "(singular_alias='address', factory=Address)\n", (69929, 69972), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((69995, 70084), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""bulk_import_field"""', 'factory': 'ChangedEntityBulkImportField'}), "(singular_alias='bulk_import_field', factory=\n ChangedEntityBulkImportField)\n", (70005, 70084), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((70091, 70138), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""code"""', 'factory': 'Code'}), "(singular_alias='code', factory=Code)\n", (70101, 70138), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((70157, 70219), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""custom_field"""', 'factory': 'CustomField'}), "(singular_alias='custom_field', factory=CustomField)\n", (70167, 70219), False, 'from everyaction.core import EAObject, EAObjectWithID, 
EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((70235, 70295), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""district"""', 'factory': 'DistrictField'}), "(singular_alias='district', factory=DistrictField)\n", (70245, 70295), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((70316, 70382), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""district_field"""', 'factory': 'DistrictField'}), "(singular_alias='district_field', factory=DistrictField)\n", (70326, 70382), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((70395, 70444), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""email"""', 'factory': 'Email'}), "(singular_alias='email', factory=Email)\n", (70405, 70444), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((70470, 70535), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""content"""', 'factory': 'EmailMessageContent'}), "(singular_alias='content', factory=EmailMessageContent)\n", (70480, 70535), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((70548, 70597), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""error"""', 'factory': 'Error'}), "(singular_alias='error', factory=Error)\n", (70558, 70597), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((70622, 70679), 'everyaction.core.EAProperty', 'EAProperty', (['"""extended_source"""'], {'factory': 'ExtendedSourceCode'}), "('extended_source', factory=ExtendedSourceCode)\n", (70632, 70679), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((70704, 70820), 'everyaction.core.EAProperty', 'EAProperty', (['"""field_mappings"""', '"""value_mappings"""', '"""mappings"""'], {'singular_alias': '"""mapping"""', 'factory': 'FieldValueMapping'}), "('field_mappings', 'value_mappings', 'mappings', singular_alias=\n 'mapping', factory=FieldValueMapping)\n", (70714, 70820), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((70878, 70948), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""job_class"""', 'factory': 'BargainingUnitJobClass'}), "(singular_alias='job_class', factory=BargainingUnitJobClass)\n", (70888, 70948), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((70962, 71020), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""parent"""', 'factory': 'MappingParent'}), "(singular_alias='parent', factory=MappingParent)\n", (70972, 71020), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((71033, 71082), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""phone"""', 'factory': 'Phone'}), "(singular_alias='phone', factory=Phone)\n", (71043, 71082), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((71106, 71168), 'everyaction.core.EAProperty', 
'EAProperty', ([], {'singular_alias': '"""recorded_address"""', 'factory': 'Address'}), "(singular_alias='recorded_address', factory=Address)\n", (71116, 71168), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((71184, 71250), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""response"""', 'factory': 'ScriptResponse.make'}), "(singular_alias='response', factory=ScriptResponse.make)\n", (71194, 71250), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((71280, 71354), 'everyaction.core.EAProperty', 'EAProperty', (['"""responses"""'], {'singular_alias': '"""response"""', 'factory': 'SurveyResponse'}), "('responses', singular_alias='response', factory=SurveyResponse)\n", (71290, 71354), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((71365, 71411), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""tag"""', 'factory': 'Code'}), "(singular_alias='tag', factory=Code)\n", (71375, 71411), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((71442, 71547), 'everyaction.core.EAProperty', 'EAProperty', (['"""registration_batches"""', '"""batches"""'], {'singular_alias': '"""batch"""', 'factory': 'VoterRegistrationBatch'}), "('registration_batches', 'batches', singular_alias='batch',\n factory=VoterRegistrationBatch)\n", (71452, 71547), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((71597, 71635), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""work_area"""'}), "(singular_alias='work_area')\n", (71607, 71635), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((88450, 88478), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'Location'}), '(factory=Location)\n', (88460, 88478), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((88491, 88550), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""field"""', 'factory': 'BulkImportField'}), "(singular_alias='field', factory=BulkImportField)\n", (88501, 88550), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((88565, 88593), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'Location'}), '(factory=Location)\n', (88575, 88593), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((88609, 88664), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""location"""', 'factory': 'Location'}), "(singular_alias='location', factory=Location)\n", (88619, 88664), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((88683, 88752), 'everyaction.core.EAProperty', 'EAProperty', (['"""mappings"""'], {'singular_alias': '"""mapping"""', 'factory': 'MappingType'}), "('mappings', singular_alias='mapping', factory=MappingType)\n", (88693, 88752), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, 
EAValue\n'), ((88765, 88791), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'Person'}), '(factory=Person)\n', (88775, 88791), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((88813, 88887), 'everyaction.core.EAProperty', 'EAProperty', (['"""questions"""'], {'singular_alias': '"""question"""', 'factory': 'SurveyQuestion'}), "('questions', singular_alias='question', factory=SurveyQuestion)\n", (88823, 88887), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((88903, 88958), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""worksite"""', 'factory': 'Worksite'}), "(singular_alias='worksite', factory=Worksite)\n", (88913, 88958), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((91613, 91674), 'everyaction.core.EAProperty', 'EAProperty', ([], {'singular_alias': '"""action"""', 'factory': 'BulkImportAction'}), "(singular_alias='action', factory=BulkImportAction)\n", (91623, 91674), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((91690, 91727), 'everyaction.core.EAProperty', 'EAProperty', (['"""type"""'], {'factory': 'EventType'}), "('type', factory=EventType)\n", (91700, 91727), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((92663, 92688), 'everyaction.core.EAProperty', 'EAProperty', ([], {'factory': 'Event'}), '(factory=Event)\n', (92673, 92688), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n'), ((37491, 37582), 'everyaction.exception.EAException', 'EAException', (['"""Expected actionType property or alias to be specified for JobActionType"""'], {}), "(\n 'Expected actionType property or alias to be specified for JobActionType')\n", (37502, 37582), False, 'from everyaction.exception import EAException\n'), ((44740, 44787), 'everyaction.exception.EAException', 'EAException', (['"""Expected type for ScriptResponse"""'], {}), "('Expected type for ScriptResponse')\n", (44751, 44787), False, 'from everyaction.exception import EAException\n'), ((37376, 37407), 'everyaction.core.EAProperty.shared', 'EAProperty.shared', (['"""actionType"""'], {}), "('actionType')\n", (37393, 37407), False, 'from everyaction.core import EAObject, EAObjectWithID, EAObjectWithIDAndName, EAObjectWithName, EAProperty, EAValue\n')]
|
# Based on intern/cycles/blender/blender_camera.cpp
from mathutils import Matrix
class BlenderCamera:
def __init__(self, b_render):
self.nearclip = 1e-5
self.type = 'CAMERA_PERSPECTIVE'
self.ortho_scale = 1.0
self.lens = 50.0
self.aperturesize = 0.0
self.apertureblades = 0
self.aperturerotation = 0.0
self.focaldistance = 10.0
self.shift = [0, 0]
self.offset = [0, 0]
self.zoom = 1.0
self.pixelaspect = [1.0, 1.0]
self.aperture_ratio = 1.0
self.sensor_fit = 'AUTO' # AUTO, HORIZONTAL, VERTICAL
self.sensor_width = 36.0
self.sensor_height = 24.0
self.full_width = int(b_render.resolution_x * b_render.resolution_percentage / 100)
self.full_height = int(b_render.resolution_y * b_render.resolution_percentage / 100)
# [left, right, bottom, top]
self.border = [0.0, 1.0, 0.0, 1.0]
self.viewport_camera_border = [0.0, 1.0, 0.0, 1.0]
#self.pano_viewplane
self.matrix = Matrix()
def modified(self, other):
        # XXX: incomplete comparison; only a handful of fields are checked so far
if self.type != other.type:
return True
if self.lens != other.lens:
return True
if self.full_width != other.full_width or self.full_height != other.full_height:
return True
if self.matrix != other.matrix:
return True
return False
def from_view(self, b_engine, b_scene, b_v3d, b_rv3d, width, height):
# b_engine is used in the b_ob branch (but not atm)
self.nearclip = b_v3d.clip_start
# clip_end
self.lens = b_v3d.lens
#self.shuttertime
if b_rv3d.view_perspective == 'CAMERA':
#ob = b_v3d.use_local_camera if b_v3d.camera else b_scene.camera
#if ob:
# self.from_object(b_engine, b_ob, skip_panorama)
# else:
# Magic zoom formula
zoom = b_rv3d.view_camera_zoom
zoom = 1.4142 + zoom / 50.0
zoom *= zoom
self.zoom = 2.0 / zoom
self.offset = b_rv3d.view_camera_offset
        elif b_rv3d.view_perspective == 'ORTHO':
            # orthographic viewport parameters are not ported yet
            pass
self.zoom *= 2.0
self.matrix = b_rv3d.view_matrix.inverted()
def viewplane(self, width, height):
"""
Return viewplane, aspectratio, sensor_size
"""
xratio = 1.0 * width * self.pixelaspect[0]
yratio = 1.0 * height * self.pixelaspect[1]
if self.sensor_fit == 'AUTO':
horizontal_fit = xratio > yratio
sensor_size = self.sensor_width
elif self.sensor_fit == 'HORIZONTAL':
horizontal_fit = True
sensor_size = self.sensor_width
else:
horizontal_fit = False
sensor_size = self.sensor_height
if horizontal_fit:
aspectratio = xratio / yratio
xaspect = aspectratio
yaspect = 1.0
else:
aspectratio = yratio / xratio
xaspect = 1.0
yaspect = aspectratio
if self.type == 'CAMERA_ORTHOGRAPHIC':
xaspect = xaspect * self.ortho_scale / (aspectratio * 2.0)
yaspect = yaspect * self.ortho_scale / (aspectratio * 2.0)
aspectratio = self.ortho_scale / 2.0
if self.type == 'CAMERA_PANORAMA':
viewplane = None
else:
            # CAMERA_PERSPECTIVE (ortho scale, if any, was already folded into
            # xaspect/yaspect above)
# [left, right, bottom, top]
viewplane = [-xaspect, xaspect, -yaspect, yaspect]
# Zoom for 3D camera view
viewplane = list(map(lambda v: v*self.zoom, viewplane))
# Modify viewplane with camera shift and 3D camera view offset
dx = 2.0 * (aspectratio * self.shift[0] + self.offset[0] * xaspect * 2.0)
dy = 2.0 * (aspectratio * self.shift[1] + self.offset[1] * yaspect * 2.0)
viewplane[0] += dx
viewplane[1] += dx
viewplane[2] += dy
viewplane[3] += dy
return viewplane, aspectratio, sensor_size
def sync_view(b_scene, b_v3d, b_rv3d, width, height):
bcam = BlenderCamera(b_scene.render)
bcam.from_view(None, b_scene, b_v3d, b_rv3d, width, height)
#bcam.border
#bcam.sync()
return bcam
"""
def sync_camera(b_render, b_scene, width, height, viewname):
bcam = BlenderCamera()
bcam.pixelaspect = [b_render.pixel_aspect_x, b_render.pixel_aspect_y]
#bcam.shuttertime = b_render.motion_blur_shutter
if b_render.use_border:
bcam.border = [b_render.border_min_x, b_render.border_max_x, b_render.border_min_y, b_render.border_max_y]
b_ob = b_scene.camera
#if b_ob:
# blender_camera_from_object(b_cam, b_engine, b_ob)
# b_engine.camera_model_matrix(b_ob, bcam.use_spherical_stereo, b_ob_matrix);
# bcam.matrix = get_transform(b_ob_matrix);
blender_camera_sync(cam, bcam, width, height, viewname)
"""
|
[
"mathutils.Matrix"
] |
[((1133, 1141), 'mathutils.Matrix', 'Matrix', ([], {}), '()\n', (1139, 1141), False, 'from mathutils import Matrix\n')]
|
from unittest.mock import create_autospec
from jgikbase.idmapping.storage.id_mapping_storage import IDMappingStorage
from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup
from jgikbase.idmapping.core.user import AuthsourceID, User, Username
from jgikbase.idmapping.core.tokens import Token, HashedToken
from jgikbase.test.idmapping.test_utils import assert_exception_correct
from pytest import raises
from jgikbase.test.idmapping.core.tokens_test import is_base64
import time
from jgikbase.idmapping.core.errors import NoSuchAuthsourceError
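# The UserLookupSet tests below mock time.time to drive two caches: get_user
# results (token -> user) and is_valid_user results (username -> validity),
# covering default TTLs, handler-supplied epoch/relative TTLs and max-size
# eviction. The Local* tests cover the storage-backed lookup itself.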
def test_set_init_fail():
handler = create_autospec(UserLookup, spec_set=True, instance=True)
fail_set_init(None, TypeError('user_lookup cannot be None'))
fail_set_init(set([handler, None]), TypeError('None item in user_lookup'))
def fail_set_init(handlers, expected):
with raises(Exception) as got:
UserLookupSet(handlers)
assert_exception_correct(got.value, expected)
def test_set_get_user_default_cache_ttl():
handler = create_autospec(UserLookup, spec_set=True, instance=True)
timer = create_autospec(time.time, spec_set=True)
handler.get_authsource_id.return_value = AuthsourceID('as')
hset = UserLookupSet(set([handler]), timer)
check_set_get_user_default_cache_ttl(hset, handler, timer, [0, 299, 300, 301])
def test_set_get_user_default_cache_ttl_set_ttl():
check_set_get_user_default_cache_ttl_set_ttl(100, [0, 99, 100, 101])
check_set_get_user_default_cache_ttl_set_ttl(500, [0, 499, 500, 501])
def check_set_get_user_default_cache_ttl_set_ttl(ttl, timervals):
handler = create_autospec(UserLookup, spec_set=True, instance=True)
timer = create_autospec(time.time, spec_set=True)
handler.get_authsource_id.return_value = AuthsourceID('as')
hset = UserLookupSet(set([handler]), timer, cache_user_expiration=ttl)
check_set_get_user_default_cache_ttl(hset, handler, timer, timervals)
def check_set_get_user_default_cache_ttl(hset, handler, timer, timervals):
handler.get_user.return_value = (User(AuthsourceID('as'), Username('u')), False, None, None)
timer.return_value = timervals[0]
# user will not be in cache
assert hset.get_user(AuthsourceID('as'), Token('t')) == \
(User(AuthsourceID('as'), Username('u')), False)
# user is now cached
handler.get_user.return_value = None # should cause error if called from now on
timer.return_value = timervals[1] # just below default cache time
assert hset.get_user(AuthsourceID('as'), Token('t')) == \
(User(AuthsourceID('as'), Username('u')), False)
# now expire the user
handler.get_user.return_value = (User(AuthsourceID('as'), Username('u')), True, None, None)
timer.return_value = timervals[2]
assert hset.get_user(AuthsourceID('as'), Token('t')) == \
(User(AuthsourceID('as'), Username('u')), True)
# get the user again, should be cached.
handler.get_user.return_value = None # should cause error if called from now on
timer.return_value = timervals[3]
assert hset.get_user(AuthsourceID('as'), Token('t')) == \
(User(AuthsourceID('as'), Username('u')), True)
assert handler.get_user.call_args_list == [((Token('t'),), {}), ((Token('t'),), {})]
def test_set_get_user_cache_max_count():
# testing the default of 10k is just silly, not going to bother.
handler = create_autospec(UserLookup, spec_set=True, instance=True)
timer = create_autospec(time.time, spec_set=True)
handler.get_authsource_id.return_value = AuthsourceID('as')
hset = UserLookupSet(set([handler]), timer, cache_max_size=2)
# add user 1
handler.get_user.return_value = (User(AuthsourceID('as'), Username('u1')), False, None, None)
timer.return_value = 0
assert hset.get_user(AuthsourceID('as'), Token('t1')) == \
(User(AuthsourceID('as'), Username('u1')), False)
# add user 2
handler.get_user.return_value = (User(AuthsourceID('as'), Username('u2')), True, None, None)
timer.return_value = 1
assert hset.get_user(AuthsourceID('as'), Token('t2')) == \
(User(AuthsourceID('as'), Username('u2')), True)
# add user 3, user 1 should now be evicted from the cache
handler.get_user.return_value = (User(AuthsourceID('as'), Username('u3')), False, None, None)
timer.return_value = 2
assert hset.get_user(AuthsourceID('as'), Token('t3')) == \
(User(AuthsourceID('as'), Username('u3')), False)
# should only need a handler call for user 1 at this point
handler.get_user.return_value = (User(AuthsourceID('as'), Username('u1')), True, None, None)
timer.return_value = 3
# get the 3 users. Get user 1 last otherwise it'll evict user 2 from the cache
assert hset.get_user(AuthsourceID('as'), Token('t2')) == \
(User(AuthsourceID('as'), Username('u2')), True)
assert hset.get_user(AuthsourceID('as'), Token('t3')) == \
(User(AuthsourceID('as'), Username('u3')), False)
assert hset.get_user(AuthsourceID('as'), Token('t1')) == \
(User(AuthsourceID('as'), Username('u1')), True)
# check that the calls to get_user are as expected:
assert handler.get_user.call_args_list == [((Token('t1'),), {}),
((Token('t2'),), {}),
((Token('t3'),), {}),
((Token('t1'),), {})]
def test_set_get_user_rel_ttl():
check_set_get_user_handler_ttl(None, 3, [100, 102, 103])
def test_set_get_user_epoch_ttl():
check_set_get_user_handler_ttl(1003, None, [1000, 1002, 1003])
def test_set_get_user_epoch_lt_rel_ttl():
# tests the case where both epoch and relative ttls are provided, but the epoch ttl is
# closer than the relative ttl.
check_set_get_user_handler_ttl(1003, 6, [1000, 1002, 1003])
def test_set_get_user_rel_lt_epoch_ttl():
# tests the case where both epoch and relative ttls are provided, but the relative ttl is
# closer than the epoch ttl.
check_set_get_user_handler_ttl(1007, 4, [1000, 1003, 1004])
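# Shared checker: the effective expiry is whichever TTL lapses first, the
# absolute epoch time or the relative TTL in seconds. timervals[1] falls just
# inside the cache window, timervals[2] at/after it.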
def check_set_get_user_handler_ttl(epoch, rel, timervals):
handler = create_autospec(UserLookup, spec_set=True, instance=True)
timer = create_autospec(time.time, spec_set=True)
handler.get_authsource_id.return_value = AuthsourceID('as')
hset = UserLookupSet(set([handler]), timer)
handler.get_user.return_value = (User(AuthsourceID('as'), Username('u1')), False, epoch, rel)
timer.return_value = timervals[0]
# cache user for X secs
assert hset.get_user(AuthsourceID('as'), Token('t')) == \
(User(AuthsourceID('as'), Username('u1')), False)
# force an error if the handler is called
handler.get_user.return_value = None
timer.return_value = timervals[1]
assert hset.get_user(AuthsourceID('as'), Token('t')) == \
(User(AuthsourceID('as'), Username('u1')), False)
# expect handler call at Y sec
handler.get_user.return_value = (User(AuthsourceID('as'), Username('u1')), True, epoch, rel)
timer.return_value = timervals[2]
assert hset.get_user(AuthsourceID('as'), Token('t')) == \
(User(AuthsourceID('as'), Username('u1')), True)
# check correct number of calls to get_user
assert handler.get_user.call_args_list == [((Token('t'),), {}), ((Token('t'),), {})]
def test_set_get_user_fail_None_input():
hset = UserLookupSet(set())
fail_set_get_user(hset, None, Token('t'), TypeError('authsource_id cannot be None'))
fail_set_get_user(hset, AuthsourceID('a'), None, TypeError('token cannot be None'))
def test_set_get_user_no_authsource():
handler = create_autospec(UserLookup, spec_set=True, instance=True)
handler.get_authsource_id.return_value = AuthsourceID('as')
fail_set_get_user(UserLookupSet(set([handler])),
AuthsourceID('bs'),
Token('t'),
NoSuchAuthsourceError('bs'))
def fail_set_get_user(hset, authsource_id, token, expected):
with raises(Exception) as got:
hset.get_user(authsource_id, token)
assert_exception_correct(got.value, expected)
def test_set_is_valid_user_default_cache_ttl():
handler = create_autospec(UserLookup, spec_set=True, instance=True)
timer = create_autospec(time.time, spec_set=True)
handler.get_authsource_id.return_value = AuthsourceID('as')
hset = UserLookupSet(set([handler]), timer)
check_set_is_valid_user_default_cache_ttl(hset, handler, timer, [0, 3599, 3600, 3601])
def test_set_is_valid_user_default_cache_ttl_set_ttl():
check_set_is_valid_user_default_cache_ttl_set_ttl(100, [0, 99, 100, 101])
check_set_is_valid_user_default_cache_ttl_set_ttl(10000, [0, 9999, 10000, 10001])
def check_set_is_valid_user_default_cache_ttl_set_ttl(ttl, timervals):
handler = create_autospec(UserLookup, spec_set=True, instance=True)
timer = create_autospec(time.time, spec_set=True)
handler.get_authsource_id.return_value = AuthsourceID('as')
hset = UserLookupSet(set([handler]), timer, cache_is_valid_expiration=ttl)
check_set_is_valid_user_default_cache_ttl(hset, handler, timer, timervals)
def check_set_is_valid_user_default_cache_ttl(hset, handler, timer, timervals):
handler.is_valid_user.return_value = (True, None, None)
timer.return_value = timervals[0]
# user will not be in cache
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u'))) is True
# user is now cached
handler.is_valid_user.return_value = None # should cause error if called from now on
timer.return_value = timervals[1] # just below default cache time
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u'))) is True
# now expire the user
handler.is_valid_user.return_value = (True, None, None)
timer.return_value = timervals[2]
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u'))) is True
# get the user again, should be cached
handler.is_valid_user.return_value = None # should cause error if called from now on
timer.return_value = timervals[3]
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u'))) is True
assert handler.is_valid_user.call_args_list == [((Username('u'),), {}), ((Username('u'),), {})]
def test_set_is_valid_user_invalid_user():
# invalid users shouldn't get cached.
handler = create_autospec(UserLookup, spec_set=True, instance=True)
timer = create_autospec(time.time, spec_set=True)
handler.get_authsource_id.return_value = AuthsourceID('as')
hset = UserLookupSet(set([handler]), timer)
handler.is_valid_user.return_value = (False, None, None)
timer.return_value = 0
# user will not be in cache
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u'))) is False
# would normally expect a cache time of 3600s, but should not be cached here.
timer.return_value = 10
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u'))) is False
assert handler.is_valid_user.call_args_list == [((Username('u'),), {}), ((Username('u'),), {})]
def test_set_is_valid_user_cache_max_count():
# testing the default of 10k is just silly, not going to bother.
handler = create_autospec(UserLookup, spec_set=True, instance=True)
timer = create_autospec(time.time, spec_set=True)
handler.get_authsource_id.return_value = AuthsourceID('as')
hset = UserLookupSet(set([handler]), timer, cache_max_size=2)
# add user 1
handler.is_valid_user.return_value = (True, None, None)
timer.return_value = 0
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u1'))) is True
    # add user 2. is_valid_user needs no new return value: only True results are cached
timer.return_value = 1
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u2'))) is True
# add user 3, user 1 should now be evicted from the cache
timer.return_value = 2
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u3'))) is True
# force an assert fail if is_valid_user is called early:
handler.is_valid_user.return_value = (False, None, None)
timer.return_value = 3
# get the 3 users. Get user 1 last otherwise it'll evict user 2 from the cache
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u2'))) is True
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u3'))) is True
# get user 1
handler.is_valid_user.return_value = (True, None, None)
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u1'))) is True
# check that the calls to is_valid_user are as expected:
assert handler.is_valid_user.call_args_list == [((Username('u1'),), {}),
((Username('u2'),), {}),
((Username('u3'),), {}),
((Username('u1'),), {})]
def test_set_is_valid_user_rel_ttl():
check_set_is_valid_user_handler_ttl(None, 3, [100, 102, 103])
def test_set_is_valid_user_epoch_ttl():
check_set_is_valid_user_handler_ttl(1003, None, [1000, 1002, 1003])
def test_set_is_valid_user_epoch_lt_rel_ttl():
# tests the case where both epoch and relative ttls are provided, but the epoch ttl is
# closer than the relative ttl.
check_set_is_valid_user_handler_ttl(1003, 6, [1000, 1002, 1003])
def test_set_is_valid_user_rel_lt_epoch_ttl():
# tests the case where both epoch and relative ttls are provided, but the relative ttl is
# closer than the epoch ttl.
check_set_is_valid_user_handler_ttl(1007, 4, [1000, 1003, 1004])
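# Same first-expiry rule as the get_user checker above: whichever of the
# epoch and relative TTLs lapses first evicts the cached validity.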
def check_set_is_valid_user_handler_ttl(epoch, rel, timervals):
handler = create_autospec(UserLookup, spec_set=True, instance=True)
timer = create_autospec(time.time, spec_set=True)
handler.get_authsource_id.return_value = AuthsourceID('as')
hset = UserLookupSet(set([handler]), timer)
handler.is_valid_user.return_value = (True, epoch, rel)
timer.return_value = timervals[0]
# cache user for X secs
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u1'))) is True
# force an error if the handler is called
handler.is_valid_user.return_value = None
timer.return_value = timervals[1]
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u1'))) is True
# expect handler call at Y sec
handler.is_valid_user.return_value = (True, epoch, rel)
timer.return_value = timervals[2]
assert hset.is_valid_user(User(AuthsourceID('as'), Username('u1'))) is True
# check correct number of calls to get_user
assert handler.is_valid_user.call_args_list == [((Username('u1'),), {}),
((Username('u1'),), {})]
def test_set_is_valid_user_None_inputs():
hset = UserLookupSet(set())
fail_set_is_valid_user(hset, None, TypeError('user cannot be None'))
def test_set_is_valid_user_no_authsource():
handler = create_autospec(UserLookup, spec_set=True, instance=True)
handler.get_authsource_id.return_value = AuthsourceID('as')
fail_set_is_valid_user(UserLookupSet(set([handler])),
User(AuthsourceID('bs'), Username('n')),
NoSuchAuthsourceError('bs'))
def fail_set_is_valid_user(hset, user, expected):
with raises(Exception) as got:
hset.is_valid_user(user)
assert_exception_correct(got.value, expected)
def test_local_init_fail():
with raises(Exception) as got:
LocalUserLookup(None)
assert_exception_correct(got.value, TypeError('storage cannot be None'))
def test_local_get_authsource():
storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
assert LocalUserLookup(storage).get_authsource_id() == AuthsourceID('local')
def test_local_get_user_admin():
check_local_get_user_admin(True)
check_local_get_user_admin(False)
def check_local_get_user_admin(isadmin):
storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
storage.get_user.return_value = (Username('bar'), isadmin)
assert LocalUserLookup(storage).get_user(Token('foo')) == \
(User(AuthsourceID('local'), Username('bar')), isadmin, None, 300)
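    # SHA-256 hex digest of the token 'foo', as LocalUserLookup stores it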
thash = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae'
assert storage.get_user.call_args_list == [((HashedToken(thash),), {})]
def test_local_get_user_fail():
storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
with raises(Exception) as got:
LocalUserLookup(storage).get_user(None)
assert_exception_correct(got.value, TypeError('token cannot be None'))
def test_local_is_valid_user():
storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
storage.user_exists.return_value = True
luh = LocalUserLookup(storage)
assert luh.is_valid_user(Username('foo')) == (True, None, 3600)
storage.user_exists.return_value = False
assert luh.is_valid_user(Username('bar')) == (False, None, 3600)
assert storage.user_exists.call_args_list == [
((Username('foo'),), {}),
((Username('bar'),), {})]
def test_local_is_valid_user_fail():
storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
with raises(Exception) as got:
LocalUserLookup(storage).is_valid_user(None)
assert_exception_correct(got.value, TypeError('username cannot be None'))
def test_local_create_user():
storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
t = LocalUserLookup(storage).create_user(Username('foo'))
assert is_base64(t.token) is True
    assert len(t.token) == 28
assert storage.create_local_user.call_args_list == \
[((Username('foo'), t.get_hashed_token()), {})]
def test_local_create_user_fail():
storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
with raises(Exception) as got:
LocalUserLookup(storage).create_user(None)
assert_exception_correct(got.value, TypeError('username cannot be None'))
def test_local_new_token():
storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
t = LocalUserLookup(storage).new_token(Username('bar'))
assert is_base64(t.token) is True
    assert len(t.token) == 28
assert storage.update_local_user_token.call_args_list == \
[((Username('bar'), t.get_hashed_token()), {})]
def test_local_new_token_fail():
storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
with raises(Exception) as got:
LocalUserLookup(storage).new_token(None)
assert_exception_correct(got.value, TypeError('username cannot be None'))
def test_local_set_user_as_admin():
storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
LocalUserLookup(storage).set_user_as_admin(Username('n'), True)
LocalUserLookup(storage).set_user_as_admin(Username('r'), False)
assert storage.set_local_user_as_admin.call_args_list == [((Username('n'), True), {}),
((Username('r'), False), {})]
def test_local_set_user_as_admin_fail():
storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
with raises(Exception) as got:
LocalUserLookup(storage).set_user_as_admin(None, True)
assert_exception_correct(got.value, TypeError('username cannot be None'))
def test_local_get_users():
storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
storage.get_users.return_value = {Username('foo'): False, Username('bar'): True}
assert LocalUserLookup(storage).get_users() == {Username('foo'): False,
Username('bar'): True}
assert storage.get_users.call_args_list == [((), {})]
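# Illustrative wiring sketch (an assumption, mirroring the fixtures above, not
# part of the test suite): LocalUserLookup plugged into a UserLookupSet with a
# mocked storage backend.
def _example_wiring():
    storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
    local = LocalUserLookup(storage)
    token = local.create_user(Username('alice'))  # stores a hashed token
    storage.get_user.return_value = (Username('alice'), False)
    hset = UserLookupSet(set([local]))
    # resolves via the 'local' authsource to (User('local', 'alice'), False)
    return hset.get_user(AuthsourceID('local'), token)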
|
[
"jgikbase.idmapping.core.user_lookup.LocalUserLookup",
"unittest.mock.create_autospec",
"jgikbase.test.idmapping.core.tokens_test.is_base64",
"jgikbase.test.idmapping.test_utils.assert_exception_correct",
"jgikbase.idmapping.core.tokens.HashedToken",
"jgikbase.idmapping.core.errors.NoSuchAuthsourceError",
"pytest.raises",
"jgikbase.idmapping.core.user_lookup.UserLookupSet",
"jgikbase.idmapping.core.user.AuthsourceID",
"jgikbase.idmapping.core.user.Username",
"jgikbase.idmapping.core.tokens.Token"
] |
[((620, 677), 'unittest.mock.create_autospec', 'create_autospec', (['UserLookup'], {'spec_set': '(True)', 'instance': '(True)'}), '(UserLookup, spec_set=True, instance=True)\n', (635, 677), False, 'from unittest.mock import create_autospec\n'), ((935, 980), 'jgikbase.test.idmapping.test_utils.assert_exception_correct', 'assert_exception_correct', (['got.value', 'expected'], {}), '(got.value, expected)\n', (959, 980), False, 'from jgikbase.test.idmapping.test_utils import assert_exception_correct\n'), ((1040, 1097), 'unittest.mock.create_autospec', 'create_autospec', (['UserLookup'], {'spec_set': '(True)', 'instance': '(True)'}), '(UserLookup, spec_set=True, instance=True)\n', (1055, 1097), False, 'from unittest.mock import create_autospec\n'), ((1110, 1151), 'unittest.mock.create_autospec', 'create_autospec', (['time.time'], {'spec_set': '(True)'}), '(time.time, spec_set=True)\n', (1125, 1151), False, 'from unittest.mock import create_autospec\n'), ((1197, 1215), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (1209, 1215), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((1631, 1688), 'unittest.mock.create_autospec', 'create_autospec', (['UserLookup'], {'spec_set': '(True)', 'instance': '(True)'}), '(UserLookup, spec_set=True, instance=True)\n', (1646, 1688), False, 'from unittest.mock import create_autospec\n'), ((1701, 1742), 'unittest.mock.create_autospec', 'create_autospec', (['time.time'], {'spec_set': '(True)'}), '(time.time, spec_set=True)\n', (1716, 1742), False, 'from unittest.mock import create_autospec\n'), ((1788, 1806), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (1800, 1806), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((3408, 3465), 'unittest.mock.create_autospec', 'create_autospec', (['UserLookup'], {'spec_set': '(True)', 'instance': '(True)'}), '(UserLookup, spec_set=True, instance=True)\n', (3423, 3465), False, 'from unittest.mock import create_autospec\n'), ((3478, 3519), 'unittest.mock.create_autospec', 'create_autospec', (['time.time'], {'spec_set': '(True)'}), '(time.time, spec_set=True)\n', (3493, 3519), False, 'from unittest.mock import create_autospec\n'), ((3565, 3583), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (3577, 3583), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((6202, 6259), 'unittest.mock.create_autospec', 'create_autospec', (['UserLookup'], {'spec_set': '(True)', 'instance': '(True)'}), '(UserLookup, spec_set=True, instance=True)\n', (6217, 6259), False, 'from unittest.mock import create_autospec\n'), ((6272, 6313), 'unittest.mock.create_autospec', 'create_autospec', (['time.time'], {'spec_set': '(True)'}), '(time.time, spec_set=True)\n', (6287, 6313), False, 'from unittest.mock import create_autospec\n'), ((6359, 6377), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (6371, 6377), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((7696, 7753), 'unittest.mock.create_autospec', 'create_autospec', (['UserLookup'], {'spec_set': '(True)', 'instance': '(True)'}), '(UserLookup, spec_set=True, instance=True)\n', (7711, 7753), False, 'from unittest.mock import create_autospec\n'), ((7799, 7817), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (7811, 7817), False, 'from 
jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((8145, 8190), 'jgikbase.test.idmapping.test_utils.assert_exception_correct', 'assert_exception_correct', (['got.value', 'expected'], {}), '(got.value, expected)\n', (8169, 8190), False, 'from jgikbase.test.idmapping.test_utils import assert_exception_correct\n'), ((8255, 8312), 'unittest.mock.create_autospec', 'create_autospec', (['UserLookup'], {'spec_set': '(True)', 'instance': '(True)'}), '(UserLookup, spec_set=True, instance=True)\n', (8270, 8312), False, 'from unittest.mock import create_autospec\n'), ((8325, 8366), 'unittest.mock.create_autospec', 'create_autospec', (['time.time'], {'spec_set': '(True)'}), '(time.time, spec_set=True)\n', (8340, 8366), False, 'from unittest.mock import create_autospec\n'), ((8412, 8430), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (8424, 8430), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((8881, 8938), 'unittest.mock.create_autospec', 'create_autospec', (['UserLookup'], {'spec_set': '(True)', 'instance': '(True)'}), '(UserLookup, spec_set=True, instance=True)\n', (8896, 8938), False, 'from unittest.mock import create_autospec\n'), ((8951, 8992), 'unittest.mock.create_autospec', 'create_autospec', (['time.time'], {'spec_set': '(True)'}), '(time.time, spec_set=True)\n', (8966, 8992), False, 'from unittest.mock import create_autospec\n'), ((9038, 9056), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (9050, 9056), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((10436, 10493), 'unittest.mock.create_autospec', 'create_autospec', (['UserLookup'], {'spec_set': '(True)', 'instance': '(True)'}), '(UserLookup, spec_set=True, instance=True)\n', (10451, 10493), False, 'from unittest.mock import create_autospec\n'), ((10506, 10547), 'unittest.mock.create_autospec', 'create_autospec', (['time.time'], {'spec_set': '(True)'}), '(time.time, spec_set=True)\n', (10521, 10547), False, 'from unittest.mock import create_autospec\n'), ((10593, 10611), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (10605, 10611), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((11288, 11345), 'unittest.mock.create_autospec', 'create_autospec', (['UserLookup'], {'spec_set': '(True)', 'instance': '(True)'}), '(UserLookup, spec_set=True, instance=True)\n', (11303, 11345), False, 'from unittest.mock import create_autospec\n'), ((11358, 11399), 'unittest.mock.create_autospec', 'create_autospec', (['time.time'], {'spec_set': '(True)'}), '(time.time, spec_set=True)\n', (11373, 11399), False, 'from unittest.mock import create_autospec\n'), ((11445, 11463), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (11457, 11463), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((13803, 13860), 'unittest.mock.create_autospec', 'create_autospec', (['UserLookup'], {'spec_set': '(True)', 'instance': '(True)'}), '(UserLookup, spec_set=True, instance=True)\n', (13818, 13860), False, 'from unittest.mock import create_autospec\n'), ((13873, 13914), 'unittest.mock.create_autospec', 'create_autospec', (['time.time'], {'spec_set': '(True)'}), '(time.time, spec_set=True)\n', (13888, 13914), False, 'from unittest.mock import create_autospec\n'), ((13960, 13978), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', 
(['"""as"""'], {}), "('as')\n", (13972, 13978), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((15075, 15132), 'unittest.mock.create_autospec', 'create_autospec', (['UserLookup'], {'spec_set': '(True)', 'instance': '(True)'}), '(UserLookup, spec_set=True, instance=True)\n', (15090, 15132), False, 'from unittest.mock import create_autospec\n'), ((15178, 15196), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (15190, 15196), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((15504, 15549), 'jgikbase.test.idmapping.test_utils.assert_exception_correct', 'assert_exception_correct', (['got.value', 'expected'], {}), '(got.value, expected)\n', (15528, 15549), False, 'from jgikbase.test.idmapping.test_utils import assert_exception_correct\n'), ((15771, 15834), 'unittest.mock.create_autospec', 'create_autospec', (['IDMappingStorage'], {'spec_set': '(True)', 'instance': '(True)'}), '(IDMappingStorage, spec_set=True, instance=True)\n', (15786, 15834), False, 'from unittest.mock import create_autospec\n'), ((16083, 16146), 'unittest.mock.create_autospec', 'create_autospec', (['IDMappingStorage'], {'spec_set': '(True)', 'instance': '(True)'}), '(IDMappingStorage, spec_set=True, instance=True)\n', (16098, 16146), False, 'from unittest.mock import create_autospec\n'), ((16554, 16617), 'unittest.mock.create_autospec', 'create_autospec', (['IDMappingStorage'], {'spec_set': '(True)', 'instance': '(True)'}), '(IDMappingStorage, spec_set=True, instance=True)\n', (16569, 16617), False, 'from unittest.mock import create_autospec\n'), ((16824, 16887), 'unittest.mock.create_autospec', 'create_autospec', (['IDMappingStorage'], {'spec_set': '(True)', 'instance': '(True)'}), '(IDMappingStorage, spec_set=True, instance=True)\n', (16839, 16887), False, 'from unittest.mock import create_autospec\n'), ((16943, 16967), 'jgikbase.idmapping.core.user_lookup.LocalUserLookup', 'LocalUserLookup', (['storage'], {}), '(storage)\n', (16958, 16967), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((17326, 17389), 'unittest.mock.create_autospec', 'create_autospec', (['IDMappingStorage'], {'spec_set': '(True)', 'instance': '(True)'}), '(IDMappingStorage, spec_set=True, instance=True)\n', (17341, 17389), False, 'from unittest.mock import create_autospec\n'), ((17602, 17665), 'unittest.mock.create_autospec', 'create_autospec', (['IDMappingStorage'], {'spec_set': '(True)', 'instance': '(True)'}), '(IDMappingStorage, spec_set=True, instance=True)\n', (17617, 17665), False, 'from unittest.mock import create_autospec\n'), ((17963, 18026), 'unittest.mock.create_autospec', 'create_autospec', (['IDMappingStorage'], {'spec_set': '(True)', 'instance': '(True)'}), '(IDMappingStorage, spec_set=True, instance=True)\n', (17978, 18026), False, 'from unittest.mock import create_autospec\n'), ((18235, 18298), 'unittest.mock.create_autospec', 'create_autospec', (['IDMappingStorage'], {'spec_set': '(True)', 'instance': '(True)'}), '(IDMappingStorage, spec_set=True, instance=True)\n', (18250, 18298), False, 'from unittest.mock import create_autospec\n'), ((18598, 18661), 'unittest.mock.create_autospec', 'create_autospec', (['IDMappingStorage'], {'spec_set': '(True)', 'instance': '(True)'}), '(IDMappingStorage, spec_set=True, instance=True)\n', (18613, 18661), False, 'from unittest.mock import create_autospec\n'), ((18876, 18939), 'unittest.mock.create_autospec', 'create_autospec', 
(['IDMappingStorage'], {'spec_set': '(True)', 'instance': '(True)'}), '(IDMappingStorage, spec_set=True, instance=True)\n', (18891, 18939), False, 'from unittest.mock import create_autospec\n'), ((19319, 19382), 'unittest.mock.create_autospec', 'create_autospec', (['IDMappingStorage'], {'spec_set': '(True)', 'instance': '(True)'}), '(IDMappingStorage, spec_set=True, instance=True)\n', (19334, 19382), False, 'from unittest.mock import create_autospec\n'), ((19603, 19666), 'unittest.mock.create_autospec', 'create_autospec', (['IDMappingStorage'], {'spec_set': '(True)', 'instance': '(True)'}), '(IDMappingStorage, spec_set=True, instance=True)\n', (19618, 19666), False, 'from unittest.mock import create_autospec\n'), ((873, 890), 'pytest.raises', 'raises', (['Exception'], {}), '(Exception)\n', (879, 890), False, 'from pytest import raises\n'), ((907, 930), 'jgikbase.idmapping.core.user_lookup.UserLookupSet', 'UserLookupSet', (['handlers'], {}), '(handlers)\n', (920, 930), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((7498, 7508), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t"""'], {}), "('t')\n", (7503, 7508), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((7581, 7598), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""a"""'], {}), "('a')\n", (7593, 7598), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((7894, 7912), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""bs"""'], {}), "('bs')\n", (7906, 7912), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((7936, 7946), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t"""'], {}), "('t')\n", (7941, 7946), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((7970, 7997), 'jgikbase.idmapping.core.errors.NoSuchAuthsourceError', 'NoSuchAuthsourceError', (['"""bs"""'], {}), "('bs')\n", (7991, 7997), False, 'from jgikbase.idmapping.core.errors import NoSuchAuthsourceError\n'), ((8071, 8088), 'pytest.raises', 'raises', (['Exception'], {}), '(Exception)\n', (8077, 8088), False, 'from pytest import raises\n'), ((15351, 15378), 'jgikbase.idmapping.core.errors.NoSuchAuthsourceError', 'NoSuchAuthsourceError', (['"""bs"""'], {}), "('bs')\n", (15372, 15378), False, 'from jgikbase.idmapping.core.errors import NoSuchAuthsourceError\n'), ((15441, 15458), 'pytest.raises', 'raises', (['Exception'], {}), '(Exception)\n', (15447, 15458), False, 'from pytest import raises\n'), ((15589, 15606), 'pytest.raises', 'raises', (['Exception'], {}), '(Exception)\n', (15595, 15606), False, 'from pytest import raises\n'), ((15623, 15644), 'jgikbase.idmapping.core.user_lookup.LocalUserLookup', 'LocalUserLookup', (['None'], {}), '(None)\n', (15638, 15644), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((15894, 15915), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""local"""'], {}), "('local')\n", (15906, 15915), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((16184, 16199), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""bar"""'], {}), "('bar')\n", (16192, 16199), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((16627, 16644), 'pytest.raises', 'raises', (['Exception'], {}), '(Exception)\n', (16633, 16644), False, 'from pytest import raises\n'), ((17399, 17416), 'pytest.raises', 'raises', 
(['Exception'], {}), '(Exception)\n', (17405, 17416), False, 'from pytest import raises\n'), ((17712, 17727), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""foo"""'], {}), "('foo')\n", (17720, 17727), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((17741, 17759), 'jgikbase.test.idmapping.core.tokens_test.is_base64', 'is_base64', (['t.token'], {}), '(t.token)\n', (17750, 17759), False, 'from jgikbase.test.idmapping.core.tokens_test import is_base64\n'), ((18036, 18053), 'pytest.raises', 'raises', (['Exception'], {}), '(Exception)\n', (18042, 18053), False, 'from pytest import raises\n'), ((18343, 18358), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""bar"""'], {}), "('bar')\n", (18351, 18358), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((18372, 18390), 'jgikbase.test.idmapping.core.tokens_test.is_base64', 'is_base64', (['t.token'], {}), '(t.token)\n', (18381, 18390), False, 'from jgikbase.test.idmapping.core.tokens_test import is_base64\n'), ((18671, 18688), 'pytest.raises', 'raises', (['Exception'], {}), '(Exception)\n', (18677, 18688), False, 'from pytest import raises\n'), ((18988, 19001), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""n"""'], {}), "('n')\n", (18996, 19001), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((19056, 19069), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""r"""'], {}), "('r')\n", (19064, 19069), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((19392, 19409), 'pytest.raises', 'raises', (['Exception'], {}), '(Exception)\n', (19398, 19409), False, 'from pytest import raises\n'), ((19705, 19720), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""foo"""'], {}), "('foo')\n", (19713, 19720), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((19729, 19744), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""bar"""'], {}), "('bar')\n", (19737, 19744), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((2078, 2096), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (2090, 2096), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((2098, 2111), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (2106, 2111), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((2229, 2247), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (2241, 2247), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((2249, 2259), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t"""'], {}), "('t')\n", (2254, 2259), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((2531, 2549), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (2543, 2549), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((2551, 2561), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t"""'], {}), "('t')\n", (2556, 2561), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((2694, 2712), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (2706, 2712), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((2714, 2727), 
'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (2722, 2727), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((2812, 2830), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (2824, 2830), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((2832, 2842), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t"""'], {}), "('t')\n", (2837, 2842), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((3099, 3117), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (3111, 3117), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((3119, 3129), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t"""'], {}), "('t')\n", (3124, 3129), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((3711, 3729), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (3723, 3729), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((3731, 3745), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (3739, 3745), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((3820, 3838), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (3832, 3838), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((3840, 3851), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t1"""'], {}), "('t1')\n", (3845, 3851), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((3976, 3994), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (3988, 3994), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((3996, 4010), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u2"""'], {}), "('u2')\n", (4004, 4010), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4084, 4102), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (4096, 4102), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4104, 4115), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t2"""'], {}), "('t2')\n", (4109, 4115), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((4284, 4302), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (4296, 4302), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4304, 4318), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u3"""'], {}), "('u3')\n", (4312, 4318), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4393, 4411), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (4405, 4411), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4413, 4424), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t3"""'], {}), "('t3')\n", (4418, 4424), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((4595, 4613), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (4607, 4613), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4615, 4629), 
'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (4623, 4629), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4786, 4804), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (4798, 4804), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4806, 4817), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t2"""'], {}), "('t2')\n", (4811, 4817), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((4907, 4925), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (4919, 4925), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4927, 4938), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t3"""'], {}), "('t3')\n", (4932, 4938), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((5029, 5047), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (5041, 5047), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((5049, 5060), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t1"""'], {}), "('t1')\n", (5054, 5060), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((6470, 6488), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (6482, 6488), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((6490, 6504), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (6498, 6504), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((6618, 6636), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (6630, 6636), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((6638, 6648), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t"""'], {}), "('t')\n", (6643, 6648), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((6865, 6883), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (6877, 6883), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((6885, 6895), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t"""'], {}), "('t')\n", (6890, 6895), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((7038, 7056), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (7050, 7056), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((7058, 7072), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (7066, 7072), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((7157, 7175), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (7169, 7175), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((7177, 7187), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t"""'], {}), "('t')\n", (7182, 7187), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((15288, 15306), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""bs"""'], {}), "('bs')\n", (15300, 15306), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((15308, 15321), 
'jgikbase.idmapping.core.user.Username', 'Username', (['"""n"""'], {}), "('n')\n", (15316, 15321), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((16256, 16268), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""foo"""'], {}), "('foo')\n", (16261, 16268), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((16998, 17013), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""foo"""'], {}), "('foo')\n", (17006, 17013), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((17113, 17128), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""bar"""'], {}), "('bar')\n", (17121, 17128), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((17675, 17699), 'jgikbase.idmapping.core.user_lookup.LocalUserLookup', 'LocalUserLookup', (['storage'], {}), '(storage)\n', (17690, 17699), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((18308, 18332), 'jgikbase.idmapping.core.user_lookup.LocalUserLookup', 'LocalUserLookup', (['storage'], {}), '(storage)\n', (18323, 18332), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((18945, 18969), 'jgikbase.idmapping.core.user_lookup.LocalUserLookup', 'LocalUserLookup', (['storage'], {}), '(storage)\n', (18960, 18969), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((19013, 19037), 'jgikbase.idmapping.core.user_lookup.LocalUserLookup', 'LocalUserLookup', (['storage'], {}), '(storage)\n', (19028, 19037), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((19805, 19820), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""foo"""'], {}), "('foo')\n", (19813, 19820), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((19881, 19896), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""bar"""'], {}), "('bar')\n", (19889, 19896), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((2280, 2298), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (2292, 2298), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((2300, 2313), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (2308, 2313), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((2582, 2600), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (2594, 2600), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((2602, 2615), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (2610, 2615), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((2863, 2881), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (2875, 2881), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((2883, 2896), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (2891, 2896), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((3150, 3168), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (3162, 3168), False, 'from jgikbase.idmapping.core.user import AuthsourceID, 
User, Username\n'), ((3170, 3183), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (3178, 3183), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((3872, 3890), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (3884, 3890), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((3892, 3906), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (3900, 3906), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4136, 4154), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (4148, 4154), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4156, 4170), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u2"""'], {}), "('u2')\n", (4164, 4170), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4445, 4463), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (4457, 4463), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4465, 4479), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u3"""'], {}), "('u3')\n", (4473, 4479), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4838, 4856), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (4850, 4856), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4858, 4872), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u2"""'], {}), "('u2')\n", (4866, 4872), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4959, 4977), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (4971, 4977), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((4979, 4993), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u3"""'], {}), "('u3')\n", (4987, 4993), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((5081, 5099), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (5093, 5099), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((5101, 5115), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (5109, 5115), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((6669, 6687), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (6681, 6687), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((6689, 6703), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (6697, 6703), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((6916, 6934), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (6928, 6934), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((6936, 6950), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (6944, 6950), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((7208, 7226), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (7220, 7226), 
False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((7228, 7242), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (7236, 7242), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((9466, 9484), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (9478, 9484), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((9486, 9499), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (9494, 9499), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((9733, 9751), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (9745, 9751), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((9753, 9766), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (9761, 9766), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((9938, 9956), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (9950, 9956), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((9958, 9971), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (9966, 9971), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((10190, 10208), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (10202, 10208), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((10210, 10223), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (10218, 10223), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((10818, 10836), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (10830, 10836), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((10838, 10851), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (10846, 10851), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((11011, 11029), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (11023, 11029), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((11031, 11044), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (11039, 11044), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((11672, 11690), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (11684, 11690), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((11692, 11706), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (11700, 11706), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((11874, 11892), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (11886, 11892), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((11894, 11908), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u2"""'], {}), "('u2')\n", (11902, 11908), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((12045, 12063), 
'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (12057, 12063), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((12065, 12079), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u3"""'], {}), "('u3')\n", (12073, 12079), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((12359, 12377), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (12371, 12377), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((12379, 12393), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u2"""'], {}), "('u2')\n", (12387, 12393), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((12440, 12458), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (12452, 12458), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((12460, 12474), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u3"""'], {}), "('u3')\n", (12468, 12474), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((12598, 12616), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (12610, 12616), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((12618, 12632), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (12626, 12632), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((14191, 14209), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (14203, 14209), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((14211, 14225), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (14219, 14225), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((14403, 14421), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (14415, 14421), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((14423, 14437), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (14431, 14437), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((14618, 14636), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""as"""'], {}), "('as')\n", (14630, 14636), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((14638, 14652), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (14646, 14652), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((15846, 15870), 'jgikbase.idmapping.core.user_lookup.LocalUserLookup', 'LocalUserLookup', (['storage'], {}), '(storage)\n', (15861, 15870), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((16222, 16246), 'jgikbase.idmapping.core.user_lookup.LocalUserLookup', 'LocalUserLookup', (['storage'], {}), '(storage)\n', (16237, 16246), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((16289, 16310), 'jgikbase.idmapping.core.user.AuthsourceID', 'AuthsourceID', (['"""local"""'], {}), "('local')\n", (16301, 16310), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, 
Username\n'), ((16312, 16327), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""bar"""'], {}), "('bar')\n", (16320, 16327), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((16661, 16685), 'jgikbase.idmapping.core.user_lookup.LocalUserLookup', 'LocalUserLookup', (['storage'], {}), '(storage)\n', (16676, 16685), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((17433, 17457), 'jgikbase.idmapping.core.user_lookup.LocalUserLookup', 'LocalUserLookup', (['storage'], {}), '(storage)\n', (17448, 17457), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((18070, 18094), 'jgikbase.idmapping.core.user_lookup.LocalUserLookup', 'LocalUserLookup', (['storage'], {}), '(storage)\n', (18085, 18094), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((18705, 18729), 'jgikbase.idmapping.core.user_lookup.LocalUserLookup', 'LocalUserLookup', (['storage'], {}), '(storage)\n', (18720, 18729), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((19426, 19450), 'jgikbase.idmapping.core.user_lookup.LocalUserLookup', 'LocalUserLookup', (['storage'], {}), '(storage)\n', (19441, 19450), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((19764, 19788), 'jgikbase.idmapping.core.user_lookup.LocalUserLookup', 'LocalUserLookup', (['storage'], {}), '(storage)\n', (19779, 19788), False, 'from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup\n'), ((3242, 3252), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t"""'], {}), "('t')\n", (3247, 3252), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((3263, 3273), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t"""'], {}), "('t')\n", (3268, 3273), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((5230, 5241), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t1"""'], {}), "('t1')\n", (5235, 5241), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((5299, 5310), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t2"""'], {}), "('t2')\n", (5304, 5310), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((5368, 5379), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t3"""'], {}), "('t3')\n", (5373, 5379), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((5437, 5448), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t1"""'], {}), "('t1')\n", (5442, 5448), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((7349, 7359), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t"""'], {}), "('t')\n", (7354, 7359), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((7370, 7380), 'jgikbase.idmapping.core.tokens.Token', 'Token', (['"""t"""'], {}), "('t')\n", (7375, 7380), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((10289, 10302), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (10297, 10302), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((10313, 10326), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (10321, 10326), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, 
Username\n'), ((11111, 11124), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (11119, 11124), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((11135, 11148), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u"""'], {}), "('u')\n", (11143, 11148), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((12759, 12773), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (12767, 12773), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((12836, 12850), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u2"""'], {}), "('u2')\n", (12844, 12850), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((12913, 12927), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u3"""'], {}), "('u3')\n", (12921, 12927), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((12990, 13004), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (12998, 13004), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((14766, 14780), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (14774, 14780), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((14843, 14857), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""u1"""'], {}), "('u1')\n", (14851, 14857), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((16479, 16497), 'jgikbase.idmapping.core.tokens.HashedToken', 'HashedToken', (['thash'], {}), '(thash)\n', (16490, 16497), False, 'from jgikbase.idmapping.core.tokens import Token, HashedToken\n'), ((17215, 17230), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""foo"""'], {}), "('foo')\n", (17223, 17230), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((17249, 17264), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""bar"""'], {}), "('bar')\n", (17257, 17264), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((17867, 17882), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""foo"""'], {}), "('foo')\n", (17875, 17882), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((18504, 18519), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""bar"""'], {}), "('bar')\n", (18512, 18519), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((19143, 19156), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""n"""'], {}), "('n')\n", (19151, 19156), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n'), ((19234, 19247), 'jgikbase.idmapping.core.user.Username', 'Username', (['"""r"""'], {}), "('r')\n", (19242, 19247), False, 'from jgikbase.idmapping.core.user import AuthsourceID, User, Username\n')]
|
"""Map of short links to the full urls."""
from google.cloud import firestore
from google.cloud.firestore import Increment
LINKS_COLLECTION_NAME = u'links'
TOTAL_VISITS_COLLECTION_NAME = u'total_visits'
URL_KEY = u'url'
VISIT_COUNT_KEY = u'visit_count'
COUNT_KEY = u'count'
class Links:
def __init__(self):
self.db = firestore.Client()
self.links = self.db.collection(LINKS_COLLECTION_NAME)
self.total_visits = self.db.collection(TOTAL_VISITS_COLLECTION_NAME).document('visits')
def has(self, keyword):
doc_ref = self.links.document(keyword)
doc = doc_ref.get()
return doc.exists
def insert(self, keyword, url):
doc_ref = self.links.document(keyword)
data = {URL_KEY: url}
doc_ref.set(data)
    def get(self, keyword):
        # Single read: fetch the document once instead of calling has() first.
        doc_ref = self.links.document(keyword)
        doc = doc_ref.get()
        if not doc.exists:
            return None
        url = doc.to_dict()[URL_KEY]
        self.increment(keyword)
        return url
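    # Firestore's Increment sentinel applies the addition server-side, so
    # concurrent callers don't race on a read-modify-write cycle.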
def increment(self, keyword):
doc_ref = self.links.document(keyword)
doc_ref.update({VISIT_COUNT_KEY: Increment(1)})
self.increment_total_visits()
def increment_total_visits(self):
total_visits = self.total_visits.get()
if not total_visits.exists:
self.total_visits.set({COUNT_KEY: 0})
self.total_visits.update({COUNT_KEY: Increment(1)})
def get_all_links(self):
"""Fetch all links from database."""
link_dicts = []
links = self.links.stream()
for link in links:
link_dict = link.to_dict()
link_dict['key'] = link.id
link_dicts.append(link_dict)
return link_dicts
|
[
"google.cloud.firestore.Client",
"google.cloud.firestore.Increment"
] |
[((332, 350), 'google.cloud.firestore.Client', 'firestore.Client', ([], {}), '()\n', (348, 350), False, 'from google.cloud import firestore\n'), ((1184, 1196), 'google.cloud.firestore.Increment', 'Increment', (['(1)'], {}), '(1)\n', (1193, 1196), False, 'from google.cloud.firestore import Increment\n'), ((1454, 1466), 'google.cloud.firestore.Increment', 'Increment', (['(1)'], {}), '(1)\n', (1463, 1466), False, 'from google.cloud.firestore import Increment\n')]
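
A minimal usage sketch for the Links class above (it assumes Firestore credentials are already configured in the environment; the keyword and URL are made up):

links = Links()
if not links.has('gh'):
    links.insert('gh', 'https://github.com')
print(links.get('gh'))        # -> 'https://github.com'; both visit counters are incremented
print(links.get_all_links())  # -> e.g. [{'url': 'https://github.com', 'visit_count': 1, 'key': 'gh'}]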
|
import os
import sys
curr_path = os.path.abspath(__file__)
root_path = os.path.abspath(
os.path.join(curr_path, os.path.pardir, os.path.pardir))
sys.path.append(root_path)
from pyjuque.Exchanges.CcxtExchange import CcxtExchange
from pyjuque.Plotting import PlotData
import plotly.graph_objs as go
def horizontal_line(start_time, end_time, value, color=None):
return go.layout.Shape(
type="line",
x0=start_time,
y0=value,
x1=end_time,
y1=value,
line=dict(color=color)
)
def Main():
exchange = CcxtExchange('binance')
symbol = "BTC/USDT"
interval = "4h"
df = exchange.getOHLCVHistory(symbol, interval, 8000)
start_time = df['time'][0]
end_time = df['time'][len(df)-1]
price_min = df['close'].min()
price_max = df['close'].max()
diff = price_max - price_min
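    # Standard Fibonacci retracement ratios, measured down from the high.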
level1 = price_max - 0.236 * diff
level2 = price_max - 0.382 * diff
level3 = price_max - 0.618 * diff
lines = []
lines.append(horizontal_line(
start_time, end_time, price_max,
color="rgba(255, 0, 0, 255)"))
lines.append(horizontal_line(
start_time, end_time, level1,
color="rgba(255, 255, 0, 255)"))
lines.append(horizontal_line(
start_time, end_time, level2,
color="rgba(0, 255, 0, 255)"))
lines.append(horizontal_line(
start_time, end_time, level3,
color="rgba(0, 255, 255, 255)"))
lines.append(horizontal_line(
start_time, end_time, price_min,
color="rgba(0, 0, 255, 255)"))
PlotData(df,
add_candles=False,
plot_shapes=lines,
plot_title="fib_levels_"+symbol.replace('/', '').lower() + "_" + interval,
show_plot=True)
if __name__ == '__main__':
Main()
|
[
"sys.path.append",
"os.path.abspath",
"os.path.join",
"pyjuque.Exchanges.CcxtExchange.CcxtExchange"
] |
[((33, 58), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (48, 58), False, 'import os\n'), ((149, 175), 'sys.path.append', 'sys.path.append', (['root_path'], {}), '(root_path)\n', (164, 175), False, 'import sys\n'), ((92, 147), 'os.path.join', 'os.path.join', (['curr_path', 'os.path.pardir', 'os.path.pardir'], {}), '(curr_path, os.path.pardir, os.path.pardir)\n', (104, 147), False, 'import os\n'), ((561, 584), 'pyjuque.Exchanges.CcxtExchange.CcxtExchange', 'CcxtExchange', (['"""binance"""'], {}), "('binance')\n", (573, 584), False, 'from pyjuque.Exchanges.CcxtExchange import CcxtExchange\n')]
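
The retracement arithmetic in Main() can be checked in isolation; a small sketch (the sample prices are made up):

def fib_levels(price_min, price_max, ratios=(0.236, 0.382, 0.618)):
    # Levels measured down from the high, exactly as in Main() above.
    diff = price_max - price_min
    return [price_max - r * diff for r in ratios]

print(fib_levels(10000.0, 20000.0))  # [17640.0, 16180.0, 13820.0]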
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.aiplatform_v1beta1.types import io
__protobuf__ = proto.module(
package="google.cloud.aiplatform.v1beta1",
manifest={
"ModelMonitoringObjectiveConfig",
"ModelMonitoringAlertConfig",
"ThresholdConfig",
"SamplingStrategy",
},
)
class ModelMonitoringObjectiveConfig(proto.Message):
r"""Next ID: 6
Attributes:
training_dataset (google.cloud.aiplatform_v1beta1.types.ModelMonitoringObjectiveConfig.TrainingDataset):
Training dataset for models. This field has
to be set only if
TrainingPredictionSkewDetectionConfig is
specified.
training_prediction_skew_detection_config (google.cloud.aiplatform_v1beta1.types.ModelMonitoringObjectiveConfig.TrainingPredictionSkewDetectionConfig):
The config for skew between training data and
prediction data.
prediction_drift_detection_config (google.cloud.aiplatform_v1beta1.types.ModelMonitoringObjectiveConfig.PredictionDriftDetectionConfig):
The config for drift of prediction data.
"""
class TrainingDataset(proto.Message):
r"""Training Dataset information.
Attributes:
dataset (str):
The resource name of the Dataset used to
train this Model.
gcs_source (google.cloud.aiplatform_v1beta1.types.GcsSource):
The Google Cloud Storage uri of the unmanaged
Dataset used to train this Model.
bigquery_source (google.cloud.aiplatform_v1beta1.types.BigQuerySource):
The BigQuery table of the unmanaged Dataset
used to train this Model.
data_format (str):
Data format of the dataset, only applicable
if the input is from Google Cloud Storage.
The possible formats are:
"tf-record"
The source file is a TFRecord file.
"csv"
The source file is a CSV file.
target_field (str):
The target field name the model is to
predict. This field will be excluded when doing
Predict and (or) Explain for the training data.
logging_sampling_strategy (google.cloud.aiplatform_v1beta1.types.SamplingStrategy):
Strategy to sample data from Training
Dataset. If not set, we process the whole
dataset.
"""
dataset = proto.Field(proto.STRING, number=3, oneof="data_source",)
gcs_source = proto.Field(
proto.MESSAGE, number=4, oneof="data_source", message=io.GcsSource,
)
bigquery_source = proto.Field(
proto.MESSAGE, number=5, oneof="data_source", message=io.BigQuerySource,
)
data_format = proto.Field(proto.STRING, number=2,)
target_field = proto.Field(proto.STRING, number=6,)
logging_sampling_strategy = proto.Field(
proto.MESSAGE, number=7, message="SamplingStrategy",
)
class TrainingPredictionSkewDetectionConfig(proto.Message):
r"""The config for Training & Prediction data skew detection. It
specifies the training dataset sources and the skew detection
parameters.
Attributes:
skew_thresholds (Sequence[google.cloud.aiplatform_v1beta1.types.ModelMonitoringObjectiveConfig.TrainingPredictionSkewDetectionConfig.SkewThresholdsEntry]):
Key is the feature name and value is the
threshold. If a feature needs to be monitored
                for skew, a value threshold must be configured for
that feature. The threshold here is against
feature distribution distance between the
training and prediction feature.
"""
skew_thresholds = proto.MapField(
proto.STRING, proto.MESSAGE, number=1, message="ThresholdConfig",
)
class PredictionDriftDetectionConfig(proto.Message):
r"""The config for Prediction data drift detection.
Attributes:
drift_thresholds (Sequence[google.cloud.aiplatform_v1beta1.types.ModelMonitoringObjectiveConfig.PredictionDriftDetectionConfig.DriftThresholdsEntry]):
Key is the feature name and value is the
threshold. If a feature needs to be monitored
                for drift, a value threshold must be configured
for that feature. The threshold here is against
feature distribution distance between different
                time windows.
"""
drift_thresholds = proto.MapField(
proto.STRING, proto.MESSAGE, number=1, message="ThresholdConfig",
)
training_dataset = proto.Field(proto.MESSAGE, number=1, message=TrainingDataset,)
training_prediction_skew_detection_config = proto.Field(
proto.MESSAGE, number=2, message=TrainingPredictionSkewDetectionConfig,
)
prediction_drift_detection_config = proto.Field(
proto.MESSAGE, number=3, message=PredictionDriftDetectionConfig,
)
class ModelMonitoringAlertConfig(proto.Message):
r"""Next ID: 2
Attributes:
email_alert_config (google.cloud.aiplatform_v1beta1.types.ModelMonitoringAlertConfig.EmailAlertConfig):
Email alert config.
"""
class EmailAlertConfig(proto.Message):
r"""The config for email alert.
Attributes:
user_emails (Sequence[str]):
The email addresses to send the alert.
"""
user_emails = proto.RepeatedField(proto.STRING, number=1,)
email_alert_config = proto.Field(
proto.MESSAGE, number=1, oneof="alert", message=EmailAlertConfig,
)
class ThresholdConfig(proto.Message):
r"""The config for feature monitoring threshold.
Next ID: 3
Attributes:
value (float):
Specify a threshold value that can trigger
the alert. If this threshold config is for
feature distribution distance: 1. For
categorical feature, the distribution distance
            is calculated by the L-infinity norm.
2. For numerical feature, the distribution
distance is calculated by Jensen–Shannon
divergence.
Each feature must have a non-zero threshold if
they need to be monitored. Otherwise no alert
will be triggered for that feature.
"""
value = proto.Field(proto.DOUBLE, number=1, oneof="threshold",)
class SamplingStrategy(proto.Message):
r"""Sampling Strategy for logging, can be for both training and
prediction dataset.
Next ID: 2
Attributes:
random_sample_config (google.cloud.aiplatform_v1beta1.types.SamplingStrategy.RandomSampleConfig):
Random sample config. Will support more
sampling strategies later.
"""
class RandomSampleConfig(proto.Message):
r"""Requests are randomly selected.
Attributes:
sample_rate (float):
Sample rate (0, 1]
"""
sample_rate = proto.Field(proto.DOUBLE, number=1,)
random_sample_config = proto.Field(
proto.MESSAGE, number=1, message=RandomSampleConfig,
)
__all__ = tuple(sorted(__protobuf__.manifest))
|
[
"proto.RepeatedField",
"proto.MapField",
"proto.module",
"proto.Field"
] |
[((700, 878), 'proto.module', 'proto.module', ([], {'package': '"""google.cloud.aiplatform.v1beta1"""', 'manifest': "{'ModelMonitoringObjectiveConfig', 'ModelMonitoringAlertConfig',\n 'ThresholdConfig', 'SamplingStrategy'}"}), "(package='google.cloud.aiplatform.v1beta1', manifest={\n 'ModelMonitoringObjectiveConfig', 'ModelMonitoringAlertConfig',\n 'ThresholdConfig', 'SamplingStrategy'})\n", (712, 878), False, 'import proto\n'), ((5431, 5492), 'proto.Field', 'proto.Field', (['proto.MESSAGE'], {'number': '(1)', 'message': 'TrainingDataset'}), '(proto.MESSAGE, number=1, message=TrainingDataset)\n', (5442, 5492), False, 'import proto\n'), ((5542, 5630), 'proto.Field', 'proto.Field', (['proto.MESSAGE'], {'number': '(2)', 'message': 'TrainingPredictionSkewDetectionConfig'}), '(proto.MESSAGE, number=2, message=\n TrainingPredictionSkewDetectionConfig)\n', (5553, 5630), False, 'import proto\n'), ((5681, 5757), 'proto.Field', 'proto.Field', (['proto.MESSAGE'], {'number': '(3)', 'message': 'PredictionDriftDetectionConfig'}), '(proto.MESSAGE, number=3, message=PredictionDriftDetectionConfig)\n', (5692, 5757), False, 'import proto\n'), ((6317, 6394), 'proto.Field', 'proto.Field', (['proto.MESSAGE'], {'number': '(1)', 'oneof': '"""alert"""', 'message': 'EmailAlertConfig'}), "(proto.MESSAGE, number=1, oneof='alert', message=EmailAlertConfig)\n", (6328, 6394), False, 'import proto\n'), ((7156, 7210), 'proto.Field', 'proto.Field', (['proto.DOUBLE'], {'number': '(1)', 'oneof': '"""threshold"""'}), "(proto.DOUBLE, number=1, oneof='threshold')\n", (7167, 7210), False, 'import proto\n'), ((7860, 7924), 'proto.Field', 'proto.Field', (['proto.MESSAGE'], {'number': '(1)', 'message': 'RandomSampleConfig'}), '(proto.MESSAGE, number=1, message=RandomSampleConfig)\n', (7871, 7924), False, 'import proto\n'), ((3154, 3210), 'proto.Field', 'proto.Field', (['proto.STRING'], {'number': '(3)', 'oneof': '"""data_source"""'}), "(proto.STRING, number=3, oneof='data_source')\n", (3165, 3210), False, 'import proto\n'), ((3233, 3312), 'proto.Field', 'proto.Field', (['proto.MESSAGE'], {'number': '(4)', 'oneof': '"""data_source"""', 'message': 'io.GcsSource'}), "(proto.MESSAGE, number=4, oneof='data_source', message=io.GcsSource)\n", (3244, 3312), False, 'import proto\n'), ((3362, 3451), 'proto.Field', 'proto.Field', (['proto.MESSAGE'], {'number': '(5)', 'oneof': '"""data_source"""', 'message': 'io.BigQuerySource'}), "(proto.MESSAGE, number=5, oneof='data_source', message=io.\n BigQuerySource)\n", (3373, 3451), False, 'import proto\n'), ((3492, 3527), 'proto.Field', 'proto.Field', (['proto.STRING'], {'number': '(2)'}), '(proto.STRING, number=2)\n', (3503, 3527), False, 'import proto\n'), ((3552, 3587), 'proto.Field', 'proto.Field', (['proto.STRING'], {'number': '(6)'}), '(proto.STRING, number=6)\n', (3563, 3587), False, 'import proto\n'), ((3625, 3689), 'proto.Field', 'proto.Field', (['proto.MESSAGE'], {'number': '(7)', 'message': '"""SamplingStrategy"""'}), "(proto.MESSAGE, number=7, message='SamplingStrategy')\n", (3636, 3689), False, 'import proto\n'), ((4520, 4605), 'proto.MapField', 'proto.MapField', (['proto.STRING', 'proto.MESSAGE'], {'number': '(1)', 'message': '"""ThresholdConfig"""'}), "(proto.STRING, proto.MESSAGE, number=1, message='ThresholdConfig'\n )\n", (4534, 4605), False, 'import proto\n'), ((5303, 5388), 'proto.MapField', 'proto.MapField', (['proto.STRING', 'proto.MESSAGE'], {'number': '(1)', 'message': '"""ThresholdConfig"""'}), "(proto.STRING, proto.MESSAGE, number=1, message='ThresholdConfig'\n )\n", (5317, 
5388), False, 'import proto\n'), ((6246, 6289), 'proto.RepeatedField', 'proto.RepeatedField', (['proto.STRING'], {'number': '(1)'}), '(proto.STRING, number=1)\n', (6265, 6289), False, 'import proto\n'), ((7795, 7830), 'proto.Field', 'proto.Field', (['proto.DOUBLE'], {'number': '(1)'}), '(proto.DOUBLE, number=1)\n', (7806, 7830), False, 'import proto\n')]
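
A minimal sketch of constructing these messages (assumes the google-cloud-aiplatform package is installed; the feature name, threshold value and e-mail address are made up):

from google.cloud.aiplatform_v1beta1 import types

skew_config = types.ModelMonitoringObjectiveConfig.TrainingPredictionSkewDetectionConfig(
    skew_thresholds={'age': types.ThresholdConfig(value=0.003)})
objective = types.ModelMonitoringObjectiveConfig(
    training_prediction_skew_detection_config=skew_config)
alerts = types.ModelMonitoringAlertConfig(
    email_alert_config=types.ModelMonitoringAlertConfig.EmailAlertConfig(
        user_emails=['[email protected]']))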
|
import json as js
import logging
from .baseoutput import baseoutput
log = logging.getLogger("MPP-Solar")
class json(baseoutput):
def __str__(self):
return "json - outputs the results to standard out in json format"
def __init__(self, *args, **kwargs) -> None:
log.debug(f"processor.json __init__ kwargs {kwargs}")
def output(self, *args, **kwargs):
log.info("Using output processor: json")
log.debug(f"processor.json.output kwargs {kwargs}")
data = self.get_kwargs(kwargs, "data")
output = {}
for key in data:
value = data[key]
if isinstance(value, list):
value = data[key][0]
# unit = data[key][1]
# remove spaces
key = key.lower().replace(" ", "_")
output[key] = value
print(js.dumps(output))
|
[
"logging.getLogger",
"json.dumps"
] |
[((76, 106), 'logging.getLogger', 'logging.getLogger', (['"""MPP-Solar"""'], {}), "('MPP-Solar')\n", (93, 106), False, 'import logging\n'), ((849, 865), 'json.dumps', 'js.dumps', (['output'], {}), '(output)\n', (857, 865), True, 'import json as js\n')]
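
What output() does to a reading can be sketched standalone (the sample data dict is made up; real keys come from the MPP-Solar device readings):

import json as js

data = {"AC Output Voltage": [230.0, "V"], "Serial Number": "XY123"}
output = {}
for key in data:
    value = data[key]
    if isinstance(value, list):
        value = value[0]  # keep the reading, drop the unit
    output[key.lower().replace(" ", "_")] = value
print(js.dumps(output))  # {"ac_output_voltage": 230.0, "serial_number": "XY123"}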
|
import urllib.request
import os
class ImageHandler:
def __init__(self, dir_prefix):
self.poster_dir_path = "./poster/" + dir_prefix + "/"
self.poster_url = "http://image.tmdb.org/t/p/w185"
def download_all_posters(self, df):
directory = os.path.dirname(self.poster_dir_path)
if not os.path.exists(directory):
os.makedirs(directory)
print("Download Image")
for i, url in enumerate(df["poster_path"]):
print(i, "/" + str(len(df["poster_path"])))
if not os.path.exists(self.poster_dir_path + str(i) + ".jpg"):
self.download_poster(url, i)
print("Download Finish")
def download_poster(self, poster_path, ids):
        try:
            urllib.request.urlretrieve(self.poster_url + str(poster_path), self.poster_dir_path + str(ids) + ".jpg")
        except Exception as e:
            # IOError is a subclass of Exception, so one handler covers both
            # of the original duplicated branches.
            print('404', e)
|
[
"os.path.dirname",
"os.path.exists",
"os.makedirs"
] |
[((273, 310), 'os.path.dirname', 'os.path.dirname', (['self.poster_dir_path'], {}), '(self.poster_dir_path)\n', (288, 310), False, 'import os\n'), ((327, 352), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (341, 352), False, 'import os\n'), ((366, 388), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (377, 388), False, 'import os\n')]
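
A minimal usage sketch (assumes pandas and network access to image.tmdb.org; the poster paths are made up):

import pandas as pd

df = pd.DataFrame({"poster_path": ["/abc123.jpg", "/def456.jpg"]})
handler = ImageHandler(dir_prefix="demo")
handler.download_all_posters(df)  # saves ./poster/demo/0.jpg, ./poster/demo/1.jpg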
|
# -*- coding: utf-8 -*-
# file: train_utils.py
# author: songyouwei <<EMAIL>>
# Copyright (C) 2018. All Rights Reserved.
from data_utils import ABSADatesetReader
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from tensorboardX import SummaryWriter
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
class Instructor:
def __init__(self, module_class, model_name, dataset='twitter', embed_dim=100, max_seq_len=40, batch_size=128):
absa_dataset = ABSADatesetReader(dataset=dataset, embed_dim=embed_dim, max_seq_len=max_seq_len)
self.train_data_loader = DataLoader(dataset=absa_dataset.train_data, batch_size=batch_size, shuffle=True)
self.test_data_loader = DataLoader(dataset=absa_dataset.test_data, batch_size=len(absa_dataset.test_data), shuffle=False)
self.writer = SummaryWriter(log_dir='{0}_logs'.format(model_name))
self.model = module_class(absa_dataset.embedding_matrix).to(device)
def run(self, inputs_cols, learning_rate=0.001, num_epochs=20, log_step=5):
# Loss and Optimizer
criterion = nn.CrossEntropyLoss()
params = filter(lambda p: p.requires_grad, self.model.parameters())
optimizer = torch.optim.Adam(params, lr=learning_rate)
max_test_acc = 0
global_step = 0
for epoch in range(num_epochs):
print('>' * 100)
print('epoch: ', epoch)
n_correct, n_total = 0, 0
for i_batch, sample_batched in enumerate(self.train_data_loader):
global_step += 1
# switch model to training mode, clear gradient accumulators
self.model.train()
optimizer.zero_grad()
inputs = [sample_batched[col].to(device) for col in inputs_cols]
targets = sample_batched['polarity'].to(device)
outputs = self.model(inputs)
loss = criterion(outputs, targets)
loss.backward()
optimizer.step()
if global_step % log_step == 0:
n_correct += (torch.argmax(outputs, -1) == targets).sum().item()
n_total += len(outputs)
train_acc = n_correct / n_total
# switch model to evaluation mode
self.model.eval()
n_test_correct, n_test_total = 0, 0
with torch.no_grad():
for t_batch, t_sample_batched in enumerate(self.test_data_loader):
t_inputs = [t_sample_batched[col].to(device) for col in inputs_cols]
t_targets = t_sample_batched['polarity'].to(device)
t_outputs = self.model(t_inputs)
n_test_correct += (torch.argmax(t_outputs, -1) == t_targets).sum().item()
n_test_total += len(t_outputs)
test_acc = n_test_correct / n_test_total
if test_acc > max_test_acc:
max_test_acc = test_acc
print('loss: {:.4f}, acc: {:.4f}, test_acc: {:.4f}'.format(loss.item(), train_acc, test_acc))
# log
self.writer.add_scalar('loss', loss, global_step)
self.writer.add_scalar('acc', train_acc, global_step)
self.writer.add_scalar('test_acc', test_acc, global_step)
self.writer.close()
print('max_test_acc: {0}'.format(max_test_acc))
|
[
"torch.utils.data.DataLoader",
"torch.argmax",
"data_utils.ABSADatesetReader",
"torch.nn.CrossEntropyLoss",
"torch.optim.Adam",
"torch.cuda.is_available",
"torch.no_grad"
] |
[((310, 335), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (333, 335), False, 'import torch\n'), ((507, 592), 'data_utils.ABSADatesetReader', 'ABSADatesetReader', ([], {'dataset': 'dataset', 'embed_dim': 'embed_dim', 'max_seq_len': 'max_seq_len'}), '(dataset=dataset, embed_dim=embed_dim, max_seq_len=max_seq_len\n )\n', (524, 592), False, 'from data_utils import ABSADatesetReader\n'), ((621, 706), 'torch.utils.data.DataLoader', 'DataLoader', ([], {'dataset': 'absa_dataset.train_data', 'batch_size': 'batch_size', 'shuffle': '(True)'}), '(dataset=absa_dataset.train_data, batch_size=batch_size, shuffle=True\n )\n', (631, 706), False, 'from torch.utils.data import DataLoader\n'), ((1114, 1135), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (1133, 1135), True, 'import torch.nn as nn\n'), ((1232, 1274), 'torch.optim.Adam', 'torch.optim.Adam', (['params'], {'lr': 'learning_rate'}), '(params, lr=learning_rate)\n', (1248, 1274), False, 'import torch\n'), ((2442, 2457), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2455, 2457), False, 'import torch\n'), ((2121, 2146), 'torch.argmax', 'torch.argmax', (['outputs', '(-1)'], {}), '(outputs, -1)\n', (2133, 2146), False, 'import torch\n'), ((2836, 2863), 'torch.argmax', 'torch.argmax', (['t_outputs', '(-1)'], {}), '(t_outputs, -1)\n', (2848, 2863), False, 'import torch\n')]
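
A minimal usage sketch for the Instructor above (the model module and its input column names are hypothetical; any class taking an embedding matrix and a list of input tensors fits the interface):

from models.lstm import LSTM  # hypothetical model module

ins = Instructor(module_class=LSTM, model_name='lstm', dataset='twitter',
                 embed_dim=100, max_seq_len=40, batch_size=128)
ins.run(inputs_cols=['text_raw_indices'], learning_rate=0.001, num_epochs=20)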
|
import secrets

def max_pow_2(n):
    # Largest r such that 2 ** r divides n. (The original loop tested
    # 'n % (2 ** i) != 0', which is false for i == 0, so it always returned 0.)
    r = 0
    while n % 2 == 0:
        n //= 2
        r += 1
    return r

def miller_rabin(n, k=40):
    # Miller-Rabin probabilistic primality test: True if n is probably prime
    # after k random rounds. (The original swapped the roles of k and n and
    # called an undefined miller_rabin_p helper; the witness loop is inlined.)
    if n < 2:
        return False
    elif n in (2, 3):
        return True
    elif n % 2 == 0:
        return False
    # Write n - 1 as 2**r * d with d odd.
    r = max_pow_2(n - 1)
    d = (n - 1) >> r
    for _ in range(k):
        a = 2 + secrets.randbelow(n - 3)  # random base in [2, n - 2]
        x = pow(a, d, n)
        if x in (1, n - 1):
            continue
        for _ in range(r - 1):
            x = pow(x, 2, n)
            if x == n - 1:
                break
        else:
            return False  # 'a' witnesses that n is composite
    return True
|
[
"secrets.randbelow"
] |
[((387, 411), 'secrets.randbelow', 'secrets.randbelow', (['(n - 3)'], {}), '(n - 3)\n', (404, 411), False, 'import secrets\n')]
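
A quick check of the reconstruction above (561 is a Carmichael number, so it fools the plain Fermat test but not Miller-Rabin):

for n in (97, 561, 2 ** 61 - 1):
    print(n, miller_rabin(n, k=40))
# 97 True, 561 False, 2305843009213693951 True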
|
"""!
@brief Cluster analysis algorithm: ROCK
@details Implementation based on paper @cite inproceedings::rock::1.
@authors <NAME> (<EMAIL>)
@date 2014-2020
@copyright BSD-3-Clause
"""
from pyclustering.cluster.encoder import type_encoding
from pyclustering.utils import euclidean_distance
from pyclustering.core.wrapper import ccore_library
import pyclustering.core.rock_wrapper as wrapper
class rock:
"""!
@brief The class represents clustering algorithm ROCK.
Example:
@code
from pyclustering.cluster import cluster_visualizer
from pyclustering.cluster.rock import rock
from pyclustering.samples.definitions import FCPS_SAMPLES
from pyclustering.utils import read_sample
# Read sample for clustering from file.
sample = read_sample(FCPS_SAMPLES.SAMPLE_HEPTA)
# Create instance of ROCK algorithm for cluster analysis. Seven clusters should be allocated.
rock_instance = rock(sample, 1.0, 7)
# Run cluster analysis.
rock_instance.process()
# Obtain results of clustering.
clusters = rock_instance.get_clusters()
# Visualize clustering results.
visualizer = cluster_visualizer()
visualizer.append_clusters(clusters, sample)
visualizer.show()
@endcode
"""
def __init__(self, data, eps, number_clusters, threshold=0.5, ccore=True):
"""!
@brief Constructor of clustering algorithm ROCK.
@param[in] data (list): Input data - list of points where each point is represented by list of coordinates.
@param[in] eps (double): Connectivity radius (similarity threshold), points are neighbors if distance between them is less than connectivity radius.
@param[in] number_clusters (uint): Defines number of clusters that should be allocated from the input data set.
@param[in] threshold (double): Value that defines degree of normalization that influences on choice of clusters for merging during processing.
@param[in] ccore (bool): Defines should be CCORE (C++ pyclustering library) used instead of Python code or not.
"""
self.__pointer_data = data
self.__eps = eps
self.__number_clusters = number_clusters
self.__threshold = threshold
self.__clusters = None
self.__ccore = ccore
if self.__ccore:
self.__ccore = ccore_library.workable()
self.__verify_arguments()
self.__degree_normalization = 1.0 + 2.0 * ((1.0 - threshold) / (1.0 + threshold))
self.__adjacency_matrix = None
self.__create_adjacency_matrix()
def process(self):
"""!
@brief Performs cluster analysis in line with rules of ROCK algorithm.
@return (rock) Returns itself (ROCK instance).
@see get_clusters()
"""
# TODO: (Not related to specification, just idea) First iteration should be investigated. Euclidean distance should be used for clustering between two
        # points and the rock algorithm between clusters because we consider non-categorical samples. But more investigation is required.
if self.__ccore is True:
self.__clusters = wrapper.rock(self.__pointer_data, self.__eps, self.__number_clusters, self.__threshold)
else:
self.__clusters = [[index] for index in range(len(self.__pointer_data))]
while len(self.__clusters) > self.__number_clusters:
indexes = self.__find_pair_clusters(self.__clusters)
if indexes != [-1, -1]:
self.__clusters[indexes[0]] += self.__clusters[indexes[1]]
self.__clusters.pop(indexes[1]) # remove merged cluster.
else:
break # totally separated clusters have been allocated
return self
def get_clusters(self):
"""!
@brief Returns list of allocated clusters, each cluster contains indexes of objects in list of data.
@return (list) List of allocated clusters, each cluster contains indexes of objects in list of data.
@see process()
"""
return self.__clusters
def get_cluster_encoding(self):
"""!
@brief Returns clustering result representation type that indicate how clusters are encoded.
@return (type_encoding) Clustering result representation.
@see get_clusters()
"""
return type_encoding.CLUSTER_INDEX_LIST_SEPARATION
def __find_pair_clusters(self, clusters):
"""!
@brief Returns pair of clusters that are best candidates for merging in line with goodness measure.
The pair of clusters for which the above goodness measure is maximum is the best pair of clusters to be merged.
@param[in] clusters (list): List of clusters that have been allocated during processing, each cluster is represented by list of indexes of points from the input data set.
@return (list) List that contains two indexes of clusters (from list 'clusters') that should be merged on this step.
                 It can be equal to [-1, -1] when there are no links between clusters.
"""
maximum_goodness = 0.0
cluster_indexes = [-1, -1]
for i in range(0, len(clusters)):
for j in range(i + 1, len(clusters)):
goodness = self.__calculate_goodness(clusters[i], clusters[j])
if goodness > maximum_goodness:
maximum_goodness = goodness
cluster_indexes = [i, j]
return cluster_indexes
def __calculate_links(self, cluster1, cluster2):
"""!
@brief Returns number of link between two clusters.
@details Link between objects (points) exists only if distance between them less than connectivity radius.
@param[in] cluster1 (list): The first cluster.
@param[in] cluster2 (list): The second cluster.
@return (uint) Number of links between two clusters.
"""
number_links = 0
for index1 in cluster1:
for index2 in cluster2:
number_links += self.__adjacency_matrix[index1][index2]
return number_links
def __create_adjacency_matrix(self):
"""!
@brief Creates 2D adjacency matrix (list of lists) where each element described existence of link between points (means that points are neighbors).
"""
size_data = len(self.__pointer_data)
self.__adjacency_matrix = [[0 for i in range(size_data)] for j in range(size_data)]
for i in range(0, size_data):
for j in range(i + 1, size_data):
distance = euclidean_distance(self.__pointer_data[i], self.__pointer_data[j])
if (distance <= self.__eps):
self.__adjacency_matrix[i][j] = 1
self.__adjacency_matrix[j][i] = 1
def __calculate_goodness(self, cluster1, cluster2):
"""!
@brief Calculates coefficient 'goodness measurement' between two clusters. The coefficient defines level of suitability of clusters for merging.
@param[in] cluster1 (list): The first cluster.
@param[in] cluster2 (list): The second cluster.
@return Goodness measure between two clusters.
"""
number_links = self.__calculate_links(cluster1, cluster2)
        # Normalize by the expected number of cross-links: the ROCK goodness
        # measure divides links(C1, C2) by (n1 + n2)^f - n1^f - n2^f, with
        # f = 1 + 2*(1 - theta)/(1 + theta) (__degree_normalization above).
        divider = ((len(cluster1) + len(cluster2)) ** self.__degree_normalization
                   - len(cluster1) ** self.__degree_normalization
                   - len(cluster2) ** self.__degree_normalization)
        return number_links / divider
def __verify_arguments(self):
"""!
@brief Verify input parameters for the algorithm and throw exception in case of incorrectness.
"""
if len(self.__pointer_data) == 0:
raise ValueError("Input data is empty (size: '%d')." % len(self.__pointer_data))
        if self.__eps < 0:
            raise ValueError("Connectivity radius (current value: '%f') should be greater or equal to 0." % self.__eps)
        if self.__threshold < 0 or self.__threshold > 1:
            raise ValueError("Threshold (current value: '%f') should be in range [0, 1]." % self.__threshold)
if (self.__number_clusters is not None) and (self.__number_clusters <= 0):
raise ValueError("Amount of clusters (current value: '%d') should be greater than 0." %
self.__number_clusters)
|
[
"pyclustering.core.rock_wrapper.rock",
"pyclustering.core.wrapper.ccore_library.workable",
"pyclustering.utils.euclidean_distance"
] |
[((2536, 2560), 'pyclustering.core.wrapper.ccore_library.workable', 'ccore_library.workable', ([], {}), '()\n', (2558, 2560), False, 'from pyclustering.core.wrapper import ccore_library\n'), ((3402, 3494), 'pyclustering.core.rock_wrapper.rock', 'wrapper.rock', (['self.__pointer_data', 'self.__eps', 'self.__number_clusters', 'self.__threshold'], {}), '(self.__pointer_data, self.__eps, self.__number_clusters, self.\n __threshold)\n', (3414, 3494), True, 'import pyclustering.core.rock_wrapper as wrapper\n'), ((7252, 7318), 'pyclustering.utils.euclidean_distance', 'euclidean_distance', (['self.__pointer_data[i]', 'self.__pointer_data[j]'], {}), '(self.__pointer_data[i], self.__pointer_data[j])\n', (7270, 7318), False, 'from pyclustering.utils import euclidean_distance\n')]
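
The goodness measure used by __calculate_goodness can be restated standalone (theta is the 'threshold' constructor argument):

def goodness(cross_links, n1, n2, theta=0.5):
    # f = 1 + 2*(1 - theta)/(1 + theta) matches __degree_normalization above;
    # the denominator estimates the expected number of cross-links between
    # clusters of sizes n1 and n2.
    f = 1.0 + 2.0 * (1.0 - theta) / (1.0 + theta)
    return cross_links / ((n1 + n2) ** f - n1 ** f - n2 ** f)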
|
import itertools
from language.heuristic.hardcore_annotated_expression import eT, apply_fun_to_nested, eL, eD, ltd_ify, Argu
from language.heuristic.littletools.generator_tools import count_up
from language.heuristic.pairix import Pairix
from language.heuristic.littletools.nested_list_tools import flatten_reduce, collapse
from language.heuristic.similaritymixer import SimilarityMixer
from helpers.time_tools import timeit_context
import logging
logging.captureWarnings(True)
logging.getLogger().setLevel(logging.INFO)
class Subjects_and_Aspects(Pairix):
    ''' This module finds pairs of arguments that are the subjects and aspects for a pair of pairs of expressions
'''
def __init__(self, corpus):
self.similar = \
SimilarityMixer([(2, SimilarityMixer.elmo_sim(), 0.4, 1)])
self.subjects_aspects = \
SimilarityMixer ([(1, SimilarityMixer.multi_paral_tup_sim(SimilarityMixer.subj_asp_sim, n=4), 0, 1),
(-1000, SimilarityMixer.multi_sim(SimilarityMixer.same_expression_sim, n=100), 0, 0.001)])
def annotate(self, clusters=None, graph_fun=None):
        ''' Annotates the correlations, i.e. expressions that are similar to each other and distinct from the
            pair that was found to exclude each other. For instance 'from the one side' and 'from the other side'.
            In part the graph is cleaned, because examples can also be wrongly marked as seeming contradictions.
            On the other side the same operation is done for additional (sub)predications in context, following
            coreferential relations.

            What is a subject and what an aspect is, is decided by the sequence, following the theme-rheme
            distinction: what you speak about comes first, the subject; what you use to divide this thing up is
            the aspect. If you change the direction of explanation, this also changes. E.g. if the first sentence
            talks about Microsoft warranties and the second explains different cases, you speak about warranty.
            If you start with these cases and then tell that in one case you have warranty and in the other not,
            you speak about these cases.

            :param clusters: 2tuple-2tuple-list-predicate-dicts, so 4 predicates in a contradicting/correlating
                constellation
            :param graph_fun: neo4j driver

        '''
def argument_tuples(predicate):
args = self.get_arguments(predicate)
return list(itertools.permutations(args, r=2))
with timeit_context('retrieve and generate pairs of arguments for each side'):
argument_tuples_in_sides = apply_fun_to_nested (
fun=argument_tuples,
attribute='predicate_id',
data=clusters)
# now in three steps:
        # 1. the 1st and 2nd element of the pairs must be similar to pairs of other sides --> hdbscan on tuple-parallel
        # semantic similarity
with timeit_context('computing sameness for the words within these pairs and the subject-'):
            def correllate(to_correlate):
                # Reconstruction (the original closure took unused (x, y)
                # arguments, referenced an undefined 'to_correlate' and never
                # returned its result): cluster the two halves of each
                # argument-tuple collection with hdbscan on ELMo similarity.
                return eL(
                    [self.similar.choose(data=(to_corr.unique(),
                                               to_corr.unique()),
                                         layout='hdbscan',
                                         n=100)
                     for to_corr in to_correlate])

            # The original recomputed ``argument_tuples_in_sides`` here and
            # never produced ``correlated``, which the next step consumes.
            correlated = correllate(argument_tuples_in_sides)
        # 2. these tuples have a distance between the two words within, like 'name' ~> 'thing' across multiple
        # sentences: they have a grammatical and semantic distance. We compute this as a feature of these tuples and
        # feed them again into SimilarityMixer and hdbscan again. So they must be converted to dicts.
        # 3. look for the maximum distance with at least two tuples in these grouped tuples.
        # (things, things, things), (name answering to definition, name corresponding with the name), (name, name, name, name)
with timeit_context('compute pairs of similar distance'):
subjects_aspects = eL(
[self.subjects_aspects.choose(
(corr, corr),
n=100,
minimize=False,
layout='n',
out='ex')
for corr in correlated])
with timeit_context('writing everything'):
self.neo4j_write(graph_fun, subjects_aspects, clusters)
return subjects_aspects
def argument_or_reference_instead (self, arguments):
        ''' This replaces, in the list of arguments, the ones that reference other nouns, and keeps the ones
            that are fine.
        :param arguments: argument dicts
        :return: list of arguments with coreferences resolved
        '''
new_arguments = []
for argument in arguments:
reference = argument['coreferenced'](argument['coref'])
if reference:
new_arguments.extend(reference)
else:
new_arguments.append(argument)
try:
assert new_arguments and all (new_arguments)
except AssertionError:
print (arguments)
raise
assert all(isinstance(arg, Argu) for arg in new_arguments)
return new_arguments
def get_arguments(self, predicate_s):
""" Gets the arguments of the predicate
:param predicate_s: predicate-dict or predicate list
:return: argument-dict
"""
if isinstance(predicate_s, list):
arguments = eL(flatten_reduce([self.get_arguments(pred) for pred in predicate_s]))
# if len (arguments.unique()) != len(arguments):
# logging.warning("INDEED AN EFFECT!!! %d" % (len (arguments.unique())- len(arguments)))
return arguments.unique()
arguments = predicate_s['arguments']
try:
assert (arguments)
except:
raise
arguments_ref = self.argument_or_reference_instead (arguments)
assert arguments_ref
return arguments_ref
def get_correlated (self, pair):
""" Returns pairs of similar arguments
:param pair: opposed pair of predicate-dict-2tuples
:return: correlated argument-dict-2tuples
"""
arguments = self.get_arguments(pair[0][0]), self.get_arguments(pair[1][0])
if not all(arguments):
raise ValueError ('no argument for predicate, that can be referenced?')
return self.similar.choose(arguments, layout='n', n=100, out='ex')
def neo4j_write (self, graph_fun, subjects_aspects, clusters):
''' push subjects and aspects to neo4j with appropriate node_labels
:param graph_fun: neo4j driver
:param subjects_aspects: annotated structure
:param clusters: the correlating and contrasting clusters, that were used to make widows for the query of
subjects and aspects
'''
with timeit_context('typing nested list for subject/aspect'):
subjects_aspects = \
ltd_ify(subjects_aspects,
node_type=['DENOTATION'],
stack_types=['SUBJECTS_ASPECTS_ALL', 'CLUSTER', 'A_S_TUPLES', ('SUBJECTS', 'ASPECTS'), 'GROUP', 'ARGUMENT'])
with timeit_context('push results to neo4j'):
self.neo4j_push (subjects_aspects, graph_fun)
apply_fun_to_nested(fun=self.get_arguments, attribute='predicate_id', data=clusters)
with timeit_context('neo4j cleanup'):
self.merge_clean_up(graph_fun)
cnt = count_up()
def neo4j_push(self, x, graph_fun):
''' push nested annotation structure to neo4j
:param x: nested eL, eT, eD-structure
:param graph_fun: neo4j driver
'''
with timeit_context('generate query'):
query = "".join(list(collapse(x.neo4j_write() + ['\n'])))
with open("query %d.txt" % next(self.cnt), "w") as text_file:
text_file.write(query)
with timeit_context('neo4j'):
graph_fun(query)
def merge_clean_up(self, graph_fun):
''' Connect predicate and argument nodes and transit a node in the nested annotation
:param graph_fun:
:return:
'''
query = """MATCH (n:CONNOTATION),(a:ARGUMENT)
WHERE a.id in n.arg_ids
MERGE (n)-[:X]->(a)
RETURN n,a"""
graph_fun(query)
query = """MATCH (n)-->(:GROUP)-->(s)
CALL apoc.create.addLabels( id(s), labels(n) )
YIELD node as n1
MERGE (n)<-[:X]-(s)
RETURN n"""
graph_fun(query)
|
[
"language.heuristic.hardcore_annotated_expression.ltd_ify",
"language.heuristic.similaritymixer.SimilarityMixer.multi_paral_tup_sim",
"language.heuristic.hardcore_annotated_expression.apply_fun_to_nested",
"language.heuristic.littletools.generator_tools.count_up",
"itertools.permutations",
"helpers.time_tools.timeit_context",
"logging.captureWarnings",
"language.heuristic.similaritymixer.SimilarityMixer.multi_sim",
"language.heuristic.similaritymixer.SimilarityMixer.elmo_sim",
"logging.getLogger"
] |
[((450, 479), 'logging.captureWarnings', 'logging.captureWarnings', (['(True)'], {}), '(True)\n', (473, 479), False, 'import logging\n'), ((7826, 7836), 'language.heuristic.littletools.generator_tools.count_up', 'count_up', ([], {}), '()\n', (7834, 7836), False, 'from language.heuristic.littletools.generator_tools import count_up\n'), ((480, 499), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (497, 499), False, 'import logging\n'), ((7639, 7728), 'language.heuristic.hardcore_annotated_expression.apply_fun_to_nested', 'apply_fun_to_nested', ([], {'fun': 'self.get_arguments', 'attribute': '"""predicate_id"""', 'data': 'clusters'}), "(fun=self.get_arguments, attribute='predicate_id', data=\n clusters)\n", (7658, 7728), False, 'from language.heuristic.hardcore_annotated_expression import eT, apply_fun_to_nested, eL, eD, ltd_ify, Argu\n'), ((2619, 2691), 'helpers.time_tools.timeit_context', 'timeit_context', (['"""retrieve and generate pairs of arguments for each side"""'], {}), "('retrieve and generate pairs of arguments for each side')\n", (2633, 2691), False, 'from helpers.time_tools import timeit_context\n'), ((2732, 2818), 'language.heuristic.hardcore_annotated_expression.apply_fun_to_nested', 'apply_fun_to_nested', ([], {'fun': 'argument_tuples', 'attribute': '"""predicate_id"""', 'data': 'clusters'}), "(fun=argument_tuples, attribute='predicate_id', data=\n clusters)\n", (2751, 2818), False, 'from language.heuristic.hardcore_annotated_expression import eT, apply_fun_to_nested, eL, eD, ltd_ify, Argu\n'), ((3061, 3152), 'helpers.time_tools.timeit_context', 'timeit_context', (['"""computing sameness for the words within these pairs and the subject-"""'], {}), "(\n 'computing sameness for the words within these pairs and the subject-')\n", (3075, 3152), False, 'from helpers.time_tools import timeit_context\n'), ((3511, 3597), 'language.heuristic.hardcore_annotated_expression.apply_fun_to_nested', 'apply_fun_to_nested', ([], {'fun': 'argument_tuples', 'attribute': '"""predicate_id"""', 'data': 'clusters'}), "(fun=argument_tuples, attribute='predicate_id', data=\n clusters)\n", (3530, 3597), False, 'from language.heuristic.hardcore_annotated_expression import eT, apply_fun_to_nested, eL, eD, ltd_ify, Argu\n'), ((4210, 4261), 'helpers.time_tools.timeit_context', 'timeit_context', (['"""compute pairs of similar distance"""'], {}), "('compute pairs of similar distance')\n", (4224, 4261), False, 'from helpers.time_tools import timeit_context\n'), ((4540, 4576), 'helpers.time_tools.timeit_context', 'timeit_context', (['"""writing everything"""'], {}), "('writing everything')\n", (4554, 4576), False, 'from helpers.time_tools import timeit_context\n'), ((7202, 7257), 'helpers.time_tools.timeit_context', 'timeit_context', (['"""typing nested list for subject/aspect"""'], {}), "('typing nested list for subject/aspect')\n", (7216, 7257), False, 'from helpers.time_tools import timeit_context\n'), ((7308, 7478), 'language.heuristic.hardcore_annotated_expression.ltd_ify', 'ltd_ify', (['subjects_aspects'], {'node_type': "['DENOTATION']", 'stack_types': "['SUBJECTS_ASPECTS_ALL', 'CLUSTER', 'A_S_TUPLES', ('SUBJECTS', 'ASPECTS'),\n 'GROUP', 'ARGUMENT']"}), "(subjects_aspects, node_type=['DENOTATION'], stack_types=[\n 'SUBJECTS_ASPECTS_ALL', 'CLUSTER', 'A_S_TUPLES', ('SUBJECTS', 'ASPECTS'\n ), 'GROUP', 'ARGUMENT'])\n", (7315, 7478), False, 'from language.heuristic.hardcore_annotated_expression import eT, apply_fun_to_nested, eL, eD, ltd_ify, Argu\n'), ((7531, 7570), 
'helpers.time_tools.timeit_context', 'timeit_context', (['"""push results to neo4j"""'], {}), "('push results to neo4j')\n", (7545, 7570), False, 'from helpers.time_tools import timeit_context\n'), ((7738, 7769), 'helpers.time_tools.timeit_context', 'timeit_context', (['"""neo4j cleanup"""'], {}), "('neo4j cleanup')\n", (7752, 7769), False, 'from helpers.time_tools import timeit_context\n'), ((8044, 8076), 'helpers.time_tools.timeit_context', 'timeit_context', (['"""generate query"""'], {}), "('generate query')\n", (8058, 8076), False, 'from helpers.time_tools import timeit_context\n'), ((8274, 8297), 'helpers.time_tools.timeit_context', 'timeit_context', (['"""neo4j"""'], {}), "('neo4j')\n", (8288, 8297), False, 'from helpers.time_tools import timeit_context\n'), ((2570, 2603), 'itertools.permutations', 'itertools.permutations', (['args'], {'r': '(2)'}), '(args, r=2)\n', (2592, 2603), False, 'import itertools\n'), ((776, 802), 'language.heuristic.similaritymixer.SimilarityMixer.elmo_sim', 'SimilarityMixer.elmo_sim', ([], {}), '()\n', (800, 802), False, 'from language.heuristic.similaritymixer import SimilarityMixer\n'), ((884, 954), 'language.heuristic.similaritymixer.SimilarityMixer.multi_paral_tup_sim', 'SimilarityMixer.multi_paral_tup_sim', (['SimilarityMixer.subj_asp_sim'], {'n': '(4)'}), '(SimilarityMixer.subj_asp_sim, n=4)\n', (919, 954), False, 'from language.heuristic.similaritymixer import SimilarityMixer\n'), ((1002, 1071), 'language.heuristic.similaritymixer.SimilarityMixer.multi_sim', 'SimilarityMixer.multi_sim', (['SimilarityMixer.same_expression_sim'], {'n': '(100)'}), '(SimilarityMixer.same_expression_sim, n=100)\n', (1027, 1071), False, 'from language.heuristic.similaritymixer import SimilarityMixer\n')]
|
# -*- coding=utf-8 -*-
import random

def randomResult(inputFilePath, outputFilePath):
    # Read the user id from the first tab-separated column of each line and
    # write a random (age, gender, education) guess for every user.
    fr = open(inputFilePath, 'r', encoding='utf-8')
    fw = open(outputFilePath, 'w', encoding='utf-8')
    userList = []
    for line in fr.readlines():
        fields = line.rstrip('\n').split('\t')  # don't shadow the builtin 'list'
        userList.append(fields[0])
    for user in userList:
        age = random.randint(0, 6)
        gender = random.randint(0, 2)
        education = random.randint(0, 6)
        result = [str(user), str(age), str(gender), str(education)]
        for i in result:
            # (The original round-tripped each value through GBK and threw
            # the result away; writing the text directly is enough in Python 3.)
            fw.write(i + ' ')
        fw.write('\n')
    fr.close()
    fw.close()
if __name__ == '__main__':
randomResult('./data/test.csv', './output/randomResult.csv')
|
[
"random.randint"
] |
[((406, 426), 'random.randint', 'random.randint', (['(0)', '(6)'], {}), '(0, 6)\n', (420, 426), False, 'import random\n'), ((444, 464), 'random.randint', 'random.randint', (['(0)', '(2)'], {}), '(0, 2)\n', (458, 464), False, 'import random\n'), ((485, 505), 'random.randint', 'random.randint', (['(0)', '(6)'], {}), '(0, 6)\n', (499, 505), False, 'import random\n')]
|
import subprocess
from typing import Union
class Jtalk:
def __init__(self,
dict_dir,
voice_file,
output_file='/dev/null',
trace_file='/dev/null',
sampling='auto',
frame_period='auto',
all_pass='auto',
filter_coefficient=0.0,
speed_rate=1.0,
half_tone=0.0,
threshold=0.5,
spectrum=1.0,
log_f0=1.0,
volume=0.0,
buffer=0
):
"""
        On Ubuntu (apt-get packages):
dict_dir = "/var/lib/mecab/dic/open-jtalk/naist-jdic"
voice_file = "/usr/share/hts-voice/nitech-jp-atr503-m001/nitech_jp_atr503_m001.htsvoice"
:param dict_dir: -x dir : dictionary directory [ N/A]
:param voice_file: -m htsvoice : HTS voice files [ N/A]
:param output_file: -ow s : filename of output wav audio (generated speech) [ N/A]
:param trace_file: -ot s : filename of output trace information [ N/A]
:param sampling: -s i : sampling frequency [ auto][ 1-- ]
:param frame_period: -s i : sampling frequency [ auto][ 1-- ]
:param all_pass: -a f : all-pass constant [ auto][ 0.0-- 1.0]
:param filter_coefficient: -b f : postfiltering coefficient [ 0.0][ 0.0-- 1.0]
:param speed_rate: -r f : speech speed rate [ 1.0][ 0.0-- ]
:param half_tone: -fm f : additional half-tone [ 0.0][ -- ]
:param threshold: -u f : voiced/unvoiced threshold [ 0.5][ 0.0-- 1.0]
:param spectrum: -jm f : weight of GV for spectrum [ 1.0][ 0.0-- ]
:param log_f0: -jf f : weight of GV for log F0 [ 1.0][ 0.0-- ]
:param volume: -g f : volume (dB) [ 0.0][ -- ]
:param buffer: -z i : audio buffer size (if i==0, turn off) [ 0][ 0-- ]
"""
self._dict_dir = dict_dir
self._voice_file = voice_file
self._output_file = output_file
self._trace_file = trace_file
self._sampling = sampling
self._frame_period = frame_period
self._all_pass = all_pass
self._filter_coefficient = filter_coefficient
self._speed_rate = speed_rate
self._half_tone = half_tone
self._log_f0 = log_f0
self._threshold = threshold
self._spectrum = spectrum
self._volume = volume
self._buffer = buffer
@property
def dict_dir(self) -> str:
return self._dict_dir
@property
def voice_file(self) -> str:
return self._voice_file
@property
def output_file(self) -> str:
return self._output_file
@property
def trace_file(self) -> str:
return self._trace_file
@property
def sampling(self) -> Union[int, str]:
return self._sampling
@property
def frame_period(self) -> Union[int, str]:
return self._frame_period
@property
def all_pass(self) -> Union[float, str]:
return self._all_pass
@property
def filter_coefficient(self) -> float:
return self._filter_coefficient
@property
def speed_rate(self) -> float:
return self._speed_rate
@property
def half_tone(self) -> float:
return self._half_tone
@property
def log_f0(self) -> float:
return self._log_f0
@property
def spectrum(self) -> float:
return self._spectrum
@property
def volume(self) -> float:
return self._volume
@property
def buffer(self) -> float:
return self._buffer
def from_string(self,
string,
dict_dir=None,
voice_file=None,
output_file=None,
trace_file=None,
sampling=None,
frame_period=None,
all_pass=None,
filter_coefficient=None,
speed_rate=None,
half_tone=None,
threshold=None,
spectrum=None,
log_f0=None,
volume=None,
buffer=None,
timeout=60):
command = [
'open_jtalk',
'-x', dict_dir or self._dict_dir,
'-m', voice_file or self._voice_file,
            '-ow', output_file or self._output_file,
            '-ot', trace_file or self._trace_file,
            '-s', sampling or self._sampling,
            '-p', frame_period or self._frame_period,
            '-a', all_pass or self._all_pass,
            '-b', filter_coefficient or self._filter_coefficient,
            '-r', speed_rate or self._speed_rate,
            '-fm', half_tone or self._half_tone,
            '-u', threshold or self._threshold,
            '-jm', spectrum or self._spectrum,
            '-jf', log_f0 or self._log_f0,
            '-g', volume or self._volume,
            '-z', buffer or self._buffer
        ]
        # subprocess arguments must all be strings
        command = [str(c) for c in command]
        proc = subprocess.Popen(command, stdin=subprocess.PIPE)
        proc.stdin.write(string.encode('utf-8'))  # the pipe expects bytes, not str
        proc.stdin.close()
        proc.wait(timeout=timeout)
        return output_file or self._output_file
def from_file(self,
infile,
dict_dir=None,
voice_file=None,
output_file=None,
trace_file=None,
sampling=None,
frame_period=None,
all_pass=None,
filter_coefficient=None,
speed_rate=None,
half_tone=None,
threshold=None,
spectrum=None,
log_f0=None,
volume=None,
buffer=None,
timeout=60):
command = [
'open_jtalk',
'-x', dict_dir or self._dict_dir,
'-m', voice_file or self._voice_file,
            '-ow', output_file or self._output_file,
            '-ot', trace_file or self._trace_file,
            '-s', sampling or self._sampling,
            '-p', frame_period or self._frame_period,
            '-a', all_pass or self._all_pass,
            '-b', filter_coefficient or self._filter_coefficient,
            '-r', speed_rate or self._speed_rate,
            '-fm', half_tone or self._half_tone,
            '-u', threshold or self._threshold,
            '-jm', spectrum or self._spectrum,
            '-jf', log_f0 or self._log_f0,
            '-g', volume or self._volume,
            '-z', buffer or self._buffer,
            infile
        ]
        # subprocess arguments must all be strings
        command = [str(c) for c in command]
        process = subprocess.Popen(command, stdin=subprocess.PIPE)
        process.wait(timeout=timeout)
        return output_file or self._output_file
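# Hedged usage sketch (not part of the class above): the paths are the Ubuntu
# defaults quoted in the __init__ docstring; adjust them for your install and
# make sure the open_jtalk binary is on PATH.
if __name__ == '__main__':
    tts = Jtalk(
        dict_dir='/var/lib/mecab/dic/open-jtalk/naist-jdic',
        voice_file='/usr/share/hts-voice/nitech-jp-atr503-m001/nitech_jp_atr503_m001.htsvoice',
    )
    tts.from_string('こんにちは', output_file='hello.wav')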
|
[
"subprocess.Popen"
] |
[((5652, 5700), 'subprocess.Popen', 'subprocess.Popen', (['command'], {'stdin': 'subprocess.PIPE'}), '(command, stdin=subprocess.PIPE)\n', (5668, 5700), False, 'import subprocess\n'), ((7193, 7241), 'subprocess.Popen', 'subprocess.Popen', (['command'], {'stdin': 'subprocess.PIPE'}), '(command, stdin=subprocess.PIPE)\n', (7209, 7241), False, 'import subprocess\n')]
|
from flask import Flask, render_template, redirect, url_for, flash
from forms import addContactForm
import db_utils
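# db_utils (imported above, not shown here) is assumed to expose:
#   get_contacts() -> list of contact rows, or None on connection failure
#   add_contact(contact_dict) -> truthy on success
#   delete_contact(contact_id) -> removes the given row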
# initialize the Flask app
app = Flask(__name__)
# Note: This is a demo application hence secret is hardcoded for simplicity.
# For all practical purposes, take the key from OS environment variables or config files.
app.secret_key = 'any random string'
# Route to list all phone contacts of the user.
@app.route('/', methods=["GET"])
def list_contacts():
contacts = db_utils.get_contacts()
if contacts is None:
return "Error conecting to database. Ensure that the database is installed properly."
return render_template('list_contacts.html', contacts=contacts)
# Add a contact to phonebook
@app.route('/add/', methods=["GET", "POST"])
def add_contact():
form = addContactForm()
if form.validate_on_submit(): # Validate the form for CSRF etc.
# Extract form information
name = form.name.data
mobile_no = form.mobile_no.data
email = form.email.data
add_response = db_utils.add_contact({ # Add to database
"name": name,
"mobile_no": mobile_no,
"email": email
})
if add_response:
flash("Added!") # Show acknowledge to user
# Redirect to list_contacts page
return redirect(url_for("list_contacts"))
else:
flash("Error occured while adding contact. Try Again!")
return render_template('add_contact.html', form=form)
# Delete Channel from the database
@app.route('/delete/<contact_id>/', methods=["GET"])
def delete_contact(contact_id):
db_utils.delete_contact(contact_id)
return redirect(url_for('list_contacts'))
if __name__ == '__main__':
app.run(host='0.0.0.0', port=80)
|
[
"flask.flash",
"db_utils.delete_contact",
"forms.addContactForm",
"db_utils.add_contact",
"flask.Flask",
"flask.url_for",
"flask.render_template",
"db_utils.get_contacts"
] |
[((150, 165), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (155, 165), False, 'from flask import Flask, render_template, redirect, url_for, flash\n'), ((489, 512), 'db_utils.get_contacts', 'db_utils.get_contacts', ([], {}), '()\n', (510, 512), False, 'import db_utils\n'), ((643, 699), 'flask.render_template', 'render_template', (['"""list_contacts.html"""'], {'contacts': 'contacts'}), "('list_contacts.html', contacts=contacts)\n", (658, 699), False, 'from flask import Flask, render_template, redirect, url_for, flash\n'), ((805, 821), 'forms.addContactForm', 'addContactForm', ([], {}), '()\n', (819, 821), False, 'from forms import addContactForm\n'), ((1467, 1513), 'flask.render_template', 'render_template', (['"""add_contact.html"""'], {'form': 'form'}), "('add_contact.html', form=form)\n", (1482, 1513), False, 'from flask import Flask, render_template, redirect, url_for, flash\n'), ((1639, 1674), 'db_utils.delete_contact', 'db_utils.delete_contact', (['contact_id'], {}), '(contact_id)\n', (1662, 1674), False, 'import db_utils\n'), ((1051, 1127), 'db_utils.add_contact', 'db_utils.add_contact', (["{'name': name, 'mobile_no': mobile_no, 'email': email}"], {}), "({'name': name, 'mobile_no': mobile_no, 'email': email})\n", (1071, 1127), False, 'import db_utils\n'), ((1695, 1719), 'flask.url_for', 'url_for', (['"""list_contacts"""'], {}), "('list_contacts')\n", (1702, 1719), False, 'from flask import Flask, render_template, redirect, url_for, flash\n'), ((1230, 1245), 'flask.flash', 'flash', (['"""Added!"""'], {}), "('Added!')\n", (1235, 1245), False, 'from flask import Flask, render_template, redirect, url_for, flash\n'), ((1399, 1454), 'flask.flash', 'flash', (['"""Error occured while adding contact. Try Again!"""'], {}), "('Error occured while adding contact. Try Again!')\n", (1404, 1454), False, 'from flask import Flask, render_template, redirect, url_for, flash\n'), ((1347, 1371), 'flask.url_for', 'url_for', (['"""list_contacts"""'], {}), "('list_contacts')\n", (1354, 1371), False, 'from flask import Flask, render_template, redirect, url_for, flash\n')]
|
from models import Notebook, TextNote, VideoNote
bio = Notebook("Bio 201 Notes")
bio.notes.append(TextNote("This is the first day of Bio 201"))
bio.notes.append(TextNote("Final exam is 95%."))
bio.notes.append(VideoNote("https://www.youtube.com/watch?v=PKffm2uI4dk"))
bio.display()
bio.save("bio201.txt")
bio.load("bio201.txt")
print(bio.to_json())
|
[
"models.VideoNote",
"models.Notebook",
"models.TextNote"
] |
[((56, 81), 'models.Notebook', 'Notebook', (['"""Bio 201 Notes"""'], {}), "('Bio 201 Notes')\n", (64, 81), False, 'from models import Notebook, TextNote, VideoNote\n'), ((100, 144), 'models.TextNote', 'TextNote', (['"""This is the first day of Bio 201"""'], {}), "('This is the first day of Bio 201')\n", (108, 144), False, 'from models import Notebook, TextNote, VideoNote\n'), ((163, 193), 'models.TextNote', 'TextNote', (['"""Final exam is 95%."""'], {}), "('Final exam is 95%.')\n", (171, 193), False, 'from models import Notebook, TextNote, VideoNote\n'), ((212, 268), 'models.VideoNote', 'VideoNote', (['"""https://www.youtube.com/watch?v=PKffm2uI4dk"""'], {}), "('https://www.youtube.com/watch?v=PKffm2uI4dk')\n", (221, 268), False, 'from models import Notebook, TextNote, VideoNote\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import csv
from math import sqrt
import numpy as np
import yaml
from scipy.interpolate import interp1d
class PlanetVariables:
def __init__(self, name):
self.name = name
self._getDataFromFile()
def _getDataFromFile(self):
with open("planetVariables/" + self.name + ".yml", 'r') as ymlfile:
            yamlFile = yaml.load(ymlfile, Loader=yaml.SafeLoader)
self.meanPlanet_revolutions = yamlFile['mean planet revolutions per yuga']
self.longitude_slow_apogee = yamlFile['longitude slow apogee']
self.sizeSlow_at_0 = yamlFile['size slow epicycle at 0 & 180']
self.sizeSlow_at_90 = yamlFile['size slow epicycle at 90 & 270']
if self.name != 'Sun':
self.fast_apogee_revolutions = yamlFile['fast apogee revolutions per yuga']
self.sizeFast_at_0 = yamlFile['size fast epicycle at 0 & 180']
self.sizeFast_at_90 = yamlFile['size fast epicycle at 90 & 270']
class AngleAndSinHandler:
def __init__(self, thetas, sinValues):
self.InterpolatedSinTable = interp1d(thetas, sinValues)
self.InterpolatedInverseSinTable = interp1d(sinValues, thetas)
    # return example: 270.5 -> 270 deg & 30 min
    def DecimalDegreeToIndividualAngleUnits(self, decimalDeg):
        degrees = int(decimalDeg)
        # round away floating-point noise (e.g. 15.999... minutes for an input
        # like 30.2666667) before truncating to whole minutes
        minutes = int(round((decimalDeg - degrees) * 60, 9))
        seconds = (decimalDeg - degrees - minutes/60.)*3600
        return degrees, minutes, seconds
# returns the decimal degrees
def IndividualAngleUnitsToDecimalDegree(self, degrees, minutes, seconds=0):
tmpMinutes = minutes + seconds / 60.
return degrees + tmpMinutes / 60.
def getPositveAngle(self, decimalAngle):
while decimalAngle < 0:
decimalAngle += 360
while decimalAngle > 360:
decimalAngle -= 360
return decimalAngle
def roundToMinutes(self, decimalAngle):
_deg, _min, _sec = self.DecimalDegreeToIndividualAngleUnits(
decimalAngle)
if (_sec >= 30.):
return self.IndividualAngleUnitsToDecimalDegree(_deg, _min+1, 0)
else:
return self.IndividualAngleUnitsToDecimalDegree(_deg, _min, 0)
# positivity is required
def _getQuadrantOfAngle(self, decimalAngle):
# the qudrants are 0, 1, 2, 3
if (decimalAngle <= 90):
return 0
elif (decimalAngle <= 180):
return 1
elif (decimalAngle <= 270):
return 2
else:
return 3
def sinOf(self, decimalAngle):
angleForSin = self.getPositveAngle(decimalAngle)
quadrant = self._getQuadrantOfAngle(angleForSin)
# the quadrant numberation goes from 0 to 3
if (quadrant <= 1):
sign = 1
else:
sign = -1
angleForSin = angleForSin - quadrant*90
return sign * self.InterpolatedSinTable(angleForSin)
def arcsinOf(self, sinValue):
if (sinValue < 0):
return -1 * self.InterpolatedInverseSinTable(-sinValue)
else:
return self.InterpolatedInverseSinTable(sinValue)
def printAngle(self, name, decimalAngle, inDecimal=True):
if inDecimal:
print('{:20}: {}°'.format(name, decimalAngle))
else:
_deg, _min, _sec = self.DecimalDegreeToIndividualAngleUnits(
decimalAngle)
print('{:20}: {}° {}\' {}\'\''.format(name, _deg, _min, _sec))
def makePositiveRoundAndPrint(self, name, angle, inDecimal=True, doRound=False):
# make positive
angle = self.getPositveAngle(angle)
# do the rounding
if doRound:
angle = self.roundToMinutes(angle)
# print the angle
self.printAngle(name, angle, inDecimal)
return angle
def readCsvFile(filename):
with open(filename) as csvDataFile:
csvReader = csv.reader(csvDataFile)
# Get the radius
line = next(csvReader)
R = float(line[2])
# Skip second line
next(csvReader)
# read the rest
tmpArray = [(float(row[1]), float(row[2])) for row in csvReader]
thetas, sinValues = zip(*tmpArray)
return R, thetas, sinValues
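# The epicycle size below is interpolated linearly in |Sin| between its
# tabulated values at 0/180 and at 90/270 degrees:
#     size(kappa) = size_at_0 + (size_at_90 - size_at_0) * |Sin(kappa)| / R
# where R is the table radius read from the CSV file above, so the ratio
# runs from 0 to 1.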
def getSizeEpicycle(size_at_0, size_at_90, r_for_sin, handlerAngleSin, decimalAngle):
return size_at_0 + (size_at_90 - size_at_0) * abs(handlerAngleSin.sinOf(decimalAngle)) / (1. * r_for_sin)
def getRadiusEpicycle(size_at_0, size_at_90, radiusDeferent, handlerAngleSin, decimalAngle, printAll):
sizeEpicycle = getSizeEpicycle(
size_at_0, size_at_90, radiusDeferent, handlerAngleSin, decimalAngle)
radiusEpicycle = sizeEpicycle / 360. * radiusDeferent
if printAll:
print('{:20}: {}'.format('sizeEpicycle', sizeEpicycle))
print('{:20}: {}'.format('radiusEpicycle', radiusEpicycle))
return radiusEpicycle
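# Mean motion: a body completing `revolutionSpeed` revolutions per `period`
# days has made revolutionSpeed * elapsedDays / period revolutions after
# `elapsedDays`; only the fractional revolution matters, scaled to 360 deg.
# E.g. 3 revolutions per 10 days, after 25 days: 7.5 rev -> 0.5 rev -> 180 deg.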
def getDecimalAngleFromRotation(revolutionSpeed, elapsedDays, period):
numRevolutions = (revolutionSpeed * elapsedDays) / (1. * period)
return (numRevolutions - int(numRevolutions)) * 360
def getFastEquation(radiusFast, radiusDeferent, handlerAngleSin, kappa, printAll):
sinKappa = handlerAngleSin.sinOf(kappa)
VB = (radiusFast * sinKappa) / radiusDeferent
radialDistance = sqrt(
VB**2 + (radiusDeferent + sqrt(sinKappa**2 - VB**2))**2)
sigma = handlerAngleSin.arcsinOf(radiusFast * sinKappa / radialDistance)
if printAll:
print('{:20}: {}'.format('sinKappa', sinKappa))
print('{:20}: {}'.format('radiusDeferent', radiusDeferent))
print('{:20}: {}'.format('radiusFast', radiusFast))
print('{:20}: {}'.format('radialDistance', radialDistance))
return sigma
def getSlowEquation(radiusSlow, radiusDeferent, handlerAngleSin, kappa, printAll):
sinKappa = handlerAngleSin.sinOf(kappa)
if printAll:
print('{:20}: {}'.format('sinKappa', sinKappa))
mu = handlerAngleSin.arcsinOf(radiusSlow * sinKappa / radiusDeferent)
return mu
#############################################################
def doSunProcedure(
_yuga, _days_in_yuga, _days_since_epoch,
_radiusDeferent, _handler,
_meanPlanet_revolutions, _longitude_slow_apogee,
_sizeSlow_at_0, _sizeSlow_at_90,
_doRounding, _printDecimalDegree, _printAll):
# mean planet calculation
lambda_bar = getDecimalAngleFromRotation(
_meanPlanet_revolutions, _days_since_epoch, _days_in_yuga)
lambda_bar = _handler.makePositiveRoundAndPrint(
'lambda_bar', lambda_bar, _printDecimalDegree, _doRounding)
# apply half the slow equation to the computed result
lambda_mu = _longitude_slow_apogee
lambda_mu = _handler.makePositiveRoundAndPrint(
'lambda_mu', lambda_mu, _printDecimalDegree, _doRounding)
kappa_mu = lambda_bar - lambda_mu
kappa_mu = _handler.makePositiveRoundAndPrint(
'kappa_mu', kappa_mu, _printDecimalDegree, _doRounding)
# get the current radius of the epicycle
radiusSlow = getRadiusEpicycle(
_sizeSlow_at_0, _sizeSlow_at_90, _radiusDeferent, _handler, kappa_mu, _printAll)
mu = getSlowEquation(radiusSlow, _radiusDeferent,
_handler, kappa_mu, _printAll)
mu = _handler.makePositiveRoundAndPrint(
'mu', mu, _printDecimalDegree, _doRounding)
# plus or minus? use the secondSign...
lambda_true = lambda_bar + mu
lambda_true = _handler.makePositiveRoundAndPrint(
'lambda_true', lambda_true, _printDecimalDegree, _doRounding)
#############################################################
def do4stepProcedure(
_yuga, _days_in_yuga, _days_since_epoch,
_radiusDeferent, _handler,
_meanPlanet_revolutions, _fast_apogee_revolutions, _longitude_slow_apogee,
_sizeSlow_at_0, _sizeSlow_at_90, _sizeFast_at_0, _sizeFast_at_90,
_doRounding, _printDecimalDegree, _printAll,
_firstSign, _secondSign, _thirdSign, _fourthSign):
# 4 step procedure, from suryasiddhanta
# 0th step
# calculate the mean planets longitude (lambda_bar)
lambda_bar = getDecimalAngleFromRotation(
_meanPlanet_revolutions, _days_since_epoch, _days_in_yuga)
lambda_bar = _handler.makePositiveRoundAndPrint(
'lambda_bar', lambda_bar, _printDecimalDegree, _doRounding)
################# START 1st step #################
# apply half the fast equation to the mean planet
lambda_sigma = getDecimalAngleFromRotation(
_fast_apogee_revolutions, _days_since_epoch, _days_in_yuga)
lambda_sigma = _handler.makePositiveRoundAndPrint(
'lambda_sigma', lambda_sigma, _printDecimalDegree, _doRounding)
kappa_sigma_1 = lambda_bar - lambda_sigma
kappa_sigma_1 = _handler.makePositiveRoundAndPrint(
'kappa_sigma_1', kappa_sigma_1, _printDecimalDegree, _doRounding)
# get the current radius of the epicycle
radiusFast = getRadiusEpicycle(
_sizeFast_at_0, _sizeFast_at_90, _radiusDeferent, _handler, kappa_sigma_1, _printAll)
sigma_1 = getFastEquation(
radiusFast, _radiusDeferent, _handler, kappa_sigma_1, _printAll)
sigma_1 = _handler.makePositiveRoundAndPrint(
'sigma_1', sigma_1, _printDecimalDegree, _doRounding)
# plus or minus? use the firstSign...
lambda_1 = lambda_bar + _firstSign * 0.5 * sigma_1
lambda_1 = _handler.makePositiveRoundAndPrint(
'lambda_1', lambda_1, _printDecimalDegree, _doRounding)
################# END 1st step #################
################# START 2nd step #################
# apply half the slow equation to the computed result
lambda_mu = _longitude_slow_apogee
lambda_mu = _handler.makePositiveRoundAndPrint(
'lambda_mu', lambda_mu, _printDecimalDegree, _doRounding)
kappa_mu_1 = lambda_1 - lambda_mu
kappa_mu_1 = _handler.makePositiveRoundAndPrint(
'kappa_mu_1', kappa_mu_1, _printDecimalDegree, _doRounding)
# get the current radius of the epicycle
radiusSlow = getRadiusEpicycle(
_sizeSlow_at_0, _sizeSlow_at_90, _radiusDeferent, _handler, kappa_mu_1, _printAll)
mu_1 = getSlowEquation(radiusSlow, _radiusDeferent,
_handler, kappa_mu_1, _printAll)
mu_1 = _handler.makePositiveRoundAndPrint(
'mu_1', mu_1, _printDecimalDegree, _doRounding)
# plus or minus? use the secondSign...
lambda_2 = lambda_1 + _secondSign * 0.5 * mu_1
lambda_2 = _handler.makePositiveRoundAndPrint(
'lambda_2', lambda_2, _printDecimalDegree, _doRounding)
################# END 2nd step #################
################# START 3rd step #################
# start form the computed result, compute the slow equation,
# apply it whole to the mean planet
kappa_mu_2 = lambda_2 - lambda_mu
kappa_mu_2 = _handler.makePositiveRoundAndPrint(
'kappa_mu_2', kappa_mu_2, _printDecimalDegree, _doRounding)
# get the current radius of the epicycle
radiusSlow = getRadiusEpicycle(
_sizeSlow_at_0, _sizeSlow_at_90, _radiusDeferent, _handler, kappa_mu_2, _printAll)
mu_2 = getSlowEquation(radiusSlow, _radiusDeferent,
_handler, kappa_mu_2, _printAll)
mu_2 = _handler.makePositiveRoundAndPrint(
'mu_2', mu_2, _printDecimalDegree, _doRounding)
    # plus or minus? use the thirdSign...
lambda_3 = lambda_bar + _thirdSign * mu_2
lambda_3 = _handler.makePositiveRoundAndPrint(
'lambda_3', lambda_3, _printDecimalDegree, _doRounding)
################# END 3rd step #################
################# START 4th step #################
# apply the whole fast equation to the computed result
kappa_sigma_2 = lambda_3 - lambda_sigma
kappa_sigma_2 = _handler.makePositiveRoundAndPrint(
'kappa_sigma_2', kappa_sigma_2, _printDecimalDegree, _doRounding)
# get the current size of the epicycle
radiusFast = getRadiusEpicycle(
_sizeFast_at_0, _sizeFast_at_90, _radiusDeferent, _handler, kappa_sigma_2, _printAll)
sigma_2 = getFastEquation(
radiusFast, _radiusDeferent, _handler, kappa_sigma_2, _printAll)
sigma_2 = _handler.makePositiveRoundAndPrint(
'sigma_2', sigma_2, _printDecimalDegree, _doRounding)
# plus or minus? use the fourthSign...
lambda_true = lambda_3 + _fourthSign * sigma_2
lambda_true = _handler.makePositiveRoundAndPrint(
'lambda_true', lambda_true, _printDecimalDegree, _doRounding)
################# END 4th step #################
#############################################################
def allPosibilityWay(yuga, days_in_yuga, days_since_epoch, radiusDeferent, handler, planet, doRounding, printDecimalDegree, printAll):
for i in [-1, 1]:
for j in [-1, 1]:
for k in [-1, 1]:
for l in [-1, 1]:
print(
"####################################### " '{},{},{},{}'.format(i, j, k, l))
do4stepProcedure(
yuga, days_in_yuga, days_since_epoch,
radiusDeferent, handler,
planet.meanPlanet_revolutions, planet.fast_apogee_revolutions, planet.longitude_slow_apogee,
planet.sizeSlow_at_0, planet.sizeSlow_at_90, planet.sizeFast_at_0, planet.sizeFast_at_90,
doRounding, printDecimalDegree, printAll,
i, j, k, l)
#############################################################
if __name__ == "__main__":
# get the global variables from the yaml file
with open("globalVariables.yml", 'r') as ymlfile:
        globalVars = yaml.load(ymlfile, Loader=yaml.SafeLoader)
# setup the Sin tables
# it's assumed that the sin table only gives vales for angles in [0,90 deg]
radiusDeferent, thetas, sinValues = readCsvFile(
globalVars['sin table'])
handler = AngleAndSinHandler(thetas, sinValues)
    # evidence suggests that angle values are rounded to the nearest minute
doRounding = globalVars['round to minutes']
# print angles in decimalDegree
printDecimalDegree = globalVars['print in decimal degrees']
# print all steps
printAll = globalVars['print all steps']
yuga = globalVars['yuga']
days_in_yuga = globalVars['days in a yuga']
days_since_epoch = globalVars['days since the epoch']
planets = []
for planetToCalculate in globalVars['do calculations for']:
if planetToCalculate == 'Sun':
planets.append(PlanetVariables(planetToCalculate))
elif planetToCalculate == 'Moon':
print(planetToCalculate + "is not yet implemented...")
        elif planetToCalculate in ('Mars', 'Mercury', 'Jupiter', 'Venus', 'Saturn'):
planets.append(PlanetVariables(planetToCalculate))
else:
print("Unknown planet! Please check for typos")
for p in planets:
print(p.name)
print("")
        if p.name == 'Sun':
doSunProcedure(
yuga, days_in_yuga, days_since_epoch,
radiusDeferent, handler,
p.meanPlanet_revolutions, p.longitude_slow_apogee,
p.sizeSlow_at_0, p.sizeSlow_at_90,
doRounding, printDecimalDegree, printAll)
else:
do4stepProcedure(
yuga, days_in_yuga, days_since_epoch,
radiusDeferent, handler,
p.meanPlanet_revolutions, p.fast_apogee_revolutions, p.longitude_slow_apogee,
p.sizeSlow_at_0, p.sizeSlow_at_90, p.sizeFast_at_0, p.sizeFast_at_90,
doRounding, printDecimalDegree, printAll,
-1, 1, 1, -1)
print("")
|
[
"scipy.interpolate.interp1d",
"yaml.load",
"csv.reader",
"math.sqrt"
] |
[((1094, 1121), 'scipy.interpolate.interp1d', 'interp1d', (['thetas', 'sinValues'], {}), '(thetas, sinValues)\n', (1102, 1121), False, 'from scipy.interpolate import interp1d\n'), ((1165, 1192), 'scipy.interpolate.interp1d', 'interp1d', (['sinValues', 'thetas'], {}), '(sinValues, thetas)\n', (1173, 1192), False, 'from scipy.interpolate import interp1d\n'), ((3994, 4017), 'csv.reader', 'csv.reader', (['csvDataFile'], {}), '(csvDataFile)\n', (4004, 4017), False, 'import csv\n'), ((13783, 13801), 'yaml.load', 'yaml.load', (['ymlfile'], {}), '(ymlfile)\n', (13792, 13801), False, 'import yaml\n'), ((396, 414), 'yaml.load', 'yaml.load', (['ymlfile'], {}), '(ymlfile)\n', (405, 414), False, 'import yaml\n'), ((5422, 5451), 'math.sqrt', 'sqrt', (['(sinKappa ** 2 - VB ** 2)'], {}), '(sinKappa ** 2 - VB ** 2)\n', (5426, 5451), False, 'from math import sqrt\n')]
|
"""
Start a Parameterized Build
"""
from __future__ import print_function
from jenkinsapi.jenkins import Jenkins
jenkins = Jenkins('http://localhost:8080')
params = {'VERSION': '1.2.3', 'PYTHON_VER': '2.7'}
# This will start the job in non-blocking manner
jenkins.build_job('foo', params)
# This will start the job and will return a QueueItem object which
# can be used to get build results
job = jenkins['foo']
qi = job.invoke(build_params=params)
# Block this script until build is finished
if qi.is_queued() or qi.is_running():
qi.block_until_complete()
build = qi.get_build()
print(build)
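# Once finished, the Build object can be inspected further; with jenkinsapi
# this is typically build.get_status(), returning e.g. 'SUCCESS' or 'FAILURE'
# (hedged: verify the accessor against your jenkinsapi version).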
|
[
"jenkinsapi.jenkins.Jenkins"
] |
[((125, 157), 'jenkinsapi.jenkins.Jenkins', 'Jenkins', (['"""http://localhost:8080"""'], {}), "('http://localhost:8080')\n", (132, 157), False, 'from jenkinsapi.jenkins import Jenkins\n')]
|
"""change_schedules_repeat_cycle_column
Revision ID: 73340f5f1adf
Revises: acf23daeb12b
Create Date: 2020-08-23 12:09:49.948494
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "73340f5f1adf"
down_revision = "<KEY>"
branch_labels = None
depends_on = None
def upgrade():
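    # NOTE: drop + re-add switches the column type from String(10) to Integer
    # but discards any existing values; preserving them would need an
    # op.alter_column() with an explicit cast instead (hedged sketch, untested).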
op.drop_column("schedules", "repeat_cycle")
op.add_column("schedules", sa.Column("repeat_cycle", sa.Integer, nullable=True))
def downgrade():
op.drop_column("schedules", "repeat_cycle")
op.add_column("schedules", sa.Column("repeat_cycle", sa.String(10), nullable=True))
|
[
"sqlalchemy.String",
"alembic.op.drop_column",
"sqlalchemy.Column"
] |
[((333, 376), 'alembic.op.drop_column', 'op.drop_column', (['"""schedules"""', '"""repeat_cycle"""'], {}), "('schedules', 'repeat_cycle')\n", (347, 376), False, 'from alembic import op\n'), ((485, 528), 'alembic.op.drop_column', 'op.drop_column', (['"""schedules"""', '"""repeat_cycle"""'], {}), "('schedules', 'repeat_cycle')\n", (499, 528), False, 'from alembic import op\n'), ((408, 460), 'sqlalchemy.Column', 'sa.Column', (['"""repeat_cycle"""', 'sa.Integer'], {'nullable': '(True)'}), "('repeat_cycle', sa.Integer, nullable=True)\n", (417, 460), True, 'import sqlalchemy as sa\n'), ((586, 599), 'sqlalchemy.String', 'sa.String', (['(10)'], {}), '(10)\n', (595, 599), True, 'import sqlalchemy as sa\n')]
|
from openprocurement.tender.openua.procedure.models.document import (
PostDocument as BasePostDocument,
PatchDocument as BasePatchDocument,
Document as BaseDocument,
)
from schematics.types import StringType
class PostDocument(BasePostDocument):
language = StringType(required=True, choices=["uk", "en", "ru"], default="uk")
class PatchDocument(BasePatchDocument):
pass
class Document(BaseDocument):
pass
|
[
"schematics.types.StringType"
] |
[((275, 342), 'schematics.types.StringType', 'StringType', ([], {'required': '(True)', 'choices': "['uk', 'en', 'ru']", 'default': '"""uk"""'}), "(required=True, choices=['uk', 'en', 'ru'], default='uk')\n", (285, 342), False, 'from schematics.types import StringType\n')]
|
import pygame
class Player(pygame.sprite.Sprite):
def __init__(self, *groups):
super().__init__(*groups)
self.image = pygame.image.load("img/baixo1.png")
self.image = pygame.transform.scale(self.image, [45, 45])
self.rect = pygame.Rect(540, 360, 45, 45)
self.speed = 5
def update(self, *args):
keys = pygame.key.get_pressed()
if keys[pygame.K_w]:
self.rect.y -= self.speed
elif keys[pygame.K_s]:
self.rect.y += self.speed
elif keys[pygame.K_a]:
self.rect.x -= self.speed
elif keys[pygame.K_d]:
self.rect.x += self.speed
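        # clamp the sprite to the playable area (the pixel bounds below
        # presumably match the level's border art; not derived here)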
if self.rect.top < 130:
self.rect.top = 130
elif self.rect.bottom > 680:
self.rect.bottom = 680
elif self.rect.left < 10:
self.rect.left = 10
elif self.rect.right > 1070:
self.rect.right = 1070
|
[
"pygame.image.load",
"pygame.transform.scale",
"pygame.Rect",
"pygame.key.get_pressed"
] |
[((146, 181), 'pygame.image.load', 'pygame.image.load', (['"""img/baixo1.png"""'], {}), "('img/baixo1.png')\n", (163, 181), False, 'import pygame\n'), ((204, 248), 'pygame.transform.scale', 'pygame.transform.scale', (['self.image', '[45, 45]'], {}), '(self.image, [45, 45])\n', (226, 248), False, 'import pygame\n'), ((270, 299), 'pygame.Rect', 'pygame.Rect', (['(540)', '(360)', '(45)', '(45)'], {}), '(540, 360, 45, 45)\n', (281, 299), False, 'import pygame\n'), ((374, 398), 'pygame.key.get_pressed', 'pygame.key.get_pressed', ([], {}), '()\n', (396, 398), False, 'import pygame\n')]
|
from pkg_resources import parse_version
def process_version(identifier, data):
# parse version to test against:
data["version_raw"] = data["version"]
try:
version = parse_version(data["version"])
except TypeError:
return
try:
parts = version.base_version.split(".")
parts += ["0"] * (4 - len(parts))
data["version_major"] = int(parts[0])
data["version_minor"] = int(parts[1])
data["version_bugfix"] = int(parts[2])
data["version_postfix"] = parts[3]
except ValueError:
return
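# Hedged worked example of the parsing above:
#     parse_version("1.2.3rc4").base_version == "1.2.3"
# so "1.2.3rc4" yields version_major=1, version_minor=2, version_bugfix=3
# and version_postfix="0" (from the ["0"] padding).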
def load_version(settings):
return process_version
|
[
"pkg_resources.parse_version"
] |
[((187, 217), 'pkg_resources.parse_version', 'parse_version', (["data['version']"], {}), "(data['version'])\n", (200, 217), False, 'from pkg_resources import parse_version\n')]
|
#IN THE NAME OF ALLAH
#Nick Name: Pcrlth0n
#(C) 2008
#a simple way to create and change your registry on windows
import win32api
def new_key():
reg1 = open('C:\\reg1.reg', 'w')
reg1.write("""REGEDIT4\n[HKEY_CURRENT_USER\\Example""")
reg1.close()
win32api.WinExec('reg import C:\\reg1.reg', 0)
def new_string_key():
reg2 = open('C:\\reg2.reg', 'w')
reg2.write("""REGEDIT4\n[HKEY_CURRENT_USER\\Example]\n"String Key"="C:\\\\\"""")
reg2.close()
win32api.WinExec('reg import C:\\reg2.reg', 0)
def new_dword_key():
reg3 = open('C:\\reg3.reg', 'w')
reg3.write("""REGEDIT4\n[HKEY_CURRENT_USER\\Example]\n"Dword key"=dword:00000000 """)
reg3.close()
win32api.WinExec('reg import C:\\reg3.reg', 0)
#new_key()
#new_string_key()
#new_dword_key()
|
[
"win32api.WinExec"
] |
[((264, 310), 'win32api.WinExec', 'win32api.WinExec', (['"""reg import C:\\\\reg1.reg"""', '(0)'], {}), "('reg import C:\\\\reg1.reg', 0)\n", (280, 310), False, 'import win32api\n'), ((476, 522), 'win32api.WinExec', 'win32api.WinExec', (['"""reg import C:\\\\reg2.reg"""', '(0)'], {}), "('reg import C:\\\\reg2.reg', 0)\n", (492, 522), False, 'import win32api\n'), ((692, 738), 'win32api.WinExec', 'win32api.WinExec', (['"""reg import C:\\\\reg3.reg"""', '(0)'], {}), "('reg import C:\\\\reg3.reg', 0)\n", (708, 738), False, 'import win32api\n')]
|
# Copyright (c) 2015 SONATA-NFV, UBIWHERE
# ALL RIGHTS RESERVED.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Neither the name of the SONATA-NFV, UBIWHERE
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# This work has been performed in the framework of the SONATA project,
# funded by the European Commission under Grant number 671517 through
# the Horizon 2020 and 5G-PPP programmes. The authors would like to
# acknowledge the contributions of their colleagues of the SONATA
# partner consortium (www.sonata-nfv.eu).
import unittest
from son.package.package import Packager
from son.workspace.workspace import Workspace
from son.workspace.workspace import Project
class IntPDTester(unittest.TestCase):
__pfd__ = {
'version': '0.5',
'package': {
'version': '0.1',
'name': 'sonata-project-sample',
'vendor': 'com.sonata.project',
'maintainer': 'Name, Company, Contact',
'description': 'Project description',
},
'descriptor_extension': 'yml'
}
def __init__(self, *args, **kwargs):
super(IntPDTester, self).__init__(*args, **kwargs)
ws = Workspace("")
prj = Project(ws, '/')
self.pck = Packager(workspace=ws, project=prj, generate_pd=False)
def test_correct_gds(self):
""" Test the correct general description section """
gsd = self.pck.package_gds(IntPDTester.__pfd__)
self.assertNotEqual(gsd, False)
def test_incomplete_gds(self):
"""
Test the returning message when the provided
project has incomplete information.
"""
pfd = IntPDTester.__pfd__
pfd.pop('package')
gsd = self.pck.package_gds(pfd)
self.assertEqual(gsd, None)
|
[
"son.workspace.workspace.Project",
"son.package.package.Packager",
"son.workspace.workspace.Workspace"
] |
[((1775, 1788), 'son.workspace.workspace.Workspace', 'Workspace', (['""""""'], {}), "('')\n", (1784, 1788), False, 'from son.workspace.workspace import Workspace\n'), ((1803, 1819), 'son.workspace.workspace.Project', 'Project', (['ws', '"""/"""'], {}), "(ws, '/')\n", (1810, 1819), False, 'from son.workspace.workspace import Project\n'), ((1839, 1893), 'son.package.package.Packager', 'Packager', ([], {'workspace': 'ws', 'project': 'prj', 'generate_pd': '(False)'}), '(workspace=ws, project=prj, generate_pd=False)\n', (1847, 1893), False, 'from son.package.package import Packager\n')]
|
#!/usr/bin/env python3
import subprocess as sp
def system_update():
sp.call(["sudo", "apt-get","update"])
sp.call(["sudo", "apt", "upgrade", "-y"])
def install_apache_php():
sp.call(["sudo", "apt", "install", "-y", "apache2", "libapache2-mod-php", "php-cli", "php-mbstring", "php-sqlite3", "php-opcache", "php-json", "php-mysql", "php-pgsql", "php-ldap", "php-gd", "php-xml"])
sp.call(["sudo", "systemctl", "enable", "--now", "apache2.service"])
def install_maraidb():
sp.call(["sudo", "apt", "install", "-y", "mariadb-server", "mariadb-client"])
sp.call(["sudo", "systemctl", "enable", "--now", "mariadb.service"])
sp.call(["sudo", "mysql_secure_installation"])
def install_kb():
version = input("Enter the version: ")
print(version)
kbversion = "kanboard-"+version+"/data"
filename = "v"+version+".tar.gz"
url = "https://github.com/kanboard/kanboard/archive/"+filename
print(url)
sp.call(["wget", url ])
sp.call(["tar", "xzvf", filename, "-C", "/var/www/html/"])
sp.call(["sudo", "mv", "/var/www/html/kanboard-"+version, "/var/www/html/kanboard"])
sp.call(["chown", "-R", "www-data:www-data", "/var/www/html/kanboard/data"])
sp.call(["rm", filename])
sp.call(["mysql", "-u", "root" , "-p", "-e", "CREATE DATABASE kanboard"])
sp.call(["mysql", "-u", "root", "-p", "-e", "CREATE USER 'kanboarduser'@'localhost' IDENTIFIED BY 'rajeshwar';"])
sp.call(["mysql", "-u", "root", "-p", "-e", "GRANT ALL PRIVILEGES ON kanboard.* TO 'kanboarduser'@'localhost' IDENTIFIED BY '<PASSWORD>' WITH GRANT OPTION;"])
sp.call(["mysql", "-u", "root", "-p", "-e", "FLUSH PRIVILEGES;"])
sp.call(["sudo", "sed", "-i", "s/DB_DRIVER', 'sqlite'/DB_DRIVER', 'mysql'/g", "/var/www/html/kanboard/config.default.php"])
sp.call(["sudo", "sed", "-i", "s/DB_USERNAME', 'root'/DB_USERNAME', 'kanboarduser'/g", "/var/www/html/kanboard/config.default.php"])
sp.call(["sudo", "sed", "-i", "s/DB_PASSWORD', ''/DB_PASSWORD', '<PASSWORD>'/g", "/var/www/html/kanboard/config.default.php"])
def restart_apache():
sp.call(["sudo", "touch", "/etc/php/7.4/mods-available/php.ini"])
    with open('/etc/php/7.4/mods-available/php.ini', "w") as f:
        sp.call(["echo", "extension=php.so"], stdout=f)
sp.call(["sudo", "systemctl", "restart", "apache2.service"])
sp.call(["sudo", "systemctl", "restart", "mysqld.service"])
def update_admin_passwd():
sp.call(["python3", "./update-admin-passwd.py"])
if __name__ == '__main__':
system_update()
install_apache_php()
install_maraidb()
install_kb()
restart_apache()
update_admin_passwd()
|
[
"subprocess.call"
] |
[((75, 113), 'subprocess.call', 'sp.call', (["['sudo', 'apt-get', 'update']"], {}), "(['sudo', 'apt-get', 'update'])\n", (82, 113), True, 'import subprocess as sp\n'), ((116, 157), 'subprocess.call', 'sp.call', (["['sudo', 'apt', 'upgrade', '-y']"], {}), "(['sudo', 'apt', 'upgrade', '-y'])\n", (123, 157), True, 'import subprocess as sp\n'), ((190, 400), 'subprocess.call', 'sp.call', (["['sudo', 'apt', 'install', '-y', 'apache2', 'libapache2-mod-php', 'php-cli',\n 'php-mbstring', 'php-sqlite3', 'php-opcache', 'php-json', 'php-mysql',\n 'php-pgsql', 'php-ldap', 'php-gd', 'php-xml']"], {}), "(['sudo', 'apt', 'install', '-y', 'apache2', 'libapache2-mod-php',\n 'php-cli', 'php-mbstring', 'php-sqlite3', 'php-opcache', 'php-json',\n 'php-mysql', 'php-pgsql', 'php-ldap', 'php-gd', 'php-xml'])\n", (197, 400), True, 'import subprocess as sp\n'), ((397, 465), 'subprocess.call', 'sp.call', (["['sudo', 'systemctl', 'enable', '--now', 'apache2.service']"], {}), "(['sudo', 'systemctl', 'enable', '--now', 'apache2.service'])\n", (404, 465), True, 'import subprocess as sp\n'), ((496, 573), 'subprocess.call', 'sp.call', (["['sudo', 'apt', 'install', '-y', 'mariadb-server', 'mariadb-client']"], {}), "(['sudo', 'apt', 'install', '-y', 'mariadb-server', 'mariadb-client'])\n", (503, 573), True, 'import subprocess as sp\n'), ((578, 646), 'subprocess.call', 'sp.call', (["['sudo', 'systemctl', 'enable', '--now', 'mariadb.service']"], {}), "(['sudo', 'systemctl', 'enable', '--now', 'mariadb.service'])\n", (585, 646), True, 'import subprocess as sp\n'), ((651, 697), 'subprocess.call', 'sp.call', (["['sudo', 'mysql_secure_installation']"], {}), "(['sudo', 'mysql_secure_installation'])\n", (658, 697), True, 'import subprocess as sp\n'), ((947, 969), 'subprocess.call', 'sp.call', (["['wget', url]"], {}), "(['wget', url])\n", (954, 969), True, 'import subprocess as sp\n'), ((975, 1033), 'subprocess.call', 'sp.call', (["['tar', 'xzvf', filename, '-C', '/var/www/html/']"], {}), "(['tar', 'xzvf', filename, '-C', '/var/www/html/'])\n", (982, 1033), True, 'import subprocess as sp\n'), ((1038, 1128), 'subprocess.call', 'sp.call', (["['sudo', 'mv', '/var/www/html/kanboard-' + version, '/var/www/html/kanboard']"], {}), "(['sudo', 'mv', '/var/www/html/kanboard-' + version,\n '/var/www/html/kanboard'])\n", (1045, 1128), True, 'import subprocess as sp\n'), ((1127, 1203), 'subprocess.call', 'sp.call', (["['chown', '-R', 'www-data:www-data', '/var/www/html/kanboard/data']"], {}), "(['chown', '-R', 'www-data:www-data', '/var/www/html/kanboard/data'])\n", (1134, 1203), True, 'import subprocess as sp\n'), ((1208, 1233), 'subprocess.call', 'sp.call', (["['rm', filename]"], {}), "(['rm', filename])\n", (1215, 1233), True, 'import subprocess as sp\n'), ((1239, 1311), 'subprocess.call', 'sp.call', (["['mysql', '-u', 'root', '-p', '-e', 'CREATE DATABASE kanboard']"], {}), "(['mysql', '-u', 'root', '-p', '-e', 'CREATE DATABASE kanboard'])\n", (1246, 1311), True, 'import subprocess as sp\n'), ((1317, 1434), 'subprocess.call', 'sp.call', (['[\'mysql\', \'-u\', \'root\', \'-p\', \'-e\',\n "CREATE USER \'kanboarduser\'@\'localhost\' IDENTIFIED BY \'rajeshwar\';"]'], {}), '([\'mysql\', \'-u\', \'root\', \'-p\', \'-e\',\n "CREATE USER \'kanboarduser\'@\'localhost\' IDENTIFIED BY \'rajeshwar\';"])\n', (1324, 1434), True, 'import subprocess as sp\n'), ((1435, 1602), 'subprocess.call', 'sp.call', (['[\'mysql\', \'-u\', \'root\', \'-p\', \'-e\',\n "GRANT ALL PRIVILEGES ON kanboard.* TO \'kanboarduser\'@\'localhost\' IDENTIFIED BY \'<PASSWORD>\' WITH GRANT OPTION;"\n ]'], {}), '([\'mysql\', \'-u\', \'root\', \'-p\', \'-e\',\n "GRANT ALL PRIVILEGES ON kanboard.* TO \'kanboarduser\'@\'localhost\' IDENTIFIED BY \'<PASSWORD>\' WITH GRANT OPTION;"\n ])', (1442, 1602), True, 'import subprocess as sp\n'), ((1598, 1663), 'subprocess.call', 'sp.call', (["['mysql', '-u', 'root', '-p', '-e', 'FLUSH PRIVILEGES;']"], {}), "(['mysql', '-u', 'root', '-p', '-e', 'FLUSH PRIVILEGES;'])\n", (1605, 1663), True, 'import subprocess as sp\n'), ((1668, 1799), 'subprocess.call', 'sp.call', (['[\'sudo\', \'sed\', \'-i\', "s/DB_DRIVER\', \'sqlite\'/DB_DRIVER\', \'mysql\'/g",\n \'/var/www/html/kanboard/config.default.php\']'], {}), '([\'sudo\', \'sed\', \'-i\',\n "s/DB_DRIVER\', \'sqlite\'/DB_DRIVER\', \'mysql\'/g",\n \'/var/www/html/kanboard/config.default.php\'])\n', (1675, 1799), True, 'import subprocess as sp\n'), ((1796, 1936), 'subprocess.call', 'sp.call', (['[\'sudo\', \'sed\', \'-i\',\n "s/DB_USERNAME\', \'root\'/DB_USERNAME\', \'kanboarduser\'/g",\n \'/var/www/html/kanboard/config.default.php\']'], {}), '([\'sudo\', \'sed\', \'-i\',\n "s/DB_USERNAME\', \'root\'/DB_USERNAME\', \'kanboarduser\'/g",\n \'/var/www/html/kanboard/config.default.php\'])\n', (1803, 1936), True, 'import subprocess as sp\n'), ((1933, 2067), 'subprocess.call', 'sp.call', (['[\'sudo\', \'sed\', \'-i\', "s/DB_PASSWORD\', \'\'/DB_PASSWORD\', \'<PASSWORD>\'/g",\n \'/var/www/html/kanboard/config.default.php\']'], {}), '([\'sudo\', \'sed\', \'-i\',\n "s/DB_PASSWORD\', \'\'/DB_PASSWORD\', \'<PASSWORD>\'/g",\n \'/var/www/html/kanboard/config.default.php\'])\n', (1940, 2067), True, 'import subprocess as sp\n'), ((2089, 2154), 'subprocess.call', 'sp.call', (["['sudo', 'touch', '/etc/php/7.4/mods-available/php.ini']"], {}), "(['sudo', 'touch', '/etc/php/7.4/mods-available/php.ini'])\n", (2096, 2154), True, 'import subprocess as sp\n'), ((2214, 2261), 'subprocess.call', 'sp.call', (["['echo', 'extension=php.so']"], {'stdout': 'f'}), "(['echo', 'extension=php.so'], stdout=f)\n", (2221, 2261), True, 'import subprocess as sp\n'), ((2265, 2325), 'subprocess.call', 'sp.call', (["['sudo', 'systemctl', 'restart', 'apache2.service']"], {}), "(['sudo', 'systemctl', 'restart', 'apache2.service'])\n", (2272, 2325), True, 'import subprocess as sp\n'), ((2330, 2389), 'subprocess.call', 'sp.call', (["['sudo', 'systemctl', 'restart', 'mysqld.service']"], {}), "(['sudo', 'systemctl', 'restart', 'mysqld.service'])\n", (2337, 2389), True, 'import subprocess as sp\n'), ((2427, 2475), 'subprocess.call', 'sp.call', (["['python3', './update-admin-passwd.py']"], {}), "(['python3', './update-admin-passwd.py'])\n", (2434, 2475), True, 'import subprocess as sp\n')]
|
from sqlalchemy import select
from ..core import Base
thesaurusDictTraduction = {}
invertedThesaurusDict = {'en': {}, 'fr': {}}
userOAuthDict = {}
def loadThesaurusTrad(config):
session = config.registry.dbmaker()
thesTable = Base.metadata.tables['ERDThesaurusTerm']
query = select(thesTable.c)
results = session.execute(query).fetchall()
for row in results:
newTraduction = {
'en': row['nameEn'], 'fr': row['nameFr'], 'parentID': row['parentID']}
if thesaurusDictTraduction.get(row['fullPath'], None):
thesaurusDictTraduction[row['fullPath']].append(newTraduction)
else:
thesaurusDictTraduction[row['fullPath']] = [newTraduction]
invertedThesaurusDict['en'][row['nameEn']] = row['fullPath']
invertedThesaurusDict['fr'][row['nameFr']] = row['fullPath']
session.close()
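# After loading, thesaurusDictTraduction maps a term's fullPath to a list of
# {'en', 'fr', 'parentID'} entries, while invertedThesaurusDict maps each
# localized name (English or French) back to its fullPath.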
|
[
"sqlalchemy.select"
] |
[((292, 311), 'sqlalchemy.select', 'select', (['thesTable.c'], {}), '(thesTable.c)\n', (298, 311), False, 'from sqlalchemy import select\n')]
|
# @Author ZhangGJ
# @Date 2021/01/13 21:59
from plotly import offline
from plotly.graph_objs import Bar, Layout
from die import Die
die = Die()
# Roll the die several times and store the results in a list
results = []
for roll_num in range(1000):
result = die.roll()
results.append(result)
# Analyze the results
frequencies = []
for value in range(1, die.num_sides + 1):
frequency = results.count(value)
frequencies.append(frequency)
# Visualize the results
x_values = list(range(1, die.num_sides + 1))
data = [Bar(x=x_values, y=frequencies)]
x_axis_config = {'title': '结果'}
y_axis_config = {'title': '结果的频率'}
my_layout = Layout(title='掷一个D6 1000次的结果', xaxis=x_axis_config, yaxis=y_axis_config)
offline.plot({'data': data, 'layout': my_layout}, filename='d6.html')
|
[
"plotly.graph_objs.Layout",
"plotly.graph_objs.Bar",
"die.Die",
"plotly.offline.plot"
] |
[((139, 144), 'die.Die', 'Die', ([], {}), '()\n', (142, 144), False, 'from die import Die\n'), ((574, 646), 'plotly.graph_objs.Layout', 'Layout', ([], {'title': '"""掷一个D6 1000次的结果"""', 'xaxis': 'x_axis_config', 'yaxis': 'y_axis_config'}), "(title='掷一个D6 1000次的结果', xaxis=x_axis_config, yaxis=y_axis_config)\n", (580, 646), False, 'from plotly.graph_objs import Bar, Layout\n'), ((647, 716), 'plotly.offline.plot', 'offline.plot', (["{'data': data, 'layout': my_layout}"], {'filename': '"""d6.html"""'}), "({'data': data, 'layout': my_layout}, filename='d6.html')\n", (659, 716), False, 'from plotly import offline\n'), ((462, 492), 'plotly.graph_objs.Bar', 'Bar', ([], {'x': 'x_values', 'y': 'frequencies'}), '(x=x_values, y=frequencies)\n', (465, 492), False, 'from plotly.graph_objs import Bar, Layout\n')]
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import sys
import logging
from opencorpora import reader
from opencorpora.reader import CorpusReader
from russian_tagsets import converters
from pymorphy2.opencorpora_dict.parse import parse_opencorpora_xml
class UDConverter(object):
"""
Tries to convert the data provided by CorpusReader
to Universal Dependencies 1.4 (CoNLL-U) format.
OpenCorpora currently has no syntax markup so
respective fields remain empty
Processes and returns one sentence at a time
"""
def __init__(self, reader, path_to_dict, docids=None, categories=None):
assert isinstance(reader, CorpusReader)
self.docs = reader.iter_documents(docids, categories)
self.converter = converters.converter('opencorpora-int', 'ud14')
# prepare data to normalize verbal forms to INFN
self.lemma_rewrite = {}
dictionary = parse_opencorpora_xml(path_to_dict)
for from_id, to_id, type_id in dictionary.links:
if int(type_id) in (3, 5): # INFN -> VERB, GRND
self.lemma_rewrite[to_id] = dictionary.lexemes[from_id][0][0]
def sentences(self):
for doc in self.docs:
for sent in doc.iter_parsed_sents():
yield self._convert_sentence(sent)
def _convert_token(self, token, token_no):
if len(token[1]) > 1:
raise Exception("Ambiguous parses cannot be converted to UD: {}".format(token[1]))
lemma_id = token[1][0][2]
lemma = self.lemma_rewrite.get(lemma_id, token[1][0][0])
pos, grams = self.converter(token[1][0][1], lemma).split()
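        # CoNLL-U rows carry 10 tab-separated columns:
        # ID FORM LEMMA UPOSTAG XPOSTAG FEATS HEAD DEPREL DEPS MISC;
        # the last four stay '_' since OpenCorpora has no syntax markup.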
return '\t'.join((
str(token_no),
token[0],
lemma.upper(),
pos,
'_', # here should be XPOSTAG (lang-specific POS)
grams,
'\t'.join(['_'] * 4) # here should be syntax and misc
))
def _convert_sentence(self, sent):
return '\n'.join(self._convert_token(token, i+1) for i, token in enumerate(sent))
if __name__ == "__main__":
reader = CorpusReader(sys.argv[1])
conv = UDConverter(reader, sys.argv[2])
for sent_str in conv.sentences():
print(sent_str.encode('utf-8') + '\n')
|
[
"opencorpora.reader.CorpusReader",
"opencorpora.reader.iter_documents",
"pymorphy2.opencorpora_dict.parse.parse_opencorpora_xml",
"russian_tagsets.converters.converter"
] |
[((2107, 2132), 'opencorpora.reader.CorpusReader', 'CorpusReader', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (2119, 2132), False, 'from opencorpora.reader import CorpusReader\n'), ((702, 743), 'opencorpora.reader.iter_documents', 'reader.iter_documents', (['docids', 'categories'], {}), '(docids, categories)\n', (723, 743), False, 'from opencorpora import reader\n'), ((769, 816), 'russian_tagsets.converters.converter', 'converters.converter', (['"""opencorpora-int"""', '"""ud14"""'], {}), "('opencorpora-int', 'ud14')\n", (789, 816), False, 'from russian_tagsets import converters\n'), ((928, 963), 'pymorphy2.opencorpora_dict.parse.parse_opencorpora_xml', 'parse_opencorpora_xml', (['path_to_dict'], {}), '(path_to_dict)\n', (949, 963), False, 'from pymorphy2.opencorpora_dict.parse import parse_opencorpora_xml\n')]
|
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
# Open the image
img = np.array(Image.open('house.jpg')).astype(np.uint8)
# Apply gray scale
gray_img = np.round(0.299 * img[:, :, 0] +
0.587 * img[:, :, 1] +
0.114 * img[:, :, 2]).astype(np.uint8)
# Prewitt Operator
h, w = gray_img.shape
# define filters
horizontal = np.array([[-1, 0, 1], [-1, 0, 1], [-1, 0, 1]])  # s2: horizontal Prewitt kernel
vertical = np.array([[-1, -1, -1], [0, 0, 0], [1, 1, 1]])  # s1: vertical Prewitt kernel
# define images with 0s
newgradientImage = np.zeros((h, w))
# offset by 1
for i in range(1, h - 1):
for j in range(1, w - 1):
horizontalGrad = (horizontal[0, 0] * gray_img[i - 1, j - 1]) + \
(horizontal[0, 1] * gray_img[i - 1, j]) + \
(horizontal[0, 2] * gray_img[i - 1, j + 1]) + \
(horizontal[1, 0] * gray_img[i, j - 1]) + \
(horizontal[1, 1] * gray_img[i, j]) + \
(horizontal[1, 2] * gray_img[i, j + 1]) + \
(horizontal[2, 0] * gray_img[i + 1, j - 1]) + \
(horizontal[2, 1] * gray_img[i + 1, j]) + \
(horizontal[2, 2] * gray_img[i + 1, j + 1])
verticalGrad = (vertical[0, 0] * gray_img[i - 1, j - 1]) + \
(vertical[0, 1] * gray_img[i - 1, j]) + \
(vertical[0, 2] * gray_img[i - 1, j + 1]) + \
(vertical[1, 0] * gray_img[i, j - 1]) + \
(vertical[1, 1] * gray_img[i, j]) + \
(vertical[1, 2] * gray_img[i, j + 1]) + \
(vertical[2, 0] * gray_img[i + 1, j - 1]) + \
(vertical[2, 1] * gray_img[i + 1, j]) + \
(vertical[2, 2] * gray_img[i + 1, j + 1])
# Edge Magnitude
mag = np.sqrt(pow(horizontalGrad, 2.0) + pow(verticalGrad, 2.0))
newgradientImage[i - 1, j - 1] = mag
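# Note: the double loop above is a plain 2-D cross-correlation; assuming SciPy
# is available, an equivalent (and far faster) sketch would be:
#   from scipy.ndimage import correlate
#   gx = correlate(gray_img.astype(float), horizontal)
#   gy = correlate(gray_img.astype(float), vertical)
#   newgradientImage = np.sqrt(gx ** 2 + gy ** 2)
# (up to the one-pixel offset and border handling of the loop version).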
plt.figure()
plt.title('Prewitt_House')
plt.imsave('prewitt_house.jpg', newgradientImage, cmap='gray')
plt.imshow(newgradientImage, cmap='gray')
plt.show()
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"matplotlib.pyplot.imshow",
"numpy.zeros",
"PIL.Image.open",
"matplotlib.pyplot.figure",
"numpy.array",
"matplotlib.pyplot.imsave",
"numpy.round"
] |
[((400, 446), 'numpy.array', 'np.array', (['[[-1, 0, 1], [-1, 0, 1], [-1, 0, 1]]'], {}), '([[-1, 0, 1], [-1, 0, 1], [-1, 0, 1]])\n', (408, 446), True, 'import numpy as np\n'), ((465, 511), 'numpy.array', 'np.array', (['[[-1, -1, -1], [0, 0, 0], [1, 1, 1]]'], {}), '([[-1, -1, -1], [0, 0, 0], [1, 1, 1]])\n', (473, 511), True, 'import numpy as np\n'), ((565, 581), 'numpy.zeros', 'np.zeros', (['(h, w)'], {}), '((h, w))\n', (573, 581), True, 'import numpy as np\n'), ((2052, 2064), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2062, 2064), True, 'import matplotlib.pyplot as plt\n'), ((2066, 2092), 'matplotlib.pyplot.title', 'plt.title', (['"""Prewitt_House"""'], {}), "('Prewitt_House')\n", (2075, 2092), True, 'import matplotlib.pyplot as plt\n'), ((2094, 2156), 'matplotlib.pyplot.imsave', 'plt.imsave', (['"""prewitt_house.jpg"""', 'newgradientImage'], {'cmap': '"""gray"""'}), "('prewitt_house.jpg', newgradientImage, cmap='gray')\n", (2104, 2156), True, 'import matplotlib.pyplot as plt\n'), ((2158, 2199), 'matplotlib.pyplot.imshow', 'plt.imshow', (['newgradientImage'], {'cmap': '"""gray"""'}), "(newgradientImage, cmap='gray')\n", (2168, 2199), True, 'import matplotlib.pyplot as plt\n'), ((2201, 2211), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2209, 2211), True, 'import matplotlib.pyplot as plt\n'), ((187, 263), 'numpy.round', 'np.round', (['(0.299 * img[:, :, 0] + 0.587 * img[:, :, 1] + 0.114 * img[:, :, 2])'], {}), '(0.299 * img[:, :, 0] + 0.587 * img[:, :, 1] + 0.114 * img[:, :, 2])\n', (195, 263), True, 'import numpy as np\n'), ((111, 134), 'PIL.Image.open', 'Image.open', (['"""house.jpg"""'], {}), "('house.jpg')\n", (121, 134), False, 'from PIL import Image\n')]
|
##
# File: ChemCompSearchWrapper.py
# Author: jdw
# Date: 9-Mar-2020
# Version: 0.001
#
# Updates:
#
##
"""
Wrapper for chemical component search operations.
"""
__docformat__ = "restructuredtext en"
__author__ = "<NAME>"
__email__ = "<EMAIL>"
__license__ = "Apache 2.0"
import copy
import logging
import platform
import resource
import os
import time
from collections import namedtuple
from rcsb.utils.chem.ChemCompIndexProvider import ChemCompIndexProvider
from rcsb.utils.chem.ChemCompSearchIndexProvider import ChemCompSearchIndexProvider
from rcsb.utils.chem.MolecularFormula import MolecularFormula
from rcsb.utils.chem.OeSearchMoleculeProvider import OeSearchMoleculeProvider
from rcsb.utils.chem.OeIoUtils import OeIoUtils
from rcsb.utils.chem.OeSearchUtils import OeSearchUtils
from rcsb.utils.chem.OeSubStructSearchUtils import OeSubStructSearchUtils
from rcsb.utils.io.FileUtil import FileUtil
from rcsb.utils.io.MarshalUtil import MarshalUtil
from rcsb.utils.io.SftpUtil import SftpUtil
from rcsb.utils.io.SingletonClass import SingletonClass
HERE = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.dirname(os.path.dirname(os.path.dirname(HERE)))
logger = logging.getLogger(__name__)
MatchResults = namedtuple("MatchResults", "ccId oeMol searchType matchOpts screenType fpType fpScore oeIdx formula", defaults=(None,) * 9)
class ChemCompSearchWrapper(SingletonClass):
"""Wrapper for chemical component search operations."""
def __init__(self, **kwargs):
"""Wrapper class for chemical search/depiction operations.
Path and prefix data for wrapper class may be set as keyword arguments
as environmental variables.
Args:
cachePath (str): path to top-level cache directory used to store search index file dependencies
(default environment variable CHEM_SEARCH_CACHE_PATH or ".")
ccFileNamePrefix (str): prefix code used to distinguish different subsets of chemical definitions
(default environment variable CHEM_SEARCH_CC_PREFIX or "cc-full")
"""
self.__startTime = time.time()
#
self.__cachePath = kwargs.get("cachePath", os.environ.get("CHEM_SEARCH_CACHE_PATH", "."))
self.__ccFileNamePrefix = kwargs.get("ccFileNamePrefix", os.environ.get("CHEM_SEARCH_CC_PREFIX", "cc-full"))
#
self.__dependFileName = "ChemCompSearchWrapperData.tar.gz"
self.__dependTarFilePath = os.path.join(self.__cachePath, self.__dependFileName)
# ---
self.__mU = MarshalUtil(workPath=self.__cachePath)
# ---
self.__configD = {}
self.__ccIdxP = None
self.__siIdxP = None
self.__siIdx = {}
self.__oesmP = None
self.__oesU = None
self.__oesubsU = None
# ---
self.__statusDescriptorError = -100
self.__searchError = -200
self.__searchSuccess = 0
def setConfig(self, ccUrlTarget, birdUrlTarget, **kwargs):
"""Provide the chemical definition source path details for rebuilding search
index file dependencies.
Args:
ccUrlTarget (str): path to concatenated chemical component definition file
birdUrlTarget (str): path to the concatenated BIRD definition file
Other options are propagated to configurations of the wrapped classes in __bootstrapConfig()
"""
kwargs["ccUrlTarget"] = ccUrlTarget
kwargs["birdUrlTarget"] = birdUrlTarget
kwargs["cachePath"] = self.__cachePath
kwargs["ccFileNamePrefix"] = self.__ccFileNamePrefix
self.__configD = self.__bootstrapConfig(**kwargs)
return len(self.__configD) >= 3
def __bootstrapConfig(self, **kwargs):
"""Build on-the-fly default configuration for this wrapper class."""
# The following few options have no defaults -- and should be specified.
ccUrlTarget = kwargs.get("ccUrlTarget", None)
birdUrlTarget = kwargs.get("birdUrlTarget", None)
cachePath = kwargs.get("cachePath", None)
ccFileNamePrefix = kwargs.get("ccFileNamePrefix", None)
logger.info("Bootstrap configuration for prefix %r cc %r bird %r", ccFileNamePrefix, ccUrlTarget, birdUrlTarget)
# ---
# Reasonable values are selected for the remaining options...
oeFileNamePrefix = "oe-" + ccFileNamePrefix
try:
storeConfig = kwargs.get("storeConfig", True)
molLimit = kwargs.get("molLimit", None)
useCache = kwargs.get("useCache", False)
logSizes = kwargs.get("logSizes", False)
#
numProc = kwargs.get("numProc", 12)
maxProc = os.cpu_count()
numProc = min(numProc, maxProc)
maxChunkSize = kwargs.get("maxChunkSize", 50)
#
logger.debug("+++ >>> Assigning numProc as %d", numProc)
#
limitPerceptions = kwargs.get("limitPerceptions", False)
quietFlag = kwargs.get("quietFlag", True)
#
# fpTypeCuttoffD = {"TREE": 0.6, "MACCS": 0.9, "PATH": 0.6, "CIRCULAR": 0.6, "LINGO": 0.9}
fpTypeCuttoffD = kwargs.get("fpTypeCuttoffD", {"TREE": 0.6, "MACCS": 0.9})
buildTypeList = kwargs.get("buildTypeList", ["oe-iso-smiles", "oe-smiles", "cactvs-iso-smiles", "cactvs-smiles", "inchi"])
#
oesmpKwargs = {
"ccUrlTarget": ccUrlTarget,
"birdUrlTarget": birdUrlTarget,
"cachePath": cachePath,
"useCache": useCache,
"ccFileNamePrefix": ccFileNamePrefix,
"oeFileNamePrefix": oeFileNamePrefix,
"limitPerceptions": limitPerceptions,
"minCount": None,
"maxFpResults": 50,
"fpTypeCuttoffD": fpTypeCuttoffD,
"buildTypeList": buildTypeList,
"screenTypeList": None,
"quietFlag": quietFlag,
"numProc": numProc,
"maxChunkSize": maxChunkSize,
"molLimit": molLimit,
"logSizes": logSizes,
"suppressHydrogens": True,
}
ccsiKwargs = {
"ccUrlTarget": ccUrlTarget,
"birdUrlTarget": birdUrlTarget,
"cachePath": cachePath,
"useCache": useCache,
"ccFileNamePrefix": ccFileNamePrefix,
"oeFileNamePrefix": oeFileNamePrefix,
"limitPerceptions": limitPerceptions,
"minCount": None,
"numProc": numProc,
"quietFlag": quietFlag,
"maxChunkSize": maxChunkSize,
"molLimit": None,
"logSizes": False,
}
configD = {"versionNumber": 0.30, "ccsiKwargs": ccsiKwargs, "oesmpKwargs": oesmpKwargs}
#
if storeConfig:
configDirPath = os.path.join(cachePath, "config")
configFilePath = os.path.join(configDirPath, ccFileNamePrefix + "-config.json")
logger.info("Saving configuration bootstrap in %r", configFilePath)
self.__mU.mkdir(configDirPath)
self.__mU.doExport(configFilePath, configD, fmt="json", indent=3)
except Exception as e:
logger.exception("Failing with %s", str(e))
return configD
def readConfig(self, resetCachePath=True):
"""Read a prepared configuration file for the search wrapper class. This will override
any default configuration settings.
Args:
            resetCachePath (bool): update cachePath configuration option with the current cachePath setting.
Returns:
bool : True for success or False otherwise
"""
#
#
ok = False
try:
#
configFilePath = os.path.join(self.__cachePath, "config", self.__ccFileNamePrefix + "-config.json")
configD = self.__mU.doImport(configFilePath, fmt="json")
logger.debug("ConfigD: %r", configD)
if configD and (len(configD) > 2) and float(configD["versionNumber"]) > 0.2:
logger.info("Read version %r sections %r from %s", configD["versionNumber"], list(configD.keys()), configFilePath)
ok = True
self.__configD = configD
if resetCachePath:
# Allow the configuration to be relocatable.
configD["ccsiKwargs"]["cachePath"] = self.__cachePath
configD["oesmpKwargs"]["cachePath"] = self.__cachePath
else:
logger.error("Reading config file fails from %r", configFilePath)
except Exception as e:
logger.exception("Failing with %s", str(e))
ok = False
return ok
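    # Typical lifecycle (hedged sketch, inferred from the docstrings below):
    # build the index dependencies once from source definitions via
    # setConfig() plus the update*() methods (or buildDependenices()), stash
    # the resulting bundle, and later restore it and call readConfig()
    # before serving searches.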
def buildDependenices(self, ccUrlTarget, birdUrlTarget, **kwargs):
"""Convenience method to build configuration and static dependencies for the chemical search services.
Args:
ccUrlTarget (str): path to source concatenated chemical component definition file
birdUrlTarget (str): path to the source concatenated BIRD definition file
Other options are propagated to configurations of the wrapped classes in __bootstrapConfig()
"""
try:
okT = False
ok1 = self.setConfig(ccUrlTarget=ccUrlTarget, birdUrlTarget=birdUrlTarget, **kwargs)
useCache = kwargs.get("useCache", False)
ok2 = self.updateChemCompIndex(useCache=useCache)
ok3 = self.updateSearchIndex(useCache=useCache)
ok4 = self.updateSearchMoleculeProvider(useCache=useCache)
okBuild = ok1 and ok2 and ok3 and ok4
if okBuild:
fileU = FileUtil()
dirPathList = [os.path.join(self.__cachePath, subDir) for subDir in ["chem_comp", "oe_mol", "config"]]
okT = fileU.bundleTarfile(self.__dependTarFilePath, dirPathList, mode="w:gz", recursive=True)
#
return okT and okBuild
except Exception as e:
logger.exception("Failing build with %r and %r with %s", ccUrlTarget, birdUrlTarget, str(e))
return False
def stashDependencies(self, url, dirPath, bundleLabel="A", userName=None, pw=None):
"""Store a copy of the bundled search dependencies remotely -
Args:
url (str): URL string for the destination host (e.g. sftp://myserver.net or None for a local file)
dirPath (str): directory path on the remote resource
            bundleLabel (str, optional): optional label prepended to the stashed dependency bundle artifact (default='A')
            userName (str, optional): optional access information. Defaults to None.
            pw (str, optional): optional access information. Defaults to None.
Returns:
bool: True for success or False otherwise
"""
try:
ok = False
fn = self.__makeBundleFileName(self.__dependFileName, bundleLabel=bundleLabel)
if url and url.startswith("sftp://"):
sftpU = SftpUtil()
hostName = url[7:]
ok = sftpU.connect(hostName, userName, pw=pw, port=22)
if ok:
remotePath = os.path.join("/", dirPath, fn)
ok = sftpU.put(self.__dependTarFilePath, remotePath)
elif not url:
fileU = FileUtil()
remotePath = os.path.join(dirPath, fn)
ok = fileU.put(self.__dependTarFilePath, remotePath)
else:
logger.error("Unsupported stash protocol %r", url)
return ok
except Exception as e:
logger.exception("For %r %r failing with %s", url, dirPath, str(e))
return False
def __makeBundleFileName(self, rootName, bundleLabel="A"):
fn = rootName
try:
fn = rootName
fn = "%s-%s" % (bundleLabel.upper(), rootName) if bundleLabel else rootName
except Exception as e:
logger.exception("Failing with %s", str(e))
return fn
def restoreDependencies(self, url, dirPath, bundleLabel="A", userName=None, pw=None):
"""Restore bundled dependencies from remote storage and unbundle these in the
current local cache directory.
Args:
url (str): remote URL
            dirPath (str): remote directory path on the remote resource
            bundleLabel (str, optional): optional label prepended to the stashed dependency bundle artifact (default='A')
            userName (str, optional): optional access information. Defaults to None.
            pw (str, optional): optional access information. Defaults to None.
        Returns:
            bool: True for success or False otherwise
        """
try:
ok = False
fileU = FileUtil()
fn = self.__makeBundleFileName(self.__dependFileName, bundleLabel=bundleLabel)
if not url:
remotePath = os.path.join(dirPath, fn)
ok = fileU.get(remotePath, self.__dependTarFilePath)
elif url and url.startswith("http://"):
remotePath = url + os.path.join("/", dirPath, fn)
ok = fileU.get(remotePath, self.__dependTarFilePath)
elif url and url.startswith("sftp://"):
sftpU = SftpUtil()
ok = sftpU.connect(url[7:], userName, pw=pw, port=22)
if ok:
remotePath = os.path.join(dirPath, fn)
ok = sftpU.get(remotePath, self.__dependTarFilePath)
else:
logger.error("Unsupported protocol %r", url)
if ok:
ok = fileU.unbundleTarfile(self.__dependTarFilePath, dirPath=self.__cachePath)
return ok
except Exception as e:
logger.exception("For %r %r Failing with %s", url, dirPath, str(e))
ok = False
return ok
def updateChemCompIndex(self, useCache=False):
"""Rebuild the basic index of source chemical component and BIRD definitions.
Update the internal state of this index in the current object instance.
Resource requirements: 94 sec 1 proc 7GB memory macbook pro
Args:
useCache (bool): False to rebuild search index and True to reload
Returns:
            bool: True for success or False otherwise
"""
ok = False
try:
kwargs = copy.deepcopy(self.__configD["ccsiKwargs"]) if "ccsiKwargs" in self.__configD else None
if kwargs:
kwargs["useCache"] = useCache
ccIdxP = ChemCompIndexProvider(**kwargs)
ok = ccIdxP.testCache()
self.__ccIdxP = ccIdxP if ok else None
logger.info("Chemical component index status %r", ok)
except Exception as e:
logger.exception("Failing with %s", str(e))
return ok
def getChemCompIndex(self):
return self.__ccIdxP.getIndex() if self.__ccIdxP else {}
def getSearchMoleculeProvider(self):
return self.__oesmP if self.__oesmP else None
def updateSearchIndex(self, useCache=False):
"""Rebuild the search index from source chemical component and BIRD definitions.
Update the internal state of this index in the current object instance.
Resource requirements 771 secs 6 proc macbook pro 7GB memory.
Args:
useCache (bool): False to rebuild search index and True to reload
Returns:
            bool: True for success or False otherwise
"""
ok = False
try:
kwargs = copy.deepcopy(self.__configD["ccsiKwargs"]) if "ccsiKwargs" in self.__configD else None
if kwargs:
kwargs["useCache"] = useCache
siIdxP = ChemCompSearchIndexProvider(**kwargs)
ok = siIdxP.testCache()
self.__siIdxP = siIdxP if siIdxP else None
self.__siIdx = siIdxP.getIndex() if siIdxP and ok else {}
logger.info("Search index status %r index len %d", ok, len(self.__siIdx) if self.__siIdx else 0)
except Exception as e:
logger.exception("Failing with %s", str(e))
return ok
def updateSearchMoleculeProvider(self, useCache=False):
"""Rebuild the search molecule provider.
Update the internal state of this object reference in the current object instance.
Resource requirements: 151 seconds 1 proc 0.5GB memory macbook pro
Args:
useCache (bool): False to rebuild molecule store and True to reload
Returns:
            bool: True for success or False otherwise
"""
ok = False
try:
kwargs = copy.deepcopy(self.__configD["oesmpKwargs"]) if "oesmpKwargs" in self.__configD else None
if kwargs:
kwargs["useCache"] = useCache
oesmP = OeSearchMoleculeProvider(**kwargs)
ok = oesmP.testCache()
self.__oesmP = oesmP if oesmP and ok else None
except Exception as e:
logger.exception("Failing with %s", str(e))
return ok
def reloadSearchDatabase(self):
"""Reload the in-memory search databases from the OE molecule provider.
Resource requirements: ~90sec load time 0.35 GB memory
Returns:
bool: True for success or False otherwise
"""
        ok1 = ok2 = False
        try:
            okmp = self.updateSearchMoleculeProvider(useCache=True)
            if not okmp:
                return False
fpTypeCuttoffD = self.__configD["oesmpKwargs"]["fpTypeCuttoffD"] if "fpTypeCuttoffD" in self.__configD["oesmpKwargs"] else {}
fpTypeList = [k for k, v in fpTypeCuttoffD.items()]
oesU = OeSearchUtils(self.__oesmP, fpTypeList=fpTypeList)
ok1 = oesU.testCache()
self.__oesU = oesU if ok1 else None
#
oesubsU = OeSubStructSearchUtils(self.__oesmP)
ok2 = oesubsU.testCache()
self.__oesubsU = oesubsU if ok2 else None
except Exception as e:
logger.exception("Failing with %s", str(e))
return ok1 and ok2
def searchByDescriptor(self, descriptor, descriptorType, matchOpts="graph-relaxed", searchId=None):
"""Wrapper method for descriptor match and descriptor substructure search methods.
Args:
descriptor (str): molecular descriptor (SMILES, InChI)
            descriptorType (str): descriptor type (SMILES, InChI)
            matchOpts (str, optional): graph match criteria (graph-relaxed, graph-relaxed-stereo, graph-strict,
                                       fingerprint-similarity, sub-struct-graph-relaxed, sub-struct-graph-relaxed-stereo,
                                       sub-struct-graph-strict). Defaults to "graph-relaxed".
searchId (str, optional): search identifier for logging. Defaults to None.
Returns:
(statusCode, list, list): status, graph match and finger match lists of type (MatchResults)
-100 descriptor processing error
-200 search execution error
0 search execution success
"""
if matchOpts.startswith("sub-struct-"):
return self.subStructSearchByDescriptor(descriptor, descriptorType, matchOpts=matchOpts, searchId=searchId)
else:
return self.matchByDescriptor(descriptor, descriptorType, matchOpts=matchOpts, searchId=searchId)
def matchByDescriptor(self, descriptor, descriptorType, matchOpts="graph-relaxed", searchId=None):
"""Return graph match (w/ finger print pre-filtering) and finger print search results for the
input desriptor.
Args:
descriptor (str): molecular descriptor (SMILES, InChI)
descriptorType (str): descriptor type (SMILES, InChI
matchOpts (str, optional): graph match criteria (graph-relaxed, graph-relaxed-stereo, graph-strict,
fingerprint-similarity, Defaults to "graph-relaxed")
searchId (str, optional): search identifier for logging. Defaults to None.
Returns:
(statusCode, list, list): status, graph match and finger match lists of type (MatchResults)
-100 descriptor processing error
-200 search execution error
0 search execution success
"""
        ssL, fpL = [], []
retStatus = False
statusCode = -200
try:
fpTypeCuttoffD = self.__configD["oesmpKwargs"]["fpTypeCuttoffD"] if "fpTypeCuttoffD" in self.__configD["oesmpKwargs"] else {}
maxFpResults = self.__configD["oesmpKwargs"]["maxFpResults"] if "maxFpResults" in self.__configD["oesmpKwargs"] else 50
limitPerceptions = self.__configD["oesmpKwargs"]["limitPerceptions"] if "limitPerceptions" in self.__configD["oesmpKwargs"] else False
#
searchId = searchId if searchId else "query"
messageTag = searchId + ":" + descriptorType
oeioU = OeIoUtils()
oeMol = oeioU.descriptorToMol(descriptor, descriptorType, limitPerceptions=limitPerceptions, messageTag=messageTag)
oeMol = oeioU.suppressHydrogens(oeMol)
if not oeMol:
logger.warning("descriptor type %r molecule build fails: %r", descriptorType, descriptor)
return self.__statusDescriptorError, ssL, fpL
#
retStatus, ssL, fpL = self.__oesU.searchSubStructureAndFingerPrint(oeMol, list(fpTypeCuttoffD.items())[:2], maxFpResults, matchOpts=matchOpts)
statusCode = 0 if retStatus else self.__searchError
except Exception as e:
logger.exception("Failing with %s", str(e))
#
return statusCode, ssL, fpL
def subStructSearchByDescriptor(self, descriptor, descriptorType, matchOpts="sub-struct-graph-relaxed", searchId=None):
"""Return graph match (w/ finger print pre-filtering) and finger print search results for the
input desriptor.
Args:
descriptor (str): molecular descriptor (SMILES, InChI)
descriptorType (str): descriptor type (SMILES, InChI)
matchOpts (str, optional): graph match criteria (sub-struct-graph-relaxed, sub-struct-graph-relaxed-stereo,
sub-struct-graph-strict). Defaults to "sub-struct-graph-relaxed".
searchId (str, optional): search identifier for logging. Defaults to None.
Returns:
(statusCode, list, list): status, substructure search results of type (MatchResults), empty list placeholder
-100 descriptor processing error
-200 search execution error
0 search execution success
"""
ssL = []
retStatus = False
statusCode = -200
try:
limitPerceptions = self.__configD["oesmpKwargs"]["limitPerceptions"] if "limitPerceptions" in self.__configD["oesmpKwargs"] else False
numProc = self.__configD["oesmpKwargs"]["numProc"] if "numProc" in self.__configD["oesmpKwargs"] else 4
#
searchId = searchId if searchId else "query"
messageTag = searchId + ":" + descriptorType
oeioU = OeIoUtils()
oeMol = oeioU.descriptorToMol(descriptor, descriptorType, limitPerceptions=limitPerceptions, messageTag=messageTag)
oeMol = oeioU.suppressHydrogens(oeMol)
if not oeMol:
logger.warning("descriptor type %r molecule build fails: %r", descriptorType, descriptor)
return self.__statusDescriptorError, ssL, []
#
ccIdL = self.__oesubsU.prefilterIndex(oeMol, self.__siIdxP, matchOpts=matchOpts)
retStatus, ssL = self.__oesubsU.searchSubStructure(oeMol, ccIdList=ccIdL, matchOpts=matchOpts, numProc=numProc)
statusCode = 0 if retStatus else self.__searchError
except Exception as e:
logger.exception("Failing with %s", str(e))
#
return statusCode, ssL, []
def matchByFormulaRange(self, elementRangeD, matchSubset=False, searchId=None):
"""Return formula match results for input element range dictionary.
Args:
elementRangeD (dict): {'<element_name>: {'min': <int>, 'max': <int>}, ... }
matchSubset (bool, optional): query for formula subset (default: False)
searchId (str, optional): search identifier for logging. Defaults to None.
Returns:
(statusCode, list): status, list of chemical component identifiers
"""
ok = False
rL = []
try:
startTime = time.time()
searchId = searchId if searchId else "query"
rL = self.__ccIdxP.matchMolecularFormulaRange(elementRangeD, matchSubset=matchSubset)
ok = True
logger.info("%s formula %r matched %d (%.4f seconds)", searchId, elementRangeD, len(rL), time.time() - startTime)
except Exception as e:
logger.exception("Failing with %s", str(e))
return ok, rL
def matchByFormula(self, formula, matchSubset=False, searchId=None):
"""Return formula match results for input molecular formula.
Args:
formula (str): molecular formula (ex. 'C6H6')
matchSubset (bool, optional): query for formula subset (default: False)
searchId (str, optional): search identifier for logging. Defaults to None.
Returns:
(statusCode, list): status, list of chemical component identifiers
"""
ok = False
rL = []
try:
startTime = time.time()
searchId = searchId if searchId else "query"
mf = MolecularFormula()
eD = mf.parseFormula(formula)
elementRangeD = {k.upper(): {"min": v, "max": v} for k, v in eD.items()}
rL = self.__ccIdxP.matchMolecularFormulaRange(elementRangeD, matchSubset=matchSubset)
ok = True
logger.info("%s formula %r matched %d (%.4f seconds)", searchId, elementRangeD, len(rL), time.time() - startTime)
except Exception as e:
logger.exception("Failing with %s", str(e))
return ok, rL
def status(self):
unitS = "MB" if platform.system() == "Darwin" else "GB"
rusageMax = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
logger.info("Maximum resident memory size %.4f %s", rusageMax / 10 ** 6, unitS)
endTime = time.time()
logger.info("Status at %s (up %.4f seconds)", time.strftime("%Y %m %d %H:%M:%S", time.localtime()), endTime - self.__startTime)
|
[
"resource.getrusage",
"rcsb.utils.io.SftpUtil.SftpUtil",
"os.path.join",
"rcsb.utils.chem.ChemCompIndexProvider.ChemCompIndexProvider",
"os.path.dirname",
"time.localtime",
"copy.deepcopy",
"rcsb.utils.chem.OeSearchUtils.OeSearchUtils",
"rcsb.utils.chem.OeSearchMoleculeProvider.OeSearchMoleculeProvider",
"rcsb.utils.chem.OeIoUtils.OeIoUtils",
"rcsb.utils.io.MarshalUtil.MarshalUtil",
"rcsb.utils.chem.MolecularFormula.MolecularFormula",
"platform.system",
"rcsb.utils.chem.ChemCompSearchIndexProvider.ChemCompSearchIndexProvider",
"rcsb.utils.chem.OeSubStructSearchUtils.OeSubStructSearchUtils",
"time.time",
"os.environ.get",
"os.cpu_count",
"rcsb.utils.io.FileUtil.FileUtil",
"collections.namedtuple",
"logging.getLogger"
] |
[((1192, 1219), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1209, 1219), False, 'import logging\n'), ((1236, 1367), 'collections.namedtuple', 'namedtuple', (['"""MatchResults"""', '"""ccId oeMol searchType matchOpts screenType fpType fpScore oeIdx formula"""'], {'defaults': '((None,) * 9)'}), "('MatchResults',\n 'ccId oeMol searchType matchOpts screenType fpType fpScore oeIdx formula',\n defaults=(None,) * 9)\n", (1246, 1367), False, 'from collections import namedtuple\n'), ((1090, 1115), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1105, 1115), False, 'import os\n'), ((1158, 1179), 'os.path.dirname', 'os.path.dirname', (['HERE'], {}), '(HERE)\n', (1173, 1179), False, 'import os\n'), ((2150, 2161), 'time.time', 'time.time', ([], {}), '()\n', (2159, 2161), False, 'import time\n'), ((2499, 2552), 'os.path.join', 'os.path.join', (['self.__cachePath', 'self.__dependFileName'], {}), '(self.__cachePath, self.__dependFileName)\n', (2511, 2552), False, 'import os\n'), ((2587, 2625), 'rcsb.utils.io.MarshalUtil.MarshalUtil', 'MarshalUtil', ([], {'workPath': 'self.__cachePath'}), '(workPath=self.__cachePath)\n', (2598, 2625), False, 'from rcsb.utils.io.MarshalUtil import MarshalUtil\n'), ((27093, 27104), 'time.time', 'time.time', ([], {}), '()\n', (27102, 27104), False, 'import time\n'), ((2223, 2268), 'os.environ.get', 'os.environ.get', (['"""CHEM_SEARCH_CACHE_PATH"""', '"""."""'], {}), "('CHEM_SEARCH_CACHE_PATH', '.')\n", (2237, 2268), False, 'import os\n'), ((2335, 2385), 'os.environ.get', 'os.environ.get', (['"""CHEM_SEARCH_CC_PREFIX"""', '"""cc-full"""'], {}), "('CHEM_SEARCH_CC_PREFIX', 'cc-full')\n", (2349, 2385), False, 'import os\n'), ((4744, 4758), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (4756, 4758), False, 'import os\n'), ((7974, 8060), 'os.path.join', 'os.path.join', (['self.__cachePath', '"""config"""', "(self.__ccFileNamePrefix + '-config.json')"], {}), "(self.__cachePath, 'config', self.__ccFileNamePrefix +\n '-config.json')\n", (7986, 8060), False, 'import os\n'), ((12992, 13002), 'rcsb.utils.io.FileUtil.FileUtil', 'FileUtil', ([], {}), '()\n', (13000, 13002), False, 'from rcsb.utils.io.FileUtil import FileUtil\n'), ((18030, 18080), 'rcsb.utils.chem.OeSearchUtils.OeSearchUtils', 'OeSearchUtils', (['self.__oesmP'], {'fpTypeList': 'fpTypeList'}), '(self.__oesmP, fpTypeList=fpTypeList)\n', (18043, 18080), False, 'from rcsb.utils.chem.OeSearchUtils import OeSearchUtils\n'), ((18200, 18236), 'rcsb.utils.chem.OeSubStructSearchUtils.OeSubStructSearchUtils', 'OeSubStructSearchUtils', (['self.__oesmP'], {}), '(self.__oesmP)\n', (18222, 18236), False, 'from rcsb.utils.chem.OeSubStructSearchUtils import OeSubStructSearchUtils\n'), ((21497, 21508), 'rcsb.utils.chem.OeIoUtils.OeIoUtils', 'OeIoUtils', ([], {}), '()\n', (21506, 21508), False, 'from rcsb.utils.chem.OeIoUtils import OeIoUtils\n'), ((23818, 23829), 'rcsb.utils.chem.OeIoUtils.OeIoUtils', 'OeIoUtils', ([], {}), '()\n', (23827, 23829), False, 'from rcsb.utils.chem.OeIoUtils import OeIoUtils\n'), ((25249, 25260), 'time.time', 'time.time', ([], {}), '()\n', (25258, 25260), False, 'import time\n'), ((26242, 26253), 'time.time', 'time.time', ([], {}), '()\n', (26251, 26253), False, 'import time\n'), ((26328, 26346), 'rcsb.utils.chem.MolecularFormula.MolecularFormula', 'MolecularFormula', ([], {}), '()\n', (26344, 26346), False, 'from rcsb.utils.chem.MolecularFormula import MolecularFormula\n'), ((26936, 26976), 'resource.getrusage', 'resource.getrusage', 
(['resource.RUSAGE_SELF'], {}), '(resource.RUSAGE_SELF)\n', (26954, 26976), False, 'import resource\n'), ((7030, 7063), 'os.path.join', 'os.path.join', (['cachePath', '"""config"""'], {}), "(cachePath, 'config')\n", (7042, 7063), False, 'import os\n'), ((7097, 7159), 'os.path.join', 'os.path.join', (['configDirPath', "(ccFileNamePrefix + '-config.json')"], {}), "(configDirPath, ccFileNamePrefix + '-config.json')\n", (7109, 7159), False, 'import os\n'), ((9914, 9924), 'rcsb.utils.io.FileUtil.FileUtil', 'FileUtil', ([], {}), '()\n', (9922, 9924), False, 'from rcsb.utils.io.FileUtil import FileUtil\n'), ((11288, 11298), 'rcsb.utils.io.SftpUtil.SftpUtil', 'SftpUtil', ([], {}), '()\n', (11296, 11298), False, 'from rcsb.utils.io.SftpUtil import SftpUtil\n'), ((13147, 13172), 'os.path.join', 'os.path.join', (['dirPath', 'fn'], {}), '(dirPath, fn)\n', (13159, 13172), False, 'import os\n'), ((14634, 14677), 'copy.deepcopy', 'copy.deepcopy', (["self.__configD['ccsiKwargs']"], {}), "(self.__configD['ccsiKwargs'])\n", (14647, 14677), False, 'import copy\n'), ((14816, 14847), 'rcsb.utils.chem.ChemCompIndexProvider.ChemCompIndexProvider', 'ChemCompIndexProvider', ([], {}), '(**kwargs)\n', (14837, 14847), False, 'from rcsb.utils.chem.ChemCompIndexProvider import ChemCompIndexProvider\n'), ((15839, 15882), 'copy.deepcopy', 'copy.deepcopy', (["self.__configD['ccsiKwargs']"], {}), "(self.__configD['ccsiKwargs'])\n", (15852, 15882), False, 'import copy\n'), ((16021, 16058), 'rcsb.utils.chem.ChemCompSearchIndexProvider.ChemCompSearchIndexProvider', 'ChemCompSearchIndexProvider', ([], {}), '(**kwargs)\n', (16048, 16058), False, 'from rcsb.utils.chem.ChemCompSearchIndexProvider import ChemCompSearchIndexProvider\n'), ((16966, 17010), 'copy.deepcopy', 'copy.deepcopy', (["self.__configD['oesmpKwargs']"], {}), "(self.__configD['oesmpKwargs'])\n", (16979, 17010), False, 'import copy\n'), ((17149, 17183), 'rcsb.utils.chem.OeSearchMoleculeProvider.OeSearchMoleculeProvider', 'OeSearchMoleculeProvider', ([], {}), '(**kwargs)\n', (17173, 17183), False, 'from rcsb.utils.chem.OeSearchMoleculeProvider import OeSearchMoleculeProvider\n'), ((26876, 26893), 'platform.system', 'platform.system', ([], {}), '()\n', (26891, 26893), False, 'import platform\n'), ((27194, 27210), 'time.localtime', 'time.localtime', ([], {}), '()\n', (27208, 27210), False, 'import time\n'), ((9956, 9994), 'os.path.join', 'os.path.join', (['self.__cachePath', 'subDir'], {}), '(self.__cachePath, subDir)\n', (9968, 9994), False, 'import os\n'), ((11461, 11491), 'os.path.join', 'os.path.join', (['"""/"""', 'dirPath', 'fn'], {}), "('/', dirPath, fn)\n", (11473, 11491), False, 'import os\n'), ((11615, 11625), 'rcsb.utils.io.FileUtil.FileUtil', 'FileUtil', ([], {}), '()\n', (11623, 11625), False, 'from rcsb.utils.io.FileUtil import FileUtil\n'), ((11655, 11680), 'os.path.join', 'os.path.join', (['dirPath', 'fn'], {}), '(dirPath, fn)\n', (11667, 11680), False, 'import os\n'), ((25539, 25550), 'time.time', 'time.time', ([], {}), '()\n', (25548, 25550), False, 'import time\n'), ((26695, 26706), 'time.time', 'time.time', ([], {}), '()\n', (26704, 26706), False, 'import time\n'), ((13330, 13360), 'os.path.join', 'os.path.join', (['"""/"""', 'dirPath', 'fn'], {}), "('/', dirPath, fn)\n", (13342, 13360), False, 'import os\n'), ((13507, 13517), 'rcsb.utils.io.SftpUtil.SftpUtil', 'SftpUtil', ([], {}), '()\n', (13515, 13517), False, 'from rcsb.utils.io.SftpUtil import SftpUtil\n'), ((13644, 13669), 'os.path.join', 'os.path.join', (['dirPath', 'fn'], {}), '(dirPath, fn)\n', (13656, 13669), False, 'import os\n')]
|
import logging
from copy import deepcopy
from contextlib import contextmanager
from algoliasearch.http.verb import Verb
from algoliasearch.search_client import SearchClient
from algoliasearch.exceptions import AlgoliaException
from pyramid.exceptions import ConfigurationError
logger = logging.getLogger(__name__)
class Indexer(object):
def __init__(self, application_id, api_key, prefix="kinto"):
self.client = SearchClient.create(application_id, api_key)
self.prefix = prefix
self.tasks = []
def join(self):
for indexname, taskID in self.tasks:
index = self.client.init_index(indexname)
index.wait_task(taskID)
self.tasks = []
def set_extra_headers(self, headers):
self.client._config.headers.update(headers)
def indexname(self, bucket_id, collection_id):
return "{}-{}-{}".format(self.prefix, bucket_id, collection_id)
def create_index(
self, bucket_id, collection_id, settings=None, wait_for_creation=False
):
if settings is None:
settings = {}
self.update_index(
bucket_id, collection_id, settings=settings, wait_for_task=wait_for_creation
)
def update_index(
self, bucket_id, collection_id, settings=None, wait_for_task=False
):
indexname = self.indexname(bucket_id, collection_id)
if settings is not None:
index = self.client.init_index(indexname)
res = index.set_settings(settings, {"forwardToReplicas": True})
if wait_for_task:
res.wait()
else:
self.tasks.append((indexname, res[0]["taskID"]))
def delete_index(self, bucket_id, collection_id=None):
if collection_id is None:
response = self.client.list_indices()
index_prefix = self.indexname(bucket_id, "")
collections = [
i["name"]
for i in response["items"]
if i["name"].startswith(index_prefix)
]
else:
collections = [self.indexname(bucket_id, collection_id)]
for indexname in collections:
try:
self.client.init_index(indexname).delete()
except AlgoliaException as e: # pragma: no cover
if "HTTP Code: 404" not in str(e):
raise
def search(self, bucket_id, collection_id, **kwargs):
indexname = self.indexname(bucket_id, collection_id)
index = self.client.init_index(indexname)
query = kwargs.pop("query", "")
return index.search(query, kwargs)
def flush(self):
response = self.client.list_indices()
for index in response["items"]:
indexname = index["name"]
if indexname.startswith(self.prefix):
index = self.client.init_index(indexname)
index.clear_objects().wait()
index.delete().wait()
def isalive(self):
self.client._transporter.read(Verb.GET, "1/isalive", {}, None)
@contextmanager
def bulk(self):
bulk = BulkClient(self)
yield bulk
for indexname, requests in bulk.operations.items():
index = self.client.init_index(indexname)
res = index.batch(requests)
self.tasks.append((indexname, res[0]["taskID"]))
class BulkClient:
def __init__(self, indexer):
self.indexer = indexer
self.operations = {}
def index_record(self, bucket_id, collection_id, record, id_field="id"):
indexname = self.indexer.indexname(bucket_id, collection_id)
self.operations.setdefault(indexname, [])
obj = deepcopy(record)
record_id = obj.pop(id_field)
obj["objectID"] = record_id
self.operations[indexname].append({"action": "addObject", "body": obj})
def unindex_record(self, bucket_id, collection_id, record, id_field="id"):
indexname = self.indexer.indexname(bucket_id, collection_id)
record_id = record[id_field]
self.operations.setdefault(indexname, [])
self.operations[indexname].append(
{"action": "deleteObject", "body": {"objectID": record_id}}
)
def heartbeat(request):
"""Test that Algolia is operationnal.
:param request: current request object
:type request: :class:`~pyramid:pyramid.request.Request`
    :returns: ``True`` if everything is ok, ``False`` otherwise.
:rtype: bool
"""
indexer = request.registry.indexer
try:
indexer.isalive()
except Exception as e:
logger.exception(e)
return False
else:
return True
def load_from_config(config):
settings = config.get_settings()
application_id = settings.get("algolia.application_id")
api_key = settings.get("algolia.api_key")
if application_id is None or api_key is None:
message = (
"kinto-algolia needs kinto.algolia.application_id "
"and kinto.algolia.api_key settings to be set."
)
raise ConfigurationError(message)
prefix = settings.get("algolia.index_prefix", "kinto")
indexer = Indexer(application_id=application_id, api_key=api_key, prefix=prefix)
return indexer
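# --- Usage sketch (editor's addition): driving the Indexer and BulkClient defined
# above outside of Kinto. The application_id/api_key values are placeholders.
if __name__ == "__main__":  # pragma: no cover
    indexer = Indexer(application_id="YourAppID", api_key="YourAdminAPIKey", prefix="kinto")
    indexer.create_index("blog", "articles", settings={"searchableAttributes": ["title"]})
    with indexer.bulk() as bulk:
        # Records are queued and sent as one batch per index when the block exits.
        bulk.index_record("blog", "articles", {"id": "rec-1", "title": "Hello"})
        bulk.unindex_record("blog", "articles", {"id": "rec-0"})
    indexer.join()  # block until Algolia reports the queued tasks as applied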
|
[
"algoliasearch.search_client.SearchClient.create",
"copy.deepcopy",
"pyramid.exceptions.ConfigurationError",
"logging.getLogger"
] |
[((289, 316), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (306, 316), False, 'import logging\n'), ((429, 473), 'algoliasearch.search_client.SearchClient.create', 'SearchClient.create', (['application_id', 'api_key'], {}), '(application_id, api_key)\n', (448, 473), False, 'from algoliasearch.search_client import SearchClient\n'), ((3705, 3721), 'copy.deepcopy', 'deepcopy', (['record'], {}), '(record)\n', (3713, 3721), False, 'from copy import deepcopy\n'), ((5073, 5100), 'pyramid.exceptions.ConfigurationError', 'ConfigurationError', (['message'], {}), '(message)\n', (5091, 5100), False, 'from pyramid.exceptions import ConfigurationError\n')]
|
# Generated by Django 3.1.4 on 2021-07-13 07:22
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('stationery', '0004_auto_20210710_0307'),
]
operations = [
migrations.DeleteModel(
name='Document',
),
migrations.AddField(
model_name='booking',
name='delivery_mode',
field=models.CharField(default='self taking', max_length=255),
preserve_default=False,
),
migrations.AddField(
model_name='booking',
name='doc_cost',
field=models.PositiveIntegerField(default=500),
preserve_default=False,
),
migrations.AddField(
model_name='booking',
name='doc_type',
field=models.CharField(default='pdf', max_length=255),
preserve_default=False,
),
]
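# Editor's note (sketch): after this migration the Booking model effectively gains
# the following fields; the defaults above exist only to backfill existing rows
# (preserve_default=False strips them from the final field definitions):
#
#   delivery_mode = models.CharField(max_length=255)
#   doc_cost = models.PositiveIntegerField()
#   doc_type = models.CharField(max_length=255)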
|
[
"django.db.models.CharField",
"django.db.models.PositiveIntegerField",
"django.db.migrations.DeleteModel"
] |
[((238, 277), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Document"""'}), "(name='Document')\n", (260, 277), False, 'from django.db import migrations, models\n'), ((417, 472), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""self taking"""', 'max_length': '(255)'}), "(default='self taking', max_length=255)\n", (433, 472), False, 'from django.db import migrations, models\n'), ((631, 671), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(500)'}), '(default=500)\n', (658, 671), False, 'from django.db import migrations, models\n'), ((830, 877), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""pdf"""', 'max_length': '(255)'}), "(default='pdf', max_length=255)\n", (846, 877), False, 'from django.db import migrations, models\n')]
|
import json
import logging
import mock
from django.test import TestCase, override_settings
from grpc._cython.cygrpc import _Metadatum
from django_socio_grpc.services import Service
from django_socio_grpc.settings import grpc_settings
from .utils import FakeContext
logger = logging.getLogger()
class FakeAuthentication:
def authenticate(self, context):
return ({"email": "<EMAIL>"}, context.META.get("HTTP_AUTHORIZATION"))
class DummyService(Service):
pass
service = DummyService()
def fake_create_service(self):
return service
class TestAuthenticationUnitary(TestCase):
@override_settings(
GRPC_FRAMEWORK={
"DEFAULT_AUTHENTICATION_CLASSES": [
"django_socio_grpc.tests.test_authentication.FakeAuthentication",
],
}
)
def test_settings(self):
# test settings correctly passed to grpc_settings
self.assertEqual(grpc_settings.DEFAULT_AUTHENTICATION_CLASSES, [FakeAuthentication])
def test_perform_authentication(self):
# Create a dummyservice for unitary tests
dummy_service = DummyService()
dummy_service.context = FakeContext()
# Call func
with mock.patch(
"django_socio_grpc.services.Service.resolve_user"
) as mock_resolve_user:
mock_resolve_user.return_value = ({"email": "<EMAIL>"}, {})
dummy_service.perform_authentication()
mock_resolve_user.assert_called_once_with()
self.assertEqual(dummy_service.context.user, {"email": "<EMAIL>"})
self.assertEqual(dummy_service.context.auth, {})
def test_resolve_user(self):
dummy_service = DummyService()
dummy_service.context = FakeContext()
dummy_service.context.META = {"HTTP_AUTHORIZATION": "faketoken"}
dummy_service.authentication_classes = [FakeAuthentication]
auth_user_tuple = dummy_service.resolve_user()
self.assertEqual(auth_user_tuple, ({"email": "<EMAIL>"}, "faketoken"))
@mock.patch("django_socio_grpc.services.Service.check_permissions", mock.MagicMock())
def test_perform_authentication_called_in_before_action(self):
dummy_service = DummyService()
with mock.patch(
"django_socio_grpc.services.Service.perform_authentication"
) as mock_perform_authentication:
dummy_service.before_action()
mock_perform_authentication.assert_called_once_with()
@mock.patch(
"django_socio_grpc.servicer_proxy.ServicerProxy.create_service", new=fake_create_service
)
class TestAuthenticationIntegration(TestCase):
def setUp(self):
self.service = DummyService
self.servicer = self.service.as_servicer()
self.fake_context = FakeContext()
def dummy_method(service, request, context):
pass
self.service.DummyMethod = dummy_method
def test_user_and_token_none_if_no_auth_class(self):
self.servicer.DummyMethod(None, self.fake_context)
self.assertIsNone(service.context.user)
self.assertIsNone(service.context.auth)
def test_user_and_token_set(self):
self.service.authentication_classes = [FakeAuthentication]
metadata = (("headers", json.dumps({"Authorization": "faketoken"})),)
self.fake_context._invocation_metadata.extend((_Metadatum(k, v) for k, v in metadata))
self.servicer.DummyMethod(None, self.fake_context)
self.assertEqual(service.context.META, {"HTTP_AUTHORIZATION": "faketoken"})
self.assertEqual(service.context.user, {"email": "<EMAIL>"})
self.assertEqual(service.context.auth, "faketoken")
|
[
"mock.patch",
"json.dumps",
"grpc._cython.cygrpc._Metadatum",
"mock.MagicMock",
"django.test.override_settings",
"logging.getLogger"
] |
[((278, 297), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (295, 297), False, 'import logging\n'), ((2471, 2575), 'mock.patch', 'mock.patch', (['"""django_socio_grpc.servicer_proxy.ServicerProxy.create_service"""'], {'new': 'fake_create_service'}), "('django_socio_grpc.servicer_proxy.ServicerProxy.create_service',\n new=fake_create_service)\n", (2481, 2575), False, 'import mock\n'), ((610, 751), 'django.test.override_settings', 'override_settings', ([], {'GRPC_FRAMEWORK': "{'DEFAULT_AUTHENTICATION_CLASSES': [\n 'django_socio_grpc.tests.test_authentication.FakeAuthentication']}"}), "(GRPC_FRAMEWORK={'DEFAULT_AUTHENTICATION_CLASSES': [\n 'django_socio_grpc.tests.test_authentication.FakeAuthentication']})\n", (627, 751), False, 'from django.test import TestCase, override_settings\n'), ((2097, 2113), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (2111, 2113), False, 'import mock\n'), ((1211, 1272), 'mock.patch', 'mock.patch', (['"""django_socio_grpc.services.Service.resolve_user"""'], {}), "('django_socio_grpc.services.Service.resolve_user')\n", (1221, 1272), False, 'import mock\n'), ((2234, 2305), 'mock.patch', 'mock.patch', (['"""django_socio_grpc.services.Service.perform_authentication"""'], {}), "('django_socio_grpc.services.Service.perform_authentication')\n", (2244, 2305), False, 'import mock\n'), ((3248, 3290), 'json.dumps', 'json.dumps', (["{'Authorization': 'faketoken'}"], {}), "({'Authorization': 'faketoken'})\n", (3258, 3290), False, 'import json\n'), ((3349, 3365), 'grpc._cython.cygrpc._Metadatum', '_Metadatum', (['k', 'v'], {}), '(k, v)\n', (3359, 3365), False, 'from grpc._cython.cygrpc import _Metadatum\n')]
|
#!/usr/bin/env python3
"""Fusearch daemon"""
import argparse
import os
import signal
import sys
import logging
import textract
import functools
import progressbar
import tempfile
import pickle
import io
from fusearch.index import Index
from fusearch.model import Document
from fusearch.tokenizer import get_tokenizer, tokfreq, Tokenizer
from fusearch.util import bytes_to_str, file_generator_ext, filename_without_extension, mtime, pickle_loader
from fusearch.config import Config
from multiprocessing import Process, Queue, cpu_count
import collections.abc
progressbar_index_widgets_ = [
" [",
progressbar.Timer(format="Elapsed %(elapsed)s"),
", ",
progressbar.SimpleProgress(),
" files"
#'count: ', progressbar.Counter(),
"] ",
progressbar.Bar(),
" (",
progressbar.ETA(),
") ",
]
def cleanup() -> None:
pass
def reload_config() -> None:
pass
def config_signal_handlers() -> None:
signal.signal(signal.SIGHUP, signal.SIG_IGN)
signal.signal(signal.SIGTERM, cleanup)
signal.signal(signal.SIGUSR1, reload_config)
signal.signal(signal.SIGTTIN, signal.SIG_IGN)
signal.signal(signal.SIGTSTP, signal.SIG_IGN)
signal.signal(signal.SIGTTOU, signal.SIG_IGN)
def redirect_stream(system_stream, target_stream):
""" Redirect a system stream to a specified file.
    :param system_stream: A file object representing a standard I/O
stream.
:param target_stream: The target file object for the redirected
stream, or ``None`` to specify the null device.
:return: ``None``.
`system_stream` is a standard system stream such as
``sys.stdout``. `target_stream` is an open file object that
should replace the corresponding system stream object.
If `target_stream` is ``None``, defaults to opening the
operating system's null device and using its file descriptor.
"""
if target_stream is None:
target_fd = os.open(os.devnull, os.O_RDWR)
else:
target_fd = target_stream.fileno()
os.dup2(target_fd, system_stream.fileno())
def fork_exit_parent() -> None:
pid = os.fork()
if pid > 0:
sys.exit(0)
def daemonize(args) -> None:
fork_exit_parent()
os.setsid()
fork_exit_parent()
os.chdir("/")
config_signal_handlers()
os.umask(0o022)
redirect_stream(sys.stdin, None)
redirect_stream(sys.stdout, open("/tmp/fusearch.out", "a"))
redirect_stream(sys.stderr, open("/tmp/fusearch.err", "a"))
    fusearch_main(args)
def config_argparse() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(description="fusearch daemon", epilog="")
parser.add_argument("-f", "--foreground", action="store_true", help="Don't daemonize")
parser.add_argument("-c", "--config", type=str, default="/etc/fusearch/config.yaml", help="config file")
return parser
def to_text(file: str) -> str:
assert os.path.isfile(file)
try:
txt_b = textract.process(file, method="pdftotext")
# TODO more intelligent decoding? there be dragons
txt = bytes_to_str(txt_b)
# print(file)
# print(len(txt))
# print(txt[:80])
# print('-------------------')
except Exception as e:
txt = ""
logging.exception("Exception while extracting text from '%s'", file)
# TODO mark it as failed instead of empty text
return txt
def document_from_file(file: str, tokenizer: Tokenizer) -> Document:
mtime_latest = mtime(file)
filename = filename_without_extension(file)
txt = filename + "\n" + to_text(file)
    # TODO: detect language and check that the document makes sense, OCR returns garbage sometimes
document = Document(url=file, filename=filename, content=txt, tokfreq=tokfreq(tokenizer(txt)), mtime=mtime_latest)
return document
def needs_indexing(index: Index, file: str) -> bool:
mtime_latest = mtime(file)
# document = index.document_from_url(file)
mtime_last_known = index.mtime(file)
    if not mtime_last_known or mtime_latest > mtime_last_known:
# logging.debug("needs_indexing: need '%s'", file)
return True
else:
# logging.debug("needs_indexing: NOT need '%s'", file)
return False
def get_index(path: str, config: Config) -> Index:
index_db = os.path.join(path, ".fusearch.db")
index = Index({"provider": "sqlite", "filename": index_db, "create_db": True}, tokenizer=get_tokenizer(config))
logging.debug("get_index: '%s' %d docs", index_db, index.doc_count)
return index
class NeedsIndexFileGenerator(object):
def __init__(self, path, config):
self.path = path
self.config = config
self.index = get_index(path, config)
assert os.path.isdir(path)
def __call__(self) -> collections.abc.Iterable:
""":returns a generator of files which are updated from the mtime in the index"""
file_needs_indexing = functools.partial(needs_indexing, self.index)
return filter(file_needs_indexing, file_generator_ext(self.path, self.config.include_extensions))
def file_producer(path: str, config: Config, file_queue: Queue, file_inventory: io.IOBase) -> None:
for file in pickle_loader(file_inventory):
# logging.debug("file_producer: %s", file)
file_queue.put(file)
logging.debug("file_producer is done")
def text_extract(config: Config, file_queue: Queue, document_queue: Queue):
# logging.debug("text_extract started")
tokenizer = get_tokenizer(config)
while True:
file = file_queue.get()
if file is None:
logging.debug("text_extract is done")
return
logging.debug(
"text_extract: file_queue.qsize %d document_queue.qsize %d", file_queue.qsize(), document_queue.qsize()
)
logging.debug("text_extract: '%s'", file)
# logging.debug("text_extract: %s", file)
document = document_from_file(file, tokenizer)
document_queue.put(document)
def document_consumer(path: str, config: Config, document_queue: Queue, file_count: int) -> None:
index = get_index(path, config)
if config.verbose:
pbar = progressbar.ProgressBar(max_value=file_count, widgets=progressbar_index_widgets_)
file_i = 0
while True:
doc = document_queue.get()
logging.debug("document_consumer(%d): document_queue.qsize %d", os.getpid(), document_queue.qsize())
if doc is None:
logging.debug("Document consumer, no more elements in the queue")
if config.verbose:
pbar.finish()
return
try:
index.add_document(doc)
logging.debug("document_consumer(%d): added %s", os.getpid(), doc.url)
except Exception as e:
logging.exception("document_consumer: index.add_document exception. Document[%s]", doc.url)
if config.verbose:
pbar.update(file_i)
file_i += 1
def gather_files(path, config, file_inventory) -> int:
""":returns file count"""
if not os.path.isdir(path):
logging.error("Not a directory: '%s', skipping indexing", path)
        return 0
logging.info("Indexing %s", path)
logging.info("Calculating number of files to index (.=100files)")
if config.verbose:
widgets = [
" [",
progressbar.Timer(format="Elapsed %(elapsed)s"),
" ",
"count: ",
progressbar.Counter(),
"] ",
progressbar.BouncingBar(),
]
pbar = progressbar.ProgressBar(widgets=widgets)
file_count = 0
for file in NeedsIndexFileGenerator(path, config)():
pickle.dump(file, file_inventory)
file_count += 1
# if config.verbose and (file_count % 100) == 0:
# sys.stdout.write('.')
# sys.stdout.flush()
if config.verbose:
pbar.update(file_count)
# if config.verbose:
# sys.stdout.write('\n')
if config.verbose:
pbar.finish()
file_inventory.seek(0)
return file_count
def index_do(path, config) -> None:
file_inventory = tempfile.TemporaryFile()
file_count = gather_files(path, config, file_inventory)
logging.info("%d files to process", file_count)
if config.parallel_extraction:
index_parallel(path, config, file_count, file_inventory)
else:
index_serial(path, config, file_count, file_inventory)
def index_parallel(path: str, config: Config, file_count: int, file_inventory) -> None:
#
# file_producer -> N * test_extract -> document_consumer
#
# TODO: check that processes are alive to prevent deadlocks on exceptions in children
file_queue = Queue(cpu_count() * 8)
document_queue = Queue(256)
text_extract_procs = []
file_producer_proc = Process(
name="file producer", target=file_producer, daemon=True, args=(path, config, file_queue, file_inventory)
)
file_producer_proc.start()
document_consumer_proc = Process(
name="document consumer", target=document_consumer, daemon=True, args=(path, config, document_queue, file_count)
)
for i in range(cpu_count()):
p = Process(
name="text extractor {}".format(i),
target=text_extract,
daemon=True,
args=(config, file_queue, document_queue),
)
text_extract_procs.append(p)
p.start()
document_consumer_proc.start()
logging.debug("child processes started")
logging.debug("joining producer")
file_producer_proc.join()
logging.debug("joining text_extract")
for p in text_extract_procs:
file_queue.put(None)
for p in text_extract_procs:
logging.debug("joining text_extract %s", p)
p.join()
document_queue.put(None)
logging.debug("joining document_consumer")
document_consumer_proc.join()
logging.info("Parallel indexing finished")
def index_serial(path, config, file_count, file_inventory):
if config.verbose:
pbar = progressbar.ProgressBar(max_value=file_count, widgets=progressbar_index_widgets_)
file_i = 0
tokenizer = get_tokenizer(config)
logging.info("Indexing started")
index = get_index(path, config)
for file in pickle_loader(file_inventory):
doc = document_from_file(file, tokenizer)
try:
index.add_document(doc)
except Exception as e:
logging.exception("index_serial: index.add_document exception. Document[%s]", doc.url)
if config.verbose:
pbar.update(file_i)
file_i += 1
if config.verbose:
pbar.finish()
def fusearch_main(args) -> int:
logging.info("reading config from %s", args.config)
config = Config.from_file(args.config)
logging.info("%s", config)
for path in config.index_dirs:
index_do(path, config)
def script_name() -> str:
""":returns: script name with leading paths removed"""
return os.path.split(sys.argv[0])[1]
def config_logging() -> None:
import time
logging.getLogger().setLevel(logging.DEBUG)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.basicConfig(format="{}: %(asctime)sZ %(name)s %(levelname)s %(message)s".format(script_name()))
logging.Formatter.converter = time.gmtime
def main() -> int:
config_logging()
parser = config_argparse()
args = parser.parse_args()
if not args.foreground:
        return daemonize(args)
fusearch_main(args)
if __name__ == "__main__":
sys.exit(main())
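# --- Example config (editor's addition): the exact schema is defined by
# fusearch.config.Config; the keys below are inferred from the attribute
# accesses in this file (index_dirs, include_extensions, parallel_extraction,
# verbose) and the values are illustrative only.
#
#   # /etc/fusearch/config.yaml
#   index_dirs:
#     - /home/user/Documents
#   include_extensions:
#     - pdf
#     - epub
#   parallel_extraction: true
#   verbose: true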
|
[
"fusearch.util.filename_without_extension",
"pickle.dump",
"progressbar.Counter",
"argparse.ArgumentParser",
"fusearch.util.mtime",
"fusearch.tokenizer.get_tokenizer",
"fusearch.util.file_generator_ext",
"progressbar.ETA",
"os.path.isfile",
"fusearch.config.Config.from_file",
"multiprocessing.Queue",
"progressbar.Timer",
"textract.process",
"os.path.join",
"os.chdir",
"multiprocessing.cpu_count",
"logging.error",
"fusearch.util.pickle_loader",
"progressbar.Bar",
"os.umask",
"os.fork",
"fusearch.util.bytes_to_str",
"functools.partial",
"os.open",
"signal.signal",
"progressbar.ProgressBar",
"sys.exit",
"logging.exception",
"logging.debug",
"os.getpid",
"progressbar.BouncingBar",
"os.path.isdir",
"logging.info",
"tempfile.TemporaryFile",
"os.setsid",
"multiprocessing.Process",
"os.path.split",
"progressbar.SimpleProgress",
"logging.getLogger"
] |
[((606, 653), 'progressbar.Timer', 'progressbar.Timer', ([], {'format': '"""Elapsed %(elapsed)s"""'}), "(format='Elapsed %(elapsed)s')\n", (623, 653), False, 'import progressbar\n'), ((669, 697), 'progressbar.SimpleProgress', 'progressbar.SimpleProgress', ([], {}), '()\n', (695, 697), False, 'import progressbar\n'), ((765, 782), 'progressbar.Bar', 'progressbar.Bar', ([], {}), '()\n', (780, 782), False, 'import progressbar\n'), ((798, 815), 'progressbar.ETA', 'progressbar.ETA', ([], {}), '()\n', (813, 815), False, 'import progressbar\n'), ((947, 991), 'signal.signal', 'signal.signal', (['signal.SIGHUP', 'signal.SIG_IGN'], {}), '(signal.SIGHUP, signal.SIG_IGN)\n', (960, 991), False, 'import signal\n'), ((996, 1034), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'cleanup'], {}), '(signal.SIGTERM, cleanup)\n', (1009, 1034), False, 'import signal\n'), ((1039, 1083), 'signal.signal', 'signal.signal', (['signal.SIGUSR1', 'reload_config'], {}), '(signal.SIGUSR1, reload_config)\n', (1052, 1083), False, 'import signal\n'), ((1088, 1133), 'signal.signal', 'signal.signal', (['signal.SIGTTIN', 'signal.SIG_IGN'], {}), '(signal.SIGTTIN, signal.SIG_IGN)\n', (1101, 1133), False, 'import signal\n'), ((1138, 1183), 'signal.signal', 'signal.signal', (['signal.SIGTSTP', 'signal.SIG_IGN'], {}), '(signal.SIGTSTP, signal.SIG_IGN)\n', (1151, 1183), False, 'import signal\n'), ((1188, 1233), 'signal.signal', 'signal.signal', (['signal.SIGTTOU', 'signal.SIG_IGN'], {}), '(signal.SIGTTOU, signal.SIG_IGN)\n', (1201, 1233), False, 'import signal\n'), ((2160, 2169), 'os.fork', 'os.fork', ([], {}), '()\n', (2167, 2169), False, 'import os\n'), ((2260, 2271), 'os.setsid', 'os.setsid', ([], {}), '()\n', (2269, 2271), False, 'import os\n'), ((2299, 2312), 'os.chdir', 'os.chdir', (['"""/"""'], {}), "('/')\n", (2307, 2312), False, 'import os\n'), ((2346, 2358), 'os.umask', 'os.umask', (['(18)'], {}), '(18)\n', (2354, 2358), False, 'import os\n'), ((2612, 2677), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""fusearch daemon"""', 'epilog': '""""""'}), "(description='fusearch daemon', epilog='')\n", (2635, 2677), False, 'import argparse\n'), ((2940, 2960), 'os.path.isfile', 'os.path.isfile', (['file'], {}), '(file)\n', (2954, 2960), False, 'import os\n'), ((3516, 3527), 'fusearch.util.mtime', 'mtime', (['file'], {}), '(file)\n', (3521, 3527), False, 'from fusearch.util import bytes_to_str, file_generator_ext, filename_without_extension, mtime, pickle_loader\n'), ((3543, 3575), 'fusearch.util.filename_without_extension', 'filename_without_extension', (['file'], {}), '(file)\n', (3569, 3575), False, 'from fusearch.util import bytes_to_str, file_generator_ext, filename_without_extension, mtime, pickle_loader\n'), ((3960, 3971), 'fusearch.util.mtime', 'mtime', (['file'], {}), '(file)\n', (3965, 3971), False, 'from fusearch.util import bytes_to_str, file_generator_ext, filename_without_extension, mtime, pickle_loader\n'), ((4386, 4420), 'os.path.join', 'os.path.join', (['path', '""".fusearch.db"""'], {}), "(path, '.fusearch.db')\n", (4398, 4420), False, 'import os\n'), ((4541, 4608), 'logging.debug', 'logging.debug', (['"""get_index: \'%s\' %d docs"""', 'index_db', 'index.doc_count'], {}), '("get_index: \'%s\' %d docs", index_db, index.doc_count)\n', (4554, 4608), False, 'import logging\n'), ((5282, 5311), 'fusearch.util.pickle_loader', 'pickle_loader', (['file_inventory'], {}), '(file_inventory)\n', (5295, 5311), False, 'from fusearch.util import bytes_to_str, file_generator_ext, filename_without_extension, mtime, pickle_loader\n'), ((5397, 5435), 'logging.debug', 'logging.debug', (['"""file_producer is done"""'], {}), "('file_producer is done')\n", (5410, 5435), False, 'import logging\n'), ((5574, 5595), 'fusearch.tokenizer.get_tokenizer', 'get_tokenizer', (['config'], {}), '(config)\n', (5587, 5595), False, 'from fusearch.tokenizer import get_tokenizer, tokfreq, Tokenizer\n'), ((7248, 7281), 'logging.info', 'logging.info', (['"""Indexing %s"""', 'path'], {}), "('Indexing %s', path)\n", (7260, 7281), False, 'import logging\n'), ((7286, 7351), 'logging.info', 'logging.info', (['"""Calculating number of files to index (.=100files)"""'], {}), "('Calculating number of files to index (.=100files)')\n", (7298, 7351), False, 'import logging\n'), ((8211, 8235), 'tempfile.TemporaryFile', 'tempfile.TemporaryFile', ([], {}), '()\n', (8233, 8235), False, 'import tempfile\n'), ((8300, 8347), 'logging.info', 'logging.info', (['"""%d files to process"""', 'file_count'], {}), "('%d files to process', file_count)\n", (8312, 8347), False, 'import logging\n'), ((8835, 8845), 'multiprocessing.Queue', 'Queue', (['(256)'], {}), '(256)\n', (8840, 8845), False, 'from multiprocessing import Process, Queue, cpu_count\n'), ((8899, 9016), 'multiprocessing.Process', 'Process', ([], {'name': '"""file producer"""', 'target': 'file_producer', 'daemon': '(True)', 'args': '(path, config, file_queue, file_inventory)'}), "(name='file producer', target=file_producer, daemon=True, args=(path,\n config, file_queue, file_inventory))\n", (8906, 9016), False, 'from multiprocessing import Process, Queue, cpu_count\n'), ((9088, 9213), 'multiprocessing.Process', 'Process', ([], {'name': '"""document consumer"""', 'target': 'document_consumer', 'daemon': '(True)', 'args': '(path, config, document_queue, file_count)'}), "(name='document consumer', target=document_consumer, daemon=True,\n args=(path, config, document_queue, file_count))\n", (9095, 9213), False, 'from multiprocessing import Process, Queue, cpu_count\n'), ((9545, 9585), 'logging.debug', 'logging.debug', (['"""child processes started"""'], {}), "('child processes started')\n", (9558, 9585), False, 'import logging\n'), ((9591, 9624), 'logging.debug', 'logging.debug', (['"""joining producer"""'], {}), "('joining producer')\n", (9604, 9624), False, 'import logging\n'), ((9659, 9696), 'logging.debug', 'logging.debug', (['"""joining text_extract"""'], {}), "('joining text_extract')\n", (9672, 9696), False, 'import logging\n'), ((9894, 9936), 'logging.debug', 'logging.debug', (['"""joining document_consumer"""'], {}), "('joining document_consumer')\n", (9907, 9936), False, 'import logging\n'), ((9975, 10017), 'logging.info', 'logging.info', (['"""Parallel indexing finished"""'], {}), "('Parallel indexing finished')\n", (9987, 10017), False, 'import logging\n'), ((10231, 10252), 'fusearch.tokenizer.get_tokenizer', 'get_tokenizer', (['config'], {}), '(config)\n', (10244, 10252), False, 'from fusearch.tokenizer import get_tokenizer, tokfreq, Tokenizer\n'), ((10257, 10289), 'logging.info', 'logging.info', (['"""Indexing started"""'], {}), "('Indexing started')\n", (10269, 10289), False, 'import logging\n'), ((10342, 10371), 'fusearch.util.pickle_loader', 'pickle_loader', (['file_inventory'], {}), '(file_inventory)\n', (10355, 10371), False, 'from fusearch.util import bytes_to_str, file_generator_ext, filename_without_extension, mtime, pickle_loader\n'), ((10764, 10815), 'logging.info', 'logging.info', (['"""reading config from %s"""', 'args.config'], {}), "('reading config from %s', args.config)\n", (10776, 10815), False, 'import logging\n'), ((10829, 10858), 'fusearch.config.Config.from_file', 'Config.from_file', (['args.config'], {}), '(args.config)\n', (10845, 10858), False, 'from fusearch.config import Config\n'), ((10863, 10889), 'logging.info', 'logging.info', (['"""%s"""', 'config'], {}), "('%s', config)\n", (10875, 10889), False, 'import logging\n'), ((1985, 2015), 'os.open', 'os.open', (['os.devnull', 'os.O_RDWR'], {}), '(os.devnull, os.O_RDWR)\n', (1992, 2015), False, 'import os\n'), ((2194, 2205), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2202, 2205), False, 'import sys\n'), ((2986, 3028), 'textract.process', 'textract.process', (['file'], {'method': '"""pdftotext"""'}), "(file, method='pdftotext')\n", (3002, 3028), False, 'import textract\n'), ((3102, 3121), 'fusearch.util.bytes_to_str', 'bytes_to_str', (['txt_b'], {}), '(txt_b)\n', (3114, 3121), False, 'from fusearch.util import bytes_to_str, file_generator_ext, filename_without_extension, mtime, pickle_loader\n'), ((4819, 4838), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (4832, 4838), False, 'import os\n'), ((5012, 5057), 'functools.partial', 'functools.partial', (['needs_indexing', 'self.index'], {}), '(needs_indexing, self.index)\n', (5029, 5057), False, 'import functools\n'), ((5895, 5936), 'logging.debug', 'logging.debug', (['"""text_extract: \'%s\'"""', 'file'], {}), '("text_extract: \'%s\'", file)\n', (5908, 5936), False, 'import logging\n'), ((6253, 6339), 'progressbar.ProgressBar', 'progressbar.ProgressBar', ([], {'max_value': 'file_count', 'widgets': 'progressbar_index_widgets_'}), '(max_value=file_count, widgets=\n progressbar_index_widgets_)\n', (6276, 6339), False, 'import progressbar\n'), ((7136, 7155), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (7149, 7155), False, 'import os\n'), ((7165, 7228), 'logging.error', 'logging.error', (['"""Not a directory: \'%s\', skipping indexing"""', 'path'], {}), '("Not a directory: \'%s\', skipping indexing", path)\n', (7178, 7228), False, 'import logging\n'), ((7631, 7671), 'progressbar.ProgressBar', 'progressbar.ProgressBar', ([], {'widgets': 'widgets'}), '(widgets=widgets)\n', (7654, 7671), False, 'import progressbar\n'), ((7756, 7789), 'pickle.dump', 'pickle.dump', (['file', 'file_inventory'], {}), '(file, file_inventory)\n', (7767, 7789), False, 'import pickle\n'), ((9244, 9255), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (9253, 9255), False, 'from multiprocessing import Process, Queue, cpu_count\n'), ((9800, 9843), 'logging.debug', 'logging.debug', (['"""joining text_extract %s"""', 'p'], {}), "('joining text_extract %s', p)\n", (9813, 9843), False, 'import logging\n'), ((10118, 10204), 'progressbar.ProgressBar', 'progressbar.ProgressBar', ([], {'max_value': 'file_count', 'widgets': 'progressbar_index_widgets_'}), '(max_value=file_count, widgets=\n progressbar_index_widgets_)\n', (10141, 10204), False, 'import progressbar\n'), ((11054, 11080), 'os.path.split', 'os.path.split', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (11067, 11080), False, 'import os\n'), ((3287, 3355), 'logging.exception', 'logging.exception', (['"""Exception while extracting text from \'%s\'"""', 'file'], {}), '("Exception while extracting text from \'%s\'", file)\n', (3304, 3355), False, 'import logging\n'), ((4514, 4535), 'fusearch.tokenizer.get_tokenizer', 'get_tokenizer', (['config'], {}), '(config)\n', (4527, 4535), False, 'from fusearch.tokenizer import get_tokenizer, tokfreq, Tokenizer\n'), ((5101, 5162), 'fusearch.util.file_generator_ext', 'file_generator_ext', (['self.path', 'self.config.include_extensions'], {}), '(self.path, self.config.include_extensions)\n', (5119, 5162), False, 'from fusearch.util import bytes_to_str, file_generator_ext, filename_without_extension, mtime, pickle_loader\n'), ((5681, 5718), 'logging.debug', 'logging.debug', (['"""text_extract is done"""'], {}), "('text_extract is done')\n", (5694, 5718), False, 'import logging\n'), ((6473, 6484), 'os.getpid', 'os.getpid', ([], {}), '()\n', (6482, 6484), False, 'import os\n'), ((6546, 6611), 'logging.debug', 'logging.debug', (['"""Document consumer, no more elements in the queue"""'], {}), "('Document consumer, no more elements in the queue')\n", (6559, 6611), False, 'import logging\n'), ((7425, 7472), 'progressbar.Timer', 'progressbar.Timer', ([], {'format': '"""Elapsed %(elapsed)s"""'}), "(format='Elapsed %(elapsed)s')\n", (7442, 7472), False, 'import progressbar\n'), ((7526, 7547), 'progressbar.Counter', 'progressbar.Counter', ([], {}), '()\n', (7545, 7547), False, 'import progressbar\n'), ((7579, 7604), 'progressbar.BouncingBar', 'progressbar.BouncingBar', ([], {}), '()\n', (7602, 7604), False, 'import progressbar\n'), ((8797, 8808), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (8806, 8808), False, 'from multiprocessing import Process, Queue, cpu_count\n'), ((11137, 11156), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (11154, 11156), False, 'import logging\n'), ((11185, 11214), 'logging.getLogger', 'logging.getLogger', (['"""requests"""'], {}), "('requests')\n", (11202, 11214), False, 'import logging\n'), ((6802, 6813), 'os.getpid', 'os.getpid', ([], {}), '()\n', (6811, 6813), False, 'import os\n'), ((6867, 6963), 'logging.exception', 'logging.exception', (['"""document_consumer: index.add_document exception. Document[%s]"""', 'doc.url'], {}), "(\n 'document_consumer: index.add_document exception. Document[%s]', doc.url)\n", (6884, 6963), False, 'import logging\n'), ((10515, 10605), 'logging.exception', 'logging.exception', (['"""index_serial: index.add_document exception. Document[%s]"""', 'doc.url'], {}), "('index_serial: index.add_document exception. Document[%s]',\n doc.url)\n", (10532, 10605), False, 'import logging\n')]
|
#!/usr/bin/env python
import rospy
from pyproj import Proj, transform
import numpy as np
from math import cos, sin, pi
from geometry_msgs.msg import Pose
from std_msgs.msg import Float64
from sensor_msgs.msg import NavSatFix
from ublox_msgs.msg import NavPVT
#Projection definition
#UTM-K
proj_UTMK = Proj(init='epsg:5178')
#WGS1984
proj_WGS84 = Proj(init='epsg:4326')
class Tm_heading:
def __init__(self):
self.pubtm = rospy.Publisher('current_tm', Pose, queue_size=1)
self.pubhead = rospy.Publisher('heading', Float64, queue_size=1)
self.subtm = rospy.Subscriber("ublox_gps/fix", NavSatFix, self.tm,queue_size=1)
self.subheading = rospy.Subscriber("ublox_gps/navpvt", NavPVT, self.heading,queue_size=1)
self.run()
def tm(self,Fix):
current_tm= Pose()
lon=Fix.longitude
lat=Fix.latitude
x, y = transform(proj_WGS84, proj_UTMK, lon, lat)
current_tm.position.x = x
current_tm.position.y = y
self.pubtm.publish(current_tm)
def heading(self,head):
heading = Float64()
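        # NavPVT.heading is the heading of motion in 1e-5 degree units,
        # clockwise from north; the line below converts it to a counter-
        # clockwise angle from east in radians, with a 2*pi offset to keep it
        # positive (our reading of this node's convention -- verify on your
        # setup).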
heading.data=5*pi/2 - np.deg2rad(float(head.heading / 100000))
self.pubhead.publish(heading)
def run(self):
rate=rospy.Rate(1)
while not rospy.is_shutdown():
rate.sleep()
def main():
rospy.init_node('tm_heading',anonymous=True)
Tm_heading()
if __name__ == "__main__":
main()
|
[
"rospy.Subscriber",
"rospy.Publisher",
"rospy.Rate",
"std_msgs.msg.Float64",
"rospy.is_shutdown",
"pyproj.Proj",
"rospy.init_node",
"geometry_msgs.msg.Pose",
"pyproj.transform"
] |
[((302, 324), 'pyproj.Proj', 'Proj', ([], {'init': '"""epsg:5178"""'}), "(init='epsg:5178')\n", (306, 324), False, 'from pyproj import Proj, transform\n'), ((347, 369), 'pyproj.Proj', 'Proj', ([], {'init': '"""epsg:4326"""'}), "(init='epsg:4326')\n", (351, 369), False, 'from pyproj import Proj, transform\n'), ((1336, 1381), 'rospy.init_node', 'rospy.init_node', (['"""tm_heading"""'], {'anonymous': '(True)'}), "('tm_heading', anonymous=True)\n", (1351, 1381), False, 'import rospy\n'), ((434, 483), 'rospy.Publisher', 'rospy.Publisher', (['"""current_tm"""', 'Pose'], {'queue_size': '(1)'}), "('current_tm', Pose, queue_size=1)\n", (449, 483), False, 'import rospy\n'), ((507, 556), 'rospy.Publisher', 'rospy.Publisher', (['"""heading"""', 'Float64'], {'queue_size': '(1)'}), "('heading', Float64, queue_size=1)\n", (522, 556), False, 'import rospy\n'), ((578, 645), 'rospy.Subscriber', 'rospy.Subscriber', (['"""ublox_gps/fix"""', 'NavSatFix', 'self.tm'], {'queue_size': '(1)'}), "('ublox_gps/fix', NavSatFix, self.tm, queue_size=1)\n", (594, 645), False, 'import rospy\n'), ((671, 743), 'rospy.Subscriber', 'rospy.Subscriber', (['"""ublox_gps/navpvt"""', 'NavPVT', 'self.heading'], {'queue_size': '(1)'}), "('ublox_gps/navpvt', NavPVT, self.heading, queue_size=1)\n", (687, 743), False, 'import rospy\n'), ((805, 811), 'geometry_msgs.msg.Pose', 'Pose', ([], {}), '()\n', (809, 811), False, 'from geometry_msgs.msg import Pose\n'), ((878, 920), 'pyproj.transform', 'transform', (['proj_WGS84', 'proj_UTMK', 'lon', 'lat'], {}), '(proj_WGS84, proj_UTMK, lon, lat)\n', (887, 920), False, 'from pyproj import Proj, transform\n'), ((1075, 1084), 'std_msgs.msg.Float64', 'Float64', ([], {}), '()\n', (1082, 1084), False, 'from std_msgs.msg import Float64\n'), ((1227, 1240), 'rospy.Rate', 'rospy.Rate', (['(1)'], {}), '(1)\n', (1237, 1240), False, 'import rospy\n'), ((1259, 1278), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (1276, 1278), False, 'import rospy\n')]
|
# Test ability of OWSLib.wfs to interact with USDA SSURGO WFS 1.0.0 web service
# Contact e-mail: <EMAIL>
import unittest
from owslib.wfs import WebFeatureService
class USDASSURGOWFSTestCase(unittest.TestCase):
def runTest(self):
minX = -76.766960
minY = 39.283611
maxX = -76.684120
maxY = 39.338394
filter = "<Filter><BBOX><PropertyName>Geometry</PropertyName> <Box srsName='EPSG:4326'><coordinates>%f,%f %f,%f</coordinates> </Box></BBOX></Filter>" % (minX, minY, maxX, maxY)
wfs = WebFeatureService('http://SDMDataAccess.nrcs.usda.gov/Spatial/SDMWGS84Geographic.wfs', version='1.0.0')
response = wfs.getfeature(typename=('MapunitPolyExtended',), filter=filter, propertyname=None)
self.assertTrue(response.read().find('<wfs:FeatureCollection') > 0,
'Unable to find feature dataset in WFS response')
|
[
"owslib.wfs.WebFeatureService"
] |
[((551, 663), 'owslib.wfs.WebFeatureService', 'WebFeatureService', (['"""http://SDMDataAccess.nrcs.usda.gov/Spatial/SDMWGS84Geographic.wfs"""'], {'version': '"""1.0.0"""'}), "(\n 'http://SDMDataAccess.nrcs.usda.gov/Spatial/SDMWGS84Geographic.wfs',\n version='1.0.0')\n", (568, 663), False, 'from owslib.wfs import WebFeatureService\n')]
|
import inspect
from itertools import islice
import pytest
import isle.show
from isle import Show
def test_get_latest():
show = isle.show.get_latest()
assert isinstance(show, Show)
def test_get_popular():
shows = isle.show.get_popular()
assert inspect.isgenerator(shows)
show = next(shows)
assert isinstance(show, Show)
def test_get_top_rated():
shows = isle.show.get_top_rated()
assert inspect.isgenerator(shows)
show = next(shows)
assert isinstance(show, Show)
def test_get_airing_today():
shows = isle.show.get_airing_today()
assert inspect.isgenerator(shows)
show = next(shows)
assert isinstance(show, Show)
def test_get_on_the_air():
shows = isle.show.get_on_the_air()
assert inspect.isgenerator(shows)
show = next(shows)
assert isinstance(show, Show)
|
[
"inspect.isgenerator"
] |
[((265, 291), 'inspect.isgenerator', 'inspect.isgenerator', (['shows'], {}), '(shows)\n', (284, 291), False, 'import inspect\n'), ((426, 452), 'inspect.isgenerator', 'inspect.isgenerator', (['shows'], {}), '(shows)\n', (445, 452), False, 'import inspect\n'), ((593, 619), 'inspect.isgenerator', 'inspect.isgenerator', (['shows'], {}), '(shows)\n', (612, 619), False, 'import inspect\n'), ((756, 782), 'inspect.isgenerator', 'inspect.isgenerator', (['shows'], {}), '(shows)\n', (775, 782), False, 'import inspect\n')]
|
import numpy
from discrete_fuzzy_operators.base.operators.binary_operators.fuzzy_discrete_binary_operator import \
FuzzyDiscreteBinaryOperator
from discrete_fuzzy_operators.builtin_operators.discrete.tnorms import TnormExamples
if __name__ == "__main__":
# EXAMPLE: Plot of some known t-norms.
lukasiewicz_operator = TnormExamples.get_tnorm(tnorm=TnormExamples.LUKASIEWICZ, n=7)
lukasiewicz_operator.plot_operator(figure_size=(700, 700), figure_title="Lukasiewicz t-norm")
lukasiewicz_operator.plot_three_dimensional_operator(draw_diagonal=True,
figure_size=(700, 700), figure_title="Lukasiewicz tensor")
drastic_operator = TnormExamples.get_tnorm(tnorm=TnormExamples.DRASTIC, n=7)
drastic_operator.plot_operator(figure_size=(700, 700), figure_title="Drastic t-norm")
nilpotent_operator = TnormExamples.get_tnorm(tnorm=TnormExamples.NILPOTENT_MINIMUM, n=7)
nilpotent_operator.plot_operator(figure_size=(700, 700), figure_title="Nilpotent minimum t-norm")
|
[
"discrete_fuzzy_operators.builtin_operators.discrete.tnorms.TnormExamples.get_tnorm"
] |
[((332, 393), 'discrete_fuzzy_operators.builtin_operators.discrete.tnorms.TnormExamples.get_tnorm', 'TnormExamples.get_tnorm', ([], {'tnorm': 'TnormExamples.LUKASIEWICZ', 'n': '(7)'}), '(tnorm=TnormExamples.LUKASIEWICZ, n=7)\n', (355, 393), False, 'from discrete_fuzzy_operators.builtin_operators.discrete.tnorms import TnormExamples\n'), ((709, 766), 'discrete_fuzzy_operators.builtin_operators.discrete.tnorms.TnormExamples.get_tnorm', 'TnormExamples.get_tnorm', ([], {'tnorm': 'TnormExamples.DRASTIC', 'n': '(7)'}), '(tnorm=TnormExamples.DRASTIC, n=7)\n', (732, 766), False, 'from discrete_fuzzy_operators.builtin_operators.discrete.tnorms import TnormExamples\n'), ((883, 950), 'discrete_fuzzy_operators.builtin_operators.discrete.tnorms.TnormExamples.get_tnorm', 'TnormExamples.get_tnorm', ([], {'tnorm': 'TnormExamples.NILPOTENT_MINIMUM', 'n': '(7)'}), '(tnorm=TnormExamples.NILPOTENT_MINIMUM, n=7)\n', (906, 950), False, 'from discrete_fuzzy_operators.builtin_operators.discrete.tnorms import TnormExamples\n')]
|
__license__ = 'MIT License <http://www.opensource.org/licenses/mit-license.php>'
__author__ = '<NAME> <<EMAIL>>'
__docformat__ = 'epytext'
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.db import transaction
from publications.models import Type
from publications.utils import import_bibtex as do_import_bibtex
# mapping of months
MONTHS = {
'jan': 1, 'january': 1,
'feb': 2, 'february': 2,
'mar': 3, 'march': 3,
'apr': 4, 'april': 4,
'may': 5,
'jun': 6, 'june': 6,
'jul': 7, 'july': 7,
'aug': 8, 'august': 8,
'sep': 9, 'september': 9,
'oct': 10, 'october': 10,
'nov': 11, 'november': 11,
'dec': 12, 'december': 12}
def import_bibtex(request):
if request.method == 'POST':
# try to import BibTex
bibtex = request.POST['bibliography']
with transaction.atomic():
publications, errors = do_import_bibtex(bibtex)
status = messages.SUCCESS
if len(publications) == 0:
status = messages.ERROR
msg = 'No publications were added, %i errors occurred' % len(errors)
elif len(publications) > 1:
msg = 'Successfully added %i publications (%i skipped due to errors)' % (len(publications), len(errors))
else:
msg = 'Successfully added %i publication (%i error(s) occurred)' % (len(publications), len(errors))
# show message
messages.add_message(request, status, msg)
for error in errors:
messages.add_message(request, messages.ERROR, error)
# redirect to publication listing
return HttpResponseRedirect('../')
else:
return render_to_response(
'admin/publications/import_bibtex.html', {
'title': 'Import BibTex',
'types': Type.objects.all(),
'request': request},
RequestContext(request))
import_bibtex = staff_member_required(import_bibtex)
|
[
"publications.utils.import_bibtex",
"django.contrib.admin.views.decorators.staff_member_required",
"django.contrib.messages.add_message",
"publications.models.Type.objects.all",
"django.http.HttpResponseRedirect",
"django.db.transaction.atomic",
"django.template.RequestContext"
] |
[((1897, 1933), 'django.contrib.admin.views.decorators.staff_member_required', 'staff_member_required', (['import_bibtex'], {}), '(import_bibtex)\n', (1918, 1933), False, 'from django.contrib.admin.views.decorators import staff_member_required\n'), ((1472, 1514), 'django.contrib.messages.add_message', 'messages.add_message', (['request', 'status', 'msg'], {}), '(request, status, msg)\n', (1492, 1514), False, 'from django.contrib import messages\n'), ((1643, 1670), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['"""../"""'], {}), "('../')\n", (1663, 1670), False, 'from django.http import HttpResponseRedirect\n'), ((973, 993), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (991, 993), False, 'from django.db import transaction\n'), ((1021, 1045), 'publications.utils.import_bibtex', 'do_import_bibtex', (['bibtex'], {}), '(bibtex)\n', (1037, 1045), True, 'from publications.utils import import_bibtex as do_import_bibtex\n'), ((1544, 1596), 'django.contrib.messages.add_message', 'messages.add_message', (['request', 'messages.ERROR', 'error'], {}), '(request, messages.ERROR, error)\n', (1564, 1596), False, 'from django.contrib import messages\n'), ((1853, 1876), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (1867, 1876), False, 'from django.template import RequestContext\n'), ((1803, 1821), 'publications.models.Type.objects.all', 'Type.objects.all', ([], {}), '()\n', (1819, 1821), False, 'from publications.models import Type\n')]
|
# Copyright 2019 NetApp, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from manila import context
from manila.db import api as db_api
from manila.scheduler.weighers import base_host
from manila.share import utils as share_utils
class HostAffinityWeigher(base_host.BaseHostWeigher):
def _weigh_object(self, obj, weight_properties):
"""Weigh hosts based on their proximity to the source's share pool.
If no snapshot_id was provided will return 0, otherwise, if source and
destination hosts are located on:
1. same back ends and pools: host is a perfect choice (100)
2. same back ends and different pools: host is a very good choice (75)
3. different back ends with the same AZ: host is a good choice (50)
        4. different back ends and AZs: host is not such a good choice (25)
"""
ctx = context.get_admin_context()
request_spec = weight_properties.get('request_spec')
snapshot_id = request_spec.get('snapshot_id')
snapshot_host = request_spec.get('snapshot_host')
if None in [snapshot_id, snapshot_host]:
# NOTE(silvacarlose): if the request does not contain a snapshot_id
# or a snapshot_host, the user is not creating a share from a
# snapshot and we don't need to weigh the host.
return 0
snapshot_ref = db_api.share_snapshot_get(ctx, snapshot_id)
# Source host info: pool, backend and availability zone
src_pool = share_utils.extract_host(snapshot_host, 'pool')
src_backend = share_utils.extract_host(
request_spec.get('snapshot_host'), 'backend')
src_az = snapshot_ref['share']['availability_zone']
# Destination host info: pool, backend and availability zone
dst_pool = share_utils.extract_host(obj.host, 'pool')
dst_backend = share_utils.extract_host(obj.host, 'backend')
# NOTE(dviroel): All hosts were already filtered by the availability
# zone parameter.
dst_az = None
if weight_properties['availability_zone_id']:
dst_az = db_api.availability_zone_get(
ctx, weight_properties['availability_zone_id']).name
if src_backend == dst_backend:
return 100 if (src_pool and src_pool == dst_pool) else 75
else:
return 50 if (src_az and src_az == dst_az) else 25
|
[
"manila.db.api.availability_zone_get",
"manila.db.api.share_snapshot_get",
"manila.context.get_admin_context",
"manila.share.utils.extract_host"
] |
[((1414, 1441), 'manila.context.get_admin_context', 'context.get_admin_context', ([], {}), '()\n', (1439, 1441), False, 'from manila import context\n'), ((1924, 1967), 'manila.db.api.share_snapshot_get', 'db_api.share_snapshot_get', (['ctx', 'snapshot_id'], {}), '(ctx, snapshot_id)\n', (1949, 1967), True, 'from manila.db import api as db_api\n'), ((2051, 2098), 'manila.share.utils.extract_host', 'share_utils.extract_host', (['snapshot_host', '"""pool"""'], {}), "(snapshot_host, 'pool')\n", (2075, 2098), True, 'from manila.share import utils as share_utils\n'), ((2353, 2395), 'manila.share.utils.extract_host', 'share_utils.extract_host', (['obj.host', '"""pool"""'], {}), "(obj.host, 'pool')\n", (2377, 2395), True, 'from manila.share import utils as share_utils\n'), ((2418, 2463), 'manila.share.utils.extract_host', 'share_utils.extract_host', (['obj.host', '"""backend"""'], {}), "(obj.host, 'backend')\n", (2442, 2463), True, 'from manila.share import utils as share_utils\n'), ((2664, 2740), 'manila.db.api.availability_zone_get', 'db_api.availability_zone_get', (['ctx', "weight_properties['availability_zone_id']"], {}), "(ctx, weight_properties['availability_zone_id'])\n", (2692, 2740), True, 'from manila.db import api as db_api\n')]
|
#!/usr/bin/env python
# (c) 2020 <NAME>
# 2-clause BSD license
from setuptools import setup
from nattka import __version__
setup(
name='nattka',
version=__version__,
description='A New Arch Tester Toolkit (open source replacement '
'for stable-bot)',
author='<NAME>',
author_email='<EMAIL>',
license='BSD',
url='http://github.com/mgorny/nattka',
packages=['nattka'],
entry_points={
'console_scripts': [
'nattka=nattka.__main__:setuptools_main',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Software Development :: Testing',
]
)
|
[
"setuptools.setup"
] |
[((127, 825), 'setuptools.setup', 'setup', ([], {'name': '"""nattka"""', 'version': '__version__', 'description': '"""A New Arch Tester Toolkit (open source replacement for stable-bot)"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""BSD"""', 'url': '"""http://github.com/mgorny/nattka"""', 'packages': "['nattka']", 'entry_points': "{'console_scripts': ['nattka=nattka.__main__:setuptools_main']}", 'classifiers': "['Development Status :: 5 - Production/Stable', 'Environment :: Console',\n 'Environment :: No Input/Output (Daemon)',\n 'Intended Audience :: System Administrators',\n 'License :: OSI Approved :: BSD License', 'Operating System :: POSIX',\n 'Programming Language :: Python :: 3 :: Only',\n 'Topic :: Software Development :: Testing']"}), "(name='nattka', version=__version__, description=\n 'A New Arch Tester Toolkit (open source replacement for stable-bot)',\n author='<NAME>', author_email='<EMAIL>', license='BSD', url=\n 'http://github.com/mgorny/nattka', packages=['nattka'], entry_points={\n 'console_scripts': ['nattka=nattka.__main__:setuptools_main']},\n classifiers=['Development Status :: 5 - Production/Stable',\n 'Environment :: Console', 'Environment :: No Input/Output (Daemon)',\n 'Intended Audience :: System Administrators',\n 'License :: OSI Approved :: BSD License', 'Operating System :: POSIX',\n 'Programming Language :: Python :: 3 :: Only',\n 'Topic :: Software Development :: Testing'])\n", (132, 825), False, 'from setuptools import setup\n')]
|
"""
SAVGOL INTERP.
--------------
"""
import argparse
from pathlib import Path
import matplotlib
import numpy as np
from embers.rf_tools.align_data import savgol_interp
from embers.rf_tools.colormaps import spectral
from matplotlib import pyplot as plt
matplotlib.use("Agg")
_spec, _ = spectral()
parser = argparse.ArgumentParser(
description="""
Savgol Interpolation paper plot
"""
)
parser.add_argument(
"--rf_dir",
metavar="\b",
default="../../tiles_data",
help="Directory with raw rf data. Default=.../../tiles_data",
)
parser.add_argument(
"--out_dir",
metavar="\b",
default="../embers_out/paper_plots",
help="Output Directory. Default=./embers_out/paper_plots",
)
args = parser.parse_args()
rf_dir = Path(args.rf_dir)
out_dir = Path(args.out_dir)
out_dir.mkdir(parents=True, exist_ok=True)
try:
ch = 8
(
ref_ali,
tile_ali,
time_array,
ref_power,
tile_power,
ref_time,
tile_time,
) = savgol_interp(
f"{rf_dir}/rf0XX/2019-09-15/rf0XX_2019-09-15-11:00.txt",
f"{rf_dir}/S06XX/2019-09-15/S06XX_2019-09-15-11:00.txt",
savgol_window_1=11,
savgol_window_2=15,
polyorder=2,
interp_type="cubic",
interp_freq=1,
)
plt.style.use("seaborn")
nice_fonts = {
# Use LaTeX to write all text
# "text.usetex": True,
"font.family": "sans-serif",
# Use 10pt font in plots, to match 10pt font in document
"axes.labelsize": 10,
"font.size": 10,
# Make the legend/label fonts a little smaller
"legend.fontsize": 6,
"xtick.labelsize": 8,
"ytick.labelsize": 8,
}
plt.rcParams.update(nice_fonts)
fig = plt.figure(figsize=(3.6, 2.4))
colors = _spec([0.14, 0.28])
tile_t = tile_time - tile_time[0]
time_array = time_array - time_array[0]
med = np.median(tile_power)
tile_p = tile_power - med
tile_p_aligned = tile_ali - med
plt.plot(
time_array,
tile_p_aligned[::, ch],
linewidth=1,
color=colors[0],
# color="#2c5d63",
alpha=0.9,
label="SavGol",
)
plt.scatter(
tile_t,
tile_p[::, ch],
color=colors[1],
# color="#7fa998",
marker=".",
s=3,
alpha=0.2,
label="AUT raw",
)
leg = plt.legend(loc="upper right", frameon=True, markerscale=4, handlelength=1)
leg.get_frame().set_facecolor("white")
for le in leg.legendHandles:
le.set_alpha(1)
plt.ylabel("Power [dB]")
plt.xlabel("Time [s]")
plt.tight_layout()
plt.savefig(f"{out_dir}/savgol.pdf", bbox_inches="tight")
print(f"SAVGOL INTERP saved to {out_dir}")
except Exception as e:
print(e)
print("Missing input rf files. Check path to rf_dir")
|
[
"argparse.ArgumentParser",
"matplotlib.pyplot.plot",
"numpy.median",
"matplotlib.pyplot.scatter",
"embers.rf_tools.colormaps.spectral",
"matplotlib.pyplot.legend",
"embers.rf_tools.align_data.savgol_interp",
"pathlib.Path",
"matplotlib.use",
"matplotlib.pyplot.style.use",
"matplotlib.pyplot.rcParams.update",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.savefig"
] |
[((256, 277), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (270, 277), False, 'import matplotlib\n'), ((289, 299), 'embers.rf_tools.colormaps.spectral', 'spectral', ([], {}), '()\n', (297, 299), False, 'from embers.rf_tools.colormaps import spectral\n'), ((310, 407), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""\n Savgol Interpolation paper plot\n """'}), '(description=\n """\n Savgol Interpolation paper plot\n """)\n', (333, 407), False, 'import argparse\n'), ((765, 782), 'pathlib.Path', 'Path', (['args.rf_dir'], {}), '(args.rf_dir)\n', (769, 782), False, 'from pathlib import Path\n'), ((793, 811), 'pathlib.Path', 'Path', (['args.out_dir'], {}), '(args.out_dir)\n', (797, 811), False, 'from pathlib import Path\n'), ((1018, 1247), 'embers.rf_tools.align_data.savgol_interp', 'savgol_interp', (['f"""{rf_dir}/rf0XX/2019-09-15/rf0XX_2019-09-15-11:00.txt"""', 'f"""{rf_dir}/S06XX/2019-09-15/S06XX_2019-09-15-11:00.txt"""'], {'savgol_window_1': '(11)', 'savgol_window_2': '(15)', 'polyorder': '(2)', 'interp_type': '"""cubic"""', 'interp_freq': '(1)'}), "(f'{rf_dir}/rf0XX/2019-09-15/rf0XX_2019-09-15-11:00.txt',\n f'{rf_dir}/S06XX/2019-09-15/S06XX_2019-09-15-11:00.txt',\n savgol_window_1=11, savgol_window_2=15, polyorder=2, interp_type=\n 'cubic', interp_freq=1)\n", (1031, 1247), False, 'from embers.rf_tools.align_data import savgol_interp\n'), ((1303, 1327), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""seaborn"""'], {}), "('seaborn')\n", (1316, 1327), True, 'from matplotlib import pyplot as plt\n'), ((1730, 1761), 'matplotlib.pyplot.rcParams.update', 'plt.rcParams.update', (['nice_fonts'], {}), '(nice_fonts)\n', (1749, 1761), True, 'from matplotlib import pyplot as plt\n'), ((1773, 1803), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(3.6, 2.4)'}), '(figsize=(3.6, 2.4))\n', (1783, 1803), True, 'from matplotlib import pyplot as plt\n'), ((1932, 1953), 'numpy.median', 'np.median', (['tile_power'], {}), '(tile_power)\n', (1941, 1953), True, 'import numpy as np\n'), ((2025, 2129), 'matplotlib.pyplot.plot', 'plt.plot', (['time_array', 'tile_p_aligned[:, ch]'], {'linewidth': '(1)', 'color': 'colors[0]', 'alpha': '(0.9)', 'label': '"""SavGol"""'}), "(time_array, tile_p_aligned[:, ch], linewidth=1, color=colors[0],\n alpha=0.9, label='SavGol')\n", (2033, 2129), True, 'from matplotlib import pyplot as plt\n'), ((2214, 2315), 'matplotlib.pyplot.scatter', 'plt.scatter', (['tile_t', 'tile_p[:, ch]'], {'color': 'colors[1]', 'marker': '"""."""', 's': '(3)', 'alpha': '(0.2)', 'label': '"""AUT raw"""'}), "(tile_t, tile_p[:, ch], color=colors[1], marker='.', s=3, alpha=\n 0.2, label='AUT raw')\n", (2225, 2315), True, 'from matplotlib import pyplot as plt\n'), ((2414, 2488), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper right"""', 'frameon': '(True)', 'markerscale': '(4)', 'handlelength': '(1)'}), "(loc='upper right', frameon=True, markerscale=4, handlelength=1)\n", (2424, 2488), True, 'from matplotlib import pyplot as plt\n'), ((2594, 2618), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Power [dB]"""'], {}), "('Power [dB]')\n", (2604, 2618), True, 'from matplotlib import pyplot as plt\n'), ((2623, 2645), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time [s]"""'], {}), "('Time [s]')\n", (2633, 2645), True, 'from matplotlib import pyplot as plt\n'), ((2650, 2668), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2666, 2668), True, 'from matplotlib import pyplot as plt\n'), ((2673, 2730), 
'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""{out_dir}/savgol.pdf"""'], {'bbox_inches': '"""tight"""'}), "(f'{out_dir}/savgol.pdf', bbox_inches='tight')\n", (2684, 2730), True, 'from matplotlib import pyplot as plt\n')]
|
"""
Provides authorization functions for Mojang's login and session servers
"""
import hashlib
import json
# This is for python2 compatibility
try:
import urllib.request as request
from urllib.error import URLError
except ImportError:
import urllib2 as request
from urllib2 import URLError
import logging
import os
from spockbot.mcp.yggdrasil import YggdrasilCore
from spockbot.plugins.base import PluginBase, pl_announce
logger = logging.getLogger('spockbot')
# This function courtesy of barneygale
def java_hex_digest(digest):
d = int(digest.hexdigest(), 16)
if d >> 39 * 4 & 0x8:
d = "-%x" % ((-d) & (2 ** (40 * 4) - 1))
else:
d = "%x" % d
return d
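# Usage sketch (hedged: the "Notch" value below is the test vector commonly
# quoted for Minecraft's signed server-id hash, shown for illustration):
#   java_hex_digest(hashlib.sha1(b'Notch'))
#   # -> '4ed1f46bbe04bc756bcb17c0c7ce3e4632f06a48'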
class AuthCore(object):
def __init__(self, event, online_mode, auth_timeout):
self.online_mode = online_mode
self.auth_timeout = auth_timeout
self.__event = event
self.ygg = YggdrasilCore()
self._shared_secret = None
self._username = None
def get_username(self):
return self._username
def set_username(self, username):
self.ygg.username = username
username = property(get_username, set_username)
def set_password(self, password):
if password and not self.online_mode:
logger.warning("PASSWORD PROVIDED WITH ONLINE_MODE == FALSE")
logger.warning("YOU PROBABLY DIDN'T WANT TO DO THAT")
self.ygg.password = password
password = property(lambda x: bool(x.ygg.password), set_password)
def set_client_token(self, client_token):
if not self.online_mode:
logger.warning("CLIENT TOKEN PROVIDED WITH ONLINE_MODE == FALSE")
logger.warning("YOU PROBABLY DIDN'T WANT TO DO THAT")
self.ygg.client_token = client_token
client_token = property(
lambda x: bool(x.ygg.client_token), set_client_token
)
def set_auth_token(self, auth_token):
if not self.online_mode:
logger.warning("AUTH TOKEN PROVIDED WITH ONLINE_MODE == FALSE")
logger.warning("YOU PROBABLY DIDN'T WANT TO DO THAT")
self.ygg.auth_token = auth_token
auth_token = property(
lambda x: bool(x.ygg.auth_token), set_auth_token
)
def get_shared_secret(self):
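        # 16 random bytes: the client-generated shared secret used as the
        # AES/CFB8 key in the Minecraft protocol's encryption handshake.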
self._shared_secret = self._shared_secret or os.urandom(16)
return self._shared_secret
shared_secret = property(get_shared_secret)
def start_session(self):
if not self.online_mode:
self._username = self.ygg.username
return True
if self.ygg.login():
self._username = self.ygg.selected_profile['name']
return True
self.__event.emit('auth_session_error')
return False
def send_session_auth(self, pubkey_raw, server_id_raw):
server_id = java_hex_digest(hashlib.sha1(
server_id_raw.encode('ascii') + self.shared_secret + pubkey_raw
))
logger.info('Attempting to authenticate with Mojang session server')
url = "https://sessionserver.mojang.com/session/minecraft/join"
data = json.dumps({
'accessToken': self.ygg.access_token,
'selectedProfile': self.ygg.selected_profile,
'serverId': server_id,
}).encode('utf-8')
headers = {'Content-Type': 'application/json'}
req = request.Request(url, data, headers)
try:
rep = request.urlopen(
req, timeout=self.auth_timeout
).read().decode('ascii')
except URLError:
rep = "Couldn't connect to sessionserver.mojang.com"
if rep:
logger.warning('Mojang session auth response: %s', rep)
logger.info('Session authentication successful')
@pl_announce('Auth')
class AuthPlugin(PluginBase):
requires = 'Event'
defaults = {
'online_mode': True,
'auth_timeout': 3, # No idea how long this should be, 3s seems good
'auth_quit': True,
'sess_quit': True,
}
events = {
'auth_login_error': 'handle_auth_error',
'auth_session_error': 'handle_session_error',
}
def __init__(self, ploader, settings):
super(AuthPlugin, self).__init__(ploader, settings)
self.sess_quit = self.settings['sess_quit']
self.auth_quit = self.settings['auth_quit']
ploader.provides('Auth', AuthCore(
self.event,
self.settings['online_mode'],
self.settings['auth_timeout']
))
def handle_auth_error(self, name, data):
if self.auth_quit:
logger.error('AUTH: Session authentication error, calling kill')
self.event.kill()
def handle_session_error(self, name, data):
if self.sess_quit:
logger.error('AUTH: Session start error, calling kill')
self.event.kill()
|
[
"urllib2.urlopen",
"urllib2.Request",
"spockbot.mcp.yggdrasil.YggdrasilCore",
"json.dumps",
"spockbot.plugins.base.pl_announce",
"os.urandom",
"logging.getLogger"
] |
[((450, 479), 'logging.getLogger', 'logging.getLogger', (['"""spockbot"""'], {}), "('spockbot')\n", (467, 479), False, 'import logging\n'), ((3756, 3775), 'spockbot.plugins.base.pl_announce', 'pl_announce', (['"""Auth"""'], {}), "('Auth')\n", (3767, 3775), False, 'from spockbot.plugins.base import PluginBase, pl_announce\n'), ((917, 932), 'spockbot.mcp.yggdrasil.YggdrasilCore', 'YggdrasilCore', ([], {}), '()\n', (930, 932), False, 'from spockbot.mcp.yggdrasil import YggdrasilCore\n'), ((3354, 3389), 'urllib2.Request', 'request.Request', (['url', 'data', 'headers'], {}), '(url, data, headers)\n', (3369, 3389), True, 'import urllib2 as request\n'), ((2322, 2336), 'os.urandom', 'os.urandom', (['(16)'], {}), '(16)\n', (2332, 2336), False, 'import os\n'), ((3102, 3226), 'json.dumps', 'json.dumps', (["{'accessToken': self.ygg.access_token, 'selectedProfile': self.ygg.\n selected_profile, 'serverId': server_id}"], {}), "({'accessToken': self.ygg.access_token, 'selectedProfile': self.\n ygg.selected_profile, 'serverId': server_id})\n", (3112, 3226), False, 'import json\n'), ((3421, 3468), 'urllib2.urlopen', 'request.urlopen', (['req'], {'timeout': 'self.auth_timeout'}), '(req, timeout=self.auth_timeout)\n', (3436, 3468), True, 'import urllib2 as request\n')]
|
# Generated by Django 4.0.1 on 2022-03-15 04:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('exam', '0004_remove_exam_name_exam_exam_name_alter_exam_cls_and_more'),
]
operations = [
migrations.AlterField(
model_name='marks',
name='marks_mx',
field=models.IntegerField(blank=True, help_text='Maximum marks in the subject', null=True, verbose_name='Maximum Marks'),
),
migrations.AlterField(
model_name='marks',
name='marks_ob',
field=models.IntegerField(blank=True, help_text='Marks obtained in the subject', null=True, verbose_name='Marks Obtained'),
),
]
|
[
"django.db.models.IntegerField"
] |
[((371, 489), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'help_text': '"""Maximum marks in the subject"""', 'null': '(True)', 'verbose_name': '"""Maximum Marks"""'}), "(blank=True, help_text='Maximum marks in the subject',\n null=True, verbose_name='Maximum Marks')\n", (390, 489), False, 'from django.db import migrations, models\n'), ((608, 728), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'help_text': '"""Marks obtained in the subject"""', 'null': '(True)', 'verbose_name': '"""Marks Obtained"""'}), "(blank=True, help_text='Marks obtained in the subject',\n null=True, verbose_name='Marks Obtained')\n", (627, 728), False, 'from django.db import migrations, models\n')]
|
"""
Copyright (C) 2015, <NAME>
Contributed by <NAME> (<EMAIL>)
This file is distributed under the BSD license
<https://opensource.org/licenses/BSD-3-Clause>
"""
import unittest
import logging
import time
from mops.xlwtwrapper import XlwtWrapper
"""
Test Excel writing
"""
class XlwtWrapperTest(unittest.TestCase):
    # set up
def setUp(self):
logging.basicConfig(level=logging.INFO)
pass
    # tear down
def tearDown(self):
pass
    # test writing row data
def test_addRowData(self):
logging.info("XlwtWrapperTest.test_addRowData")
wrapper = XlwtWrapper()
for i in range(5000):
wrapper.addRowData(("20160208", "中文字", "B", "DEF", "123", "TWD", "456", "789", "DEF"))
wrapper.saveExcelFile()
# run the tests
if __name__ == "__main__":
unittest.main(exit=False)
|
[
"unittest.main",
"logging.info",
"mops.xlwtwrapper.XlwtWrapper",
"logging.basicConfig"
] |
[((777, 802), 'unittest.main', 'unittest.main', ([], {'exit': '(False)'}), '(exit=False)\n', (790, 802), False, 'import unittest\n'), ((334, 373), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (353, 373), False, 'import logging\n'), ((498, 545), 'logging.info', 'logging.info', (['"""XlwtWrapperTest.test_addRowData"""'], {}), "('XlwtWrapperTest.test_addRowData')\n", (510, 545), False, 'import logging\n'), ((564, 577), 'mops.xlwtwrapper.XlwtWrapper', 'XlwtWrapper', ([], {}), '()\n', (575, 577), False, 'from mops.xlwtwrapper import XlwtWrapper\n')]
|
# Write a function named problema1 that returns a list, sorted in ascending
# order, of all the words in the string s given as a parameter. A word is made
# up of lowercase and uppercase letters, digits, and the underscore '_'.
import re
import os
import urllib
from urllib import request
import hashlib
import zipfile
import socket
def problema1(s):
    word_pattern = r'(\w+)'  # raw string avoids the invalid-escape warning
words = re.findall(word_pattern, s)
words.sort()
return words
# print(problema1('@c3sta 3st3, un cuvant_.'))
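# -> ['3st3', 'c3sta', 'cuvant_', 'un']  (plain ASCII sort: digits before letters)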
# Write a function named problema2 that takes as parameters a string s and a
# string url representing an HTTP link.
# Return True if s is found in the content at the given HTTP link, False otherwise.
def problema2(s, url):
response = urllib.request.urlopen(url)
content = response.read()
return s.encode() in content
# print(problema2("facebook", "https://mbasic.facebook.com/"))
# print(problema2(s="2014 hackaday.com. All Rights Reserved.", url="http://retro.hackaday.com/"))
# print(problema2(s="google", url="https://www.google.com.hk"))
# print(problema2(s="gooogli", url="https://www.google.com.hk"))
# Write a function named problema3 that takes as a parameter a string path
# representing the path of a directory.
# Return a list, sorted in ascending order, of the md5 hashes of all the files
# in the directory (non-recursive).
def problema3(path):
def hash(filepath, block_size=4096):
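        # Read the file in fixed-size blocks so memory use stays constant
        # even for large files.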
try:
hash = hashlib.md5()
f = open(filepath, 'rb')
while True:
data = f.read(block_size)
                if len(data) == 0:
break
hash.update(data)
f.close()
return hash.hexdigest()
        except Exception:  # unreadable file -> empty hash
return ''
files = os.listdir(path)
md5 = []
for file in files:
file_path = os.path.join(path, file)
if os.path.isfile(file_path):
md5.append(hash(file_path))
md5.sort()
return md5
# print(problema3('C:\\facultate\\an3\\sem1\\python\\python\\labs'))
# Write a function named problema4 that takes as a parameter a string path
# representing the path of a zip archive.
# Return a list, sorted in ascending order, of the names of the files whose
# size after compression is larger than 1 KB (1000 bytes).
def problema4(path):
list = []
z = zipfile.ZipFile(path)
for i in z.infolist():
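        # ZipInfo.compress_size is the size after compression;
        # ZipInfo.file_size would be the uncompressed size.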
if i.compress_size > 1000:
name = os.path.basename(i.filename)
list.append(name)
list.sort()
return list
# print(problema4('C:\\facultate\\an3\\sem1\\Introduction-to-.Net\\project\\CLMS\\CLMS\\clms.zip'))
# Write a function named problema5 that takes as arguments a string host, a
# number port and a string text.
# Return the final answer from the server, as a string, following this protocol:
# - the client sends the content of the text argument to the server
# - the client receives another string from the server (of length 32)
# - the client sends the server the sha256 hash of the previously received string
# - the client receives the final answer from the server (of length 32) and returns it
def problema5(host, port, text):
def get_sha256(text):
hash = hashlib.sha256()
hash.update(text.encode())
return hash.hexdigest()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, port))
s.send(text.encode())
message = s.recv(32).decode()
hash_message = get_sha256(message)
s.send(hash_message.encode())
final_message = s.recv(32).decode()
s.close()
return final_message
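# A minimal counterpart-server sketch for problema5 (illustrative only: the
# 32-character payloads below are made up, since the task does not fix them):
def demo_server(port=5000):
    srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    srv.bind(('127.0.0.1', port))
    srv.listen(1)
    conn, _ = srv.accept()
    conn.recv(1024)       # the client's text argument
    conn.send(b'a' * 32)  # a 32-character challenge string
    conn.recv(64)         # the sha256 hex digest of the challenge
    conn.send(b'b' * 32)  # the final 32-character answer
    conn.close()
    srv.close()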
|
[
"hashlib.md5",
"zipfile.ZipFile",
"os.path.basename",
"socket.socket",
"urllib.request.urlopen",
"hashlib.sha256",
"os.path.isfile",
"re.findall",
"os.path.join",
"os.listdir"
] |
[((408, 435), 're.findall', 're.findall', (['word_pattern', 's'], {}), '(word_pattern, s)\n', (418, 435), False, 'import re\n'), ((809, 836), 'urllib.request.urlopen', 'urllib.request.urlopen', (['url'], {}), '(url)\n', (831, 836), False, 'import urllib\n'), ((1862, 1878), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (1872, 1878), False, 'import os\n'), ((2454, 2475), 'zipfile.ZipFile', 'zipfile.ZipFile', (['path'], {}), '(path)\n', (2469, 2475), False, 'import zipfile\n'), ((3459, 3508), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (3472, 3508), False, 'import socket\n'), ((1936, 1960), 'os.path.join', 'os.path.join', (['path', 'file'], {}), '(path, file)\n', (1948, 1960), False, 'import os\n'), ((1972, 1997), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (1986, 1997), False, 'import os\n'), ((3366, 3382), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (3380, 3382), False, 'import hashlib\n'), ((1541, 1554), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (1552, 1554), False, 'import hashlib\n'), ((2558, 2586), 'os.path.basename', 'os.path.basename', (['i.filename'], {}), '(i.filename)\n', (2574, 2586), False, 'import os\n')]
|
from django.shortcuts import get_object_or_404
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from v1.filters.albums.album import album_filter
from v1.music.models.album import Album
from v1.music.serializers.album import AlbumSerializer, AlbumSerializerCreate, AlbumSerializerFull, AlbumSerializerUpdate
# albums
class AlbumView(APIView):
@staticmethod
def get(request):
"""
List albums
"""
albums = Album.objects.all()
albums = album_filter(request, albums)
if type(albums) == Response:
return albums
return Response(AlbumSerializer(albums, many=True).data)
@staticmethod
def post(request):
"""
Create album
"""
serializer = AlbumSerializerCreate(data=request.data, context={'request': request})
if serializer.is_valid():
serializer.save()
return Response(AlbumSerializer(serializer.instance).data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# albums/{album_id}
class AlbumDetail(APIView):
@staticmethod
def get(request, album_id):
"""
View individual album
"""
album = get_object_or_404(Album, pk=album_id)
return Response(AlbumSerializerFull(album).data)
@staticmethod
def patch(request, album_id):
"""
Update album
"""
album = get_object_or_404(Album, pk=album_id)
serializer = AlbumSerializerUpdate(album, data=request.data, context={'request': request}, partial=True)
if serializer.is_valid():
serializer.save()
return Response(AlbumSerializerFull(serializer.instance).data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@staticmethod
def delete(request, album_id):
"""
Delete album
"""
album = get_object_or_404(Album, pk=album_id)
if album.user != request.user:
return Response(status=status.HTTP_401_UNAUTHORIZED)
album.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
|
[
"v1.music.serializers.album.AlbumSerializerUpdate",
"v1.filters.albums.album.album_filter",
"v1.music.serializers.album.AlbumSerializerCreate",
"v1.music.serializers.album.AlbumSerializer",
"django.shortcuts.get_object_or_404",
"v1.music.serializers.album.AlbumSerializerFull",
"rest_framework.response.Response",
"v1.music.models.album.Album.objects.all"
] |
[((518, 537), 'v1.music.models.album.Album.objects.all', 'Album.objects.all', ([], {}), '()\n', (535, 537), False, 'from v1.music.models.album import Album\n'), ((555, 584), 'v1.filters.albums.album.album_filter', 'album_filter', (['request', 'albums'], {}), '(request, albums)\n', (567, 584), False, 'from v1.filters.albums.album import album_filter\n'), ((822, 892), 'v1.music.serializers.album.AlbumSerializerCreate', 'AlbumSerializerCreate', ([], {'data': 'request.data', 'context': "{'request': request}"}), "(data=request.data, context={'request': request})\n", (843, 892), False, 'from v1.music.serializers.album import AlbumSerializer, AlbumSerializerCreate, AlbumSerializerFull, AlbumSerializerUpdate\n'), ((1075, 1138), 'rest_framework.response.Response', 'Response', (['serializer.errors'], {'status': 'status.HTTP_400_BAD_REQUEST'}), '(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n', (1083, 1138), False, 'from rest_framework.response import Response\n'), ((1311, 1348), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Album'], {'pk': 'album_id'}), '(Album, pk=album_id)\n', (1328, 1348), False, 'from django.shortcuts import get_object_or_404\n'), ((1521, 1558), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Album'], {'pk': 'album_id'}), '(Album, pk=album_id)\n', (1538, 1558), False, 'from django.shortcuts import get_object_or_404\n'), ((1580, 1676), 'v1.music.serializers.album.AlbumSerializerUpdate', 'AlbumSerializerUpdate', (['album'], {'data': 'request.data', 'context': "{'request': request}", 'partial': '(True)'}), "(album, data=request.data, context={'request': request\n }, partial=True)\n", (1601, 1676), False, 'from v1.music.serializers.album import AlbumSerializer, AlbumSerializerCreate, AlbumSerializerFull, AlbumSerializerUpdate\n'), ((1826, 1889), 'rest_framework.response.Response', 'Response', (['serializer.errors'], {'status': 'status.HTTP_400_BAD_REQUEST'}), '(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n', (1834, 1889), False, 'from rest_framework.response import Response\n'), ((2006, 2043), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Album'], {'pk': 'album_id'}), '(Album, pk=album_id)\n', (2023, 2043), False, 'from django.shortcuts import get_object_or_404\n'), ((2186, 2229), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_204_NO_CONTENT'}), '(status=status.HTTP_204_NO_CONTENT)\n', (2194, 2229), False, 'from rest_framework.response import Response\n'), ((2102, 2147), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_401_UNAUTHORIZED'}), '(status=status.HTTP_401_UNAUTHORIZED)\n', (2110, 2147), False, 'from rest_framework.response import Response\n'), ((672, 706), 'v1.music.serializers.album.AlbumSerializer', 'AlbumSerializer', (['albums'], {'many': '(True)'}), '(albums, many=True)\n', (687, 706), False, 'from v1.music.serializers.album import AlbumSerializer, AlbumSerializerCreate, AlbumSerializerFull, AlbumSerializerUpdate\n'), ((1373, 1399), 'v1.music.serializers.album.AlbumSerializerFull', 'AlbumSerializerFull', (['album'], {}), '(album)\n', (1392, 1399), False, 'from v1.music.serializers.album import AlbumSerializer, AlbumSerializerCreate, AlbumSerializerFull, AlbumSerializerUpdate\n'), ((985, 1021), 'v1.music.serializers.album.AlbumSerializer', 'AlbumSerializer', (['serializer.instance'], {}), '(serializer.instance)\n', (1000, 1021), False, 'from v1.music.serializers.album import AlbumSerializer, AlbumSerializerCreate, AlbumSerializerFull, 
AlbumSerializerUpdate\n'), ((1764, 1804), 'v1.music.serializers.album.AlbumSerializerFull', 'AlbumSerializerFull', (['serializer.instance'], {}), '(serializer.instance)\n', (1783, 1804), False, 'from v1.music.serializers.album import AlbumSerializer, AlbumSerializerCreate, AlbumSerializerFull, AlbumSerializerUpdate\n')]
|
from typing import Callable, TypeVar
from matroids.core.set_operator import powset
from matroids.construct import independent_sets
T = TypeVar('T')
def from_independent_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by independent sets.
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by independent sets
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
_, Is = matroid
# n(X) = |X| - max{|I|: I ∈ Is, I ⊆ X}, ∀X ⊆ E.
return lambda X: len(X) - max(map(len, (I for I in Is if I <= X)))
def from_dependent_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by dependent sets.
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by dependent sets.
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, _ = matroid
return from_independent_matroid((E, independent_sets.from_dependent_matroid(matroid)))
def from_bases_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by bases.
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by dependent sets.
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, _ = matroid
return from_independent_matroid((E, independent_sets.from_bases_matroid(matroid)))
def from_circuits_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by circuits.
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by circuits.
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, _ = matroid
return from_independent_matroid((E, independent_sets.from_circuits_matroid(matroid)))
def from_rank_matroid(matroid: tuple[set[T], Callable[[set[T]], int]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by a rank function.
Args:
matroid (tuple[set[T], Callable[[set[T]], int]]): A matroid defined by a rank function
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, r = matroid
return lambda X: len(X) - r(X)
def from_closure_matroid(matroid: tuple[set[T], Callable[[set[T]], set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by a closure function.
Args:
matroid (tuple[set[T], Callable[[set[T]], set[T]]]): A matroid defined by a closure function
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, cl = matroid
# n(X) = |X| - min{ |I| : X ⊆ cl(I) }.
return lambda X: len(X) - min(len(I) for I in powset(E) if X <= cl(I))
def from_flats_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by flats.
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by flats.
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, _ = matroid
return from_independent_matroid((E, independent_sets.from_flats_matroid(matroid)))
def from_open_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by open sets.
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by open sets.
Returns:
Callable[[set[T]], int]: The nulity function of a matroid.
"""
E, _ = matroid
return from_independent_matroid((E, independent_sets.from_open_matroid(matroid)))
def from_hyperplanes_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by hyperplanes
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by hyperplanes
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, _ = matroid
return from_independent_matroid((E, independent_sets.from_hyperplanes_matroid(matroid)))
def from_spanning_matroid(matroid: tuple[set[T], list[set[T]]]) -> Callable[[set[T]], int]:
"""Construct a nulity function from a matroid defined by spanning sets.
Args:
matroid (tuple[set[T], list[set[T]]]): A matroid defined by spanning sets.
Returns:
Callable[[set[T]], int]: The nulity function of a given matroid.
"""
E, _ = matroid
return from_independent_matroid((E, independent_sets.from_spanning_matroid(matroid)))
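# Tiny worked example (the matroid below is chosen here for illustration, not
# part of the library): the uniform matroid U_{1,2} on E = {1, 2} with
# independent sets [set(), {1}, {2}] has rank 1, so n(E) = |E| - r(E) = 1.
if __name__ == "__main__":
    nullity = from_independent_matroid(({1, 2}, [set(), {1}, {2}]))
    assert nullity({1, 2}) == 1 and nullity({1}) == 0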
|
[
"matroids.construct.independent_sets.from_bases_matroid",
"matroids.construct.independent_sets.from_spanning_matroid",
"matroids.construct.independent_sets.from_hyperplanes_matroid",
"matroids.construct.independent_sets.from_flats_matroid",
"matroids.core.set_operator.powset",
"matroids.construct.independent_sets.from_circuits_matroid",
"matroids.construct.independent_sets.from_open_matroid",
"typing.TypeVar",
"matroids.construct.independent_sets.from_dependent_matroid"
] |
[((140, 152), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (147, 152), False, 'from typing import Callable, TypeVar\n'), ((1083, 1131), 'matroids.construct.independent_sets.from_dependent_matroid', 'independent_sets.from_dependent_matroid', (['matroid'], {}), '(matroid)\n', (1122, 1131), False, 'from matroids.construct import independent_sets\n'), ((1542, 1586), 'matroids.construct.independent_sets.from_bases_matroid', 'independent_sets.from_bases_matroid', (['matroid'], {}), '(matroid)\n', (1577, 1586), False, 'from matroids.construct import independent_sets\n'), ((1997, 2044), 'matroids.construct.independent_sets.from_circuits_matroid', 'independent_sets.from_circuits_matroid', (['matroid'], {}), '(matroid)\n', (2035, 2044), False, 'from matroids.construct import independent_sets\n'), ((3413, 3457), 'matroids.construct.independent_sets.from_flats_matroid', 'independent_sets.from_flats_matroid', (['matroid'], {}), '(matroid)\n', (3448, 3457), False, 'from matroids.construct import independent_sets\n'), ((3860, 3903), 'matroids.construct.independent_sets.from_open_matroid', 'independent_sets.from_open_matroid', (['matroid'], {}), '(matroid)\n', (3894, 3903), False, 'from matroids.construct import independent_sets\n'), ((4321, 4371), 'matroids.construct.independent_sets.from_hyperplanes_matroid', 'independent_sets.from_hyperplanes_matroid', (['matroid'], {}), '(matroid)\n', (4362, 4371), False, 'from matroids.construct import independent_sets\n'), ((4792, 4839), 'matroids.construct.independent_sets.from_spanning_matroid', 'independent_sets.from_spanning_matroid', (['matroid'], {}), '(matroid)\n', (4830, 4839), False, 'from matroids.construct import independent_sets\n'), ((2989, 2998), 'matroids.core.set_operator.powset', 'powset', (['E'], {}), '(E)\n', (2995, 2998), False, 'from matroids.core.set_operator import powset\n')]
|
# -*- coding: utf-8 -*-
from odoo.addons.account.tests.common import AccountTestInvoicingCommon
from odoo.tests import tagged, Form
import time
@tagged('post_install', '-at_install')
class TestTransferWizard(AccountTestInvoicingCommon):
@classmethod
def setUpClass(cls, chart_template_ref=None):
super().setUpClass(chart_template_ref=chart_template_ref)
cls.company = cls.company_data['company']
cls.receivable_account = cls.company_data['default_account_receivable']
cls.payable_account = cls.company_data['default_account_payable']
cls.accounts = cls.env['account.account'].search([('reconcile', '=', False), ('company_id', '=', cls.company.id)], limit=5)
cls.journal = cls.company_data['default_journal_misc']
# Set rate for base currency to 1
cls.env['res.currency.rate'].search([('company_id', '=', cls.company.id), ('currency_id', '=', cls.company.currency_id.id)]).write({'rate': 1})
# Create test currencies
cls.test_currency_1 = cls.env['res.currency'].create({
'name': "PMK",
'symbol':'P',
})
cls.test_currency_2 = cls.env['res.currency'].create({
'name': "toto",
'symbol':'To',
})
cls.test_currency_3 = cls.env['res.currency'].create({
'name': "titi",
'symbol':'Ti',
})
# Create test rates
cls.env['res.currency.rate'].create({
'name': time.strftime('%Y') + '-' + '01' + '-01',
'rate': 0.5,
'currency_id': cls.test_currency_1.id,
'company_id': cls.company.id
})
cls.env['res.currency.rate'].create({
'name': time.strftime('%Y') + '-' + '01' + '-01',
'rate': 2,
'currency_id': cls.test_currency_2.id,
'company_id': cls.company.id
})
cls.env['res.currency.rate'].create({
'name': time.strftime('%Y') + '-' + '01' + '-01',
'rate': 10,
'currency_id': cls.test_currency_3.id,
'company_id': cls.company.id
})
# Create an account using a foreign currency
cls.test_currency_account = cls.env['account.account'].create({
'name': 'test destination account',
'code': 'test_dest_acc',
'user_type_id': cls.env['ir.model.data'].xmlid_to_res_id('account.data_account_type_current_assets'),
'currency_id': cls.test_currency_3.id,
})
# Create test account.move
cls.move_1 = cls.env['account.move'].create({
'journal_id': cls.journal.id,
'line_ids': [
(0, 0, {
'name': "test1_1",
'account_id': cls.receivable_account.id,
'debit': 500,
}),
(0, 0, {
'name': "test1_2",
'account_id': cls.accounts[0].id,
'credit': 500,
}),
(0, 0, {
'name': "test1_3",
'account_id': cls.accounts[0].id,
'debit': 800,
'partner_id': cls.partner_a.id,
}),
(0, 0, {
'name': "test1_4",
'account_id': cls.accounts[1].id,
'credit': 500,
}),
(0, 0, {
'name': "test1_5",
'account_id': cls.accounts[2].id,
'credit': 300,
'partner_id': cls.partner_a.id,
}),
(0, 0, {
'name': "test1_6",
'account_id': cls.accounts[0].id,
'debit': 270,
'currency_id': cls.test_currency_1.id,
'amount_currency': 540,
}),
(0, 0, {
'name': "test1_7",
'account_id': cls.accounts[1].id,
'credit': 140,
}),
(0, 0, {
'name': "test1_8",
'account_id': cls.accounts[2].id,
'credit': 160,
}),
(0, 0, {
'name': "test1_9",
'account_id': cls.accounts[2].id,
'debit': 30,
'currency_id': cls.test_currency_2.id,
'amount_currency': 15,
}),
]
})
cls.move_1.action_post()
cls.move_2 = cls.env['account.move'].create({
'journal_id': cls.journal.id,
'line_ids': [
(0, 0, {
'name': "test2_1",
'account_id': cls.accounts[1].id,
'debit': 400,
}),
(0, 0, {
'name': "test2_2",
'account_id': cls.payable_account.id,
'credit': 400,
}),
(0, 0, {
'name': "test2_3",
'account_id': cls.accounts[3].id,
'debit': 250,
'partner_id': cls.partner_a.id,
}),
(0, 0, {
'name': "test2_4",
'account_id': cls.accounts[1].id,
'debit': 480,
'partner_id': cls.partner_b.id,
}),
(0, 0, {
'name': "test2_5",
'account_id': cls.accounts[2].id,
'credit': 730,
'partner_id': cls.partner_a.id,
}),
(0, 0, {
'name': "test2_6",
'account_id': cls.accounts[2].id,
'credit': 412,
'partner_id': cls.partner_a.id,
'currency_id': cls.test_currency_2.id,
'amount_currency': -633,
}),
(0, 0, {
'name': "test2_7",
'account_id': cls.accounts[1].id,
'debit': 572,
}),
(0, 0, {
'name': "test2_8",
'account_id': cls.accounts[2].id,
'credit': 100,
'partner_id': cls.partner_a.id,
'currency_id': cls.test_currency_2.id,
'amount_currency': -123,
}),
(0, 0, {
'name': "test2_9",
'account_id': cls.accounts[2].id,
'credit': 60,
'partner_id': cls.partner_a.id,
'currency_id': cls.test_currency_1.id,
'amount_currency': -10,
}),
]
})
cls.move_2.action_post()
def test_transfer_wizard_reconcile(self):
""" Tests reconciliation when doing a transfer with the wizard
"""
active_move_lines = (self.move_1 + self.move_2).mapped('line_ids').filtered(lambda x: x.account_id.user_type_id.type in ('receivable', 'payable'))
# We use a form to pass the context properly to the depends_context move_line_ids field
context = {'active_model': 'account.move.line', 'active_ids': active_move_lines.ids}
with Form(self.env['account.automatic.entry.wizard'].with_context(context)) as wizard_form:
wizard_form.action = 'change_account'
wizard_form.destination_account_id = self.receivable_account
wizard_form.journal_id = self.journal
wizard = wizard_form.save()
transfer_move_id = wizard.do_action()['res_id']
transfer_move = self.env['account.move'].browse(transfer_move_id)
payable_transfer = transfer_move.line_ids.filtered(lambda x: x.account_id == self.payable_account)
receivable_transfer = transfer_move.line_ids.filtered(lambda x: x.account_id == self.receivable_account)
self.assertTrue(payable_transfer.reconciled, "Payable line of the transfer move should be fully reconciled")
self.assertAlmostEqual(self.move_1.line_ids.filtered(lambda x: x.account_id == self.receivable_account).amount_residual, 100, self.company.currency_id.decimal_places, "Receivable line of the original move should be partially reconciled, and still have a residual amount of 100 (500 - 400 from payable account)")
self.assertTrue(self.move_2.line_ids.filtered(lambda x: x.account_id == self.payable_account).reconciled, "Payable line of the original move should be fully reconciled")
self.assertAlmostEqual(receivable_transfer.amount_residual, 0, self.company.currency_id.decimal_places, "Receivable line from the transfer move should have nothing left to reconcile")
self.assertAlmostEqual(payable_transfer.debit, 400, self.company.currency_id.decimal_places, "400 should have been debited from payable account to apply the transfer")
self.assertAlmostEqual(receivable_transfer.credit, 400, self.company.currency_id.decimal_places, "400 should have been credited to receivable account to apply the transfer")
def test_transfer_wizard_grouping(self):
""" Tests grouping (by account and partner) when doing a transfer with the wizard
"""
active_move_lines = (self.move_1 + self.move_2).mapped('line_ids').filtered(lambda x: x.name in ('test1_3', 'test1_4', 'test1_5', 'test2_3', 'test2_4', 'test2_5', 'test2_6', 'test2_8'))
# We use a form to pass the context properly to the depends_context move_line_ids field
context = {'active_model': 'account.move.line', 'active_ids': active_move_lines.ids}
with Form(self.env['account.automatic.entry.wizard'].with_context(context)) as wizard_form:
wizard_form.action = 'change_account'
wizard_form.destination_account_id = self.accounts[4]
wizard_form.journal_id = self.journal
wizard = wizard_form.save()
transfer_move_id = wizard.do_action()['res_id']
transfer_move = self.env['account.move'].browse(transfer_move_id)
groups = {}
for line in transfer_move.line_ids:
key = (line.account_id, line.partner_id or None, line.currency_id)
self.assertFalse(groups.get(key), "There should be only one line per (account, partner, currency) group in the transfer move.")
groups[key] = line
self.assertAlmostEqual(groups[(self.accounts[0], self.partner_a, self.company_data['currency'])].balance, -800, self.company.currency_id.decimal_places)
self.assertAlmostEqual(groups[(self.accounts[1], None, self.company_data['currency'])].balance, 500, self.company.currency_id.decimal_places)
self.assertAlmostEqual(groups[(self.accounts[1], self.partner_b, self.company_data['currency'])].balance, -480, self.company.currency_id.decimal_places)
self.assertAlmostEqual(groups[(self.accounts[2], self.partner_a, self.company_data['currency'])].balance, 1030, self.company.currency_id.decimal_places)
self.assertAlmostEqual(groups[(self.accounts[2], self.partner_a, self.test_currency_2)].balance, 512, self.company.currency_id.decimal_places)
self.assertAlmostEqual(groups[(self.accounts[3], self.partner_a, self.company_data['currency'])].balance, -250, self.company.currency_id.decimal_places)
def test_transfer_wizard_currency_conversion(self):
""" Tests multi currency use of the transfer wizard, checking the conversion
        is properly done when using a destination account with a currency_id set.
"""
active_move_lines = self.move_1.mapped('line_ids').filtered(lambda x: x.name in ('test1_6', 'test1_9'))
# We use a form to pass the context properly to the depends_context move_line_ids field
context = {'active_model': 'account.move.line', 'active_ids': active_move_lines.ids}
with Form(self.env['account.automatic.entry.wizard'].with_context(context)) as wizard_form:
wizard_form.action = 'change_account'
wizard_form.destination_account_id = self.test_currency_account
wizard_form.journal_id = self.journal
wizard = wizard_form.save()
transfer_move_id = wizard.do_action()['res_id']
transfer_move = self.env['account.move'].browse(transfer_move_id)
destination_line = transfer_move.line_ids.filtered(lambda x: x.account_id == self.test_currency_account)
self.assertEqual(destination_line.currency_id, self.test_currency_3, "Transferring to an account with a currency set should keep this currency on the transfer line.")
self.assertAlmostEqual(destination_line.amount_currency, 3000, self.company.currency_id.decimal_places, "Transferring two lines with different currencies (and the same partner) on an account with a currency set should convert the balance of these lines into this account's currency (here (270 + 30) * 10 = 3000)")
def test_transfer_wizard_no_currency_conversion(self):
""" Tests multi currency use of the transfer wizard, verifying that
currency amounts are kept on distinct lines when transferring to an
account without any currency specified.
"""
active_move_lines = self.move_2.mapped('line_ids').filtered(lambda x: x.name in ('test2_9', 'test2_6', 'test2_8'))
# We use a form to pass the context properly to the depends_context move_line_ids field
context = {'active_model': 'account.move.line', 'active_ids': active_move_lines.ids}
with Form(self.env['account.automatic.entry.wizard'].with_context(context)) as wizard_form:
wizard_form.action = 'change_account'
wizard_form.destination_account_id = self.receivable_account
wizard_form.journal_id = self.journal
wizard = wizard_form.save()
transfer_move_id = wizard.do_action()['res_id']
transfer_move = self.env['account.move'].browse(transfer_move_id)
destination_lines = transfer_move.line_ids.filtered(lambda x: x.account_id == self.receivable_account)
        self.assertEqual(len(destination_lines), 2, "Two lines should have been created on the destination account: one for each currency (the lines with the same partner and currency should have been aggregated)")
self.assertAlmostEqual(destination_lines.filtered(lambda x: x.currency_id == self.test_currency_1).amount_currency, -10, self.test_currency_1.decimal_places)
self.assertAlmostEqual(destination_lines.filtered(lambda x: x.currency_id == self.test_currency_2).amount_currency, -756, self.test_currency_2.decimal_places)
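    # Hedged standalone sketch (not part of the original suite): the grouping
    # exercised in test_transfer_wizard_grouping keys move lines by
    # (account, partner, currency) and sums their balances; the helper below is a
    # hypothetical stand-in for that behaviour, not Odoo's implementation.
    @staticmethod
    def _sketch_group_lines(lines):
        """Aggregate (account, partner, currency, balance) tuples by key."""
        groups = {}
        for account, partner, currency, balance in lines:
            key = (account, partner, currency)
            groups[key] = groups.get(key, 0.0) + balance
        return groups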
|
[
"odoo.tests.tagged",
"time.strftime"
] |
[((146, 183), 'odoo.tests.tagged', 'tagged', (['"""post_install"""', '"""-at_install"""'], {}), "('post_install', '-at_install')\n", (152, 183), False, 'from odoo.tests import tagged, Form\n'), ((1483, 1502), 'time.strftime', 'time.strftime', (['"""%Y"""'], {}), "('%Y')\n", (1496, 1502), False, 'import time\n'), ((1720, 1739), 'time.strftime', 'time.strftime', (['"""%Y"""'], {}), "('%Y')\n", (1733, 1739), False, 'import time\n'), ((1955, 1974), 'time.strftime', 'time.strftime', (['"""%Y"""'], {}), "('%Y')\n", (1968, 1974), False, 'import time\n')]
|
from world import World
from player import Player
import mc_objects as mco
class Game:
def __init__(self) -> None:
self.world = World(20, 20)
self.player = Player(self.world)
print("Game started.")
def play(self):
stone_block = mco.Block(mco.Blocks.Stone)
pick = mco.Tool(mco.Tools.Pickaxe)
shovel = mco.Tool(mco.Tools.Shovel)
self.player.add_to_inv(stone_block, 0, 1)
self.player.use(0, 16, 0)
self.player.add_to_inv(pick, 1, 1)
self.player.select_object(1)
self.player.hit(0, 16, 0)
self.player.hit(0, 15, 0)
self.player.hit(0, 14, 0)
self.player.hit(0, 13, 0)
self.player.hit(0, 12, 0)
self.player.hit(0, 11, 0)
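# Minimal usage sketch (assumes this module is the entry point and that the
# imported World/Player/mc_objects modules behave as used above).
if __name__ == "__main__":
    game = Game()
    game.play()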
|
[
"mc_objects.Block",
"mc_objects.Tool",
"player.Player",
"world.World"
] |
[((144, 157), 'world.World', 'World', (['(20)', '(20)'], {}), '(20, 20)\n', (149, 157), False, 'from world import World\n'), ((180, 198), 'player.Player', 'Player', (['self.world'], {}), '(self.world)\n', (186, 198), False, 'from player import Player\n'), ((274, 301), 'mc_objects.Block', 'mco.Block', (['mco.Blocks.Stone'], {}), '(mco.Blocks.Stone)\n', (283, 301), True, 'import mc_objects as mco\n'), ((317, 344), 'mc_objects.Tool', 'mco.Tool', (['mco.Tools.Pickaxe'], {}), '(mco.Tools.Pickaxe)\n', (325, 344), True, 'import mc_objects as mco\n'), ((362, 388), 'mc_objects.Tool', 'mco.Tool', (['mco.Tools.Shovel'], {}), '(mco.Tools.Shovel)\n', (370, 388), True, 'import mc_objects as mco\n')]
|
import numpy as np
from config import GOPARAMETERS
def stone_features(board_state):
# 16 planes, where every other plane represents the stones of a particular color
# which means we track the stones of the last 8 moves.
features = np.zeros([16, GOPARAMETERS.N, GOPARAMETERS.N], dtype=np.uint8)
num_deltas_avail = board_state.board_deltas.shape[0]
cumulative_deltas = np.cumsum(board_state.board_deltas, axis=0)
last_eight = np.tile(board_state.board, [8, 1, 1])
last_eight[1:num_deltas_avail + 1] -= cumulative_deltas
    last_eight[num_deltas_avail + 1:] = last_eight[num_deltas_avail].reshape(1, GOPARAMETERS.N, GOPARAMETERS.N)
features[::2] = last_eight == board_state.to_play
features[1::2] = last_eight == -board_state.to_play
return np.rollaxis(features, 0, 3)
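# Plane layout produced above (after rollaxis): even channels 0, 2, ..., 14 hold
# the current player's stones for the last eight positions (most recent first),
# odd channels 1, 3, ..., 15 hold the opponent's.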
def color_to_play_feature(board_state):
# 1 plane representing which color is to play
# The plane is filled with 1's if the color to play is black; 0's otherwise
if board_state.to_play == GOPARAMETERS.BLACK:
return np.ones([GOPARAMETERS.N, GOPARAMETERS.N, 1], dtype=np.uint8)
else:
return np.zeros([GOPARAMETERS.N, GOPARAMETERS.N, 1], dtype=np.uint8)
def extract_features(board_state):
stone_feat = stone_features(board_state=board_state)
turn_feat = color_to_play_feature(board_state=board_state)
all_features = np.concatenate([stone_feat, turn_feat], axis=2)
return all_features
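# Hedged smoke test: `board_state` is assumed to expose `board` (N x N array),
# `board_deltas` (k x N x N) and `to_play`, matching the attribute accesses
# above; the stand-in class below is illustrative only.
if __name__ == "__main__":
    class _FakePosition:
        board = np.zeros([GOPARAMETERS.N, GOPARAMETERS.N], dtype=np.int8)
        board_deltas = np.zeros([0, GOPARAMETERS.N, GOPARAMETERS.N], dtype=np.int8)
        to_play = GOPARAMETERS.BLACK
    feats = extract_features(board_state=_FakePosition())
    assert feats.shape == (GOPARAMETERS.N, GOPARAMETERS.N, 17)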
|
[
"numpy.zeros",
"numpy.ones",
"numpy.cumsum",
"numpy.tile",
"numpy.rollaxis",
"numpy.concatenate"
] |
[((245, 307), 'numpy.zeros', 'np.zeros', (['[16, GOPARAMETERS.N, GOPARAMETERS.N]'], {'dtype': 'np.uint8'}), '([16, GOPARAMETERS.N, GOPARAMETERS.N], dtype=np.uint8)\n', (253, 307), True, 'import numpy as np\n'), ((390, 433), 'numpy.cumsum', 'np.cumsum', (['board_state.board_deltas'], {'axis': '(0)'}), '(board_state.board_deltas, axis=0)\n', (399, 433), True, 'import numpy as np\n'), ((451, 488), 'numpy.tile', 'np.tile', (['board_state.board', '[8, 1, 1]'], {}), '(board_state.board, [8, 1, 1])\n', (458, 488), True, 'import numpy as np\n'), ((782, 809), 'numpy.rollaxis', 'np.rollaxis', (['features', '(0)', '(3)'], {}), '(features, 0, 3)\n', (793, 809), True, 'import numpy as np\n'), ((1369, 1416), 'numpy.concatenate', 'np.concatenate', (['[stone_feat, turn_feat]'], {'axis': '(2)'}), '([stone_feat, turn_feat], axis=2)\n', (1383, 1416), True, 'import numpy as np\n'), ((1046, 1106), 'numpy.ones', 'np.ones', (['[GOPARAMETERS.N, GOPARAMETERS.N, 1]'], {'dtype': 'np.uint8'}), '([GOPARAMETERS.N, GOPARAMETERS.N, 1], dtype=np.uint8)\n', (1053, 1106), True, 'import numpy as np\n'), ((1132, 1193), 'numpy.zeros', 'np.zeros', (['[GOPARAMETERS.N, GOPARAMETERS.N, 1]'], {'dtype': 'np.uint8'}), '([GOPARAMETERS.N, GOPARAMETERS.N, 1], dtype=np.uint8)\n', (1140, 1193), True, 'import numpy as np\n')]
|
import sys
import struct
import asyncio
import threading
import functools
from concurrent.futures import Future
from .. import pb
__all__ = ['DebugProxy', 'DebugCommand', 'AgentController']
class DebugProxy(threading.Thread):
"""
Debugging class, creating a connection between RobotControl and the Nao (naoth agent) which can be used to
* print out the communication between the naoth agent and the controlling instance (eg. RobotControl) -- set
:param:`print_cmd` to True
* create a connection which never 'dies'; even when the naoth agent restarts/dies, the connection to the controlling
instance is kept
* accepting multiple connections of controlling instances to the naoth agent, allowing -- for example -- to connect
two RobotControl instances to the robot, each command is distributed accordingly
    An extra module is also available in order to start the proxy directly from the command line::
        python -m naoth.utils.DebugProxy host [port] [--target port] [--print]
    It is also possible to instantiate the proxy in the interactive shell or another python script to get slightly more
    control over when the proxy is started and stopped::
>>> import naoth
>>> p = naoth.utils.DebugProxy('localhost', print_cmd=True, start=False)
>>> # ...
>>> p.start()
>>> # ...
>>> p.stop()
"""
def __init__(self, agent_host, agent_port=5401, dest_port=7777, print_cmd=False, start=True):
"""
Initializes the class variables and starts the thread immediately if :param:`start` is set to True (default).
:param agent_host: the host name or ip address of naoth agent (robot, dummysimulator, ...); eg. "localhost"
:param agent_port: the debug port of the naoth agent; default is 5401
:param dest_port: the port which should be opened to allow controlling applications to connect to (eg. RC)
:param print_cmd: set to True to print out all commands and responses going through the proxy.
:param start: whether the thread should start immediately (default) or not
"""
super().__init__()
# the agent thread is only started, if there's at least one connected host
self._robot = None
self._robot_host = agent_host
self._robot_port = agent_port
self._host_host = 'localhost'
self._host_port = dest_port
self._print = print_cmd
self._loop = asyncio.new_event_loop()
self._hosts = []
self._host_listener = None
self._host_connection_cnt = 0
# start thread immediately
if start: self.start()
def run(self) -> None:
"""
The thread main loop.
Sets the asyncio loop of the thread, starts a listener server on the dest_port and runs until cancelled.
"""
# set the event loop to this thread
asyncio.set_event_loop(self._loop)
# start host listener server and 'wait' until the server ist started
self._host_listener = self._loop.run_until_complete(start_server(self._host, self._host_host, self._host_port))
# run until cancelled
self._loop.run_forever()
def stop(self, timeout=None) -> None:
"""
Stops the (running) thread and blocks until finished or until the optional timeout occurs.
        Since this method is called from another thread, the stop request is scheduled as a task on the main loop and
executed some ms later. Also `join` is called on the thread to wait until the thread is actually finished.
The timeout can be used to make sure the main program continues, if an error prevents terminating this
thread -- though that shouldn't happen.
"""
if self._loop.is_running():
self._loop.call_soon_threadsafe(lambda: self._loop.create_task(self._stop_internal()))
self.join(timeout)
async def _stop_internal(self) -> None:
"""
The (internal) scheduled stop request task called by `stop()`.
It stops the listener server, closes all open connections and stops the main loop, which causes the thread to
finish.
"""
# shutdown host listener server to prevent new connections
if self._host_listener:
self._host_listener.close()
await self._host_listener.wait_closed()
# cancel all established connections
# NOTE: the connection to the agent is stopped with the last host connection
for task in self._hosts:
task.cancel()
await task
self._loop.stop()
def _register_host(self) -> None:
"""
Registers a new host connection and sets the name of the task.
        The connection to the naoth instance is started only if there is a host connected to the proxy. This prevents
        the proxy from unnecessarily blocking the naoth instance's debug connection when there is nothing to do.
"""
self._host_connection_cnt += 1
#asyncio.Task.current_task().set_name('Host-{}'.format(self._host_connection_cnt)) # 3.8+
self._hosts.append(asyncio.Task.current_task())
if self._robot is None:
self._robot = AgentController(self._robot_host, self._robot_port)
self._robot.wait_connected() # TODO: is this reasonable???
def _unregister_host(self) -> None:
"""
Unregisters the host from the proxy.
if there are no other active host connections, the naoth agent controller/connection is stopped -- to prevent
blocking unused resources (naoth instance).
"""
self._hosts.remove(asyncio.Task.current_task())
if len(self._hosts) == 0:
if self._robot is not None:
self._robot.stop()
self._robot = None
async def _host(self, stream_reader, stream_writer) -> None:
"""
The actual task, which handles the host connection to the proxy.
Therefore is reads all debug commands send from the host and relays it to the connected naoth instance. The
response is returned to the host.
If required, the received command and the response if printed out to the terminal.
The task runs as long as the connection to the host is active or until the proxy thread is stopped.
"""
self._register_host()
while True:
try:
raw_id = await stream_reader.read(4)
# connection is closed/lost
if raw_id == b'': break
cmd_id = struct.unpack('=l', raw_id)[0]
# check if command is not just a heart beat
if cmd_id != -1:
raw_length = await stream_reader.read(4)
cmd_length = struct.unpack('=l', raw_length)[0]
raw_data = await stream_reader.read(cmd_length)
cmd = DebugCommand.deserialize(raw_data)
cmd.id = cmd_id
# NOTE: the callback is executed in the agent thread!
cmd.add_done_callback(functools.partial(self._response_handler, stream_writer))
self._robot.send_command(cmd)
if self._print:
print(cmd)
except asyncio.CancelledError: # task cancelled
break
except Exception as e:
print('Host-Task:', e)
# close the connection to the host before exiting
stream_writer.close()
await stream_writer._protocol._get_close_waiter(stream_writer) # HACK: in order to work with < 3.7
self._unregister_host()
def _response_handler(self, stream, cmd) -> None:
"""
        Helper method in order to transfer the command from the agent thread back to 'this' thread -- since the
        callback is called in the agent thread. This can cause a delay of ~0.5 ms.
"""
self._loop.call_soon_threadsafe(lambda: self._response_writer(stream, cmd))
def _response_writer(self, stream, cmd) -> None:
"""Writes the response of the command back to the requesting host."""
        # TODO: what to do if the command got cancelled?!
if stream and not cmd.cancelled():
stream.write(struct.pack("<I", cmd.id) + struct.pack("<I", len(cmd.result())) + cmd.result())
if self._print:
print(cmd)
class DebugCommand(Future):
"""
Class representing a debug command for a naoth agent.
It is a Future and can be waited for the response.
"""
def __init__(self, name: str, args=None):
"""
Constructor for the command.
:param name: the name of the command
:param args: additional argument(s) of the command as string or list of string/tuples (name and value)
"""
super().__init__()
self._id = 0
self._name = name
self._args = []
# args can be a string, tuple or a list
if isinstance(args, (str, tuple)):
self._args.append(args)
elif isinstance(args, list):
self._args.extend(args)
@property
def id(self) -> int:
"""Returns command id."""
return self._id
@id.setter
def id(self, value: int) -> None:
"""Sets the command id."""
self._id = value
@property
def name(self) -> str:
"""Returns the name of this command."""
return self._name
def add_arg(self, arg) -> None:
"""
Adds an argument to this command.
:param arg: this can be a simple string or a tuple of two strings (argument name and value).
"""
self._args.append(arg)
def serialize(self) -> bytes:
"""
Serializes the command to a byte representation in order to send it to the agent.
:return: returns the bytes representation of this command
"""
cmd_args = []
if self._args:
for a in self._args:
if isinstance(a, str):
cmd_args.append(pb.Messages_pb2.CMDArg(name=a))
else:
cmd_args.append(pb.Messages_pb2.CMDArg(name=a[0], bytes=a[1].encode()))
proto = pb.Messages_pb2.CMD(name=self.name, args=cmd_args)
return struct.pack("<I", self.id) + struct.pack("<I", proto.ByteSize()) + proto.SerializeToString()
@staticmethod
def deserialize(data) -> 'DebugCommand':
"""
Parses the given data and returns an instance of DebugCommand.
:param data: byte string of a serialized debug command
"""
proto = pb.Messages_pb2.CMD()
proto.ParseFromString(data)
return DebugCommand(proto.name, [(arg.name, arg.bytes.decode()) for arg in proto.args])
def __str__(self) -> str:
"""Returns the string representation of this command."""
str_builder = [self.__class__.__name__, '-', str(self.id), ' [', self._state, ']: ', self.name]
str_args = ", ".join(map(lambda a: self._str_args_helper(a), self._args))
if str_args:
str_builder.append(' ( ')
str_builder.append(str_args)
str_builder.append(' )')
if self.done() and len(self.result()) > 0:
str_builder.append(' {\n')
try:
str_builder.append(self.result().decode('utf-8').strip())
except:
str_builder.append(str(self.result()))
str_builder.append('\n}')
return ''.join(str_builder)
@staticmethod
def _str_args_helper(arg) -> str:
"""Helper method to format the command arguments for the `__str__` method."""
if isinstance(arg, str):
return arg
return arg[0] + ('' if len(arg[1]) == 0 else ': ' + repr(arg[1]))
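# Hedged round-trip sketch: serialize() prepends an 8-byte little-endian header
# (command id, payload size) to the protobuf CMD message, while deserialize()
# parses the bare payload; command name and arguments here are illustrative.
def _debug_command_roundtrip_sketch():
    cmd = DebugCommand('Cognition:representation:print', [('arg', 'value')])
    cmd.id = 42
    raw = cmd.serialize()
    cmd_id = struct.unpack('<I', raw[:4])[0]
    size = struct.unpack('<I', raw[4:8])[0]
    parsed = DebugCommand.deserialize(raw[8:8 + size])
    return cmd_id == 42 and parsed.name == cmd.name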
class AgentController(threading.Thread):
"""
    Class to establish a connection to a naoth agent and send DebugRequests to it -- like doing it via RobotControl.
An instance can be created in an interactive shell or script and sending debug requests to the robot:
>>> import naoth, time
>>> a = naoth.utils.AgentController('localhost', 5401)
>>> a.wait_connected()
>>> a.representation('PlayerInfo').add_done_callback(print)
>>> c = a.agent('soccer_agent')
>>> c.add_done_callback(print)
>>> c = a.debugrequest('gamecontroller:blow_whistle', True) # debug request for cognition
>>> r = c.result() # blocks until result is available
>>> a.debugrequest('Plot:Motion.Cycle', True, 'motion') # debug request for motion
>>> a.debugrequests([ \
('gamecontroller:gamephase:normal', True), \
('gamecontroller:game_state:penalized', True), \
('gamecontroller:set_play:pushing_free_kick', True), \
('gamecontroller:secondaryde:30', True) \
])
>>> a.behavior() # BehaviorStateSparse
>>> a.behavior(True) # BehaviorStateComplete
>>> a.module('FakeBallDetector', True)
>>> a.module('ArmCollisionDetector2018', True, 'motion')
>>> a.send_command(naoth.utils.DebugCommand('help', 'ping')).add_done_callback(print)
>>> c = naoth.utils.DebugCommand('Cognition:representation:list')
>>> a.send_command(c)
>>> c.result() # blocks until result is available
>>> a.stop() # stop the agent gracefully
"""
def __init__(self, host, port=5401, start=True):
"""
Initializes the class variables and starts the thread immediately if :param:`start` is set to True (default).
:param host: the host name or ip address of naoth agent (robot, dummysimulator, ...); eg. "localhost"
:param port: the debug port of the naoth agent; default is 5401
:param start: whether the thread should start immediately (default) or not
"""
super().__init__()
self._host = host
self._port = port
self._stream_reader = None
self._stream_writer = None
self._tasks = []
self._loop = asyncio.new_event_loop()
self._cmd_id = 1
self._cmd_q = asyncio.Queue(loop=self._loop)
self._cmd_m = {}
self._connected = threading.Event()
self._connected_internal = asyncio.Event(loop=self._loop)
# start thread immediately
if start: self.start()
def run(self) -> None:
"""
The thread main loop.
Sets the asyncio loop of the thread, starts all necessary tasks and runs until cancelled.
"""
# set the event loop to this thread
asyncio.set_event_loop(self._loop)
# schedule tasks
self._tasks.append(self._loop.create_task(self._connect())) # name='Connection listener'
self._tasks.append(self._loop.create_task(self._send_heart_beat())) # name='Heart beat'
self._tasks.append(self._loop.create_task(self._poll_answers())) # name='Poll answers'
self._tasks.append(self._loop.create_task(self._send_commands())) # name='Send commands'
# run tasks cooperatively and wait 'till loop is stopped
self._loop.run_forever()
self._set_connected(False)
def stop(self, timeout=None) -> None:
"""
Stops the (running) thread and blocks until finished or until the optional timeout occurs.
        Since this method is called from another thread, the stop request is scheduled as a task on the main loop and
executed some ms later. Also `join` is called on the thread to wait until the thread is actually finished.
The timeout can be used to make sure the main program continues, if an error prevents terminating this
thread -- though that shouldn't happen.
"""
if self._loop.is_running():
self._loop.call_soon_threadsafe(lambda: self._loop.create_task(self._stop_internal()))
self.join(timeout)
async def _stop_internal(self) -> None:
"""
The (internal) scheduled stop request task called by `stop()`.
It stops all scheduled tasks in reverse order and stops the main loop, which causes the thread to finish.
"""
for task in reversed(self._tasks):
try:
task.cancel()
await task
except Exception as e:
print('Stop agent:', task.get_name() if hasattr(task, 'get_name') else task, e, file=sys.stderr)
self._loop.stop()
def is_connected(self) -> bool:
"""Returns True, if the thread is connected to the naoth instance, False otherwise."""
return self._connected.is_set()
def _assert_is_alive(self):
"""Asserts, that this thread is alive, otherwise a runtime error is raised."""
if not self.is_alive():
raise RuntimeError(self.__class__.__name__ + " must be alive and running!")
def wait_connected(self, timeout=None) -> None:
"""
Blocks until the thread is connected to the naoth agent or until the optional timeout occurs.
If the thread wasn't started or isn't alive anymore, a runtime error is raised.
"""
self._assert_is_alive()
self._connected.wait(timeout)
def _set_connected(self, state: bool) -> None:
"""Internal helper method to handle the connection state."""
if state:
self._connected_internal.set()
self._connected.set()
else:
self._connected.clear()
self._connected_internal.clear()
if self._stream_writer:
self._stream_writer.close()
async def _connect(self) -> None:
"""Connection task, which is used to (re-)establish the connection to the naoth agent."""
while True:
try:
# (try to) establish connection or raise exception
self._stream_reader, \
self._stream_writer = await open_connection(host=self._host, port=self._port)
# update internal & external connection state
self._set_connected(True)
# wait 'till the connection is 'closed' (lost?)
await self._stream_writer._protocol._get_close_waiter(self._stream_writer) # HACK: in order to work with < 3.7
# reset the streams
self._stream_reader = None
self._stream_writer = None
except asyncio.CancelledError:
break
except OSError:
# task can be cancelled while sleeping ...
try:
# connection failed, wait before next connection attempt
await asyncio.sleep(1)
except asyncio.CancelledError:
break
except Exception as e: # unexpected exception
print('Connection listener:', e, file=sys.stderr)
except Exception as e: # unexpected exception
print('Connection listener:', e, file=sys.stderr)
finally:
                # empty the queue and set an exception -- since we don't have a connection
while not self._cmd_q.empty():
self._cmd_q.get_nowait().set_exception(Exception('Not connected to the agent!'))
if self._stream_writer:
self._stream_writer.close()
#await self._stream_writer.wait_closed() # NOTE: this doesn't complete?!
#await self._stream_writer._protocol._get_close_waiter(self._stream_writer) # HACK: in order to work with < 3.7
async def _send_heart_beat(self) -> None:
"""Task to regularly (1s) send a heart beat to the agent."""
while True:
try:
await self._connected_internal.wait()
self._stream_writer.write(struct.pack('!i', -1))
await self._stream_writer.drain()
await asyncio.sleep(1)
except asyncio.CancelledError: # task cancelled
break
except OSError: # connection lost
self._set_connected(False)
except Exception as e: # unexpected exception
print('Heart beat:', e, file=sys.stderr)
async def _poll_answers(self) -> None:
"""Task to receive the response of a previous command and set the result to that command."""
def lost_connection(d):
"""Helper function to determine, if the connection was lost."""
if d == b'':
self._set_connected(False)
return True
return False
while True:
try:
await self._connected_internal.wait()
raw_id = await self._stream_reader.read(4)
if lost_connection(raw_id): continue
cmd_id = struct.unpack('=l', raw_id)[0]
raw_size = await self._stream_reader.read(4)
if lost_connection(raw_size): continue
size = struct.unpack('=l', raw_size)[0]
raw_data = await self._stream_reader.read(size)
if size > 0 and lost_connection(raw_data): continue
while len(raw_data) < size:
new_data = await self._stream_reader.read(size - len(raw_data))
if lost_connection(new_data):
break
raw_data += new_data
if not self._connected.is_set():
continue
if cmd_id in self._cmd_m:
cmd, _id = self._cmd_m.pop(cmd_id)
if not cmd.cancelled():
cmd.id = _id
cmd.set_result(raw_data)
else:
print('Unknown command id:', cmd_id, file=sys.stderr)
except asyncio.CancelledError: # task cancelled
break
except OSError: # connection lost
self._set_connected(False)
except Exception as e: # unexpected exception
print('Poll answers:', e, file=sys.stderr)
async def _send_commands(self) -> None:
"""Task to send scheduled commands."""
def cancel_cmd(cmd, ex=None):
"""Helper function, if an exception occurred and the command couldn't be send."""
_, _id = self._cmd_m.pop(cmd.id)
cmd.set_exception(ex if ex else Exception('Lost connection to the agent!'))
cmd.id = _id # replace internal id with the original
while True:
try:
await self._connected_internal.wait()
# get next command
cmd = await self._cmd_q.get()
# set command to running
if cmd.set_running_or_notify_cancel():
self._store_cmd(cmd)
try:
# send command
self._stream_writer.write(cmd.serialize())
await self._stream_writer.drain()
except asyncio.CancelledError: # task cancelled
cancel_cmd(cmd)
break
except OSError: # connection lost
self._set_connected(False)
cancel_cmd(cmd)
except Exception as e: # unexpected exception
print('Send commands:', e, file=sys.stderr)
cancel_cmd(cmd, e)
finally:
self._cmd_q.task_done() # mark as done
else:
self._cmd_q.task_done() # mark as done
except asyncio.CancelledError: # task cancelled
break
except OSError: # connection lost
self._set_connected(False)
except Exception as e: # unexpected exception
print('Send commands:', e, file=sys.stderr)
def _store_cmd(self, cmd) -> None:
"""Replaces the command id with an internal id and store command+id for later response."""
self._cmd_m[self._cmd_id] = (cmd, cmd.id)
cmd.id = self._cmd_id
self._cmd_id += 1
def send_command(self, cmd: DebugCommand) -> DebugCommand:
"""
Schedules the given command in the command queue and returns the command.
:raises Exception: if not connected to a naoth agent or the given command was already executed
"""
if not self.is_connected():
raise Exception('Not connected to the agent!')
if cmd.done():
raise Exception('This command has already been executed!')
# command queue is not thread safe - make sure we're add it in the correct thread
        # this can cause a delay of ~0.5 ms
self._loop.call_soon_threadsafe(functools.partial(self._cmd_q.put_nowait, cmd))
return cmd
def debugrequest(self, request: str, enable: bool, type: str = 'cognition') -> DebugCommand:
"""
Enables/Disables a debug request of the agent.
:param request: the debug request which should be en-/disabled
:param enable: True, if debug request should be enabled, False if it should be disabled
:param type: the type of the debug request ('cognition' or 'motion')
        :return: Returns the scheduled command (future)
"""
return self.debugrequests([(request, enable)], type)
def debugrequests(self, requests: list, type: str = 'cognition') -> DebugCommand:
"""
Enables/Disables a list of debug request of the agent.
:param requests: a list of tuples ('debug request', True|False) of debug requests which should be en-/disabled
:param type: the type of the debug request ('cognition' or 'motion')
        :return: Returns the scheduled command (future)
"""
dbg = pb.Messages_pb2.DebugRequest(requests=[
pb.Messages_pb2.DebugRequest.Item(name=request, value=enable) for request, enable in requests
])
if type == 'cognition':
return self.send_command(DebugCommand('Cognition:representation:set', [('DebugRequest', dbg.SerializeToString().decode())]))
elif type == 'motion':
return self.send_command(DebugCommand('Motion:representation:set', [('DebugRequest', dbg.SerializeToString().decode())]))
raise Exception('Unknown debug request type! Allowed: "cognition", "motion"')
def module(self, name: str, enable: bool, type: str = 'cognition') -> DebugCommand:
"""
Enables/Disables a module of the agent instance.
:param name: the module which should be en-/disabled
:param enable: True, if module should be enabled, False if it should be disabled
:param type: the type of the module ('cognition' or 'motion')
        :return: Returns the scheduled command (future)
"""
if type == 'cognition':
return self.send_command(DebugCommand('Cognition:modules:set', [(name, ('on' if enable else 'off'))]))
elif type == 'motion':
return self.send_command(DebugCommand('Motion:modules:set', [(name, ('on' if enable else 'off'))]))
raise Exception('Unknown module type! Allowed: "cognition", "motion"')
def representation(self, name: str, type: str = 'cognition', binary: bool = False) -> DebugCommand:
"""
Schedules a command for retrieving a representation.
:param name: the name of the representation which should be retrieved.
:param type: the type of the representation ('cognition' or 'motion')
:param binary: whether the result should be binary (protobuf) or as string
        :return: Returns the scheduled command (future)
"""
if type == 'cognition':
if binary:
return self.send_command(DebugCommand('Cognition:representation:get', [name]))
else:
return self.send_command(DebugCommand('Cognition:representation:print', [name]))
elif type == 'motion':
if binary:
return self.send_command(DebugCommand('Motion:representation:get', [name]))
else:
return self.send_command(DebugCommand('Motion:representation:print', [name]))
raise Exception('Unknown representation type! Allowed: "cognition", "motion"')
def agent(self, name: str = None) -> DebugCommand:
"""
Get or set a named agent for execution.
:param name: the name of the agent (behavior), which should be executed or None if the current agent should
be returned
        :return: Returns the scheduled command (future)
"""
if name is None:
return self.send_command(DebugCommand('Cognition:behavior:get_agent'))
return self.send_command(DebugCommand('Cognition:behavior:set_agent', [('agent', name)]))
def behavior(self, complete=False) -> DebugCommand:
"""
Schedules a command for retrieving the current behavior of the agent.
:param complete: True, if the complete behavior tree should be retrieved, False otherwise (sparse)
        :return: Returns the scheduled command (future)
"""
if complete:
return self.representation('BehaviorStateComplete', binary=True)
else:
return self.representation('BehaviorStateSparse', binary=True)
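# Hedged end-to-end sketch mirroring the docstring usage above; host and port
# are illustrative, and the result() call blocks until the agent responds.
def _poll_behavior_sketch(host='localhost', port=5401):
    agent = AgentController(host, port)
    agent.wait_connected(timeout=5)
    try:
        return agent.behavior().result()
    finally:
        agent.stop()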
if sys.version_info < (3, 7):
# python < 3.7
@asyncio.coroutine
def open_connection(host=None, port=None, loop=None):
if loop is None:
loop = asyncio.get_event_loop()
reader = asyncio.StreamReader()
protocol = StreamReaderProtocolCompat(reader)
transport, _ = yield from loop.create_connection(lambda: protocol, host, port)
writer = asyncio.StreamWriter(transport, protocol, reader, loop)
return reader, writer
@asyncio.coroutine
def start_server(client_connected_cb, host=None, port=None, loop=None):
if loop is None:
loop = asyncio.get_event_loop()
def factory():
reader = asyncio.StreamReader(loop=loop)
protocol = StreamReaderProtocolCompat(reader, client_connected_cb, loop=loop)
return protocol
return (yield from loop.create_server(factory, host, port))
class StreamReaderProtocolCompat(asyncio.StreamReaderProtocol):
def __init__(self, stream_reader, client_connected_cb=None, loop=None):
super().__init__(stream_reader, client_connected_cb, loop)
self._closed = self._loop.create_future()
def connection_lost(self, exc) -> None:
super().connection_lost(exc)
if not self._closed.done():
if exc is None:
self._closed.set_result(None)
else:
self._closed.set_exception(exc)
def _get_close_waiter(self, stream):
return self._closed
def __del__(self):
if self._closed.done() and not self._closed.cancelled():
self._closed.exception()
else:
# python >= 3.7
open_connection = asyncio.open_connection
start_server = asyncio.start_server
|
[
"functools.partial",
"asyncio.get_event_loop",
"asyncio.sleep",
"asyncio.set_event_loop",
"asyncio.Event",
"struct.unpack",
"asyncio.StreamReader",
"asyncio.Task.current_task",
"struct.pack",
"asyncio.StreamWriter",
"threading.Event",
"asyncio.Queue",
"asyncio.new_event_loop"
] |
[((2486, 2510), 'asyncio.new_event_loop', 'asyncio.new_event_loop', ([], {}), '()\n', (2508, 2510), False, 'import asyncio\n'), ((2925, 2959), 'asyncio.set_event_loop', 'asyncio.set_event_loop', (['self._loop'], {}), '(self._loop)\n', (2947, 2959), False, 'import asyncio\n'), ((14202, 14226), 'asyncio.new_event_loop', 'asyncio.new_event_loop', ([], {}), '()\n', (14224, 14226), False, 'import asyncio\n'), ((14275, 14305), 'asyncio.Queue', 'asyncio.Queue', ([], {'loop': 'self._loop'}), '(loop=self._loop)\n', (14288, 14305), False, 'import asyncio\n'), ((14358, 14375), 'threading.Event', 'threading.Event', ([], {}), '()\n', (14373, 14375), False, 'import threading\n'), ((14411, 14441), 'asyncio.Event', 'asyncio.Event', ([], {'loop': 'self._loop'}), '(loop=self._loop)\n', (14424, 14441), False, 'import asyncio\n'), ((14743, 14777), 'asyncio.set_event_loop', 'asyncio.set_event_loop', (['self._loop'], {}), '(self._loop)\n', (14765, 14777), False, 'import asyncio\n'), ((29877, 29899), 'asyncio.StreamReader', 'asyncio.StreamReader', ([], {}), '()\n', (29897, 29899), False, 'import asyncio\n'), ((30058, 30113), 'asyncio.StreamWriter', 'asyncio.StreamWriter', (['transport', 'protocol', 'reader', 'loop'], {}), '(transport, protocol, reader, loop)\n', (30078, 30113), False, 'import asyncio\n'), ((5189, 5216), 'asyncio.Task.current_task', 'asyncio.Task.current_task', ([], {}), '()\n', (5214, 5216), False, 'import asyncio\n'), ((5709, 5736), 'asyncio.Task.current_task', 'asyncio.Task.current_task', ([], {}), '()\n', (5734, 5736), False, 'import asyncio\n'), ((24973, 25019), 'functools.partial', 'functools.partial', (['self._cmd_q.put_nowait', 'cmd'], {}), '(self._cmd_q.put_nowait, cmd)\n', (24990, 25019), False, 'import functools\n'), ((29834, 29858), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (29856, 29858), False, 'import asyncio\n'), ((30290, 30314), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (30312, 30314), False, 'import asyncio\n'), ((30360, 30391), 'asyncio.StreamReader', 'asyncio.StreamReader', ([], {'loop': 'loop'}), '(loop=loop)\n', (30380, 30391), False, 'import asyncio\n'), ((10387, 10413), 'struct.pack', 'struct.pack', (['"""<I"""', 'self.id'], {}), "('<I', self.id)\n", (10398, 10413), False, 'import struct\n'), ((6636, 6663), 'struct.unpack', 'struct.unpack', (['"""=l"""', 'raw_id'], {}), "('=l', raw_id)\n", (6649, 6663), False, 'import struct\n'), ((19946, 19967), 'struct.pack', 'struct.pack', (['"""!i"""', '(-1)'], {}), "('!i', -1)\n", (19957, 19967), False, 'import struct\n'), ((20042, 20058), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (20055, 20058), False, 'import asyncio\n'), ((20953, 20980), 'struct.unpack', 'struct.unpack', (['"""=l"""', 'raw_id'], {}), "('=l', raw_id)\n", (20966, 20980), False, 'import struct\n'), ((21124, 21153), 'struct.unpack', 'struct.unpack', (['"""=l"""', 'raw_size'], {}), "('=l', raw_size)\n", (21137, 21153), False, 'import struct\n'), ((6855, 6886), 'struct.unpack', 'struct.unpack', (['"""=l"""', 'raw_length'], {}), "('=l', raw_length)\n", (6868, 6886), False, 'import struct\n'), ((7173, 7229), 'functools.partial', 'functools.partial', (['self._response_handler', 'stream_writer'], {}), '(self._response_handler, stream_writer)\n', (7190, 7229), False, 'import functools\n'), ((8359, 8384), 'struct.pack', 'struct.pack', (['"""<I"""', 'cmd.id'], {}), "('<I', cmd.id)\n", (8370, 8384), False, 'import struct\n'), ((18810, 18826), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (18823, 18826), False, 'import asyncio\n')]
|
#!/usr/bin/env python3
from argparse import ArgumentParser
from elftools.elf.constants import P_FLAGS
from elftools.elf.elffile import ELFFile
import io
def segment_is_text(segment):
return segment['p_flags'] & P_FLAGS.PF_X == P_FLAGS.PF_X
def segment_is_data(segment):
return not segment_is_text(segment) and not segment_is_bss(segment)
def segment_is_bss(segment):
return segment['p_filesz'] == 0
def write_to_dol_header(file, offset, val):
file.seek(offset)
file.write(val.to_bytes(4, byteorder = 'big'))
file.seek(0, io.SEEK_END)
def write_segment_to_dol(idx, segment, dol_file):
write_to_dol_header(dol_file, 0x00 + 0x04 * idx, dol_file.tell())
write_to_dol_header(dol_file, 0x48 + 0x04 * idx, segment['p_vaddr'])
# align filesz to 0x20
filesz = ((segment['p_filesz'] + 0x1F) >> 5) << 5
write_to_dol_header(dol_file, 0x90 + 0x04 * idx, filesz)
dol_file.write(segment.data())
# align current dol size to 0x20
    size = (0x20 - dol_file.tell()) & 0x1F
dol_file.write(bytes([0x00] * size))
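# Hedged reference for the header offsets used above (standard GameCube/Wii DOL
# layout: 0x100-byte header, big-endian u32 fields, 7 text + 11 data sections).
DOL_HEADER_LAYOUT = {
    'section_offsets': 0x00,    # 18 x u32 file offsets (text 0-6, data 7-17)
    'section_addresses': 0x48,  # 18 x u32 load addresses
    'section_sizes': 0x90,      # 18 x u32 sizes (0x20-aligned above)
    'bss_address': 0xD8,
    'bss_size': 0xDC,
    'entry_point': 0xE0,
}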
parser = ArgumentParser()
parser.add_argument('in_path')
parser.add_argument('out_path')
args = parser.parse_args()
with open(args.in_path, 'rb') as elf_file, open(args.out_path, 'wb') as dol_file:
elf_file = ELFFile(elf_file)
num_segments = elf_file.num_segments()
dol_file.write(bytes([0x00] * 0x100))
idx = 0
for i in range(num_segments):
segment = elf_file.get_segment(i)
if not segment_is_text(segment):
continue
write_segment_to_dol(idx, segment, dol_file)
idx += 1
idx = 7
for i in range(num_segments):
segment = elf_file.get_segment(i)
if not segment_is_data(segment):
continue
write_segment_to_dol(idx, segment, dol_file)
idx += 1
bss_start = 0
bss_end = 0
for i in range(num_segments):
segment = elf_file.get_segment(i)
if not segment_is_bss(segment):
continue
if bss_start == 0:
bss_start = segment['p_vaddr']
bss_end = segment['p_vaddr'] + segment['p_memsz']
write_to_dol_header(dol_file, 0xD8, bss_start)
bss_size = bss_end - bss_start
write_to_dol_header(dol_file, 0xDC, bss_size)
write_to_dol_header(dol_file, 0xE0, elf_file['e_entry'])
|
[
"elftools.elf.elffile.ELFFile",
"argparse.ArgumentParser"
] |
[((1066, 1082), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (1080, 1082), False, 'from argparse import ArgumentParser\n'), ((1271, 1288), 'elftools.elf.elffile.ELFFile', 'ELFFile', (['elf_file'], {}), '(elf_file)\n', (1278, 1288), False, 'from elftools.elf.elffile import ELFFile\n')]
|
import importlib
from pprint import pprint
import ast
def get_imports_from_file(file_name):
with open(file_name, "r") as source:
tree = ast.parse(source.read())
analyzer = Analyzer()
analyzer.visit(tree)
imports = analyzer.report()
ret = []
for i in imports:
print(i)
try:
a = importlib.util.find_spec(i)
if a.origin:
ret.append(a.origin)
except Exception as e:
print(e)
return ret
class Analyzer(ast.NodeVisitor):
def __init__(self):
self.import_list = []
def visit_Import(self, node):
for alias in node.names:
# print("import", alias.name)
self.import_list.append(alias.name)
self.generic_visit(node)
def visit_ImportFrom(self, node):
for alias in node.names:
# print("from", node.module, "import", alias.name)
if alias.name == '*':
self.import_list.append(node.module)
else:
self.import_list.append(node.module + '.' + alias.name)
self.generic_visit(node)
def generic_visit(self, node):
# print(type(node).__name__)
# print(node._fields)
return super().generic_visit(node)
def report(self):
return self.import_list
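# Hedged inline example (module names illustrative): ast.parse also accepts a
# plain source string, so the visitor can be exercised without a file on disk,
# e.g. get_imports_from_source("import os\nfrom sys import path")
# returns ['os', 'sys.path'].
def get_imports_from_source(source):
    analyzer = Analyzer()
    analyzer.visit(ast.parse(source))
    return analyzer.report()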
if __name__ == "__main__":
print(get_imports_from_file("backdoor.py"))
print(get_imports_from_file("defaults.py"))
|
[
"importlib.util.find_spec"
] |
[((340, 367), 'importlib.util.find_spec', 'importlib.util.find_spec', (['i'], {}), '(i)\n', (364, 367), False, 'import importlib\n')]
|
"""
@brief test log(time=2s)
"""
import sys
import os
import unittest
from pyquickhelper.loghelper import fLOG
try:
import src
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..")))
if path not in sys.path:
sys.path.append(path)
import src
from src.ensae_teaching_cs.automation_students import ProjectsRepository
class TestRepositoryLittleAspect(unittest.TestCase):
def test_regular_expression(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
text = """<br /></div>
<div><div dir="ltr">Pourriez-vous vous ajouter sur le doodle suivant ?<div><br></div><div>
<p style="margin:0in;font-family:Calibri;font-size:11pt" lang="fr">
<a href="http://doodle.com/poll/xxxxxxxxc9w8">http://doodle.com/poll/xxxxxxsyz7c9w8</a></p></div></div><div class
"""
f = ProjectsRepository._link_regex.findall(text)
fLOG(f)
self.assertEqual(len(f), 2)
self.assertEqual(f[0], "http://doodle.com/poll/xxxxxxxxc9w8")
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"sys.path.append",
"pyquickhelper.loghelper.fLOG",
"src.ensae_teaching_cs.automation_students.ProjectsRepository._link_regex.findall",
"os.path.split"
] |
[((1290, 1305), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1303, 1305), False, 'import unittest\n'), ((579, 651), 'pyquickhelper.loghelper.fLOG', 'fLOG', (['__file__', 'self._testMethodName'], {'OutputPrint': "(__name__ == '__main__')"}), "(__file__, self._testMethodName, OutputPrint=__name__ == '__main__')\n", (583, 651), False, 'from pyquickhelper.loghelper import fLOG\n'), ((1090, 1134), 'src.ensae_teaching_cs.automation_students.ProjectsRepository._link_regex.findall', 'ProjectsRepository._link_regex.findall', (['text'], {}), '(text)\n', (1128, 1134), False, 'from src.ensae_teaching_cs.automation_students import ProjectsRepository\n'), ((1143, 1150), 'pyquickhelper.loghelper.fLOG', 'fLOG', (['f'], {}), '(f)\n', (1147, 1150), False, 'from pyquickhelper.loghelper import fLOG\n'), ((365, 386), 'sys.path.append', 'sys.path.append', (['path'], {}), '(path)\n', (380, 386), False, 'import sys\n'), ((254, 277), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (267, 277), False, 'import os\n')]
|
from numpy import array, copy, concatenate
from torch import Tensor
from botorch.acquisition.multi_objective.monte_carlo import (
qExpectedHypervolumeImprovement, qNoisyExpectedHypervolumeImprovement
)
from botorch.posteriors import GPyTorchPosterior, Posterior, DeterministicPosterior
from gpytorch.distributions import MultitaskMultivariateNormal
from gpytorch.lazy import BlockDiagLazyTensor
import torch
# TODO: replace these with the non-mocked versions once botorch #991 comes in
# will need to update to botorch master
class qDiscreteEHVI(qExpectedHypervolumeImprovement):
def forward(self, X: array) -> Tensor:
# mocks the qEHVI call
# assumes that X is an array of shape batch x q rather than a tensor of shape batch x q x d
posterior = self.model.posterior(X)
samples = self.sampler(posterior)
return self._compute_qehvi(samples=samples)
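# Usage sketch (hypothetical names): because forward() merely passes the encoded
# candidates to model.posterior, acquisition values can be computed straight
# from an array, e.g.
#   acq = qDiscreteEHVI(model=model, ref_point=ref_point, partitioning=partitioning)
#   values = acq(candidate_array)  # candidate_array has shape batch x q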
class qDiscreteNEHVI(qNoisyExpectedHypervolumeImprovement):
# TODO: figure out how to remove
def __init__(
self,
model,
ref_point,
X_baseline,
sampler = None,
objective = None,
constraints = None,
X_pending = None,
eta: float = 1e-3,
prune_baseline: bool = False,
alpha: float = 0.0,
cache_pending: bool = True,
max_iep: int = 0,
incremental_nehvi: bool = True,
**kwargs,
):
model.eval()
mocked_features = model.get_features(X_baseline, model.bs)
# for string kernels
if mocked_features.ndim > 2:
            mocked_features = mocked_features[..., 0].to(ref_point)  # don't let this fail
super().__init__(
model=model,
ref_point=ref_point,
X_baseline=mocked_features,
sampler=sampler,
objective=objective,
constraints=constraints,
X_pending=X_pending,
eta=eta,
prune_baseline=prune_baseline,
alpha=alpha,
cache_pending=cache_pending,
max_iep=max_iep,
incremental_nehvi=incremental_nehvi,
**kwargs
)
self.X_baseline_string = X_baseline
def forward(self, X: array) -> Tensor:
if isinstance(X, Tensor):
baseline_X = self._X_baseline
baseline_X = baseline_X.expand(*X.shape[:-2], -1, -1)
X_full = torch.cat([baseline_X, X], dim=-2)
else:
baseline_X = copy(self.X_baseline_string) # ensure contiguity
baseline_X.resize(
baseline_X.shape[:-(X.ndim)] + X.shape[:-1] + baseline_X.shape[-1:]
)
X_full = concatenate([baseline_X, X], axis=-1)
        # Note: it is important to compute the full posterior over `(X_baseline, X)`
        # to ensure that we properly sample `f(X)` from the joint distribution
        # `f(X_baseline, X) ~ P(f | D)`, given that we have already fixed the sampled
        # function values for `f(X_baseline)`
posterior = self.model.posterior(X_full)
q = X.shape[-2]
self._set_sampler(q=q, posterior=posterior)
samples = self.sampler(posterior)[..., -q:, :]
# add previous nehvi from pending points
return self._compute_qehvi(samples=samples) + self._prev_nehvi
def _cache_root_decomposition(self, posterior: GPyTorchPosterior) -> None:
if posterior.mvn._interleaved:
if hasattr(posterior.mvn.lazy_covariance_matrix, 'base_lazy_tensor'):
posterior_lc_base = posterior.mvn.lazy_covariance_matrix.base_lazy_tensor
else:
posterior_lc_base = posterior.mvn.lazy_covariance_matrix
new_lazy_covariance = BlockDiagLazyTensor(posterior_lc_base)
posterior.mvn = MultitaskMultivariateNormal(posterior.mvn.mean, new_lazy_covariance, interleaved=False)
return super()._cache_root_decomposition(posterior=posterior)
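# Design note on the conversion above (sketch): a GPyTorch multitask posterior
# can store its covariance either interleaved (tasks varying fastest) or
# block-diagonal per task; rebuilding the MultitaskMultivariateNormal with
# interleaved=False puts it in the layout the cached-root decomposition expects.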
class qMTGPDiscreteNEHVI(qDiscreteNEHVI):
# TODO: remove when botorch #1037 goes in
# this is copied over from that diff
_uses_matheron = True
def __init__(self, *args, **kwargs):
        super().__init__(cache_root=False, *args, **kwargs)
def _set_sampler(
self,
q: int,
posterior: Posterior,
) -> None:
r"""Update the sampler to use the original base samples for X_baseline.
Args:
q: the batch size
posterior: the posterior
TODO: refactor some/all of this into the MCSampler.
"""
if self.q != q:
# create new base_samples
base_sample_shape = self.sampler._get_base_sample_shape(posterior=posterior)
self.sampler._construct_base_samples(
posterior=posterior, shape=base_sample_shape
)
if (
self.X_baseline.shape[0] > 0
and self.base_sampler.base_samples is not None
and not isinstance(posterior, DeterministicPosterior)
):
current_base_samples = self.base_sampler.base_samples.detach().clone()
# This is the # of non-`sample_shape` dimensions.
base_ndims = current_base_samples.dim() - 1
# Unsqueeze as many dimensions as needed to match base_sample_shape.
view_shape = (
self.sampler.sample_shape
+ torch.Size(
[1] * (len(base_sample_shape) - current_base_samples.dim())
)
+ current_base_samples.shape[-base_ndims:]
)
expanded_shape = (
base_sample_shape[:-base_ndims]
+ current_base_samples.shape[-base_ndims:]
)
# Use stored base samples:
# Use all base_samples from the current sampler
# this includes the base_samples from the base_sampler
# and any base_samples for the new points in the sampler.
# For example, when using sequential greedy candidate generation
# then generate the new candidate point using last (-1) base_sample
# in sampler. This copies that base sample.
end_idx = current_base_samples.shape[-1 if self._uses_matheron else -2]
expanded_samples = current_base_samples.view(view_shape).expand(
expanded_shape
)
if self._uses_matheron:
self.sampler.base_samples[..., :end_idx] = expanded_samples
else:
self.sampler.base_samples[..., :end_idx, :] = expanded_samples
# update cached subset indices
# Note: this also stores self.q = q
self._cache_q_subset_indices(q=q)
|
[
"numpy.copy",
"gpytorch.lazy.BlockDiagLazyTensor",
"gpytorch.distributions.MultitaskMultivariateNormal",
"torch.cat",
"numpy.concatenate"
] |
[((2408, 2442), 'torch.cat', 'torch.cat', (['[baseline_X, X]'], {'dim': '(-2)'}), '([baseline_X, X], dim=-2)\n', (2417, 2442), False, 'import torch\n'), ((2482, 2510), 'numpy.copy', 'copy', (['self.X_baseline_string'], {}), '(self.X_baseline_string)\n', (2486, 2510), False, 'from numpy import array, copy, concatenate\n'), ((2681, 2718), 'numpy.concatenate', 'concatenate', (['[baseline_X, X]'], {'axis': '(-1)'}), '([baseline_X, X], axis=-1)\n', (2692, 2718), False, 'from numpy import array, copy, concatenate\n'), ((3737, 3775), 'gpytorch.lazy.BlockDiagLazyTensor', 'BlockDiagLazyTensor', (['posterior_lc_base'], {}), '(posterior_lc_base)\n', (3756, 3775), False, 'from gpytorch.lazy import BlockDiagLazyTensor\n'), ((3804, 3895), 'gpytorch.distributions.MultitaskMultivariateNormal', 'MultitaskMultivariateNormal', (['posterior.mvn.mean', 'new_lazy_covariance'], {'interleaved': '(False)'}), '(posterior.mvn.mean, new_lazy_covariance,\n interleaved=False)\n', (3831, 3895), False, 'from gpytorch.distributions import MultitaskMultivariateNormal\n')]
|
from blog_site.common.database import Database
from blog_site.terminal_blog.model.menu import Menu
__author__ = '<NAME>'
Database.initialize()
menu = Menu()
menu.run_menu()
|
[
"blog_site.common.database.Database.initialize",
"blog_site.terminal_blog.model.menu.Menu"
] |
[((123, 144), 'blog_site.common.database.Database.initialize', 'Database.initialize', ([], {}), '()\n', (142, 144), False, 'from blog_site.common.database import Database\n'), ((152, 158), 'blog_site.terminal_blog.model.menu.Menu', 'Menu', ([], {}), '()\n', (156, 158), False, 'from blog_site.terminal_blog.model.menu import Menu\n')]
|
from django.shortcuts import render, redirect
from StudentManager.functions import viewStudents
from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers
import concurrent.futures
import threading
from django.utils import timezone
import datetime
from Manager.functions import incrementTotalCheckIn, decrementTotalCheckIn
from django.http import HttpResponseRedirect
from django.core.mail import send_mail
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.contrib import messages
from django.urls import reverse
from django.contrib.auth.decorators import login_required
from Dashboard.decorators import management
@login_required(login_url='login')
def viewStudentsList(request):
#if bool(Group.objects.get(name="accounts") in User.objects.get(username=request.user).groups.all() or
# Group.objects.get(name="principal") in User.objects.get(username=request.user).groups.all() or
# Group.objects.get(name="administrator") in User.objects.get(username=request.user).groups.all()) == False:
# return render(request, 'dashboard/dashboard.html',
# {'CheckStat': CheckStat.objects.get(id=1),
# 'students': Students.objects.all().filter(CheckedOut="Yes").order_by('LastName') | Students.objects.all().filter(CheckedIn="Yes").order_by('LastName'),
# 'mode': 'viewCheckIn'})
return viewStudents(request, "viewStudentsPass.html")
@login_required(login_url='login')
@management
def viewStudentsListAdmin(request):
#if bool(Group.objects.get(name="accounts") in User.objects.get(username=request.user).groups.all() or
# Group.objects.get(name="principal") in User.objects.get(username=request.user).groups.all() or
# Group.objects.get(name="administrator") in User.objects.get(username=request.user).groups.all()) == False:
# return render(request, 'dashboard/dashboard.html',
# {'CheckStat': CheckStat.objects.get(id=1),
# 'students': Students.objects.all().filter(CheckedOut="Yes").order_by('LastName') | Students.objects.all().filter(CheckedIn="Yes").order_by('LastName'),
# 'mode': 'viewCheckIn'})
return viewStudents(request, "viewStudentsPassAdmin.html")
@login_required(login_url='login')
@management
def viewCheckInProfileAdmin(request, pk):
student = Students.objects.get(pk=pk)
season = CurrentSeason.objects.get(pk=1).Season
checkin = ""
allowed = ""
if CheckIn.objects.filter(Student=student, Season=season).exists():
checkedIn = "Yes"
checkin = CheckIn.objects.get(Student=student, Season=season)
else:
checkedIn = "No"
if Allowed.objects.filter(Student=student, Season=season).exists():
allowed = Allowed.objects.get(Student=student, Season=season)
else:
allowed = ""
return render(request, "checkInProfilePassAdmin.html", {'student': student, 'checkedIn': checkedIn, 'checkin': checkin,
'allowed': allowed})
@login_required(login_url='login')
def viewCheckInProfile(request, pk):
student = Students.objects.get(pk=pk)
season = CurrentSeason.objects.get(pk=1).Season
checkin = ""
allowed = ""
if CheckIn.objects.filter(Student=student, Season=season).exists():
checkedIn = "Yes"
checkin = CheckIn.objects.get(Student=student, Season=season)
else:
checkedIn = "No"
if Allowed.objects.filter(Student=student, Season=season).exists():
allowed = Allowed.objects.get(Student=student, Season=season)
else:
allowed = ""
return render(request, "checkInProfilePass.html", {'student': student, 'checkedIn': checkedIn, 'checkin': checkin,
'allowed': allowed})
def Pass_helperAdmin(request, id):
if request.method == "POST":
reason = request.POST.getlist("reason")[0]
current_season = CurrentSeason.objects.get(pk=1)
season = Seasons.objects.get(SeasonName=current_season)
student = Students.objects.get(pk=id)
        # advance the global pass-code counter; update()/create() persist
        # immediately, so no explicit save call is needed
        if Pointers.objects.filter(id=1).exists():
            pass_code = Pointers.objects.get(id=1).PassCodePointer + 1
            Pointers.objects.filter(id=1).update(PassCodePointer=pass_code)
        else:
            pass_code = CheckIn.objects.all().count() + 1
            Pointers.objects.create(id=1, Season=season, PassCodePointer=pass_code)
pass_code = str(pass_code).zfill(4)
print("here")
        # mark the student as cleared for the season, creating the record on first clearance
        if Allowed.objects.filter(Student=student, Season=season).exists():
            Allowed.objects.filter(Student=student, Season=season).update(Clear="Yes")
        else:
            Allowed.objects.create(Student=student, Season=season, Clear="Yes")
if CheckIn.objects.filter(Student=student, Season=season).exists():
CheckIn.objects.filter(Student=student,
Season=season).update(Passed="Yes", PassCode=pass_code,
ReasonPass=reason, DateTimeStamp=timezone.now(),
ByStaffPass=(str(request.user.last_name) + ", " + str(request.user.first_name)))
#incrementTotalCheckIn()
else:
CheckIn.objects.create(Student=student,
Season=season, Passed="Yes", PassCode=pass_code,
ReasonPass=reason,
DateTimeStamp=timezone.now(),
ByStaffPass=(str(request.user.last_name) + ", " + str(request.user.first_name)))
#incrementTotalCheckIn()
print("checked in----")
def Pass_helper(request, id):
if request.method == "POST":
current_season = CurrentSeason.objects.get(pk=1)
season = Seasons.objects.get(SeasonName=current_season)
student = Students.objects.get(pk=id)
        # advance the global pass-code counter; update()/create() persist
        # immediately, so no explicit save call is needed (calling save() on the
        # model class itself would raise a TypeError)
        if Pointers.objects.filter(id=1).exists():
            pass_code = Pointers.objects.get(id=1).PassCodePointer + 1
            Pointers.objects.filter(id=1).update(PassCodePointer=pass_code)
        else:
            pass_code = CheckIn.objects.all().count() + 1
            Pointers.objects.create(id=1, Season=season, PassCodePointer=pass_code)
pass_code = str(pass_code).zfill(4)
        if CheckIn.objects.filter(Student=student, Season=season).exists():
            CheckIn.objects.filter(Student=student, Season=season).update(
                Passed="Yes", PassCode=pass_code,
                ReasonPass="Fulfilled all requirements.",
                DateTimeStamp=timezone.now(),
                ByStaffPass=(str(request.user.last_name) + ", " + str(request.user.first_name)))
            #incrementTotalCheckIn()
        else:
            CheckIn.objects.create(
                Student=student, Season=season, Passed="Yes", PassCode=pass_code,
                ReasonPass="Fulfilled all requirements.",
                DateTimeStamp=timezone.now(),
                ByStaffPass=(str(request.user.last_name) + ", " + str(request.user.first_name)))
            #incrementTotalCheckIn()
print("checked in----")
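# Renders an HTML template for the email body, derives a plain-text fallback
# with strip_tags, and reports success/failure as a status string.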
def sendEMail(request, mailHead, recipient, template, context):
    msg = ""
if recipient != "None":
html_message = render_to_string("" + template, {
'context': context})
plain_message = strip_tags(html_message)
try:
send_mail(mailHead,
plain_message,
'<EMAIL>',
[recipient],
html_message=html_message,
fail_silently=False)
msg = "Email sent Successfully!"
return msg
        except Exception:
msg = "Email failed!"
return msg
else:
msg = "Operation Failed! No recipient provided."
return msg
def wardCheckedInEmail(request, pk):
student = Students.objects.get(pk=pk)
    mailHead = "Your ward has been checked in at Brookstone Secondary Boarding Facility"
#recipient = student.ParentEmail
recipient = "<EMAIL>"
context = student
template = "EmailPassSuccess.html"
message = sendEMail(request, mailHead, recipient, template, context)
return message
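# View that records the clearance and notifies the parent by email; the heavy
# work is pushed to background threads so the HTTP response is not blocked.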
@login_required(login_url='login')
def Pass(request, pk):
#with concurrent.futures.ThreadPoolExecutor() as executor:
# results = [executor.submit(checkin_helper, request, id), executor.submit(wardCheckedInEmail, request, id)]
# for f in concurrent.futures.as_completed(results):
# if f.result() != "EmailNoneResult":
# message = f.result()
# return message
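    # fire-and-forget: Thread.start() returns None, so the user-facing message
    # is set synchronously below rather than collected from the threads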
t1 = threading.Thread(target=Pass_helper, args=[request, pk])
t2 = threading.Thread(target=wardCheckedInEmail, args=[request, pk])
    t1.start()
    t2.start()
    message = "Verification Successful! Student is cleared to pass."
    if "Successful" in message:
messages.success(request, message)
else:
messages.error(request, message)
return redirect("/Pass/viewCheckInProfile/" + str(pk))
#return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
#return redirect("/Pass/viewCheckInProfile/" + str(pk))
@login_required(login_url='login')
@management
def PassAdmin(request, pk):
# with concurrent.futures.ThreadPoolExecutor() as executor:
# results = [executor.submit(checkin_helper, request, id), executor.submit(wardCheckedInEmail, request, id)]
# for f in concurrent.futures.as_completed(results):
# if f.result() != "EmailNoneResult":
# message = f.result()
# return message
t1 = threading.Thread(target=Pass_helperAdmin, args=[request, pk])
t2 = threading.Thread(target=wardCheckedInEmail, args=[request, pk])
    t1.start()
    t2.start()
    message = "Verification Successful! Student is cleared to pass."
    if "Successful" in message:
print("here " + message)
messages.success(request, message)
else:
messages.error(request, message)
return redirect("/Pass/viewCheckInProfileAdmin/" + str(pk))
|
[
"django.core.mail.send_mail",
"django.contrib.messages.error",
"StudentManager.models.Pointers.objects.filter",
"StudentManager.models.Pointers.save",
"StudentManager.models.Allowed.objects.create",
"StudentManager.models.Allowed.objects.get",
"django.contrib.auth.decorators.login_required",
"django.utils.timezone.now",
"django.utils.html.strip_tags",
"django.shortcuts.render",
"threading.Thread",
"StudentManager.models.CheckIn.objects.filter",
"StudentManager.models.CheckIn.objects.get",
"StudentManager.models.Pointers.objects.create",
"StudentManager.models.CheckIn.objects.all",
"StudentManager.models.Allowed.objects.filter",
"StudentManager.models.Pointers.objects.get",
"django.template.loader.render_to_string",
"StudentManager.models.Students.objects.get",
"StudentManager.functions.viewStudents",
"StudentManager.models.Seasons.objects.get",
"django.contrib.messages.success",
"StudentManager.models.CurrentSeason.objects.get"
] |
[((708, 741), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""login"""'}), "(login_url='login')\n", (722, 741), False, 'from django.contrib.auth.decorators import login_required\n'), ((1525, 1558), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""login"""'}), "(login_url='login')\n", (1539, 1558), False, 'from django.contrib.auth.decorators import login_required\n'), ((2363, 2396), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""login"""'}), "(login_url='login')\n", (2377, 2396), False, 'from django.contrib.auth.decorators import login_required\n'), ((3156, 3189), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""login"""'}), "(login_url='login')\n", (3170, 3189), False, 'from django.contrib.auth.decorators import login_required\n'), ((8960, 8993), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""login"""'}), "(login_url='login')\n", (8974, 8993), False, 'from django.contrib.auth.decorators import login_required\n'), ((9954, 9987), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""login"""'}), "(login_url='login')\n", (9968, 9987), False, 'from django.contrib.auth.decorators import login_required\n'), ((1476, 1522), 'StudentManager.functions.viewStudents', 'viewStudents', (['request', '"""viewStudentsPass.html"""'], {}), "(request, 'viewStudentsPass.html')\n", (1488, 1522), False, 'from StudentManager.functions import viewStudents\n'), ((2309, 2360), 'StudentManager.functions.viewStudents', 'viewStudents', (['request', '"""viewStudentsPassAdmin.html"""'], {}), "(request, 'viewStudentsPassAdmin.html')\n", (2321, 2360), False, 'from StudentManager.functions import viewStudents\n'), ((2465, 2492), 'StudentManager.models.Students.objects.get', 'Students.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (2485, 2492), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((2969, 3106), 'django.shortcuts.render', 'render', (['request', '"""checkInProfilePassAdmin.html"""', "{'student': student, 'checkedIn': checkedIn, 'checkin': checkin, 'allowed':\n allowed}"], {}), "(request, 'checkInProfilePassAdmin.html', {'student': student,\n 'checkedIn': checkedIn, 'checkin': checkin, 'allowed': allowed})\n", (2975, 3106), False, 'from django.shortcuts import render, redirect\n'), ((3241, 3268), 'StudentManager.models.Students.objects.get', 'Students.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (3261, 3268), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((3745, 3877), 'django.shortcuts.render', 'render', (['request', '"""checkInProfilePass.html"""', "{'student': student, 'checkedIn': checkedIn, 'checkin': checkin, 'allowed':\n allowed}"], {}), "(request, 'checkInProfilePass.html', {'student': student, 'checkedIn':\n checkedIn, 'checkin': checkin, 'allowed': allowed})\n", (3751, 3877), False, 'from django.shortcuts import render, redirect\n'), ((8620, 8647), 'StudentManager.models.Students.objects.get', 'Students.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (8640, 8647), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((9384, 9440), 'threading.Thread', 'threading.Thread', ([], {'target': 'Pass_helper', 'args': '[request, pk]'}), '(target=Pass_helper, args=[request, pk])\n', (9400, 9440), 
False, 'import threading\n'), ((9450, 9513), 'threading.Thread', 'threading.Thread', ([], {'target': 'wardCheckedInEmail', 'args': '[request, pk]'}), '(target=wardCheckedInEmail, args=[request, pk])\n', (9466, 9513), False, 'import threading\n'), ((10395, 10456), 'threading.Thread', 'threading.Thread', ([], {'target': 'Pass_helperAdmin', 'args': '[request, pk]'}), '(target=Pass_helperAdmin, args=[request, pk])\n', (10411, 10456), False, 'import threading\n'), ((10466, 10529), 'threading.Thread', 'threading.Thread', ([], {'target': 'wardCheckedInEmail', 'args': '[request, pk]'}), '(target=wardCheckedInEmail, args=[request, pk])\n', (10482, 10529), False, 'import threading\n'), ((2506, 2537), 'StudentManager.models.CurrentSeason.objects.get', 'CurrentSeason.objects.get', ([], {'pk': '(1)'}), '(pk=1)\n', (2531, 2537), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((2695, 2746), 'StudentManager.models.CheckIn.objects.get', 'CheckIn.objects.get', ([], {'Student': 'student', 'Season': 'season'}), '(Student=student, Season=season)\n', (2714, 2746), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((2874, 2925), 'StudentManager.models.Allowed.objects.get', 'Allowed.objects.get', ([], {'Student': 'student', 'Season': 'season'}), '(Student=student, Season=season)\n', (2893, 2925), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((3282, 3313), 'StudentManager.models.CurrentSeason.objects.get', 'CurrentSeason.objects.get', ([], {'pk': '(1)'}), '(pk=1)\n', (3307, 3313), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((3471, 3522), 'StudentManager.models.CheckIn.objects.get', 'CheckIn.objects.get', ([], {'Student': 'student', 'Season': 'season'}), '(Student=student, Season=season)\n', (3490, 3522), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((3650, 3701), 'StudentManager.models.Allowed.objects.get', 'Allowed.objects.get', ([], {'Student': 'student', 'Season': 'season'}), '(Student=student, Season=season)\n', (3669, 3701), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((4070, 4101), 'StudentManager.models.CurrentSeason.objects.get', 'CurrentSeason.objects.get', ([], {'pk': '(1)'}), '(pk=1)\n', (4095, 4101), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((4119, 4165), 'StudentManager.models.Seasons.objects.get', 'Seasons.objects.get', ([], {'SeasonName': 'current_season'}), '(SeasonName=current_season)\n', (4138, 4165), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((4184, 4211), 'StudentManager.models.Students.objects.get', 'Students.objects.get', ([], {'pk': 'id'}), '(pk=id)\n', (4204, 4211), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((6150, 6181), 'StudentManager.models.CurrentSeason.objects.get', 'CurrentSeason.objects.get', ([], {'pk': '(1)'}), '(pk=1)\n', (6175, 6181), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((6199, 6245), 'StudentManager.models.Seasons.objects.get', 'Seasons.objects.get', ([], {'SeasonName': 'current_season'}), '(SeasonName=current_season)\n', (6218, 6245), False, 
'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((6264, 6291), 'StudentManager.models.Students.objects.get', 'Students.objects.get', ([], {'pk': 'id'}), '(pk=id)\n', (6284, 6291), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((7987, 8040), 'django.template.loader.render_to_string', 'render_to_string', (["('' + template)", "{'context': context}"], {}), "('' + template, {'context': context})\n", (8003, 8040), False, 'from django.template.loader import render_to_string\n'), ((8078, 8102), 'django.utils.html.strip_tags', 'strip_tags', (['html_message'], {}), '(html_message)\n', (8088, 8102), False, 'from django.utils.html import strip_tags\n'), ((9678, 9712), 'django.contrib.messages.success', 'messages.success', (['request', 'message'], {}), '(request, message)\n', (9694, 9712), False, 'from django.contrib import messages\n'), ((9731, 9763), 'django.contrib.messages.error', 'messages.error', (['request', 'message'], {}), '(request, message)\n', (9745, 9763), False, 'from django.contrib import messages\n'), ((10727, 10761), 'django.contrib.messages.success', 'messages.success', (['request', 'message'], {}), '(request, message)\n', (10743, 10761), False, 'from django.contrib import messages\n'), ((10780, 10812), 'django.contrib.messages.error', 'messages.error', (['request', 'message'], {}), '(request, message)\n', (10794, 10812), False, 'from django.contrib import messages\n'), ((2586, 2640), 'StudentManager.models.CheckIn.objects.filter', 'CheckIn.objects.filter', ([], {'Student': 'student', 'Season': 'season'}), '(Student=student, Season=season)\n', (2608, 2640), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((2791, 2845), 'StudentManager.models.Allowed.objects.filter', 'Allowed.objects.filter', ([], {'Student': 'student', 'Season': 'season'}), '(Student=student, Season=season)\n', (2813, 2845), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((3362, 3416), 'StudentManager.models.CheckIn.objects.filter', 'CheckIn.objects.filter', ([], {'Student': 'student', 'Season': 'season'}), '(Student=student, Season=season)\n', (3384, 3416), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((3567, 3621), 'StudentManager.models.Allowed.objects.filter', 'Allowed.objects.filter', ([], {'Student': 'student', 'Season': 'season'}), '(Student=student, Season=season)\n', (3589, 3621), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((4521, 4592), 'StudentManager.models.Pointers.objects.create', 'Pointers.objects.create', ([], {'id': '(1)', 'Season': 'season', 'PassCodePointer': 'pass_code'}), '(id=1, Season=season, PassCodePointer=pass_code)\n', (4544, 4592), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((4774, 4841), 'StudentManager.models.Allowed.objects.create', 'Allowed.objects.create', ([], {'Student': 'student', 'Season': 'season', 'Clear': '"""Yes"""'}), "(Student=student, Season=season, Clear='Yes')\n", (4796, 4841), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((6503, 6518), 'StudentManager.models.Pointers.save', 'Pointers.save', ([], {}), '()\n', (6516, 6518), False, 'from StudentManager.models import Students, Allowed, 
CurrentSeason, Seasons, CheckIn, Pointers\n'), ((6603, 6674), 'StudentManager.models.Pointers.objects.create', 'Pointers.objects.create', ([], {'id': '(1)', 'Season': 'season', 'PassCodePointer': 'pass_code'}), '(id=1, Season=season, PassCodePointer=pass_code)\n', (6626, 6674), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((8128, 8239), 'django.core.mail.send_mail', 'send_mail', (['mailHead', 'plain_message', '"""<EMAIL>"""', '[recipient]'], {'html_message': 'html_message', 'fail_silently': '(False)'}), "(mailHead, plain_message, '<EMAIL>', [recipient], html_message=\n html_message, fail_silently=False)\n", (8137, 8239), False, 'from django.core.mail import send_mail\n'), ((4224, 4253), 'StudentManager.models.Pointers.objects.filter', 'Pointers.objects.filter', ([], {'id': '(1)'}), '(id=1)\n', (4247, 4253), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((4697, 4751), 'StudentManager.models.Allowed.objects.filter', 'Allowed.objects.filter', ([], {'Student': 'student', 'Season': 'season'}), '(Student=student, Season=season)\n', (4719, 4751), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((5005, 5059), 'StudentManager.models.CheckIn.objects.filter', 'CheckIn.objects.filter', ([], {'Student': 'student', 'Season': 'season'}), '(Student=student, Season=season)\n', (5027, 5059), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((6304, 6333), 'StudentManager.models.Pointers.objects.filter', 'Pointers.objects.filter', ([], {'id': '(1)'}), '(id=1)\n', (6327, 6333), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((6757, 6811), 'StudentManager.models.CheckIn.objects.filter', 'CheckIn.objects.filter', ([], {'Student': 'student', 'Season': 'season'}), '(Student=student, Season=season)\n', (6779, 6811), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((4288, 4314), 'StudentManager.models.Pointers.objects.get', 'Pointers.objects.get', ([], {'id': '(1)'}), '(id=1)\n', (4308, 4314), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((4347, 4376), 'StudentManager.models.Pointers.objects.filter', 'Pointers.objects.filter', ([], {'id': '(1)'}), '(id=1)\n', (4370, 4376), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((4893, 4947), 'StudentManager.models.Allowed.objects.filter', 'Allowed.objects.filter', ([], {'Student': 'student', 'Season': 'season'}), '(Student=student, Season=season)\n', (4915, 4947), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((5082, 5136), 'StudentManager.models.CheckIn.objects.filter', 'CheckIn.objects.filter', ([], {'Student': 'student', 'Season': 'season'}), '(Student=student, Season=season)\n', (5104, 5136), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((5303, 5317), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (5315, 5317), False, 'from django.utils import timezone\n'), ((5816, 5830), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (5828, 5830), False, 'from django.utils import timezone\n'), ((6368, 6394), 
'StudentManager.models.Pointers.objects.get', 'Pointers.objects.get', ([], {'id': '(1)'}), '(id=1)\n', (6388, 6394), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((6427, 6456), 'StudentManager.models.Pointers.objects.filter', 'Pointers.objects.filter', ([], {'id': '(1)'}), '(id=1)\n', (6450, 6456), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((6834, 6888), 'StudentManager.models.CheckIn.objects.filter', 'CheckIn.objects.filter', ([], {'Student': 'student', 'Season': 'season'}), '(Student=student, Season=season)\n', (6856, 6888), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((7078, 7092), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (7090, 7092), False, 'from django.utils import timezone\n'), ((7614, 7628), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (7626, 7628), False, 'from django.utils import timezone\n'), ((4475, 4496), 'StudentManager.models.CheckIn.objects.all', 'CheckIn.objects.all', ([], {}), '()\n', (4494, 4496), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n'), ((6557, 6578), 'StudentManager.models.CheckIn.objects.all', 'CheckIn.objects.all', ([], {}), '()\n', (6576, 6578), False, 'from StudentManager.models import Students, Allowed, CurrentSeason, Seasons, CheckIn, Pointers\n')]
|
import sys
sys.path.append('/Users/ADB/Desktop/ /cryptopals')
from cryptotools import *
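# CTR-mode bit-flipping: the encryption oracle strips ';' and '=' from user
# input, so the attacker injects placeholder bytes and fixes them up directly
# in the ciphertext (a classic Cryptopals-style attack).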
key = generate_key()
def enc_oracle(m):
m = b''.join(m.split(b';'))
m = b''.join(m.split(b'='))
prefix = b'comment1=cooking%20MCs;userdata='
suffix = b';comment2=%20like%20a%20pound%20of%20bacon'
plaintext = prefix + m + suffix
return aes_ctr_encrypt(plaintext, key)
def dec_oracle(c):
admin_string = b';admin=true;'
m = aes_ctr_decrypt(c, key)
return m.find(admin_string) >= 0
if __name__ == '__main__':
test_string = b'AadminAtrue'
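    # the fixed prefix b'comment1=cooking%20MCs;userdata=' is 32 bytes long, so
    # the sentinel 'A' bytes sit at ciphertext offsets 32 and 38; in CTR mode
    # c = p XOR keystream, hence XORing c with 'A'^';' (resp. 'A'^'=') turns the
    # recovered plaintext into ';admin=true'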
c = list(enc_oracle(test_string))
c[32] = c[32] ^ ord('A') ^ ord(';')
c[38] = c[38] ^ ord('A') ^ ord('=')
c = bytes(c)
print(dec_oracle(c))
|
[
"sys.path.append"
] |
[((11, 61), 'sys.path.append', 'sys.path.append', (['"""/Users/ADB/Desktop/ /cryptopals"""'], {}), "('/Users/ADB/Desktop/ /cryptopals')\n", (26, 61), False, 'import sys\n')]
|
from AutoEncoder.denoising_AE.denoising_ae import DAE
from AutoEncoder.denoising_AE.data import Data
from AutoEncoder.denoising_AE.learner import Learner
from AutoEncoder.denoising_AE.visualizer import Visualizer
if __name__ == '__main__':
DATA_DIR = 'D:/rawDataFiles/digit_train.csv'
LEARNING_RATE = 3e-5
EPOCHS = 500
BATCH_SIZE = 3000
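    # pipeline: load the digit CSV, train the denoising autoencoder to map the
    # corrupted inputs back to the clean targets, then visualise the results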
data = Data(DATA_DIR)
target, input = data.import_data()
dae = DAE(LEARNING_RATE)
target = target.to(dae.device)
input = input.to(dae.device)
print(target.shape)
print(input.shape)
learner = Learner(dae, input, target, batch_size=BATCH_SIZE, epochs=EPOCHS)
model = learner.learn()
viz = Visualizer(target, model)
viz.viz()
|
[
"AutoEncoder.denoising_AE.learner.Learner",
"AutoEncoder.denoising_AE.data.Data",
"AutoEncoder.denoising_AE.denoising_ae.DAE",
"AutoEncoder.denoising_AE.visualizer.Visualizer"
] |
[((380, 394), 'AutoEncoder.denoising_AE.data.Data', 'Data', (['DATA_DIR'], {}), '(DATA_DIR)\n', (384, 394), False, 'from AutoEncoder.denoising_AE.data import Data\n'), ((446, 464), 'AutoEncoder.denoising_AE.denoising_ae.DAE', 'DAE', (['LEARNING_RATE'], {}), '(LEARNING_RATE)\n', (449, 464), False, 'from AutoEncoder.denoising_AE.denoising_ae import DAE\n'), ((601, 666), 'AutoEncoder.denoising_AE.learner.Learner', 'Learner', (['dae', 'input', 'target'], {'batch_size': 'BATCH_SIZE', 'epochs': 'EPOCHS'}), '(dae, input, target, batch_size=BATCH_SIZE, epochs=EPOCHS)\n', (608, 666), False, 'from AutoEncoder.denoising_AE.learner import Learner\n'), ((709, 734), 'AutoEncoder.denoising_AE.visualizer.Visualizer', 'Visualizer', (['target', 'model'], {}), '(target, model)\n', (719, 734), False, 'from AutoEncoder.denoising_AE.visualizer import Visualizer\n')]
|
import sys
import pickle
import numpy as np
import smplx
import torch
import trimesh
from copy import deepcopy
from psbody.mesh import Mesh
import cv2
import os
import natsort
from tqdm import tqdm
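# Quick viewer helper: builds trimesh meshes (or point clouds when no faces
# are given) and opens them in an OpenGL scene window.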
def show(verts=None, faces=None, colors=None):
if torch.is_tensor(verts):
verts = verts.detach().numpy()
if torch.is_tensor(faces):
faces = faces.detach().numpy()
all_meshes = []
if faces is not None:
for i in range(len(verts)):
m = trimesh.Trimesh(verts[i], faces[i])
if colors is not None:
m.visual.vertex_colors = colors[i]
all_meshes.append(m)
else:
for i in range(len(verts)):
m = trimesh.PointCloud(verts[i], colors[i])
all_meshes.append(m)
scene = trimesh.scene.Scene()
for m in all_meshes:
scene.add_geometry(m)
scene.show('gl')
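# Loads fitted SMPL parameters from a pickle; PaMIR fits store scale, pose,
# betas, translation and a pose embedding, while the fallback layout only
# carries 'pose' and 'betas'.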
def get_param(path, pamir=True):
with open(path, 'rb') as fi:
d = pickle.load(fi)
if pamir:
scale = d['body_scale']
pose = d['body_pose'][0]
beta = d['betas'][0]
trans = d['global_body_translation']
pose_embedding = d['body_pose_embedding']
return pose_embedding, scale, pose, beta, trans
else:
scale = 1
# print(d.keys())
pose = d['pose']
beta = d['betas'][:99]
# trans = d['trans']
return None, scale, pose, beta, None
if __name__ == '__main__':
src = '/home/groot/PaMIR/our_scans/our_scans_image/mesh_data/'
scans = natsort.natsorted(os.listdir(src))
for scan in tqdm(scans):
scan_smpl_path = src + scan + '/smpl/smpl_param.pkl'
model_folder = '../models'
        model = smplx.create(model_folder, create_global_orient=True,
                             create_body_pose=False, create_betas=True,
                             model_type='smpl', gender='male', create_transl=False,
                             create_left_hand_pose=True, create_right_hand_pose=True,
                             create_expression=True, create_jaw_pose=True,
                             create_leye_pose=True, create_reye_pose=True)
pose_embedding, scale, pose, beta, trans = get_param(scan_smpl_path)
        go = torch.tensor(pose[:3]).float().unsqueeze(0)
pose = torch.tensor(pose[3:]).float().unsqueeze(0)
beta = torch.tensor(beta).float().unsqueeze(0)
output = model(betas=beta, body_pose = pose, global_orient=go, return_verts=True)
vert = output.vertices[0]
vert = vert.detach().numpy()
outdir = src + scan
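        # apply the fitted global scale and translation so the posed SMPL mesh
        # lines up with the scan before writing it out with psbody.mesh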
mesh = Mesh()
vert = vert*scale
vert += trans
mesh.v = vert
mesh.f = model.faces
mesh.write_obj(outdir + '/smpl/smpl_mesh_ordered.obj')
|
[
"tqdm.tqdm",
"trimesh.Trimesh",
"psbody.mesh.Mesh",
"smplx.create",
"pickle.load",
"trimesh.PointCloud",
"torch.tensor",
"torch.is_tensor",
"os.listdir",
"trimesh.scene.Scene"
] |
[((259, 281), 'torch.is_tensor', 'torch.is_tensor', (['verts'], {}), '(verts)\n', (274, 281), False, 'import torch\n'), ((330, 352), 'torch.is_tensor', 'torch.is_tensor', (['faces'], {}), '(faces)\n', (345, 352), False, 'import torch\n'), ((794, 815), 'trimesh.scene.Scene', 'trimesh.scene.Scene', ([], {}), '()\n', (813, 815), False, 'import trimesh\n'), ((1600, 1611), 'tqdm.tqdm', 'tqdm', (['scans'], {}), '(scans)\n', (1604, 1611), False, 'from tqdm import tqdm\n'), ((972, 987), 'pickle.load', 'pickle.load', (['fi'], {}), '(fi)\n', (983, 987), False, 'import pickle\n'), ((1566, 1581), 'os.listdir', 'os.listdir', (['src'], {}), '(src)\n', (1576, 1581), False, 'import os\n'), ((1728, 2047), 'smplx.create', 'smplx.create', (['model_folder'], {'create_global_orient': '(True)', 'create_body_pose': '(False)', 'create_betas': '(True)', 'model_type': '"""smpl"""', 'gender': '"""male"""', 'create_transl': '(False)', 'create_left_hand_pose': '(True)', 'create_right_hand_pose': '(True)', 'create_expression': '(True)', 'create_jaw_pose': '(True)', 'create_leye_pose': '(True)', 'create_reye_pose': '(True)'}), "(model_folder, create_global_orient=True, create_body_pose=\n False, create_betas=True, model_type='smpl', gender='male',\n create_transl=False, create_left_hand_pose=True, create_right_hand_pose\n =True, create_expression=True, create_jaw_pose=True, create_leye_pose=\n True, create_reye_pose=True)\n", (1740, 2047), False, 'import smplx\n'), ((2499, 2505), 'psbody.mesh.Mesh', 'Mesh', ([], {}), '()\n', (2503, 2505), False, 'from psbody.mesh import Mesh\n'), ((491, 526), 'trimesh.Trimesh', 'trimesh.Trimesh', (['verts[i]', 'faces[i]'], {}), '(verts[i], faces[i])\n', (506, 526), False, 'import trimesh\n'), ((708, 747), 'trimesh.PointCloud', 'trimesh.PointCloud', (['verts[i]', 'colors[i]'], {}), '(verts[i], colors[i])\n', (726, 747), False, 'import trimesh\n'), ((2141, 2163), 'torch.tensor', 'torch.tensor', (['pose[:3]'], {}), '(pose[:3])\n', (2153, 2163), False, 'import torch\n'), ((2192, 2214), 'torch.tensor', 'torch.tensor', (['pose[3:]'], {}), '(pose[3:])\n', (2204, 2214), False, 'import torch\n'), ((2251, 2269), 'torch.tensor', 'torch.tensor', (['beta'], {}), '(beta)\n', (2263, 2269), False, 'import torch\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from scipy import linalg
from numpy.testing import assert_almost_equal
from megamix.online import GaussianMixture
from megamix.online.base import _log_normal_matrix
from megamix.online import dist_matrix
from megamix.utils_testing import checking
from scipy.special import logsumexp
import pytest
import h5py
class TestGaussianMixture_full:
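    # Online GaussianMixture tests (full covariance). The `window` and `update`
    # arguments are presumably pytest fixtures (e.g. parametrized in a
    # conftest.py) selecting the mini-batch window size and the update mode.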
def setup(self):
self.n_components = 5
self.dim = 2
self.n_points = 10
self.file_name = 'test'
def teardown(self):
checking.remove(self.file_name + '.h5')
def test_initialize(self,window):
points = np.random.randn(self.n_points,self.dim)
GM = GaussianMixture(self.n_components,window=window)
GM.initialize(points)
checking.verify_covariance(GM.get('cov'),self.n_components,self.dim)
checking.verify_means(GM.get('means'),self.n_components,self.dim)
checking.verify_log_pi(GM.get('log_weights'),self.n_components)
cov_chol = np.empty_like(GM.get('cov'))
for i in range(self.n_components):
cov_chol[i] = linalg.cholesky(GM.get('cov')[i],lower=True)
assert_almost_equal(cov_chol,GM.get('cov_chol'))
assert GM.get('_is_initialized') == True
def test_initialize_cov(self,window,update):
points = np.random.randn(self.n_points,self.dim)
GM = GaussianMixture(self.n_components,window=window)
means = np.random.randn(self.n_components,self.dim)
GM.set('means',means)
GM._initialize_cov(points)
predected_cov = GM.get('cov')
assignements = np.zeros((self.n_points,self.n_components))
M = dist_matrix(points,means)
for i in range(self.n_points):
index_min = np.argmin(M[i]) #the cluster number of the ith point is index_min
if (isinstance(index_min,np.int64)):
assignements[i][index_min] = 1
else: #Happens when two points are equally distant from a cluster mean
assignements[i][index_min[0]] = 1
N = np.sum(assignements,axis=0) + 1e-15
N /= self.n_points
S = np.zeros((self.n_components,self.dim,self.dim))
for i in range(self.n_components):
diff = points - means[i]
diff_weighted = diff * assignements[:,i:i+1]
S[i] = np.dot(diff_weighted.T,diff)
S[i].flat[::self.dim+1] += float(GM.get('reg_covar'))
S /= self.n_points
expected_cov = S / N[:,np.newaxis,np.newaxis]
assert_almost_equal(expected_cov,predected_cov)
def test_step_E(self,window):
points = np.random.randn(self.n_points,self.dim)
GM = GaussianMixture(self.n_components,window=window)
GM.initialize(points)
log_normal_matrix = _log_normal_matrix(points,GM.get('means'),
GM.get('cov_chol'),'full')
log_product = log_normal_matrix + GM.get('log_weights')[:,np.newaxis].T
expected_log_prob_norm = logsumexp(log_product,axis=1)
expected_log_resp = log_product - expected_log_prob_norm[:,np.newaxis]
predected_log_prob_norm, predected_log_resp = GM._step_E(points)
assert_almost_equal(expected_log_prob_norm,predected_log_prob_norm)
assert_almost_equal(expected_log_resp,predected_log_resp)
def test_step_M(self,window,update):
points = np.random.randn(self.n_points,self.dim)
GM = GaussianMixture(self.n_components,window=window,update=update)
GM.initialize(points)
_,log_resp = GM._step_E(points[:GM.get('window'):])
GM._sufficient_statistics(points[:GM.get('window'):],log_resp)
log_weights = np.log(GM.get('N'))
means = GM.get('X') / GM.get('N')[:,np.newaxis]
cov = GM.get('S') / GM.get('N')[:,np.newaxis,np.newaxis]
cov_chol = np.empty_like(cov)
for i in range(self.n_components):
cov_chol[i] = linalg.cholesky(cov[i],lower=True)
GM._step_M()
assert_almost_equal(log_weights,GM.get('log_weights'))
assert_almost_equal(means,GM.get('means'))
assert_almost_equal(cov,GM.get('cov'))
assert_almost_equal(cov_chol,GM.get('cov_chol'))
def test_sufficient_statistics(self,window,update):
points = np.random.randn(self.n_points,self.dim)
GM = GaussianMixture(self.n_components,window=window,update=update)
GM.initialize(points)
_,log_resp = GM._step_E(points[:GM.get('window'):])
points_exp = points[:window:]
resp = np.exp(log_resp)
gamma = 1/((GM.get('iter') + window//2)**GM.get('kappa'))
# New sufficient statistics
N = resp.sum(axis=0) + 10 * np.finfo(resp.dtype).eps
N /= window
X = np.dot(resp.T,points_exp)
X /= window
S = np.zeros((self.n_components,self.dim,self.dim))
for i in range(self.n_components):
diff = points_exp - GM.get('means')[i]
diff_weighted = diff * np.sqrt(resp[:,i:i+1])
S[i] = np.dot(diff_weighted.T,diff_weighted)
S /= window
# Sufficient statistics update
expected_N = (1-gamma)*GM.get('N') + gamma*N
expected_X = (1-gamma)*GM.get('X') + gamma*X
expected_S = (1-gamma)*GM.get('S') + gamma*S
expected_S_chol = np.zeros((self.n_components,self.dim,self.dim))
for i in range(self.n_components):
expected_S_chol[i] = linalg.cholesky(expected_S[i],lower=True)
GM._sufficient_statistics(points_exp,log_resp)
assert_almost_equal(expected_N,GM.get('N'))
assert_almost_equal(expected_X,GM.get('X'))
assert_almost_equal(expected_S,GM.get('S'))
def test_score(self,window,update):
points = np.random.randn(self.n_points,self.dim)
points2 = np.random.randn(self.n_points,self.dim)
GM = GaussianMixture(self.n_components,window=window,update=update)
with pytest.raises(Exception):
GM.score(points)
GM.initialize(points)
GM.fit(points)
score1 = GM.score(points)
score2 = GM.score(points2)
assert score1 > score2
def test_write_and_read(self,update):
points = np.random.randn(self.n_points,self.dim)
GM = GaussianMixture(self.n_components,update=update)
GM.initialize(points)
f = h5py.File(self.file_name + '.h5','w')
grp = f.create_group('init')
GM.write(grp)
f.close()
GM2 = GaussianMixture(self.n_components,update=update)
f = h5py.File(self.file_name + '.h5','r')
grp = f['init']
GM2.read_and_init(grp,points)
f.close()
checking.verify_online_models(GM,GM2)
GM.fit(points)
GM2.fit(points)
checking.verify_online_models(GM,GM2)
def test_predict_log_resp(self,window,update):
points = np.random.randn(self.n_points,self.dim)
GM = GaussianMixture(self.n_components,window=window,update=update)
with pytest.raises(Exception):
GM.predict_log_resp(points)
GM.initialize(points)
predected_log_resp = GM.predict_log_resp(points)
_,expected_log_resp = GM._step_E(points)
assert_almost_equal(predected_log_resp,expected_log_resp)
def test_update(self,window):
points = np.random.randn(self.n_points,self.dim)
GM = GaussianMixture(self.n_components,window=window,update=True)
GM.initialize(points)
GM.fit(points)
expected_cov_chol = np.zeros((self.n_components,self.dim,self.dim))
for i in range(self.n_components):
expected_cov_chol[i] = linalg.cholesky(GM.get('cov')[i],lower=True)
predected_cov_chol = GM.get('cov_chol')
assert_almost_equal(expected_cov_chol,predected_cov_chol)
def test_fit_save(self,window):
points = np.random.randn(self.n_points,self.dim)
GM = GaussianMixture(self.n_components,window=window)
checking.remove(self.file_name + '.h5')
GM.initialize(points)
GM.fit(points,saving='linear',saving_iter=2,
file_name=self.file_name)
f = h5py.File(self.file_name + '.h5','r')
cpt = 0
for name in f:
cpt += 1
assert cpt == self.n_points//(2*window)
checking.remove(self.file_name + '.h5')
GM.fit(points,saving='log',saving_iter=2,
file_name=self.file_name)
f = h5py.File(self.file_name + '.h5','r')
cpt = 0
for name in f:
cpt += 1
assert cpt == 1 + int(np.log(self.n_points/window)/np.log(2))
|
[
"numpy.sum",
"megamix.online.dist_matrix",
"scipy.linalg.cholesky",
"numpy.argmin",
"numpy.exp",
"scipy.special.logsumexp",
"megamix.online.GaussianMixture",
"numpy.random.randn",
"numpy.testing.assert_almost_equal",
"numpy.empty_like",
"numpy.finfo",
"pytest.raises",
"megamix.utils_testing.checking.verify_online_models",
"h5py.File",
"numpy.dot",
"numpy.log",
"numpy.zeros",
"megamix.utils_testing.checking.remove",
"numpy.sqrt"
] |
[((595, 634), 'megamix.utils_testing.checking.remove', 'checking.remove', (["(self.file_name + '.h5')"], {}), "(self.file_name + '.h5')\n", (610, 634), False, 'from megamix.utils_testing import checking\n'), ((699, 739), 'numpy.random.randn', 'np.random.randn', (['self.n_points', 'self.dim'], {}), '(self.n_points, self.dim)\n', (714, 739), True, 'import numpy as np\n'), ((752, 801), 'megamix.online.GaussianMixture', 'GaussianMixture', (['self.n_components'], {'window': 'window'}), '(self.n_components, window=window)\n', (767, 801), False, 'from megamix.online import GaussianMixture\n'), ((1428, 1468), 'numpy.random.randn', 'np.random.randn', (['self.n_points', 'self.dim'], {}), '(self.n_points, self.dim)\n', (1443, 1468), True, 'import numpy as np\n'), ((1481, 1530), 'megamix.online.GaussianMixture', 'GaussianMixture', (['self.n_components'], {'window': 'window'}), '(self.n_components, window=window)\n', (1496, 1530), False, 'from megamix.online import GaussianMixture\n'), ((1546, 1590), 'numpy.random.randn', 'np.random.randn', (['self.n_components', 'self.dim'], {}), '(self.n_components, self.dim)\n', (1561, 1590), True, 'import numpy as np\n'), ((1718, 1762), 'numpy.zeros', 'np.zeros', (['(self.n_points, self.n_components)'], {}), '((self.n_points, self.n_components))\n', (1726, 1762), True, 'import numpy as np\n'), ((1783, 1809), 'megamix.online.dist_matrix', 'dist_matrix', (['points', 'means'], {}), '(points, means)\n', (1794, 1809), False, 'from megamix.online import dist_matrix\n'), ((2272, 2321), 'numpy.zeros', 'np.zeros', (['(self.n_components, self.dim, self.dim)'], {}), '((self.n_components, self.dim, self.dim))\n', (2280, 2321), True, 'import numpy as np\n'), ((2678, 2726), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['expected_cov', 'predected_cov'], {}), '(expected_cov, predected_cov)\n', (2697, 2726), False, 'from numpy.testing import assert_almost_equal\n'), ((2786, 2826), 'numpy.random.randn', 'np.random.randn', (['self.n_points', 'self.dim'], {}), '(self.n_points, self.dim)\n', (2801, 2826), True, 'import numpy as np\n'), ((2839, 2888), 'megamix.online.GaussianMixture', 'GaussianMixture', (['self.n_components'], {'window': 'window'}), '(self.n_components, window=window)\n', (2854, 2888), False, 'from megamix.online import GaussianMixture\n'), ((3185, 3215), 'scipy.special.logsumexp', 'logsumexp', (['log_product'], {'axis': '(1)'}), '(log_product, axis=1)\n', (3194, 3215), False, 'from scipy.special import logsumexp\n'), ((3393, 3461), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['expected_log_prob_norm', 'predected_log_prob_norm'], {}), '(expected_log_prob_norm, predected_log_prob_norm)\n', (3412, 3461), False, 'from numpy.testing import assert_almost_equal\n'), ((3469, 3527), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['expected_log_resp', 'predected_log_resp'], {}), '(expected_log_resp, predected_log_resp)\n', (3488, 3527), False, 'from numpy.testing import assert_almost_equal\n'), ((3594, 3634), 'numpy.random.randn', 'np.random.randn', (['self.n_points', 'self.dim'], {}), '(self.n_points, self.dim)\n', (3609, 3634), True, 'import numpy as np\n'), ((3647, 3711), 'megamix.online.GaussianMixture', 'GaussianMixture', (['self.n_components'], {'window': 'window', 'update': 'update'}), '(self.n_components, window=window, update=update)\n', (3662, 3711), False, 'from megamix.online import GaussianMixture\n'), ((4063, 4081), 'numpy.empty_like', 'np.empty_like', (['cov'], {}), '(cov)\n', (4076, 4081), True, 'import numpy as 
np\n'), ((4525, 4565), 'numpy.random.randn', 'np.random.randn', (['self.n_points', 'self.dim'], {}), '(self.n_points, self.dim)\n', (4540, 4565), True, 'import numpy as np\n'), ((4578, 4642), 'megamix.online.GaussianMixture', 'GaussianMixture', (['self.n_components'], {'window': 'window', 'update': 'update'}), '(self.n_components, window=window, update=update)\n', (4593, 4642), False, 'from megamix.online import GaussianMixture\n'), ((4811, 4827), 'numpy.exp', 'np.exp', (['log_resp'], {}), '(log_resp)\n', (4817, 4827), True, 'import numpy as np\n'), ((5050, 5076), 'numpy.dot', 'np.dot', (['resp.T', 'points_exp'], {}), '(resp.T, points_exp)\n', (5056, 5076), True, 'import numpy as np\n'), ((5118, 5167), 'numpy.zeros', 'np.zeros', (['(self.n_components, self.dim, self.dim)'], {}), '((self.n_components, self.dim, self.dim))\n', (5126, 5167), True, 'import numpy as np\n'), ((5659, 5708), 'numpy.zeros', 'np.zeros', (['(self.n_components, self.dim, self.dim)'], {}), '((self.n_components, self.dim, self.dim))\n', (5667, 5708), True, 'import numpy as np\n'), ((6114, 6154), 'numpy.random.randn', 'np.random.randn', (['self.n_points', 'self.dim'], {}), '(self.n_points, self.dim)\n', (6129, 6154), True, 'import numpy as np\n'), ((6172, 6212), 'numpy.random.randn', 'np.random.randn', (['self.n_points', 'self.dim'], {}), '(self.n_points, self.dim)\n', (6187, 6212), True, 'import numpy as np\n'), ((6225, 6289), 'megamix.online.GaussianMixture', 'GaussianMixture', (['self.n_components'], {'window': 'window', 'update': 'update'}), '(self.n_components, window=window, update=update)\n', (6240, 6289), False, 'from megamix.online import GaussianMixture\n'), ((6578, 6618), 'numpy.random.randn', 'np.random.randn', (['self.n_points', 'self.dim'], {}), '(self.n_points, self.dim)\n', (6593, 6618), True, 'import numpy as np\n'), ((6631, 6680), 'megamix.online.GaussianMixture', 'GaussianMixture', (['self.n_components'], {'update': 'update'}), '(self.n_components, update=update)\n', (6646, 6680), False, 'from megamix.online import GaussianMixture\n'), ((6731, 6769), 'h5py.File', 'h5py.File', (["(self.file_name + '.h5')", '"""w"""'], {}), "(self.file_name + '.h5', 'w')\n", (6740, 6769), False, 'import h5py\n'), ((6869, 6918), 'megamix.online.GaussianMixture', 'GaussianMixture', (['self.n_components'], {'update': 'update'}), '(self.n_components, update=update)\n', (6884, 6918), False, 'from megamix.online import GaussianMixture\n'), ((6939, 6977), 'h5py.File', 'h5py.File', (["(self.file_name + '.h5')", '"""r"""'], {}), "(self.file_name + '.h5', 'r')\n", (6948, 6977), False, 'import h5py\n'), ((7074, 7112), 'megamix.utils_testing.checking.verify_online_models', 'checking.verify_online_models', (['GM', 'GM2'], {}), '(GM, GM2)\n', (7103, 7112), False, 'from megamix.utils_testing import checking\n'), ((7185, 7223), 'megamix.utils_testing.checking.verify_online_models', 'checking.verify_online_models', (['GM', 'GM2'], {}), '(GM, GM2)\n', (7214, 7223), False, 'from megamix.utils_testing import checking\n'), ((7300, 7340), 'numpy.random.randn', 'np.random.randn', (['self.n_points', 'self.dim'], {}), '(self.n_points, self.dim)\n', (7315, 7340), True, 'import numpy as np\n'), ((7353, 7417), 'megamix.online.GaussianMixture', 'GaussianMixture', (['self.n_components'], {'window': 'window', 'update': 'update'}), '(self.n_components, window=window, update=update)\n', (7368, 7417), False, 'from megamix.online import GaussianMixture\n'), ((7670, 7728), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['predected_log_resp', 
'expected_log_resp'], {}), '(predected_log_resp, expected_log_resp)\n', (7689, 7728), False, 'from numpy.testing import assert_almost_equal\n'), ((7788, 7828), 'numpy.random.randn', 'np.random.randn', (['self.n_points', 'self.dim'], {}), '(self.n_points, self.dim)\n', (7803, 7828), True, 'import numpy as np\n'), ((7841, 7903), 'megamix.online.GaussianMixture', 'GaussianMixture', (['self.n_components'], {'window': 'window', 'update': '(True)'}), '(self.n_components, window=window, update=True)\n', (7856, 7903), False, 'from megamix.online import GaussianMixture\n'), ((8001, 8050), 'numpy.zeros', 'np.zeros', (['(self.n_components, self.dim, self.dim)'], {}), '((self.n_components, self.dim, self.dim))\n', (8009, 8050), True, 'import numpy as np\n'), ((8246, 8304), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['expected_cov_chol', 'predected_cov_chol'], {}), '(expected_cov_chol, predected_cov_chol)\n', (8265, 8304), False, 'from numpy.testing import assert_almost_equal\n'), ((8366, 8406), 'numpy.random.randn', 'np.random.randn', (['self.n_points', 'self.dim'], {}), '(self.n_points, self.dim)\n', (8381, 8406), True, 'import numpy as np\n'), ((8419, 8468), 'megamix.online.GaussianMixture', 'GaussianMixture', (['self.n_components'], {'window': 'window'}), '(self.n_components, window=window)\n', (8434, 8468), False, 'from megamix.online import GaussianMixture\n'), ((8485, 8524), 'megamix.utils_testing.checking.remove', 'checking.remove', (["(self.file_name + '.h5')"], {}), "(self.file_name + '.h5')\n", (8500, 8524), False, 'from megamix.utils_testing import checking\n'), ((8661, 8699), 'h5py.File', 'h5py.File', (["(self.file_name + '.h5')", '"""r"""'], {}), "(self.file_name + '.h5', 'r')\n", (8670, 8699), False, 'import h5py\n'), ((8837, 8876), 'megamix.utils_testing.checking.remove', 'checking.remove', (["(self.file_name + '.h5')"], {}), "(self.file_name + '.h5')\n", (8852, 8876), False, 'from megamix.utils_testing import checking\n'), ((8988, 9026), 'h5py.File', 'h5py.File', (["(self.file_name + '.h5')", '"""r"""'], {}), "(self.file_name + '.h5', 'r')\n", (8997, 9026), False, 'import h5py\n'), ((1872, 1887), 'numpy.argmin', 'np.argmin', (['M[i]'], {}), '(M[i])\n', (1881, 1887), True, 'import numpy as np\n'), ((2188, 2216), 'numpy.sum', 'np.sum', (['assignements'], {'axis': '(0)'}), '(assignements, axis=0)\n', (2194, 2216), True, 'import numpy as np\n'), ((2476, 2505), 'numpy.dot', 'np.dot', (['diff_weighted.T', 'diff'], {}), '(diff_weighted.T, diff)\n', (2482, 2505), True, 'import numpy as np\n'), ((4151, 4186), 'scipy.linalg.cholesky', 'linalg.cholesky', (['cov[i]'], {'lower': '(True)'}), '(cov[i], lower=True)\n', (4166, 4186), False, 'from scipy import linalg\n'), ((5337, 5375), 'numpy.dot', 'np.dot', (['diff_weighted.T', 'diff_weighted'], {}), '(diff_weighted.T, diff_weighted)\n', (5343, 5375), True, 'import numpy as np\n'), ((5783, 5825), 'scipy.linalg.cholesky', 'linalg.cholesky', (['expected_S[i]'], {'lower': '(True)'}), '(expected_S[i], lower=True)\n', (5798, 5825), False, 'from scipy import linalg\n'), ((6310, 6334), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (6323, 6334), False, 'import pytest\n'), ((7438, 7462), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (7451, 7462), False, 'import pytest\n'), ((5295, 5320), 'numpy.sqrt', 'np.sqrt', (['resp[:, i:i + 1]'], {}), '(resp[:, i:i + 1])\n', (5302, 5320), True, 'import numpy as np\n'), ((4984, 5004), 'numpy.finfo', 'np.finfo', (['resp.dtype'], {}), '(resp.dtype)\n', 
(4992, 5004), True, 'import numpy as np\n'), ((9129, 9159), 'numpy.log', 'np.log', (['(self.n_points / window)'], {}), '(self.n_points / window)\n', (9135, 9159), True, 'import numpy as np\n'), ((9158, 9167), 'numpy.log', 'np.log', (['(2)'], {}), '(2)\n', (9164, 9167), True, 'import numpy as np\n')]
|
from __future__ import division
import torch
# from BNNs.base_net import *
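# Building blocks for a Kronecker-factored Laplace approximation: layer-wise
# Hessians of the loss w.r.t. activations are propagated backwards through
# piecewise-linear (ReLU-like) layers.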
def softmax_CE_preact_hessian(last_layer_acts):
side = last_layer_acts.shape[1]
I = torch.eye(side).type(torch.ByteTensor)
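    # ByteTensor masks for advanced indexing are deprecated in newer PyTorch;
    # torch.bool is the modern equivalent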
# for i != j H = -ai * aj -- Note that these are activations not pre-activations
Hl = - last_layer_acts.unsqueeze(1) * last_layer_acts.unsqueeze(2)
# for i == j H = ai * (1 - ai)
Hl[:, I] = last_layer_acts * (1 - last_layer_acts)
return Hl
def layer_act_hessian_recurse(prev_hessian, prev_weights, layer_pre_acts):
newside = layer_pre_acts.shape[1]
batch_size = layer_pre_acts.shape[0]
I = torch.eye(newside).type(torch.ByteTensor) # .unsqueeze(0).expand([batch_size, -1, -1])
# print(d_act(layer_pre_acts).unsqueeze(1).shape, I.shape)
B = prev_weights.data.new(batch_size, newside, newside).fill_(0)
B[:, I] = (layer_pre_acts > 0).type(B.type()) # d_act(layer_pre_acts)
D = prev_weights.data.new(batch_size, newside, newside).fill_(0) # is just 0 for a piecewise linear
# D[:, I] = dd_act(layer_pre_acts) * act_grads
Hl = torch.bmm(torch.t(prev_weights).unsqueeze(0).expand([batch_size, -1, -1]), prev_hessian)
Hl = torch.bmm(Hl, prev_weights.unsqueeze(0).expand([batch_size, -1, -1]))
Hl = torch.bmm(B, Hl)
Hl = torch.matmul(Hl, B)
Hl = Hl + D
return Hl
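# Combines a data-term Kronecker factor with an isotropic prior term and
# returns the Cholesky factor of the inverse, ready for posterior sampling.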
def chol_scale_invert_kron_factor(factor, prior_scale, data_scale, upper=False):
scaled_factor = data_scale * factor + prior_scale * torch.eye(factor.shape[0]).type(factor.type())
inv_factor = torch.inverse(scaled_factor)
chol_inv_factor = torch.cholesky(inv_factor, upper=upper)
return chol_inv_factor
def sample_K_laplace_MN(MAP, upper_Qinv, lower_HHinv):
# H = Qi (kron) HHi
# sample isotropic unit variance mtrix normal
Z = MAP.data.new(MAP.size()).normal_(mean=0, std=1)
# AAT = HHi
# A = torch.cholesky(HHinv, upper=False)
# BTB = Qi
# B = torch.cholesky(Qinv, upper=True)
all_mtx_sample = MAP + torch.matmul(torch.matmul(lower_HHinv, Z), upper_Qinv)
weight_mtx_sample = all_mtx_sample[:, :-1]
bias_mtx_sample = all_mtx_sample[:, -1]
return weight_mtx_sample, bias_mtx_sample
|
[
"torch.t",
"torch.eye",
"torch.bmm",
"torch.cholesky",
"torch.inverse",
"torch.matmul"
] |
[((1285, 1301), 'torch.bmm', 'torch.bmm', (['B', 'Hl'], {}), '(B, Hl)\n', (1294, 1301), False, 'import torch\n'), ((1311, 1330), 'torch.matmul', 'torch.matmul', (['Hl', 'B'], {}), '(Hl, B)\n', (1323, 1330), False, 'import torch\n'), ((1565, 1593), 'torch.inverse', 'torch.inverse', (['scaled_factor'], {}), '(scaled_factor)\n', (1578, 1593), False, 'import torch\n'), ((1616, 1655), 'torch.cholesky', 'torch.cholesky', (['inv_factor'], {'upper': 'upper'}), '(inv_factor, upper=upper)\n', (1630, 1655), False, 'import torch\n'), ((169, 184), 'torch.eye', 'torch.eye', (['side'], {}), '(side)\n', (178, 184), False, 'import torch\n'), ((638, 656), 'torch.eye', 'torch.eye', (['newside'], {}), '(newside)\n', (647, 656), False, 'import torch\n'), ((2037, 2065), 'torch.matmul', 'torch.matmul', (['lower_HHinv', 'Z'], {}), '(lower_HHinv, Z)\n', (2049, 2065), False, 'import torch\n'), ((1501, 1527), 'torch.eye', 'torch.eye', (['factor.shape[0]'], {}), '(factor.shape[0])\n', (1510, 1527), False, 'import torch\n'), ((1118, 1139), 'torch.t', 'torch.t', (['prev_weights'], {}), '(prev_weights)\n', (1125, 1139), False, 'import torch\n')]
|
import json
from urllib.parse import urlencode
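# Thin client for the server's machine-resource endpoints; `api_client` is
# expected to expose a requests-style `session` and a `get_server()` base URL.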
class Resource(object):
def __init__(self, api_client):
self.api_client = api_client
def get_machine(self, resid):
params = {'resid': resid}
resp = self.api_client.session.get(self.api_client.get_server() + '/common/resources/getMachineJSON?' +
urlencode(params))
return resp.json()
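    # the save endpoint appears to take form fields 'resid' and 'json', the
    # latter being the machine payload serialised as a JSON string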
def save_machine(self, mid, machine_data):
params = {'resid': mid, 'json': json.dumps(machine_data)}
self.api_client.session.post(
self.api_client.get_server() + '/common/resources/saveMachineJSON', params)
|
[
"urllib.parse.urlencode",
"json.dumps"
] |
[((505, 529), 'json.dumps', 'json.dumps', (['machine_data'], {}), '(machine_data)\n', (515, 529), False, 'import json\n'), ((371, 388), 'urllib.parse.urlencode', 'urlencode', (['params'], {}), '(params)\n', (380, 388), False, 'from urllib.parse import urlencode\n')]
|
import numpy as np
import random
from time import time
random.seed(42)
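# GRASP (Greedy Randomized Adaptive Search Procedure) for the 0/1 knapsack:
# each iteration builds a randomised greedy solution from a restricted
# candidate list, then improves it with a simple add-one-item local search.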
def semi_greedy_construction(window, number_items, weight_max, values_items, weight_items):
efficiency = np.divide(values_items, weight_items)
items = {}
for i in range(number_items):
items[i] = efficiency[i], values_items[i], weight_items[i]
items = sorted(items.values(), reverse=True)
result_final = []
value = 0
weight = 0
while len(items) > 0 and weight < weight_max:
if len(items) >= window: tmp_window = window
else: tmp_window = len(items)
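        # restricted candidate list: pick uniformly among the `window` most
        # efficient remaining items (value/weight ratio)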
index = random.randint(0,tmp_window-1)
value_item = items[index][1]
weight_item = items[index][2]
if weight_item+weight <= weight_max:
result_final.append(items[index][1])
value += value_item
weight += weight_item
del items[index]
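    # rebuild the 0/1 decision vector from the chosen item values; note this
    # lookup assumes item values are distinct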
solution = np.zeros(number_items,dtype=np.int16)
for item in values_items:
if item in result_final: solution[values_items.index(item)] = 1
return solution, value, weight
def local_search(solution, values_items, weight_items, value, weight, weight_max):
length = len(solution)
neighbor = (solution.copy(), value, weight)
    for i in range(length):
        if solution[i] == 0:
            if weight+weight_items[i] <= weight_max:
                if value+values_items[i] > neighbor[1]:
                    temp = solution.copy()
                    temp[i] = 1
                    # keep the tuple order (solution, value, weight) consistent
                    neighbor = temp, value+values_items[i], weight+weight_items[i]
    # no improving move found: return the current value
    if value == neighbor[1]: return value
return local_search(neighbor[0], values_items, weight_items, neighbor[1], neighbor[2], weight_max)
def grasp(max_it, window, number_items, weight_max, values_items, weight_items):
best_solution = 0
for i in range(max_it):
solution, value, weight = semi_greedy_construction(window, number_items, weight_max, values_items, weight_items)
solution = local_search(solution, values_items, weight_items, value, weight, weight_max)
if solution > best_solution: best_solution = solution
return best_solution
|
[
"numpy.zeros",
"numpy.divide",
"random.seed",
"random.randint"
] |
[((56, 71), 'random.seed', 'random.seed', (['(42)'], {}), '(42)\n', (67, 71), False, 'import random\n'), ((179, 216), 'numpy.divide', 'np.divide', (['values_items', 'weight_items'], {}), '(values_items, weight_items)\n', (188, 216), True, 'import numpy as np\n'), ((816, 854), 'numpy.zeros', 'np.zeros', (['number_items'], {'dtype': 'np.int16'}), '(number_items, dtype=np.int16)\n', (824, 854), True, 'import numpy as np\n'), ((563, 596), 'random.randint', 'random.randint', (['(0)', '(tmp_window - 1)'], {}), '(0, tmp_window - 1)\n', (577, 596), False, 'import random\n')]
|
"""
"""
# Global imports
from functools import wraps
from cornflow_core.authentication import BaseAuth
from cornflow_core.exceptions import InvalidData, NoPermission
from cornflow_core.models import ViewBaseModel, PermissionViewRoleBaseModel
# Partial imports
from flask import request, g, current_app
# Internal modules imports
from .const import PERMISSION_METHOD_MAP
from ..models import UserModel, PermissionsDAG
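# Auth extends BaseAuth with role/action/view permission checks on every
# request and, on closed deployments, per-DAG execution permissions.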
class Auth(BaseAuth):
def __init__(self, user_model=UserModel):
super().__init__(user_model)
def authenticate(self):
user = self.get_user_from_header(request.headers)
        Auth._get_permission_for_request(request, user.id)
g.user = user
return True
@staticmethod
def dag_permission_required(func):
"""
DAG permission decorator
:param func:
:return:
"""
@wraps(func)
def dag_decorator(*args, **kwargs):
if int(current_app.config["OPEN_DEPLOYMENT"]) == 0:
user_id = g.user.id
dag_id = request.json.get("schema", None)
if dag_id is None:
raise InvalidData(
error="The request does not specify a schema to use",
status_code=400,
)
else:
if PermissionsDAG.check_if_has_permissions(user_id, dag_id):
# We have permissions
return func(*args, **kwargs)
else:
raise NoPermission(
error="You do not have permission to use this DAG",
status_code=403,
)
else:
return func(*args, **kwargs)
return dag_decorator
@staticmethod
def return_user_from_token(token):
"""
Function used for internal testing. Given a token gives back the user_id encoded in it.
:param str token: the given token
:return: the user id code.
:rtype: int
"""
user_id = Auth.decode_token(token)["user_id"]
return user_id
"""
START OF INTERNAL PROTECTED METHODS
"""
@staticmethod
def _get_permission_for_request(req, user_id):
method, url = Auth._get_request_info(req)
user_roles = UserModel.get_one_user(user_id).roles
if user_roles is None or user_roles == {}:
raise NoPermission(
error="You do not have permission to access this endpoint",
status_code=403,
)
action_id = PERMISSION_METHOD_MAP[method]
view_id = ViewBaseModel.query.filter_by(url_rule=url).first().id
for role in user_roles:
has_permission = PermissionViewRoleBaseModel.get_permission(
role_id=role, api_view_id=view_id, action_id=action_id
)
if has_permission:
return True
raise NoPermission(
error="You do not have permission to access this endpoint", status_code=403
)
|
[
"cornflow_core.models.ViewBaseModel.query.filter_by",
"functools.wraps",
"flask.request.json.get",
"cornflow_core.models.PermissionViewRoleBaseModel.get_permission",
"cornflow_core.exceptions.NoPermission",
"cornflow_core.exceptions.InvalidData"
] |
[((888, 899), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (893, 899), False, 'from functools import wraps\n'), ((3018, 3111), 'cornflow_core.exceptions.NoPermission', 'NoPermission', ([], {'error': '"""You do not have permission to access this endpoint"""', 'status_code': '(403)'}), "(error='You do not have permission to access this endpoint',\n status_code=403)\n", (3030, 3111), False, 'from cornflow_core.exceptions import InvalidData, NoPermission\n'), ((2491, 2584), 'cornflow_core.exceptions.NoPermission', 'NoPermission', ([], {'error': '"""You do not have permission to access this endpoint"""', 'status_code': '(403)'}), "(error='You do not have permission to access this endpoint',\n status_code=403)\n", (2503, 2584), False, 'from cornflow_core.exceptions import InvalidData, NoPermission\n'), ((2814, 2917), 'cornflow_core.models.PermissionViewRoleBaseModel.get_permission', 'PermissionViewRoleBaseModel.get_permission', ([], {'role_id': 'role', 'api_view_id': 'view_id', 'action_id': 'action_id'}), '(role_id=role, api_view_id=\n view_id, action_id=action_id)\n', (2856, 2917), False, 'from cornflow_core.models import ViewBaseModel, PermissionViewRoleBaseModel\n'), ((1069, 1101), 'flask.request.json.get', 'request.json.get', (['"""schema"""', 'None'], {}), "('schema', None)\n", (1085, 1101), False, 'from flask import request, g, current_app\n'), ((1163, 1249), 'cornflow_core.exceptions.InvalidData', 'InvalidData', ([], {'error': '"""The request does not specify a schema to use"""', 'status_code': '(400)'}), "(error='The request does not specify a schema to use',\n status_code=400)\n", (1174, 1249), False, 'from cornflow_core.exceptions import InvalidData, NoPermission\n'), ((2697, 2740), 'cornflow_core.models.ViewBaseModel.query.filter_by', 'ViewBaseModel.query.filter_by', ([], {'url_rule': 'url'}), '(url_rule=url)\n', (2726, 2740), False, 'from cornflow_core.models import ViewBaseModel, PermissionViewRoleBaseModel\n'), ((1575, 1660), 'cornflow_core.exceptions.NoPermission', 'NoPermission', ([], {'error': '"""You do not have permission to use this DAG"""', 'status_code': '(403)'}), "(error='You do not have permission to use this DAG',\n status_code=403)\n", (1587, 1660), False, 'from cornflow_core.exceptions import InvalidData, NoPermission\n')]
|
#!/usr/bin/env python3
# Copyright (c) 2020-2022 The Bitcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the quorum detection of avalanche."""
from time import time
from test_framework.avatools import (
create_coinbase_stakes,
get_ava_p2p_interface,
)
from test_framework.key import ECKey, ECPubKey
from test_framework.messages import AvalancheVote, AvalancheVoteError
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
from test_framework.wallet_util import bytes_to_wif
class AvalancheQuorumTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [
['-enableavalanche=1',
'-avacooldown=0',
'-avatimeout=0',
'-avaminquorumstake=100000000',
'-avaminquorumconnectedstakeratio=0.8']
]
def mock_forward(self, delta):
self.mock_time += delta
self.nodes[0].setmocktime(self.mock_time)
def run_test(self):
self.mock_time = int(time())
self.mock_forward(0)
# Create a local node to poll from and a helper to send polls from it
# and assert on the response
node = self.nodes[0]
poll_node = get_ava_p2p_interface(node)
poll_node_pubkey = ECPubKey()
poll_node_pubkey.set(bytes.fromhex(node.getavalanchekey()))
def poll_and_assert_response(expected):
# Send poll for best block
block = int(node.getbestblockhash(), 16)
poll_node.send_poll([block])
# Get response and check that the vote is what we expect
response = poll_node.wait_for_avaresponse()
r = response.response
assert poll_node_pubkey.verify_schnorr(response.sig, r.get_hash())
assert_equal(len(r.votes), 1)
actual = repr(r.votes[0])
expected = repr(AvalancheVote(expected, block))
assert_equal(actual, expected)
# Create peers to poll
num_quorum_peers = 2
coinbase_key = node.get_deterministic_priv_key().key
blocks = node.generate(num_quorum_peers)
peers = []
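        # Give each quorum peer a deterministic private key and a stake proof
        # built from one of the freshly generated coinbase outputs.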
for i in range(0, num_quorum_peers):
keyHex = "12b004fff7f4b69ef8650e767f18f11ede158148b425660723b9f9a66e61f75" + \
str(i)
k = ECKey()
k.set(bytes.fromhex(keyHex), True)
stakes = create_coinbase_stakes(
node, [blocks[i]], coinbase_key)
proof = node.buildavalancheproof(1, 1, bytes_to_wif(k.get_bytes()),
stakes)
peers.append({'key': k, 'proof': proof, 'stake': stakes})
def addavalanchenode(peer):
pubkey = peer['key'].get_pubkey().get_bytes().hex()
assert node.addavalanchenode(
peer['node'].nodeid, pubkey, peer['proof']) is True
# Start polling. The response should be UNKNOWN because there's no
# score
poll_and_assert_response(AvalancheVoteError.UNKNOWN)
# Create one peer with half the score and add one node
peers[0]['node'] = get_ava_p2p_interface(node)
addavalanchenode(peers[0])
poll_and_assert_response(AvalancheVoteError.UNKNOWN)
# Create a second peer with the other half and add one node
peers[1]['node'] = get_ava_p2p_interface(node)
addavalanchenode(peers[1])
poll_and_assert_response(AvalancheVoteError.ACCEPTED)
# Disconnect peer 1's node which drops us below the threshold, but we've
# latched that the quorum is established
self.mock_forward(1)
peers[1]['node'].peer_disconnect()
peers[1]['node'].wait_for_disconnect()
poll_and_assert_response(AvalancheVoteError.ACCEPTED)
# Reconnect node and re-establish quorum
peers[1]['node'] = get_ava_p2p_interface(node)
addavalanchenode(peers[1])
poll_and_assert_response(AvalancheVoteError.ACCEPTED)
if __name__ == '__main__':
AvalancheQuorumTest().main()
|
[
"test_framework.avatools.get_ava_p2p_interface",
"time.time",
"test_framework.messages.AvalancheVote",
"test_framework.avatools.create_coinbase_stakes",
"test_framework.util.assert_equal",
"test_framework.key.ECKey",
"test_framework.key.ECPubKey"
] |
[((1404, 1431), 'test_framework.avatools.get_ava_p2p_interface', 'get_ava_p2p_interface', (['node'], {}), '(node)\n', (1425, 1431), False, 'from test_framework.avatools import create_coinbase_stakes, get_ava_p2p_interface\n'), ((1459, 1469), 'test_framework.key.ECPubKey', 'ECPubKey', ([], {}), '()\n', (1467, 1469), False, 'from test_framework.key import ECKey, ECPubKey\n'), ((3315, 3342), 'test_framework.avatools.get_ava_p2p_interface', 'get_ava_p2p_interface', (['node'], {}), '(node)\n', (3336, 3342), False, 'from test_framework.avatools import create_coinbase_stakes, get_ava_p2p_interface\n'), ((3535, 3562), 'test_framework.avatools.get_ava_p2p_interface', 'get_ava_p2p_interface', (['node'], {}), '(node)\n', (3556, 3562), False, 'from test_framework.avatools import create_coinbase_stakes, get_ava_p2p_interface\n'), ((4049, 4076), 'test_framework.avatools.get_ava_p2p_interface', 'get_ava_p2p_interface', (['node'], {}), '(node)\n', (4070, 4076), False, 'from test_framework.avatools import create_coinbase_stakes, get_ava_p2p_interface\n'), ((1202, 1208), 'time.time', 'time', ([], {}), '()\n', (1206, 1208), False, 'from time import time\n'), ((2112, 2142), 'test_framework.util.assert_equal', 'assert_equal', (['actual', 'expected'], {}), '(actual, expected)\n', (2124, 2142), False, 'from test_framework.util import assert_equal\n'), ((2508, 2515), 'test_framework.key.ECKey', 'ECKey', ([], {}), '()\n', (2513, 2515), False, 'from test_framework.key import ECKey, ECPubKey\n'), ((2584, 2639), 'test_framework.avatools.create_coinbase_stakes', 'create_coinbase_stakes', (['node', '[blocks[i]]', 'coinbase_key'], {}), '(node, [blocks[i]], coinbase_key)\n', (2606, 2639), False, 'from test_framework.avatools import create_coinbase_stakes, get_ava_p2p_interface\n'), ((2068, 2098), 'test_framework.messages.AvalancheVote', 'AvalancheVote', (['expected', 'block'], {}), '(expected, block)\n', (2081, 2098), False, 'from test_framework.messages import AvalancheVote, AvalancheVoteError\n')]
|
# ----------------------------------------------------------------------------
# OpenBootcamp - Reto Diario 06
# Created By : Rikhen
# version =' 1.0'
# ---------------------------------------------------------------------------
import sys
import calculator as calc
import converter as conv
import validations as valid
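# Expected input: four whitespace-separated tokens, "<operator> <operand> y <operand>"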
print("Introduzca el cálculo deseado (operadores válidos: suma, resta, multiplica, divide)")
entrada = input()
varstrs = entrada.split()
if len(varstrs) != 4:
sys.exit("La entrada no es correcta!")
# Check whether the variables are of type 'string'
for s in varstrs:
if valid.is_string(s):
continue
else:
sys.exit(str(s) + " no es una cadena!")
# Assign the variables
operador = varstrs[0]
conector = varstrs[2]
num1str = varstrs[1]
num2str = varstrs[3]
# Check that the connector is 'y'
if conector != 'y':
sys.exit("El conector no es válido!")
# Check whether the variable is included in the dictionary
if valid.is_exists(num1str):
try:
        # Convert the string into a number
num1int = conv.convert_to_integer(num1str)
except Exception as e:
sys.exit("Ha ocurrido un error: " + e)
else:
sys.exit("El valor " + num1str + " no es válido")
if valid.is_exists(num2str):
try:
num2int = conv.convert_to_integer(num2str)
except Exception as e:
sys.exit("Ha ocurrido un error: " + e)
else:
sys.exit("El valor " + num2str + " no es válido")
# Compute the numeric result
try:
resint = calc.calculate(operador, num1int, num2int)
except Exception as e:
sys.exit("Ha ocurrido un error: " + e)
# Convert the result back into a string
try:
resstr = conv.convert_to_string(abs(resint))
if resint < 0:
resstr = "menos " + resstr
except Exception as e:
sys.exit("Ha ocurrido un error: " + e)
# Print the final result
print("Resultado: ", resstr)
|
[
"converter.convert_to_integer",
"validations.is_string",
"validations.is_exists",
"calculator.calculate",
"sys.exit"
] |
[((954, 978), 'validations.is_exists', 'valid.is_exists', (['num1str'], {}), '(num1str)\n', (969, 978), True, 'import validations as valid\n'), ((1221, 1245), 'validations.is_exists', 'valid.is_exists', (['num2str'], {}), '(num2str)\n', (1236, 1245), True, 'import validations as valid\n'), ((478, 516), 'sys.exit', 'sys.exit', (['"""La entrada no es correcta!"""'], {}), "('La entrada no es correcta!')\n", (486, 516), False, 'import sys\n'), ((593, 611), 'validations.is_string', 'valid.is_string', (['s'], {}), '(s)\n', (608, 611), True, 'import validations as valid\n'), ((854, 891), 'sys.exit', 'sys.exit', (['"""El conector no es válido!"""'], {}), "('El conector no es válido!')\n", (862, 891), False, 'import sys\n'), ((1167, 1216), 'sys.exit', 'sys.exit', (["('El valor ' + num1str + ' no es válido')"], {}), "('El valor ' + num1str + ' no es válido')\n", (1175, 1216), False, 'import sys\n'), ((1391, 1440), 'sys.exit', 'sys.exit', (["('El valor ' + num2str + ' no es válido')"], {}), "('El valor ' + num2str + ' no es válido')\n", (1399, 1440), False, 'import sys\n'), ((1491, 1533), 'calculator.calculate', 'calc.calculate', (['operador', 'num1int', 'num2int'], {}), '(operador, num1int, num2int)\n', (1505, 1533), True, 'import calculator as calc\n'), ((1050, 1082), 'converter.convert_to_integer', 'conv.convert_to_integer', (['num1str'], {}), '(num1str)\n', (1073, 1082), True, 'import converter as conv\n'), ((1274, 1306), 'converter.convert_to_integer', 'conv.convert_to_integer', (['num2str'], {}), '(num2str)\n', (1297, 1306), True, 'import converter as conv\n'), ((1561, 1599), 'sys.exit', 'sys.exit', (["('Ha ocurrido un error: ' + e)"], {}), "('Ha ocurrido un error: ' + e)\n", (1569, 1599), False, 'import sys\n'), ((1778, 1816), 'sys.exit', 'sys.exit', (["('Ha ocurrido un error: ' + e)"], {}), "('Ha ocurrido un error: ' + e)\n", (1786, 1816), False, 'import sys\n'), ((1118, 1156), 'sys.exit', 'sys.exit', (["('Ha ocurrido un error: ' + e)"], {}), "('Ha ocurrido un error: ' + e)\n", (1126, 1156), False, 'import sys\n'), ((1342, 1380), 'sys.exit', 'sys.exit', (["('Ha ocurrido un error: ' + e)"], {}), "('Ha ocurrido un error: ' + e)\n", (1350, 1380), False, 'import sys\n')]
|
import pytest
from sym_lis3 import GlobalEnv
def test_map_basic():
g = GlobalEnv()
assert list(g.eval_str('(map (lambda (x) (* 2 x)) (list 1 2 3))')) == [2, 4, 6]
def test_map_curry():
g = GlobalEnv()
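    # Define a two-argument function, then partially apply its first argument with curry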
g.eval_str('(define "foo" (lambda (x y) (* x y)))')
assert list(g.eval_str('(map (curry foo 2) (list 1 2 3))')) == [2, 4, 6]
|
[
"sym_lis3.GlobalEnv"
] |
[((77, 88), 'sym_lis3.GlobalEnv', 'GlobalEnv', ([], {}), '()\n', (86, 88), False, 'from sym_lis3 import GlobalEnv\n'), ((205, 216), 'sym_lis3.GlobalEnv', 'GlobalEnv', ([], {}), '()\n', (214, 216), False, 'from sym_lis3 import GlobalEnv\n')]
|
import os
import ast
import pytest
from flake8.options.manager import OptionManager
from flake8_fine_pytest.checker import FinePytestChecker
def parse_options(allowed_test_directories, allowed_test_arguments_count, allowed_assert_count):
options = OptionManager()
options.allowed_test_directories = allowed_test_directories
options.allowed_test_arguments_count = allowed_test_arguments_count
options.allowed_assert_count = allowed_assert_count
FinePytestChecker.parse_options(options)
@pytest.fixture
def run_validator_for_test_files():
def _run(filename, allowed_test_directories=None, allowed_test_arguments_count=None, allowed_assert_count=None):
test_file_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'test_files',
filename,
)
with open(test_file_path, 'r') as file_handler:
raw_content = file_handler.read()
tree = ast.parse(raw_content)
checker = FinePytestChecker(tree=tree, filename=test_file_path)
parse_options(allowed_test_directories, allowed_test_arguments_count, allowed_assert_count)
return list(checker.run())
return _run
|
[
"flake8_fine_pytest.checker.FinePytestChecker.parse_options",
"flake8_fine_pytest.checker.FinePytestChecker",
"os.path.abspath",
"flake8.options.manager.OptionManager",
"ast.parse"
] |
[((256, 271), 'flake8.options.manager.OptionManager', 'OptionManager', ([], {}), '()\n', (269, 271), False, 'from flake8.options.manager import OptionManager\n'), ((470, 510), 'flake8_fine_pytest.checker.FinePytestChecker.parse_options', 'FinePytestChecker.parse_options', (['options'], {}), '(options)\n', (501, 510), False, 'from flake8_fine_pytest.checker import FinePytestChecker\n'), ((954, 976), 'ast.parse', 'ast.parse', (['raw_content'], {}), '(raw_content)\n', (963, 976), False, 'import ast\n'), ((995, 1048), 'flake8_fine_pytest.checker.FinePytestChecker', 'FinePytestChecker', ([], {'tree': 'tree', 'filename': 'test_file_path'}), '(tree=tree, filename=test_file_path)\n', (1012, 1048), False, 'from flake8_fine_pytest.checker import FinePytestChecker\n'), ((749, 774), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (764, 774), False, 'import os\n')]
|
from . import models
from .initialize import initialize_db
from async_asgi_testclient import TestClient
from fastapi.applications import FastAPI
from fastapi_asyncpg import configure_asyncpg
from fastapi_asyncpg import create_pool_test
from fastapi_iam import configure_iam
from pathlib import Path
from pytest_docker_fixtures import images
import asyncpg
import pytest
here = Path(__file__).parent
images.configure(
"postgresql", "postgres", "11.1", env={"POSTGRES_DB": "test_db"}
)
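# No-op initializer: the schema is created by initialize_db before the test pool starts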
async def noop(db):
pass
@pytest.fixture
async def pool(pg):
host, port = pg
url = f"postgresql://postgres@{host}:{port}/test_db"
settings = {"db_schema": None}
# apply migrations
conn = await asyncpg.connect(dsn=url)
await initialize_db(settings, conn)
pool = await create_pool_test(url, initialize=noop)
await pool.start()
yield pool
if pool._conn.is_closed():
return
await pool.release()
@pytest.fixture
async def conn(pool):
async with pool.acquire() as db:
yield db
@pytest.fixture
async def theapp(pool):
app = FastAPI()
db = configure_asyncpg(app, "", pool=pool)
settings = {}
iam = configure_iam(settings, fastapi_asyncpg=db)
app.include_router(iam.router, prefix="/auth")
yield iam, app
users_ = [
{
"email": "<EMAIL>",
"password": "<PASSWORD>",
"is_active": True,
"is_staff": True,
"is_admin": False,
},
{
"email": "<EMAIL>",
"password": "<PASSWORD>",
"is_active": True,
"is_staff": True,
"is_admin": True,
},
{
"email": "<EMAIL>",
"password": "<PASSWORD>",
"is_active": False,
"is_staff": True,
"is_admin": True,
},
]
@pytest.fixture
async def users(theapp):
iam, app = theapp
async with TestClient(app) as client:
for user in users_:
await models.create_user(iam, user.copy())
yield client, iam
|
[
"asyncpg.connect",
"async_asgi_testclient.TestClient",
"fastapi.applications.FastAPI",
"pytest_docker_fixtures.images.configure",
"fastapi_iam.configure_iam",
"pathlib.Path",
"fastapi_asyncpg.create_pool_test",
"fastapi_asyncpg.configure_asyncpg"
] |
[((401, 487), 'pytest_docker_fixtures.images.configure', 'images.configure', (['"""postgresql"""', '"""postgres"""', '"""11.1"""'], {'env': "{'POSTGRES_DB': 'test_db'}"}), "('postgresql', 'postgres', '11.1', env={'POSTGRES_DB':\n 'test_db'})\n", (417, 487), False, 'from pytest_docker_fixtures import images\n'), ((378, 392), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (382, 392), False, 'from pathlib import Path\n'), ((1090, 1099), 'fastapi.applications.FastAPI', 'FastAPI', ([], {}), '()\n', (1097, 1099), False, 'from fastapi.applications import FastAPI\n'), ((1109, 1146), 'fastapi_asyncpg.configure_asyncpg', 'configure_asyncpg', (['app', '""""""'], {'pool': 'pool'}), "(app, '', pool=pool)\n", (1126, 1146), False, 'from fastapi_asyncpg import configure_asyncpg\n'), ((1175, 1218), 'fastapi_iam.configure_iam', 'configure_iam', (['settings'], {'fastapi_asyncpg': 'db'}), '(settings, fastapi_asyncpg=db)\n', (1188, 1218), False, 'from fastapi_iam import configure_iam\n'), ((713, 737), 'asyncpg.connect', 'asyncpg.connect', ([], {'dsn': 'url'}), '(dsn=url)\n', (728, 737), False, 'import asyncpg\n'), ((796, 834), 'fastapi_asyncpg.create_pool_test', 'create_pool_test', (['url'], {'initialize': 'noop'}), '(url, initialize=noop)\n', (812, 834), False, 'from fastapi_asyncpg import create_pool_test\n'), ((1848, 1863), 'async_asgi_testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (1858, 1863), False, 'from async_asgi_testclient import TestClient\n')]
|
from starlette.responses import PlainTextResponse, RedirectResponse
from starlette.applications import Starlette
from starlette.templating import Jinja2Templates
from starlette.routing import Mount, Route
from starlette.staticfiles import StaticFiles
from starlette.exceptions import HTTPException
from template_functions import (
format_power,
osm_link,
country_name,
format_length,
format_voltage,
format_percent,
)
from config import database, config
from util import cache_for, country_required
from sitemap import sitemap
from data import (
get_countries,
stats_power_line,
get_plant,
get_plant_generator_summary,
get_wikidata,
get_commons_thumbnail,
)
DEBUG = config("DEBUG", cast=bool, default=False)
templates = Jinja2Templates(directory="templates")
templates.env.filters["power"] = format_power
templates.env.filters["distance"] = format_length
templates.env.filters["voltage"] = format_voltage
templates.env.filters["percent"] = format_percent
templates.env.filters["country_name"] = country_name
templates.env.globals["osm_link"] = osm_link
app = Starlette(
debug=DEBUG,
on_startup=[database.connect],
on_shutdown=[database.disconnect],
routes=[
Mount("/static", app=StaticFiles(directory="static"), name="static"),
Route("/sitemap.xml", sitemap),
],
)
@app.route("/")
async def main(request):
# Dummy response - this endpoint is served statically in production from the webpack build
return PlainTextResponse("")
@app.route("/about")
@cache_for(3600)
async def about(request):
return templates.TemplateResponse("about.html", {"request": request})
@app.route("/about/exports")
@cache_for(3600)
async def exports(request):
return templates.TemplateResponse("exports.html", {"request": request})
@app.route("/copyright")
@cache_for(3600)
async def copyright(request):
return templates.TemplateResponse("copyright.html", {"request": request})
@app.route("/stats")
@cache_for(86400)
async def stats(request):
power_lines = await stats_power_line()
return templates.TemplateResponse(
"index.html",
{
"request": request,
"countries": await get_countries(),
"power_lines": power_lines,
},
)
@app.route("/stats/area/{country}")
@country_required
@cache_for(3600)
async def country(request, country):
plant_stats = await database.fetch_one(
query="""SELECT SUM(convert_power(output)) AS output, COUNT(*)
FROM power_plant
WHERE ST_Contains(
(SELECT ST_Transform(geom, 3857) FROM countries.country_eez where gid = :gid),
geometry)
AND tags -> 'construction:power' IS NULL
""",
values={"gid": country["gid"]},
)
plant_source_stats = await database.fetch_all(
query="""SELECT first_semi(source) AS source, sum(convert_power(output)) AS output, count(*)
FROM power_plant
WHERE ST_Contains(
(SELECT ST_Transform(geom, 3857) FROM countries.country_eez WHERE gid = :gid),
geometry)
AND tags -> 'construction:power' IS NULL
GROUP BY first_semi(source)
ORDER BY SUM(convert_power(output)) DESC NULLS LAST""",
values={"gid": country["gid"]},
)
power_lines = await stats_power_line(country["union"])
return templates.TemplateResponse(
"country.html",
{
"request": request,
"country": country["union"],
"plant_stats": plant_stats,
"plant_source_stats": plant_source_stats,
"power_lines": power_lines,
"canonical": request.url_for("country", country=country["union"]),
},
)
@app.route("/stats/area/{country}/plants")
@country_required
@cache_for(3600)
async def plants_country(request, country):
gid = country[0]
plants = await database.fetch_all(
query="""SELECT osm_id, name, tags->'name:en' AS name_en, tags->'wikidata' AS wikidata,
tags->'plant:method' AS method, tags->'operator' AS operator,
convert_power(output) AS output,
source, ST_GeometryType(geometry) AS geom_type
FROM power_plant
WHERE ST_Contains(
(SELECT ST_Transform(geom, 3857) FROM countries.country_eez WHERE gid = :gid),
geometry)
AND tags -> 'construction:power' IS NULL
ORDER BY convert_power(output) DESC NULLS LAST, name ASC NULLS LAST """,
values={"gid": gid},
)
source = None
if "source" in request.query_params:
source = request.query_params["source"].lower()
plants = [
plant for plant in plants if source in plant["source"].lower().split(";")
]
min_output = None
if "min_output" in request.query_params:
try:
min_output = int(request.query_params["min_output"])
plants = [
plant
for plant in plants
if plant["output"] and plant["output"] >= min_output
]
except ValueError:
pass
return templates.TemplateResponse(
"plants_country.html",
{
"request": request,
"plants": plants,
"country": country["union"],
"source": source,
"min_output": min_output,
# Canonical URL for all plants without the source filter, to avoid confusing Google.
"canonical": request.url_for("plants_country", country=country["union"]),
},
)
@app.route("/stats/area/{country}/plants/construction")
@country_required
@cache_for(3600)
async def plants_construction_country(request, country):
gid = country[0]
plants = await database.fetch_all(
query="""SELECT osm_id, name, tags->'name:en' AS name_en, tags->'wikidata' AS wikidata,
tags->'plant:method' AS method, tags->'operator' AS operator,
tags->'start_date' AS start_date,
convert_power(output) AS output,
source, ST_GeometryType(geometry) AS geom_type
FROM power_plant
WHERE ST_Contains(
(SELECT ST_Transform(geom, 3857) FROM countries.country_eez WHERE gid = :gid),
geometry)
AND tags -> 'construction:power' IS NOT NULL
ORDER BY convert_power(output) DESC NULLS LAST, name ASC NULLS LAST """,
values={"gid": gid},
)
return templates.TemplateResponse(
"plants_country.html",
{
"construction": True,
"request": request,
"plants": plants,
"country": country["union"],
},
)
@app.route("/stats/object/plant/{id}")
@cache_for(86400)
async def stats_object(request):
try:
id = int(request.path_params["id"])
except ValueError:
raise HTTPException(400)
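    # Find the country whose EEZ contains the plant, then redirect to its canonical page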
res = await database.fetch_one(
"""SELECT country_eez."union" FROM power_plant, countries.country_eez WHERE
ST_Contains(ST_Transform(country_eez.geom, 3857), geometry)
AND power_plant.osm_id = :id""",
values={"id": id},
)
if not res:
raise HTTPException(404)
return RedirectResponse(
request.url_for("plant_detail", country=res["union"], id=id)
)
@app.route("/stats/area/{country}/plants/{id}")
@country_required
@cache_for(3600)
async def plant_detail(request, country):
try:
plant_id = int(request.path_params["id"])
except ValueError:
raise HTTPException(404, "Invalid plant ID")
plant = await get_plant(plant_id, country["gid"])
if plant is None:
raise HTTPException(404, "Nonexistent power plant")
generator_summary = await get_plant_generator_summary(plant_id)
if "wikidata" in plant["tags"]:
wd = await get_wikidata(plant["tags"]["wikidata"])
else:
wd = None
image_data = None
if (
wd
and "P18" in wd["claims"]
and wd["claims"]["P18"][0]["mainsnak"]["datatype"] == "commonsMedia"
):
image_data = await get_commons_thumbnail(
wd["claims"]["P18"][0]["mainsnak"]["datavalue"]["value"], 400
)
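    # Collect reference tags ("ref:*", "repd:id"), splitting multi-valued entries on ";"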
ref_tags = []
for k, v in plant["tags"].items():
if k.startswith("ref:") or k in ["repd:id"]:
for split_val in v.split(";"):
ref_tags.append((k, split_val))
return templates.TemplateResponse(
"plant_detail.html",
{
"construction": True,
"plant": plant,
"request": request,
"generator_summary": generator_summary,
"country": country["union"],
"wikidata": wd,
"image_data": image_data,
"ref_tags": ref_tags,
},
)
import wikidata # noqa
|
[
"config.config",
"config.database.fetch_one",
"data.stats_power_line",
"data.get_plant_generator_summary",
"data.get_wikidata",
"util.cache_for",
"starlette.exceptions.HTTPException",
"starlette.staticfiles.StaticFiles",
"starlette.responses.PlainTextResponse",
"data.get_countries",
"data.get_plant",
"data.get_commons_thumbnail",
"config.database.fetch_all",
"starlette.templating.Jinja2Templates",
"starlette.routing.Route"
] |
[((717, 758), 'config.config', 'config', (['"""DEBUG"""'], {'cast': 'bool', 'default': '(False)'}), "('DEBUG', cast=bool, default=False)\n", (723, 758), False, 'from config import database, config\n'), ((771, 809), 'starlette.templating.Jinja2Templates', 'Jinja2Templates', ([], {'directory': '"""templates"""'}), "(directory='templates')\n", (786, 809), False, 'from starlette.templating import Jinja2Templates\n'), ((1550, 1565), 'util.cache_for', 'cache_for', (['(3600)'], {}), '(3600)\n', (1559, 1565), False, 'from util import cache_for, country_required\n'), ((1698, 1713), 'util.cache_for', 'cache_for', (['(3600)'], {}), '(3600)\n', (1707, 1713), False, 'from util import cache_for, country_required\n'), ((1846, 1861), 'util.cache_for', 'cache_for', (['(3600)'], {}), '(3600)\n', (1855, 1861), False, 'from util import cache_for, country_required\n'), ((1994, 2010), 'util.cache_for', 'cache_for', (['(86400)'], {}), '(86400)\n', (2003, 2010), False, 'from util import cache_for, country_required\n'), ((2345, 2360), 'util.cache_for', 'cache_for', (['(3600)'], {}), '(3600)\n', (2354, 2360), False, 'from util import cache_for, country_required\n'), ((3964, 3979), 'util.cache_for', 'cache_for', (['(3600)'], {}), '(3600)\n', (3973, 3979), False, 'from util import cache_for, country_required\n'), ((5903, 5918), 'util.cache_for', 'cache_for', (['(3600)'], {}), '(3600)\n', (5912, 5918), False, 'from util import cache_for, country_required\n'), ((7080, 7096), 'util.cache_for', 'cache_for', (['(86400)'], {}), '(86400)\n', (7089, 7096), False, 'from util import cache_for, country_required\n'), ((7742, 7757), 'util.cache_for', 'cache_for', (['(3600)'], {}), '(3600)\n', (7751, 7757), False, 'from util import cache_for, country_required\n'), ((1504, 1525), 'starlette.responses.PlainTextResponse', 'PlainTextResponse', (['""""""'], {}), "('')\n", (1521, 1525), False, 'from starlette.responses import PlainTextResponse, RedirectResponse\n'), ((2061, 2079), 'data.stats_power_line', 'stats_power_line', ([], {}), '()\n', (2077, 2079), False, 'from data import get_countries, stats_power_line, get_plant, get_plant_generator_summary, get_wikidata, get_commons_thumbnail\n'), ((2422, 2844), 'config.database.fetch_one', 'database.fetch_one', ([], {'query': '"""SELECT SUM(convert_power(output)) AS output, COUNT(*)\n                 FROM power_plant\n                 WHERE ST_Contains(\n                     (SELECT ST_Transform(geom, 3857) FROM countries.country_eez where gid = :gid),\n                     geometry)\n                 AND tags -> \'construction:power\' IS NULL\n                 """', 'values': "{'gid': country['gid']}"}), '(query=\n    """SELECT SUM(convert_power(output)) AS output, COUNT(*)\n                 FROM power_plant\n                 WHERE ST_Contains(\n                     (SELECT ST_Transform(geom, 3857) FROM countries.country_eez where gid = :gid),\n                     geometry)\n                 AND tags -> \'construction:power\' IS NULL\n                 """\n    , values={\'gid\': country[\'gid\']})\n', (2440, 2844), False, 'from config import database, config\n'), ((2890, 3449), 'config.database.fetch_all', 'database.fetch_all', ([], {'query': '"""SELECT first_semi(source) AS source, sum(convert_power(output)) AS output, count(*)\n            FROM power_plant\n            WHERE ST_Contains(\n                (SELECT ST_Transform(geom, 3857) FROM countries.country_eez WHERE gid = :gid),\n                geometry)\n            AND tags -> \'construction:power\' IS NULL\n            GROUP BY first_semi(source)\n            ORDER BY SUM(convert_power(output)) DESC NULLS LAST"""', 'values': "{'gid': country['gid']}"}), '(query=\n    """SELECT first_semi(source) AS source, sum(convert_power(output)) AS output, count(*)\n            FROM power_plant\n            WHERE ST_Contains(\n                (SELECT ST_Transform(geom, 3857) FROM countries.country_eez WHERE gid = :gid),\n                geometry)\n            AND tags -> \'construction:power\' IS NULL\n            GROUP BY first_semi(source)\n            ORDER BY SUM(convert_power(output)) DESC NULLS LAST"""\n    , values={\'gid\': country[\'gid\']})\n', (2908, 3449), False, 'from config import database, config\n'), ((3488, 3522), 'data.stats_power_line', 'stats_power_line', (["country['union']"], {}), "(country['union'])\n", (3504, 3522), False, 'from data import get_countries, stats_power_line, get_plant, get_plant_generator_summary, get_wikidata, get_commons_thumbnail\n'), ((4065, 4775), 'config.database.fetch_all', 'database.fetch_all', ([], {'query': '"""SELECT osm_id, name, tags->\'name:en\' AS name_en, tags->\'wikidata\' AS wikidata,\n            tags->\'plant:method\' AS method, tags->\'operator\' AS operator,\n            convert_power(output) AS output,\n            source, ST_GeometryType(geometry) AS geom_type\n            FROM power_plant\n            WHERE ST_Contains(\n                (SELECT ST_Transform(geom, 3857) FROM countries.country_eez WHERE gid = :gid),\n                geometry)\n            AND tags -> \'construction:power\' IS NULL\n            ORDER BY convert_power(output) DESC NULLS LAST, name ASC NULLS LAST """', 'values': "{'gid': gid}"}), '(query=\n    """SELECT osm_id, name, tags->\'name:en\' AS name_en, tags->\'wikidata\' AS wikidata,\n            tags->\'plant:method\' AS method, tags->\'operator\' AS operator,\n            convert_power(output) AS output,\n            source, ST_GeometryType(geometry) AS geom_type\n            FROM power_plant\n            WHERE ST_Contains(\n                (SELECT ST_Transform(geom, 3857) FROM countries.country_eez WHERE gid = :gid),\n                geometry)\n            AND tags -> \'construction:power\' IS NULL\n            ORDER BY convert_power(output) DESC NULLS LAST, name ASC NULLS LAST """\n    , values={\'gid\': gid})\n', (4083, 4775), False, 'from config import database, config\n'), ((6017, 6789), 'config.database.fetch_all', 'database.fetch_all', ([], {'query': '"""SELECT osm_id, name, tags->\'name:en\' AS name_en, tags->\'wikidata\' AS wikidata,\n            tags->\'plant:method\' AS method, tags->\'operator\' AS operator,\n            tags->\'start_date\' AS start_date,\n            convert_power(output) AS output,\n            source, ST_GeometryType(geometry) AS geom_type\n            FROM power_plant\n            WHERE ST_Contains(\n                (SELECT ST_Transform(geom, 3857) FROM countries.country_eez WHERE gid = :gid),\n                geometry)\n            AND tags -> \'construction:power\' IS NOT NULL\n            ORDER BY convert_power(output) DESC NULLS LAST, name ASC NULLS LAST """', 'values': "{'gid': gid}"}), '(query=\n    """SELECT osm_id, name, tags->\'name:en\' AS name_en, tags->\'wikidata\' AS wikidata,\n            tags->\'plant:method\' AS method, tags->\'operator\' AS operator,\n            tags->\'start_date\' AS start_date,\n            convert_power(output) AS output,\n            source, ST_GeometryType(geometry) AS geom_type\n            FROM power_plant\n            WHERE ST_Contains(\n                (SELECT ST_Transform(geom, 3857) FROM countries.country_eez WHERE gid = :gid),\n                geometry)\n            AND tags -> \'construction:power\' IS NOT NULL\n            ORDER BY convert_power(output) DESC NULLS LAST, name ASC NULLS LAST """\n    , values={\'gid\': gid})\n', (6035, 6789), False, 'from config import database, config\n'), ((7256, 7504), 'config.database.fetch_one', 'database.fetch_one', (['"""SELECT country_eez."union" FROM power_plant, countries.country_eez WHERE\n        ST_Contains(ST_Transform(country_eez.geom, 3857), geometry)\n        AND power_plant.osm_id = :id"""'], {'values': "{'id': id}"}), '(\n    """SELECT country_eez."union" FROM power_plant, countries.country_eez WHERE\n        ST_Contains(ST_Transform(country_eez.geom, 3857), geometry)\n        AND power_plant.osm_id = :id"""\n    , values={\'id\': id})\n', (7274, 7504), False, 'from config import database, config\n'), ((7549, 7567), 'starlette.exceptions.HTTPException', 'HTTPException', (['(404)'], {}), '(404)\n', (7562, 7567), False, 'from starlette.exceptions import HTTPException\n'), ((7954, 7989), 'data.get_plant', 'get_plant', (['plant_id', "country['gid']"], {}), "(plant_id, country['gid'])\n", (7963, 7989), False, 'from data import get_countries, stats_power_line, get_plant, get_plant_generator_summary, get_wikidata, get_commons_thumbnail\n'), ((8026, 8071), 'starlette.exceptions.HTTPException', 'HTTPException', (['(404)', '"""Nonexistent power plant"""'], {}), "(404, 'Nonexistent power plant')\n", (8039, 8071), False, 'from starlette.exceptions import HTTPException\n'), ((8103, 8140), 'data.get_plant_generator_summary', 'get_plant_generator_summary', (['plant_id'], {}), '(plant_id)\n', (8130, 8140), False, 'from data import get_countries, stats_power_line, get_plant, get_plant_generator_summary, get_wikidata, get_commons_thumbnail\n'), ((1314, 1344), 'starlette.routing.Route', 'Route', (['"""/sitemap.xml"""', 'sitemap'], {}), "('/sitemap.xml', sitemap)\n", (1319, 1344), False, 'from starlette.routing import Mount, Route\n'), ((7220, 7238), 'starlette.exceptions.HTTPException', 'HTTPException', (['(400)'], {}), '(400)\n', (7233, 7238), False, 'from starlette.exceptions import HTTPException\n'), ((7896, 7934), 'starlette.exceptions.HTTPException', 'HTTPException', (['(404)', '"""Invalid plant ID"""'], {}), "(404, 'Invalid plant ID')\n", (7909, 7934), False, 'from starlette.exceptions import HTTPException\n'), ((8197, 8236), 'data.get_wikidata', 'get_wikidata', (["plant['tags']['wikidata']"], {}), "(plant['tags']['wikidata'])\n", (8209, 8236), False, 'from data import get_countries, stats_power_line, get_plant, get_plant_generator_summary, get_wikidata, get_commons_thumbnail\n'), ((8453, 8542), 'data.get_commons_thumbnail', 'get_commons_thumbnail', (["wd['claims']['P18'][0]['mainsnak']['datavalue']['value']", '(400)'], {}), "(wd['claims']['P18'][0]['mainsnak']['datavalue'][\n 'value'], 400)\n", (8474, 8542), False, 'from data import get_countries, stats_power_line, get_plant, get_plant_generator_summary, get_wikidata, get_commons_thumbnail\n'), ((2214, 2229), 'data.get_countries', 'get_countries', ([], {}), '()\n', (2227, 2229), False, 'from data import get_countries, stats_power_line, get_plant, get_plant_generator_summary, get_wikidata, get_commons_thumbnail\n'), ((1257, 1288), 'starlette.staticfiles.StaticFiles', 'StaticFiles', ([], {'directory': '"""static"""'}), "(directory='static')\n", (1268, 1288), False, 'from starlette.staticfiles import StaticFiles\n')]
|
import pytest
from cle_parcel_lookup import create_app
@pytest.fixture
def app():
return create_app()
@pytest.fixture
def client(app):
return app.test_client()
|
[
"cle_parcel_lookup.create_app"
] |
[((95, 107), 'cle_parcel_lookup.create_app', 'create_app', ([], {}), '()\n', (105, 107), False, 'from cle_parcel_lookup import create_app\n')]
|
import numpy as np
m,n = [int(i) for i in '2 7'.strip().split(' ')]
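# m = number of features per sample, n = number of training samples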
data1=[
'0.18 0.89 109.85',
'1.0 0.26 155.72',
'0.92 0.11 137.66',
'0.07 0.37 76.17',
'0.85 0.16 139.75',
'0.99 0.41 162.6',
'0.87 0.47 151.77'
]
X = []
Y = []
for item in data1:
data = item.strip().split(' ')
X.append(data[:m])
Y.append(data[m:])
data2 = [
'0.49 0.18',
'0.57 0.83',
'0.56 0.64',
'0.76 0.18'
]
X_new = []
for item in data2:
X_new.append(item.strip().split(' '))
X = np.array(X,float)
Y = np.array(Y,float)
X_new = np.array(X_new,float)
# Center X and Y by subtracting the training means
X_R = X-np.mean(X,axis=0)
Y_R = Y-np.mean(Y)
# Solve the normal equations on the centered data: beta = (X_R' X_R)^(-1) X_R' Y_R
beta = np.dot(np.linalg.inv(np.dot(X_R.T,X_R)),np.dot(X_R.T,Y_R))
# Predict: center the new inputs with X's training mean, then add back Y's mean
X_new_R = X_new-np.mean(X,axis=0)
Y_new_R = np.dot(X_new_R,beta)
Y_new = Y_new_R + np.mean(Y)
# Print each prediction rounded to 2 decimal places
for i in Y_new:
print(round(float(i),2))
|
[
"numpy.dot",
"numpy.mean",
"numpy.array"
] |
[((467, 485), 'numpy.array', 'np.array', (['X', 'float'], {}), '(X, float)\n', (475, 485), True, 'import numpy as np\n'), ((489, 507), 'numpy.array', 'np.array', (['Y', 'float'], {}), '(Y, float)\n', (497, 507), True, 'import numpy as np\n'), ((515, 537), 'numpy.array', 'np.array', (['X_new', 'float'], {}), '(X_new, float)\n', (523, 537), True, 'import numpy as np\n'), ((728, 749), 'numpy.dot', 'np.dot', (['X_new_R', 'beta'], {}), '(X_new_R, beta)\n', (734, 749), True, 'import numpy as np\n'), ((554, 572), 'numpy.mean', 'np.mean', (['X'], {'axis': '(0)'}), '(X, axis=0)\n', (561, 572), True, 'import numpy as np\n'), ((580, 590), 'numpy.mean', 'np.mean', (['Y'], {}), '(Y)\n', (587, 590), True, 'import numpy as np\n'), ((655, 673), 'numpy.dot', 'np.dot', (['X_R.T', 'Y_R'], {}), '(X_R.T, Y_R)\n', (661, 673), True, 'import numpy as np\n'), ((700, 718), 'numpy.mean', 'np.mean', (['X'], {'axis': '(0)'}), '(X, axis=0)\n', (707, 718), True, 'import numpy as np\n'), ((767, 777), 'numpy.mean', 'np.mean', (['Y'], {}), '(Y)\n', (774, 777), True, 'import numpy as np\n'), ((636, 654), 'numpy.dot', 'np.dot', (['X_R.T', 'X_R'], {}), '(X_R.T, X_R)\n', (642, 654), True, 'import numpy as np\n')]
|
from components.material import Material
# This assumes most pieces of armor will have 1 AC as base.
Skin = Material('skin', 'Skin', hardness=0, sharpness=0, potency=0.2, weight=0.1, value=0)
Flesh = Material('flesh', 'Flesh', hardness=0, sharpness=0, potency=0.2, weight=0.15, value=0)
Fur = Material('fur', 'Fur', hardness=0.1, sharpness=0, potency=0.2, weight=1, value=1)
Leather = Material('leather', 'Leather', hardness=0.1, sharpness=0, potency=0.2, weight=1, value=1)
StuddedLeather = Material('studded_leather', 'Studded Leather', hardness=0.2, sharpness=0, potency=0.2, weight=1.3, value=4.5)
Wood = Material('wood', 'Wood', hardness=0.3, sharpness=0.4, potency=0.5, weight=3, value=0.5)
Scale = Material('scale', 'Scale', hardness=0.4, sharpness=0.5, potency=0.4, weight=4.5, value=5)
Bone = Material('bone', 'Bone', hardness=0.5, sharpness=0.5, potency=0.5, weight=5, value=1)
Stone = Material('stone', 'Stone', hardness=0.5, sharpness=0.2, potency=0.1, weight=6, value=0.1)
Silver = Material('silver', 'Silver', hardness=0.5, sharpness=0.5, potency=1, weight=6, value=125)
Gold = Material('gold', 'Gold', hardness=0.5, sharpness=0.5, potency=2, weight=6, value=250)
Chain = Material('chain', 'Chain', hardness=0.6, sharpness=0.3, potency=0.5, weight=5.5, value=7.5)
Bronze = Material('bronze', 'Bronze', hardness=0.6, sharpness=0.8, potency=0.6, weight=7, value=8)
Iron = Material('iron', 'Iron', hardness=0.7, sharpness=1, potency=0.6, weight=5.85, value=20)
Steel = Material('steel', 'Steel', hardness=0.8, sharpness=1.2, potency=0.8, weight=6.5, value=150)
material_templates = {
Skin.uid: Skin,
Flesh.uid: Flesh,
Fur.uid: Fur,
Leather.uid: Leather,
StuddedLeather.uid: StuddedLeather,
Wood.uid: Wood,
Scale.uid: Scale,
Bone.uid: Bone,
Stone.uid: Stone,
Silver.uid: Silver,
Gold.uid: Gold,
Chain.uid: Chain,
Bronze.uid: Bronze,
Iron.uid: Iron,
Steel.uid: Steel
}
|
[
"components.material.Material"
] |
[((112, 199), 'components.material.Material', 'Material', (['"""skin"""', '"""Skin"""'], {'hardness': '(0)', 'sharpness': '(0)', 'potency': '(0.2)', 'weight': '(0.1)', 'value': '(0)'}), "('skin', 'Skin', hardness=0, sharpness=0, potency=0.2, weight=0.1,\n    value=0)\n", (120, 199), False, 'from components.material import Material\n'), ((204, 295), 'components.material.Material', 'Material', (['"""flesh"""', '"""Flesh"""'], {'hardness': '(0)', 'sharpness': '(0)', 'potency': '(0.2)', 'weight': '(0.15)', 'value': '(0)'}), "('flesh', 'Flesh', hardness=0, sharpness=0, potency=0.2, weight=\n    0.15, value=0)\n", (212, 295), False, 'from components.material import Material\n'), ((297, 382), 'components.material.Material', 'Material', (['"""fur"""', '"""Fur"""'], {'hardness': '(0.1)', 'sharpness': '(0)', 'potency': '(0.2)', 'weight': '(1)', 'value': '(1)'}), "('fur', 'Fur', hardness=0.1, sharpness=0, potency=0.2, weight=1,\n    value=1)\n", (305, 382), False, 'from components.material import Material\n'), ((389, 482), 'components.material.Material', 'Material', (['"""leather"""', '"""Leather"""'], {'hardness': '(0.1)', 'sharpness': '(0)', 'potency': '(0.2)', 'weight': '(1)', 'value': '(1)'}), "('leather', 'Leather', hardness=0.1, sharpness=0, potency=0.2,\n    weight=1, value=1)\n", (397, 482), False, 'from components.material import Material\n'), ((496, 609), 'components.material.Material', 'Material', (['"""studded_leather"""', '"""Studded Leather"""'], {'hardness': '(0.2)', 'sharpness': '(0)', 'potency': '(0.2)', 'weight': '(1.3)', 'value': '(4.5)'}), "('studded_leather', 'Studded Leather', hardness=0.2, sharpness=0,\n    potency=0.2, weight=1.3, value=4.5)\n", (504, 609), False, 'from components.material import Material\n'), ((613, 704), 'components.material.Material', 'Material', (['"""wood"""', '"""Wood"""'], {'hardness': '(0.3)', 'sharpness': '(0.4)', 'potency': '(0.5)', 'weight': '(3)', 'value': '(0.5)'}), "('wood', 'Wood', hardness=0.3, sharpness=0.4, potency=0.5, weight=3,\n    value=0.5)\n", (621, 704), False, 'from components.material import Material\n'), ((709, 803), 'components.material.Material', 'Material', (['"""scale"""', '"""Scale"""'], {'hardness': '(0.4)', 'sharpness': '(0.5)', 'potency': '(0.4)', 'weight': '(4.5)', 'value': '(5)'}), "('scale', 'Scale', hardness=0.4, sharpness=0.5, potency=0.4, weight\n    =4.5, value=5)\n", (717, 803), False, 'from components.material import Material\n'), ((806, 895), 'components.material.Material', 'Material', (['"""bone"""', '"""Bone"""'], {'hardness': '(0.5)', 'sharpness': '(0.5)', 'potency': '(0.5)', 'weight': '(5)', 'value': '(1)'}), "('bone', 'Bone', hardness=0.5, sharpness=0.5, potency=0.5, weight=5,\n    value=1)\n", (814, 895), False, 'from components.material import Material\n'), ((900, 994), 'components.material.Material', 'Material', (['"""stone"""', '"""Stone"""'], {'hardness': '(0.5)', 'sharpness': '(0.2)', 'potency': '(0.1)', 'weight': '(6)', 'value': '(0.1)'}), "('stone', 'Stone', hardness=0.5, sharpness=0.2, potency=0.1, weight\n    =6, value=0.1)\n", (908, 994), False, 'from components.material import Material\n'), ((999, 1093), 'components.material.Material', 'Material', (['"""silver"""', '"""Silver"""'], {'hardness': '(0.5)', 'sharpness': '(0.5)', 'potency': '(1)', 'weight': '(6)', 'value': '(125)'}), "('silver', 'Silver', hardness=0.5, sharpness=0.5, potency=1, weight\n    =6, value=125)\n", (1007, 1093), False, 'from components.material import Material\n'), ((1096, 1185), 'components.material.Material', 'Material', (['"""gold"""', '"""Gold"""'], {'hardness': '(0.5)', 'sharpness': '(0.5)', 'potency': '(2)', 'weight': '(6)', 'value': '(250)'}), "('gold', 'Gold', hardness=0.5, sharpness=0.5, potency=2, weight=6,\n    value=250)\n", (1104, 1185), False, 'from components.material import Material\n'), ((1190, 1286), 'components.material.Material', 'Material', (['"""chain"""', '"""Chain"""'], {'hardness': '(0.6)', 'sharpness': '(0.3)', 'potency': '(0.5)', 'weight': '(5.5)', 'value': '(7.5)'}), "('chain', 'Chain', hardness=0.6, sharpness=0.3, potency=0.5, weight\n    =5.5, value=7.5)\n", (1198, 1286), False, 'from components.material import Material\n'), ((1291, 1384), 'components.material.Material', 'Material', (['"""bronze"""', '"""Bronze"""'], {'hardness': '(0.6)', 'sharpness': '(0.8)', 'potency': '(0.6)', 'weight': '(7)', 'value': '(8)'}), "('bronze', 'Bronze', hardness=0.6, sharpness=0.8, potency=0.6,\n    weight=7, value=8)\n", (1299, 1384), False, 'from components.material import Material\n'), ((1388, 1480), 'components.material.Material', 'Material', (['"""iron"""', '"""Iron"""'], {'hardness': '(0.7)', 'sharpness': '(1)', 'potency': '(0.6)', 'weight': '(5.85)', 'value': '(20)'}), "('iron', 'Iron', hardness=0.7, sharpness=1, potency=0.6, weight=\n    5.85, value=20)\n", (1396, 1480), False, 'from components.material import Material\n'), ((1484, 1580), 'components.material.Material', 'Material', (['"""steel"""', '"""Steel"""'], {'hardness': '(0.8)', 'sharpness': '(1.2)', 'potency': '(0.8)', 'weight': '(6.5)', 'value': '(150)'}), "('steel', 'Steel', hardness=0.8, sharpness=1.2, potency=0.8, weight\n    =6.5, value=150)\n", (1492, 1580), False, 'from components.material import Material\n')]
|
# coding: utf-8
"""
Reseplaneraren
Provides access to Västtrafik journey planner
OpenAPI spec version: 1.10.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class Vehicle(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'lcolor': 'str',
'prod_class': 'str',
'bcolor': 'str',
'direction': 'int',
'name': 'str',
'gid': 'str',
'delay': 'int',
'y': 'float',
'x': 'float'
}
attribute_map = {
'lcolor': 'lcolor',
'prod_class': 'prodClass',
'bcolor': 'bcolor',
'direction': 'direction',
'name': 'name',
'gid': 'gid',
'delay': 'delay',
'y': 'y',
'x': 'x'
}
def __init__(self, lcolor=None, prod_class=None, bcolor=None, direction=None, name=None, gid=None, delay=None, y=None, x=None):
"""
Vehicle - a model defined in Swagger
"""
self._lcolor = None
self._prod_class = None
self._bcolor = None
self._direction = None
self._name = None
self._gid = None
self._delay = None
self._y = None
self._x = None
self.lcolor = lcolor
self.prod_class = prod_class
self.bcolor = bcolor
self.direction = direction
self.name = name
self.gid = gid
self.delay = delay
self.y = y
self.x = x
@property
def lcolor(self):
"""
Gets the lcolor of this Vehicle.
Line color of the journey
:return: The lcolor of this Vehicle.
:rtype: str
"""
return self._lcolor
@lcolor.setter
def lcolor(self, lcolor):
"""
Sets the lcolor of this Vehicle.
Line color of the journey
:param lcolor: The lcolor of this Vehicle.
:type: str
"""
if lcolor is None:
raise ValueError("Invalid value for `lcolor`, must not be `None`")
self._lcolor = lcolor
@property
def prod_class(self):
"""
Gets the prod_class of this Vehicle.
Product class
:return: The prod_class of this Vehicle.
:rtype: str
"""
return self._prod_class
@prod_class.setter
def prod_class(self, prod_class):
"""
Sets the prod_class of this Vehicle.
Product class
:param prod_class: The prod_class of this Vehicle.
:type: str
"""
if prod_class is None:
raise ValueError("Invalid value for `prod_class`, must not be `None`")
allowed_values = ["VAS", "LDT", "REG", "BUS", "BOAT", "TRAM", "TAXI"]
if prod_class not in allowed_values:
raise ValueError(
"Invalid value for `prod_class` ({0}), must be one of {1}"
.format(prod_class, allowed_values)
)
self._prod_class = prod_class
@property
def bcolor(self):
"""
Gets the bcolor of this Vehicle.
Background color of the journey
:return: The bcolor of this Vehicle.
:rtype: str
"""
return self._bcolor
@bcolor.setter
def bcolor(self, bcolor):
"""
Sets the bcolor of this Vehicle.
Background color of the journey
:param bcolor: The bcolor of this Vehicle.
:type: str
"""
if bcolor is None:
raise ValueError("Invalid value for `bcolor`, must not be `None`")
self._bcolor = bcolor
@property
def direction(self):
"""
Gets the direction of this Vehicle.
        Direction of the vehicle. This is a value between 0 and 31 that describes a direction vector
:return: The direction of this Vehicle.
:rtype: int
"""
return self._direction
@direction.setter
def direction(self, direction):
"""
Sets the direction of this Vehicle.
        Direction of the vehicle. This is a value between 0 and 31 that describes a direction vector
:param direction: The direction of this Vehicle.
:type: int
"""
if direction is None:
raise ValueError("Invalid value for `direction`, must not be `None`")
self._direction = direction
@property
def name(self):
"""
Gets the name of this Vehicle.
Journey name
:return: The name of this Vehicle.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this Vehicle.
Journey name
:param name: The name of this Vehicle.
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`")
self._name = name
@property
def gid(self):
"""
Gets the gid of this Vehicle.
Service GID
:return: The gid of this Vehicle.
:rtype: str
"""
return self._gid
@gid.setter
def gid(self, gid):
"""
Sets the gid of this Vehicle.
Service GID
:param gid: The gid of this Vehicle.
:type: str
"""
if gid is None:
raise ValueError("Invalid value for `gid`, must not be `None`")
self._gid = gid
@property
def delay(self):
"""
Gets the delay of this Vehicle.
Current delay of the vehicle in minutes, can be negative, zero or positive
:return: The delay of this Vehicle.
:rtype: int
"""
return self._delay
@delay.setter
def delay(self, delay):
"""
Sets the delay of this Vehicle.
Current delay of the vehicle in minutes, can be negative, zero or positive
:param delay: The delay of this Vehicle.
:type: int
"""
if delay is None:
raise ValueError("Invalid value for `delay`, must not be `None`")
self._delay = delay
@property
def y(self):
"""
Gets the y of this Vehicle.
Y coordinate (latitude) of the position in WGS84 * 1000000
:return: The y of this Vehicle.
:rtype: float
"""
return self._y
@y.setter
def y(self, y):
"""
Sets the y of this Vehicle.
Y coordinate (latitude) of the position in WGS84 * 1000000
:param y: The y of this Vehicle.
:type: float
"""
if y is None:
raise ValueError("Invalid value for `y`, must not be `None`")
self._y = y
@property
def x(self):
"""
Gets the x of this Vehicle.
X coordinate (longitude) of the position in WGS84 * 1000000
:return: The x of this Vehicle.
:rtype: float
"""
return self._x
@x.setter
def x(self, x):
"""
Sets the x of this Vehicle.
X coordinate (longitude) of the position in WGS84 * 1000000
:param x: The x of this Vehicle.
:type: float
"""
if x is None:
raise ValueError("Invalid value for `x`, must not be `None`")
self._x = x
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
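        # Recursively serialize attributes, unwrapping lists and nested models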
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, Vehicle):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
[
"six.iteritems"
] |
[((7801, 7830), 'six.iteritems', 'iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (7810, 7830), False, 'from six import iteritems\n')]
|
from easydict import EasyDict
cartpole_dqfd_config = dict(
exp_name='cartpole_dqfd_seed0',
env=dict(
collector_env_num=8,
evaluator_env_num=5,
n_evaluator_episode=5,
stop_value=195,
),
policy=dict(
cuda=True,
priority=True,
model=dict(
obs_shape=4,
action_shape=2,
encoder_hidden_size_list=[128, 128, 64],
dueling=True,
),
nstep=3,
discount_factor=0.97,
learn=dict(
batch_size=64,
learning_rate=0.001,
lambda1=1, # n-step return
lambda2=3.0, # supervised loss
            # Setting this to 0 (L2 loss = 0) together with expert_replay_buffer_size = 0
            # and lambda1 = 0 recovers one-step PDD DQN.
lambda3=0, # L2 regularization
per_train_iter_k=10,
            expert_replay_buffer_size=10000,  # sets the size of the expert replay buffer
),
collect=dict(
n_sample=8,
# Users should add their own model path here. Model path should lead to a model.
# Absolute path is recommended.
# In DI-engine, it is ``exp_name/ckpt/ckpt_best.pth.tar``.
model_path='model_path_placeholder',
),
        # note: evaluation runs once every eval_freq training iterations
eval=dict(evaluator=dict(eval_freq=50, )),
other=dict(
eps=dict(
type='exp',
start=0.95,
end=0.1,
decay=10000,
),
replay_buffer=dict(replay_buffer_size=20000, ),
),
),
)
cartpole_dqfd_config = EasyDict(cartpole_dqfd_config)
main_config = cartpole_dqfd_config
cartpole_dqfd_create_config = dict(
env=dict(
type='cartpole',
import_names=['dizoo.classic_control.cartpole.envs.cartpole_env'],
),
env_manager=dict(type='base'),
policy=dict(type='dqfd'),
)
cartpole_dqfd_create_config = EasyDict(cartpole_dqfd_create_config)
create_config = cartpole_dqfd_create_config
if __name__ == "__main__":
    # Alternatively, you can run `ding -m serial_dqfd -c cartpole_dqfd_config.py -s 0`
    # and then input ``cartpole_dqfd_config.py`` when prompted.
    # We need to pass the dqfd config because we have to borrow its ``_get_train_sample``
    # function in the collector part, even though the expert model may be generated by
    # other Q-learning algorithms.
from ding.entry.serial_entry_dqfd import serial_pipeline_dqfd
from dizoo.classic_control.cartpole.config import cartpole_dqfd_config, cartpole_dqfd_create_config
expert_main_config = cartpole_dqfd_config
expert_create_config = cartpole_dqfd_create_config
serial_pipeline_dqfd((main_config, create_config), (expert_main_config, expert_create_config), seed=0)
|
[
"ding.entry.serial_entry_dqfd.serial_pipeline_dqfd",
"easydict.EasyDict"
] |
[((1687, 1717), 'easydict.EasyDict', 'EasyDict', (['cartpole_dqfd_config'], {}), '(cartpole_dqfd_config)\n', (1695, 1717), False, 'from easydict import EasyDict\n'), ((2007, 2044), 'easydict.EasyDict', 'EasyDict', (['cartpole_dqfd_create_config'], {}), '(cartpole_dqfd_create_config)\n', (2015, 2044), False, 'from easydict import EasyDict\n'), ((2746, 2852), 'ding.entry.serial_entry_dqfd.serial_pipeline_dqfd', 'serial_pipeline_dqfd', (['(main_config, create_config)', '(expert_main_config, expert_create_config)'], {'seed': '(0)'}), '((main_config, create_config), (expert_main_config,\n expert_create_config), seed=0)\n', (2766, 2852), False, 'from ding.entry.serial_entry_dqfd import serial_pipeline_dqfd\n')]
|
import argparse
import yaml
import os
from glob import glob
import inspect
import sys
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir)
import time
import numpy as np
import torch
from torch.utils.data import DataLoader
import skimage.io as io
from segmentation_dataset import RawChromosomeDataset as Dataset
from loss import DiceLoss, evals
from models.UNet import UNet
from models.ResUNet import ResUNet
from models.PreactivationResUNet import PreactResUNet
from models.CENet import CE_Net
from models.Segnet import SegNet
from models.AttentionUnet import AttU_Net
from models.FCN import FCN_ResNet101
from models.Unet_nested import UNet_Nested
from models.DeepLabV3 import Deeplabv3_ResNet101
from models.PSPNet import PSPNet
def main(args):
# args.model = "preactivation_resunet"
# args.model_path = "preactivation_resunet-20210416T1703"
# args.weight_num = 1
# args.images = "./datasets/raw_chromosome_data".format(Dataset.name)
# args.batch_size = 2
# args.test_results = False
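    # Instantiate the segmentation architecture selected by --model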
if args.model == "unet":
model = UNet(
in_channels=Dataset.in_channels,
num_classes=Dataset.num_classes,
init_features=32,
)
net_name = UNet.net_name
elif args.model == "resunet":
model = ResUNet(
in_channels=Dataset.in_channels,
num_classes=Dataset.num_classes,
init_features=32,
)
net_name = "resunet"
elif args.model == "preactivation_resunet":
model = PreactResUNet(
in_channels=Dataset.in_channels,
num_classes=Dataset.num_classes,
init_features=32,
)
net_name = "preactivation_resunet"
elif args.model == "cenet":
model = CE_Net(in_channels=Dataset.in_channels, num_classes=Dataset.num_classes)
net_name = "cenet"
elif args.model == "segnet":
model = SegNet(in_channels=Dataset.in_channels, num_classes=Dataset.num_classes)
net_name = "segnet"
elif args.model == "nested_unet":
model = UNet_Nested(
in_channels=Dataset.in_channels, num_classes=Dataset.num_classes
)
net_name = "nested_unet"
elif args.model == "attention_unet":
model = AttU_Net(
in_channels=Dataset.in_channels, num_classes=Dataset.num_classes
)
net_name = "attention_unet"
elif args.model == "fcn_resnet101":
model = FCN_ResNet101(in_channels=1, num_classes=3)
net_name = "fcn_resnet101"
elif args.model == "deeplabv3_resnet101":
model = Deeplabv3_ResNet101(in_channels=1, num_classes=3)
net_name = "deeplabv3_resnet101"
elif args.model == "pspnet":
model = PSPNet(
num_classes=Dataset.num_classes, pretrained=False, backend="resnet101"
)
net_name = "pspnet"
device = torch.device("cpu" if not torch.cuda.is_available() else args.device)
model.to(device)
weights_dir = "output/{}/{}/weights".format(Dataset.name, args.model_path)
print(weights_dir)
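    # Pick the checkpoint file whose name matches the requested weight number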
model_name = glob(weights_dir + "/{}-{}*".format(net_name, args.weight_num))[0]
state_dict = torch.load(model_name, map_location=device)
model.load_state_dict(state_dict)
test_dir = "output/{}/{}/test".format(Dataset.name, args.model_path)
model.eval()
dsc = DiceLoss()
evaluations_np = []
total_dsc_loss = []
loader = data_loaders(args)
loaders = {"test": loader}
start = time.time()
print("clock started")
test_img_num = 1
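    # Run inference batch by batch, accumulating Dice loss and per-image metrics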
for i, data in enumerate(loaders["test"], 0):
x, y_true = data
x, y_true = x.to(device, dtype=torch.float), y_true.to(
device, dtype=torch.float
)
with torch.set_grad_enabled(False):
y_pred = model(x)
dsc_loss = dsc(y_pred, y_true)
evaluations_ = evals(y_pred, y_true)
evaluations_np += evaluations_
total_dsc_loss.append(dsc_loss.item())
if args.test_results:
y_pred_np = y_pred.detach().cpu().numpy()
x_np = x.detach().cpu().numpy()
for img_num in range(y_pred_np.shape[0]):
for mask_num in range(y_pred_np.shape[1]):
io.imsave(
os.path.join(
test_dir,
"{}_label{}.png".format(test_img_num, mask_num),
),
y_pred_np[img_num, mask_num, :, :],
)
for mask_num in range(x_np.shape[1]):
io.imsave(
os.path.join(test_dir, "%d_image.png" % test_img_num),
x_np[img_num, mask_num, :, :] * 255,
)
test_img_num += 1
end = time.time()
print("{} seconds past".format(end - start))
evaluations_np = np.array(evaluations_np)
with open(
"output/{}/{}/test-eval.npy".format(Dataset.name, args.model_path), "wb"
) as f:
np.save(f, evaluations_np)
mean_dsc_loss = float(np.mean(total_dsc_loss))
mean_DSC = 1 - mean_dsc_loss
metrics = {
"mean_dsc_loss": mean_dsc_loss,
"mean_DSC": mean_DSC,
}
with open(
"output/{}/{}/metrics.yaml".format(Dataset.name, args.model_path), "w"
) as fp:
yaml.dump(metrics, fp)
print(f"mean dsc loss={mean_dsc_loss}")
print(f"mean DSC={mean_DSC}")
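# Note on the metrics above: mean_DSC is simply 1 - mean Dice loss. A minimal
# sketch of the soft-Dice score this assumes (the actual DiceLoss class lives
# in loss.py, which is not shown here):
#
#     def soft_dice(pred, target, eps=1e-6):
#         inter = (pred * target).sum()
#         return (2.0 * inter + eps) / (pred.sum() + target.sum() + eps)
#
# so dice_loss = 1 - soft_dice and the two printed numbers always sum to 1.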
def data_loaders(args):
dataset_test = datasets(args)
return DataLoader(
dataset_test,
batch_size=args.batch_size,
drop_last=False,
num_workers=args.workers,
)
def datasets(args):
return Dataset(
args,
images_dir=args.images,
subset="test",
image_size=args.image_size,
random_sampling=False,
)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Semantic segmentation of G-banding chromosome Images"
)
parser.add_argument(
"--model",
type=str,
default="preactivation_resunet",
help="choose model",
)
parser.add_argument(
"--weight-num",
type=int,
default=0,
help="weight number for inference",
)
parser.add_argument(
"--model-path", type=str, default="", help="path to weights file"
)
parser.add_argument(
"--batch-size",
type=int,
default=2,
help="input batch size for training (default: 2)",
)
parser.add_argument(
"--device",
type=str,
default="cuda:0",
help="device for training (default: cuda:0)",
)
parser.add_argument(
"--workers",
type=int,
default=1,
help="number of workers for data loading (default: 1)",
)
parser.add_argument(
"--images",
type=str,
default="./datasets/{}_data/train".format(Dataset.name),
help="root folder with images",
)
parser.add_argument(
"--image-size",
type=int,
default=Dataset.img_size,
help="target input image size (default: 256x256)",
)
parser.add_argument(
"--test-results",
        # argparse's type=bool treats any non-empty string as True, so a
        # store_true flag is the reliable way to expose an on/off option
        action="store_true",
        help="Do you want to output the test results? (default: False)",
)
args = parser.parse_args()
main(args)
|
[
"argparse.ArgumentParser",
"segmentation_dataset.RawChromosomeDataset",
"models.UNet.UNet",
"yaml.dump",
"numpy.mean",
"models.Segnet.SegNet",
"models.AttentionUnet.AttU_Net",
"loss.DiceLoss",
"os.path.join",
"torch.utils.data.DataLoader",
"os.path.dirname",
"torch.load",
"models.FCN.FCN_ResNet101",
"numpy.save",
"models.ResUNet.ResUNet",
"torch.cuda.is_available",
"torch.set_grad_enabled",
"inspect.currentframe",
"models.CENet.CE_Net",
"models.DeepLabV3.Deeplabv3_ResNet101",
"models.PreactivationResUNet.PreactResUNet",
"loss.evals",
"models.Unet_nested.UNet_Nested",
"sys.path.insert",
"time.time",
"numpy.array",
"models.PSPNet.PSPNet"
] |
[((196, 224), 'os.path.dirname', 'os.path.dirname', (['current_dir'], {}), '(current_dir)\n', (211, 224), False, 'import os\n'), ((226, 256), 'sys.path.insert', 'sys.path.insert', (['(0)', 'parent_dir'], {}), '(0, parent_dir)\n', (241, 256), False, 'import sys\n'), ((3355, 3398), 'torch.load', 'torch.load', (['model_name'], {'map_location': 'device'}), '(model_name, map_location=device)\n', (3365, 3398), False, 'import torch\n'), ((3547, 3557), 'loss.DiceLoss', 'DiceLoss', ([], {}), '()\n', (3555, 3557), False, 'from loss import DiceLoss, evals\n'), ((3692, 3703), 'time.time', 'time.time', ([], {}), '()\n', (3701, 3703), False, 'import time\n'), ((5142, 5153), 'time.time', 'time.time', ([], {}), '()\n', (5151, 5153), False, 'import time\n'), ((5228, 5252), 'numpy.array', 'np.array', (['evaluations_np'], {}), '(evaluations_np)\n', (5236, 5252), True, 'import numpy as np\n'), ((5884, 5983), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset_test'], {'batch_size': 'args.batch_size', 'drop_last': '(False)', 'num_workers': 'args.workers'}), '(dataset_test, batch_size=args.batch_size, drop_last=False,\n    num_workers=args.workers)\n', (5894, 5983), False, 'from torch.utils.data import DataLoader\n'), ((6061, 6169), 'segmentation_dataset.RawChromosomeDataset', 'Dataset', (['args'], {'images_dir': 'args.images', 'subset': '"""test"""', 'image_size': 'args.image_size', 'random_sampling': '(False)'}), "(args, images_dir=args.images, subset='test', image_size=args.\n    image_size, random_sampling=False)\n", (6068, 6169), True, 'from segmentation_dataset import RawChromosomeDataset as Dataset\n'), ((6264, 6360), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Semantic segmentation of G-banding chromosome Images"""'}), "(description=\n    'Semantic segmentation of G-banding chromosome Images')\n", (6287, 6360), False, 'import argparse\n'), ((1212, 1304), 'models.UNet.UNet', 'UNet', ([], {'in_channels': 'Dataset.in_channels', 'num_classes': 'Dataset.num_classes', 'init_features': '(32)'}), '(in_channels=Dataset.in_channels, num_classes=Dataset.num_classes,\n    init_features=32)\n', (1216, 1304), False, 'from models.UNet import UNet\n'), ((5373, 5399), 'numpy.save', 'np.save', (['f', 'evaluations_np'], {}), '(f, evaluations_np)\n', (5380, 5399), True, 'import numpy as np\n'), ((5429, 5452), 'numpy.mean', 'np.mean', (['total_dsc_loss'], {}), '(total_dsc_loss)\n', (5436, 5452), True, 'import numpy as np\n'), ((5703, 5725), 'yaml.dump', 'yaml.dump', (['metrics', 'fp'], {}), '(metrics, fp)\n', (5712, 5725), False, 'import yaml\n'), ((156, 178), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (176, 178), False, 'import inspect\n'), ((1438, 1533), 'models.ResUNet.ResUNet', 'ResUNet', ([], {'in_channels': 'Dataset.in_channels', 'num_classes': 'Dataset.num_classes', 'init_features': '(32)'}), '(in_channels=Dataset.in_channels, num_classes=Dataset.num_classes,\n    init_features=32)\n', (1445, 1533), False, 'from models.ResUNet import ResUNet\n'), ((3966, 3995), 'torch.set_grad_enabled', 'torch.set_grad_enabled', (['(False)'], {}), '(False)\n', (3988, 3995), False, 'import torch\n'), ((4102, 4123), 'loss.evals', 'evals', (['y_pred', 'y_true'], {}), '(y_pred, y_true)\n', (4107, 4123), False, 'from loss import DiceLoss, evals\n'), ((1677, 1779), 'models.PreactivationResUNet.PreactResUNet', 'PreactResUNet', ([], {'in_channels': 'Dataset.in_channels', 'num_classes': 'Dataset.num_classes', 'init_features': '(32)'}), '(in_channels=Dataset.in_channels, num_classes=Dataset.\n    num_classes, init_features=32)\n', (1690, 1779), False, 'from models.PreactivationResUNet import PreactResUNet\n'), ((3080, 3105), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3103, 3105), False, 'import torch\n'), ((1920, 1992), 'models.CENet.CE_Net', 'CE_Net', ([], {'in_channels': 'Dataset.in_channels', 'num_classes': 'Dataset.num_classes'}), '(in_channels=Dataset.in_channels, num_classes=Dataset.num_classes)\n', (1926, 1992), False, 'from models.CENet import CE_Net\n'), ((2072, 2144), 'models.Segnet.SegNet', 'SegNet', ([], {'in_channels': 'Dataset.in_channels', 'num_classes': 'Dataset.num_classes'}), '(in_channels=Dataset.in_channels, num_classes=Dataset.num_classes)\n', (2078, 2144), False, 'from models.Segnet import SegNet\n'), ((2230, 2307), 'models.Unet_nested.UNet_Nested', 'UNet_Nested', ([], {'in_channels': 'Dataset.in_channels', 'num_classes': 'Dataset.num_classes'}), '(in_channels=Dataset.in_channels, num_classes=Dataset.num_classes)\n', (2241, 2307), False, 'from models.Unet_nested import UNet_Nested\n'), ((4942, 4995), 'os.path.join', 'os.path.join', (['test_dir', "('%d_image.png' % test_img_num)"], {}), "(test_dir, '%d_image.png' % test_img_num)\n", (4954, 4995), False, 'import os\n'), ((2425, 2499), 'models.AttentionUnet.AttU_Net', 'AttU_Net', ([], {'in_channels': 'Dataset.in_channels', 'num_classes': 'Dataset.num_classes'}), '(in_channels=Dataset.in_channels, num_classes=Dataset.num_classes)\n', (2433, 2499), False, 'from models.AttentionUnet import AttU_Net\n'), ((2619, 2662), 'models.FCN.FCN_ResNet101', 'FCN_ResNet101', ([], {'in_channels': '(1)', 'num_classes': '(3)'}), '(in_channels=1, num_classes=3)\n', (2632, 2662), False, 'from models.FCN import FCN_ResNet101\n'), ((2763, 2812), 'models.DeepLabV3.Deeplabv3_ResNet101', 'Deeplabv3_ResNet101', ([], {'in_channels': '(1)', 'num_classes': '(3)'}), '(in_channels=1, num_classes=3)\n', (2782, 2812), False, 'from models.DeepLabV3 import Deeplabv3_ResNet101\n'), ((2906, 2984), 'models.PSPNet.PSPNet', 'PSPNet', ([], {'num_classes': 'Dataset.num_classes', 'pretrained': '(False)', 'backend': '"""resnet101"""'}), "(num_classes=Dataset.num_classes, pretrained=False, backend='resnet101')\n", (2912, 2984), False, 'from models.PSPNet import PSPNet\n')]
|
import os
import struct
import numpy as np
import xarray as xr
import netCDF4 as ds
from pathlib import Path
import matplotlib.pyplot as plt
import itertools
import Homogenizer_GUI
from enum import Enum
from collections import OrderedDict
import pickle
class UserPrefs(Enum):
ScanFoldersPath = 0
CalculateReconstructedImages = 1
CalculateFieldMaps = 2
CalculateInterpolatedFieldMap = 3
SaveReconstructedImages = 4
SaveFieldMaps = 5
SaveInterpolatedFieldMap = 6
ShowReconstructedImages = 7
ShowFieldMaps = 8
ShowInterpolatedFieldMap = 9
class Homogenizer:
def __init__(self):
self.hGUI = None
self.submit_button = "Submit"
self.gamma = 48.52*10**6
self.te_array = []
self.delta_te = 0.0001 #standard initialization
self.dimensions = np.array([128,128]) #standard initialization
self.scan_folders_path = None
self.save_path = None
self.fids_dict = OrderedDict([])
self.reconstructed_image_dict = OrderedDict([])
self.field_map_dict = OrderedDict([])
self.interpolated_field_map = OrderedDict([])
def get_input(self, user_pref: UserPrefs):
return self.hGUI.user_prefs[user_pref.value]
def display_image(self, image_list, abs_values = False):
        '''
        Displays the given images. Set abs_values to True to display the
        absolute values of the images.
        '''
for image in image_list:
if abs_values:
image = abs(image)
plt.title("Reconstructed Image")
else:
plt.title("Field Map - B[T] Values as function of location")
plt.xlabel("Location")
plt.ylabel("Location")
plt.imshow(image)
plt.colorbar()
plt.show()
def get_tes(self, folder_path):
'''
Finds the TE value in a specific scan (the information exists in the 'method' file of each scan)
Then creates an array of all TEs
'''
dir_list = os.listdir(folder_path)
for scan_dir in dir_list:
file_path = folder_path + '\\' + scan_dir
if os.path.isdir(file_path):
method_path = self.find_file_by_name(file_path, 'method')
with open(method_path, mode='rb') as file:
method_r = file.read()
f=method_r.find(b'EchoTime=')
te_locked=method_r[f+9:f+12]
te_str=str(te_locked)[2:5]
if (str(te_str).find('n') != -1):
te=int(te_str[0])
else:
te=float(te_str)
self.te_array.append(te*10**-3)
del self.te_array[-1]
self.te_array = np.array(self.te_array)
self.delta_te = self.te_array[1] - self.te_array[0]
def get_dimensions(self, folder_path):
'''
Finds the dimensions of the matrix (the information exists in the 'method' file of each scan)
'''
dir_list = os.listdir(folder_path)
for scan_dir in dir_list:
file_path = folder_path + '\\' + scan_dir
if os.path.isdir(file_path):
method_path = self.find_file_by_name(file_path, 'method')
break
with open(method_path, mode='rb') as file:
method_r = file.read()
f=method_r.find(b'PVM_Matrix=( 2 )\n')
dimension_locked=method_r[f+17:f+24]
arr=np.zeros(2, np.int16)
arr[0]=(str(dimension_locked)[2:5])
arr[0]=int(arr[0])
arr[1]=(str(dimension_locked)[6:9])
arr[1]=int(arr[1])
self.dimensions = arr
pickle.dump(self.dimensions, open("dimensions.dat","wb"))
def find_file_by_name(self, containing_folder, name_string):
'''
Finds and returns the fid file within the given folder
'''
pickle.dump(containing_folder, open("containing_folder.dat","wb"))
pickle.dump(name_string, open("name_string.dat","wb"))
dir_list = os.listdir(containing_folder)
for file_name in dir_list:
if file_name == name_string:
file_path = containing_folder + '\\' + file_name
return file_path
def save_arrays_to_disk(self, save_path, arrays_dictionary: dict, name_prefix: str):
"""
        Converts every numpy array in arrays_dictionary to an xarray and saves it in the given path as a NetCDF file.
"""
if not os.path.exists(save_path):
os.makedirs(save_path)
for key, array in arrays_dictionary.items():
x_arr = xr.DataArray(array)
file_name = name_prefix + str(key)
x_arr.to_netcdf(f'{save_path}\\{file_name}.nc', mode='w')
def reconstruct_images_from_fids(self, fid_dict):
for name_prefix, fid in fid_dict.items():
self.reconstructed_image_dict[name_prefix] = self.reconstruct_image(fid, self.dimensions)
def reconstruct_image(self, fid_arr, dimensions):
'''
Calculates the K space matrix -> calculates the
reconstructed image and returns it
'''
pickle.dump(fid_arr, open("fid_arr.dat","wb"))
real_vals = fid_arr[:-1:2]
imag_vals = fid_arr[1::2]
complex_vals = real_vals + 1j*imag_vals
if (len(fid_arr) == dimensions[0]*dimensions[1]*2):
k_space_scan = np.reshape(complex_vals,(dimensions[0],dimensions[1]))
k_casting = k_space_scan.astype(complex)
img = np.fft.fftshift(np.fft.ifft2(k_casting))
return img
else:
raise IndexError('Fid_arr cannot be reshaped to these dimensions')
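    # Implementation note: the FID stream interleaves real/imaginary samples,
    # so a dimensions[0] x dimensions[1] image needs exactly
    # dimensions[0] * dimensions[1] * 2 values (hence the length check above);
    # np.fft.fftshift then centres the low frequencies of the inverse 2-D FFT.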
def calc_field_maps_from_fids (self, fid_dict, dimension):
        ''' Gets an ordered dictionary of FID files and calculates a dictionary of field maps
by running on pairs of FID files
'''
pickle.dump(fid_dict, open("fid_dict.dat","wb"))
self.reconstruct_images_from_fids(fid_dict)
image_pairs = self.pairwise(self.reconstructed_image_dict.values())
name_index = 0
name_list = list(self.reconstructed_image_dict.keys())
for img1, img2 in image_pairs:
field_map_prefix = name_list[name_index] + name_list[name_index+1]
name_index +=1
self.field_map_dict[field_map_prefix] = self.calc_field_map_from_reconstructed_images(img1,img2)
def calc_field_map_from_reconstructed_images(self, img1,img2):
pickle.dump(img1, open("img1.dat","wb"))
pickle.dump(img2, open("img2.dat","wb"))
phase_map = self.compute_phase(img1,img2)
bmap = phase_map/((2*np.pi*self.gamma*(self.delta_te)))
return bmap
def compute_phase(self, img1,img2):
'''
Gets two reconstructed images and computes one phase image
'''
conj_img2 = np.conj(img2)
if (img1.shape[1] == img2.shape[0]):
multiplic_img1_img2 = conj_img2*img1
phase_map = np.angle(multiplic_img1_img2)
return phase_map
else:
raise IndexError('Size of matrices not suitable for linear multiplication')
def pairwise(self, object_list):
'''
Creates pairs of objects from a list of objects
list_of_fids -> (fid0,fid1), (fid1,fid2), (fid2, fid3), and so forth...
'''
pickle.dump(list(object_list), open("object_list.dat","wb"))
obj1, obj2 = itertools.tee(object_list)
next(obj2, None)
return zip(obj1, obj2)
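    # Illustration (hypothetical values): pairwise([a, b, c]) yields
    # (a, b), (b, c). itertools.tee duplicates the iterator and next()
    # advances the second copy by one element before zipping.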
def interpolate_field_map_from_fids(self, fid_dict):
'''
Gets an ordered dictionary of FID files and calculates one interpolated field map
'''
signals_amount = len(fid_dict)
self.calc_field_maps_from_fids(fid_dict, self.dimensions)
self.interpolate_field_map(list(self.field_map_dict.values()), self.te_array, self.dimensions,signals_amount)
def interpolate_field_map(self, field_maps_list,te_values, dimension, signals_amount):
'''
Calculates one interpolated field map from all the calculated field maps
'''
pickle.dump(field_maps_list, open("field_maps_list.dat","wb"))
pickle.dump(te_values, open("te_values.dat","wb"))
pickle.dump(signals_amount, open("signals_amoung.dat","wb"))
slope=np.zeros((dimension[0],dimension[1]))
value_vec_in_phase_map = np.zeros(len(field_maps_list))
for x in range(dimension[0]-1):
for y in range(dimension[1]-1):
for z in range(signals_amount-1):
value_vec_in_phase_map[z] = field_maps_list[z][x,y]
s,intercept = np.polyfit((te_values[:]),value_vec_in_phase_map,1)
slope[x,y] = (s)
interp_b=slope/self.gamma
self.interpolated_field_map = OrderedDict([('',interp_b)])
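    # Reading of the fit above: for every pixel, np.polyfit fits
    # value = slope * TE + intercept across the field maps, and dividing the
    # slope by gamma yields the interpolated field value stored in interp_b
    # (a sketch of the intent as read from the code, not a validated
    # physical derivation).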
def create_fid_dict(self, folder_path):
'''
Creates an ordered dictionary of numpy arrays from fid files
'''
pickle.dump(folder_path, open("folder_path.dat","wb"))
dir_list = os.listdir(folder_path)
for scan_dir in dir_list:
file_path = folder_path + '\\' + scan_dir
if os.path.isdir(file_path):
fid_path = self.find_file_by_name(file_path, 'fid')
if isinstance(fid_path, str):
self.fids_dict[scan_dir] = self.fid_to_nparray(fid_path)
def fid_to_nparray(self, fid_path):
'''
Opens a binary file and inserts it to a numpy array
'''
pickle.dump(fid_path, open("fid_path.dat","wb"))
with open(fid_path, mode='rb') as file: # b is important -> binary
fid_r = file.read()
fid_l = list(struct.unpack("i" * ((len(fid_r) -4) // 4), fid_r[0:-4]))
fid_l.append(struct.unpack("i", fid_r[-4:])[0])
fid_arr = np.array(fid_l)
return fid_arr
def start(self):
'''
Triggers calculations begin with given inputs by the user throughout the GUI.
'''
self.scan_folders_path = self.hGUI.open_main_window()
# Starts job if user had pressed submit:
if self.hGUI.last_button_pressed == self.submit_button:
# Checks if user requested to save any files, and if so pops up a browser to choose path.
if (self.get_input(UserPrefs.SaveReconstructedImages)
or self.get_input(UserPrefs.SaveFieldMaps)
or self.get_input(UserPrefs.SaveInterpolatedFieldMap)
):
self.save_path = self.hGUI.request_save_path()
# Cancels the job if the user had closed the window / pressed "Cancel":
if self.hGUI.last_button_pressed != self.submit_button:
self.start()
return
if self.save_path == self.hGUI.default_folder_expression:
self.save_path = self.scan_folders_path
self.create_fid_dict(self.scan_folders_path)
self.get_dimensions(self.scan_folders_path)
self.get_tes(self.scan_folders_path)
# Checks what calculation the user had requested, and performs them:
if self.get_input(UserPrefs.CalculateReconstructedImages):
self.reconstruct_images_from_fids(self.fids_dict)
else:
if self.get_input(UserPrefs.CalculateFieldMaps):
self.calc_field_maps_from_fids(self.fids_dict, self.dimensions)
else:
self.interpolate_field_map_from_fids(self.fids_dict)
if self.get_input(UserPrefs.SaveInterpolatedFieldMap):
self.save_arrays_to_disk(self.save_path, self.interpolated_field_map,'Interpolated_field_map')
if self.get_input(UserPrefs.ShowInterpolatedFieldMap):
self.display_image(list(self.interpolated_field_map.values()))
if self.get_input(UserPrefs.SaveFieldMaps):
self.save_arrays_to_disk(self.save_path, self.field_map_dict, 'Field_map_')
if self.get_input(UserPrefs.ShowFieldMaps):
self.display_image(list(self.field_map_dict.values()))
if self.get_input(UserPrefs.SaveReconstructedImages):
[real_dict, imaginary_dict] = seperate_complex_values_dict(self.reconstructed_image_dict)
self.save_arrays_to_disk(self.save_path, real_dict, 'Reconstructed_image_real')
self.save_arrays_to_disk(self.save_path, imaginary_dict, 'Reconstructed_image_imaginary')
if self.get_input(UserPrefs.ShowReconstructedImages):
self.display_image(list(self.field_map_dict.values()), True)
def seperate_complex_values_dict(complex_dict):
    real_dict = OrderedDict([])
    imaginary_dict = OrderedDict([])
    for name, complexNum in complex_dict.items():
        real_dict[name] = complexNum.real
        imaginary_dict[name] = complexNum.imag
    return [real_dict, imaginary_dict]
if __name__ == "__main__":
homogenizer = Homogenizer()
homogenizer.hGUI = Homogenizer_GUI.Homogenizer_GUI()
homogenizer.start()
|
[
"matplotlib.pyplot.title",
"Homogenizer_GUI.Homogenizer_GUI",
"numpy.polyfit",
"numpy.angle",
"numpy.fft.ifft2",
"matplotlib.pyplot.imshow",
"os.path.exists",
"matplotlib.pyplot.colorbar",
"numpy.reshape",
"numpy.conj",
"matplotlib.pyplot.show",
"struct.unpack",
"itertools.tee",
"matplotlib.pyplot.ylabel",
"os.listdir",
"os.makedirs",
"os.path.isdir",
"numpy.zeros",
"numpy.array",
"xarray.DataArray",
"collections.OrderedDict",
"matplotlib.pyplot.xlabel"
] |
[((13222, 13237), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\n', (13233, 13237), False, 'from collections import OrderedDict\n'), ((13260, 13275), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\n', (13271, 13275), False, 'from collections import OrderedDict\n'), ((13545, 13578), 'Homogenizer_GUI.Homogenizer_GUI', 'Homogenizer_GUI.Homogenizer_GUI', ([], {}), '()\n', (13576, 13578), False, 'import Homogenizer_GUI\n'), ((875, 895), 'numpy.array', 'np.array', (['[128, 128]'], {}), '([128, 128])\n', (883, 895), True, 'import numpy as np\n'), ((1016, 1031), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\n', (1027, 1031), False, 'from collections import OrderedDict\n'), ((1073, 1088), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\n', (1084, 1088), False, 'from collections import OrderedDict\n'), ((1120, 1135), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\n', (1131, 1135), False, 'from collections import OrderedDict\n'), ((1175, 1190), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\n', (1186, 1190), False, 'from collections import OrderedDict\n'), ((2121, 2144), 'os.listdir', 'os.listdir', (['folder_path'], {}), '(folder_path)\n', (2131, 2144), False, 'import os\n'), ((2838, 2861), 'numpy.array', 'np.array', (['self.te_array'], {}), '(self.te_array)\n', (2846, 2861), True, 'import numpy as np\n'), ((3121, 3144), 'os.listdir', 'os.listdir', (['folder_path'], {}), '(folder_path)\n', (3131, 3144), False, 'import os\n'), ((3581, 3602), 'numpy.zeros', 'np.zeros', (['(2)', 'np.int16'], {}), '(2, np.int16)\n', (3589, 3602), True, 'import numpy as np\n'), ((4171, 4200), 'os.listdir', 'os.listdir', (['containing_folder'], {}), '(containing_folder)\n', (4181, 4200), False, 'import os\n'), ((7123, 7136), 'numpy.conj', 'np.conj', (['img2'], {}), '(img2)\n', (7130, 7136), True, 'import numpy as np\n'), ((7731, 7757), 'itertools.tee', 'itertools.tee', (['object_list'], {}), '(object_list)\n', (7744, 7757), False, 'import itertools\n'), ((8653, 8691), 'numpy.zeros', 'np.zeros', (['(dimension[0], dimension[1])'], {}), '((dimension[0], dimension[1]))\n', (8661, 8691), True, 'import numpy as np\n'), ((9157, 9186), 'collections.OrderedDict', 'OrderedDict', (["[('', interp_b)]"], {}), "([('', interp_b)])\n", (9168, 9186), False, 'from collections import OrderedDict\n'), ((9417, 9440), 'os.listdir', 'os.listdir', (['folder_path'], {}), '(folder_path)\n', (9427, 9440), False, 'import os\n'), ((1740, 1762), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Location"""'], {}), "('Location')\n", (1750, 1762), True, 'import matplotlib.pyplot as plt\n'), ((1776, 1798), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Location"""'], {}), "('Location')\n", (1786, 1798), True, 'import matplotlib.pyplot as plt\n'), ((1812, 1829), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image'], {}), '(image)\n', (1822, 1829), True, 'import matplotlib.pyplot as plt\n'), ((1843, 1857), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (1855, 1857), True, 'import matplotlib.pyplot as plt\n'), ((1871, 1881), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1879, 1881), True, 'import matplotlib.pyplot as plt\n'), ((2251, 2275), 'os.path.isdir', 'os.path.isdir', (['file_path'], {}), '(file_path)\n', (2264, 2275), False, 'import os\n'), ((3251, 3275), 'os.path.isdir', 'os.path.isdir', (['file_path'], {}), '(file_path)\n', (3264, 3275), False, 'import os\n'), ((4628, 4653), 'os.path.exists', 'os.path.exists', (['save_path'], {}), '(save_path)\n', (4642, 4653), False, 'import os\n'), ((4668, 4690), 'os.makedirs', 'os.makedirs', (['save_path'], {}), '(save_path)\n', (4679, 4690), False, 'import os\n'), ((4766, 4785), 'xarray.DataArray', 'xr.DataArray', (['array'], {}), '(array)\n', (4778, 4785), True, 'import xarray as xr\n'), ((5583, 5639), 'numpy.reshape', 'np.reshape', (['complex_vals', '(dimensions[0], dimensions[1])'], {}), '(complex_vals, (dimensions[0], dimensions[1]))\n', (5593, 5639), True, 'import numpy as np\n'), ((7269, 7298), 'numpy.angle', 'np.angle', (['multiplic_img1_img2'], {}), '(multiplic_img1_img2)\n', (7277, 7298), True, 'import numpy as np\n'), ((9547, 9571), 'os.path.isdir', 'os.path.isdir', (['file_path'], {}), '(file_path)\n', (9560, 9571), False, 'import os\n'), ((10237, 10252), 'numpy.array', 'np.array', (['fid_l'], {}), '(fid_l)\n', (10245, 10252), True, 'import numpy as np\n'), ((1597, 1629), 'matplotlib.pyplot.title', 'plt.title', (['"""Reconstructed Image"""'], {}), "('Reconstructed Image')\n", (1606, 1629), True, 'import matplotlib.pyplot as plt\n'), ((1666, 1726), 'matplotlib.pyplot.title', 'plt.title', (['"""Field Map - B[T] Values as function of location"""'], {}), "('Field Map - B[T] Values as function of location')\n", (1675, 1726), True, 'import matplotlib.pyplot as plt\n'), ((5741, 5764), 'numpy.fft.ifft2', 'np.fft.ifft2', (['k_casting'], {}), '(k_casting)\n', (5753, 5764), True, 'import numpy as np\n'), ((8997, 9048), 'numpy.polyfit', 'np.polyfit', (['te_values[:]', 'value_vec_in_phase_map', '(1)'], {}), '(te_values[:], value_vec_in_phase_map, 1)\n', (9007, 9048), True, 'import numpy as np\n'), ((10179, 10209), 'struct.unpack', 'struct.unpack', (['"""i"""', 'fid_r[-4:]'], {}), "('i', fid_r[-4:])\n", (10192, 10209), False, 'import struct\n')]
|
import numpy as np
import math
Esubo = 8.854 * pow(10,-12)  # vacuum permittivity epsilon_0 (defined but unused below)
k = 8.988 * pow(10,9)  # Coulomb constant (defined but unused below)
def fluxDisk():
radius = float(input("Radius: "))
radius = radius /1000
electricField = float(input("Electric Field: "))
electricField = (electricField*pow(10,3))
theta = float(input("Theta: "))
actualTheta = 90-theta
flux = math.cos(math.radians(actualTheta))*electricField*pow(radius,2) * math.pi
print(flux)
fluxDisk()
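# Worked check with hypothetical inputs: radius 100 (-> 0.1 m), electric field
# 2 (-> 2e3 V/m) and theta 30 give cos(60 deg) = 0.5, so
# flux = 0.5 * 2e3 * 0.1**2 * pi ~= 31.4 V*m.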
|
[
"math.radians"
] |
[((360, 385), 'math.radians', 'math.radians', (['actualTheta'], {}), '(actualTheta)\n', (372, 385), False, 'import math\n')]
|
"""Optimization
* :function:`.single_nested_cvrs`
* :function:`.dual_nested_cvrs`
* :function:`.single_cv`
* :function:`.chi2_test`
"""
# data wrangling
import numpy as np
import pandas as pd
from itertools import product
from scipy import stats
# validation
from sklearn.metrics import balanced_accuracy_score, accuracy_score, f1_score, roc_auc_score
from sklearn.model_selection import KFold
from sklearn.preprocessing import MinMaxScaler
# from scipy import stats
# from pandas import *
# # Store sample sizes and number of errors
# n1 = 1000 # samples
# m1 = 300 # errors
# n2 = 1000 # samples
# m2 = 360 # errors
# # Store errors and correct classifications in a 2x2 table
# perf = DataFrame([[m1, m2], [n1-m1, n2-m2]], index=["Error", "Correct"])
# perf.columns = ["S_1", "S_2"]
# print(perf)
# ##### Chi-2 test for equality of error rates
# pvalue = stats.chi2_contingency(perf)[1]
# print("p-value = ", '{0:.6f}'.format(pvalue))
# ##### Fisher test for equality of error rates
# pvalue = stats.fisher_exact(perf)[1]
# print("p-value = ", ’{0:.6f}’.format(pvalue))
# import pandas as pd
# res = pd.read_csv("Crossval.csv", index_col=0)
# print(res)
# """
# algo1 algo2
# 1 75.05 78.08
# 2 74.24 79.77
# 3 76.20 79.61
# 4 81.35 88.39
# 5 80.96 88.27
# 6 84.22 76.20
# 7 77.68 88.04
# 8 82.10 87.50
# 9 81.35 84.37
# 10 81.80 84.04
# """
# ##### t-student test for equality of error rates
# pvalue = stats.ttest_rel(res['algo2'], res['algo1'])[1]
# print("p-value = ", '{0:.6f}'.format(pvalue))
def nested_single_cv(x_t, y_t, L, grid, k_ext, k_int):
"""
    Summary: Select a hyper-parameter set for a given model before comparing
    -------- it with other hyper-parameterized models.
Input: - x_t: features train (numpy.arrays)
------ - y_t: labels train (numpy.arrays)
- L: learning algorithm (class method .predict())
- grid: keys as a parameter name; values as the array of the parameter' values (dict)
- K_ext: number of external folds (integer)
- K_int: number of internal folds (integer)
Output: - inner_result_frame: index: [k_ext], columns: [hp_set], values: [v_bcr_(k_int_mean)]
------- - outter_result_frame: index: [k_ext, hp_hat], columns:[t_bcr, v_bcr], values:[t_bcr, v_bcr]
Example: model1= BaggingTrees
-------- grid1 = {'epochs':[1]
, 'n_trees':[100]
, 'criterion': ['entropy']
, 'min_samples_leaf':[0.06] #
, 'max_depth':[3]
, 'min_samples_split':[0.03] #
, 'max_leaf_nodes':[200]
}
K_int, K_ext = 4, 10
outter, inner = nested_single_cv(x_t, y_t, model1, grid1, K_ext, K_int)
outter.groupby('hp_hat').agg({'t_bcr': ['count', 'mean', 'std']
, 'v_bcr': ['mean', 'std']}).reset_index('hp_hat')
"""
hp_set = [v for v in product(*grid.values())]
inner_results = pd.DataFrame(columns = hp_set)
outter_results = pd.DataFrame(columns = ['hp_hat'
, 't_bcr'
, 'v_bcr'
])
# frame pointer
i = 0
# partionate "training rows" into "K_ext" sets
K_ext_folds = KFold(n_splits = k_ext, shuffle=False).split(x_t) # (markers t_i, v_i)
for t_ext_fold, v_ext_fold in K_ext_folds:
# sectioning "train set" between "S_k" into "ext_fold" sets
x_S_k = x_t[t_ext_fold] # training x
y_S_k = y_t[t_ext_fold] # training y
x_ext_fold = x_t[v_ext_fold] # test x
y_ext_fold = y_t[v_ext_fold] # test y
# get hp_hat in the inner loop
hp_dic = {}
for idx, hp in enumerate(hp_set):
hp_dic[idx]=[]
# partionate "S_k training rows" into "K_int" sets
K_int_folds = KFold(n_splits = k_int, shuffle=False).split(x_S_k)
for t_int_fold, v_int_fold in K_int_folds:
# sectioning "S_k" between "Ss_k" into "int_fold" sets
x_Ss_k = x_S_k[t_int_fold] # training x
y_Ss_k = y_S_k[t_int_fold] # training y
x_int_fold = x_S_k[v_int_fold] # test x
y_int_fold = y_S_k[v_int_fold] # test y
# must scaler after partition, for specific a training normalization
min_max_scaler = MinMaxScaler(feature_range=(0, 1))
X_t = min_max_scaler.fit_transform(x_Ss_k)
X_v = min_max_scaler.fit_transform(x_int_fold)
Y_t = y_Ss_k
Y_v = y_int_fold
# Loading and fitting model
model = L(hp)
model.fit(X_t, Y_t)
# prediction
Y_v_predicted = model.predict(X_v)
# validation
v_bcr = balanced_accuracy_score(Y_v, Y_v_predicted)
# append all
hp_dic[idx].append(v_bcr)
        # Averages the k_int iterations for each hp in hp_set and stores it
inner_results.loc[i] = [sum(arr) / len(arr) for arr in hp_dic.values()]
# avg all hp predictions scores to define hp_hat (the highest) # use t-test?
ixd_max= max([(k,np.mean(v)) for k,v in hp_dic.items()],key=lambda item:item[1])[0]
hp_hat = hp_set[ixd_max]
# must scaler after partition, for specific a training normalization
min_max_scaler = MinMaxScaler(feature_range=(0, 1))
X_t = min_max_scaler.fit_transform(x_S_k)
X_v = min_max_scaler.fit_transform(x_ext_fold)
Y_t = y_S_k
Y_v = y_ext_fold
# Loading and fitting model
        # refit on the full outer-training fold using the selected hp_hat
        # (the original reused the leftover inner-loop variable hp here)
        model = L(hp_hat)
model.fit(X_t, Y_t)
# prediction
Y_v_predicted = model.predict(X_v)
# training metrics
t_acc = model.acc
t_bcr = model.bcr
t_f1 = model.f1
t_auc = model.auc
# validation metrics
v_acc = accuracy_score(Y_v, Y_v_predicted)
v_bcr = balanced_accuracy_score(Y_v, Y_v_predicted)
v_f1 = f1_score(Y_v, Y_v_predicted, average='macro')
v_auc = roc_auc_score(Y_v, Y_v_predicted, average='macro')
outter_results.loc[i] = [hp_hat
, t_bcr
, v_bcr]
i += 1
return outter_results, inner_results
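# Minimal usage sketch (BaggingTrees and grid1 follow the docstring example
# above and are assumptions, not part of this module):
#
#     outter, inner = nested_single_cv(x_t, y_t, BaggingTrees, grid1, 10, 4)
#     best_hp = outter.groupby('hp_hat')['v_bcr'].mean().idxmax()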
|
[
"pandas.DataFrame",
"sklearn.metrics.accuracy_score",
"sklearn.preprocessing.MinMaxScaler",
"sklearn.metrics.balanced_accuracy_score",
"sklearn.model_selection.KFold",
"sklearn.metrics.roc_auc_score",
"sklearn.metrics.f1_score",
"numpy.mean"
] |
[((3295, 3323), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'hp_set'}), '(columns=hp_set)\n', (3307, 3323), True, 'import pandas as pd\n'), ((3350, 3400), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['hp_hat', 't_bcr', 'v_bcr']"}), "(columns=['hp_hat', 't_bcr', 'v_bcr'])\n", (3362, 3400), True, 'import pandas as pd\n'), ((5842, 5876), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {'feature_range': '(0, 1)'}), '(feature_range=(0, 1))\n', (5854, 5876), False, 'from sklearn.preprocessing import MinMaxScaler\n'), ((6371, 6405), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['Y_v', 'Y_v_predicted'], {}), '(Y_v, Y_v_predicted)\n', (6385, 6405), False, 'from sklearn.metrics import balanced_accuracy_score, accuracy_score, f1_score, roc_auc_score\n'), ((6423, 6466), 'sklearn.metrics.balanced_accuracy_score', 'balanced_accuracy_score', (['Y_v', 'Y_v_predicted'], {}), '(Y_v, Y_v_predicted)\n', (6446, 6466), False, 'from sklearn.metrics import balanced_accuracy_score, accuracy_score, f1_score, roc_auc_score\n'), ((6483, 6528), 'sklearn.metrics.f1_score', 'f1_score', (['Y_v', 'Y_v_predicted'], {'average': '"""macro"""'}), "(Y_v, Y_v_predicted, average='macro')\n", (6491, 6528), False, 'from sklearn.metrics import balanced_accuracy_score, accuracy_score, f1_score, roc_auc_score\n'), ((6546, 6596), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (['Y_v', 'Y_v_predicted'], {'average': '"""macro"""'}), "(Y_v, Y_v_predicted, average='macro')\n", (6559, 6596), False, 'from sklearn.metrics import balanced_accuracy_score, accuracy_score, f1_score, roc_auc_score\n'), ((3634, 3670), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': 'k_ext', 'shuffle': '(False)'}), '(n_splits=k_ext, shuffle=False)\n', (3639, 3670), False, 'from sklearn.model_selection import KFold\n'), ((4764, 4798), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {'feature_range': '(0, 1)'}), '(feature_range=(0, 1))\n', (4776, 4798), False, 'from sklearn.preprocessing import MinMaxScaler\n'), ((5239, 5282), 'sklearn.metrics.balanced_accuracy_score', 'balanced_accuracy_score', (['Y_v', 'Y_v_predicted'], {}), '(Y_v, Y_v_predicted)\n', (5262, 5282), False, 'from sklearn.metrics import balanced_accuracy_score, accuracy_score, f1_score, roc_auc_score\n'), ((4234, 4270), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': 'k_int', 'shuffle': '(False)'}), '(n_splits=k_int, shuffle=False)\n', (4239, 4270), False, 'from sklearn.model_selection import KFold\n'), ((5633, 5643), 'numpy.mean', 'np.mean', (['v'], {}), '(v)\n', (5640, 5643), True, 'import numpy as np\n')]
|
import sys
import logging
from gifi.utils import git_utils
from gifi.command import Command, AggregatedCommand, UnknownCommandException, CommandException
import gifi.epic
import gifi.feature
import pkg_resources
import gifi.queue
import gifi.git_hub
logging.basicConfig(filename='/tmp/gifi.log', level=logging.DEBUG)
command = AggregatedCommand('gifi', 'Git and github enhancements to git.', [
gifi.epic.command,
gifi.feature.command,
gifi.queue.command,
gifi.git_hub.command,
Command('version', 'Show version number.', lambda: pkg_resources.require("git-gifi")[0].version)
])
class HelpGenerator(object):
def __init__(self, main):
self.main = main
    def __call__(self):
        # use help_text rather than shadowing the built-in help()
        help_text = str(self.main)
        help_text += '\nUsage:\n\t%s command [command arguments]\n\nCommands:\n' % self.main.name
        # it does not have to be recursive as there are only two levels
        for command in self.main.nested_commands():
            help_text += str(command)
            if len(command.nested_commands()) != 0:
                help_text += ' See below subcommands:\n'
                for subcommand in command.nested_commands():
                    help_text += '\t%s\n' % str(subcommand)
            else:
                help_text += '\n'
        return help_text
command.add_command(Command('help', 'Display this window.', HelpGenerator(command)))
class AliasesInstaller(object):
def __init__(self, main):
self.main = main
def __call__(self, config_level='global'):
repo = git_utils.get_repo()
config_writer = repo.config_writer(config_level)
# it does not have to be recursive as there are only two levels
for command in self.main.nested_commands():
if len(command.nested_commands()) != 0:
for subcommand in command.nested_commands():
alias = '%s-%s' % (command.name, subcommand.name)
value = '"!%s %s %s"' % (sys.argv[0], command.name, subcommand.name)
config_writer.set_value('alias', alias, value)
config_writer.release()
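# Example of what the installer writes (the gifi path is hypothetical): for
# command "feature" with subcommand "start" it sets
#     git config --global alias.feature-start "!/usr/local/bin/gifi feature start"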
command.add_command(Command('install', 'Install gifi as a bunch of git aliases.', AliasesInstaller(command)))
def main():
args = list(sys.argv)
args.pop(0)
_main(args)
def _main(args):
if len(args) == 0:
args.append('help')
try:
result = command(*args)
if result is not None:
print(result)
except UnknownCommandException:
print("Wrong command, try 'help'.")
except CommandException as e:
print("ERROR: ", e)
|
[
"pkg_resources.require",
"gifi.utils.git_utils.get_repo",
"logging.basicConfig"
] |
[((252, 318), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': '"""/tmp/gifi.log"""', 'level': 'logging.DEBUG'}), "(filename='/tmp/gifi.log', level=logging.DEBUG)\n", (271, 318), False, 'import logging\n'), ((1519, 1539), 'gifi.utils.git_utils.get_repo', 'git_utils.get_repo', ([], {}), '()\n', (1537, 1539), False, 'from gifi.utils import git_utils\n'), ((551, 584), 'pkg_resources.require', 'pkg_resources.require', (['"""git-gifi"""'], {}), "('git-gifi')\n", (572, 584), False, 'import pkg_resources\n')]
|
import subprocess, sys, random, time  # time is needed by navega_green's sleep calls
import GreenLib
# Messages
sys.path.insert(0, 'messages')
import msg_control
import msg_logo
sys.path.insert(0, 'core')
import core_cripto
import core_recon
import core_menu
import core_reverseshell  # used by navega_green's reverseshell branch
#--
# Main menu function
#--
def main_menu():
subprocess.run(["clear"])
logos = [msg_logo.msg_logo1,msg_logo.msg_logo2,msg_logo.msg_logo3]
print(random.choice(logos))
print(msg_control.msg_menu)
choice = input(" >> ")
exec_menu(choice)
return
def cripto_main_menu():
subprocess.run(["clear"])
logos = [msg_logo.msg_logo1,msg_logo.msg_logo2,msg_logo.msg_logo3]
print(random.choice(logos))
print(msg_control.msg_cripto)
choice = input(" >> ")
exec_menu(choice)
return
def recon_main_menu():
subprocess.run(["clear"])
logos = [msg_logo.msg_logo1,msg_logo.msg_logo2,msg_logo.msg_logo3]
print(random.choice(logos))
print(msg_control.msg_cripto)
choice = input(" >> ")
exec_menu(choice)
return
#--
# Menu execution function
#--
def exec_menu(choice):
subprocess.run(["clear"])
# TODO
    # Lowercase the choice.
    # Commented out because of the uppercase-containing encrypted hash strings.
#ch = choice.lower()
ch = choice
    # Filter for (space, digits-only and empty input)
retorno_filtro = filtra_entrada(ch)
    # If anything is wrong, redirect back to the main page
if (retorno_filtro != True):
exec_menu('main_menu')
else:
try:
            # Build a list from what was typed
entrada_de_dados = [str(x) for x in ch.split()]
            # If exactly one token was typed, the choice goes to the
            # menu_actions function to be dispatched to a handler
if(len(entrada_de_dados) == 1):
GreenLib.menu_actions[ch]()
            # If the input has fewer than 1 or more than 3 tokens, something is
            # wrong, since for now the maximum is 3 (cripto reverse "54321").
            # If that maximum ever exceeds 3, this will be changed.
elif(len(entrada_de_dados) <= 1) or (len(entrada_de_dados) > 3):
exec_menu('main_menu')
            # With exactly 3 arguments, send them to the navega_green function
elif(len(entrada_de_dados) == 3):
navega_green(entrada_de_dados[0],entrada_de_dados[1],entrada_de_dados[2])
else:
#
                # HERE THE MENU IS CALLED WITH MULTIPLE CHOICES USING THE CREATED LIST
#
navega_green(entrada_de_dados[0],entrada_de_dados[1],entrada_de_dados[2])
        # If any error occurs, a warning message is printed.
except KeyError:
print ("Invalid selection, please try again.\n")
exec_menu('main_menu')
return
# Exit program
def exit():
subprocess.run(["clear"])
print("O "+sys.argv[0]+" foi finalizado com segurança!")
sys.exit()
# Back Menu
# TODO Implementar ele
def back():
GreenLib.exec_menu('main_menu')
def espera():
print("Aperte um botao para continua...")
go = input(">")
exec_menu('main_menu')
def filtra_entrada(ch):
    # Check if it contains only digits
    if(ch.isdigit() == True):
        return False
    # Check if it is only whitespace
    elif(ch.isspace() == True):
        return False
    # Check if it is empty
elif(ch == ''):
return False
else:
return True
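# Examples (hypothetical inputs): filtra_entrada("123") -> False (digits only),
# filtra_entrada("   ") -> False (whitespace only), filtra_entrada("") -> False,
# filtra_entrada("cripto md5 test") -> True.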
# Navigate
def navega_green(escolha,sub_escolha,key):
if (escolha == "search") or (escolha.upper() == "SEARCH"):
print("Função search")
time.sleep(4)
elif (escolha == "set") or (escolha.upper() == "SET"):
print("Função set")
time.sleep(4)
elif (escolha == "cripto") or (escolha.upper() == "CRIPTO"):
teste_cripto = core_cripto.Cripto(sub_escolha,key)
teste_cripto.greenCripto(sub_escolha,key)
espera()
elif (escolha == "greenrecon") or (escolha.upper() == "GREENRECON"):
teste_cripto = core_recon.Recon(sub_escolha,key)
teste_cripto.greenScan()
espera()
elif (escolha == "reverseshell") or (escolha.upper() == "REVERSESHELL"):
print("Sub Escolha:"+sub_escolha)
print("Key:"+key)
server = core_reverseshell.MultiServer()
server.print_help()
server.start_turtle()
else:
GreenLib.menu_actions['main_menu']()
|
[
"subprocess.run",
"random.choice",
"sys.path.insert",
"core_cripto.Cripto",
"GreenLib.exec_menu",
"core_recon.Recon",
"sys.exit"
] |
[((58, 88), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""messages"""'], {}), "(0, 'messages')\n", (73, 88), False, 'import subprocess, sys, random\n'), ((125, 151), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""core"""'], {}), "(0, 'core')\n", (140, 151), False, 'import subprocess, sys, random\n'), ((260, 285), 'subprocess.run', 'subprocess.run', (["['clear']"], {}), "(['clear'])\n", (274, 285), False, 'import subprocess, sys, random\n'), ((510, 535), 'subprocess.run', 'subprocess.run', (["['clear']"], {}), "(['clear'])\n", (524, 535), False, 'import subprocess, sys, random\n'), ((761, 786), 'subprocess.run', 'subprocess.run', (["['clear']"], {}), "(['clear'])\n", (775, 786), False, 'import subprocess, sys, random\n'), ((1042, 1067), 'subprocess.run', 'subprocess.run', (["['clear']"], {}), "(['clear'])\n", (1056, 1067), False, 'import subprocess, sys, random\n'), ((2806, 2831), 'subprocess.run', 'subprocess.run', (["['clear']"], {}), "(['clear'])\n", (2820, 2831), False, 'import subprocess, sys, random\n'), ((2897, 2907), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2905, 2907), False, 'import subprocess, sys, random\n'), ((2960, 2991), 'GreenLib.exec_menu', 'GreenLib.exec_menu', (['"""main_menu"""'], {}), "('main_menu')\n", (2978, 2991), False, 'import GreenLib\n'), ((367, 387), 'random.choice', 'random.choice', (['logos'], {}), '(logos)\n', (380, 387), False, 'import subprocess, sys, random\n'), ((617, 637), 'random.choice', 'random.choice', (['logos'], {}), '(logos)\n', (630, 637), False, 'import subprocess, sys, random\n'), ((868, 888), 'random.choice', 'random.choice', (['logos'], {}), '(logos)\n', (881, 888), False, 'import subprocess, sys, random\n'), ((3752, 3788), 'core_cripto.Cripto', 'core_cripto.Cripto', (['sub_escolha', 'key'], {}), '(sub_escolha, key)\n', (3770, 3788), False, 'import core_cripto\n'), ((3951, 3985), 'core_recon.Recon', 'core_recon.Recon', (['sub_escolha', 'key'], {}), '(sub_escolha, key)\n', (3967, 3985), False, 'import core_recon\n')]
|
from Bio.SeqUtils import ProtParam, ProtParamData
from warnings import warn
# mod for DIWV
def mod(sequence):
"""
    This is a not-yet-implemented function. It is a fix for ProtParam.ProteinAnalysis().protein_scale and the DIWV scale.
    As the latter requires knowledge of the preceding amino acid it will fail.
>>> p = ProtParam.ProteinAnalysis(sequence)
>>> p.protein_scale(ProtParamData.DIWV, window=9, edge=.4)
hashtag epicfail.
    So this is the replacement.
:param sequence: sequence to score
:type sequence: str
:return: DIWV score.
:rtype: list[int]
"""
p = ProtParam.ProteinAnalysis(sequence)
param_dict = ProtParamData.DIWV
window = 9
edge = 0.4
weights = p._weight_list(window, edge)
sum_of_weights = sum(weights) * 2 + 1
scores = []
for i in range(p.length - window):
subsequence = p.sequence[i:i + window]
score = 0.0
for j in range(window // 2):
try:
front = param_dict[subsequence[j]][subsequence[j + 1]]
                # mirror the front pair from the back of the window; the
                # original indices (window - j, window - j + 1) overran the
                # nine-character slice and raised IndexError
                back = param_dict[subsequence[window - j - 2]][subsequence[window - j - 1]]
score += weights[j] * front + weights[j] * back
except KeyError:
warn(f'warning: {subsequence[j]} or {subsequence[window - j - 1]} is not a standard amino acid.')
middle = subsequence[window // 2]
if middle in param_dict:
score += param_dict[middle]
else:
warn(f'warning: {middle} is not a standard amino acid.')
scores.append(score / sum_of_weights)
return scores
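# Usage sketch (the peptide below is a made-up example sequence):
#
#     scores = mod("MKTAYIAKQRQISFVKSHFSRQLEERLGLIEVQ")
#     # one score per sliding window, i.e. len(sequence) - 9 values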
|
[
"warnings.warn",
"Bio.SeqUtils.ProtParam.ProteinAnalysis"
] |
[((608, 643), 'Bio.SeqUtils.ProtParam.ProteinAnalysis', 'ProtParam.ProteinAnalysis', (['sequence'], {}), '(sequence)\n', (633, 643), False, 'from Bio.SeqUtils import ProtParam, ProtParamData\n'), ((1478, 1534), 'warnings.warn', 'warn', (['f"""warning: {middle} is not a standard amino acid."""'], {}), "(f'warning: {middle} is not a standard amino acid.')\n", (1482, 1534), False, 'from warnings import warn\n'), ((1239, 1346), 'warnings.warn', 'warn', (['f"""warning: {subsequence[j]} or {subsequence[window - j - 1]} is not a standard amino acid."""'], {}), "(\n f'warning: {subsequence[j]} or {subsequence[window - j - 1]} is not a standard amino acid.'\n )\n", (1243, 1346), False, 'from warnings import warn\n')]
|
from __future__ import print_function
import smbus
import time
import struct
import argparse
import sys
import math
# Argument definition and handling
parser = argparse.ArgumentParser(description="Read an Atari 2600 cartridge via I2C")
parser.add_argument("-s", dest="rom_size", metavar="size", type=int, required=True, choices=[2, 4, 8, 16], help="ROM size in kb (2, 4, 8, 16)")
parser.add_argument("-o", dest="output_file", metavar="filename", required=True, help="ROM output file")
parser.add_argument("-b", dest="rom_bank", metavar="type", default="auto", choices=["auto", "F8", "F6"], help="ROM bank switching method (auto, F8, F6) [default: F8]")
parser.add_argument("--rom-delay", metavar="delay", type=float, default=0.2, help="ROM delay in seconds between setting the address and reading a byte [default=0.2]")
parser.add_argument("--retries", metavar="num", type=int, default=3, help="Number of retried when an I/O error is received during reading [default: 3]")
parser.add_argument("--i2c-bus", metavar="num", type=int, default=1, help="The I2C bus to read from (0=/dev/i2c-0, 1=/dev/i2c-1) [default: 1]")
parser.add_argument("--write-bus1", metavar="addr", default="0x20", help="The I2C bus address to use to write the first 8 bytes of the ROM address [default: 0x20]")
parser.add_argument("--write-bank1", metavar="num", type=int, default=0, choices=[0, 1, 2], help="The MCP23017 or MCP23008 bank to use to write the first 8 bytes of the ROM address (0=MCP23017 Bank A, 1=MCP23017 Bank B, 2=MCP23008) [default: 0]")
parser.add_argument("--write-bus2", metavar="addr", default="0x20", help="The I2C bus address to use to write the last 5 bytes of the ROM address [default: 0x20]")
parser.add_argument("--write-bank2", metavar="num", type=int, default=1, choices=[0, 1, 2], help="The MCP23017 or MCP23008 bank to use to write the last 5 bytes of the ROM address (0=MCP23017 Bank A, 1=MCP23017 Bank B, 2=MCP23008) [default: 1]")
parser.add_argument("--read-bus", metavar="addr", default="0x24", help="The I2C bus address to use to read the ROM data [default: 0x24]")
parser.add_argument("--read-bank", metavar="num", type=int, default=0, choices=[0, 1, 2], help="The MCP23017 or MCP23008 bank to use to read the ROM data (0=MCP23017 Bank A, 1=MCP23017 Bank B, 2=MCP23008) [default: 0]")
args = parser.parse_args()
# Output settings
OUTPUT_FILE = args.output_file
# ROM settings
ROM_SIZE = args.rom_size * 1024
ROM_OFFSET = 0x1000
ROM_MAX_BANK = 4096
ROM_BANK = args.rom_bank
ROM_F8_BANKS = [ 0x1FF8, 0x1FF9 ]
ROM_F6_BANKS = [ 0x1FF6, 0x1FF7, 0x1FF8, 0x1FF9 ]
ROM_DELAY = args.rom_delay
MAX_RETRIES = args.retries
RETRY_DELAY = 5
# I2C bus settings
I2C_BUS = args.i2c_bus
# The 2600 has 13 address pins, so we need to spread these over two banks
# with the first 8 bits on the first bank and the remaining 5 on the second.
ADDR_WRITE_BUS1 = int(args.write_bus1, 0)
ADDR_WRITE_BANK1 = args.write_bank1
ADDR_WRITE_BUS2 = int(args.write_bus2, 0)
ADDR_WRITE_BANK2 = args.write_bank2
# The 2600 has 8 data pins, so we can use a single bank for that
ADDR_READ_BUS = int(args.read_bus, 0)
ADDR_READ_BANK = args.read_bank
# I2C Register Constants for MCP23017 and MCP23008
#
# Taken from the following datasheets:
# MCP23017: http://ww1.microchip.com/downloads/en/DeviceDoc/20001952C.pdf (table 3-3)
# MCP23008: http://ww1.microchip.com/downloads/en/DeviceDoc/21919e.pdf (table 1-3)
I2C_REG_IODIR = [ 0x00, 0x01, 0x00 ]
I2C_REG_GPIO = [ 0x12, 0x13, 0x09 ]
I2C_IODIR_PORT_READ = 0xFF
I2C_IODIR_PORT_WRITE = 0x00
# Configure the MCP23017/MCP23008 chips for reading and writing
def configBus(bus):
# Write bus
print("Configuring bus 0x{0:02x}, bank {1} for writing (reg: 0x{2:02x})" . format(ADDR_WRITE_BUS1, ADDR_WRITE_BANK1, I2C_REG_IODIR[ ADDR_WRITE_BANK1 ]))
bus.write_byte_data(ADDR_WRITE_BUS1, I2C_REG_IODIR[ ADDR_WRITE_BANK1 ], I2C_IODIR_PORT_WRITE)
print("Configuring bus 0x{0:02x}, bank {1} for writing (reg: 0x{2:02x})" . format(ADDR_WRITE_BUS2, ADDR_WRITE_BANK2, I2C_REG_IODIR[ ADDR_WRITE_BANK2 ]))
bus.write_byte_data(ADDR_WRITE_BUS2, I2C_REG_IODIR[ ADDR_WRITE_BANK2 ], I2C_IODIR_PORT_WRITE)
# Read bus
print("Configuring bus 0x{0:02x}, bank {1} for reading (reg: 0x{2:02x})" . format(ADDR_READ_BUS, ADDR_READ_BANK, I2C_REG_IODIR[ ADDR_READ_BANK ]))
bus.write_byte_data(ADDR_READ_BUS, I2C_REG_IODIR[ ADDR_READ_BANK ], I2C_IODIR_PORT_READ)
def realAddress(address):
return ( ( address - ROM_OFFSET ) % ROM_MAX_BANK ) + ROM_OFFSET
def bankNumber(address):
return int(math.floor( ( address - ROM_OFFSET ) / ROM_MAX_BANK ))
# Perform bank switching to correct the bank before reading, if needed
def bankSwitch(bus, address, rom_bank):
real_address = realAddress(address)
bank_number = bankNumber(address)
if rom_bank == "F8" and ( real_address == ROM_OFFSET or ( real_address - 1 ) in ROM_F8_BANKS ):
print("\nBank switch! {0:x} {1:x}" . format(address, ROM_F8_BANKS[ bank_number ]))
setAddress(bus, ROM_F8_BANKS[ bank_number ])
elif rom_bank == "F6" and ( real_address == ROM_OFFSET or ( real_address - 1 ) in ROM_F6_BANKS ):
print("\nBank switch! {0:x} {1:x}" . format(address, ROM_F6_BANKS[ bank_number ]))
setAddress(bus, ROM_F6_BANKS[ bank_number ])
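# Worked example for an 8k F8 image: address 0x2000 gives
# realAddress(0x2000) == 0x1000 and bankNumber(0x2000) == 1, so bankSwitch
# drives hotspot 0x1FF9 first, flipping the cartridge into its second bank
# before the actual read.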
# Set the address to read from the cartridge
def setAddress(bus, address):
bus.write_byte_data(ADDR_WRITE_BUS1, I2C_REG_GPIO[ ADDR_WRITE_BANK1 ], address & 0xFF)
bus.write_byte_data(ADDR_WRITE_BUS2, I2C_REG_GPIO[ ADDR_WRITE_BANK2 ], address >> 8)
# time.sleep(ROM_DELAY)
# Read a byte from the cartridge
def readByte(bus, retry=0):
try:
return bus.read_byte_data(ADDR_READ_BUS, I2C_REG_GPIO[ ADDR_READ_BANK ])
except:
if retry < MAX_RETRIES:
print("\nRetry delay!")
time.sleep(RETRY_DELAY)
return readByte(bus, retry + 1)
else:
raise
def readByteFast(bus, retry=0):
last_byte = None
byte_count = 0
while byte_count < 10:
byte = readByte(bus, retry)
if byte == last_byte:
byte_count += 1
else:
if last_byte != None:
print("Mismatch {0:x} {1:x}" . format(last_byte, byte))
time.sleep(ROM_DELAY)
last_byte = byte
byte_count = 0
return byte
# Check the ROM for basic errors
def checkRom(bus):
print("Checking ROM...")
bytes = []
for x in range(0, 16):
setAddress(bus, x + ROM_OFFSET)
byte = readByte(bus)
bytes.append(byte)
if checkRomZeros(bytes) and checkRomDuplicate(bytes):
print("ROM checks passed")
return True
return False
# Check the ROM for all zeros
def checkRomZeros(bytes):
if bytes.count(0) == len(bytes):
print("Error: all zeros returned, is cartridge inserted?")
return False
return True
# Check the ROM for pairs of bytes with duplicate values
def checkRomDuplicate(bytes):
num_bytes = len(bytes)
count = 0
    for x in range(0, num_bytes // 2):
if bytes[x * 2] == bytes[x * 2 + 1]:
count += 1
    if count == num_bytes // 2:
print("Error: duplicate bytes returned, wiring issue?")
return False
return True
# Test code to validate the address line wiring, moves from the first
# address pin to the last with a 30 second delay
#
#bit = 1
#
#for x in range(0, 13):
# setAddress(bit)
# bit = bit << 1
# time.sleep(30)
bus = smbus.SMBus(I2C_BUS)
configBus(bus)
if checkRom(bus):
# Set the default ROM bank method
if ROM_SIZE == 8192 and ROM_BANK == "auto":
ROM_BANK = "F8"
if ROM_SIZE == 16384 and ROM_BANK == "auto":
ROM_BANK = "F6"
if ROM_BANK == "auto":
ROM_BANK = None
file = open(OUTPUT_FILE, "wb")
for x in range(0, ROM_SIZE):
bankSwitch(bus, x + ROM_OFFSET, ROM_BANK)
setAddress(bus, realAddress(x + ROM_OFFSET))
byte = readByteFast(bus)
file.write(struct.pack('B', byte))
        sys.stdout.write("\rRead {0} of {1} bytes" . format(x + 1, ROM_SIZE))
sys.stdout.flush()
file.close()
print("\nDone!")
bus.close()
|
[
"argparse.ArgumentParser",
"math.floor",
"struct.pack",
"time.sleep",
"sys.stdout.flush",
"smbus.SMBus"
] |
[((163, 238), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Read an Atari 2600 cartridge via I2C"""'}), "(description='Read an Atari 2600 cartridge via I2C')\n", (186, 238), False, 'import argparse\n'), ((7469, 7489), 'smbus.SMBus', 'smbus.SMBus', (['I2C_BUS'], {}), '(I2C_BUS)\n', (7480, 7489), False, 'import smbus\n'), ((4531, 4580), 'math.floor', 'math.floor', (['((address - ROM_OFFSET) / ROM_MAX_BANK)'], {}), '((address - ROM_OFFSET) / ROM_MAX_BANK)\n', (4541, 4580), False, 'import math\n'), ((8098, 8116), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (8114, 8116), False, 'import sys\n'), ((7987, 8009), 'struct.pack', 'struct.pack', (['"""B"""', 'byte'], {}), "('B', byte)\n", (7998, 8009), False, 'import struct\n'), ((5796, 5819), 'time.sleep', 'time.sleep', (['RETRY_DELAY'], {}), '(RETRY_DELAY)\n', (5806, 5819), False, 'import time\n'), ((6229, 6250), 'time.sleep', 'time.sleep', (['ROM_DELAY'], {}), '(ROM_DELAY)\n', (6239, 6250), False, 'import time\n')]
|
import json
with open('removed_duplicates_sorted_2.json') as f:
    data = json.load(f)
ones = []
for i in data["ones"]:
if "ali" in i["comment"]:
i["scam"] = 1
ones.append(i)
else:
ones.append(i)
data["ones"] = ones
with open('removed_duplicates_sorted_2.json', 'w') as outfile:
json.dump(data, outfile)
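# Note: the substring test above flags any comment containing "ali" (so e.g.
# "alias" or "quality" would match as well); it is a very coarse heuristic.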
|
[
"json.dump",
"json.load"
] |
[((65, 77), 'json.load', 'json.load', (['f'], {}), '(f)\n', (74, 77), False, 'import json\n'), ((319, 343), 'json.dump', 'json.dump', (['data', 'outfile'], {}), '(data, outfile)\n', (328, 343), False, 'import json\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This module provides basic and minimum necessary functions for carrying out
data query and download for Xena GDC ETL pipelines.
"""
# Ensure Python 2 and 3 compatibility
from __future__ import division
from __future__ import print_function
import json
import os
import sys
import warnings
import pandas as pd
import requests
from .utils import mkdir_p, reduce_json_array, get_json_objects, get_to_drops
GDC_API_BASE = 'https://api.gdc.cancer.gov'
_SUPPORTED_FILE_TYPES = {
'txt',
'vcf',
'bam',
'tsv',
'xml',
'maf',
'xlsx',
'tar',
'gz',
'md5',
'xls',
}
_SUPPORTED_DATASETS = [
{'data_type': 'Copy Number Segment'},
{'data_type': 'Masked Copy Number Segment'},
{'data_type': 'Isoform Expression Quantification'},
{'data_type': 'miRNA Expression Quantification'},
{'data_type': 'Methylation Beta Value'},
{'analysis.workflow_type': 'HTSeq - Counts'},
{'analysis.workflow_type': 'HTSeq - FPKM'},
{'analysis.workflow_type': 'HTSeq - FPKM-UQ'},
{'analysis.workflow_type': 'MuSE Variant Aggregation and Masking'},
{'analysis.workflow_type': 'MuTect2 Variant Aggregation and Masking'},
{
'analysis.workflow_type':
'SomaticSniper Variant Aggregation and Masking'
},
{'analysis.workflow_type': 'VarScan2 Variant Aggregation and Masking'},
{'data_type': 'Biospecimen Supplement'},
{'data_type': 'Clinical Supplement'},
]
# https://gdc.cancer.gov/resources-tcga-users/tcga-code-tables/tcga-study-abbreviations
TCGA_STUDY_ABBR = {
'LAML': 'Acute Myeloid Leukemia',
'ACC': 'Adrenocortical carcinoma',
'BLCA': 'Bladder Urothelial Carcinoma',
'LGG': 'Brain Lower Grade Glioma',
'BRCA': 'Breast invasive carcinoma',
'CESC': (
'Cervical squamous cell carcinoma and endocervical adenocarcinoma'
),
'CHOL': 'Cholangiocarcinoma',
'LCML': 'Chronic Myelogenous Leukemia',
'COAD': 'Colon adenocarcinoma',
'CNTL': 'Controls',
'ESCA': 'Esophageal carcinoma',
'FPPP': 'FFPE Pilot Phase II',
'GBM': 'Glioblastoma multiforme',
'HNSC': 'Head and Neck squamous cell carcinoma',
'KICH': 'Kidney Chromophobe',
'KIRC': 'Kidney renal clear cell carcinoma',
'KIRP': 'Kidney renal papillary cell carcinoma',
'LIHC': 'Liver hepatocellular carcinoma',
'LUAD': 'Lung adenocarcinoma',
'LUSC': 'Lung squamous cell carcinoma',
'DLBC': 'Lymphoid Neoplasm Diffuse Large B-cell Lymphoma',
'MESO': 'Mesothelioma',
'MISC': 'Miscellaneous',
'OV': 'Ovarian serous cystadenocarcinoma',
'PAAD': 'Pancreatic adenocarcinoma',
'PCPG': 'Pheochromocytoma and Paraganglioma',
'PRAD': 'Prostate adenocarcinoma',
'READ': 'Rectum adenocarcinoma',
'SARC': 'Sarcoma',
'SKCM': 'Skin Cutaneous Melanoma',
'STAD': 'Stomach adenocarcinoma',
'TGCT': 'Testicular Germ Cell Tumors',
'THYM': 'Thymoma',
'THCA': 'Thyroid carcinoma',
'UCS': 'Uterine Carcinosarcoma',
'UCEC': 'Uterine Corpus Endometrial Carcinoma',
'UVM': 'Uveal Melanoma',
}
def simple_and_filter(in_dict={}, exclude_dict={}):
"""Make a simple GDC API compatible query filter from a dict, in which
individual conditions are joint by the "and" logic.
In the return filter, individual conditions in the ``in_dict`` and
``exclude_dict`` will be joint by the "and" operator, meaning a hit has to
match all conditions. Here, a condition can use either a "in" operator
(specified in the ``in_dict``) or a "exclude" operator (specified in the
``exclude_dict``).
See details at
https://docs.gdc.cancer.gov/API/Users_Guide/Search_and_Retrieval/#filters-specifying-the-query
Args:
        in_dict (dict): A dict describing query conditions with the "in"
            operator. Each (key, value) pair represents one condition. The
            "key" is the 'field' operand. The operator between "key" and
            "value" is "in".
exclude_dict (dict): A dict describing query conditions with the
"exclude" operator. Each (key, value) pair represents for one
condition. The "key" is the 'field' operand. Operator between
"key" and "value" is "exclude_dict".
Returns:
dict: A dict of filter conforming to GDC API's format. It should then
be converted to JSON format and used in the following http request.
"""
if not in_dict and not exclude_dict:
return in_dict
operation_list = []
for key in in_dict:
value = in_dict[key]
if not isinstance(value, list):
value = [value]
operation_list.append(
{"op": "in", "content": {"field": key, "value": value}}
)
for key in exclude_dict:
value = exclude_dict[key]
if not isinstance(value, list):
value = [value]
operation_list.append(
{"op": "exclude", "content": {"field": key, "value": value}}
)
return {"op": "and", "content": operation_list}
def search(
endpoint,
in_filter={},
exclude_filter={},
fields=[],
expand=[],
typ='dataframe',
method='GET',
):
"""Search one GDC endpoints and return searching results in a pandas
DataFrame if possible.
When searching results cannot be safely converted to a pandas DataFrame,
results will be returned in the JSON format as it is returned from GDC
API.
Args:
        endpoint (str): One string of a GDC API supported endpoint. See:
            https://docs.gdc.cancer.gov/API/Users_Guide/Getting_Started/#api-endpoints
        in_filter (dict, optional): A dict of query conditions which will be
            used to perform the query. Each (key, value) pair represents
            one condition. It will be passed to ``simple_and_filter`` for
            making a query filter compatible with GDC API. Please check
            the ``simple_and_filter`` function for details.
        exclude_filter (dict, optional): An optional dict of query conditions
            which will be used to perform the query. Each (key, value) pair
            represents one condition. It will be passed to
``simple_and_filter`` for making a query filter compatible with
GDC API. Please check ``simple_and_filter`` function for details.
fields (list or str, optional): One or more fields to be queried. Each
field will be used as a column name in the returned DataFrame. It
can be a comma separated string or a list of field strings or a
combination of both.
expand (list or str, optional): One or more field groups to be
queried. It can be a comma separated string or a list of field
strings or a combination of both.
typ (str): type of search result to return (JSON or dataframe).
Defaults to 'dataframe'.
method (str): HTTP method for the search. Defaults to 'GET'.
Returns:
pandas.core.frame.DataFrame or str: A search result in form of a
pandas DataFrame or a JSON formatted string, depending on the
value of ``typ`` and the DataFrame convertibility of JSON.
"""
try:
assert typ.lower() in ['json', 'dataframe']
except (AttributeError, AssertionError):
raise ValueError(
'typ should be a string of either JSON or dataframe, '
'not {}'.format(typ)
)
filters = simple_and_filter(in_dict=in_filter, exclude_dict=exclude_filter)
if isinstance(fields, str):
fields = [fields]
if isinstance(expand, str):
expand = [expand]
payload = {'size': 1}
if filters:
payload['filters'] = json.dumps(filters)
if fields:
payload['fields'] = ','.join(fields)
if expand:
payload['expand'] = ','.join(expand)
url = '{}/{}'.format(GDC_API_BASE, endpoint)
if method.upper() == 'POST':
response = requests.post(url, data=payload)
elif method.upper() == 'GET':
response = requests.get(url, params=payload)
else:
raise ValueError(
'Invalid method: {}\n method must be either "GET" '
'or "POST".'.format(method)
)
try:
payload['size'] = response.json()['data']['pagination']['total']
except KeyError:
payload.pop('size')
response = requests.get(url, params=payload)
if typ.lower() == 'json':
return response.json()
else:
warnings.warn(
'Fail to get a table of results. JSON returned. '
'Please check the result carefully.',
stacklevel=2,
)
return response.json()
if method.upper() == 'POST':
response = requests.post(url, data=payload)
else:
response = requests.get(url, params=payload)
if response.status_code == 200:
results = response.json()['data']['hits']
if typ.lower() == 'json':
return results
try:
return pd.io.json.json_normalize(reduce_json_array(results))
except Exception:
warnings.warn(
'Fail to convert searching results into table. '
'JSON will be returned.',
stacklevel=2,
)
return results
else:
warnings.warn(
'Searching failed with HTTP status code: '
'{}'.format(response.status_code),
stacklevel=2,
)
return None
def get_ext(file_name):
"""Get all extensions supported by this module in the file name.
Supported extensions are defined in the constant "_SUPPORTED_FILE_TYPES".
Multiple extensions will be separated by ".".
Args:
file_name (str): The filename will be split by "." and checked from
left to right. Extensions will be kept starting from the first
(and left most) supported extension.
Returns:
str: A string of extensions joint by ".".
"""
# https://github.com/broadinstitute/gdctools/blob/master/gdctools/lib/meta.py
name_list = file_name.split('.')
for i in range(len(name_list)):
if name_list[i] in _SUPPORTED_FILE_TYPES:
break
return '.'.join(name_list[i:])
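# Illustrative examples (hypothetical filenames, added for clarity): given
# the supported types above, get_ext('sample.htseq.counts.txt.gz') returns
# 'txt.gz', while get_ext('org_clinical.TCGA-01-0001.xml') returns 'xml'.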
def download(uuids, download_dir='.', chunk_size=4096):
"""Download GDC's open access data according to UUID(s).
Args:
uuids (str, list or dict): A single UUID (str), a list of UUIDs (list)
or a dict whose keys are UUIDs for target file(s). If "uuids" is
str or list, downloaded file(s) will be renamed to
"UUID.extension" where "extension" is extracted by "get_ext()"
from the original filename. Renamed file(s) will be saved at
"download_dir". If "uuids" is a dict, the argument "download_dir"
will be ignored; values of dict will be paths for saving
corresponding downloaded files.
download_dir (str, optional): The directory for saving downloaded
file(s) when "uuids" is str or list. It will be ignored if "uuids"
is dict. Defaults to ".".
        chunk_size (int, optional): The chunk size is the number of bytes to
            read into memory at a time when the response is fetched with
            "stream=True". Check the documentation of the "requests" module
            for details. Defaults to 4096.
Returns:
list: a list of paths for downloaded files.
"""
if isinstance(uuids, str):
uuids = {uuids: None}
elif isinstance(uuids, list):
uuids = {uuid: None for uuid in uuids}
elif not isinstance(uuids, dict):
raise TypeError(
'uuids is a {}; it should be a string, a list or a '
'dict'.format(type(uuids))
)
total = len(uuids)
count = 0
download_list = []
data_endpt = '{}/data/'.format(GDC_API_BASE)
for uuid in uuids:
count += 1
response = requests.get(data_endpt + uuid, stream=True)
if response.status_code == 200:
file_size = int(response.headers['Content-Length'])
if uuids[uuid] is None:
content_disp = response.headers['Content-Disposition']
ori_name = content_disp[content_disp.find('filename=') + 9 :] # noqa: E203, E501
new_filename = uuid + '.' + get_ext(ori_name)
path = os.path.join(
os.path.abspath(download_dir), new_filename
)
else:
path = os.path.abspath(uuids[uuid])
status = '\r[{:d}/{:d}] Download to "{}": {:4.0%}'
mkdir_p(os.path.dirname(path))
with open(path, 'wb') as f:
downloaded = 0
print(status.format(count, total, path, 0), end='')
sys.stdout.flush()
for chunk in response.iter_content(chunk_size):
f.write(chunk)
downloaded = downloaded + chunk_size
print(
status.format(
count, total, path, min(1, downloaded / file_size)
),
end='',
)
sys.stdout.flush()
download_list.append(path)
else:
            print('\rFailed to download file {}.'.format(uuid))
print('')
return download_list
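# Example (illustrative; the UUIDs are made-up placeholders):
#     download(['<uuid-1>', '<uuid-2>'], download_dir='gdc_data')
# saves the files as gdc_data/<uuid>.<ext> and returns their paths.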
def get_project_info(projects=None):
"""Get info for project(s) of interest through GDC API.
Args:
projects (list or str): one (str) or a list of GDC "project_id"(s),
whose info will be returned. If None, projects will not be
filtered, i.e. info for all GDC projects will be returned.
Defaults to None.
Returns:
pandas.core.frame.DataFrame: A DataFrame of project info including
"project ID", "project name", "primary site" and "program name".
"""
in_filter = {}
if projects is not None:
if isinstance(projects, list):
in_filter = {'projects.project_id': projects}
else:
in_filter = {'projects.project_id': [projects]}
project_df = search(
'projects',
in_filter=in_filter,
fields=['name', 'primary_site', 'project_id', 'program.name'],
)
return project_df.set_index('id')
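# Example (illustrative): get_project_info(['TCGA-BRCA']) returns a one-row
# DataFrame indexed by the GDC "id" field, with columns such as "name",
# "primary_site", "project_id" and "program.name".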
def get_samples_clinical(projects=None):
"""Get info for all samples of ``projects`` and clinical info for all
cases of ``projects`` through GDC API.
Args:
projects (list or str): one (str) or a list of GDC "project_id"(s),
whose info will be returned. If None, projects will not be
filtered, i.e. info for all GDC projects will be returned.
Defaults to None.
Returns:
pandas.core.frame.DataFrame: A DataFrame organized by samples, having
info for all samples of ``projects``, as well as corresponding
clinical info.
"""
in_filter = {}
if projects is not None:
if isinstance(projects, list):
in_filter = {'project.project_id': projects}
else:
in_filter = {'project.project_id': [projects]}
fields = [
'case_id',
'created_datetime',
'disease_type',
'id',
'primary_site',
'state',
'submitter_id',
'updated_datetime',
]
expand = [
'demographic',
'diagnoses',
'exposures',
'family_histories',
'project',
'samples',
'tissue_source_site',
]
res = search(
'cases', in_filter=in_filter, fields=fields, expand=expand, typ='json'
)
to_drops = set()
for ele in res:
to_drops |= set(get_to_drops(ele))
print("Dropping columns {} for {} projects".format(to_drops, projects))
reduced_no_samples_json = reduce_json_array(
[{k: v for k, v in d.items() if k != 'samples'} for d in res]
)
cases_df = pd.io.json.json_normalize(reduced_no_samples_json)
# In the list of reduced json, "samples" fields for each case are not
# consistently ``list`` (if there is only 1 sample for the case, it will
# be reduced into "naked" ``dict``). Therefore, it cannot be normalized
    # correctly with ``record_path`` "samples". Use the raw json instead.
    # Besides, there are cases (34 as of 12/11/2017) which don't have any
    # samples and thus don't have the key "samples". Ignore them.
# for r in res:
# r.setdefault('samples', [{}])
# samples_json.append(r)
samples_df = pd.io.json.json_normalize(
[r for r in res if 'samples' in r],
'samples',
'id',
record_prefix='samples.',
)
merged_df = pd.merge(cases_df, samples_df, how='inner', on='id')
merged_df.drop(list(to_drops), axis=1, inplace=True)
return merged_df
def gdc_check_new(new_file_uuids):
"""
    This function helps check a list of GDC's updated files and summarize
impacted project(s), data_type(s) and analysis.workflow_type(s).
"""
df_list = []
for uuids in (
new_file_uuids[i : i + 20000] # noqa: E203
for i in range(0, len(new_file_uuids), 20000)
):
df = search(
'files',
in_filter={'access': 'open', 'file_id': uuids},
fields=[
'cases.project.project_id',
'data_type',
'analysis.workflow_type',
],
method='POST',
)
try:
df['cases'] = df['cases'].map(
lambda c: ', '.join({p['project']['project_id'] for p in c})
)
except: # noqa: E722
pass
df_list.append(df)
df = pd.concat(df_list, axis=0)
try:
df = df.drop('id', axis=1)
except KeyError:
pass
try:
df = df.drop_duplicates()
except: # noqa: E722
pass
df.to_csv(sys.stdout, sep='\t', index=False)
def map_two_fields(endpoint, input_field, output_field, input_values=[]):
"""This function helps map values from ``input_field`` of certain
``endpoint`` to values from ``output_field`` of the same ``endpoint``. It
returns a dict whose keys are values from ``input_field`` of ``endpoint``
and values are values from ``output_field`` of ``endpoint``. It can also
accept a list of values from ``input_field`` to filter the return dict.
Args:
endpoint (str): One string of GDC API supported endpoint. This
function only does mapping for two fields from the same endpoint.
For available endpoints, see:
https://docs.gdc.cancer.gov/API/Users_Guide/Getting_Started/#api-endpoints
input_field (str): One valid field of the ``endpoint``. Values from
this field will be used as keys of the return dict.
``input_values``, if provided, are values on this field.
        output_field (str): One valid field of the ``endpoint``. Values from
            this field will be used as values of the return dict.
        input_values (list, optional): query values on ``input_field`` which
            need to be mapped. It helps limit/filter the return.
Returns:
dict: A dict whose keys are ``input_values`` if it's not empty or all
possible values from ``input_field`` of ``endpoint``. Values of return
dict are values from ``output_field`` of ``endpoint``.
"""
raw_results = search(
endpoint=endpoint,
in_filter={input_field: input_values} if input_values else {},
fields=[input_field, output_field],
typ="json",
method='POST',
)
# Split input_field and output_field into shared_path, input_specific_path
# and output_specific_path
input_keys = input_field.split('.')
output_keys = output_field.split('.')
for i in range(min([len(input_keys), len(output_keys)])):
if input_keys[i] != output_keys[i]:
break
shared_path = '.'.join(input_keys[:i])
input_sub_path = '.'.join(input_keys[i:])
output_sub_path = '.'.join(output_keys[i:])
# Get the list of dicts by shared_path
if shared_path:
shared_objs = get_json_objects(raw_results, shared_path)
else:
if isinstance(raw_results, list):
shared_objs = raw_results
else:
shared_objs = [raw_results]
while shared_objs and isinstance(shared_objs[0], list):
shared_objs = [obj for objs in shared_objs for obj in objs]
# For shared_objects, get the list of values by input_specific_path and
# output_specific_path
    # Use "mapping" rather than "map" to avoid shadowing the built-in.
    mapping = {}
    for shared_obj in shared_objs:
        input_found = get_json_objects(shared_obj, input_sub_path)
        output_found = get_json_objects(shared_obj, output_sub_path)
        for v in input_found:
            if input_values and v not in input_values:
                continue
            while output_found and isinstance(output_found[0], list):
                output_found = [obj for objs in output_found for obj in objs]
            if v in mapping:
                mapping[v] |= set(output_found)
            else:
                mapping[v] = set(output_found)
    # Fill in failed input_values
    for v in input_values:
        if v not in mapping:
            mapping[v] = set()
    return {k: list(mapping[k]) for k in mapping}
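# Example (illustrative; the UUID is a made-up placeholder):
#     map_two_fields('files', 'file_id', 'file_name', ['<some-uuid>'])
# returns {'<some-uuid>': ['<matching file name(s)>']}.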
|
[
"os.path.abspath",
"pandas.merge",
"pandas.io.json.json_normalize",
"os.path.dirname",
"json.dumps",
"warnings.warn",
"sys.stdout.flush",
"requests.get",
"requests.post",
"pandas.concat"
] |
[((16041, 16091), 'pandas.io.json.json_normalize', 'pd.io.json.json_normalize', (['reduced_no_samples_json'], {}), '(reduced_no_samples_json)\n', (16066, 16091), True, 'import pandas as pd\n'), ((16654, 16762), 'pandas.io.json.json_normalize', 'pd.io.json.json_normalize', (["[r for r in res if 'samples' in r]", '"""samples"""', '"""id"""'], {'record_prefix': '"""samples."""'}), "([r for r in res if 'samples' in r], 'samples',\n 'id', record_prefix='samples.')\n", (16679, 16762), True, 'import pandas as pd\n'), ((16814, 16866), 'pandas.merge', 'pd.merge', (['cases_df', 'samples_df'], {'how': '"""inner"""', 'on': '"""id"""'}), "(cases_df, samples_df, how='inner', on='id')\n", (16822, 16866), True, 'import pandas as pd\n'), ((17810, 17836), 'pandas.concat', 'pd.concat', (['df_list'], {'axis': '(0)'}), '(df_list, axis=0)\n', (17819, 17836), True, 'import pandas as pd\n'), ((7754, 7773), 'json.dumps', 'json.dumps', (['filters'], {}), '(filters)\n', (7764, 7773), False, 'import json\n'), ((7995, 8027), 'requests.post', 'requests.post', (['url'], {'data': 'payload'}), '(url, data=payload)\n', (8008, 8027), False, 'import requests\n'), ((8810, 8842), 'requests.post', 'requests.post', (['url'], {'data': 'payload'}), '(url, data=payload)\n', (8823, 8842), False, 'import requests\n'), ((8872, 8905), 'requests.get', 'requests.get', (['url'], {'params': 'payload'}), '(url, params=payload)\n', (8884, 8905), False, 'import requests\n'), ((12029, 12073), 'requests.get', 'requests.get', (['(data_endpt + uuid)'], {'stream': '(True)'}), '(data_endpt + uuid, stream=True)\n', (12041, 12073), False, 'import requests\n'), ((8081, 8114), 'requests.get', 'requests.get', (['url'], {'params': 'payload'}), '(url, params=payload)\n', (8093, 8114), False, 'import requests\n'), ((8415, 8448), 'requests.get', 'requests.get', (['url'], {'params': 'payload'}), '(url, params=payload)\n', (8427, 8448), False, 'import requests\n'), ((8544, 8666), 'warnings.warn', 'warnings.warn', (['"""Fail to get a table of results. JSON returned. Please check the result carefully."""'], {'stacklevel': '(2)'}), "(\n 'Fail to get a table of results. JSON returned. Please check the result carefully.'\n , stacklevel=2)\n", (8557, 8666), False, 'import warnings\n'), ((9177, 9285), 'warnings.warn', 'warnings.warn', (['"""Fail to convert searching results into table. JSON will be returned."""'], {'stacklevel': '(2)'}), "(\n 'Fail to convert searching results into table. JSON will be returned.',\n stacklevel=2)\n", (9190, 9285), False, 'import warnings\n'), ((12605, 12633), 'os.path.abspath', 'os.path.abspath', (['uuids[uuid]'], {}), '(uuids[uuid])\n', (12620, 12633), False, 'import os\n'), ((12717, 12738), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (12732, 12738), False, 'import os\n'), ((12895, 12913), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (12911, 12913), False, 'import sys\n'), ((12502, 12531), 'os.path.abspath', 'os.path.abspath', (['download_dir'], {}), '(download_dir)\n', (12517, 12531), False, 'import os\n'), ((13316, 13334), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (13332, 13334), False, 'import sys\n')]
|
#!/usr/bin/python3
import defusedxml.ElementTree as ET
import urllib.request
import urllib.parse
import sys
import ssl
__author__ = '<NAME> <<EMAIL>>'
class NexposeException(Exception):
'''Raise this exception when the Nexpose API returns errors.'''
pass
class Nexpose:
'''
Nexpose API wrapper.
'''
def __init__(self, hostname, port):
self.hostname = hostname
self.port = port
self.url = 'https://%s:%s/api/1.1/xml' % (self.hostname, self.port)
self.session_id = None
# Often the Nexpose Console is run with a self-signed cert.
# We allow for that here.
self.ctx = ssl.create_default_context()
self.ctx.check_hostname = False
self.ctx.verify_mode = ssl.CERT_NONE
def api_request(self, xml_string):
'''Send an API request and return the response\'s root XML element.'''
# Encode the xml so that urllib will accept it.
post_data = (xml_string).encode('utf-8')
# Prepare the request.
request = urllib.request.Request(self.url)
request.add_header("Content-type", "text/xml")
# Get a response.
response = urllib.request.urlopen(request,
post_data,
context=self.ctx).read()
xml_response = ET.fromstring(response)
# Check for errors and return response.
        # The API sets success="0" on failure; anything else counts as success.
        if xml_response.attrib.get('success') != '0':
return xml_response
else:
raise NexposeException(response)
def login(self, username, password):
'''Send a LoginRequest and capture the returned session-id.'''
xml_string = '<LoginRequest user-id=\"%s\" password=\"%s\" />'\
% (username, password)
xml_response = self.api_request(xml_string)
self.session_id = xml_response.attrib.get('session-id')
return xml_response
def logout(self):
'''Send a LogoutRequest.'''
xml_string = "<LogoutRequest session-id=\"%s\" />" % (self.session_id)
xml_response = self.api_request(xml_string)
return xml_response
def get_sites(self):
'''Return a list of dicts containing site information.'''
xml_string = '<SiteListingRequest session-id=\"%s\">\
</SiteListingRequest>' % self.session_id
xml_response = self.api_request(xml_string)
site_list = []
for SiteSummary in xml_response.iter('SiteSummary'):
site = {}
site['id'] = SiteSummary.get('id')
site['name'] = SiteSummary.get('name')
site['description'] = SiteSummary.get('description')
site['riskfactor'] = SiteSummary.get('riskfactor')
site['riskscore'] = SiteSummary.get('riskscore')
site_list.append(site)
return site_list
def get_site_hosts(self, site_id):
'''Return list of ranges and hostnames associated with a site.'''
xml_string = '<SiteConfigRequest session-id=\"%s\" site-id=\"%s\">\
</SiteConfigRequest>' % (self.session_id, site_id)
xml_response = self.api_request(xml_string)
host_list = []
site = xml_response.find('Site')
hosts = site.find('Hosts')
        # Element.getchildren() was removed in Python 3.9; iterate directly.
        for host in hosts:
if host.tag == 'range':
if host.attrib.get('to') is None:
host_list.append({'range' : host.attrib.get('from')})
else:
host_list.append({'range' : ('%s-%s' % \
(host.attrib.get('from'), host.attrib.get('to')))})
elif host.tag == 'host':
host_list.append({'host' : host.text})
return host_list
def get_site_scan_config(self, site_id):
'''Return a dict of configuration info for a site.'''
xml_string = '<SiteConfigRequest session-id=\"%s\" site-id=\"%s\">\
</SiteConfigRequest>' % (self.session_id, site_id)
xml_response = self.api_request(xml_string)
site = xml_response.find('Site')
scan_config = site.find('ScanConfig')
config = {}
config['template_id'] = scan_config.attrib.get('templateID')
config['name'] = scan_config.attrib.get('name')
config['id'] = scan_config.attrib.get('configID')
config['engine_id'] = scan_config.attrib.get('engineID')
config['config_version'] = scan_config.attrib.get('configVersion')
return config
def get_scan_summary_attributes(self, scan_id, engine_id):
'''
Send a ScanStatisticsRequest and return the ScanSummary
attributes as a dict.
'''
xml_string = '<ScanStatisticsRequest session-id = \"%s\" \
engine-id = \"%s\" scan-id = \"%s\">\
</ScanStatisticsRequest>' % \
(self.session_id, engine_id, scan_id)
xml_response = self.api_request(xml_string)
scan_summary = xml_response.find('ScanSummary')
scan_summary_attributes = {}
for key in scan_summary.attrib:
scan_summary_attributes[key] = scan_summary.attrib[key]
return scan_summary_attributes
def scan_site(self, site_id):
'''Send SiteScanRequest and return dict of scan id and engine id.'''
xml_string = '<SiteScanRequest session-id = \"%s\" site-id=\"%s\">\
</SiteScanRequest>' % (self.session_id, site_id)
xml_response = self.api_request(xml_string)
scan = xml_response.find('Scan')
scan_id = scan.attrib.get('scan-id')
engine_id = scan.attrib.get('engine-id')
return {'scan_id' : scan_id, 'engine_id' : engine_id}
def get_site_devices(self, site_id):
'''Return a list of devices in a site.'''
xml_string = '<SiteDeviceListingRequest session-id = \"%s\" \
site-id = \"%s\"></SiteDeviceListingRequest>' % \
(self.session_id, site_id)
xml_response = self.api_request(xml_string)
print(ET.tostring(xml_response, encoding='ascii', method='xml'))
def scan_site_hosts(self, site_id, host_list):
'''
Send SiteDevicesScanRequest and return dict of scan id and engine
id. host_list is a list of ranges or hostnames as get_site_hosts()
would return.
'''
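        # Illustrative host_list shape (hypothetical values), matching what
        # get_site_hosts() returns:
        #     [{'range': '10.0.0.1-10.0.0.254'}, {'host': 'scanner.example.com'}]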
hosts_string = ''
for host in host_list:
ip_range = host.get('range')
if ip_range is not None:
split_ip_range = ip_range.split('-')
if len(split_ip_range) == 1:
hosts_string += ('<range from=\"%s\"/>' % \
str(split_ip_range[0]))
elif len(split_ip_range) == 2:
hosts_string += ('<range from=\"%s\" to=\"%s\"/>' % \
(split_ip_range[0],
split_ip_range[1]))
else:
                    raise ValueError('Invalid IP range: %s' % ip_range)
else:
hostname = host.get('host')
hostname = hostname.replace("'","")
hosts_string += ('<host>%s</host>' % hostname)
xml_string = '<SiteDevicesScanRequest session-id=\"%s\" \
site-id=\"%s\"><Devices></Devices><Hosts>%s</Hosts>\
</SiteDevicesScanRequest>' % (self.session_id,
site_id,
hosts_string)
xml_response = self.api_request(xml_string)
scan = xml_response.find('Scan')
scan_id = scan.attrib.get('scan-id')
engine_id = scan.attrib.get('engine-id')
return {'scan_id': scan_id, 'engine_id' : engine_id}
if __name__ == '__main__':
# Usage: ./nexpose.py hostname port username password
try:
nexpose = Nexpose(sys.argv[1], sys.argv[2])
nexpose.login(sys.argv[3], sys.argv[4])
print(nexpose.get_site_scan_config('1'))
except urllib.error.URLError as e:
print("URLError: Perhaps you entered the wrong URL or port? %s" % e)
exit()
    try:
        nexpose.logout()
    except Exception:
        print('Tried to log out when we weren\'t signed in.')
|
[
"defusedxml.ElementTree.tostring",
"ssl.create_default_context",
"defusedxml.ElementTree.fromstring"
] |
[((590, 618), 'ssl.create_default_context', 'ssl.create_default_context', ([], {}), '()\n', (616, 618), False, 'import ssl\n'), ((1156, 1179), 'defusedxml.ElementTree.fromstring', 'ET.fromstring', (['response'], {}), '(response)\n', (1169, 1179), True, 'import defusedxml.ElementTree as ET\n'), ((5187, 5244), 'defusedxml.ElementTree.tostring', 'ET.tostring', (['xml_response'], {'encoding': '"""ascii"""', 'method': '"""xml"""'}), "(xml_response, encoding='ascii', method='xml')\n", (5198, 5244), True, 'import defusedxml.ElementTree as ET\n')]
|
import re, collections
def count_valid(passwords, valid):
count = 0
    pattern = re.compile(r"(\d+)-(\d+) (.): (.+)")
    for p in passwords:
        n1, n2, c, password = pattern.match(p).groups()
if valid(int(n1), int(n2), c, password):
count += 1
return count
def policy1(lo, hi, c, password):
letterfreq = collections.Counter(password)
return lo <= letterfreq[c] <= hi
def policy2(p1, p2, c, password):
return (password[p1-1] == c) ^ (password[p2-1] == c)
with open("day02.txt", "r") as fh:
passwords = [line.strip() for line in fh.readlines()]
print("2020 day 02 part 1: %d" % count_valid(passwords, policy1))
print("2020 day 02 part 2: %d" % count_valid(passwords, policy2))
|
[
"collections.Counter",
"re.compile"
] |
[((81, 118), 're.compile', 're.compile', (['"""(\\\\d+)-(\\\\d+) (.): (.+)"""'], {}), "('(\\\\d+)-(\\\\d+) (.): (.+)')\n", (91, 118), False, 'import re, collections\n'), ((333, 362), 'collections.Counter', 'collections.Counter', (['password'], {}), '(password)\n', (352, 362), False, 'import re, collections\n')]
|
"""
Basic extension tests.
The tests are structured as .txt files, parsed and executed here.
The structure of the file is::
    expected autolinked snippets, one per line
# split
lines to add to the default conf.py
# split
    index.rst content
"""
import re
import sys
import pytest
from pathlib import Path
from bs4 import BeautifulSoup
from sphinx.cmd.build import main as sphinx_main
# Insert test package root to path for all tests
sys.path.insert(0, str(Path(__file__).parent / "src"))
default_conf = """
extensions = [
"sphinx.ext.autodoc",
"sphinx_codeautolink",
]
autodoc_default_options = {
"members": True,
"undoc-members": True,
}
"""
txt_tests = list(Path(__file__).parent.glob('*.txt'))
any_whitespace = re.compile(r'\s*')
xfails = {
'ref_fluent_attrs.txt': sys.version_info < (3, 8),
'ref_fluent_call.txt': sys.version_info < (3, 8),
'ref_import_from_complex.txt': sys.version_info < (3, 8),
}
@pytest.mark.parametrize('file', txt_tests)
def test_extension(file: Path, tmp_path: Path):
if xfails.get(file.name, False):
pytest.xfail('Expected to fail.')
links, conf, index = file.read_text('utf-8').split('# split')
links = links.strip().split('\n')
if len(links) == 1 and not links[0]:
links = []
src_dir = tmp_path / 'src'
src_dir.mkdir()
(src_dir / 'conf.py').write_text(default_conf + conf, 'utf-8')
(src_dir / 'index.rst').write_text(index, 'utf-8')
build_dir = tmp_path / 'build'
sphinx_main(['-M', 'html', str(src_dir), str(build_dir)])
index_html = build_dir / 'html' / 'index.html'
text = index_html.read_text('utf-8')
soup = BeautifulSoup(text, 'html.parser')
blocks = list(soup.find_all('a', attrs={'class': 'sphinx-codeautolink-a'}))
assert len(blocks) == len(links)
for block, link in zip(blocks, links):
assert any_whitespace.sub('', ''.join(block.strings)) == link
|
[
"pytest.xfail",
"pathlib.Path",
"bs4.BeautifulSoup",
"pytest.mark.parametrize",
"re.compile"
] |
[((735, 753), 're.compile', 're.compile', (['"""\\\\s*"""'], {}), "('\\\\s*')\n", (745, 753), False, 'import re\n'), ((941, 983), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""file"""', 'txt_tests'], {}), "('file', txt_tests)\n", (964, 983), False, 'import pytest\n'), ((1652, 1686), 'bs4.BeautifulSoup', 'BeautifulSoup', (['text', '"""html.parser"""'], {}), "(text, 'html.parser')\n", (1665, 1686), False, 'from bs4 import BeautifulSoup\n'), ((1077, 1110), 'pytest.xfail', 'pytest.xfail', (['"""Expected to fail."""'], {}), "('Expected to fail.')\n", (1089, 1110), False, 'import pytest\n'), ((458, 472), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (462, 472), False, 'from pathlib import Path\n'), ((681, 695), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (685, 695), False, 'from pathlib import Path\n')]
|