repo_name | repo_path | repo_head_hexsha | content | apis
---|---|---|---|---|
tpudlik/RaspAC | website/raspac.py | e0a01a8b9123e74f6e4fb53f084e4ddf3ea24677 | import sqlite3
import subprocess, datetime
from flask import Flask, request, session, g, redirect, url_for, \
abort, render_template, flash
from contextlib import closing
from tquery import get_latest_record
from config import *
app = Flask(__name__)
app.config.from_object(__name__)
# DB helper functions
def connect_db():
return sqlite3.connect(app.config['DATABASE'])
def init_db():
"""Initializes the sqlite3 database. This function must be imported and
executed from the Python interpreter before the application is first run."""
with closing(connect_db()) as db:
with app.open_resource('schema.sql', mode='r') as f:
db.cursor().executescript(f.read())
db.commit()
# Auto-open and close DB when serving requests
@app.before_request
def before_request():
g.db = connect_db()
@app.teardown_request
def teardown_request(exception):
db = getattr(g, 'db', None)
if db is not None:
db.close()
@app.route('/', methods=['GET', 'POST'])
def welcome_page():
if 'username' in session and session['username']:
return redirect(url_for('submit_page'))
error = None
if request.method == 'POST': # someone's logging in
if not request.form['username'] in app.config['USERNAMES']:
error = 'username'
elif request.form['password'] != app.config['PASSWORD']:
error = 'password'
else: # successful login
session['username'] = request.form['username']
flash('Hi ' + session['username'] + '!')
return redirect(url_for('submit_page'))
return render_template('welcome_page.html', commands=command_history(),
error=error, last_record=last_record())
@app.route('/submit', methods=['GET', 'POST'])
def submit_page():
error = None
if not session.get('username'):
abort(401)
if request.method == 'POST': # command is being issued to AC
user_mode = request.form['mode']
user_temperature = request.form['temperature']
validation_codes = validate_AC_command(user_mode, user_temperature)
if (validation_codes['mode_error'] or
validation_codes['temperature_error']):
error=validation_codes
else:
subprocess.call(['/usr/bin/irsend','SEND_ONCE', 'lgac',
validation_codes['command']])
g.db.execute('insert into commands (command, ts, user) values (?, ?, ?)',
[validation_codes['command'],
datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
session['username']])
g.db.commit()
flash('Command submitted')
return render_template('submit_page.html', commands=command_history(),
error=error, last_record=last_record())
@app.route('/logout')
def logout():
session.pop('logged_in', None)
flash('You were logged out')
return redirect(url_for('welcome_page'))
def validate_AC_command(user_mode, user_temperature):
"""Validates and sanitizes user-input command; translates command
into irsend call."""
codes = dict()
if user_mode not in app.config['ACMODES']:
codes['mode_error'] = True
else:
codes['mode_error'] = False
    if user_mode != 'off' and user_temperature not in app.config['ACTEMPERATURES']:
codes['temperature_error'] = True
else:
codes['temperature_error'] = False
if not codes['mode_error'] and not codes['temperature_error']:
codes['mode'] = user_mode
codes['temperature'] = user_temperature
if codes['mode'] == 'off':
command_postfix = 'off'
elif codes['mode'] == 'heat':
command_postfix = 'heat' + codes['temperature']
else:
command_postfix = codes['temperature']
codes['command'] = command_postfix
return codes
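# Illustrative result (assuming 'heat' is in ACMODES and '72' is in
# ACTEMPERATURES): validate_AC_command('heat', '72') returns
# {'mode_error': False, 'temperature_error': False, 'mode': 'heat',
#  'temperature': '72', 'command': 'heat72'}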
def command_history():
"""Returns a list of dictionaries, each containing a command issued
to the AC previously. The list is ordered chronologically, from newest
to oldest."""
cur = g.db.execute('select command, ts, user from commands order by id desc')
command_history = []
for row in cur.fetchall():
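        # Stored commands are 'heat<NN>' (heat to NN degrees), 'off', or a
        # bare temperature string (cool mode); see validate_AC_command above.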
if row[0][0] == 'h':
cmd = 'heat to ' + row[0][4:]
elif row[0] == 'off':
cmd = 'off'
else:
cmd = 'cool to ' + row[0]
command_history.append(dict(command=cmd, ts=row[1], user=row[2]))
return command_history
def last_record():
"""Returns the last temperature and humidity record data.
The returned object is a dict with keys ts, fahrenheit, celsius and
humidity.
"""
db_record = get_latest_record()
out_record = dict()
out_record['date'] = db_record[0].strftime("%Y-%m-%d")
out_record['time'] = db_record[0].strftime("%H:%M")
out_record['celsius'] = db_record[1]
out_record['fahrenheit'] = int(round(out_record['celsius']*9/5.0 + 32))
out_record['humidity'] = int(round(db_record[2]))
return out_record
if __name__ == '__main__':
app.run(host='0.0.0.0')
| [((272, 287), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (277, 287), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash\n'), ((373, 412), 'sqlite3.connect', 'sqlite3.connect', (["app.config['DATABASE']"], {}), "(app.config['DATABASE'])\n", (388, 412), False, 'import sqlite3\n'), ((2950, 2980), 'flask.session.pop', 'session.pop', (['"""logged_in"""', 'None'], {}), "('logged_in', None)\n", (2961, 2980), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash\n'), ((2985, 3013), 'flask.flash', 'flash', (['"""You were logged out"""'], {}), "('You were logged out')\n", (2990, 3013), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash\n'), ((4199, 4270), 'flask.g.db.execute', 'g.db.execute', (['"""select command, ts, user from commands order by id desc"""'], {}), "('select command, ts, user from commands order by id desc')\n", (4211, 4270), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash\n'), ((4802, 4821), 'tquery.get_latest_record', 'get_latest_record', ([], {}), '()\n', (4819, 4821), False, 'from tquery import get_latest_record\n'), ((1877, 1900), 'flask.session.get', 'session.get', (['"""username"""'], {}), "('username')\n", (1888, 1900), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash\n'), ((1910, 1920), 'flask.abort', 'abort', (['(401)'], {}), '(401)\n', (1915, 1920), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash\n'), ((3034, 3057), 'flask.url_for', 'url_for', (['"""welcome_page"""'], {}), "('welcome_page')\n", (3041, 3057), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash\n'), ((1150, 1172), 'flask.url_for', 'url_for', (['"""submit_page"""'], {}), "('submit_page')\n", (1157, 1172), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash\n'), ((2317, 2408), 'subprocess.call', 'subprocess.call', (["['/usr/bin/irsend', 'SEND_ONCE', 'lgac', validation_codes['command']]"], {}), "(['/usr/bin/irsend', 'SEND_ONCE', 'lgac', validation_codes[\n 'command']])\n", (2332, 2408), False, 'import subprocess, datetime\n'), ((2714, 2727), 'flask.g.db.commit', 'g.db.commit', ([], {}), '()\n', (2725, 2727), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash\n'), ((2740, 2766), 'flask.flash', 'flash', (['"""Command submitted"""'], {}), "('Command submitted')\n", (2745, 2766), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash\n'), ((1546, 1586), 'flask.flash', 'flash', (["('Hi ' + session['username'] + '!')"], {}), "('Hi ' + session['username'] + '!')\n", (1551, 1586), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash\n'), ((1615, 1637), 'flask.url_for', 'url_for', (['"""submit_page"""'], {}), "('submit_page')\n", (1622, 1637), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash\n'), ((2599, 2622), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2620, 2622), False, 'import subprocess, datetime\n')] |
NLESC-JCER/pyspectra | tests/util_test.py | b7ece1fff537039f3306b23e00812aa1c8ffc729 | """Helper functions to tests."""
import numpy as np
def norm(vs: np.array) -> float:
"""Compute the norm of a vector."""
return np.sqrt(np.dot(vs, vs))
def create_random_matrix(size: int) -> np.array:
"""Create a numpy random matrix."""
return np.random.normal(size=size ** 2).reshape(size, size)
def create_symmetic_matrix(size: int) -> np.array:
"""Create a numpy symmetric matrix."""
xs = create_random_matrix(size)
return xs + xs.T
def check_eigenpairs(
matrix: np.ndarray, eigenvalues: np.ndarray,
eigenvectors: np.ndarray) -> bool:
"""Check that the eigenvalue equation holds."""
for i, value in enumerate(eigenvalues):
residue = np.dot(
matrix, eigenvectors[:, i]) - value * eigenvectors[:, i]
assert norm(residue) < 1e-8
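# Usage sketch: verify numpy's dense symmetric eigensolver (size is arbitrary).
#
#     xs = create_symmetic_matrix(10)
#     es, vs = np.linalg.eigh(xs)
#     check_eigenpairs(xs, es, vs)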
| [((147, 161), 'numpy.dot', 'np.dot', (['vs', 'vs'], {}), '(vs, vs)\n', (153, 161), True, 'import numpy as np\n'), ((265, 297), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(size ** 2)'}), '(size=size ** 2)\n', (281, 297), True, 'import numpy as np\n'), ((705, 741), 'numpy.dot', 'np.dot', (['matrix', 'eigenvectors[:, (i)]'], {}), '(matrix, eigenvectors[:, (i)])\n', (711, 741), True, 'import numpy as np\n')] |
leetcode-notebook/wonz | solutions/Interview-03-shu-zu-zhong-zhong-fu-de-shu-zi-lcof/03.py | 9ffd2ce9b5f3a544ee958f5a0673215afd176c2b | from typing import List
class Solution:
def findRepeatNumber(self, nums: List[int]) -> int:
        # solution one: hash table (boolean flags for values already seen)
n = len(nums)
flag = [False for i in range(n)]
for i in range(n):
if flag[nums[i]] == False:
flag[nums[i]] = True
else:
return nums[i]
return -1
        # solution two: sort, then scan for adjacent duplicates
        # (unreachable after the return above; kept for reference)
nums.sort()
pre = nums[0]
for i in range(1, len(nums)):
if pre == nums[i]:
return nums[i]
else:
pre = nums[i]
return -1
        # solution three: in-place swapping ("two radishes, one pit", i.e. pigeonhole)
n = len(nums)
for i in range(n):
if nums[i] == i:
continue
            # a duplicate exists
elif nums[nums[i]] == nums[i]:
return nums[i]
            # swap nums[i] into its matching index
else:
nums[nums[i]], nums[i] = nums[i], nums[nums[i]]
return -1
if __name__ == "__main__":
nums = [2, 3, 1, 0, 2, 5, 3]
print(Solution().findRepeatNumber(nums)) | [] |
Charles-Peeke/gwu_nn | examples/test_network.py | 3f5e9937abf2bfb81a74a2d6f3653a661e705f67 | import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from gwu_nn.gwu_network import GWUNetwork
from gwu_nn.layers import Dense
from gwu_nn.activation_layers import Sigmoid
np.random.seed(8)
num_obs = 8000
# Create our features to draw from two distinct 2D normal distributions
x1 = np.random.multivariate_normal([0, 0], [[1, .75],[.75, 1]], num_obs)
x2 = np.random.multivariate_normal([3, 8], [[1, .25],[.25, 1]], num_obs)
# Stack our inputs into one feature space
X = np.vstack((x1, x2))
print(X.shape)
y = np.hstack((np.zeros(num_obs), np.ones(num_obs)))
print(y.shape)
# colors = ['red'] * num_obs + ['blue'] * num_obs
# plt.figure(figsize=(12,8))
# plt.scatter(X[:, 0], X[:, 1], c = colors, alpha = 0.5)
# Lets randomly split things into training and testing sets so we don't cheat
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, random_state=42)
# Create our model
network = GWUNetwork()
network.add(Dense(2, 1, True, 'sigmoid'))
network.add(Sigmoid())
#network.set_loss('mse')
network.compile('log_loss', 0.001)
network.fit(X_train, y_train, epochs=100)
from scipy.special import logit
colors = ['red'] * num_obs + ['blue'] * num_obs
plt.figure(figsize=(12, 8))
plt.scatter(X[:, 0], X[:, 1], c=colors, alpha=0.5)
# Range of our X values
start_x1 = -5
end_x1 = 7
weights = network.layers[0].weights.reshape(-1).tolist()
bias = network.layers[0].bias[0][0]
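# The plotted line is where the unit's pre-activation equals logit(0.5) (= 0):
# w0*x1 + w1*x2 + bias = logit(0.5), i.e. x2 = -(bias + w0*x1 - logit(0.5)) / w1.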
start_y = (bias + start_x1 * weights[0] - logit(0.5)) / - weights[1]
end_y = (bias + end_x1 * weights[0] - logit(0.5)) / -weights[1]
plt.plot([start_x1, end_x1], [start_y, end_y], color='grey') | [((233, 250), 'numpy.random.seed', 'np.random.seed', (['(8)'], {}), '(8)\n', (247, 250), True, 'import numpy as np\n'), ((348, 418), 'numpy.random.multivariate_normal', 'np.random.multivariate_normal', (['[0, 0]', '[[1, 0.75], [0.75, 1]]', 'num_obs'], {}), '([0, 0], [[1, 0.75], [0.75, 1]], num_obs)\n', (377, 418), True, 'import numpy as np\n'), ((422, 492), 'numpy.random.multivariate_normal', 'np.random.multivariate_normal', (['[3, 8]', '[[1, 0.25], [0.25, 1]]', 'num_obs'], {}), '([3, 8], [[1, 0.25], [0.25, 1]], num_obs)\n', (451, 492), True, 'import numpy as np\n'), ((540, 559), 'numpy.vstack', 'np.vstack', (['(x1, x2)'], {}), '((x1, x2))\n', (549, 559), True, 'import numpy as np\n'), ((908, 963), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.33)', 'random_state': '(42)'}), '(X, y, test_size=0.33, random_state=42)\n', (924, 963), False, 'from sklearn.model_selection import train_test_split\n'), ((997, 1009), 'gwu_nn.gwu_network.GWUNetwork', 'GWUNetwork', ([], {}), '()\n', (1007, 1009), False, 'from gwu_nn.gwu_network import GWUNetwork\n'), ((1275, 1302), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 8)'}), '(figsize=(12, 8))\n', (1285, 1302), True, 'import matplotlib.pyplot as plt\n'), ((1304, 1358), 'matplotlib.pyplot.scatter', 'plt.scatter', (['X[:, (0)]', 'X[:, (1)]'], {'c': 'colors', 'alpha': '(0.5)'}), '(X[:, (0)], X[:, (1)], c=colors, alpha=0.5)\n', (1315, 1358), True, 'import matplotlib.pyplot as plt\n'), ((1642, 1702), 'matplotlib.pyplot.plot', 'plt.plot', (['[start_x1, end_x1]', '[start_y, end_y]'], {'color': '"""grey"""'}), "([start_x1, end_x1], [start_y, end_y], color='grey')\n", (1650, 1702), True, 'import matplotlib.pyplot as plt\n'), ((1023, 1051), 'gwu_nn.layers.Dense', 'Dense', (['(2)', '(1)', '(True)', '"""sigmoid"""'], {}), "(2, 1, True, 'sigmoid')\n", (1028, 1051), False, 'from gwu_nn.layers import Dense\n'), ((1066, 1075), 'gwu_nn.activation_layers.Sigmoid', 'Sigmoid', ([], {}), '()\n', (1073, 1075), False, 'from gwu_nn.activation_layers import Sigmoid\n'), ((594, 611), 'numpy.zeros', 'np.zeros', (['num_obs'], {}), '(num_obs)\n', (602, 611), True, 'import numpy as np\n'), ((613, 629), 'numpy.ones', 'np.ones', (['num_obs'], {}), '(num_obs)\n', (620, 629), True, 'import numpy as np\n'), ((1549, 1559), 'scipy.special.logit', 'logit', (['(0.5)'], {}), '(0.5)\n', (1554, 1559), False, 'from scipy.special import logit\n'), ((1615, 1625), 'scipy.special.logit', 'logit', (['(0.5)'], {}), '(0.5)\n', (1620, 1625), False, 'from scipy.special import logit\n')] |
XiaoboLinlin/scattering | scattering/van_hove.py | 0173b63f3243bdbcccfa562dbf5e3714920cded2 | import itertools as it
import numpy as np
import mdtraj as md
from progressbar import ProgressBar
from scattering.utils.utils import get_dt
from scattering.utils.constants import get_form_factor
def compute_van_hove(trj, chunk_length, water=False,
r_range=(0, 1.0), bin_width=0.005, n_bins=None,
self_correlation=True, periodic=True, opt=True, partial=False):
"""Compute the partial van Hove function of a trajectory
Parameters
----------
trj : mdtraj.Trajectory
trajectory on which to compute the Van Hove function
chunk_length : int
length of time between restarting averaging
water : bool
use X-ray form factors for water that account for polarization
r_range : array-like, shape=(2,), optional, default=(0.0, 1.0)
Minimum and maximum radii.
bin_width : float, optional, default=0.005
Width of the bins in nanometers.
n_bins : int, optional, default=None
The number of bins. If specified, this will override the `bin_width`
parameter.
self_correlation : bool, default=True
Whether or not to include the self-self correlations
Returns
-------
r : numpy.ndarray
r positions generated by histogram binning
g_r_t : numpy.ndarray
Van Hove function at each time and position
"""
n_physical_atoms = len([a for a in trj.top.atoms if a.element.mass > 0])
unique_elements = list(set([a.element for a in trj.top.atoms if a.element.mass > 0]))
partial_dict = dict()
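    # Compute one partial van Hove function per unordered pair of element
    # types; they are combined below, weighted by X-ray form factors and
    # number concentrations.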
for elem1, elem2 in it.combinations_with_replacement(unique_elements[::-1], 2):
print('doing {0} and {1} ...'.format(elem1, elem2))
r, g_r_t_partial = compute_partial_van_hove(trj=trj,
chunk_length=chunk_length,
selection1='element {}'.format(elem1.symbol),
selection2='element {}'.format(elem2.symbol),
r_range=r_range,
bin_width=bin_width,
n_bins=n_bins,
self_correlation=self_correlation,
periodic=periodic,
opt=opt)
partial_dict[(elem1, elem2)] = g_r_t_partial
if partial:
return partial_dict
norm = 0
g_r_t = None
for key, val in partial_dict.items():
elem1, elem2 = key
concentration1 = trj.atom_slice(trj.top.select('element {}'.format(elem1.symbol))).n_atoms / n_physical_atoms
concentration2 = trj.atom_slice(trj.top.select('element {}'.format(elem2.symbol))).n_atoms / n_physical_atoms
form_factor1 = get_form_factor(element_name=elem1.symbol, water=water)
form_factor2 = get_form_factor(element_name=elem2.symbol, water=water)
coeff = form_factor1 * concentration1 * form_factor2 * concentration2
if g_r_t is None:
g_r_t = np.zeros_like(val)
g_r_t += val * coeff
norm += coeff
# Reshape g_r_t to better represent the discretization in both r and t
g_r_t_final = np.empty(shape=(chunk_length, len(r)))
for i in range(chunk_length):
g_r_t_final[i, :] = np.mean(g_r_t[i::chunk_length], axis=0)
g_r_t_final /= norm
t = trj.time[:chunk_length]
return r, t, g_r_t_final
def compute_partial_van_hove(trj, chunk_length=10, selection1=None, selection2=None,
r_range=(0, 1.0), bin_width=0.005, n_bins=200,
self_correlation=True, periodic=True, opt=True):
"""Compute the partial van Hove function of a trajectory
Parameters
----------
trj : mdtraj.Trajectory
trajectory on which to compute the Van Hove function
chunk_length : int
length of time between restarting averaging
selection1 : str
selection to be considered, in the style of MDTraj atom selection
selection2 : str
selection to be considered, in the style of MDTraj atom selection
r_range : array-like, shape=(2,), optional, default=(0.0, 1.0)
Minimum and maximum radii.
bin_width : float, optional, default=0.005
Width of the bins in nanometers.
n_bins : int, optional, default=None
The number of bins. If specified, this will override the `bin_width`
parameter.
self_correlation : bool, default=True
Whether or not to include the self-self correlations
Returns
-------
r : numpy.ndarray
r positions generated by histogram binning
g_r_t : numpy.ndarray
Van Hove function at each time and position
"""
unique_elements = (
set([a.element for a in trj.atom_slice(trj.top.select(selection1)).top.atoms]),
set([a.element for a in trj.atom_slice(trj.top.select(selection2)).top.atoms]),
)
if any([len(val) > 1 for val in unique_elements]):
raise UserWarning(
'Multiple elements found in a selection(s). Results may not be '
'direcitly comprable to scattering experiments.'
)
# Don't need to store it, but this serves to check that dt is constant
dt = get_dt(trj)
pairs = trj.top.select_pairs(selection1=selection1, selection2=selection2)
n_chunks = int(trj.n_frames / chunk_length)
g_r_t = None
pbar = ProgressBar()
for i in pbar(range(n_chunks)):
times = list()
for j in range(chunk_length):
times.append([chunk_length*i, chunk_length*i+j])
r, g_r_t_frame = md.compute_rdf_t(
traj=trj,
pairs=pairs,
times=times,
r_range=r_range,
bin_width=bin_width,
n_bins=n_bins,
period_length=chunk_length,
self_correlation=self_correlation,
periodic=periodic,
opt=opt,
)
if g_r_t is None:
g_r_t = np.zeros_like(g_r_t_frame)
g_r_t += g_r_t_frame
return r, g_r_t
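# Usage sketch (hypothetical file names; any MDTraj-loadable trajectory works):
#
#     import mdtraj as md
#     trj = md.load('trajectory.xtc', top='topology.pdb')
#     r, t, g_r_t = compute_van_hove(trj, chunk_length=200)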
| [((1585, 1643), 'itertools.combinations_with_replacement', 'it.combinations_with_replacement', (['unique_elements[::-1]', '(2)'], {}), '(unique_elements[::-1], 2)\n', (1617, 1643), True, 'import itertools as it\n'), ((5407, 5418), 'scattering.utils.utils.get_dt', 'get_dt', (['trj'], {}), '(trj)\n', (5413, 5418), False, 'from scattering.utils.utils import get_dt\n'), ((5577, 5590), 'progressbar.ProgressBar', 'ProgressBar', ([], {}), '()\n', (5588, 5590), False, 'from progressbar import ProgressBar\n'), ((2927, 2982), 'scattering.utils.constants.get_form_factor', 'get_form_factor', ([], {'element_name': 'elem1.symbol', 'water': 'water'}), '(element_name=elem1.symbol, water=water)\n', (2942, 2982), False, 'from scattering.utils.constants import get_form_factor\n'), ((3006, 3061), 'scattering.utils.constants.get_form_factor', 'get_form_factor', ([], {'element_name': 'elem2.symbol', 'water': 'water'}), '(element_name=elem2.symbol, water=water)\n', (3021, 3061), False, 'from scattering.utils.constants import get_form_factor\n'), ((3454, 3493), 'numpy.mean', 'np.mean', (['g_r_t[i::chunk_length]'], {'axis': '(0)'}), '(g_r_t[i::chunk_length], axis=0)\n', (3461, 3493), True, 'import numpy as np\n'), ((5775, 5979), 'mdtraj.compute_rdf_t', 'md.compute_rdf_t', ([], {'traj': 'trj', 'pairs': 'pairs', 'times': 'times', 'r_range': 'r_range', 'bin_width': 'bin_width', 'n_bins': 'n_bins', 'period_length': 'chunk_length', 'self_correlation': 'self_correlation', 'periodic': 'periodic', 'opt': 'opt'}), '(traj=trj, pairs=pairs, times=times, r_range=r_range,\n bin_width=bin_width, n_bins=n_bins, period_length=chunk_length,\n self_correlation=self_correlation, periodic=periodic, opt=opt)\n', (5791, 5979), True, 'import mdtraj as md\n'), ((3188, 3206), 'numpy.zeros_like', 'np.zeros_like', (['val'], {}), '(val)\n', (3201, 3206), True, 'import numpy as np\n'), ((6150, 6176), 'numpy.zeros_like', 'np.zeros_like', (['g_r_t_frame'], {}), '(g_r_t_frame)\n', (6163, 6176), True, 'import numpy as np\n')] |
QDucasse/nn_benchmark | nn_benchmark/networks/__init__.py | 0a32db241e75853c7d78dccf6d7b6940e5a0e4d0 | # -*- coding: utf-8 -*-
# nn_benchmark
# author - Quentin Ducasse
# https://github.com/QDucasse
# [email protected]
from __future__ import absolute_import
__all__ = ["lenet","lenet5","quant_lenet5",
"quant_cnv", "quant_tfc",
"mobilenetv1","quant_mobilenetv1",
"vggnet", "quant_vggnet",
"common", "alexnet", "quant_alexnet"]
from .alexnet import *
from .lenet import *
from .lenet5 import *
from .mobilenetv1 import *
from .quant_mobilenetv1 import *
from .quant_alexnet import *
from .quant_lenet5 import *
from .quant_cnv import *
from .quant_tfc import *
from .vggnet import *
from .quant_vggnet import *
from .common import *
| [] |
NeeharikaDva/opencv_course | Section1_Basics/contours.py | 234515ab59a1228c8dfd3c69f310dbc1d86c6089 | #pylint:disable=no-member
import cv2 as cv
import numpy as np
img = cv.imread('/Users/webileapp/Desktop/niharika_files/projects/opencv_course_master/Resources/Photos/cats.jpg')
cv.imshow('Cats', img)
#
blank = np.zeros(img.shape[:2], dtype='uint8')
cv.imshow('Blank', blank)
gray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
cv.imshow('Gray', gray)
#
blur = cv.GaussianBlur(gray, (5,5), cv.BORDER_DEFAULT)
cv.imshow('Blur', blur)
canny = cv.Canny(blur, 125, 175)
cv.imshow('Canny Edges', canny)
#
ret, thresh = cv.threshold(gray, 125, 255, cv.THRESH_BINARY)
cv.imshow('Thresh', thresh)
#
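# cv.RETR_LIST retrieves every contour without building a parent/child
# hierarchy; cv.CHAIN_APPROX_SIMPLE compresses straight segments to their
# end points.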
contours, hierarchies = cv.findContours(canny, cv.RETR_LIST, cv.CHAIN_APPROX_SIMPLE)
print(f'{len(contours)} contour(s) found!')
#
cv.drawContours(blank, contours, -1, (200,120,100), 1)
cv.imshow('Contours Drawn', blank)
cv.waitKey(0) | [((70, 188), 'cv2.imread', 'cv.imread', (['"""/Users/webileapp/Desktop/niharika_files/projects/opencv_course_master/Resources/Photos/cats.jpg"""'], {}), "(\n '/Users/webileapp/Desktop/niharika_files/projects/opencv_course_master/Resources/Photos/cats.jpg'\n )\n", (79, 188), True, 'import cv2 as cv\n'), ((179, 201), 'cv2.imshow', 'cv.imshow', (['"""Cats"""', 'img'], {}), "('Cats', img)\n", (188, 201), True, 'import cv2 as cv\n'), ((212, 250), 'numpy.zeros', 'np.zeros', (['img.shape[:2]'], {'dtype': '"""uint8"""'}), "(img.shape[:2], dtype='uint8')\n", (220, 250), True, 'import numpy as np\n'), ((251, 276), 'cv2.imshow', 'cv.imshow', (['"""Blank"""', 'blank'], {}), "('Blank', blank)\n", (260, 276), True, 'import cv2 as cv\n'), ((285, 320), 'cv2.cvtColor', 'cv.cvtColor', (['img', 'cv.COLOR_BGR2GRAY'], {}), '(img, cv.COLOR_BGR2GRAY)\n', (296, 320), True, 'import cv2 as cv\n'), ((321, 344), 'cv2.imshow', 'cv.imshow', (['"""Gray"""', 'gray'], {}), "('Gray', gray)\n", (330, 344), True, 'import cv2 as cv\n'), ((354, 402), 'cv2.GaussianBlur', 'cv.GaussianBlur', (['gray', '(5, 5)', 'cv.BORDER_DEFAULT'], {}), '(gray, (5, 5), cv.BORDER_DEFAULT)\n', (369, 402), True, 'import cv2 as cv\n'), ((402, 425), 'cv2.imshow', 'cv.imshow', (['"""Blur"""', 'blur'], {}), "('Blur', blur)\n", (411, 425), True, 'import cv2 as cv\n'), ((435, 459), 'cv2.Canny', 'cv.Canny', (['blur', '(125)', '(175)'], {}), '(blur, 125, 175)\n', (443, 459), True, 'import cv2 as cv\n'), ((460, 491), 'cv2.imshow', 'cv.imshow', (['"""Canny Edges"""', 'canny'], {}), "('Canny Edges', canny)\n", (469, 491), True, 'import cv2 as cv\n'), ((508, 554), 'cv2.threshold', 'cv.threshold', (['gray', '(125)', '(255)', 'cv.THRESH_BINARY'], {}), '(gray, 125, 255, cv.THRESH_BINARY)\n', (520, 554), True, 'import cv2 as cv\n'), ((555, 582), 'cv2.imshow', 'cv.imshow', (['"""Thresh"""', 'thresh'], {}), "('Thresh', thresh)\n", (564, 582), True, 'import cv2 as cv\n'), ((609, 669), 'cv2.findContours', 'cv.findContours', (['canny', 'cv.RETR_LIST', 'cv.CHAIN_APPROX_SIMPLE'], {}), '(canny, cv.RETR_LIST, cv.CHAIN_APPROX_SIMPLE)\n', (624, 669), True, 'import cv2 as cv\n'), ((716, 772), 'cv2.drawContours', 'cv.drawContours', (['blank', 'contours', '(-1)', '(200, 120, 100)', '(1)'], {}), '(blank, contours, -1, (200, 120, 100), 1)\n', (731, 772), True, 'import cv2 as cv\n'), ((771, 805), 'cv2.imshow', 'cv.imshow', (['"""Contours Drawn"""', 'blank'], {}), "('Contours Drawn', blank)\n", (780, 805), True, 'import cv2 as cv\n'), ((807, 820), 'cv2.waitKey', 'cv.waitKey', (['(0)'], {}), '(0)\n', (817, 820), True, 'import cv2 as cv\n')] |
RangeKing/FCOSR | mmdet/ops/fcosr_tools/__init__.py | b92f0cee2e89d6a268884bacd02fb28881cd44a4 | from . import fcosr_tools
__all__ = ['fcosr_tools'] | [] |
Jnalis/frappe-health-care | health_care/health_care/doctype/practitioner/practitioner.py | ed347c216f568cc044c1365965d35945697cf7dc | # Copyright (c) 2022, Juve and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class Practitioner(Document):
def before_save(self):
self.practitioner_full_name = f'{self.first_name} {self.second_name or ""}'
| [] |
JustasGau/DonjinKrawler | install-hooks.py | faff50dcfcebf82028c9af10434359f975247d33 | import sys
from os import path
from urllib.request import urlretrieve
from subprocess import call
def install_hooks(directory):
checkstyleUrl = 'https://github.com/checkstyle/checkstyle/releases/download/checkstyle-8.36.1/checkstyle-8.36.1-all.jar'
preCommitUrl = 'https://gist.githubusercontent.com/EdotJ/d512826d5b4fd3e6cdc285b9236511b2/raw/43e5087ed173fd03aab640b0b3db22f11319c623/pre-commit'
checkstyleName = checkstyleUrl.split('/')[len(checkstyleUrl.split('/')) - 1]
basePath = path.abspath(directory)
print("Downloading checkstyle to %s..." % basePath + "/.git/hooks/" + checkstyleName)
urlretrieve(checkstyleUrl, basePath + "/.git/hooks/" + checkstyleName)
print("Downloading pre-commit script to %s" % basePath + "/.git/hooks/pre-commit")
urlretrieve(preCommitUrl, basePath + "/.git/hooks/pre-commit")
with open(basePath + '/.git/config', 'a+') as gitConfig:
if ("[checkstyle]" not in gitConfig.read()):
print("Adding git configurations to .git/config")
gitConfig.write("[checkstyle]\n")
gitConfig.write("jar = %s\n" % (basePath + "/.git/hooks/" + checkstyleName))
gitConfig.write("checkfile = %s\n" % (basePath + "/checkstyle_config.xml"))
print("Changing permissions for pre-commit. Has to run as root, enter password plz")
call(["sudo", "chmod", "+x", (basePath + "/.git/hooks/pre-commit")])
if __name__ == "__main__":
if (len(sys.argv) < 2):
print("Enter a directory to install hooks")
else:
if (path.exists(sys.argv[1])):
install_hooks(sys.argv[1])
| [((516, 539), 'os.path.abspath', 'path.abspath', (['directory'], {}), '(directory)\n', (528, 539), False, 'from os import path\n'), ((634, 704), 'urllib.request.urlretrieve', 'urlretrieve', (['checkstyleUrl', "(basePath + '/.git/hooks/' + checkstyleName)"], {}), "(checkstyleUrl, basePath + '/.git/hooks/' + checkstyleName)\n", (645, 704), False, 'from urllib.request import urlretrieve\n'), ((796, 858), 'urllib.request.urlretrieve', 'urlretrieve', (['preCommitUrl', "(basePath + '/.git/hooks/pre-commit')"], {}), "(preCommitUrl, basePath + '/.git/hooks/pre-commit')\n", (807, 858), False, 'from urllib.request import urlretrieve\n'), ((1351, 1417), 'subprocess.call', 'call', (["['sudo', 'chmod', '+x', basePath + '/.git/hooks/pre-commit']"], {}), "(['sudo', 'chmod', '+x', basePath + '/.git/hooks/pre-commit'])\n", (1355, 1417), False, 'from subprocess import call\n'), ((1551, 1575), 'os.path.exists', 'path.exists', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (1562, 1575), False, 'from os import path\n')] |
Rockfish/PythonCourse | 09_MicroServer_Cookies/micro_server.py | 1d650e49950d1987d052028139fcdfcb0bbfcc70 | """
Micro webapp based on WebOb, Jinja2, WSGI with a simple router
"""
import os
import hmac
import hashlib
import mimetypes
from wsgiref.simple_server import WSGIServer, WSGIRequestHandler
from webob import Request
from webob import Response
from jinja2 import Environment, FileSystemLoader
class MicroServer(object):
"""Small web server."""
def __init__(self):
"""Initializes the class and configures the paths
and the Jinja2 environment so it can find and render pages."""
if self.static_root is None:
self.static_root = 'static'
if self.templates_root is None:
self.templates_root = 'templates'
if self.routes is None:
self.routes = {}
# Set up the paths and environment for Jinja. This is how it finds the templates.
self.template_path = os.path.join(os.path.dirname(__file__), self.templates_root)
self.env = Environment(autoescape=True, loader=FileSystemLoader(self.template_path))
# Figure out what directory the server is running it as save the path.
# The path will be used later to find the site's resources.
self.current_dir = os.path.dirname(os.path.realpath(__file__))
def __call__(self, environ, start_response):
"""This method is called by the HTTPServer when
there is a request to be handled."""
# Create the WebOb Request and Response objects for
# used to read the request and write the response.
self.request = Request(environ)
self.response = Response()
# Find a handler for the path if there is one.
handler = self.routes.get(self.request.path_info)
# If there is call it. If not call the static handler.
if handler:
handler()
else:
self.static()
return self.response(environ, start_response)
def static(self, resource=''):
"""Handles request for static pages. It is the default handler."""
# Build a file path using either the resource parameter or the path in the request.
if resource:
file_path = os.path.join(self.current_dir, self.static_root, resource)
else:
file_path = os.path.join(self.current_dir, self.static_root, self.request.path_info[1:])
print("File path:", file_path)
# Try to open the file. If we can then guess its type and write its
# content to the response object to send it to the client.
# If we can't find the file then return an error to the client.
try:
file_type = mimetypes.guess_type(file_path)[0]
self.response.content_type = file_type
data = open(file_path, 'rb').read()
self.response.body_file.write(data)
except Exception as e:
self.response.status = 404
self.response.write(str(e))
def render_template(self, template_name, template_values={}):
"""Renders Jinja2 templates into HTML"""
# Find the template and render it to HTML
# then write it to the response object to send it to the client.
template = self.env.get_template(template_name)
html = template.render(template_values)
self.response.write(html)
def get_signature(self, passphrase, *parts):
"""Creates a hash from strings based on a passphrase."""
cookiehash = hmac.new(passphrase.encode(), digestmod=hashlib.sha1)
for part in parts:
cookiehash.update(part.encode())
return cookiehash.hexdigest()
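    # A possible use (not exercised in this file): store 'value|signature' in a
    # cookie, with signature = self.get_signature(secret, value), then recompute
    # and compare the signature on later requests to detect tampering.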
def run(self, port):
"""Starts the HTTP server and tells it what port to listen on"""
# Create the WSGI HTTP server. Set the port it should listen on.
# And start the server.
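        # NOTE: the ``port`` argument is not used below; the server is
        # hard-coded to listen on port 8000.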
server = WSGIServer(('', 8000), WSGIRequestHandler)
server.set_app(self)
print("Serving on http://localhost:8000/ ...")
server.serve_forever()
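# Usage sketch (hypothetical subclass; __init__ above expects static_root,
# templates_root and routes attributes to be defined on the class):
#
#     class MySite(MicroServer):
#         static_root = 'static'
#         templates_root = 'templates'
#         routes = {}
#
#     MySite().run(8000)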
| [((1522, 1538), 'webob.Request', 'Request', (['environ'], {}), '(environ)\n', (1529, 1538), False, 'from webob import Request\n'), ((1563, 1573), 'webob.Response', 'Response', ([], {}), '()\n', (1571, 1573), False, 'from webob import Response\n'), ((3802, 3844), 'wsgiref.simple_server.WSGIServer', 'WSGIServer', (["('', 8000)", 'WSGIRequestHandler'], {}), "(('', 8000), WSGIRequestHandler)\n", (3812, 3844), False, 'from wsgiref.simple_server import WSGIServer, WSGIRequestHandler\n'), ((867, 892), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (882, 892), False, 'import os\n'), ((1199, 1225), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1215, 1225), False, 'import os\n'), ((2139, 2197), 'os.path.join', 'os.path.join', (['self.current_dir', 'self.static_root', 'resource'], {}), '(self.current_dir, self.static_root, resource)\n', (2151, 2197), False, 'import os\n'), ((2236, 2312), 'os.path.join', 'os.path.join', (['self.current_dir', 'self.static_root', 'self.request.path_info[1:]'], {}), '(self.current_dir, self.static_root, self.request.path_info[1:])\n', (2248, 2312), False, 'import os\n'), ((970, 1006), 'jinja2.FileSystemLoader', 'FileSystemLoader', (['self.template_path'], {}), '(self.template_path)\n', (986, 1006), False, 'from jinja2 import Environment, FileSystemLoader\n'), ((2606, 2637), 'mimetypes.guess_type', 'mimetypes.guess_type', (['file_path'], {}), '(file_path)\n', (2626, 2637), False, 'import mimetypes\n')] |
clouserw/olympia | apps/addons/management/commands/jetpackers.py | 1d5755b08a526372ec66e6bc64ab636018181969 | import logging
from django.core import mail
from django.conf import settings
from django.core.management.base import BaseCommand
import amo.utils
from users.models import UserProfile
log = logging.getLogger('z.mailer')
FROM = settings.DEFAULT_FROM_EMAIL
class Command(BaseCommand):
help = "Send the email for bug 662571"
def handle(self, *args, **options):
sendmail()
def sendmail():
addrs = set(UserProfile.objects.values_list('email', flat=True)
# whoa
.filter(addons__versions__files__jetpack_version__isnull=False))
log.info('There are %d emails to send.' % len(addrs))
count = 0
for addr in addrs:
count += 1
try:
mail.send_mail(SUBJECT, MSG, FROM, [addr])
log.info('%s. DONE: %s' % (count, addr))
except Exception, e:
log.info('%s. FAIL: %s (%s)' % (count, addr, e))
SUBJECT = 'Instructions for Automatic Upgrade to Add-on SDK 1.0'
MSG = """\
Hello Mozilla Add-ons Developer!
With the final version of the Add-on SDK only a week away, we wanted to
get in touch with all add-on developers who have existing SDK-based
(Jetpack) add-ons. We would like you to know that going forward AMO
will be auto-updating add-ons with new versions of the Add-on SDK upon
release.
To ensure that your add-on(s) are auto-updated with the 1.0 final
version of the SDK, we would ask that you download the latest release
candidate build -
https://ftp.mozilla.org/pub/mozilla.org/labs/jetpack/addon-sdk-1.0rc2.tar.gz,
https://ftp.mozilla.org/pub/mozilla.org/labs/jetpack/addon-sdk-1.0rc2.zip
- and update your add-on(s) on AMO. After the 1.0 release, we will scan
our add-ons database and automatically upgrade any SDK-based add-ons we
find that are using verions 1.0RC2 or greater to the 1.0 final version
of the SDK. Any add-ons we find using versions of the SDK below 1.0RC2
will not be auto-updated and you will need to upgrade them to the 1.0
version of the SDK manually.
Thank you for participating in the early stages of the Add-on SDK's
development. Feedback and engagement from developers like you are the
foundations for success in our open source community!
Sincerely,
The Mozilla Add-ons Team
"""
| [] |
edose/astroplan | astroplan/constraints.py | b3cf55340c50ccf69ec363889c1fe8ff2f93cada | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Specify and constraints to determine which targets are observable for
an observer.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
# Standard library
from abc import ABCMeta, abstractmethod
import datetime
import time
import warnings
# Third-party
from astropy.time import Time
import astropy.units as u
from astropy.coordinates import get_body, get_sun, get_moon, Galactic, SkyCoord
from astropy import table
import numpy as np
from numpy.lib.stride_tricks import as_strided
# Package
from .moon import moon_illumination
from .utils import time_grid_from_range
from .target import get_skycoord
__all__ = ["AltitudeConstraint", "AirmassConstraint", "AtNightConstraint",
"is_observable", "is_always_observable", "time_grid_from_range",
"GalacticLatitudeConstraint", "SunSeparationConstraint",
"MoonSeparationConstraint", "MoonIlluminationConstraint",
"LocalTimeConstraint", "PrimaryEclipseConstraint",
"SecondaryEclipseConstraint", "Constraint", "TimeConstraint",
"observability_table", "months_observable", "max_best_rescale",
"min_best_rescale", "PhaseConstraint", "is_event_observable"]
_current_year = time.localtime().tm_year # needed for backward compatibility
_current_year_time_range = Time( # needed for backward compatibility
[str(_current_year) + '-01-01',
str(_current_year) + '-12-31']
)
def _make_cache_key(times, targets):
"""
Make a unique key to reference this combination of ``times`` and ``targets``.
Often, we wish to store expensive calculations for a combination of
``targets`` and ``times`` in a cache on an ``observer``` object. This
routine will provide an appropriate, hashable, key to store these
calculations in a dictionary.
Parameters
----------
times : `~astropy.time.Time`
Array of times on which to test the constraint.
targets : `~astropy.coordinates.SkyCoord`
Target or list of targets.
Returns
-------
cache_key : tuple
A hashable tuple for use as a cache key
"""
# make a tuple from times
try:
timekey = tuple(times.jd) + times.shape
except BaseException: # must be scalar
timekey = (times.jd,)
# make hashable thing from targets coords
try:
if hasattr(targets, 'frame'):
# treat as a SkyCoord object. Accessing the longitude
# attribute of the frame data should be unique and is
# quicker than accessing the ra attribute.
targkey = tuple(targets.frame.data.lon.value.ravel()) + targets.shape
else:
# assume targets is a string.
targkey = (targets,)
except BaseException:
targkey = (targets.frame.data.lon,)
return timekey + targkey
def _get_altaz(times, observer, targets, force_zero_pressure=False):
"""
Calculate alt/az for ``target`` at times linearly spaced between
the two times in ``time_range`` with grid spacing ``time_resolution``
for ``observer``.
Cache the result on the ``observer`` object.
Parameters
----------
times : `~astropy.time.Time`
Array of times on which to test the constraint.
targets : {list, `~astropy.coordinates.SkyCoord`, `~astroplan.FixedTarget`}
Target or list of targets.
observer : `~astroplan.Observer`
The observer who has constraints ``constraints``.
force_zero_pressure : bool
Forcefully use 0 pressure.
Returns
-------
altaz_dict : dict
Dictionary containing two key-value pairs. (1) 'times' contains the
times for the alt/az computations, (2) 'altaz' contains the
corresponding alt/az coordinates at those times.
"""
if not hasattr(observer, '_altaz_cache'):
observer._altaz_cache = {}
# convert times, targets to tuple for hashing
aakey = _make_cache_key(times, targets)
if aakey not in observer._altaz_cache:
try:
if force_zero_pressure:
observer_old_pressure = observer.pressure
observer.pressure = 0
altaz = observer.altaz(times, targets, grid_times_targets=False)
observer._altaz_cache[aakey] = dict(times=times,
altaz=altaz)
finally:
if force_zero_pressure:
observer.pressure = observer_old_pressure
return observer._altaz_cache[aakey]
def _get_moon_data(times, observer, force_zero_pressure=False):
"""
Calculate moon altitude az and illumination for an array of times for
``observer``.
Cache the result on the ``observer`` object.
Parameters
----------
times : `~astropy.time.Time`
Array of times on which to test the constraint.
observer : `~astroplan.Observer`
The observer who has constraints ``constraints``.
force_zero_pressure : bool
Forcefully use 0 pressure.
Returns
-------
moon_dict : dict
Dictionary containing three key-value pairs. (1) 'times' contains the
times for the computations, (2) 'altaz' contains the
corresponding alt/az coordinates at those times and (3) contains
the moon illumination for those times.
"""
if not hasattr(observer, '_moon_cache'):
observer._moon_cache = {}
# convert times to tuple for hashing
aakey = _make_cache_key(times, 'moon')
if aakey not in observer._moon_cache:
try:
if force_zero_pressure:
observer_old_pressure = observer.pressure
observer.pressure = 0
altaz = observer.moon_altaz(times)
illumination = np.array(moon_illumination(times))
observer._moon_cache[aakey] = dict(times=times,
illum=illumination,
altaz=altaz)
finally:
if force_zero_pressure:
observer.pressure = observer_old_pressure
return observer._moon_cache[aakey]
def _get_meridian_transit_times(times, observer, targets):
"""
Calculate next meridian transit for an array of times for ``targets`` and
``observer``.
Cache the result on the ``observer`` object.
Parameters
----------
times : `~astropy.time.Time`
Array of times on which to test the constraint
observer : `~astroplan.Observer`
The observer who has constraints ``constraints``
targets : {list, `~astropy.coordinates.SkyCoord`, `~astroplan.FixedTarget`}
Target or list of targets
Returns
-------
time_dict : dict
Dictionary containing a key-value pair. 'times' contains the
meridian_transit times.
"""
if not hasattr(observer, '_meridian_transit_cache'):
observer._meridian_transit_cache = {}
# convert times to tuple for hashing
aakey = _make_cache_key(times, targets)
if aakey not in observer._meridian_transit_cache:
meridian_transit_times = observer.target_meridian_transit_time(times, targets)
observer._meridian_transit_cache[aakey] = dict(times=meridian_transit_times)
return observer._meridian_transit_cache[aakey]
@abstractmethod
class Constraint(object):
"""
Abstract class for objects defining observational constraints.
"""
__metaclass__ = ABCMeta
def __call__(self, observer, targets, times=None,
time_range=None, time_grid_resolution=0.5*u.hour,
grid_times_targets=False):
"""
Compute the constraint for this class
Parameters
----------
observer : `~astroplan.Observer`
the observation location from which to apply the constraints
targets : sequence of `~astroplan.Target`
The targets on which to apply the constraints.
times : `~astropy.time.Time`
The times to compute the constraint.
            If ``times`` is provided, ``time_range`` and ``time_grid_resolution`` are ignored.
time_range : `~astropy.time.Time` (length = 2)
Lower and upper bounds on time sequence.
time_grid_resolution : `~astropy.units.quantity`
Time-grid spacing
grid_times_targets : bool
if True, grids the constraint result with targets along the first
index and times along the second. Otherwise, we rely on broadcasting
the shapes together using standard numpy rules.
Returns
-------
constraint_result : 1D or 2D array of float or bool
The constraints. If 2D with targets along the first index and times along
the second.
"""
if times is None and time_range is not None:
times = time_grid_from_range(time_range,
time_resolution=time_grid_resolution)
if grid_times_targets:
targets = get_skycoord(targets)
# TODO: these broadcasting operations are relatively slow
# but there is potential for huge speedup if the end user
# disables gridding and re-shapes the coords themselves
# prior to evaluating multiple constraints.
if targets.isscalar:
# ensure we have a (1, 1) shape coord
targets = SkyCoord(np.tile(targets, 1))[:, np.newaxis]
else:
targets = targets[..., np.newaxis]
times, targets = observer._preprocess_inputs(times, targets, grid_times_targets=False)
result = self.compute_constraint(times, observer, targets)
# make sure the output has the same shape as would result from
# broadcasting times and targets against each other
if targets is not None:
# broadcasting times v targets is slow due to
# complex nature of these objects. We make
# to simple numpy arrays of the same shape and
# broadcast these to find the correct shape
shp1, shp2 = times.shape, targets.shape
x = np.array([1])
a = as_strided(x, shape=shp1, strides=[0] * len(shp1))
b = as_strided(x, shape=shp2, strides=[0] * len(shp2))
output_shape = np.broadcast(a, b).shape
if output_shape != np.array(result).shape:
result = np.broadcast_to(result, output_shape)
return result
@abstractmethod
def compute_constraint(self, times, observer, targets):
"""
Actually do the real work of computing the constraint. Subclasses
override this.
Parameters
----------
times : `~astropy.time.Time`
The times to compute the constraint
observer : `~astroplan.Observer`
the observaton location from which to apply the constraints
targets : sequence of `~astroplan.Target`
The targets on which to apply the constraints.
Returns
-------
constraint_result : 2D array of float or bool
The constraints, with targets along the first index and times along
the second.
"""
# Should be implemented on each subclass of Constraint
raise NotImplementedError
class AltitudeConstraint(Constraint):
"""
Constrain the altitude of the target.
.. note::
This can misbehave if you try to constrain negative altitudes, as
        the `~astropy.coordinates.AltAz` frame tends to mishandle negative altitudes.
Parameters
----------
min : `~astropy.units.Quantity` or `None`
Minimum altitude of the target (inclusive). `None` indicates no limit.
max : `~astropy.units.Quantity` or `None`
Maximum altitude of the target (inclusive). `None` indicates no limit.
boolean_constraint : bool
If True, the constraint is treated as a boolean (True for within the
limits and False for outside). If False, the constraint returns a
float on [0, 1], where 0 is the min altitude and 1 is the max.
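
    Examples
    --------
    To create a constraint requiring the target to be between 20 and 85
    degrees altitude (limits chosen only for illustration)::

        AltitudeConstraint(min=20*u.deg, max=85*u.deg)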
"""
def __init__(self, min=None, max=None, boolean_constraint=True):
if min is None:
self.min = -90*u.deg
else:
self.min = min
if max is None:
self.max = 90*u.deg
else:
self.max = max
self.boolean_constraint = boolean_constraint
def compute_constraint(self, times, observer, targets):
cached_altaz = _get_altaz(times, observer, targets)
alt = cached_altaz['altaz'].alt
if self.boolean_constraint:
lowermask = self.min <= alt
uppermask = alt <= self.max
return lowermask & uppermask
else:
return max_best_rescale(alt, self.min, self.max)
class AirmassConstraint(AltitudeConstraint):
"""
Constrain the airmass of a target.
In the current implementation the airmass is approximated by the secant of
the zenith angle.
.. note::
The ``max`` and ``min`` arguments appear in the order (max, min)
in this initializer to support the common case for users who care
about the upper limit on the airmass (``max``) and not the lower
limit.
Parameters
----------
max : float or `None`
Maximum airmass of the target. `None` indicates no limit.
min : float or `None`
Minimum airmass of the target. `None` indicates no limit.
    boolean_constraint : bool
        If True, return True/False for whether the airmass is within the
        limits; if False, return a float on [0, 1] rescaled between the limits.
Examples
--------
To create a constraint that requires the airmass be "better than 2",
i.e. at a higher altitude than airmass=2::
AirmassConstraint(2)
"""
def __init__(self, max=None, min=1, boolean_constraint=True):
self.min = min
self.max = max
self.boolean_constraint = boolean_constraint
def compute_constraint(self, times, observer, targets):
cached_altaz = _get_altaz(times, observer, targets)
secz = cached_altaz['altaz'].secz.value
if self.boolean_constraint:
if self.min is None and self.max is not None:
mask = secz <= self.max
elif self.max is None and self.min is not None:
mask = self.min <= secz
elif self.min is not None and self.max is not None:
mask = (self.min <= secz) & (secz <= self.max)
else:
raise ValueError("No max and/or min specified in "
"AirmassConstraint.")
return mask
else:
if self.max is None:
raise ValueError("Cannot have a float AirmassConstraint if max is None.")
else:
mx = self.max
mi = 1 if self.min is None else self.min
# values below 1 should be disregarded
return min_best_rescale(secz, mi, mx, less_than_min=0)
class AtNightConstraint(Constraint):
"""
    Constrain the Sun to be below ``max_solar_altitude``.
"""
@u.quantity_input(horizon=u.deg)
def __init__(self, max_solar_altitude=0*u.deg, force_pressure_zero=True):
"""
Parameters
----------
max_solar_altitude : `~astropy.units.Quantity`
The altitude of the sun below which it is considered to be "night"
(inclusive).
force_pressure_zero : bool (optional)
Force the pressure to zero for solar altitude calculations. This
avoids errors in the altitude of the Sun that can occur when the
Sun is below the horizon and the corrections for atmospheric
refraction return nonsense values.
"""
self.max_solar_altitude = max_solar_altitude
self.force_pressure_zero = force_pressure_zero
@classmethod
def twilight_civil(cls, **kwargs):
"""
Consider nighttime as time between civil twilights (-6 degrees).
"""
return cls(max_solar_altitude=-6*u.deg, **kwargs)
@classmethod
def twilight_nautical(cls, **kwargs):
"""
Consider nighttime as time between nautical twilights (-12 degrees).
"""
return cls(max_solar_altitude=-12*u.deg, **kwargs)
@classmethod
def twilight_astronomical(cls, **kwargs):
"""
Consider nighttime as time between astronomical twilights (-18 degrees).
"""
return cls(max_solar_altitude=-18*u.deg, **kwargs)
def _get_solar_altitudes(self, times, observer, targets):
if not hasattr(observer, '_altaz_cache'):
observer._altaz_cache = {}
aakey = _make_cache_key(times, 'sun')
if aakey not in observer._altaz_cache:
try:
if self.force_pressure_zero:
observer_old_pressure = observer.pressure
observer.pressure = 0
# find solar altitude at these times
altaz = observer.altaz(times, get_sun(times))
altitude = altaz.alt
# cache the altitude
observer._altaz_cache[aakey] = dict(times=times,
altitude=altitude)
finally:
if self.force_pressure_zero:
observer.pressure = observer_old_pressure
else:
altitude = observer._altaz_cache[aakey]['altitude']
return altitude
def compute_constraint(self, times, observer, targets):
solar_altitude = self._get_solar_altitudes(times, observer, targets)
mask = solar_altitude <= self.max_solar_altitude
return mask
class GalacticLatitudeConstraint(Constraint):
"""
Constrain the distance between the Galactic plane and some targets.
"""
def __init__(self, min=None, max=None):
"""
Parameters
----------
min : `~astropy.units.Quantity` or `None` (optional)
Minimum acceptable Galactic latitude of target (inclusive).
`None` indicates no limit.
max : `~astropy.units.Quantity` or `None` (optional)
Minimum acceptable Galactic latitude of target (inclusive).
`None` indicates no limit.
"""
self.min = min
self.max = max
def compute_constraint(self, times, observer, targets):
separation = abs(targets.transform_to(Galactic).b)
if self.min is None and self.max is not None:
mask = self.max >= separation
elif self.max is None and self.min is not None:
mask = self.min <= separation
elif self.min is not None and self.max is not None:
mask = ((self.min <= separation) & (separation <= self.max))
else:
raise ValueError("No max and/or min specified in "
"GalacticLatitudeConstraint.")
return mask
class SunSeparationConstraint(Constraint):
"""
Constrain the distance between the Sun and some targets.
"""
def __init__(self, min=None, max=None):
"""
Parameters
----------
min : `~astropy.units.Quantity` or `None` (optional)
Minimum acceptable separation between Sun and target (inclusive).
`None` indicates no limit.
max : `~astropy.units.Quantity` or `None` (optional)
Maximum acceptable separation between Sun and target (inclusive).
`None` indicates no limit.
"""
self.min = min
self.max = max
def compute_constraint(self, times, observer, targets):
# use get_body rather than get sun here, since
# it returns the Sun's coordinates in an observer
# centred frame, so the separation is as-seen
# by the observer.
# 'get_sun' returns ICRS coords.
sun = get_body('sun', times, location=observer.location)
solar_separation = sun.separation(targets)
if self.min is None and self.max is not None:
mask = self.max >= solar_separation
elif self.max is None and self.min is not None:
mask = self.min <= solar_separation
elif self.min is not None and self.max is not None:
mask = ((self.min <= solar_separation) &
(solar_separation <= self.max))
else:
raise ValueError("No max and/or min specified in "
"SunSeparationConstraint.")
return mask
class MoonSeparationConstraint(Constraint):
"""
Constrain the distance between the Earth's moon and some targets.
"""
def __init__(self, min=None, max=None, ephemeris=None):
"""
Parameters
----------
min : `~astropy.units.Quantity` or `None` (optional)
Minimum acceptable separation between moon and target (inclusive).
`None` indicates no limit.
max : `~astropy.units.Quantity` or `None` (optional)
Maximum acceptable separation between moon and target (inclusive).
`None` indicates no limit.
ephemeris : str, optional
Ephemeris to use. If not given, use the one set with
``astropy.coordinates.solar_system_ephemeris.set`` (which is
set to 'builtin' by default).
"""
self.min = min
self.max = max
self.ephemeris = ephemeris
def compute_constraint(self, times, observer, targets):
# removed the location argument here, which causes small <1 deg
# innacuracies, but it is needed until astropy PR #5897 is released
# which should be astropy 1.3.2
moon = get_moon(times,
ephemeris=self.ephemeris)
# note to future editors - the order matters here
# moon.separation(targets) is NOT the same as targets.separation(moon)
# the former calculates the separation in the frame of the moon coord
# which is GCRS, and that is what we want.
moon_separation = moon.separation(targets)
if self.min is None and self.max is not None:
mask = self.max >= moon_separation
elif self.max is None and self.min is not None:
mask = self.min <= moon_separation
elif self.min is not None and self.max is not None:
mask = ((self.min <= moon_separation) &
(moon_separation <= self.max))
else:
raise ValueError("No max and/or min specified in "
"MoonSeparationConstraint.")
return mask
class MoonIlluminationConstraint(Constraint):
"""
Constrain the fractional illumination of the Earth's moon.
Constraint is also satisfied if the Moon has set.
"""
def __init__(self, min=None, max=None, ephemeris=None):
"""
Parameters
----------
min : float or `None` (optional)
Minimum acceptable fractional illumination (inclusive). `None`
indicates no limit.
max : float or `None` (optional)
Maximum acceptable fractional illumination (inclusive). `None`
indicates no limit.
ephemeris : str, optional
Ephemeris to use. If not given, use the one set with
`~astropy.coordinates.solar_system_ephemeris` (which is
set to 'builtin' by default).
"""
self.min = min
self.max = max
self.ephemeris = ephemeris
@classmethod
def dark(cls, min=None, max=0.25, **kwargs):
"""
initialize a `~astroplan.constraints.MoonIlluminationConstraint`
with defaults of no minimum and a maximum of 0.25
Parameters
----------
min : float or `None` (optional)
Minimum acceptable fractional illumination (inclusive). `None`
indicates no limit.
max : float or `None` (optional)
Maximum acceptable fractional illumination (inclusive). `None`
indicates no limit.
"""
return cls(min, max, **kwargs)
@classmethod
def grey(cls, min=0.25, max=0.65, **kwargs):
"""
initialize a `~astroplan.constraints.MoonIlluminationConstraint`
with defaults of a minimum of 0.25 and a maximum of 0.65
Parameters
----------
min : float or `None` (optional)
Minimum acceptable fractional illumination (inclusive). `None`
indicates no limit.
max : float or `None` (optional)
Maximum acceptable fractional illumination (inclusive). `None`
indicates no limit.
"""
return cls(min, max, **kwargs)
@classmethod
def bright(cls, min=0.65, max=None, **kwargs):
"""
initialize a `~astroplan.constraints.MoonIlluminationConstraint`
with defaults of a minimum of 0.65 and no maximum
Parameters
----------
min : float or `None` (optional)
Minimum acceptable fractional illumination (inclusive). `None`
indicates no limit.
max : float or `None` (optional)
Maximum acceptable fractional illumination (inclusive). `None`
indicates no limit.
"""
return cls(min, max, **kwargs)
def compute_constraint(self, times, observer, targets):
# first is the moon up?
cached_moon = _get_moon_data(times, observer)
moon_alt = cached_moon['altaz'].alt
moon_down_mask = moon_alt < 0
moon_up_mask = moon_alt >= 0
illumination = cached_moon['illum']
if self.min is None and self.max is not None:
mask = (self.max >= illumination) | moon_down_mask
elif self.max is None and self.min is not None:
mask = (self.min <= illumination) & moon_up_mask
elif self.min is not None and self.max is not None:
mask = ((self.min <= illumination) &
(illumination <= self.max)) & moon_up_mask
else:
raise ValueError("No max and/or min specified in "
"MoonSeparationConstraint.")
return mask
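# Illustrative usage sketch (added comment, not part of the original file):
#     dark = MoonIlluminationConstraint.dark()      # illum <= 0.25, or Moon below horizon
#     grey = MoonIlluminationConstraint.grey()      # 0.25 <= illum <= 0.65, Moon up
#     bright = MoonIlluminationConstraint.bright()  # illum >= 0.65, Moon up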
class LocalTimeConstraint(Constraint):
"""
Constrain the observable hours.
"""
def __init__(self, min=None, max=None):
"""
Parameters
----------
min : `~datetime.time`
Earliest local time (inclusive). `None` indicates no limit.
max : `~datetime.time`
Latest local time (inclusive). `None` indicates no limit.
Examples
--------
Constrain the observations to targets that are observable between
23:50 and 04:08 local time:
>>> from astroplan import Observer
>>> from astroplan.constraints import LocalTimeConstraint
>>> import datetime as dt
>>> subaru = Observer.at_site("Subaru", timezone="US/Hawaii")
>>> # bound times between 23:50 and 04:08 local Hawaiian time
>>> constraint = LocalTimeConstraint(min=dt.time(23,50), max=dt.time(4,8))
"""
self.min = min
self.max = max
if self.min is None and self.max is None:
raise ValueError("You must at least supply either a minimum or a maximum time.")
if self.min is not None:
if not isinstance(self.min, datetime.time):
raise TypeError("Time limits must be specified as datetime.time objects.")
if self.max is not None:
if not isinstance(self.max, datetime.time):
raise TypeError("Time limits must be specified as datetime.time objects.")
def compute_constraint(self, times, observer, targets):
timezone = None
# get timezone from time objects, or from observer
if self.min is not None:
timezone = self.min.tzinfo
elif self.max is not None:
timezone = self.max.tzinfo
if timezone is None:
timezone = observer.timezone
if self.min is not None:
min_time = self.min
else:
min_time = self.min = datetime.time(0, 0, 0)
if self.max is not None:
max_time = self.max
else:
max_time = datetime.time(23, 59, 59)
# If time limits occur on same day:
if min_time < max_time:
try:
mask = np.array([min_time <= t.time() <= max_time for t in times.datetime])
            except BaseException: # use np.bool_ so shape queries don't cause problems
mask = np.bool_(min_time <= times.datetime.time() <= max_time)
# If time boundaries straddle midnight:
else:
try:
mask = np.array([(t.time() >= min_time) or
(t.time() <= max_time) for t in times.datetime])
except BaseException:
mask = np.bool_((times.datetime.time() >= min_time) or
(times.datetime.time() <= max_time))
return mask
class TimeConstraint(Constraint):
"""Constrain the observing time to be within certain time limits.
An example use case for this class would be to associate an acceptable
time range with a specific observing block. This can be useful if not
all observing blocks are valid over the time limits used in calls
to `is_observable` or `is_always_observable`.
"""
def __init__(self, min=None, max=None):
"""
Parameters
----------
min : `~astropy.time.Time`
Earliest time (inclusive). `None` indicates no limit.
max : `~astropy.time.Time`
Latest time (inclusive). `None` indicates no limit.
Examples
--------
Constrain the observations to targets that are observable between
2016-03-28 and 2016-03-30:
>>> from astroplan import Observer
>>> from astropy.time import Time
>>> subaru = Observer.at_site("Subaru")
>>> t1 = Time("2016-03-28T12:00:00")
>>> t2 = Time("2016-03-30T12:00:00")
>>> constraint = TimeConstraint(t1,t2)
"""
self.min = min
self.max = max
if self.min is None and self.max is None:
raise ValueError("You must at least supply either a minimum or a "
"maximum time.")
if self.min is not None:
if not isinstance(self.min, Time):
raise TypeError("Time limits must be specified as "
"astropy.time.Time objects.")
if self.max is not None:
if not isinstance(self.max, Time):
raise TypeError("Time limits must be specified as "
"astropy.time.Time objects.")
def compute_constraint(self, times, observer, targets):
with warnings.catch_warnings():
warnings.simplefilter('ignore')
min_time = Time("1950-01-01T00:00:00") if self.min is None else self.min
max_time = Time("2120-01-01T00:00:00") if self.max is None else self.max
mask = np.logical_and(times > min_time, times < max_time)
return mask
class PrimaryEclipseConstraint(Constraint):
"""
Constrain observations to times during primary eclipse.
"""
def __init__(self, eclipsing_system):
"""
Parameters
----------
eclipsing_system : `~astroplan.periodic.EclipsingSystem`
System which must be in primary eclipse.
"""
self.eclipsing_system = eclipsing_system
def compute_constraint(self, times, observer=None, targets=None):
mask = self.eclipsing_system.in_primary_eclipse(times)
return mask
class SecondaryEclipseConstraint(Constraint):
"""
Constrain observations to times during secondary eclipse.
"""
def __init__(self, eclipsing_system):
"""
Parameters
----------
eclipsing_system : `~astroplan.periodic.EclipsingSystem`
System which must be in secondary eclipse.
"""
self.eclipsing_system = eclipsing_system
def compute_constraint(self, times, observer=None, targets=None):
mask = self.eclipsing_system.in_secondary_eclipse(times)
return mask
class PhaseConstraint(Constraint):
"""
Constrain observations to times in some range of phases for a periodic event
    (e.g. transiting exoplanets, eclipsing binaries).
"""
def __init__(self, periodic_event, min=None, max=None):
"""
Parameters
----------
periodic_event : `~astroplan.periodic.PeriodicEvent` or subclass
System on which to compute the phase. For example, the system
could be an eclipsing or non-eclipsing binary, or exoplanet system.
min : float (optional)
Minimum phase (inclusive) on interval [0, 1). Default is zero.
max : float (optional)
Maximum phase (inclusive) on interval [0, 1). Default is one.
Examples
--------
To constrain observations on orbital phases between 0.4 and 0.6,
>>> from astroplan import PeriodicEvent
>>> from astropy.time import Time
>>> import astropy.units as u
>>> binary = PeriodicEvent(epoch=Time('2017-01-01 02:00'), period=1*u.day)
>>> constraint = PhaseConstraint(binary, min=0.4, max=0.6)
The minimum and maximum phase must be described on the interval [0, 1).
To constrain observations on orbital phases between 0.6 and 1.2, for
example, you should subtract one from the second number:
>>> constraint = PhaseConstraint(binary, min=0.6, max=0.2)
"""
self.periodic_event = periodic_event
        if ((min is not None and (min < 0 or min > 1)) or
                (max is not None and (max < 0 or max > 1))):
            raise ValueError('The minimum and maximum of the PhaseConstraint '
                             'must be within the interval [0, 1).')
self.min = min if min is not None else 0.0
self.max = max if max is not None else 1.0
def compute_constraint(self, times, observer=None, targets=None):
phase = self.periodic_event.phase(times)
mask = np.where(self.max > self.min,
(phase >= self.min) & (phase <= self.max),
(phase >= self.min) | (phase <= self.max))
return mask
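# Added note: in ``PhaseConstraint.compute_constraint`` above, a window with
# ``max < min`` (e.g. min=0.6, max=0.2) is treated as wrapping through phase
# zero, so phases 0.9 and 0.1 satisfy it while 0.4 does not; this matches the
# "subtract one" convention described in the class docstring.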
def is_always_observable(constraints, observer, targets, times=None,
time_range=None, time_grid_resolution=0.5*u.hour):
"""
A function to determine whether ``targets`` are always observable throughout
    ``time_range`` given the constraints in ``constraints`` for a
particular ``observer``.
Parameters
----------
constraints : list or `~astroplan.constraints.Constraint`
Observational constraint(s)
observer : `~astroplan.Observer`
The observer who has constraints ``constraints``
targets : {list, `~astropy.coordinates.SkyCoord`, `~astroplan.FixedTarget`}
Target or list of targets
times : `~astropy.time.Time` (optional)
Array of times on which to test the constraint
time_range : `~astropy.time.Time` (optional)
Lower and upper bounds on time sequence, with spacing
``time_resolution``. This will be passed as the first argument into
`~astroplan.time_grid_from_range`.
time_grid_resolution : `~astropy.units.Quantity` (optional)
If ``time_range`` is specified, determine whether constraints are met
between test times in ``time_range`` by checking constraint at
linearly-spaced times separated by ``time_resolution``. Default is 0.5
hours.
Returns
-------
    always_observable : list
        List of booleans of same length as ``targets`` for whether or not each
        target is always observable in the time range given the constraints.
"""
if not hasattr(constraints, '__len__'):
constraints = [constraints]
applied_constraints = [constraint(observer, targets, times=times,
time_range=time_range,
time_grid_resolution=time_grid_resolution,
grid_times_targets=True)
for constraint in constraints]
constraint_arr = np.logical_and.reduce(applied_constraints)
return np.all(constraint_arr, axis=1)
def is_observable(constraints, observer, targets, times=None,
time_range=None, time_grid_resolution=0.5*u.hour):
"""
Determines if the ``targets`` are observable during ``time_range`` given
    constraints in ``constraints`` for a particular ``observer``.
Parameters
----------
constraints : list or `~astroplan.constraints.Constraint`
Observational constraint(s)
observer : `~astroplan.Observer`
The observer who has constraints ``constraints``
targets : {list, `~astropy.coordinates.SkyCoord`, `~astroplan.FixedTarget`}
Target or list of targets
times : `~astropy.time.Time` (optional)
Array of times on which to test the constraint
time_range : `~astropy.time.Time` (optional)
Lower and upper bounds on time sequence, with spacing
``time_resolution``. This will be passed as the first argument into
`~astroplan.time_grid_from_range`.
time_grid_resolution : `~astropy.units.Quantity` (optional)
If ``time_range`` is specified, determine whether constraints are met
between test times in ``time_range`` by checking constraint at
linearly-spaced times separated by ``time_resolution``. Default is 0.5
hours.
Returns
-------
ever_observable : list
List of booleans of same length as ``targets`` for whether or not each
target is ever observable in the time range given the constraints.
"""
if not hasattr(constraints, '__len__'):
constraints = [constraints]
applied_constraints = [constraint(observer, targets, times=times,
time_range=time_range,
time_grid_resolution=time_grid_resolution,
grid_times_targets=True)
for constraint in constraints]
constraint_arr = np.logical_and.reduce(applied_constraints)
return np.any(constraint_arr, axis=1)
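# Minimal usage sketch for the two helpers above (added for illustration; the
# observer, target and constraint objects here are assumptions, not from this file):
#
#     observer = Observer.at_site("Subaru")
#     targets = [FixedTarget.from_name("Vega")]
#     time_range = Time(["2017-08-01 06:00", "2017-08-01 12:00"])
#     constraints = [AltitudeConstraint(min=30 * u.deg)]
#     ever = is_observable(constraints, observer, targets, time_range=time_range)
#     always = is_always_observable(constraints, observer, targets,
#                                   time_range=time_range)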
def is_event_observable(constraints, observer, target, times=None,
times_ingress_egress=None):
"""
Determines if the ``target`` is observable at each time in ``times``, given
constraints in ``constraints`` for a particular ``observer``.
Parameters
----------
constraints : list or `~astroplan.constraints.Constraint`
Observational constraint(s)
observer : `~astroplan.Observer`
The observer who has constraints ``constraints``
target : {list, `~astropy.coordinates.SkyCoord`, `~astroplan.FixedTarget`}
Target
times : `~astropy.time.Time` (optional)
Array of mid-event times on which to test the constraints
times_ingress_egress : `~astropy.time.Time` (optional)
Array of ingress and egress times for ``N`` events, with shape
(``N``, 2).
Returns
-------
event_observable : `~numpy.ndarray`
Array of booleans of same length as ``times`` for whether or not the
        target is observable at each time, given the constraints.
"""
if not hasattr(constraints, '__len__'):
constraints = [constraints]
if times is not None:
applied_constraints = [constraint(observer, target, times=times,
grid_times_targets=True)
for constraint in constraints]
constraint_arr = np.logical_and.reduce(applied_constraints)
else:
times_ing = times_ingress_egress[:, 0]
times_egr = times_ingress_egress[:, 1]
applied_constraints_ing = [constraint(observer, target, times=times_ing,
grid_times_targets=True)
for constraint in constraints]
applied_constraints_egr = [constraint(observer, target, times=times_egr,
grid_times_targets=True)
for constraint in constraints]
constraint_arr = np.logical_and(np.logical_and.reduce(applied_constraints_ing),
np.logical_and.reduce(applied_constraints_egr))
return constraint_arr
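# Added note: when ``times_ingress_egress`` is supplied instead of ``times``,
# ``is_event_observable`` marks an event as observable only if every
# constraint is satisfied at both its ingress and its egress time.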
def months_observable(constraints, observer, targets,
time_range=_current_year_time_range,
time_grid_resolution=0.5*u.hour):
"""
    Determines in which months the specified ``targets`` are observable for a
specific ``observer``, given the supplied ``constraints``.
Parameters
----------
constraints : list or `~astroplan.constraints.Constraint`
Observational constraint(s)
observer : `~astroplan.Observer`
The observer who has constraints ``constraints``
targets : {list, `~astropy.coordinates.SkyCoord`, `~astroplan.FixedTarget`}
Target or list of targets
time_range : `~astropy.time.Time` (optional)
Lower and upper bounds on time sequence
If ``time_range`` is not specified, defaults to current year (localtime)
time_grid_resolution : `~astropy.units.Quantity` (optional)
If ``time_range`` is specified, determine whether constraints are met
between test times in ``time_range`` by checking constraint at
linearly-spaced times separated by ``time_resolution``. Default is 0.5
hours.
Returns
-------
observable_months : list
List of sets of unique integers representing each month that a target is
observable, one set per target. These integers are 1-based so that
January maps to 1, February maps to 2, etc.
"""
# TODO: This method could be sped up a lot by dropping to the trigonometric
# altitude calculations.
if not hasattr(constraints, '__len__'):
constraints = [constraints]
times = time_grid_from_range(time_range, time_grid_resolution)
applied_constraints = [constraint(observer, targets,
times=times,
grid_times_targets=True)
for constraint in constraints]
constraint_arr = np.logical_and.reduce(applied_constraints)
months_observable = []
for target, observable in zip(targets, constraint_arr):
s = set([t.datetime.month for t in times[observable]])
months_observable.append(s)
return months_observable
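# Added example (illustrative): a return value of ``[{3, 4, 5}]`` from
# ``months_observable`` means the single target meets the constraints at some
# grid time in March, April and May of the year covered by ``time_range``.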
def observability_table(constraints, observer, targets, times=None,
time_range=None, time_grid_resolution=0.5*u.hour):
"""
Creates a table with information about observability for all the ``targets``
over the requested ``time_range``, given the constraints in
    ``constraints`` for ``observer``.
Parameters
----------
constraints : list or `~astroplan.constraints.Constraint`
Observational constraint(s)
observer : `~astroplan.Observer`
The observer who has constraints ``constraints``
targets : {list, `~astropy.coordinates.SkyCoord`, `~astroplan.FixedTarget`}
Target or list of targets
times : `~astropy.time.Time` (optional)
Array of times on which to test the constraint
time_range : `~astropy.time.Time` (optional)
Lower and upper bounds on time sequence, with spacing
``time_resolution``. This will be passed as the first argument into
`~astroplan.time_grid_from_range`. If a single (scalar) time, the table
will be for a 24 hour period centered on that time.
time_grid_resolution : `~astropy.units.Quantity` (optional)
If ``time_range`` is specified, determine whether constraints are met
between test times in ``time_range`` by checking constraint at
linearly-spaced times separated by ``time_resolution``. Default is 0.5
hours.
Returns
-------
observability_table : `~astropy.table.Table`
A Table containing the observability information for each of the
``targets``. The table contains four columns with information about the
        target and its observability: ``'target name'``, ``'ever observable'``,
``'always observable'``, and ``'fraction of time observable'``. The
column ``'time observable'`` will also be present if the ``time_range``
is given as a scalar. It also contains metadata entries ``'times'``
(with an array of all the times), ``'observer'`` (the
`~astroplan.Observer` object), and ``'constraints'`` (containing the
supplied ``constraints``).
"""
if not hasattr(constraints, '__len__'):
constraints = [constraints]
is_24hr_table = False
if hasattr(time_range, 'isscalar') and time_range.isscalar:
time_range = (time_range-12*u.hour, time_range+12*u.hour)
is_24hr_table = True
applied_constraints = [constraint(observer, targets, times=times,
time_range=time_range,
time_grid_resolution=time_grid_resolution,
grid_times_targets=True)
for constraint in constraints]
constraint_arr = np.logical_and.reduce(applied_constraints)
colnames = ['target name', 'ever observable', 'always observable',
'fraction of time observable']
target_names = [target.name for target in targets]
ever_obs = np.any(constraint_arr, axis=1)
always_obs = np.all(constraint_arr, axis=1)
frac_obs = np.sum(constraint_arr, axis=1) / constraint_arr.shape[1]
tab = table.Table(names=colnames, data=[target_names, ever_obs, always_obs,
frac_obs])
if times is None and time_range is not None:
times = time_grid_from_range(time_range,
time_resolution=time_grid_resolution)
if is_24hr_table:
tab['time observable'] = tab['fraction of time observable'] * 24*u.hour
tab.meta['times'] = times.datetime
tab.meta['observer'] = observer
tab.meta['constraints'] = constraints
return tab
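# Hedged usage sketch (added; the argument objects are assumptions, as above):
#     tab = observability_table(constraints, observer, targets,
#                               time_range=time_range)
#     print(tab['target name', 'fraction of time observable'])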
def min_best_rescale(vals, min_val, max_val, less_than_min=1):
"""
rescales an input array ``vals`` to be a score (between zero and one),
where the ``min_val`` goes to one, and the ``max_val`` goes to zero.
Parameters
----------
vals : array-like
the values that need to be rescaled to be between 0 and 1
min_val : float
worst acceptable value (rescales to 0)
max_val : float
best value cared about (rescales to 1)
less_than_min : 0 or 1
what is returned for ``vals`` below ``min_val``. (in some cases
anything less than ``min_val`` should also return one,
in some cases it should return zero)
Returns
-------
array of floats between 0 and 1 inclusive rescaled so that
``vals`` equal to ``max_val`` equal 0 and those equal to
``min_val`` equal 1
Examples
--------
rescale airmasses to between 0 and 1, with the best (1)
and worst (2.25). All values outside the range should
return 0.
>>> from astroplan.constraints import min_best_rescale
>>> import numpy as np
>>> airmasses = np.array([1, 1.5, 2, 3, 0])
>>> min_best_rescale(airmasses, 1, 2.25, less_than_min = 0) # doctest: +FLOAT_CMP
array([ 1. , 0.6, 0.2, 0. , 0. ])
"""
rescaled = (vals - max_val) / (min_val - max_val)
below = vals < min_val
above = vals > max_val
rescaled[below] = less_than_min
rescaled[above] = 0
return rescaled
def max_best_rescale(vals, min_val, max_val, greater_than_max=1):
"""
rescales an input array ``vals`` to be a score (between zero and one),
where the ``max_val`` goes to one, and the ``min_val`` goes to zero.
Parameters
----------
vals : array-like
the values that need to be rescaled to be between 0 and 1
min_val : float
worst acceptable value (rescales to 0)
max_val : float
best value cared about (rescales to 1)
greater_than_max : 0 or 1
what is returned for ``vals`` above ``max_val``. (in some cases
anything higher than ``max_val`` should also return one,
in some cases it should return zero)
Returns
-------
array of floats between 0 and 1 inclusive rescaled so that
``vals`` equal to ``min_val`` equal 0 and those equal to
``max_val`` equal 1
Examples
--------
rescale an array of altitudes to be between 0 and 1,
with the best (60) going to 1 and worst (35) going to
0. For values outside the range, the rescale should
return 0 below 35 and 1 above 60.
>>> from astroplan.constraints import max_best_rescale
>>> import numpy as np
>>> altitudes = np.array([20, 30, 40, 45, 55, 70])
>>> max_best_rescale(altitudes, 35, 60) # doctest: +FLOAT_CMP
array([ 0. , 0. , 0.2, 0.4, 0.8, 1. ])
"""
rescaled = (vals - min_val) / (max_val - min_val)
below = vals < min_val
above = vals > max_val
rescaled[below] = 0
rescaled[above] = greater_than_max
return rescaled
| [((1322, 1338), 'time.localtime', 'time.localtime', ([], {}), '()\n', (1336, 1338), False, 'import time\n'), ((15084, 15115), 'astropy.units.quantity_input', 'u.quantity_input', ([], {'horizon': 'u.deg'}), '(horizon=u.deg)\n', (15100, 15115), True, 'import astropy.units as u\n'), ((36318, 36360), 'numpy.logical_and.reduce', 'np.logical_and.reduce', (['applied_constraints'], {}), '(applied_constraints)\n', (36339, 36360), True, 'import numpy as np\n'), ((36372, 36402), 'numpy.all', 'np.all', (['constraint_arr'], {'axis': '(1)'}), '(constraint_arr, axis=1)\n', (36378, 36402), True, 'import numpy as np\n'), ((38319, 38361), 'numpy.logical_and.reduce', 'np.logical_and.reduce', (['applied_constraints'], {}), '(applied_constraints)\n', (38340, 38361), True, 'import numpy as np\n'), ((38373, 38403), 'numpy.any', 'np.any', (['constraint_arr'], {'axis': '(1)'}), '(constraint_arr, axis=1)\n', (38379, 38403), True, 'import numpy as np\n'), ((42633, 42675), 'numpy.logical_and.reduce', 'np.logical_and.reduce', (['applied_constraints'], {}), '(applied_constraints)\n', (42654, 42675), True, 'import numpy as np\n'), ((45653, 45695), 'numpy.logical_and.reduce', 'np.logical_and.reduce', (['applied_constraints'], {}), '(applied_constraints)\n', (45674, 45695), True, 'import numpy as np\n'), ((45886, 45916), 'numpy.any', 'np.any', (['constraint_arr'], {'axis': '(1)'}), '(constraint_arr, axis=1)\n', (45892, 45916), True, 'import numpy as np\n'), ((45934, 45964), 'numpy.all', 'np.all', (['constraint_arr'], {'axis': '(1)'}), '(constraint_arr, axis=1)\n', (45940, 45964), True, 'import numpy as np\n'), ((46048, 46133), 'astropy.table.Table', 'table.Table', ([], {'names': 'colnames', 'data': '[target_names, ever_obs, always_obs, frac_obs]'}), '(names=colnames, data=[target_names, ever_obs, always_obs, frac_obs]\n )\n', (46059, 46133), False, 'from astropy import table\n'), ((19867, 19917), 'astropy.coordinates.get_body', 'get_body', (['"""sun"""', 'times'], {'location': 'observer.location'}), "('sun', times, location=observer.location)\n", (19875, 19917), False, 'from astropy.coordinates import get_body, get_sun, get_moon, Galactic, SkyCoord\n'), ((21668, 21709), 'astropy.coordinates.get_moon', 'get_moon', (['times'], {'ephemeris': 'self.ephemeris'}), '(times, ephemeris=self.ephemeris)\n', (21676, 21709), False, 'from astropy.coordinates import get_body, get_sun, get_moon, Galactic, SkyCoord\n'), ((31097, 31147), 'numpy.logical_and', 'np.logical_and', (['(times > min_time)', '(times < max_time)'], {}), '(times > min_time, times < max_time)\n', (31111, 31147), True, 'import numpy as np\n'), ((34176, 34296), 'numpy.where', 'np.where', (['(self.max > self.min)', '((phase >= self.min) & (phase <= self.max))', '((phase >= self.min) | (phase <= self.max))'], {}), '(self.max > self.min, (phase >= self.min) & (phase <= self.max), (\n phase >= self.min) | (phase <= self.max))\n', (34184, 34296), True, 'import numpy as np\n'), ((39815, 39857), 'numpy.logical_and.reduce', 'np.logical_and.reduce', (['applied_constraints'], {}), '(applied_constraints)\n', (39836, 39857), True, 'import numpy as np\n'), ((45980, 46010), 'numpy.sum', 'np.sum', (['constraint_arr'], {'axis': '(1)'}), '(constraint_arr, axis=1)\n', (45986, 46010), True, 'import numpy as np\n'), ((10196, 10209), 'numpy.array', 'np.array', (['[1]'], {}), '([1])\n', (10204, 10209), True, 'import numpy as np\n'), ((28084, 28106), 'datetime.time', 'datetime.time', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (28097, 28106), False, 'import datetime\n'), ((28210, 28235), 
'datetime.time', 'datetime.time', (['(23)', '(59)', '(59)'], {}), '(23, 59, 59)\n', (28223, 28235), False, 'import datetime\n'), ((30841, 30866), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (30864, 30866), False, 'import warnings\n'), ((30880, 30911), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (30901, 30911), False, 'import warnings\n'), ((40440, 40486), 'numpy.logical_and.reduce', 'np.logical_and.reduce', (['applied_constraints_ing'], {}), '(applied_constraints_ing)\n', (40461, 40486), True, 'import numpy as np\n'), ((40528, 40574), 'numpy.logical_and.reduce', 'np.logical_and.reduce', (['applied_constraints_egr'], {}), '(applied_constraints_egr)\n', (40549, 40574), True, 'import numpy as np\n'), ((10371, 10389), 'numpy.broadcast', 'np.broadcast', (['a', 'b'], {}), '(a, b)\n', (10383, 10389), True, 'import numpy as np\n'), ((10476, 10513), 'numpy.broadcast_to', 'np.broadcast_to', (['result', 'output_shape'], {}), '(result, output_shape)\n', (10491, 10513), True, 'import numpy as np\n'), ((30935, 30962), 'astropy.time.Time', 'Time', (['"""1950-01-01T00:00:00"""'], {}), "('1950-01-01T00:00:00')\n", (30939, 30962), False, 'from astropy.time import Time\n'), ((31020, 31047), 'astropy.time.Time', 'Time', (['"""2120-01-01T00:00:00"""'], {}), "('2120-01-01T00:00:00')\n", (31024, 31047), False, 'from astropy.time import Time\n'), ((10427, 10443), 'numpy.array', 'np.array', (['result'], {}), '(result)\n', (10435, 10443), True, 'import numpy as np\n'), ((17016, 17030), 'astropy.coordinates.get_sun', 'get_sun', (['times'], {}), '(times)\n', (17023, 17030), False, 'from astropy.coordinates import get_body, get_sun, get_moon, Galactic, SkyCoord\n'), ((9469, 9488), 'numpy.tile', 'np.tile', (['targets', '(1)'], {}), '(targets, 1)\n', (9476, 9488), True, 'import numpy as np\n')] |
Raulios/django-blog | backend/views.py | ff25c8f21a3f6644e77a2ef5bb7bf7026770e0c2 | from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from django.core.urlresolvers import reverse
from django.shortcuts import render
from django.http import HttpResponseRedirect
from core.models import Post, Category, Tag
from backend.forms import PostForm, CategoryForm, TagForm
# Create your views here.
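# Added summary comment: the views below repeat the same list / add / edit /
# delete pattern for each of the three blog models (Post, Category, Tag);
# every view requires a logged-in user and redirects back to the matching
# list view after a successful form submission or deletion.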
@login_required()
def index(request):
context = {}
context['nav_active'] = 'index'
return render(request, 'backend/index.html', context)
@login_required()
def posts(request):
context = {}
context['nav_active'] = 'posts'
post_list = Post.objects.all()
paginator = Paginator(list(reversed(post_list)), 10)
page = request.GET.get('page')
try:
posts = paginator.page(page)
except PageNotAnInteger:
posts = paginator.page(1)
except EmptyPage:
posts = paginator.page(paginator.num_pages)
context['posts'] = posts
return render(request, 'backend/posts.html', context)
@login_required()
def add_post(request):
context = {}
context['nav_active'] = 'posts'
form = PostForm()
if request.method == 'POST':
form = PostForm(request.POST, request.FILES)
if form.is_valid():
form.save()
messages.success(request, 'Post created.')
return HttpResponseRedirect(reverse('user_panel_posts'))
context['form'] = form
return render(request, 'backend/edit_post.html', context)
@login_required()
def edit_post(request, post_id):
context = {}
context['nav_active'] = 'posts'
post = Post.objects.get(pk=post_id)
context['post'] = post
form = PostForm(instance=post)
if request.method == 'POST':
form = PostForm(request.POST, request.FILES, instance=post)
if form.is_valid():
form.save()
messages.success(request, 'Post updated.')
return HttpResponseRedirect(reverse('user_panel_posts'))
context['form'] = form
return render(request, 'backend/edit_post.html', context)
@login_required()
def delete_post(request, post_id):
context = {}
context['nav_active'] = 'posts'
post = Post.objects.get(pk=post_id)
post.delete()
messages.success(request, 'Post deleted.')
return HttpResponseRedirect(reverse('user_panel_posts'))
@login_required()
def categories(request):
context = {}
context['nav_active'] = 'categories'
categories_list = Category.objects.all()
paginator = Paginator(list(reversed(categories_list)), 10)
page = request.GET.get('page')
try:
categories = paginator.page(page)
except PageNotAnInteger:
categories = paginator.page(1)
except EmptyPage:
categories = paginator.page(paginator.num_pages)
context['categories'] = categories
return render(request, 'backend/categories.html', context)
@login_required()
def add_category(request):
context = {}
context['nav_active'] = 'categories'
form = CategoryForm()
if request.method == 'POST':
form = CategoryForm(request.POST, request.FILES)
if form.is_valid():
form.save()
messages.success(request, 'Category created.')
return HttpResponseRedirect(reverse('user_panel_categories'))
context['form'] = form
return render(request, 'backend/edit_category.html', context)
@login_required()
def edit_category(request, category_id):
context = {}
context['nav_active'] = 'categories'
category = Category.objects.get(pk=category_id)
context['category'] = category
form = CategoryForm(instance=category)
if request.method == 'POST':
form = CategoryForm(request.POST, request.FILES, instance=category)
if form.is_valid():
form.save()
messages.success(request, 'Category updated.')
return HttpResponseRedirect(reverse('user_panel_categories'))
context['form'] = form
return render(request, 'backend/edit_category.html', context)
@login_required()
def delete_category(request, category_id):
context = {}
context['nav_active'] = 'categories'
category = Category.objects.get(pk=category_id)
category.delete()
messages.success(request, 'Category deleted.')
return HttpResponseRedirect(reverse('user_panel_categories'))
@login_required()
def tags(request):
context = {}
context['nav_active'] = 'tags'
tags_list = Tag.objects.all()
paginator = Paginator(list(reversed(tags_list)), 10)
page = request.GET.get('page')
try:
tags = paginator.page(page)
except PageNotAnInteger:
tags = paginator.page(1)
except EmptyPage:
tags = paginator.page(paginator.num_pages)
context['tags'] = tags
return render(request, 'backend/tags.html', context)
@login_required()
def add_tag(request):
context = {}
context['nav_active'] = 'tags'
form = TagForm()
if request.method == 'POST':
form = TagForm(request.POST, request.FILES)
if form.is_valid():
form.save()
messages.success(request, 'Tag created.')
return HttpResponseRedirect(reverse('user_panel_tags'))
context['form'] = form
return render(request, 'backend/edit_tag.html', context)
@login_required()
def edit_tag(request, tag_id):
context = {}
context['nav_active'] = 'tags'
tag = Tag.objects.get(pk=tag_id)
context['tag'] = tag
form = TagForm(instance=tag)
if request.method == 'POST':
form = TagForm(request.POST, request.FILES, instance=tag)
if form.is_valid():
form.save()
messages.success(request, 'Tag updated.')
return HttpResponseRedirect(reverse('user_panel_tags'))
context['form'] = form
return render(request, 'backend/edit_tag.html', context)
@login_required()
def delete_tag(request, tag_id):
context = {}
context['nav_active'] = 'tags'
tag = Tag.objects.get(pk=tag_id)
tag.delete()
messages.success(request, 'Tag deleted.')
return HttpResponseRedirect(reverse('user_panel_tags')) | [((424, 440), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (438, 440), False, 'from django.contrib.auth.decorators import login_required\n'), ((575, 591), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (589, 591), False, 'from django.contrib.auth.decorators import login_required\n'), ((1070, 1086), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (1084, 1086), False, 'from django.contrib.auth.decorators import login_required\n'), ((1544, 1560), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (1558, 1560), False, 'from django.contrib.auth.decorators import login_required\n'), ((2124, 2140), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (2138, 2140), False, 'from django.contrib.auth.decorators import login_required\n'), ((2400, 2416), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (2414, 2416), False, 'from django.contrib.auth.decorators import login_required\n'), ((2950, 2966), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (2964, 2966), False, 'from django.contrib.auth.decorators import login_required\n'), ((3454, 3470), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (3468, 3470), False, 'from django.contrib.auth.decorators import login_required\n'), ((4096, 4112), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (4110, 4112), False, 'from django.contrib.auth.decorators import login_required\n'), ((4410, 4426), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (4424, 4426), False, 'from django.contrib.auth.decorators import login_required\n'), ((4895, 4911), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (4909, 4911), False, 'from django.contrib.auth.decorators import login_required\n'), ((5362, 5378), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (5376, 5378), False, 'from django.contrib.auth.decorators import login_required\n'), ((5927, 5943), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (5941, 5943), False, 'from django.contrib.auth.decorators import login_required\n'), ((526, 572), 'django.shortcuts.render', 'render', (['request', '"""backend/index.html"""', 'context'], {}), "(request, 'backend/index.html', context)\n", (532, 572), False, 'from django.shortcuts import render\n'), ((682, 700), 'core.models.Post.objects.all', 'Post.objects.all', ([], {}), '()\n', (698, 700), False, 'from core.models import Post, Category, Tag\n'), ((1021, 1067), 'django.shortcuts.render', 'render', (['request', '"""backend/posts.html"""', 'context'], {}), "(request, 'backend/posts.html', context)\n", (1027, 1067), False, 'from django.shortcuts import render\n'), ((1175, 1185), 'backend.forms.PostForm', 'PostForm', ([], {}), '()\n', (1183, 1185), False, 'from backend.forms import PostForm, CategoryForm, TagForm\n'), ((1491, 1541), 'django.shortcuts.render', 'render', (['request', '"""backend/edit_post.html"""', 'context'], {}), "(request, 'backend/edit_post.html', context)\n", (1497, 1541), False, 'from django.shortcuts import render\n'), ((1659, 1687), 'core.models.Post.objects.get', 'Post.objects.get', ([], {'pk': 'post_id'}), '(pk=post_id)\n', (1675, 1687), 
False, 'from core.models import Post, Category, Tag\n'), ((1727, 1750), 'backend.forms.PostForm', 'PostForm', ([], {'instance': 'post'}), '(instance=post)\n', (1735, 1750), False, 'from backend.forms import PostForm, CategoryForm, TagForm\n'), ((2071, 2121), 'django.shortcuts.render', 'render', (['request', '"""backend/edit_post.html"""', 'context'], {}), "(request, 'backend/edit_post.html', context)\n", (2077, 2121), False, 'from django.shortcuts import render\n'), ((2241, 2269), 'core.models.Post.objects.get', 'Post.objects.get', ([], {'pk': 'post_id'}), '(pk=post_id)\n', (2257, 2269), False, 'from core.models import Post, Category, Tag\n'), ((2293, 2335), 'django.contrib.messages.success', 'messages.success', (['request', '"""Post deleted."""'], {}), "(request, 'Post deleted.')\n", (2309, 2335), False, 'from django.contrib import messages\n'), ((2523, 2545), 'core.models.Category.objects.all', 'Category.objects.all', ([], {}), '()\n', (2543, 2545), False, 'from core.models import Post, Category, Tag\n'), ((2896, 2947), 'django.shortcuts.render', 'render', (['request', '"""backend/categories.html"""', 'context'], {}), "(request, 'backend/categories.html', context)\n", (2902, 2947), False, 'from django.shortcuts import render\n'), ((3064, 3078), 'backend.forms.CategoryForm', 'CategoryForm', ([], {}), '()\n', (3076, 3078), False, 'from backend.forms import PostForm, CategoryForm, TagForm\n'), ((3397, 3451), 'django.shortcuts.render', 'render', (['request', '"""backend/edit_category.html"""', 'context'], {}), "(request, 'backend/edit_category.html', context)\n", (3403, 3451), False, 'from django.shortcuts import render\n'), ((3586, 3622), 'core.models.Category.objects.get', 'Category.objects.get', ([], {'pk': 'category_id'}), '(pk=category_id)\n', (3606, 3622), False, 'from core.models import Post, Category, Tag\n'), ((3670, 3701), 'backend.forms.CategoryForm', 'CategoryForm', ([], {'instance': 'category'}), '(instance=category)\n', (3682, 3701), False, 'from backend.forms import PostForm, CategoryForm, TagForm\n'), ((4039, 4093), 'django.shortcuts.render', 'render', (['request', '"""backend/edit_category.html"""', 'context'], {}), "(request, 'backend/edit_category.html', context)\n", (4045, 4093), False, 'from django.shortcuts import render\n'), ((4230, 4266), 'core.models.Category.objects.get', 'Category.objects.get', ([], {'pk': 'category_id'}), '(pk=category_id)\n', (4250, 4266), False, 'from core.models import Post, Category, Tag\n'), ((4294, 4340), 'django.contrib.messages.success', 'messages.success', (['request', '"""Category deleted."""'], {}), "(request, 'Category deleted.')\n", (4310, 4340), False, 'from django.contrib import messages\n'), ((4515, 4532), 'core.models.Tag.objects.all', 'Tag.objects.all', ([], {}), '()\n', (4530, 4532), False, 'from core.models import Post, Category, Tag\n'), ((4847, 4892), 'django.shortcuts.render', 'render', (['request', '"""backend/tags.html"""', 'context'], {}), "(request, 'backend/tags.html', context)\n", (4853, 4892), False, 'from django.shortcuts import render\n'), ((4998, 5007), 'backend.forms.TagForm', 'TagForm', ([], {}), '()\n', (5005, 5007), False, 'from backend.forms import PostForm, CategoryForm, TagForm\n'), ((5310, 5359), 'django.shortcuts.render', 'render', (['request', '"""backend/edit_tag.html"""', 'context'], {}), "(request, 'backend/edit_tag.html', context)\n", (5316, 5359), False, 'from django.shortcuts import render\n'), ((5473, 5499), 'core.models.Tag.objects.get', 'Tag.objects.get', ([], {'pk': 'tag_id'}), '(pk=tag_id)\n', 
(5488, 5499), False, 'from core.models import Post, Category, Tag\n'), ((5537, 5558), 'backend.forms.TagForm', 'TagForm', ([], {'instance': 'tag'}), '(instance=tag)\n', (5544, 5558), False, 'from backend.forms import PostForm, CategoryForm, TagForm\n'), ((5875, 5924), 'django.shortcuts.render', 'render', (['request', '"""backend/edit_tag.html"""', 'context'], {}), "(request, 'backend/edit_tag.html', context)\n", (5881, 5924), False, 'from django.shortcuts import render\n'), ((6040, 6066), 'core.models.Tag.objects.get', 'Tag.objects.get', ([], {'pk': 'tag_id'}), '(pk=tag_id)\n', (6055, 6066), False, 'from core.models import Post, Category, Tag\n'), ((6089, 6130), 'django.contrib.messages.success', 'messages.success', (['request', '"""Tag deleted."""'], {}), "(request, 'Tag deleted.')\n", (6105, 6130), False, 'from django.contrib import messages\n'), ((1235, 1272), 'backend.forms.PostForm', 'PostForm', (['request.POST', 'request.FILES'], {}), '(request.POST, request.FILES)\n', (1243, 1272), False, 'from backend.forms import PostForm, CategoryForm, TagForm\n'), ((1800, 1852), 'backend.forms.PostForm', 'PostForm', (['request.POST', 'request.FILES'], {'instance': 'post'}), '(request.POST, request.FILES, instance=post)\n', (1808, 1852), False, 'from backend.forms import PostForm, CategoryForm, TagForm\n'), ((2369, 2396), 'django.core.urlresolvers.reverse', 'reverse', (['"""user_panel_posts"""'], {}), "('user_panel_posts')\n", (2376, 2396), False, 'from django.core.urlresolvers import reverse\n'), ((3128, 3169), 'backend.forms.CategoryForm', 'CategoryForm', (['request.POST', 'request.FILES'], {}), '(request.POST, request.FILES)\n', (3140, 3169), False, 'from backend.forms import PostForm, CategoryForm, TagForm\n'), ((3751, 3811), 'backend.forms.CategoryForm', 'CategoryForm', (['request.POST', 'request.FILES'], {'instance': 'category'}), '(request.POST, request.FILES, instance=category)\n', (3763, 3811), False, 'from backend.forms import PostForm, CategoryForm, TagForm\n'), ((4374, 4406), 'django.core.urlresolvers.reverse', 'reverse', (['"""user_panel_categories"""'], {}), "('user_panel_categories')\n", (4381, 4406), False, 'from django.core.urlresolvers import reverse\n'), ((5057, 5093), 'backend.forms.TagForm', 'TagForm', (['request.POST', 'request.FILES'], {}), '(request.POST, request.FILES)\n', (5064, 5093), False, 'from backend.forms import PostForm, CategoryForm, TagForm\n'), ((5608, 5658), 'backend.forms.TagForm', 'TagForm', (['request.POST', 'request.FILES'], {'instance': 'tag'}), '(request.POST, request.FILES, instance=tag)\n', (5615, 5658), False, 'from backend.forms import PostForm, CategoryForm, TagForm\n'), ((6164, 6190), 'django.core.urlresolvers.reverse', 'reverse', (['"""user_panel_tags"""'], {}), "('user_panel_tags')\n", (6171, 6190), False, 'from django.core.urlresolvers import reverse\n'), ((1338, 1380), 'django.contrib.messages.success', 'messages.success', (['request', '"""Post created."""'], {}), "(request, 'Post created.')\n", (1354, 1380), False, 'from django.contrib import messages\n'), ((1918, 1960), 'django.contrib.messages.success', 'messages.success', (['request', '"""Post updated."""'], {}), "(request, 'Post updated.')\n", (1934, 1960), False, 'from django.contrib import messages\n'), ((3235, 3281), 'django.contrib.messages.success', 'messages.success', (['request', '"""Category created."""'], {}), "(request, 'Category created.')\n", (3251, 3281), False, 'from django.contrib import messages\n'), ((3877, 3923), 'django.contrib.messages.success', 'messages.success', 
(['request', '"""Category updated."""'], {}), "(request, 'Category updated.')\n", (3893, 3923), False, 'from django.contrib import messages\n'), ((5159, 5200), 'django.contrib.messages.success', 'messages.success', (['request', '"""Tag created."""'], {}), "(request, 'Tag created.')\n", (5175, 5200), False, 'from django.contrib import messages\n'), ((5724, 5765), 'django.contrib.messages.success', 'messages.success', (['request', '"""Tag updated."""'], {}), "(request, 'Tag updated.')\n", (5740, 5765), False, 'from django.contrib import messages\n'), ((1422, 1449), 'django.core.urlresolvers.reverse', 'reverse', (['"""user_panel_posts"""'], {}), "('user_panel_posts')\n", (1429, 1449), False, 'from django.core.urlresolvers import reverse\n'), ((2002, 2029), 'django.core.urlresolvers.reverse', 'reverse', (['"""user_panel_posts"""'], {}), "('user_panel_posts')\n", (2009, 2029), False, 'from django.core.urlresolvers import reverse\n'), ((3323, 3355), 'django.core.urlresolvers.reverse', 'reverse', (['"""user_panel_categories"""'], {}), "('user_panel_categories')\n", (3330, 3355), False, 'from django.core.urlresolvers import reverse\n'), ((3965, 3997), 'django.core.urlresolvers.reverse', 'reverse', (['"""user_panel_categories"""'], {}), "('user_panel_categories')\n", (3972, 3997), False, 'from django.core.urlresolvers import reverse\n'), ((5242, 5268), 'django.core.urlresolvers.reverse', 'reverse', (['"""user_panel_tags"""'], {}), "('user_panel_tags')\n", (5249, 5268), False, 'from django.core.urlresolvers import reverse\n'), ((5807, 5833), 'django.core.urlresolvers.reverse', 'reverse', (['"""user_panel_tags"""'], {}), "('user_panel_tags')\n", (5814, 5833), False, 'from django.core.urlresolvers import reverse\n')] |
FynnBe/tiktorch | tiktorch/server/session/process.py | 60c6fa9700e7ff73e44338e8755c56c6e8846f2f | import dataclasses
import io
import multiprocessing as _mp
import uuid
import zipfile
from concurrent.futures import Future
from multiprocessing.connection import Connection
from typing import List, Optional, Tuple
import numpy
from tiktorch import log
from tiktorch.rpc import Shutdown
from tiktorch.rpc import mp as _mp_rpc
from tiktorch.rpc.mp import MPServer
from tiktorch.server.reader import eval_model_zip
from .backend import base
from .rpc_interface import IRPCModelSession
@dataclasses.dataclass
class ModelInfo:
# TODO: Test for model info
name: str
input_axes: str
output_axes: str
valid_shapes: List[List[Tuple[str, int]]]
halo: List[Tuple[str, int]]
offset: List[Tuple[str, int]]
scale: List[Tuple[str, float]]
class ModelSessionProcess(IRPCModelSession):
def __init__(self, model_zip: bytes, devices: List[str]) -> None:
with zipfile.ZipFile(io.BytesIO(model_zip)) as model_file:
self._model = eval_model_zip(model_file, devices)
self._datasets = {}
self._worker = base.SessionBackend(self._model)
def forward(self, input_tensor: numpy.ndarray) -> Future:
res = self._worker.forward(input_tensor)
return res
def create_dataset(self, mean, stddev):
id_ = uuid.uuid4().hex
self._datasets[id_] = {"mean": mean, "stddev": stddev}
return id_
def get_model_info(self) -> ModelInfo:
return ModelInfo(
self._model.name,
self._model.input_axes,
self._model.output_axes,
valid_shapes=[self._model.input_shape],
halo=self._model.halo,
scale=self._model.scale,
offset=self._model.offset,
)
def shutdown(self) -> Shutdown:
self._worker.shutdown()
return Shutdown()
def _run_model_session_process(
conn: Connection, model_zip: bytes, devices: List[str], log_queue: Optional[_mp.Queue] = None
):
try:
# from: https://github.com/pytorch/pytorch/issues/973#issuecomment-346405667
import resource
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (4096, rlimit[1]))
except ModuleNotFoundError:
pass # probably running on windows
if log_queue:
log.configure(log_queue)
session_proc = ModelSessionProcess(model_zip, devices)
srv = MPServer(session_proc, conn)
srv.listen()
def start_model_session_process(
model_zip: bytes, devices: List[str], log_queue: Optional[_mp.Queue] = None
) -> Tuple[_mp.Process, IRPCModelSession]:
client_conn, server_conn = _mp.Pipe()
proc = _mp.Process(
target=_run_model_session_process,
name="ModelSessionProcess",
kwargs={"conn": server_conn, "devices": devices, "log_queue": log_queue, "model_zip": model_zip},
)
proc.start()
return proc, _mp_rpc.create_client(IRPCModelSession, client_conn)
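# Added note: ``start_model_session_process`` is the parent-side entry point.
# It spawns ``_run_model_session_process`` in a child process, where an
# ``MPServer`` serves the ``ModelSessionProcess`` object over one end of the
# pipe, while the returned ``_mp_rpc`` client proxies ``IRPCModelSession``
# calls from the parent over the other end.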
| [((2405, 2433), 'tiktorch.rpc.mp.MPServer', 'MPServer', (['session_proc', 'conn'], {}), '(session_proc, conn)\n', (2413, 2433), False, 'from tiktorch.rpc.mp import MPServer\n'), ((2640, 2650), 'multiprocessing.Pipe', '_mp.Pipe', ([], {}), '()\n', (2648, 2650), True, 'import multiprocessing as _mp\n'), ((2662, 2842), 'multiprocessing.Process', '_mp.Process', ([], {'target': '_run_model_session_process', 'name': '"""ModelSessionProcess"""', 'kwargs': "{'conn': server_conn, 'devices': devices, 'log_queue': log_queue,\n 'model_zip': model_zip}"}), "(target=_run_model_session_process, name='ModelSessionProcess',\n kwargs={'conn': server_conn, 'devices': devices, 'log_queue': log_queue,\n 'model_zip': model_zip})\n", (2673, 2842), True, 'import multiprocessing as _mp\n'), ((1812, 1822), 'tiktorch.rpc.Shutdown', 'Shutdown', ([], {}), '()\n', (1820, 1822), False, 'from tiktorch.rpc import Shutdown\n'), ((2094, 2136), 'resource.getrlimit', 'resource.getrlimit', (['resource.RLIMIT_NOFILE'], {}), '(resource.RLIMIT_NOFILE)\n', (2112, 2136), False, 'import resource\n'), ((2145, 2206), 'resource.setrlimit', 'resource.setrlimit', (['resource.RLIMIT_NOFILE', '(4096, rlimit[1])'], {}), '(resource.RLIMIT_NOFILE, (4096, rlimit[1]))\n', (2163, 2206), False, 'import resource\n'), ((2310, 2334), 'tiktorch.log.configure', 'log.configure', (['log_queue'], {}), '(log_queue)\n', (2323, 2334), False, 'from tiktorch import log\n'), ((2900, 2952), 'tiktorch.rpc.mp.create_client', '_mp_rpc.create_client', (['IRPCModelSession', 'client_conn'], {}), '(IRPCModelSession, client_conn)\n', (2921, 2952), True, 'from tiktorch.rpc import mp as _mp_rpc\n'), ((972, 1007), 'tiktorch.server.reader.eval_model_zip', 'eval_model_zip', (['model_file', 'devices'], {}), '(model_file, devices)\n', (986, 1007), False, 'from tiktorch.server.reader import eval_model_zip\n'), ((1283, 1295), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1293, 1295), False, 'import uuid\n'), ((908, 929), 'io.BytesIO', 'io.BytesIO', (['model_zip'], {}), '(model_zip)\n', (918, 929), False, 'import io\n')] |
dangerstudios/OpenPype | openpype/modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py | 10ddcc4699137888616eec57cd7fac9648189714 | from openpype.modules.ftrack.lib import BaseEvent
from openpype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY
from openpype.modules.ftrack.event_handlers_server.event_sync_to_avalon import (
SyncToAvalonEvent
)
class DelAvalonIdFromNew(BaseEvent):
'''
    This event removes the AvalonId from custom attributes of new entities.
    Result:
    - 'Copy->Pasted' entities won't have the same AvalonId as the source entity
    Priority of this event must be less than the SyncToAvalon event's priority.
'''
priority = SyncToAvalonEvent.priority - 1
ignore_me = True
def launch(self, session, event):
created = []
entities = event['data']['entities']
for entity in entities:
try:
entity_id = entity['entityId']
if entity.get('action', None) == 'add':
id_dict = entity['changes']['id']
if id_dict['new'] is not None and id_dict['old'] is None:
created.append(id_dict['new'])
elif (
entity.get('action', None) == 'update' and
CUST_ATTR_ID_KEY in entity['keys'] and
entity_id in created
):
ftrack_entity = session.get(
self._get_entity_type(entity),
entity_id
)
cust_attrs = ftrack_entity["custom_attributes"]
if cust_attrs[CUST_ATTR_ID_KEY]:
cust_attrs[CUST_ATTR_ID_KEY] = ""
session.commit()
except Exception:
session.rollback()
continue
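# Added note: in ``launch`` above, the 'add' branch collects the ids of
# entities created within the event, and the 'update' branch then clears the
# avalon id custom attribute on those same entities, so copy/pasted entities
# never keep the avalon id of their source.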
def register(session):
'''Register plugin. Called when used as an plugin.'''
DelAvalonIdFromNew(session).register()
| [] |
elifesciences/elife-bot | tests/workflow/test_workflow_ingest_accepted_submission.py | d3a102c8030e4b7ec83cbd45e5f839dba4f9ffd9 | import unittest
import tests.settings_mock as settings_mock
from tests.activity.classes_mock import FakeLogger
from workflow.workflow_IngestAcceptedSubmission import workflow_IngestAcceptedSubmission
class TestWorkflowIngestAcceptedSubmission(unittest.TestCase):
def setUp(self):
self.workflow = workflow_IngestAcceptedSubmission(
settings_mock, FakeLogger(), None, None, None, None
)
def test_init(self):
self.assertEqual(self.workflow.name, "IngestAcceptedSubmission")
| [((372, 384), 'tests.activity.classes_mock.FakeLogger', 'FakeLogger', ([], {}), '()\n', (382, 384), False, 'from tests.activity.classes_mock import FakeLogger\n')] |
lynnUg/vumi-go | go/token/views.py | 852f906c46d5d26940bd6699f11488b73bbc3742 | from urllib import urlencode
import urlparse
from django.shortcuts import Http404, redirect
from django.contrib.auth.views import logout
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from vumi.utils import load_class_by_string
from go.base.utils import vumi_api
def token(request, token):
# We only need the redis manager here, but it's saner to get a whole
# vumi_api and not worry about all the setup magic.
api = vumi_api()
token_data = api.token_manager.get(token)
if not token_data:
raise Http404
user_id = int(token_data['user_id'])
redirect_to = token_data['redirect_to']
system_token = token_data['system_token']
# If we're authorized and we're the same user_id then redirect to
# where we need to be
if not user_id or request.user.id == user_id:
path, _, qs = redirect_to.partition('?')
params = urlparse.parse_qs(qs)
# since the token can be custom we prepend the size of the user_token
# to the token being forwarded so the view handling the `redirect_to`
# can lookup the token and verify the system token.
params.update({'token': '%s-%s%s' % (len(token), token, system_token)})
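        # Added illustration (made-up values): for a 6-character token
        # "abc123" and system token "xyz" the forwarded value is
        # "6-abc123xyz"; the receiving view can split off the length prefix to
        # recover the user token and verify the trailing system token.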
return redirect('%s?%s' % (path, urlencode(params)))
# If we got here then we need authentication and the user's either not
# logged in or is logged in with a wrong account.
if request.user.is_authenticated():
logout(request)
messages.info(request, 'Wrong account for this token.')
return redirect('%s?%s' % (reverse('auth_login'), urlencode({
'next': reverse('token', kwargs={'token': token}),
})))
@login_required
def token_task(request):
api = request.user_api.api
token = request.GET.get('token')
token_data = api.token_manager.verify_get(token)
if not token_data:
raise Http404
params = token_data['extra_params']
callback_name = params['callback_name']
callback_args = params['callback_args']
callback_kwargs = params['callback_kwargs']
return_to = params['return_to']
message = params['message']
message_level = params['message_level']
callback = load_class_by_string(callback_name)
callback(*callback_args, **callback_kwargs)
messages.add_message(request, message_level, message)
return redirect(return_to)
| [((526, 536), 'go.base.utils.vumi_api', 'vumi_api', ([], {}), '()\n', (534, 536), False, 'from go.base.utils import vumi_api\n'), ((2263, 2298), 'vumi.utils.load_class_by_string', 'load_class_by_string', (['callback_name'], {}), '(callback_name)\n', (2283, 2298), False, 'from vumi.utils import load_class_by_string\n'), ((2351, 2404), 'django.contrib.messages.add_message', 'messages.add_message', (['request', 'message_level', 'message'], {}), '(request, message_level, message)\n', (2371, 2404), False, 'from django.contrib import messages\n'), ((2416, 2435), 'django.shortcuts.redirect', 'redirect', (['return_to'], {}), '(return_to)\n', (2424, 2435), False, 'from django.shortcuts import Http404, redirect\n'), ((973, 994), 'urlparse.parse_qs', 'urlparse.parse_qs', (['qs'], {}), '(qs)\n', (990, 994), False, 'import urlparse\n'), ((1530, 1545), 'django.contrib.auth.views.logout', 'logout', (['request'], {}), '(request)\n', (1536, 1545), False, 'from django.contrib.auth.views import logout\n'), ((1554, 1609), 'django.contrib.messages.info', 'messages.info', (['request', '"""Wrong account for this token."""'], {}), "(request, 'Wrong account for this token.')\n", (1567, 1609), False, 'from django.contrib import messages\n'), ((1641, 1662), 'django.core.urlresolvers.reverse', 'reverse', (['"""auth_login"""'], {}), "('auth_login')\n", (1648, 1662), False, 'from django.core.urlresolvers import reverse\n'), ((1332, 1349), 'urllib.urlencode', 'urlencode', (['params'], {}), '(params)\n', (1341, 1349), False, 'from urllib import urlencode\n'), ((1692, 1733), 'django.core.urlresolvers.reverse', 'reverse', (['"""token"""'], {'kwargs': "{'token': token}"}), "('token', kwargs={'token': token})\n", (1699, 1733), False, 'from django.core.urlresolvers import reverse\n')] |
tylerbutler/typogrify | typogrify/templatetags/typogrify_tags.py | 7b7a67348a2d51400fd38c0b61e30e34ca98994e | from typogrify.filters import amp, caps, initial_quotes, smartypants, titlecase, typogrify, widont, TypogrifyError
from functools import wraps
from django.conf import settings
from django import template
from django.utils.safestring import mark_safe
from django.utils.encoding import force_unicode
register = template.Library()
def make_safe(f):
"""
A function wrapper to make typogrify play nice with django's
unicode support.
"""
@wraps(f)
def wrapper(text):
text = force_unicode(text)
f.is_safe = True
out = text
try:
out = f(text)
except TypogrifyError, e:
if settings.DEBUG:
raise e
return text
return mark_safe(out)
wrapper.is_safe = True
return wrapper
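# Added note: each filter registered below goes through ``make_safe`` so the
# input is coerced to unicode, the filtered result is marked safe on success,
# and the original text is returned unchanged if a ``TypogrifyError`` is
# raised (re-raised only when ``settings.DEBUG`` is True).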
register.filter('amp', make_safe(amp))
register.filter('caps', make_safe(caps))
register.filter('initial_quotes', make_safe(initial_quotes))
register.filter('smartypants', make_safe(smartypants))
register.filter('titlecase', make_safe(titlecase))
register.filter('typogrify', make_safe(typogrify))
register.filter('widont', make_safe(widont))
| [] |
carbrock/bvbabel | bvbabel/vmr.py | baac12d106455e34d9924309eadb4df991d3d8c9 | """Read, write, create Brainvoyager VMR file format."""
import struct
import numpy as np
from bvbabel.utils import (read_variable_length_string,
write_variable_length_string)
# =============================================================================
def read_vmr(filename):
"""Read Brainvoyager VMR file.
Parameters
----------
filename : string
Path to file.
Returns
-------
header : dictionary
Pre-data and post-data headers.
data : 3D numpy.array
Image data.
"""
header = dict()
with open(filename, 'rb') as f:
# ---------------------------------------------------------------------
# VMR Pre-Data Header
# ---------------------------------------------------------------------
# NOTE(Developer Guide 2.6): VMR files contain anatomical 3D data sets,
# typically containing the whole brain (head) of subjects. The
# intensity values are stored as a series of bytes. See the V16 format
# for a version storing each intensity value with two bytes (short
# integers). The VMR format contains a small header followed by the
# actual data followed by a second, more extensive, header. The current
# version of VMR files is "4", which is only slightly different from
# version 3 (as indicated below). Version 3 added offset values to
# format 2 in order to represent large data sets efficiently, e.g. in
# the context of advanced segmentation processing. Compared to the
# original file version "1", file versions 2 and higher contain
# additional header information after the actual data ("post-data
# header"). This allows to read VMR data sets with minimal header
# checking if the extended information is not needed. The information
# in the post-data header contains position information (if available)
# and stores a series of spatial transformations, which might have been
# performed to the original data set ("history record"). The
        # post-header data can probably be ignored for custom routines, but is
# important in BrainVoyager QX for spatial transformation and
# coregistration routines as well as for proper visualization.
# Expected binary data: unsigned short int (2 bytes)
data, = struct.unpack('<H', f.read(2))
header["File version"] = data
data, = struct.unpack('<H', f.read(2))
header["DimX"] = data
data, = struct.unpack('<H', f.read(2))
header["DimY"] = data
data, = struct.unpack('<H', f.read(2))
header["DimZ"] = data
# ---------------------------------------------------------------------
# VMR Data
# ---------------------------------------------------------------------
# NOTE(Developer Guide 2.6): Each data element (intensity value) is
# represented in 1 byte. The data is organized in three loops:
# DimZ
# DimY
# DimX
#
# The axes terminology follows the internal BrainVoyager (BV) format.
# The mapping to Talairach axes is as follows:
# BV (X front -> back) [axis 2 after np.reshape] = Y in Tal space
# BV (Y top -> bottom) [axis 1 after np.reshape] = Z in Tal space
# BV (Z left -> right) [axis 0 after np.reshape] = X in Tal space
# Expected binary data: unsigned char (1 byte)
data_img = np.zeros((header["DimZ"] * header["DimY"] * header["DimX"]),
dtype="<B")
for i in range(data_img.size):
data_img[i], = struct.unpack('<B', f.read(1))
data_img = np.reshape(
data_img, (header["DimZ"], header["DimY"], header["DimX"]))
data_img = np.transpose(data_img, (0, 2, 1)) # BV to Tal
data_img = data_img[::-1, ::-1, ::-1] # Flip BV axes
# ---------------------------------------------------------------------
# VMR Post-Data Header
# ---------------------------------------------------------------------
# NOTE(Developer Guide 2.6): The first four entries of the post-data
# header are new since file version "3" and contain offset values for
# each dimension as well as a value indicating the size of a cube with
# iso-dimensions to which the data set will be internally "expanded"
# for certain operations. The axes labels are in terms of
# BrainVoyager's internal format. These four entries are followed by
# scan position information from the original file headers, e.g. from
# DICOM files. The coordinate axes labels in these entries are not in
# terms of BrainVoyager's internal conventions but follow the DICOM
        # standard. This may be followed by a section listing spatial
        # transformations that were performed to create the
        # current VMR (e.g. ACPC transformation). Finally, additional
        # information further describes the data set, including the assumed
# left-right convention, the reference space (e.g. Talairach after
# normalization) and voxel resolution.
if header["File version"] >= 3:
# NOTE(Developer Guide 2.6): These four entries have been added in
# file version "3" with BrainVoyager QX 1.7. All other entries are
# identical to file version "2".
# Expected binary data: short int (2 bytes)
data, = struct.unpack('<h', f.read(2))
header["OffsetX"] = data
data, = struct.unpack('<h', f.read(2))
header["OffsetY"] = data
data, = struct.unpack('<h', f.read(2))
header["OffsetZ"] = data
data, = struct.unpack('<h', f.read(2))
header["FramingCubeDim"] = data
# Expected binary data: int (4 bytes)
data, = struct.unpack('<i', f.read(4))
header["PosInfosVerified"] = data
data, = struct.unpack('<i', f.read(4))
header["CoordinateSystem"] = data
# Expected binary data: float (4 bytes)
data, = struct.unpack('<f', f.read(4))
header["Slice1CenterX"] = data # First slice center X coordinate
data, = struct.unpack('<f', f.read(4))
header["Slice1CenterY"] = data # First slice center Y coordinate
data, = struct.unpack('<f', f.read(4))
header["Slice1CenterZ"] = data # First slice center Z coordinate
data, = struct.unpack('<f', f.read(4))
header["SliceNCenterX"] = data # Last slice center X coordinate
data, = struct.unpack('<f', f.read(4))
header["SliceNCenterY"] = data # Last slice center Y coordinate
data, = struct.unpack('<f', f.read(4))
header["SliceNCenterZ"] = data # Last slice center Z coordinate
data, = struct.unpack('<f', f.read(4))
header["RowDirX"] = data # Slice row direction vector X component
data, = struct.unpack('<f', f.read(4))
header["RowDirY"] = data # Slice row direction vector Y component
data, = struct.unpack('<f', f.read(4))
header["RowDirZ"] = data # Slice row direction vector Z component
data, = struct.unpack('<f', f.read(4))
header["ColDirX"] = data # Slice column direction vector X component
data, = struct.unpack('<f', f.read(4))
header["ColDirY"] = data # Slice column direction vector Y component
data, = struct.unpack('<f', f.read(4))
header["ColDirZ"] = data # Slice column direction vector Z component
# Expected binary data: int (4 bytes)
data, = struct.unpack('<i', f.read(4))
header["NRows"] = data # Nr of rows of slice image matrix
data, = struct.unpack('<i', f.read(4))
header["NCols"] = data # Nr of columns of slice image matrix
# Expected binary data: float (4 bytes)
data, = struct.unpack('<f', f.read(4))
header["FoVRows"] = data # Field of view extent in row direction [mm]
data, = struct.unpack('<f', f.read(4))
header["FoVCols"] = data # Field of view extent in column dir. [mm]
data, = struct.unpack('<f', f.read(4))
header["SliceThickness"] = data # Slice thickness [mm]
data, = struct.unpack('<f', f.read(4))
header["GapThickness"] = data # Gap thickness [mm]
# Expected binary data: int (4 bytes)
data, = struct.unpack('<i', f.read(4))
header["NrOfPastSpatialTransformations"] = data
if header["NrOfPastSpatialTransformations"] != 0:
# NOTE(Developer Guide 2.6): For each past transformation, the
# information specified in the following table is stored. The
# "type of transformation" is a value determining how many
# subsequent values define the transformation:
# "1": Rigid body+scale (3 translation, 3 rotation, 3 scale)
# "2": Affine transformation (16 values, 4x4 matrix)
# "4": Talairach transformation
# "5": Un-Talairach transformation (1 - 5 -> BV axes)
header["PastTransformation"] = []
for i in range(header["NrOfPastSpatialTransformations"]):
header["PastTransformation"].append(dict())
# Expected binary data: variable-length string
data = read_variable_length_string(f)
header["PastTransformation"][i]["Name"] = data
# Expected binary data: int (4 bytes)
data, = struct.unpack('<i', f.read(4))
header["PastTransformation"][i]["Type"] = data
# Expected binary data: variable-length string
data = read_variable_length_string(f)
header["PastTransformation"][i]["SourceFileName"] = data
# Expected binary data: int (4 bytes)
data, = struct.unpack('<i', f.read(4))
header["PastTransformation"][i]["NrOfValues"] = data
# Store transformation values as a list
trans_values = []
for j in range(header["PastTransformation"][i]["NrOfValues"]):
# Expected binary data: float (4 bytes)
data, = struct.unpack('<f', f.read(4))
trans_values.append(data)
header["PastTransformation"][i]["Values"] = trans_values
# Expected binary data: char (1 byte)
data, = struct.unpack('<B', f.read(1))
header["LeftRightConvention"] = data # modified in v4
data, = struct.unpack('<B', f.read(1))
header["ReferenceSpaceVMR"] = data # new in v4
# Expected binary data: float (4 bytes)
data, = struct.unpack('<f', f.read(4))
header["VoxelSizeX"] = data # Voxel resolution along X axis
data, = struct.unpack('<f', f.read(4))
header["VoxelSizeY"] = data # Voxel resolution along Y axis
data, = struct.unpack('<f', f.read(4))
header["VoxelSizeZ"] = data # Voxel resolution along Z axis
# Expected binary data: char (1 byte)
data, = struct.unpack('<B', f.read(1))
header["VoxelResolutionVerified"] = data
data, = struct.unpack('<B', f.read(1))
header["VoxelResolutionInTALmm"] = data
# Expected binary data: int (4 bytes)
data, = struct.unpack('<i', f.read(4))
header["VMROrigV16MinValue"] = data # 16-bit data min intensity
data, = struct.unpack('<i', f.read(4))
header["VMROrigV16MeanValue"] = data # 16-bit data mean intensity
data, = struct.unpack('<i', f.read(4))
header["VMROrigV16MaxValue"] = data # 16-bit data max intensity
return header, data_img
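# Usage sketch (illustrative only, not part of the original module; the file
# path below is hypothetical):
#
#   header, data = read_vmr("/path/to/anatomy.vmr")
#   print(header["DimX"], header["DimY"], header["DimZ"], data.shape)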
# =============================================================================
def write_vmr(filename, header, data_img):
"""Protocol to write Brainvoyager VMR file.
Parameters
----------
filename : string
Output filename.
header : dictionary
Header of VMR file.
data_img : numpy.array, 3D
Image.
"""
with open(filename, 'wb') as f:
# ---------------------------------------------------------------------
# VMR Pre-Data Header
# ---------------------------------------------------------------------
# Expected binary data: unsigned short int (2 bytes)
data = header["File version"]
f.write(struct.pack('<H', data))
data = header["DimX"]
f.write(struct.pack('<H', data))
data = header["DimY"]
f.write(struct.pack('<H', data))
data = header["DimZ"]
f.write(struct.pack('<H', data))
# ---------------------------------------------------------------------
# VMR Data
# ---------------------------------------------------------------------
# Convert axes from Nifti standard back to BV standard
data_img = data_img[::-1, ::-1, ::-1] # Flip BV axes
data_img = np.transpose(data_img, (0, 2, 1)) # BV to Tal
# Expected binary data: unsigned char (1 byte)
data_img = data_img.flatten()
for i in range(data_img.size):
f.write(struct.pack('<B', data_img[i]))
# ---------------------------------------------------------------------
# VMR Post-Data Header
# ---------------------------------------------------------------------
if header["File version"] >= 3:
# Expected binary data: short int (2 bytes)
data = header["OffsetX"]
f.write(struct.pack('<h', data))
data = header["OffsetY"]
f.write(struct.pack('<h', data))
data = header["OffsetZ"]
f.write(struct.pack('<h', data))
data = header["FramingCubeDim"]
f.write(struct.pack('<h', data))
# Expected binary data: int (4 bytes)
data = header["PosInfosVerified"]
f.write(struct.pack('<i', data))
data = header["CoordinateSystem"]
f.write(struct.pack('<i', data))
# Expected binary data: float (4 bytes)
data = header["Slice1CenterX"]
f.write(struct.pack('<f', data))
data = header["Slice1CenterY"]
f.write(struct.pack('<f', data))
data = header["Slice1CenterZ"]
f.write(struct.pack('<f', data))
data = header["SliceNCenterX"]
f.write(struct.pack('<f', data))
data = header["SliceNCenterY"]
f.write(struct.pack('<f', data))
data = header["SliceNCenterZ"]
f.write(struct.pack('<f', data))
data = header["RowDirX"]
f.write(struct.pack('<f', data))
data = header["RowDirY"]
f.write(struct.pack('<f', data))
data = header["RowDirZ"]
f.write(struct.pack('<f', data))
data = header["ColDirX"]
f.write(struct.pack('<f', data))
data = header["ColDirY"]
f.write(struct.pack('<f', data))
data = header["ColDirZ"]
f.write(struct.pack('<f', data))
# Expected binary data: int (4 bytes)
data = header["NRows"]
f.write(struct.pack('<i', data))
data = header["NCols"]
f.write(struct.pack('<i', data))
# Expected binary data: float (4 bytes)
data = header["FoVRows"]
f.write(struct.pack('<f', data))
data = header["FoVCols"]
f.write(struct.pack('<f', data))
data = header["SliceThickness"]
f.write(struct.pack('<f', data))
data = header["GapThickness"]
f.write(struct.pack('<f', data))
# Expected binary data: int (4 bytes)
data = header["NrOfPastSpatialTransformations"]
f.write(struct.pack('<i', data))
if header["NrOfPastSpatialTransformations"] != 0:
for i in range(header["NrOfPastSpatialTransformations"]):
# Expected binary data: variable-length string
data = header["PastTransformation"][i]["Name"]
write_variable_length_string(f, data)
# Expected binary data: int (4 bytes)
data = header["PastTransformation"][i]["Type"]
f.write(struct.pack('<i', data))
# Expected binary data: variable-length string
data = header["PastTransformation"][i]["SourceFileName"]
write_variable_length_string(f, data)
# Expected binary data: int (4 bytes)
data = header["PastTransformation"][i]["NrOfValues"]
f.write(struct.pack('<i', data))
# Transformation values are stored as a list
trans_values = header["PastTransformation"][i]["Values"]
for j in range(header["PastTransformation"][i]["NrOfValues"]):
# Expected binary data: float (4 bytes)
f.write(struct.pack('<f', trans_values[j]))
# Expected binary data: char (1 byte)
data = header["LeftRightConvention"]
f.write(struct.pack('<B', data))
data = header["ReferenceSpaceVMR"]
f.write(struct.pack('<B', data))
# Expected binary data: float (4 bytes)
data = header["VoxelSizeX"]
f.write(struct.pack('<f', data))
data = header["VoxelSizeY"]
f.write(struct.pack('<f', data))
data = header["VoxelSizeZ"]
f.write(struct.pack('<f', data))
# Expected binary data: char (1 byte)
data = header["VoxelResolutionVerified"]
f.write(struct.pack('<B', data))
data = header["VoxelResolutionInTALmm"]
f.write(struct.pack('<B', data))
# Expected binary data: int (4 bytes)
data = header["VMROrigV16MinValue"]
f.write(struct.pack('<i', data))
data = header["VMROrigV16MeanValue"]
f.write(struct.pack('<i', data))
data = header["VMROrigV16MaxValue"]
f.write(struct.pack('<i', data))
return print("VMR saved.")
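# Usage sketch (illustrative only, not part of the original module; paths are
# hypothetical, header and data are the values returned by read_vmr above):
#
#   header, data = read_vmr("/path/to/anatomy.vmr")
#   write_vmr("/path/to/anatomy_copy.vmr", header, data)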
| [((3535, 3605), 'numpy.zeros', 'np.zeros', (["(header['DimZ'] * header['DimY'] * header['DimX'])"], {'dtype': '"""<B"""'}), "(header['DimZ'] * header['DimY'] * header['DimX'], dtype='<B')\n", (3543, 3605), True, 'import numpy as np\n'), ((3752, 3822), 'numpy.reshape', 'np.reshape', (['data_img', "(header['DimZ'], header['DimY'], header['DimX'])"], {}), "(data_img, (header['DimZ'], header['DimY'], header['DimX']))\n", (3762, 3822), True, 'import numpy as np\n'), ((3856, 3889), 'numpy.transpose', 'np.transpose', (['data_img', '(0, 2, 1)'], {}), '(data_img, (0, 2, 1))\n', (3868, 3889), True, 'import numpy as np\n'), ((13096, 13129), 'numpy.transpose', 'np.transpose', (['data_img', '(0, 2, 1)'], {}), '(data_img, (0, 2, 1))\n', (13108, 13129), True, 'import numpy as np\n'), ((12534, 12557), 'struct.pack', 'struct.pack', (['"""<H"""', 'data'], {}), "('<H', data)\n", (12545, 12557), False, 'import struct\n'), ((12605, 12628), 'struct.pack', 'struct.pack', (['"""<H"""', 'data'], {}), "('<H', data)\n", (12616, 12628), False, 'import struct\n'), ((12676, 12699), 'struct.pack', 'struct.pack', (['"""<H"""', 'data'], {}), "('<H', data)\n", (12687, 12699), False, 'import struct\n'), ((12747, 12770), 'struct.pack', 'struct.pack', (['"""<H"""', 'data'], {}), "('<H', data)\n", (12758, 12770), False, 'import struct\n'), ((14056, 14079), 'struct.pack', 'struct.pack', (['"""<i"""', 'data'], {}), "('<i', data)\n", (14067, 14079), False, 'import struct\n'), ((14139, 14162), 'struct.pack', 'struct.pack', (['"""<i"""', 'data'], {}), "('<i', data)\n", (14150, 14162), False, 'import struct\n'), ((14268, 14291), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (14279, 14291), False, 'import struct\n'), ((14348, 14371), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (14359, 14371), False, 'import struct\n'), ((14428, 14451), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (14439, 14451), False, 'import struct\n'), ((14508, 14531), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (14519, 14531), False, 'import struct\n'), ((14588, 14611), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (14599, 14611), False, 'import struct\n'), ((14668, 14691), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (14679, 14691), False, 'import struct\n'), ((14742, 14765), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (14753, 14765), False, 'import struct\n'), ((14816, 14839), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (14827, 14839), False, 'import struct\n'), ((14890, 14913), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (14901, 14913), False, 'import struct\n'), ((14964, 14987), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (14975, 14987), False, 'import struct\n'), ((15038, 15061), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (15049, 15061), False, 'import struct\n'), ((15112, 15135), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (15123, 15135), False, 'import struct\n'), ((15231, 15254), 'struct.pack', 'struct.pack', (['"""<i"""', 'data'], {}), "('<i', data)\n", (15242, 15254), False, 'import struct\n'), ((15303, 15326), 'struct.pack', 'struct.pack', (['"""<i"""', 'data'], {}), "('<i', data)\n", (15314, 15326), False, 'import struct\n'), ((15426, 15449), 'struct.pack', 'struct.pack', 
(['"""<f"""', 'data'], {}), "('<f', data)\n", (15437, 15449), False, 'import struct\n'), ((15500, 15523), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (15511, 15523), False, 'import struct\n'), ((15581, 15604), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (15592, 15604), False, 'import struct\n'), ((15660, 15683), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (15671, 15683), False, 'import struct\n'), ((15804, 15827), 'struct.pack', 'struct.pack', (['"""<i"""', 'data'], {}), "('<i', data)\n", (15815, 15827), False, 'import struct\n'), ((17115, 17138), 'struct.pack', 'struct.pack', (['"""<B"""', 'data'], {}), "('<B', data)\n", (17126, 17138), False, 'import struct\n'), ((17199, 17222), 'struct.pack', 'struct.pack', (['"""<B"""', 'data'], {}), "('<B', data)\n", (17210, 17222), False, 'import struct\n'), ((17325, 17348), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (17336, 17348), False, 'import struct\n'), ((17402, 17425), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (17413, 17425), False, 'import struct\n'), ((17479, 17502), 'struct.pack', 'struct.pack', (['"""<f"""', 'data'], {}), "('<f', data)\n", (17490, 17502), False, 'import struct\n'), ((17616, 17639), 'struct.pack', 'struct.pack', (['"""<B"""', 'data'], {}), "('<B', data)\n", (17627, 17639), False, 'import struct\n'), ((17705, 17728), 'struct.pack', 'struct.pack', (['"""<B"""', 'data'], {}), "('<B', data)\n", (17716, 17728), False, 'import struct\n'), ((17837, 17860), 'struct.pack', 'struct.pack', (['"""<i"""', 'data'], {}), "('<i', data)\n", (17848, 17860), False, 'import struct\n'), ((17923, 17946), 'struct.pack', 'struct.pack', (['"""<i"""', 'data'], {}), "('<i', data)\n", (17934, 17946), False, 'import struct\n'), ((18008, 18031), 'struct.pack', 'struct.pack', (['"""<i"""', 'data'], {}), "('<i', data)\n", (18019, 18031), False, 'import struct\n'), ((9457, 9487), 'bvbabel.utils.read_variable_length_string', 'read_variable_length_string', (['f'], {}), '(f)\n', (9484, 9487), False, 'from bvbabel.utils import read_variable_length_string, write_variable_length_string\n'), ((9811, 9841), 'bvbabel.utils.read_variable_length_string', 'read_variable_length_string', (['f'], {}), '(f)\n', (9838, 9841), False, 'from bvbabel.utils import read_variable_length_string, write_variable_length_string\n'), ((13296, 13326), 'struct.pack', 'struct.pack', (['"""<B"""', 'data_img[i]'], {}), "('<B', data_img[i])\n", (13307, 13326), False, 'import struct\n'), ((13673, 13696), 'struct.pack', 'struct.pack', (['"""<h"""', 'data'], {}), "('<h', data)\n", (13684, 13696), False, 'import struct\n'), ((13755, 13778), 'struct.pack', 'struct.pack', (['"""<h"""', 'data'], {}), "('<h', data)\n", (13766, 13778), False, 'import struct\n'), ((13837, 13860), 'struct.pack', 'struct.pack', (['"""<h"""', 'data'], {}), "('<h', data)\n", (13848, 13860), False, 'import struct\n'), ((13926, 13949), 'struct.pack', 'struct.pack', (['"""<h"""', 'data'], {}), "('<h', data)\n", (13937, 13949), False, 'import struct\n'), ((16100, 16137), 'bvbabel.utils.write_variable_length_string', 'write_variable_length_string', (['f', 'data'], {}), '(f, data)\n', (16128, 16137), False, 'from bvbabel.utils import read_variable_length_string, write_variable_length_string\n'), ((16458, 16495), 'bvbabel.utils.write_variable_length_string', 'write_variable_length_string', (['f', 'data'], {}), '(f, data)\n', (16486, 16495), False, 'from bvbabel.utils 
import read_variable_length_string, write_variable_length_string\n'), ((16280, 16303), 'struct.pack', 'struct.pack', (['"""<i"""', 'data'], {}), "('<i', data)\n", (16291, 16303), False, 'import struct\n'), ((16644, 16667), 'struct.pack', 'struct.pack', (['"""<i"""', 'data'], {}), "('<i', data)\n", (16655, 16667), False, 'import struct\n'), ((16971, 17005), 'struct.pack', 'struct.pack', (['"""<f"""', 'trans_values[j]'], {}), "('<f', trans_values[j])\n", (16982, 17005), False, 'import struct\n')] |
Vikas-kum/incubator-mxnet | example/image-classification/test_score.py | ba02bf2fe2da423caa59ddb3fd5e433b90b730bf | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
test pretrained models
"""
from __future__ import print_function
import mxnet as mx
from common import find_mxnet, modelzoo
from score import score
VAL_DATA='data/val-5k-256.rec'
def download_data():
return mx.test_utils.download(
'http://data.mxnet.io/data/val-5k-256.rec', VAL_DATA)
def test_imagenet1k_resnet(**kwargs):
models = ['imagenet1k-resnet-50', 'imagenet1k-resnet-152']
accs = [.77, .78]
for (m, g) in zip(models, accs):
acc = mx.metric.create('acc')
(speed,) = score(model=m, data_val=VAL_DATA,
rgb_mean='0,0,0', metrics=acc, **kwargs)
r = acc.get()[1]
print('Tested %s, acc = %f, speed = %f img/sec' % (m, r, speed))
assert r > g and r < g + .1
def test_imagenet1k_inception_bn(**kwargs):
acc = mx.metric.create('acc')
m = 'imagenet1k-inception-bn'
g = 0.75
(speed,) = score(model=m,
data_val=VAL_DATA,
rgb_mean='123.68,116.779,103.939', metrics=acc, **kwargs)
r = acc.get()[1]
print('Tested %s acc = %f, speed = %f img/sec' % (m, r, speed))
assert r > g and r < g + .1
if __name__ == '__main__':
gpus = mx.test_utils.list_gpus()
assert len(gpus) > 0
batch_size = 16 * len(gpus)
gpus = ','.join([str(i) for i in gpus])
kwargs = {'gpus':gpus, 'batch_size':batch_size, 'max_num_examples':500}
download_data()
test_imagenet1k_resnet(**kwargs)
test_imagenet1k_inception_bn(**kwargs)
| [((1002, 1078), 'mxnet.test_utils.download', 'mx.test_utils.download', (['"""http://data.mxnet.io/data/val-5k-256.rec"""', 'VAL_DATA'], {}), "('http://data.mxnet.io/data/val-5k-256.rec', VAL_DATA)\n", (1024, 1078), True, 'import mxnet as mx\n'), ((1595, 1618), 'mxnet.metric.create', 'mx.metric.create', (['"""acc"""'], {}), "('acc')\n", (1611, 1618), True, 'import mxnet as mx\n'), ((1681, 1776), 'score.score', 'score', ([], {'model': 'm', 'data_val': 'VAL_DATA', 'rgb_mean': '"""123.68,116.779,103.939"""', 'metrics': 'acc'}), "(model=m, data_val=VAL_DATA, rgb_mean='123.68,116.779,103.939',\n metrics=acc, **kwargs)\n", (1686, 1776), False, 'from score import score\n'), ((1975, 2000), 'mxnet.test_utils.list_gpus', 'mx.test_utils.list_gpus', ([], {}), '()\n', (1998, 2000), True, 'import mxnet as mx\n'), ((1263, 1286), 'mxnet.metric.create', 'mx.metric.create', (['"""acc"""'], {}), "('acc')\n", (1279, 1286), True, 'import mxnet as mx\n'), ((1306, 1380), 'score.score', 'score', ([], {'model': 'm', 'data_val': 'VAL_DATA', 'rgb_mean': '"""0,0,0"""', 'metrics': 'acc'}), "(model=m, data_val=VAL_DATA, rgb_mean='0,0,0', metrics=acc, **kwargs)\n", (1311, 1380), False, 'from score import score\n')] |
vertica/vertica_ml_python | verticapy/vcolumn.py | 9e82dba94afe8447bfa2492f343af6669128e2fb | # (c) Copyright [2018-2022] Micro Focus or one of its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# |_ |~) _ _| _ /~\ _ |.
# |_)\/ |_)(_|(_|| \_/|_|(_|||
# /
# ____________ ______
# / __ `\ / /
# | \/ / / /
# |______ / / /
# |____/ / /
# _____________ / /
# \ / / /
# \ / / /
# \_______/ / /
# ______ / /
# \ / / /
# \ / / /
# \/ / /
# / /
# / /
# \ /
# \ /
# \/
# _
# \ / _ __|_. _ _ |_)
# \/ (/_| | |(_(_|| \/
# /
# VerticaPy is a Python library with scikit-like functionality for conducting
# data science projects on data stored in Vertica, taking advantage of Vertica’s
# speed and built-in analytics and machine learning features. It supports the
# entire data science life cycle, uses a ‘pipeline’ mechanism to sequentialize
# data transformation operations, and offers beautiful graphical options.
#
# VerticaPy aims to do all of the above. The idea is simple: instead of moving
# data around for processing, VerticaPy brings the logic to the data.
#
#
# Modules
#
# Standard Python Modules
import math, re, decimal, warnings, datetime
from collections.abc import Iterable
from typing import Union
# VerticaPy Modules
import verticapy
from verticapy.utilities import *
from verticapy.toolbox import *
from verticapy.errors import *
##
#
# __ __ ______ ______ __ __ __ __ __ __ __
# /\ \ / / /\ ___\ /\ __ \ /\ \ /\ \/\ \ /\ "-./ \ /\ "-.\ \
# \ \ \'/ \ \ \____ \ \ \/\ \ \ \ \____ \ \ \_\ \ \ \ \-./\ \ \ \ \-. \
# \ \__| \ \_____\ \ \_____\ \ \_____\ \ \_____\ \ \_\ \ \_\ \ \_\\"\_\
# \/_/ \/_____/ \/_____/ \/_____/ \/_____/ \/_/ \/_/ \/_/ \/_/
#
#
# ---#
class vColumn(str_sql):
"""
---------------------------------------------------------------------------
Python object that stores all user transformations. If the vDataFrame
represents the entire relation, a vColumn can be seen as one column of that
relation. vColumns simplify several processes with their abstractions.
Parameters
----------
alias: str
vColumn alias.
transformations: list, optional
List of the different transformations. Each transformation must be similar
to the following: (function, type, category)
parent: vDataFrame, optional
Parent of the vColumn. One vDataFrame can have multiple children vColumns
whereas one vColumn can only have one parent.
catalog: dict, optional
	Catalog where each key corresponds to an aggregation. vColumns memorize
	already computed aggregations to improve performance. The catalog is
	updated when the parent vDataFrame is modified.
Attributes
----------
alias, str : vColumn alias.
catalog, dict : Catalog of pre-computed aggregations.
parent, vDataFrame : Parent of the vColumn.
transformations, str : List of the different transformations.
"""
#
# Special Methods
#
# ---#
def __init__(
self, alias: str, transformations: list = [], parent=None, catalog: dict = {}
):
self.parent, self.alias, self.transformations = (
parent,
alias,
[elem for elem in transformations],
)
self.catalog = {
"cov": {},
"pearson": {},
"spearman": {},
"spearmand": {},
"kendall": {},
"cramer": {},
"biserial": {},
"regr_avgx": {},
"regr_avgy": {},
"regr_count": {},
"regr_intercept": {},
"regr_r2": {},
"regr_slope": {},
"regr_sxx": {},
"regr_sxy": {},
"regr_syy": {},
}
for elem in catalog:
self.catalog[elem] = catalog[elem]
# ---#
def __getitem__(self, index):
if isinstance(index, slice):
assert index.step in (1, None), ValueError(
"vColumn doesn't allow slicing having steps different than 1."
)
index_stop = index.stop
index_start = index.start
if not (isinstance(index_start, int)):
index_start = 0
if index_start < 0:
index_start += self.parent.shape()[0]
if isinstance(index_stop, int):
if index_stop < 0:
index_stop += self.parent.shape()[0]
limit = index_stop - index_start
if limit <= 0:
limit = 0
limit = " LIMIT {}".format(limit)
else:
limit = ""
query = "(SELECT {} FROM {}{} OFFSET {}{}) VERTICAPY_SUBTABLE".format(
self.alias,
self.parent.__genSQL__(),
self.parent.__get_last_order_by__(),
index_start,
limit,
)
return vDataFrameSQL(query)
elif isinstance(index, int):
cast = "::float" if self.category() == "float" else ""
if index < 0:
index += self.parent.shape()[0]
query = "SELECT {}{} FROM {}{} OFFSET {} LIMIT 1".format(
self.alias,
cast,
self.parent.__genSQL__(),
self.parent.__get_last_order_by__(),
index,
)
return executeSQL(
query=query,
title="Getting the vColumn element.",
method="fetchfirstelem",
)
else:
return getattr(self, index)
# ---#
def __len__(self):
return int(self.count())
# ---#
def __nonzero__(self):
return self.count() > 0
# ---#
def __repr__(self):
return self.head(limit=verticapy.options["max_rows"]).__repr__()
# ---#
def _repr_html_(self):
return self.head(limit=verticapy.options["max_rows"])._repr_html_()
# ---#
def __setattr__(self, attr, val):
self.__dict__[attr] = val
#
# Methods
#
# ---#
def aad(self):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'aad' (Average Absolute Deviation).
Returns
-------
float
aad
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
return self.aggregate(["aad"]).values[self.alias][0]
# ---#
def abs(self):
"""
---------------------------------------------------------------------------
Applies the absolute value function to the input vColumn.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].apply : Applies a function to the input vColumn.
"""
return self.apply(func="ABS({})")
# ---#
def add(self, x: float):
"""
---------------------------------------------------------------------------
Adds the input element to the vColumn.
Parameters
----------
x: float
If the vColumn type is date like (date, datetime ...), the parameter 'x'
will represent the number of seconds, otherwise it will represent a number.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].apply : Applies a function to the input vColumn.
"""
check_types([("x", x, [int, float])])
if self.isdate():
return self.apply(func="TIMESTAMPADD(SECOND, {}, {})".format(x, "{}"))
else:
return self.apply(func="{} + ({})".format("{}", x))
# ---#
def add_copy(self, name: str):
"""
---------------------------------------------------------------------------
Adds a copy vColumn to the parent vDataFrame.
Parameters
----------
name: str
Name of the copy.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame.eval : Evaluates a customized expression.
"""
check_types([("name", name, [str])])
name = quote_ident(name.replace('"', "_"))
assert name.replace('"', ""), EmptyParameter(
"The parameter 'name' must not be empty"
)
assert not (self.parent.is_colname_in(name)), NameError(
f"A vColumn has already the alias {name}.\nBy changing the parameter 'name', you'll be able to solve this issue."
)
new_vColumn = vColumn(
name,
parent=self.parent,
transformations=[item for item in self.transformations],
catalog=self.catalog,
)
setattr(self.parent, name, new_vColumn)
setattr(self.parent, name[1:-1], new_vColumn)
self.parent._VERTICAPY_VARIABLES_["columns"] += [name]
self.parent.__add_to_history__(
"[Add Copy]: A copy of the vColumn {} named {} was added to the vDataFrame.".format(
self.alias, name
)
)
return self.parent
# ---#
def aggregate(self, func: list):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using the input functions.
Parameters
----------
func: list
List of the different aggregation.
aad : average absolute deviation
			approx_unique  : approximate cardinality
count : number of non-missing elements
cvar : conditional value at risk
dtype : vColumn type
iqr : interquartile range
kurtosis : kurtosis
jb : Jarque-Bera index
mad : median absolute deviation
max : maximum
mean : average
median : median
min : minimum
			mode           : most frequent element
percent : percent of non-missing elements
q% : q quantile (ex: 50% for the median)
prod : product
range : difference between the max and the min
sem : standard error of the mean
skewness : skewness
sum : sum
std : standard deviation
			topk           : kth most frequent element (ex: top1 for the mode)
			topk_percent   : kth most frequent element density
unique : cardinality (count distinct)
var : variance
Other aggregations could work if it is part of
the DB version you are using.
Returns
-------
tablesample
An object containing the result. For more information, see
utilities.tablesample.
See Also
--------
vDataFrame.analytic : Adds a new vColumn to the vDataFrame by using an advanced
analytical function on a specific vColumn.
"""
return self.parent.aggregate(func=func, columns=[self.alias]).transpose()
agg = aggregate
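    # Illustrative usage (hypothetical vDataFrame `vdf` and column name), based
    # on the aggregation names listed in the docstring above:
    #   vdf["age"].aggregate(func=["min", "50%", "max", "unique"])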
# ---#
def apply(self, func: str, copy_name: str = ""):
"""
---------------------------------------------------------------------------
Applies a function to the vColumn.
Parameters
----------
func: str,
Function in pure SQL used to transform the vColumn.
		The function must contain the placeholder {} (a pair of curly brackets),
		which stands for the vColumn. For example, to apply the function
		x -> x^2 + 2, use "POWER({}, 2) + 2".
copy_name: str, optional
If not empty, a copy will be created using the input Name.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame.apply : Applies functions to the input vColumns.
vDataFrame.applymap : Applies a function to all the vColumns.
vDataFrame.eval : Evaluates a customized expression.
"""
if isinstance(func, str_sql):
func = str(func)
check_types([("func", func, [str]), ("copy_name", copy_name, [str])])
try:
try:
ctype = get_data_types(
"SELECT {} AS apply_test_feature FROM {} WHERE {} IS NOT NULL LIMIT 0".format(
func.replace("{}", self.alias),
self.parent.__genSQL__(),
self.alias,
),
"apply_test_feature",
)
except:
ctype = get_data_types(
"SELECT {} AS apply_test_feature FROM {} WHERE {} IS NOT NULL LIMIT 0".format(
func.replace("{}", self.alias),
self.parent.__genSQL__(),
self.alias,
),
"apply_test_feature",
)
category = get_category_from_vertica_type(ctype=ctype)
all_cols, max_floor = self.parent.get_columns(), 0
for column in all_cols:
try:
if (quote_ident(column) in func) or (
re.search(
re.compile("\\b{}\\b".format(column.replace('"', ""))), func
)
):
max_floor = max(
len(self.parent[column].transformations), max_floor
)
except:
pass
max_floor -= len(self.transformations)
if copy_name:
self.add_copy(name=copy_name)
for k in range(max_floor):
self.parent[copy_name].transformations += [
("{}", self.ctype(), self.category())
]
self.parent[copy_name].transformations += [(func, ctype, category)]
self.parent[copy_name].catalog = self.catalog
self.parent.__add_to_history__(
"[Apply]: The vColumn '{}' was transformed with the func 'x -> {}'.".format(
copy_name.replace('"', ""), func.replace("{}", "x"),
)
)
else:
for k in range(max_floor):
self.transformations += [("{}", self.ctype(), self.category())]
self.transformations += [(func, ctype, category)]
self.parent.__update_catalog__(erase=True, columns=[self.alias])
self.parent.__add_to_history__(
"[Apply]: The vColumn '{}' was transformed with the func 'x -> {}'.".format(
self.alias.replace('"', ""), func.replace("{}", "x"),
)
)
return self.parent
except Exception as e:
raise QueryError(
"{}\nError when applying the func 'x -> {}' to '{}'".format(
e, func.replace("{}", "x"), self.alias.replace('"', "")
)
)
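    # Illustrative usage (hypothetical vDataFrame `vdf` and column name),
    # reusing the docstring example x -> x^2 + 2:
    #   vdf["x"].apply(func="POWER({}, 2) + 2")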
# ---#
def apply_fun(self, func: str, x: float = 2):
"""
---------------------------------------------------------------------------
Applies a default function to the vColumn.
Parameters
----------
func: str
Function to use to transform the vColumn.
abs : absolute value
acos : trigonometric inverse cosine
asin : trigonometric inverse sine
atan : trigonometric inverse tangent
cbrt : cube root
ceil : value up to the next whole number
cos : trigonometric cosine
cosh : hyperbolic cosine
cot : trigonometric cotangent
exp : exponential function
floor : value down to the next whole number
ln : natural logarithm
log : logarithm
log10 : base 10 logarithm
mod : remainder of a division operation
pow : number raised to the power of another number
round : rounds a value to a specified number of decimal places
sign : arithmetic sign
sin : trigonometric sine
sinh : hyperbolic sine
sqrt : arithmetic square root
tan : trigonometric tangent
tanh : hyperbolic tangent
x: int/float, optional
If the function has two arguments (example, power or mod), 'x' represents
the second argument.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].apply : Applies a function to the vColumn.
"""
check_types(
[
(
"func",
func,
[
"abs",
"acos",
"asin",
"atan",
"cbrt",
"ceil",
"cos",
"cosh",
"cot",
"exp",
"floor",
"ln",
"log",
"log10",
"mod",
"pow",
"round",
"sign",
"sin",
"sinh",
"sqrt",
"tan",
"tanh",
],
),
("x", x, [int, float]),
]
)
if func not in ("log", "mod", "pow", "round"):
expr = "{}({})".format(func.upper(), "{}")
else:
expr = "{}({}, {})".format(func.upper(), "{}", x)
return self.apply(func=expr)
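    # Illustrative usage (hypothetical vDataFrame `vdf` and column name):
    #   vdf["x"].apply_fun("sqrt")
    #   vdf["x"].apply_fun("pow", x=3)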
# ---#
def astype(self, dtype: str):
"""
---------------------------------------------------------------------------
Converts the vColumn to the input type.
Parameters
----------
dtype: str
New type.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame.astype : Converts the vColumns to the input type.
"""
check_types([("dtype", dtype, [str])])
try:
query = "SELECT {}::{} AS {} FROM {} WHERE {} IS NOT NULL LIMIT 20".format(
self.alias, dtype, self.alias, self.parent.__genSQL__(), self.alias
)
executeSQL(query, title="Testing the Type casting.")
self.transformations += [
(
"{}::{}".format("{}", dtype),
dtype,
get_category_from_vertica_type(ctype=dtype),
)
]
self.parent.__add_to_history__(
"[AsType]: The vColumn {} was converted to {}.".format(
self.alias, dtype
)
)
return self.parent
except Exception as e:
raise ConversionError(
"{}\nThe vColumn {} can not be converted to {}".format(
e, self.alias, dtype
)
)
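    # Illustrative usage (hypothetical vDataFrame `vdf` and column name):
    #   vdf["x"].astype("varchar(64)")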
# ---#
def avg(self):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'avg' (Average).
Returns
-------
float
average
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
return self.aggregate(["avg"]).values[self.alias][0]
mean = avg
# ---#
def bar(
self,
method: str = "density",
of: str = "",
max_cardinality: int = 6,
nbins: int = 0,
h: float = 0,
ax=None,
**style_kwds,
):
"""
---------------------------------------------------------------------------
Draws the bar chart of the vColumn based on an aggregation.
Parameters
----------
method: str, optional
The method to use to aggregate the data.
count : Number of elements.
density : Percentage of the distribution.
mean : Average of the vColumn 'of'.
min : Minimum of the vColumn 'of'.
max : Maximum of the vColumn 'of'.
sum : Sum of the vColumn 'of'.
q% : q Quantile of the vColumn 'of' (ex: 50% to get the median).
		It can also be a customized aggregation (ex: AVG(column1) + 5).
of: str, optional
The vColumn to use to compute the aggregation.
max_cardinality: int, optional
		Maximum number of distinct elements for the vColumn to be considered as
		categorical (in that case, no h will be picked or computed).
nbins: int, optional
		Number of bins. If empty, an optimized number of bins will be computed.
h: float, optional
Interval width of the bar. If empty, an optimized h will be computed.
ax: Matplotlib axes object, optional
The axes to plot on.
**style_kwds
Any optional parameter to pass to the Matplotlib functions.
Returns
-------
ax
Matplotlib axes object
See Also
--------
vDataFrame[].hist : Draws the histogram of the vColumn based on an aggregation.
"""
check_types(
[
("method", method, [str]),
("of", of, [str]),
("max_cardinality", max_cardinality, [int, float]),
("nbins", nbins, [int, float]),
("h", h, [int, float]),
]
)
if of:
self.parent.are_namecols_in(of)
of = self.parent.format_colnames(of)
from verticapy.plot import bar
return bar(self, method, of, max_cardinality, nbins, h, ax=ax, **style_kwds)
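    # Illustrative usage (hypothetical vDataFrame `vdf` and column names):
    #   vdf["pclass"].bar(method="mean", of="fare")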
# ---#
def boxplot(
self,
by: str = "",
h: float = 0,
max_cardinality: int = 8,
cat_priority: list = [],
ax=None,
**style_kwds,
):
"""
---------------------------------------------------------------------------
Draws the box plot of the vColumn.
Parameters
----------
by: str, optional
vColumn to use to partition the data.
h: float, optional
Interval width if the vColumn is numerical or of type date like. Optimized
h will be computed if the parameter is empty or invalid.
max_cardinality: int, optional
Maximum number of vColumn distinct elements to be used as categorical.
The less frequent elements will be gathered together to create a new
category : 'Others'.
cat_priority: list, optional
List of the different categories to consider when drawing the box plot.
The other categories will be filtered.
ax: Matplotlib axes object, optional
The axes to plot on.
**style_kwds
Any optional parameter to pass to the Matplotlib functions.
Returns
-------
ax
Matplotlib axes object
See Also
--------
vDataFrame.boxplot : Draws the Box Plot of the input vColumns.
"""
if isinstance(cat_priority, str) or not (isinstance(cat_priority, Iterable)):
cat_priority = [cat_priority]
check_types(
[
("by", by, [str]),
("max_cardinality", max_cardinality, [int, float]),
("h", h, [int, float]),
("cat_priority", cat_priority, [list]),
]
)
if by:
self.parent.are_namecols_in(by)
by = self.parent.format_colnames(by)
from verticapy.plot import boxplot
return boxplot(self, by, h, max_cardinality, cat_priority, ax=ax, **style_kwds)
# ---#
def category(self):
"""
---------------------------------------------------------------------------
Returns the category of the vColumn. The category will be one of the following:
date / int / float / text / binary / spatial / uuid / undefined
Returns
-------
str
vColumn category.
See Also
--------
vDataFrame[].ctype : Returns the vColumn database type.
"""
return self.transformations[-1][2]
# ---#
def clip(self, lower=None, upper=None):
"""
---------------------------------------------------------------------------
	Clips the vColumn by setting values less than the lower bound to the lower
	bound and values greater than the upper bound to the upper bound.
Parameters
----------
lower: float, optional
Lower bound.
upper: float, optional
Upper bound.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].fill_outliers : Fills the vColumn outliers using the input method.
"""
check_types([("lower", lower, [float, int]), ("upper", upper, [float, int])])
assert (lower != None) or (upper != None), ParameterError(
"At least 'lower' or 'upper' must have a numerical value"
)
lower_when = (
"WHEN {} < {} THEN {} ".format("{}", lower, lower)
if (isinstance(lower, (float, int)))
else ""
)
upper_when = (
"WHEN {} > {} THEN {} ".format("{}", upper, upper)
if (isinstance(upper, (float, int)))
else ""
)
func = "(CASE {}{}ELSE {} END)".format(lower_when, upper_when, "{}")
self.apply(func=func)
return self.parent
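    # Illustrative usage (hypothetical vDataFrame `vdf` and column name):
    #   vdf["x"].clip(lower=0, upper=100)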
# ---#
def count(self):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'count' (Number of non-Missing elements).
Returns
-------
int
number of non-Missing elements.
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
return self.aggregate(["count"]).values[self.alias][0]
# ---#
def cut(
self,
breaks: list,
labels: list = [],
include_lowest: bool = True,
right: bool = True,
):
"""
---------------------------------------------------------------------------
Discretizes the vColumn using the input list.
Parameters
----------
breaks: list
List of values used to cut the vColumn.
labels: list, optional
Labels used to name the new categories. If empty, names will be generated.
include_lowest: bool, optional
If set to True, the lowest element of the list will be included.
right: bool, optional
How the intervals should be closed. If set to True, the intervals will be
closed on the right.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].apply : Applies a function to the input vColumn.
"""
check_types(
[
("breaks", breaks, [list]),
("labels", labels, [list]),
("include_lowest", include_lowest, [bool]),
("right", right, [bool]),
]
)
assert self.isnum() or self.isdate(), TypeError(
"cut only works on numerical / date-like vColumns."
)
assert len(breaks) >= 2, ParameterError(
"Length of parameter 'breaks' must be greater or equal to 2."
)
assert len(breaks) == len(labels) + 1 or not (labels), ParameterError(
"Length of parameter breaks must be equal to the length of parameter 'labels' + 1 or parameter 'labels' must be empty."
)
conditions, column = [], self.alias
for idx in range(len(breaks) - 1):
first_elem, second_elem = breaks[idx], breaks[idx + 1]
if right:
op1, op2, close_l, close_r = "<", "<=", "]", "]"
else:
op1, op2, close_l, close_r = "<=", "<", "[", "["
if idx == 0 and include_lowest:
op1, close_l = "<=", "["
elif idx == 0:
op1, close_l = "<", "]"
if labels:
label = labels[idx]
else:
label = f"{close_l}{first_elem};{second_elem}{close_r}"
conditions += [
f"'{first_elem}' {op1} {column} AND {column} {op2} '{second_elem}' THEN '{label}'"
]
expr = "CASE WHEN " + " WHEN ".join(conditions) + " END"
        return self.apply(func=expr)
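    # Illustrative usage (hypothetical vDataFrame `vdf` and column name);
    # 'breaks' has one more element than 'labels':
    #   vdf["age"].cut(breaks=[0, 18, 65, 120], labels=["child", "adult", "senior"])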
# ---#
def ctype(self):
"""
---------------------------------------------------------------------------
Returns the vColumn DB type.
Returns
-------
str
vColumn DB type.
"""
return self.transformations[-1][1].lower()
dtype = ctype
# ---#
def date_part(self, field: str):
"""
---------------------------------------------------------------------------
Extracts a specific TS field from the vColumn (only if the vColumn type is
date like). The vColumn will be transformed.
Parameters
----------
field: str
The field to extract. It must be one of the following:
CENTURY / DAY / DECADE / DOQ / DOW / DOY / EPOCH / HOUR / ISODOW / ISOWEEK /
ISOYEAR / MICROSECONDS / MILLENNIUM / MILLISECONDS / MINUTE / MONTH / QUARTER /
SECOND / TIME ZONE / TIMEZONE_HOUR / TIMEZONE_MINUTE / WEEK / YEAR
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].slice : Slices the vColumn using a time series rule.
"""
return self.apply(func="DATE_PART('{}', {})".format(field, "{}"))
# ---#
def decode(self, *argv):
"""
---------------------------------------------------------------------------
Encodes the vColumn using a user-defined encoding.
Parameters
----------
argv: object
Any amount of expressions.
The expression generated will look like:
even: CASE ... WHEN vColumn = argv[2 * i] THEN argv[2 * i + 1] ... END
odd : CASE ... WHEN vColumn = argv[2 * i] THEN argv[2 * i + 1] ... ELSE argv[n] END
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame.case_when : Creates a new feature by evaluating some conditions.
vDataFrame[].discretize : Discretizes the vColumn.
vDataFrame[].label_encode : Encodes the vColumn with Label Encoding.
vDataFrame[].get_dummies : Encodes the vColumn with One-Hot Encoding.
vDataFrame[].mean_encode : Encodes the vColumn using the mean encoding of a response.
"""
import verticapy.stats as st
return self.apply(func=st.decode(str_sql("{}"), *argv))
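    # Illustrative usage (hypothetical vDataFrame `vdf` and column name); with
    # an odd number of arguments, the last one is used as the ELSE value:
    #   vdf["sex"].decode("male", 0, "female", 1, None)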
# ---#
def density(
self,
by: str = "",
bandwidth: float = 1.0,
kernel: str = "gaussian",
nbins: int = 200,
xlim: tuple = None,
ax=None,
**style_kwds,
):
"""
---------------------------------------------------------------------------
Draws the vColumn Density Plot.
Parameters
----------
by: str, optional
vColumn to use to partition the data.
bandwidth: float, optional
The bandwidth of the kernel.
kernel: str, optional
The method used for the plot.
gaussian : Gaussian kernel.
logistic : Logistic kernel.
sigmoid : Sigmoid kernel.
silverman : Silverman kernel.
nbins: int, optional
Maximum number of points to use to evaluate the approximate density function.
Increasing this parameter will increase the precision but will also increase
the time of the learning and scoring phases.
xlim: tuple, optional
Set the x limits of the current axes.
ax: Matplotlib axes object, optional
The axes to plot on.
**style_kwds
Any optional parameter to pass to the Matplotlib functions.
Returns
-------
ax
Matplotlib axes object
See Also
--------
vDataFrame[].hist : Draws the histogram of the vColumn based on an aggregation.
"""
check_types(
[
("by", by, [str]),
("kernel", kernel, ["gaussian", "logistic", "sigmoid", "silverman"]),
("bandwidth", bandwidth, [int, float]),
("nbins", nbins, [float, int]),
]
)
if by:
self.parent.are_namecols_in(by)
by = self.parent.format_colnames(by)
from verticapy.plot import gen_colors
from matplotlib.lines import Line2D
colors = gen_colors()
if not xlim:
xmin = self.min()
xmax = self.max()
else:
xmin, xmax = xlim
custom_lines = []
columns = self.parent[by].distinct()
for idx, column in enumerate(columns):
param = {"color": colors[idx % len(colors)]}
ax = self.parent.search(
"{} = '{}'".format(self.parent[by].alias, column)
)[self.alias].density(
bandwidth=bandwidth,
kernel=kernel,
nbins=nbins,
xlim=(xmin, xmax),
ax=ax,
**updated_dict(param, style_kwds, idx),
)
custom_lines += [
Line2D(
[0],
[0],
color=updated_dict(param, style_kwds, idx)["color"],
lw=4,
),
]
ax.set_title("KernelDensity")
ax.legend(
custom_lines,
columns,
title=by,
loc="center left",
bbox_to_anchor=[1, 0.5],
)
ax.set_xlabel(self.alias)
return ax
kernel = kernel.lower()
from verticapy.learn.neighbors import KernelDensity
schema = verticapy.options["temp_schema"]
if not (schema):
schema = "public"
name = gen_tmp_name(schema=schema, name="kde")
if isinstance(xlim, (tuple, list)):
xlim_tmp = [xlim]
else:
xlim_tmp = []
model = KernelDensity(
name,
bandwidth=bandwidth,
kernel=kernel,
nbins=nbins,
xlim=xlim_tmp,
store=False,
)
try:
result = model.fit(self.parent.__genSQL__(), [self.alias]).plot(
ax=ax, **style_kwds
)
model.drop()
return result
except:
model.drop()
raise
# ---#
def describe(
self, method: str = "auto", max_cardinality: int = 6, numcol: str = ""
):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using multiple statistical aggregations:
min, max, median, unique... depending on the input method.
Parameters
----------
method: str, optional
The describe method.
auto : Sets the method to 'numerical' if the vColumn is numerical
, 'categorical' otherwise.
categorical : Uses only categorical aggregations during the computation.
cat_stats : Computes statistics of a numerical column for each vColumn
category. In this case, the parameter 'numcol' must be defined.
numerical : Uses popular numerical aggregations during the computation.
max_cardinality: int, optional
		Cardinality threshold used to determine whether the vColumn is considered
		categorical.
numcol: str, optional
Numerical vColumn to use when the parameter method is set to 'cat_stats'.
Returns
-------
tablesample
An object containing the result. For more information, see
utilities.tablesample.
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
check_types(
[
("method", method, ["auto", "numerical", "categorical", "cat_stats"]),
("max_cardinality", max_cardinality, [int, float]),
("numcol", numcol, [str]),
]
)
method = method.lower()
assert (method != "cat_stats") or (numcol), ParameterError(
"The parameter 'numcol' must be a vDataFrame column if the method is 'cat_stats'"
)
distinct_count, is_numeric, is_date = (
self.nunique(),
self.isnum(),
self.isdate(),
)
if (is_date) and not (method == "categorical"):
result = self.aggregate(["count", "min", "max"])
index = result.values["index"]
result = result.values[self.alias]
elif (method == "cat_stats") and (numcol != ""):
numcol = self.parent.format_colnames(numcol)
assert self.parent[numcol].category() in ("float", "int"), TypeError(
"The column 'numcol' must be numerical"
)
cast = "::int" if (self.parent[numcol].isbool()) else ""
query, cat = [], self.distinct()
if len(cat) == 1:
lp, rp = "(", ")"
else:
lp, rp = "", ""
for category in cat:
tmp_query = """SELECT
'{0}' AS 'index',
COUNT({1}) AS count,
100 * COUNT({1}) / {2} AS percent,
AVG({3}{4}) AS mean,
STDDEV({3}{4}) AS std,
MIN({3}{4}) AS min,
APPROXIMATE_PERCENTILE ({3}{4}
USING PARAMETERS percentile = 0.1) AS 'approx_10%',
APPROXIMATE_PERCENTILE ({3}{4}
USING PARAMETERS percentile = 0.25) AS 'approx_25%',
APPROXIMATE_PERCENTILE ({3}{4}
USING PARAMETERS percentile = 0.5) AS 'approx_50%',
APPROXIMATE_PERCENTILE ({3}{4}
USING PARAMETERS percentile = 0.75) AS 'approx_75%',
APPROXIMATE_PERCENTILE ({3}{4}
USING PARAMETERS percentile = 0.9) AS 'approx_90%',
MAX({3}{4}) AS max
FROM vdf_table""".format(
category, self.alias, self.parent.shape()[0], numcol, cast,
)
tmp_query += (
" WHERE {} IS NULL".format(self.alias)
if (category in ("None", None))
else " WHERE {} = '{}'".format(
bin_spatial_to_str(self.category(), self.alias), category,
)
)
query += [lp + tmp_query + rp]
query = "WITH vdf_table AS (SELECT * FROM {}) {}".format(
self.parent.__genSQL__(), " UNION ALL ".join(query)
)
title = "Describes the statics of {} partitioned by {}.".format(
numcol, self.alias
)
values = to_tablesample(query, title=title).values
elif (
((distinct_count < max_cardinality + 1) and (method != "numerical"))
or not (is_numeric)
or (method == "categorical")
):
query = """(SELECT
{0} || '',
COUNT(*)
FROM vdf_table
GROUP BY {0}
ORDER BY COUNT(*) DESC
LIMIT {1})""".format(
self.alias, max_cardinality
)
if distinct_count > max_cardinality:
query += (
"UNION ALL (SELECT 'Others', SUM(count) FROM (SELECT COUNT(*) AS count"
" FROM vdf_table WHERE {0} IS NOT NULL GROUP BY {0} ORDER BY COUNT(*)"
" DESC OFFSET {1}) VERTICAPY_SUBTABLE) ORDER BY count DESC"
).format(self.alias, max_cardinality + 1)
query = "WITH vdf_table AS (SELECT * FROM {}) {}".format(
self.parent.__genSQL__(), query
)
query_result = executeSQL(
query=query,
title="Computing the descriptive statistics of {}.".format(self.alias),
method="fetchall",
)
result = [distinct_count, self.count()] + [item[1] for item in query_result]
index = ["unique", "count"] + [item[0] for item in query_result]
else:
result = (
self.parent.describe(
method="numerical", columns=[self.alias], unique=False
)
.transpose()
.values[self.alias]
)
result = [distinct_count] + result
index = [
"unique",
"count",
"mean",
"std",
"min",
"approx_25%",
"approx_50%",
"approx_75%",
"max",
]
if method != "cat_stats":
values = {
"index": ["name", "dtype"] + index,
"value": [self.alias, self.ctype()] + result,
}
if ((is_date) and not (method == "categorical")) or (
method == "is_numeric"
):
self.parent.__update_catalog__({"index": index, self.alias: result})
for elem in values:
for i in range(len(values[elem])):
if isinstance(values[elem][i], decimal.Decimal):
values[elem][i] = float(values[elem][i])
return tablesample(values)
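    # Illustrative usage (hypothetical vDataFrame `vdf` and column names):
    #   vdf["age"].describe(method="numerical")
    #   vdf["embarked"].describe(method="cat_stats", numcol="fare")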
# ---#
def discretize(
self,
method: str = "auto",
h: float = 0,
nbins: int = -1,
k: int = 6,
new_category: str = "Others",
RFmodel_params: dict = {},
response: str = "",
return_enum_trans: bool = False,
):
"""
---------------------------------------------------------------------------
Discretizes the vColumn using the input method.
Parameters
----------
method: str, optional
The method to use to discretize the vColumn.
			auto       : Uses method 'same_width' for numerical vColumns and casts
				the other types to varchar.
same_freq : Computes bins with the same number of elements.
same_width : Computes regular width bins.
smart : Uses the Random Forest on a response column to find the most
relevant interval to use for the discretization.
topk : Keeps the topk most frequent categories and merge the other
into one unique category.
h: float, optional
		The interval size used to convert the vColumn. If this parameter
		is equal to 0, an optimized interval will be computed.
nbins: int, optional
Number of bins used for the discretization (must be > 1)
k: int, optional
The integer k of the 'topk' method.
new_category: str, optional
The name of the merging category when using the 'topk' method.
RFmodel_params: dict, optional
Dictionary of the Random Forest model parameters used to compute the best splits
when 'method' is set to 'smart'. A RF Regressor will be trained if the response
is numerical (except ints and bools), a RF Classifier otherwise.
Example: Write {"n_estimators": 20, "max_depth": 10} to train a Random Forest with
20 trees and a maximum depth of 10.
response: str, optional
Response vColumn when method is set to 'smart'.
return_enum_trans: bool, optional
		Returns the transformation instead of the vDataFrame parent and does not
		apply it. This parameter is useful for testing, as it lets you inspect the
		final transformation.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].decode : Encodes the vColumn with user defined Encoding.
vDataFrame[].get_dummies : Encodes the vColumn with One-Hot Encoding.
vDataFrame[].label_encode : Encodes the vColumn with Label Encoding.
vDataFrame[].mean_encode : Encodes the vColumn using the mean encoding of a response.
"""
check_types(
[
("RFmodel_params", RFmodel_params, [dict]),
("return_enum_trans", return_enum_trans, [bool]),
("h", h, [int, float]),
("response", response, [str]),
("nbins", nbins, [int, float]),
(
"method",
method,
["auto", "smart", "same_width", "same_freq", "topk"],
),
("return_enum_trans", return_enum_trans, [bool]),
]
)
method = method.lower()
if self.isnum() and method == "smart":
schema = verticapy.options["temp_schema"]
if not (schema):
schema = "public"
tmp_view_name = gen_tmp_name(schema=schema, name="view")
tmp_model_name = gen_tmp_name(schema=schema, name="model")
assert nbins >= 2, ParameterError(
"Parameter 'nbins' must be greater or equals to 2 in case of discretization using the method 'smart'."
)
assert response, ParameterError(
"Parameter 'response' can not be empty in case of discretization using the method 'smart'."
)
self.parent.are_namecols_in(response)
response = self.parent.format_colnames(response)
drop(tmp_view_name, method="view")
self.parent.to_db(tmp_view_name)
from verticapy.learn.ensemble import (
RandomForestClassifier,
RandomForestRegressor,
)
drop(tmp_model_name, method="model")
if self.parent[response].category() == "float":
model = RandomForestRegressor(tmp_model_name)
else:
model = RandomForestClassifier(tmp_model_name)
model.set_params({"n_estimators": 20, "max_depth": 8, "nbins": 100})
model.set_params(RFmodel_params)
parameters = model.get_params()
try:
model.fit(tmp_view_name, [self.alias], response)
query = [
"(SELECT READ_TREE(USING PARAMETERS model_name = '{}', tree_id = {}, format = 'tabular'))".format(
tmp_model_name, i
)
for i in range(parameters["n_estimators"])
]
query = "SELECT split_value FROM (SELECT split_value, MAX(weighted_information_gain) FROM ({}) VERTICAPY_SUBTABLE WHERE split_value IS NOT NULL GROUP BY 1 ORDER BY 2 DESC LIMIT {}) VERTICAPY_SUBTABLE ORDER BY split_value::float".format(
" UNION ALL ".join(query), nbins - 1
)
result = executeSQL(
query=query,
title="Computing the optimized histogram nbins using Random Forest.",
method="fetchall",
)
result = [elem[0] for elem in result]
except:
drop(tmp_view_name, method="view")
drop(tmp_model_name, method="model")
raise
drop(tmp_view_name, method="view")
drop(tmp_model_name, method="model")
result = [self.min()] + result + [self.max()]
elif method == "topk":
assert k >= 2, ParameterError(
"Parameter 'k' must be greater or equals to 2 in case of discretization using the method 'topk'"
)
distinct = self.topk(k).values["index"]
trans = (
"(CASE WHEN {} IN ({}) THEN {} || '' ELSE '{}' END)".format(
bin_spatial_to_str(self.category()),
", ".join(
[
"'{}'".format(str(elem).replace("'", "''"))
for elem in distinct
]
),
bin_spatial_to_str(self.category()),
new_category.replace("'", "''"),
),
"varchar",
"text",
)
elif self.isnum() and method == "same_freq":
assert nbins >= 2, ParameterError(
"Parameter 'nbins' must be greater or equals to 2 in case of discretization using the method 'same_freq'"
)
count = self.count()
nb = int(float(count / int(nbins)))
assert nb != 0, Exception(
"Not enough values to compute the Equal Frequency discretization"
)
total, query, nth_elems = nb, [], []
while total < int(float(count / int(nbins))) * int(nbins):
nth_elems += [str(total)]
total += nb
where = "WHERE _verticapy_row_nb_ IN ({})".format(
", ".join(["1"] + nth_elems + [str(count)])
)
query = "SELECT {} FROM (SELECT {}, ROW_NUMBER() OVER (ORDER BY {}) AS _verticapy_row_nb_ FROM {} WHERE {} IS NOT NULL) VERTICAPY_SUBTABLE {}".format(
self.alias,
self.alias,
self.alias,
self.parent.__genSQL__(),
self.alias,
where,
)
result = executeSQL(
query=query,
title="Computing the equal frequency histogram bins.",
method="fetchall",
)
result = [elem[0] for elem in result]
elif self.isnum() and method in ("same_width", "auto"):
if not (h) or h <= 0:
if nbins <= 0:
h = self.numh()
else:
h = (self.max() - self.min()) * 1.01 / nbins
if h > 0.01:
h = round(h, 2)
elif h > 0.0001:
h = round(h, 4)
elif h > 0.000001:
h = round(h, 6)
if self.category() == "int":
h = int(max(math.floor(h), 1))
floor_end = -1 if (self.category() == "int") else ""
if (h > 1) or (self.category() == "float"):
trans = (
"'[' || FLOOR({} / {}) * {} || ';' || (FLOOR({} / {}) * {} + {}{}) || ']'".format(
"{}", h, h, "{}", h, h, h, floor_end
),
"varchar",
"text",
)
else:
trans = ("FLOOR({}) || ''", "varchar", "text")
else:
trans = ("{} || ''", "varchar", "text")
if (self.isnum() and method == "same_freq") or (
self.isnum() and method == "smart"
):
n = len(result)
trans = "(CASE "
for i in range(1, n):
trans += "WHEN {} BETWEEN {} AND {} THEN '[{};{}]' ".format(
"{}", result[i - 1], result[i], result[i - 1], result[i]
)
trans += " ELSE NULL END)"
trans = (trans, "varchar", "text")
if return_enum_trans:
return trans
else:
self.transformations += [trans]
sauv = {}
for elem in self.catalog:
sauv[elem] = self.catalog[elem]
self.parent.__update_catalog__(erase=True, columns=[self.alias])
try:
if "count" in sauv:
self.catalog["count"] = sauv["count"]
self.catalog["percent"] = (
100 * sauv["count"] / self.parent.shape()[0]
)
except:
pass
self.parent.__add_to_history__(
"[Discretize]: The vColumn {} was discretized.".format(self.alias)
)
return self.parent
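# Illustrative usage (a minimal sketch; the vDataFrame `vdf` and the column
# name "age" are hypothetical, not part of this module):
#   vdf["age"].discretize(method="same_freq", nbins=4)
#   vdf["age"].discretize(method="topk", k=3, new_category="Others")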
# ---#
def distinct(self, **kwargs):
"""
---------------------------------------------------------------------------
Returns the distinct categories of the vColumn.
Returns
-------
list
Distinct categories of the vColumn.
See Also
--------
vDataFrame[].topk : Returns the vColumn most frequent elements.
"""
if "agg" not in kwargs:
query = "SELECT {} AS {} FROM {} WHERE {} IS NOT NULL GROUP BY {} ORDER BY {}".format(
bin_spatial_to_str(self.category(), self.alias),
self.alias,
self.parent.__genSQL__(),
self.alias,
self.alias,
self.alias,
)
else:
query = "SELECT {} FROM (SELECT {} AS {}, {} AS verticapy_agg FROM {} WHERE {} IS NOT NULL GROUP BY 1) x ORDER BY verticapy_agg DESC".format(
self.alias,
bin_spatial_to_str(self.category(), self.alias),
self.alias,
kwargs["agg"],
self.parent.__genSQL__(),
self.alias,
)
query_result = executeSQL(
query=query,
title="Computing the distinct categories of {}.".format(self.alias),
method="fetchall",
)
return [item for sublist in query_result for item in sublist]
# ---#
def div(self, x: float):
"""
---------------------------------------------------------------------------
Divides the vColumn by the input element.
Parameters
----------
x: float
Input number.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].apply : Applies a function to the input vColumn.
"""
check_types([("x", x, [int, float])])
assert x != 0, ValueError("Division by 0 is forbidden!")
return self.apply(func="{} / ({})".format("{}", x))
# ---#
def drop(self, add_history: bool = True):
"""
---------------------------------------------------------------------------
Drops the vColumn from the vDataFrame. Dropping a vColumn means simply
not selecting it in the final generated SQL code.
Note: Dropping a vColumn can make the vDataFrame "heavier" if it is used
to compute other vColumns.
Parameters
----------
add_history: bool, optional
If set to True, the information will be stored in the vDataFrame history.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame.drop: Drops the input vColumns from the vDataFrame.
"""
check_types([("add_history", add_history, [bool])])
try:
parent = self.parent
force_columns = [
column for column in self.parent._VERTICAPY_VARIABLES_["columns"]
]
force_columns.remove(self.alias)
executeSQL(
"SELECT * FROM {} LIMIT 10".format(
self.parent.__genSQL__(force_columns=force_columns)
),
print_time_sql=False,
)
self.parent._VERTICAPY_VARIABLES_["columns"].remove(self.alias)
delattr(self.parent, self.alias)
except:
self.parent._VERTICAPY_VARIABLES_["exclude_columns"] += [self.alias]
if add_history:
self.parent.__add_to_history__(
"[Drop]: vColumn {} was deleted from the vDataFrame.".format(self.alias)
)
return parent
# ---#
def drop_outliers(
self, threshold: float = 4.0, use_threshold: bool = True, alpha: float = 0.05
):
"""
---------------------------------------------------------------------------
Drops outliers in the vColumn.
Parameters
----------
threshold: float, optional
Uses the Gaussian distribution to identify outliers. After normalizing
the data (Z-Score), if the absolute value of the record is greater than
the threshold, it will be considered as an outlier.
use_threshold: bool, optional
Uses the threshold instead of the 'alpha' parameter.
alpha: float, optional
Number representing the outliers threshold. Values less than
quantile(alpha) or greater than quantile(1-alpha) will be dropped.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame.fill_outliers : Fills the outliers in the vColumn.
vDataFrame.outliers : Adds a new vColumn labeled with 0 and 1
(1 meaning global outlier).
"""
check_types(
[
("alpha", alpha, [int, float]),
("use_threshold", use_threshold, [bool]),
("threshold", threshold, [int, float]),
]
)
if use_threshold:
result = self.aggregate(func=["std", "avg"]).transpose().values
self.parent.filter(
"ABS({} - {}) / {} < {}".format(
self.alias, result["avg"][0], result["std"][0], threshold
)
)
else:
p_alpha, p_1_alpha = (
self.parent.quantile([alpha, 1 - alpha], [self.alias])
.transpose()
.values[self.alias]
)
self.parent.filter(
"({} BETWEEN {} AND {})".format(self.alias, p_alpha, p_1_alpha)
)
return self.parent
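# Illustrative usage (hedged sketch; `vdf` and "salary" are hypothetical):
#   vdf["salary"].drop_outliers(threshold=3.0)                     # Z-Score rule
#   vdf["salary"].drop_outliers(use_threshold=False, alpha=0.01)   # quantile rule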
# ---#
def dropna(self):
"""
---------------------------------------------------------------------------
Filters the vDataFrame where the vColumn is missing.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame.filter: Filters the data using the input expression.
"""
self.parent.filter("{} IS NOT NULL".format(self.alias))
return self.parent
# ---#
def fill_outliers(
self,
method: str = "winsorize",
threshold: float = 4.0,
use_threshold: bool = True,
alpha: float = 0.05,
):
"""
---------------------------------------------------------------------------
Fills the vColumns outliers using the input method.
Parameters
----------
method: str, optional
Method to use to fill the vColumn outliers.
mean : Replaces the upper and lower outliers by their respective
average.
null : Replaces the outliers by the NULL value.
winsorize : Clips the vColumn using quantile(alpha) as the lower bound and
quantile(1-alpha) as the upper bound if 'use_threshold' is set to False;
otherwise, uses the lower and upper Z-Score bounds.
threshold: float, optional
Uses the Gaussian distribution to define the outliers. After normalizing the
data (Z-Score), if the absolute value of the record is greater than the
threshold it will be considered as an outlier.
use_threshold: bool, optional
Uses the threshold instead of the 'alpha' parameter.
alpha: float, optional
Number representing the outliers threshold. Values less than quantile(alpha)
or greater than quantile(1-alpha) will be filled.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].drop_outliers : Drops outliers in the vColumn.
vDataFrame.outliers : Adds a new vColumn labeled with 0 and 1
(1 meaning global outlier).
"""
if isinstance(method, str):
method = method.lower()
check_types(
[
("method", method, ["winsorize", "null", "mean"]),
("alpha", alpha, [int, float]),
("use_threshold", use_threshold, [bool]),
("threshold", threshold, [int, float]),
]
)
if use_threshold:
result = self.aggregate(func=["std", "avg"]).transpose().values
p_alpha, p_1_alpha = (
-threshold * result["std"][0] + result["avg"][0],
threshold * result["std"][0] + result["avg"][0],
)
else:
query = "SELECT PERCENTILE_CONT({}) WITHIN GROUP (ORDER BY {}) OVER (), PERCENTILE_CONT(1 - {}) WITHIN GROUP (ORDER BY {}) OVER () FROM {} LIMIT 1".format(
alpha, self.alias, alpha, self.alias, self.parent.__genSQL__()
)
p_alpha, p_1_alpha = executeSQL(
query=query,
title="Computing the quantiles of {}.".format(self.alias),
method="fetchrow",
)
if method == "winsorize":
self.clip(lower=p_alpha, upper=p_1_alpha)
elif method == "null":
self.apply(
func="(CASE WHEN ({} BETWEEN {} AND {}) THEN {} ELSE NULL END)".format(
"{}", p_alpha, p_1_alpha, "{}"
)
)
elif method == "mean":
query = "WITH vdf_table AS (SELECT * FROM {}) (SELECT AVG({}) FROM vdf_table WHERE {} < {}) UNION ALL (SELECT AVG({}) FROM vdf_table WHERE {} > {})".format(
self.parent.__genSQL__(),
self.alias,
self.alias,
p_alpha,
self.alias,
self.alias,
p_1_alpha,
)
mean_alpha, mean_1_alpha = [
item[0]
for item in executeSQL(
query=query,
title="Computing the average of the {}'s lower and upper outliers.".format(
self.alias
),
method="fetchall",
)
]
if mean_alpha == None:
mean_alpha = "NULL"
if mean_1_alpha == None:
mean_1_alpha = "NULL"
self.apply(
func="(CASE WHEN {} < {} THEN {} WHEN {} > {} THEN {} ELSE {} END)".format(
"{}", p_alpha, mean_alpha, "{}", p_1_alpha, mean_1_alpha, "{}"
)
)
return self.parent
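# Illustrative usage (hedged sketch; `vdf` and "salary" are hypothetical):
#   vdf["salary"].fill_outliers(method="winsorize", use_threshold=False, alpha=0.05)
#   vdf["salary"].fill_outliers(method="null", threshold=4.0)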
# ---#
def fillna(
self,
val=None,
method: str = "auto",
expr: str = "",
by: list = [],
order_by: list = [],
):
"""
---------------------------------------------------------------------------
Fills missing elements in the vColumn with a user-specified rule.
Parameters
----------
val: int/float/str, optional
Value to use to impute the vColumn.
method: str, optional
Method to use to impute the missing values.
auto : Mean for the numerical and Mode for the categorical vColumns.
bfill : Back-propagation of the next element (constant interpolation).
ffill : Forward-propagation of the previous element (constant interpolation).
mean : Average.
median : Median.
mode : Mode (most frequent element).
0ifnull : 0 when the vColumn is null, 1 otherwise.
expr: str, optional
SQL expression.
by: list, optional
vColumns used in the partition.
order_by: list, optional
List of the vColumns to use to sort the data when using TS methods.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].dropna : Drops the vColumn missing values.
"""
if isinstance(by, str):
by = [by]
if isinstance(order_by, str):
order_by = [order_by]
check_types(
[
(
"method",
method,
[
"auto",
"mode",
"0ifnull",
"mean",
"avg",
"median",
"ffill",
"pad",
"bfill",
"backfill",
],
),
("expr", expr, [str]),
("by", by, [list]),
("order_by", order_by, [list]),
]
)
method = method.lower()
self.parent.are_namecols_in([elem for elem in order_by] + by)
by = self.parent.format_colnames(by)
if method == "auto":
method = "mean" if (self.isnum() and self.nunique(True) > 6) else "mode"
total = self.count()
if (method == "mode") and (val == None):
val = self.mode(dropna=True)
if val == None:
warning_message = "The vColumn {} has no mode (only missing values).\nNothing was filled.".format(
self.alias
)
warnings.warn(warning_message, Warning)
return self.parent
if isinstance(val, str):
val = val.replace("'", "''")
if val != None:
new_column = "COALESCE({}, '{}')".format("{}", val)
elif expr:
new_column = "COALESCE({}, {})".format("{}", expr)
elif method == "0ifnull":
new_column = "DECODE({}, NULL, 0, 1)"
elif method in ("mean", "avg", "median"):
fun = "MEDIAN" if (method == "median") else "AVG"
if by == []:
if fun == "AVG":
val = self.avg()
elif fun == "MEDIAN":
val = self.median()
new_column = "COALESCE({}, {})".format("{}", val)
elif (len(by) == 1) and (self.parent[by[0]].nunique() < 50):
try:
if fun == "MEDIAN":
fun = "APPROXIMATE_MEDIAN"
query = "SELECT {}, {}({}) FROM {} GROUP BY {};".format(
by[0], fun, self.alias, self.parent.__genSQL__(), by[0]
)
result = executeSQL(
query,
title="Computing the different aggregations.",
method="fetchall",
)
for idx, elem in enumerate(result):
result[idx][0] = (
"NULL"
if (elem[0] == None)
else "'{}'".format(str(elem[0]).replace("'", "''"))
)
result[idx][1] = "NULL" if (elem[1] == None) else str(elem[1])
new_column = "COALESCE({}, DECODE({}, {}, NULL))".format(
"{}",
by[0],
", ".join(
["{}, {}".format(elem[0], elem[1]) for elem in result]
),
)
executeSQL(
"SELECT {} FROM {} LIMIT 1".format(
new_column.format(self.alias), self.parent.__genSQL__()
),
print_time_sql=False,
)
except:
new_column = "COALESCE({}, {}({}) OVER (PARTITION BY {}))".format(
"{}", fun, "{}", ", ".join(by)
)
else:
new_column = "COALESCE({}, {}({}) OVER (PARTITION BY {}))".format(
"{}", fun, "{}", ", ".join(by)
)
elif method in ("ffill", "pad", "bfill", "backfill"):
assert order_by, ParameterError(
"If the method is in ffill|pad|bfill|backfill then 'order_by' must be a list of at least one element to use to order the data"
)
desc = "" if (method in ("ffill", "pad")) else " DESC"
partition_by = (
"PARTITION BY {}".format(
", ".join([quote_ident(column) for column in by])
)
if (by)
else ""
)
order_by_ts = ", ".join([quote_ident(column) + desc for column in order_by])
new_column = "COALESCE({}, LAST_VALUE({} IGNORE NULLS) OVER ({} ORDER BY {}))".format(
"{}", "{}", partition_by, order_by_ts
)
if method in ("mean", "median") or isinstance(val, float):
category, ctype = "float", "float"
elif method == "0ifnull":
category, ctype = "int", "bool"
else:
category, ctype = self.category(), self.ctype()
copy_trans = [elem for elem in self.transformations]
total = self.count()
if method not in ["mode", "0ifnull"]:
max_floor = 0
all_partition = by
if method in ["ffill", "pad", "bfill", "backfill"]:
all_partition += [elem for elem in order_by]
for elem in all_partition:
if len(self.parent[elem].transformations) > max_floor:
max_floor = len(self.parent[elem].transformations)
max_floor -= len(self.transformations)
for k in range(max_floor):
self.transformations += [("{}", self.ctype(), self.category())]
self.transformations += [(new_column, ctype, category)]
try:
sauv = {}
for elem in self.catalog:
sauv[elem] = self.catalog[elem]
self.parent.__update_catalog__(erase=True, columns=[self.alias])
total = abs(self.count() - total)
except Exception as e:
self.transformations = [elem for elem in copy_trans]
raise QueryError("{}\nAn Error happened during the filling.".format(e))
if total > 0:
try:
if "count" in sauv:
self.catalog["count"] = int(sauv["count"]) + total
self.catalog["percent"] = (
100 * (int(sauv["count"]) + total) / self.parent.shape()[0]
)
except:
pass
total = int(total)
conj = "s were " if total > 1 else " was "
if verticapy.options["print_info"]:
print("{} element{}filled.".format(total, conj))
self.parent.__add_to_history__(
"[Fillna]: {} {} missing value{} filled.".format(
total, self.alias, conj,
)
)
else:
if verticapy.options["print_info"]:
print("Nothing was filled.")
self.transformations = [elem for elem in copy_trans]
for elem in sauv:
self.catalog[elem] = sauv[elem]
return self.parent
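# Illustrative usage (hedged sketch; `vdf` and its column names are hypothetical):
#   vdf["age"].fillna(method="mean", by=["sex"])
#   vdf["price"].fillna(method="ffill", order_by=["date"])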
# ---#
def geo_plot(self, *args, **kwargs):
"""
---------------------------------------------------------------------------
Draws the Geospatial object.
Parameters
----------
*args / **kwargs
Any optional parameter to pass to the geopandas plot function.
For more information, see:
https://geopandas.readthedocs.io/en/latest/docs/reference/api/
geopandas.GeoDataFrame.plot.html
Returns
-------
ax
Matplotlib axes object
"""
columns = [self.alias]
check = True
if len(args) > 0:
column = args[0]
elif "column" in kwargs:
column = kwargs["column"]
else:
check = False
if check:
self.parent.are_namecols_in(column)
column = self.parent.format_colnames(column)
columns += [column]
if not ("cmap" in kwargs):
from verticapy.plot import gen_cmap
kwargs["cmap"] = gen_cmap()[0]
else:
if not ("color" in kwargs):
from verticapy.plot import gen_colors
kwargs["color"] = gen_colors()[0]
if not ("legend" in kwargs):
kwargs["legend"] = True
if not ("figsize" in kwargs):
kwargs["figsize"] = (14, 10)
return self.parent[columns].to_geopandas(self.alias).plot(*args, **kwargs)
# ---#
def get_dummies(
self,
prefix: str = "",
prefix_sep: str = "_",
drop_first: bool = True,
use_numbers_as_suffix: bool = False,
):
"""
---------------------------------------------------------------------------
Encodes the vColumn with the One-Hot Encoding algorithm.
Parameters
----------
prefix: str, optional
Prefix of the dummies.
prefix_sep: str, optional
Prefix delimiter of the dummies.
drop_first: bool, optional
Drops the first dummy to avoid the creation of correlated features.
use_numbers_as_suffix: bool, optional
Uses numbers as suffixes instead of the vColumn categories.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].decode : Encodes the vColumn with a user-defined encoding.
vDataFrame[].discretize : Discretizes the vColumn.
vDataFrame[].label_encode : Encodes the vColumn with Label Encoding.
vDataFrame[].mean_encode : Encodes the vColumn using the mean encoding of a response.
"""
check_types(
[
("prefix", prefix, [str]),
("prefix_sep", prefix_sep, [str]),
("drop_first", drop_first, [bool]),
("use_numbers_as_suffix", use_numbers_as_suffix, [bool]),
]
)
distinct_elements = self.distinct()
if distinct_elements not in ([0, 1], [1, 0]) or self.isbool():
all_new_features = []
prefix = (
self.alias.replace('"', "") + prefix_sep.replace('"', "_")
if not (prefix)
else prefix.replace('"', "_") + prefix_sep.replace('"', "_")
)
n = 1 if drop_first else 0
for k in range(len(distinct_elements) - n):
name = (
'"{}{}"'.format(prefix, k)
if (use_numbers_as_suffix)
else '"{}{}"'.format(
prefix, str(distinct_elements[k]).replace('"', "_")
)
)
assert not (self.parent.is_colname_in(name)), NameError(
f"A vColumn has already the alias of one of the dummies ({name}).\n"
"It can be the result of using previously the method on the vColumn "
"or simply because of ambiguous columns naming.\nBy changing one of "
"the parameters ('prefix', 'prefix_sep'), you'll be able to solve this "
"issue."
)
for k in range(len(distinct_elements) - n):
name = (
'"{}{}"'.format(prefix, k)
if (use_numbers_as_suffix)
else '"{}{}"'.format(
prefix, str(distinct_elements[k]).replace('"', "_")
)
)
name = (
name.replace(" ", "_")
.replace("/", "_")
.replace(",", "_")
.replace("'", "_")
)
expr = "DECODE({}, '{}', 1, 0)".format(
"{}", str(distinct_elements[k]).replace("'", "''")
)
transformations = self.transformations + [(expr, "bool", "int")]
new_vColumn = vColumn(
name,
parent=self.parent,
transformations=transformations,
catalog={
"min": 0,
"max": 1,
"count": self.parent.shape()[0],
"percent": 100.0,
"unique": 2,
"approx_unique": 2,
"prod": 0,
},
)
setattr(self.parent, name, new_vColumn)
setattr(self.parent, name.replace('"', ""), new_vColumn)
self.parent._VERTICAPY_VARIABLES_["columns"] += [name]
all_new_features += [name]
conj = "s were " if len(all_new_features) > 1 else " was "
self.parent.__add_to_history__(
"[Get Dummies]: One hot encoder was applied to the vColumn {}\n{} feature{}created: {}".format(
self.alias, len(all_new_features), conj, ", ".join(all_new_features)
)
+ "."
)
return self.parent
one_hot_encode = get_dummies
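# Illustrative usage (hedged sketch; `vdf` and "color" are hypothetical):
#   vdf["color"].get_dummies(prefix="col", drop_first=True)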
# ---#
def head(self, limit: int = 5):
"""
---------------------------------------------------------------------------
Returns the head of the vColumn.
Parameters
----------
limit: int, optional
Number of elements to display.
Returns
-------
tablesample
An object containing the result. For more information, see
utilities.tablesample.
See Also
--------
vDataFrame[].tail : Returns the tail of the vColumn.
"""
return self.iloc(limit=limit)
# ---#
def hist(
self,
method: str = "density",
of: str = "",
max_cardinality: int = 6,
nbins: int = 0,
h: float = 0,
ax=None,
**style_kwds,
):
"""
---------------------------------------------------------------------------
Draws the histogram of the vColumn based on an aggregation.
Parameters
----------
method: str, optional
The method to use to aggregate the data.
count : Number of elements.
density : Percentage of the distribution.
mean : Average of the vColumn 'of'.
min : Minimum of the vColumn 'of'.
max : Maximum of the vColumn 'of'.
sum : Sum of the vColumn 'of'.
q% : q Quantile of the vColumn 'of' (ex: 50% to get the median).
It can also be a customized aggregation (ex: AVG(column1) + 5).
of: str, optional
The vColumn to use to compute the aggregation.
max_cardinality: int, optional
Maximum number of the vColumn distinct elements to be used as categorical
(No h will be picked or computed)
nbins: int, optional
Number of bins. If empty, an optimized number of bins will be computed.
h: float, optional
Interval width of the bar. If empty, an optimized h will be computed.
ax: Matplotlib axes object, optional
The axes to plot on.
**style_kwds
Any optional parameter to pass to the Matplotlib functions.
Returns
-------
ax
Matplotlib axes object
See Also
--------
vDataFrame[].bar : Draws the Bar Chart of vColumn based on an aggregation.
"""
check_types(
[
("method", method, [str]),
("of", of, [str]),
("max_cardinality", max_cardinality, [int, float]),
("h", h, [int, float]),
("nbins", nbins, [int, float]),
]
)
if of:
self.parent.are_namecols_in(of)
of = self.parent.format_colnames(of)
from verticapy.plot import hist
return hist(self, method, of, max_cardinality, nbins, h, ax=ax, **style_kwds)
# ---#
def iloc(self, limit: int = 5, offset: int = 0):
"""
---------------------------------------------------------------------------
Returns a part of the vColumn (delimited by an offset and a limit).
Parameters
----------
limit: int, optional
Number of elements to display.
offset: int, optional
Number of elements to skip.
Returns
-------
tablesample
An object containing the result. For more information, see
utilities.tablesample.
See Also
--------
vDataFrame[].head : Returns the head of the vColumn.
vDataFrame[].tail : Returns the tail of the vColumn.
"""
check_types([("limit", limit, [int, float]), ("offset", offset, [int, float])])
if offset < 0:
offset = max(0, self.parent.shape()[0] - limit)
title = "Reads {}.".format(self.alias)
tail = to_tablesample(
"SELECT {} AS {} FROM {}{} LIMIT {} OFFSET {}".format(
bin_spatial_to_str(self.category(), self.alias),
self.alias,
self.parent.__genSQL__(),
self.parent.__get_last_order_by__(),
limit,
offset,
),
title=title,
)
tail.count = self.parent.shape()[0]
tail.offset = offset
tail.dtype[self.alias] = self.ctype()
tail.name = self.alias
return tail
# ---#
def isbool(self):
"""
---------------------------------------------------------------------------
Returns True if the vColumn is boolean, False otherwise.
Returns
-------
bool
True if the vColumn is boolean.
See Also
--------
vDataFrame[].isdate : Returns True if the vColumn category is date.
vDataFrame[].isnum : Returns True if the vColumn is numerical.
"""
return self.ctype().lower() in ("bool", "boolean")
# ---#
def isdate(self):
"""
---------------------------------------------------------------------------
Returns True if the vColumn category is date, False otherwise.
Returns
-------
bool
True if the vColumn category is date.
See Also
--------
vDataFrame[].isbool : Returns True if the vColumn is boolean.
vDataFrame[].isnum : Returns True if the vColumn is numerical.
"""
return self.category() == "date"
# ---#
def isin(self, val: list, *args):
"""
---------------------------------------------------------------------------
Checks whether specific records are in the vColumn and returns the new
vDataFrame of the search.
Parameters
----------
val: list
List of the different records. For example, to check if Badr and Fouad
are in the vColumn, you can write the following list: ["Fouad", "Badr"]
Returns
-------
vDataFrame
The vDataFrame of the search.
See Also
--------
vDataFrame.isin : Looks if some specific records are in the vDataFrame.
"""
if isinstance(val, str) or not (isinstance(val, Iterable)):
val = [val]
val += list(args)
check_types([("val", val, [list])])
val = {self.alias: val}
return self.parent.isin(val)
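# Illustrative usage (hedged sketch; `vdf` and "name" are hypothetical):
#   vdf["name"].isin(["Fouad", "Badr"])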
# ---#
def isnum(self):
"""
---------------------------------------------------------------------------
Returns True if the vColumn is numerical, False otherwise.
Returns
-------
bool
True if the vColumn is numerical.
See Also
--------
vDataFrame[].isbool : Returns True if the vColumn is boolean.
vDataFrame[].isdate : Returns True if the vColumn category is date.
"""
return self.category() in ("float", "int")
# ---#
def iv_woe(self, y: str, nbins: int = 10):
"""
---------------------------------------------------------------------------
Computes the Information Value (IV) / Weight Of Evidence (WOE) Table. It
measures the predictive power of an independent variable in relation to the
dependent variable.
Parameters
----------
y: str
Response vColumn.
nbins: int, optional
Maximum number of bins used for the discretization (must be > 1).
Returns
-------
tablesample
An object containing the result. For more information, see
utilities.tablesample.
See Also
--------
vDataFrame.iv_woe : Computes the Information Value (IV) Table.
"""
check_types([("y", y, [str]), ("nbins", nbins, [int])])
self.parent.are_namecols_in(y)
y = self.parent.format_colnames(y)
assert self.parent[y].nunique() == 2, TypeError(
"vColumn {} must be binary to use iv_woe.".format(y)
)
response_cat = self.parent[y].distinct()
response_cat.sort()
assert response_cat == [0, 1], TypeError(
"vColumn {} must be binary to use iv_woe.".format(y)
)
trans = self.discretize(
method="same_width" if self.isnum() else "topk",
nbins=nbins,
k=nbins,
new_category="Others",
return_enum_trans=True,
)[0].replace("{}", self.alias)
query = "SELECT {} AS {}, {} AS ord, {}::int AS {} FROM {}".format(
trans, self.alias, self.alias, y, y, self.parent.__genSQL__(),
)
query = "SELECT {}, MIN(ord) AS ord, SUM(1 - {}) AS non_events, SUM({}) AS events FROM ({}) x GROUP BY 1".format(
self.alias, y, y, query,
)
query = "SELECT {}, ord, non_events, events, non_events / NULLIFZERO(SUM(non_events) OVER ()) AS pt_non_events, events / NULLIFZERO(SUM(events) OVER ()) AS pt_events FROM ({}) x".format(
self.alias, query,
)
query = "SELECT {} AS index, non_events, events, pt_non_events, pt_events, CASE WHEN non_events = 0 OR events = 0 THEN 0 ELSE ZEROIFNULL(LN(pt_non_events / NULLIFZERO(pt_events))) END AS woe, CASE WHEN non_events = 0 OR events = 0 THEN 0 ELSE (pt_non_events - pt_events) * ZEROIFNULL(LN(pt_non_events / NULLIFZERO(pt_events))) END AS iv FROM ({}) x ORDER BY ord".format(
self.alias, query,
)
title = "Computing WOE & IV of {} (response = {}).".format(self.alias, y)
result = to_tablesample(query, title=title)
result.values["index"] += ["total"]
result.values["non_events"] += [sum(result["non_events"])]
result.values["events"] += [sum(result["events"])]
result.values["pt_non_events"] += [""]
result.values["pt_events"] += [""]
result.values["woe"] += [""]
result.values["iv"] += [sum(result["iv"])]
return result
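# Illustrative usage (hedged sketch; `vdf`, "age", and the binary response
# "survived" are hypothetical):
#   vdf["age"].iv_woe(y="survived", nbins=10)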
# ---#
def kurtosis(self):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'kurtosis'.
Returns
-------
float
kurtosis
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
return self.aggregate(["kurtosis"]).values[self.alias][0]
kurt = kurtosis
# ---#
def label_encode(self):
"""
---------------------------------------------------------------------------
Encodes the vColumn using a bijection from the different categories to
[0, n - 1] (n being the vColumn cardinality).
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].decode : Encodes the vColumn with a user-defined encoding.
vDataFrame[].discretize : Discretizes the vColumn.
vDataFrame[].get_dummies : Encodes the vColumn with One-Hot Encoding.
vDataFrame[].mean_encode : Encodes the vColumn using the mean encoding of a response.
"""
if self.category() in ["date", "float"]:
warning_message = (
"label_encode is only available for categorical variables."
)
warnings.warn(warning_message, Warning)
else:
distinct_elements = self.distinct()
expr = ["DECODE({}"]
text_info = "\n"
for k in range(len(distinct_elements)):
expr += [
"'{}', {}".format(str(distinct_elements[k]).replace("'", "''"), k)
]
text_info += "\t{} => {}".format(distinct_elements[k], k)
expr = ", ".join(expr) + ", {})".format(len(distinct_elements))
self.transformations += [(expr, "int", "int")]
self.parent.__update_catalog__(erase=True, columns=[self.alias])
self.catalog["count"] = self.parent.shape()[0]
self.catalog["percent"] = 100
self.parent.__add_to_history__(
"[Label Encoding]: Label Encoding was applied to the vColumn {} using the following mapping:{}".format(
self.alias, text_info
)
)
return self.parent
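# Illustrative usage (hedged sketch; `vdf` and "embarked" are hypothetical):
#   vdf["embarked"].label_encode()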
# ---#
def mad(self):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'mad' (median absolute deviation).
Returns
-------
float
mad
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
return self.aggregate(["mad"]).values[self.alias][0]
# ---#
def max(self):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'max' (Maximum).
Returns
-------
float/str
maximum
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
return self.aggregate(["max"]).values[self.alias][0]
# ---#
def mean_encode(self, response: str):
"""
---------------------------------------------------------------------------
Encodes the vColumn using the average of the response partitioned by the
different vColumn categories.
Parameters
----------
response: str
Response vColumn.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].decode : Encodes the vColumn using a user-defined encoding.
vDataFrame[].discretize : Discretizes the vColumn.
vDataFrame[].label_encode : Encodes the vColumn with Label Encoding.
vDataFrame[].get_dummies : Encodes the vColumn with One-Hot Encoding.
"""
check_types([("response", response, [str])])
self.parent.are_namecols_in(response)
response = self.parent.format_colnames(response)
assert self.parent[response].isnum(), TypeError(
"The response column must be numerical to use a mean encoding"
)
max_floor = len(self.parent[response].transformations) - len(
self.transformations
)
for k in range(max_floor):
self.transformations += [("{}", self.ctype(), self.category())]
self.transformations += [
("AVG({}) OVER (PARTITION BY {})".format(response, "{}"), "int", "float")
]
self.parent.__update_catalog__(erase=True, columns=[self.alias])
self.parent.__add_to_history__(
"[Mean Encode]: The vColumn {} was transformed using a mean encoding with {} as Response Column.".format(
self.alias, response
)
)
if verticapy.options["print_info"]:
print("The mean encoding was successfully done.")
return self.parent
# ---#
def median(
self, approx: bool = True,
):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'median'.
Parameters
----------
approx: bool, optional
If set to True, the approximate median is returned. By setting this
parameter to False, the function's performance can drastically decrease.
Returns
-------
float/str
median
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
return self.quantile(0.5, approx=approx)
# ---#
def memory_usage(self):
"""
---------------------------------------------------------------------------
Returns the vColumn memory usage.
Returns
-------
float
vColumn memory usage (byte)
See Also
--------
vDataFrame.memory_usage : Returns the vDataFrame memory usage.
"""
import sys
total = (
sys.getsizeof(self)
+ sys.getsizeof(self.alias)
+ sys.getsizeof(self.transformations)
+ sys.getsizeof(self.catalog)
)
for elem in self.catalog:
total += sys.getsizeof(elem)
return total
# ---#
def min(self):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'min' (Minimum).
Returns
-------
float/str
minimum
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
return self.aggregate(["min"]).values[self.alias][0]
# ---#
def mode(self, dropna: bool = False, n: int = 1):
"""
---------------------------------------------------------------------------
Returns the nth most frequent element.
Parameters
----------
dropna: bool, optional
If set to True, NULL values will not be considered during the computation.
n: int, optional
Integer corresponding to the rank of the element. For example, if n = 1, this
method will return the mode of the vColumn.
Returns
-------
str/float/int
vColumn nth most frequent element.
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
check_types([("dropna", dropna, [bool]), ("n", n, [int, float])])
if n == 1:
pre_comp = self.parent.__get_catalog_value__(self.alias, "top")
if pre_comp != "VERTICAPY_NOT_PRECOMPUTED":
if not (dropna) and (pre_comp != None):
return pre_comp
assert n >= 1, ParameterError("Parameter 'n' must be greater than or equal to 1.")
where = " WHERE {} IS NOT NULL ".format(self.alias) if (dropna) else " "
result = executeSQL(
"SELECT {} FROM (SELECT {}, COUNT(*) AS _verticapy_cnt_ FROM {}{}GROUP BY {} ORDER BY _verticapy_cnt_ DESC LIMIT {}) VERTICAPY_SUBTABLE ORDER BY _verticapy_cnt_ ASC LIMIT 1".format(
self.alias, self.alias, self.parent.__genSQL__(), where, self.alias, n
),
title="Computing the mode.",
method="fetchall",
)
top = None if not (result) else result[0][0]
if not (dropna):
n = "" if (n == 1) else str(int(n))
if isinstance(top, decimal.Decimal):
top = float(top)
self.parent.__update_catalog__(
{"index": ["top{}".format(n)], self.alias: [top]}
)
return top
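# Illustrative usage (hedged sketch; `vdf` and "city" are hypothetical):
#   vdf["city"].mode()                    # most frequent element (NULLs counted)
#   vdf["city"].mode(dropna=True, n=2)    # second most frequent non-NULL element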
# ---#
def mul(self, x: float):
"""
---------------------------------------------------------------------------
Multiplies the vColumn by the input element.
Parameters
----------
x: float
Input number.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].apply : Applies a function to the input vColumn.
"""
check_types([("x", x, [int, float])])
return self.apply(func="{} * ({})".format("{}", x))
# ---#
def nlargest(self, n: int = 10):
"""
---------------------------------------------------------------------------
Returns the n largest vColumn elements.
Parameters
----------
n: int, optional
Number of elements to return.
Returns
-------
tablesample
An object containing the result. For more information, see
utilities.tablesample.
See Also
--------
vDataFrame[].nsmallest : Returns the n smallest elements in the vColumn.
"""
check_types([("n", n, [int, float])])
query = "SELECT * FROM {} WHERE {} IS NOT NULL ORDER BY {} DESC LIMIT {}".format(
self.parent.__genSQL__(), self.alias, self.alias, n
)
title = "Reads {} {} largest elements.".format(self.alias, n)
return to_tablesample(query, title=title)
# ---#
def normalize(
self, method: str = "zscore", by: list = [], return_trans: bool = False
):
"""
---------------------------------------------------------------------------
Normalizes the input vColumns using the input method.
Parameters
----------
method: str, optional
Method to use to normalize.
zscore : Normalization using the Z-Score (avg and std).
(x - avg) / std
robust_zscore : Normalization using the Robust Z-Score (median and mad).
(x - median) / (1.4826 * mad)
minmax : Normalization using the MinMax (min and max).
(x - min) / (max - min)
by: list, optional
vColumns used in the partition.
return_trans: bool, optional
If set to True, the method will return the transformation used instead of
the parent vDataFrame. This parameter is used for testing purposes.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame.outliers : Computes the vDataFrame Global Outliers.
"""
if isinstance(by, str):
by = [by]
check_types(
[
("method", method, ["zscore", "robust_zscore", "minmax"]),
("by", by, [list]),
("return_trans", return_trans, [bool]),
]
)
method = method.lower()
self.parent.are_namecols_in(by)
by = self.parent.format_colnames(by)
nullifzero, n = 1, len(by)
if self.isbool():
warning_message = "Normalize doesn't work on booleans".format(self.alias)
warnings.warn(warning_message, Warning)
elif self.isnum():
if method == "zscore":
if n == 0:
nullifzero = 0
avg, stddev = self.aggregate(["avg", "std"]).values[self.alias]
if stddev == 0:
warning_message = "Can not normalize {} using a Z-Score - The Standard Deviation is null !".format(
self.alias
)
warnings.warn(warning_message, Warning)
return self
elif (n == 1) and (self.parent[by[0]].nunique() < 50):
try:
result = executeSQL(
"SELECT {}, AVG({}), STDDEV({}) FROM {} GROUP BY {}".format(
by[0],
self.alias,
self.alias,
self.parent.__genSQL__(),
by[0],
),
title="Computing the different categories to normalize.",
method="fetchall",
)
for i in range(len(result)):
if result[i][2] == None:
pass
elif math.isnan(result[i][2]):
result[i][2] = None
avg = "DECODE({}, {}, NULL)".format(
by[0],
", ".join(
[
"{}, {}".format(
"'{}'".format(str(elem[0]).replace("'", "''"))
if elem[0] != None
else "NULL",
elem[1] if elem[1] != None else "NULL",
)
for elem in result
if elem[1] != None
]
),
)
stddev = "DECODE({}, {}, NULL)".format(
by[0],
", ".join(
[
"{}, {}".format(
"'{}'".format(str(elem[0]).replace("'", "''"))
if elem[0] != None
else "NULL",
elem[2] if elem[2] != None else "NULL",
)
for elem in result
if elem[2] != None
]
),
)
executeSQL(
"SELECT {}, {} FROM {} LIMIT 1".format(
avg, stddev, self.parent.__genSQL__()
),
print_time_sql=False,
)
except:
avg, stddev = (
"AVG({}) OVER (PARTITION BY {})".format(
self.alias, ", ".join(by)
),
"STDDEV({}) OVER (PARTITION BY {})".format(
self.alias, ", ".join(by)
),
)
else:
avg, stddev = (
"AVG({}) OVER (PARTITION BY {})".format(
self.alias, ", ".join(by)
),
"STDDEV({}) OVER (PARTITION BY {})".format(
self.alias, ", ".join(by)
),
)
if return_trans:
return "({} - {}) / {}({})".format(
self.alias, avg, "NULLIFZERO" if (nullifzero) else "", stddev
)
else:
final_transformation = [
(
"({} - {}) / {}({})".format(
"{}", avg, "NULLIFZERO" if (nullifzero) else "", stddev
),
"float",
"float",
)
]
elif method == "robust_zscore":
if n > 0:
warning_message = "The method 'robust_zscore' is available only if the parameter 'by' is empty\nIf you want to normalize by grouping by elements, please use a method in zscore|minmax"
warnings.warn(warning_message, Warning)
return self
mad, med = self.aggregate(["mad", "approx_median"]).values[self.alias]
mad *= 1.4826
if mad != 0:
if return_trans:
return "({} - {}) / ({})".format(self.alias, med, mad)
else:
final_transformation = [
(
"({} - {}) / ({})".format("{}", med, mad),
"float",
"float",
)
]
else:
warning_message = "Can not normalize {} using a Robust Z-Score - The MAD is null !".format(
self.alias
)
warnings.warn(warning_message, Warning)
return self
elif method == "minmax":
if n == 0:
nullifzero = 0
cmin, cmax = self.aggregate(["min", "max"]).values[self.alias]
if cmax - cmin == 0:
warning_message = "Can not normalize {} using the MIN and the MAX. MAX = MIN !".format(
self.alias
)
warnings.warn(warning_message, Warning)
return self
elif n == 1:
try:
result = executeSQL(
"SELECT {}, MIN({}), MAX({}) FROM {} GROUP BY {}".format(
by[0],
self.alias,
self.alias,
self.parent.__genSQL__(),
by[0],
),
title="Computing the different categories {} to normalize.".format(
by[0]
),
method="fetchall",
)
cmin = "DECODE({}, {}, NULL)".format(
by[0],
", ".join(
[
"{}, {}".format(
"'{}'".format(str(elem[0]).replace("'", "''"))
if elem[0] != None
else "NULL",
elem[1] if elem[1] != None else "NULL",
)
for elem in result
if elem[1] != None
]
),
)
cmax = "DECODE({}, {}, NULL)".format(
by[0],
", ".join(
[
"{}, {}".format(
"'{}'".format(str(elem[0]).replace("'", "''"))
if elem[0] != None
else "NULL",
elem[2] if elem[2] != None else "NULL",
)
for elem in result
if elem[2] != None
]
),
)
executeSQL(
"SELECT {}, {} FROM {} LIMIT 1".format(
cmax, cmin, self.parent.__genSQL__()
),
print_time_sql=False,
)
except:
cmax, cmin = (
"MAX({}) OVER (PARTITION BY {})".format(
self.alias, ", ".join(by)
),
"MIN({}) OVER (PARTITION BY {})".format(
self.alias, ", ".join(by)
),
)
else:
cmax, cmin = (
"MAX({}) OVER (PARTITION BY {})".format(
self.alias, ", ".join(by)
),
"MIN({}) OVER (PARTITION BY {})".format(
self.alias, ", ".join(by)
),
)
if return_trans:
return "({} - {}) / {}({} - {})".format(
self.alias,
cmin,
"NULLIFZERO" if (nullifzero) else "",
cmax,
cmin,
)
else:
final_transformation = [
(
"({} - {}) / {}({} - {})".format(
"{}",
cmin,
"NULLIFZERO" if (nullifzero) else "",
cmax,
cmin,
),
"float",
"float",
)
]
if method != "robust_zscore":
max_floor = 0
for elem in by:
if len(self.parent[elem].transformations) > max_floor:
max_floor = len(self.parent[elem].transformations)
max_floor -= len(self.transformations)
for k in range(max_floor):
self.transformations += [("{}", self.ctype(), self.category())]
self.transformations += final_transformation
sauv = {}
for elem in self.catalog:
sauv[elem] = self.catalog[elem]
self.parent.__update_catalog__(erase=True, columns=[self.alias])
try:
if "count" in sauv:
self.catalog["count"] = sauv["count"]
self.catalog["percent"] = (
100 * sauv["count"] / self.parent.shape()[0]
)
for elem in sauv:
if "top" in elem:
if "percent" in elem:
self.catalog[elem] = sauv[elem]
elif elem == None:
self.catalog[elem] = None
elif method == "robust_zscore":
self.catalog[elem] = (sauv[elem] - sauv["approx_50%"]) / (
1.4826 * sauv["mad"]
)
elif method == "zscore":
self.catalog[elem] = (sauv[elem] - sauv["mean"]) / sauv[
"std"
]
elif method == "minmax":
self.catalog[elem] = (sauv[elem] - sauv["min"]) / (
sauv["max"] - sauv["min"]
)
except:
pass
if method == "robust_zscore":
self.catalog["median"] = 0
self.catalog["mad"] = 1 / 1.4826
elif method == "zscore":
self.catalog["mean"] = 0
self.catalog["std"] = 1
elif method == "minmax":
self.catalog["min"] = 0
self.catalog["max"] = 1
self.parent.__add_to_history__(
"[Normalize]: The vColumn '{}' was normalized with the method '{}'.".format(
self.alias, method
)
)
else:
raise TypeError("The vColumn must be numerical for Normalization")
return self.parent
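# Illustrative usage (hedged sketch; `vdf`, "age", and "sex" are hypothetical):
#   vdf["age"].normalize(method="minmax")
#   vdf["age"].normalize(method="zscore", by=["sex"])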
# ---#
def nsmallest(self, n: int = 10):
"""
---------------------------------------------------------------------------
Returns the n smallest elements in the vColumn.
Parameters
----------
n: int, optional
Number of elements to return.
Returns
-------
tablesample
An object containing the result. For more information, see
utilities.tablesample.
See Also
--------
vDataFrame[].nlargest : Returns the n largest vColumn elements.
"""
check_types([("n", n, [int, float])])
query = "SELECT * FROM {} WHERE {} IS NOT NULL ORDER BY {} ASC LIMIT {}".format(
self.parent.__genSQL__(), self.alias, self.alias, n
)
title = "Reads {} {} smallest elements.".format(n, self.alias)
return to_tablesample(query, title=title)
# ---#
def numh(self, method: str = "auto"):
"""
---------------------------------------------------------------------------
Computes the optimal vColumn bar width.
Parameters
----------
method: str, optional
Method to use to compute the optimal h.
auto : Combination of Freedman Diaconis and Sturges.
freedman_diaconis : Freedman Diaconis [2 * IQR / n ** (1 / 3)]
sturges : Sturges [CEIL(log2(n)) + 1]
Returns
-------
float
optimal bar width.
"""
check_types(
[("method", method, ["sturges", "freedman_diaconis", "fd", "auto"])]
)
method = method.lower()
if method == "auto":
pre_comp = self.parent.__get_catalog_value__(self.alias, "numh")
if pre_comp != "VERTICAPY_NOT_PRECOMPUTED":
return pre_comp
assert self.isnum() or self.isdate(), ParameterError(
"numh is only available on type numeric|date"
)
if self.isnum():
result = (
self.parent.describe(
method="numerical", columns=[self.alias], unique=False
)
.transpose()
.values[self.alias]
)
count, vColumn_min, vColumn_025, vColumn_075, vColumn_max = (
result[0],
result[3],
result[4],
result[6],
result[7],
)
elif self.isdate():
min_date = self.min()
table = "(SELECT DATEDIFF('second', '{}'::timestamp, {}) AS {} FROM {}) VERTICAPY_OPTIMAL_H_TABLE".format(
min_date, self.alias, self.alias, self.parent.__genSQL__()
)
query = "SELECT COUNT({}) AS NAs, MIN({}) AS min, APPROXIMATE_PERCENTILE({} USING PARAMETERS percentile = 0.25) AS Q1, APPROXIMATE_PERCENTILE({} USING PARAMETERS percentile = 0.75) AS Q3, MAX({}) AS max FROM {}".format(
self.alias, self.alias, self.alias, self.alias, self.alias, table
)
result = executeSQL(
query,
title="Different aggregations to compute the optimal h.",
method="fetchrow",
)
count, vColumn_min, vColumn_025, vColumn_075, vColumn_max = result
sturges = max(
float(vColumn_max - vColumn_min) / int(math.floor(math.log(count, 2) + 2)),
1e-99,
)
fd = max(2.0 * (vColumn_075 - vColumn_025) / (count) ** (1.0 / 3.0), 1e-99)
if method.lower() == "sturges":
best_h = sturges
elif method.lower() in ("freedman_diaconis", "fd"):
best_h = fd
else:
best_h = max(sturges, fd)
self.parent.__update_catalog__({"index": ["numh"], self.alias: [best_h]})
if self.category() == "int":
best_h = max(math.floor(best_h), 1)
return best_h
# ---#
def nunique(self, approx: bool = True):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'unique' (cardinality).
Parameters
----------
approx: bool, optional
If set to True, the approximate cardinality is returned. By setting
this parameter to False, the function's performance can drastically
decrease.
Returns
-------
int
vColumn cardinality (or approximate cardinality).
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
check_types([("approx", approx, [bool])])
if approx:
return self.aggregate(func=["approx_unique"]).values[self.alias][0]
else:
return self.aggregate(func=["unique"]).values[self.alias][0]
# ---#
def pie(
self,
method: str = "density",
of: str = "",
max_cardinality: int = 6,
h: float = 0,
pie_type: str = "auto",
ax=None,
**style_kwds,
):
"""
---------------------------------------------------------------------------
Draws the pie chart of the vColumn based on an aggregation.
Parameters
----------
method: str, optional
The method to use to aggregate the data.
count : Number of elements.
density : Percentage of the distribution.
mean : Average of the vColumn 'of'.
min : Minimum of the vColumn 'of'.
max : Maximum of the vColumn 'of'.
sum : Sum of the vColumn 'of'.
q% : q Quantile of the vColumn 'of' (ex: 50% to get the median).
It can also be a customized aggregation (ex: AVG(column1) + 5).
of: str, optional
The vColumn to use to compute the aggregation.
max_cardinality: int, optional
Maximum number of the vColumn distinct elements to be used as categorical
(No h will be picked or computed)
h: float, optional
Interval width of the bar. If empty, an optimized h will be computed.
pie_type: str, optional
The type of pie chart.
auto : Regular pie chart.
donut : Donut chart.
rose : Rose chart.
ax: Matplotlib axes object, optional
The axes to plot on.
**style_kwds
Any optional parameter to pass to the Matplotlib functions.
Returns
-------
ax
Matplotlib axes object
See Also
--------
vDataFrame.donut : Draws the donut chart of the vColumn based on an aggregation.
"""
if isinstance(pie_type, str):
pie_type = pie_type.lower()
check_types(
[
("method", method, [str]),
("of", of, [str]),
("max_cardinality", max_cardinality, [int, float]),
("h", h, [int, float]),
("pie_type", pie_type, ["auto", "donut", "rose"]),
]
)
donut = True if pie_type == "donut" else False
rose = True if pie_type == "rose" else False
if of:
self.parent.are_namecols_in(of)
of = self.parent.format_colnames(of)
from verticapy.plot import pie
return pie(
self, method, of, max_cardinality, h, donut, rose, ax=ax, **style_kwds,
)
# ---#
def plot(
self,
ts: str,
by: str = "",
start_date: Union[str, datetime.datetime, datetime.date] = "",
end_date: Union[str, datetime.datetime, datetime.date] = "",
area: bool = False,
step: bool = False,
ax=None,
**style_kwds,
):
"""
---------------------------------------------------------------------------
Draws the Time Series of the vColumn.
Parameters
----------
ts: str
TS (Time Series) vColumn to use to order the data. The vColumn type must be
date like (date, datetime, timestamp...) or numerical.
by: str, optional
vColumn to use to partition the TS.
start_date: str / date, optional
Input Start Date. For example, time = '03-11-1993' will filter the data when
'ts' is earlier than November 3rd, 1993.
end_date: str / date, optional
Input End Date. For example, time = '03-11-1993' will filter the data when
'ts' is later than November 3rd, 1993.
area: bool, optional
If set to True, draw an Area Plot.
step: bool, optional
If set to True, draw a Step Plot.
ax: Matplotlib axes object, optional
The axes to plot on.
**style_kwds
Any optional parameter to pass to the Matplotlib functions.
Returns
-------
ax
Matplotlib axes object
See Also
--------
vDataFrame.plot : Draws the time series.
"""
check_types(
[
("ts", ts, [str]),
("by", by, [str]),
("start_date", start_date, [str, datetime.datetime, datetime.date]),
("end_date", end_date, [str, datetime.datetime, datetime.date]),
("area", area, [bool]),
("step", step, [bool]),
]
)
self.parent.are_namecols_in(ts)
ts = self.parent.format_colnames(ts)
if by:
self.parent.are_namecols_in(by)
by = self.parent.format_colnames(by)
from verticapy.plot import ts_plot
return ts_plot(
self, ts, by, start_date, end_date, area, step, ax=ax, **style_kwds,
)
# ---#
def product(self):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'product'.
Returns
-------
float
product
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
return self.aggregate(func=["prod"]).values[self.alias][0]
prod = product
# ---#
def quantile(self, x: float, approx: bool = True):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using an input 'quantile'.
Parameters
----------
x: float
A float between 0 and 1 that represents the quantile.
For example: 0.25 represents Q1.
approx: bool, optional
If set to True, the approximate quantile is returned. By setting this
parameter to False, the function's performance can drastically decrease.
Returns
-------
float
quantile (or approximate quantile).
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
check_types([("x", x, [int, float], ("approx", approx, [bool]))])
prefix = "approx_" if approx else ""
return self.aggregate(func=[prefix + "{}%".format(x * 100)]).values[self.alias][
0
]
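# Illustrative usage (hedged sketch; `vdf` and "age" are hypothetical):
#   vdf["age"].quantile(0.25)                  # approximate first quartile
#   vdf["age"].quantile(0.9, approx=False)     # exact 90% quantile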
# ---#
def range_plot(
self,
ts: str,
q: tuple = (0.25, 0.75),
start_date: Union[str, datetime.datetime, datetime.date] = "",
end_date: Union[str, datetime.datetime, datetime.date] = "",
plot_median: bool = False,
ax=None,
**style_kwds,
):
"""
---------------------------------------------------------------------------
Draws the range plot of the vColumn. The aggregations used are the median
and two input quantiles.
Parameters
----------
ts: str
TS (Time Series) vColumn to use to order the data. The vColumn type must be
date like (date, datetime, timestamp...) or numerical.
q: tuple, optional
Tuple including the 2 quantiles used to draw the Plot.
start_date: str / date, optional
Input Start Date. For example, time = '03-11-1993' will filter the data when
'ts' is earlier than November 3rd, 1993.
end_date: str / date, optional
Input End Date. For example, time = '03-11-1993' will filter the data when
'ts' is later than November 3rd, 1993.
plot_median: bool, optional
If set to True, the Median will be drawn.
ax: Matplotlib axes object, optional
The axes to plot on.
**style_kwds
Any optional parameter to pass to the Matplotlib functions.
Returns
-------
ax
Matplotlib axes object
See Also
--------
vDataFrame.plot : Draws the time series.
"""
check_types(
[
("ts", ts, [str]),
("q", q, [tuple]),
(
"start_date",
start_date,
[str, datetime.datetime, datetime.date, int, float],
),
(
"end_date",
end_date,
[str, datetime.datetime, datetime.date, int, float],
),
("plot_median", plot_median, [bool]),
]
)
self.parent.are_namecols_in(ts)
ts = self.parent.format_colnames(ts)
from verticapy.plot import range_curve_vdf
return range_curve_vdf(
self, ts, q, start_date, end_date, plot_median, ax=ax, **style_kwds,
)
# ---#
def rename(self, new_name: str):
"""
---------------------------------------------------------------------------
Renames the vColumn by dropping the current vColumn and creating a copy with
the specified name.
\u26A0 Warning : SQL code generation will be slower if the vDataFrame has been
transformed multiple times, so it's better practice to use
this method when first preparing your data.
Parameters
----------
new_name: str
The new vColumn alias.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame.add_copy : Creates a copy of the vColumn.
"""
check_types([("new_name", new_name, [str])])
old_name = quote_ident(self.alias)
new_name = new_name.replace('"', "")
assert not (self.parent.is_colname_in(new_name)), NameError(
f"A vColumn has already the alias {new_name}.\nBy changing the parameter 'new_name', you'll be able to solve this issue."
)
self.add_copy(new_name)
parent = self.drop(add_history=False)
parent.__add_to_history__(
"[Rename]: The vColumn {} was renamed '{}'.".format(old_name, new_name)
)
return parent
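# Illustrative usage (hedged sketch; `vdf` and "sex" are hypothetical):
#   vdf["sex"].rename("gender")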
# ---#
def round(self, n: int):
"""
---------------------------------------------------------------------------
Rounds the vColumn by keeping only the input number of digits after the decimal point.
Parameters
----------
n: int
Number of digits to keep after the decimal point.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].apply : Applies a function to the input vColumn.
"""
check_types([("n", n, [int, float])])
return self.apply(func="ROUND({}, {})".format("{}", n))
# ---#
def sem(self):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'sem' (standard error of mean).
Returns
-------
float
sem
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
return self.aggregate(["sem"]).values[self.alias][0]
# ---#
def skewness(self):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'skewness'.
Returns
-------
float
skewness
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
return self.aggregate(["skewness"]).values[self.alias][0]
skew = skewness
# ---#
def slice(self, length: int, unit: str = "second", start: bool = True):
"""
---------------------------------------------------------------------------
Slices and transforms the vColumn using a time series rule.
Parameters
----------
length: int
Slice size.
    unit: str, optional
        Slice size unit. For example, it can be 'minute', 'hour'...
    start: bool, optional
        If set to True, each record is mapped to the start (floor) of its time
        slice instead of its end (ceiling).
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].date_part : Extracts a specific TS field from the vColumn.
"""
check_types(
[
("length", length, [int, float]),
("unit", unit, [str]),
("start", start, [bool]),
]
)
start_or_end = "START" if (start) else "END"
return self.apply(
func="TIME_SLICE({}, {}, '{}', '{}')".format(
"{}", length, unit.upper(), start_or_end
)
)
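    # Usage sketch (hypothetical column name): vdf["ts"].slice(30, "minute") maps
    # each timestamp to its 30-minute slice using Vertica's TIME_SLICE function.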
# ---#
def spider(
self,
by: str = "",
method: str = "density",
of: str = "",
max_cardinality: Union[int, tuple] = (6, 6),
h: Union[int, float, tuple] = (None, None),
ax=None,
**style_kwds,
):
"""
---------------------------------------------------------------------------
Draws the spider plot of the input vColumn based on an aggregation.
Parameters
----------
by: str, optional
vColumn to use to partition the data.
method: str, optional
The method to use to aggregate the data.
count : Number of elements.
density : Percentage of the distribution.
mean : Average of the vColumn 'of'.
min : Minimum of the vColumn 'of'.
max : Maximum of the vColumn 'of'.
sum : Sum of the vColumn 'of'.
q% : q Quantile of the vColumn 'of' (ex: 50% to get the median).
                       It can also be a customized aggregation (ex: AVG(column1) + 5).
of: str, optional
The vColumn to use to compute the aggregation.
h: int/float/tuple, optional
Interval width of the vColumns 1 and 2 bars. It is only valid if the
vColumns are numerical. Optimized h will be computed if the parameter
is empty or invalid.
max_cardinality: int/tuple, optional
Maximum number of distinct elements for vColumns 1 and 2 to be used as
categorical (No h will be picked or computed)
ax: Matplotlib axes object, optional
The axes to plot on.
**style_kwds
Any optional parameter to pass to the Matplotlib functions.
Returns
-------
ax
Matplotlib axes object
See Also
--------
vDataFrame.bar : Draws the Bar Chart of the input vColumns based on an aggregation.
"""
check_types(
[
("by", by, [str]),
("method", method, [str]),
("of", of, [str]),
("max_cardinality", max_cardinality, [list]),
("h", h, [list, float, int]),
]
)
if by:
self.parent.are_namecols_in(by)
by = self.parent.format_colnames(by)
columns = [self.alias, by]
else:
columns = [self.alias]
if of:
self.parent.are_namecols_in(of)
of = self.parent.format_colnames(of)
from verticapy.plot import spider as spider_plot
return spider_plot(
self.parent, columns, method, of, max_cardinality, h, ax=ax, **style_kwds,
)
# ---#
def std(self):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'std' (Standard Deviation).
Returns
-------
float
std
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
return self.aggregate(["stddev"]).values[self.alias][0]
stddev = std
# ---#
def store_usage(self):
"""
---------------------------------------------------------------------------
    Returns the vColumn expected store usage (unit: bytes).
Returns
-------
int
vColumn expected store usage.
See Also
--------
vDataFrame.expected_store_usage : Returns the vDataFrame expected store usage.
"""
pre_comp = self.parent.__get_catalog_value__(self.alias, "store_usage")
if pre_comp != "VERTICAPY_NOT_PRECOMPUTED":
return pre_comp
store_usage = executeSQL(
"SELECT ZEROIFNULL(SUM(LENGTH({}::varchar))) FROM {}".format(
bin_spatial_to_str(self.category(), self.alias),
self.parent.__genSQL__(),
),
title="Computing the Store Usage of the vColumn {}.".format(self.alias),
method="fetchfirstelem",
)
self.parent.__update_catalog__(
{"index": ["store_usage"], self.alias: [store_usage]}
)
return store_usage
# ---#
def str_contains(self, pat: str):
"""
---------------------------------------------------------------------------
Verifies if the regular expression is in each of the vColumn records.
The vColumn will be transformed.
Parameters
----------
pat: str
Regular expression.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].str_count : Computes the number of matches for the regular expression
in each record of the vColumn.
vDataFrame[].extract : Extracts the regular expression in each record of the
vColumn.
vDataFrame[].str_replace : Replaces the regular expression matches in each of the
vColumn records by an input value.
vDataFrame[].str_slice : Slices the vColumn.
"""
check_types([("pat", pat, [str])])
return self.apply(
func="REGEXP_COUNT({}, '{}') > 0".format("{}", pat.replace("'", "''"))
)
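    # Usage sketch (hypothetical column name): vdf["name"].str_contains("^A")
    # turns the vColumn into a boolean flag marking records matching the pattern.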
# ---#
def str_count(self, pat: str):
"""
---------------------------------------------------------------------------
Computes the number of matches for the regular expression in each record of
the vColumn. The vColumn will be transformed.
Parameters
----------
pat: str
regular expression.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].str_contains : Verifies if the regular expression is in each of the
vColumn records.
vDataFrame[].extract : Extracts the regular expression in each record of the
vColumn.
vDataFrame[].str_replace : Replaces the regular expression matches in each of the
vColumn records by an input value.
vDataFrame[].str_slice : Slices the vColumn.
"""
check_types([("pat", pat, [str])])
return self.apply(
func="REGEXP_COUNT({}, '{}')".format("{}", pat.replace("'", "''"))
)
# ---#
def str_extract(self, pat: str):
"""
---------------------------------------------------------------------------
Extracts the regular expression in each record of the vColumn.
The vColumn will be transformed.
Parameters
----------
pat: str
regular expression.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].str_contains : Verifies if the regular expression is in each of the
vColumn records.
vDataFrame[].str_count : Computes the number of matches for the regular expression
in each record of the vColumn.
vDataFrame[].str_replace : Replaces the regular expression matches in each of the
vColumn records by an input value.
vDataFrame[].str_slice : Slices the vColumn.
"""
check_types([("pat", pat, [str])])
return self.apply(
func="REGEXP_SUBSTR({}, '{}')".format("{}", pat.replace("'", "''"))
)
# ---#
def str_replace(self, to_replace: str, value: str = ""):
"""
---------------------------------------------------------------------------
    Replaces the regular expression matches in each of the vColumn records with
    an input value. The vColumn will be transformed.
Parameters
----------
to_replace: str
Regular expression to replace.
value: str, optional
New value.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].str_contains : Verifies if the regular expression is in each of the
vColumn records.
vDataFrame[].str_count : Computes the number of matches for the regular expression
in each record of the vColumn.
vDataFrame[].extract : Extracts the regular expression in each record of the
vColumn.
vDataFrame[].str_slice : Slices the vColumn.
"""
check_types([("to_replace", to_replace, [str]), ("value", value, [str])])
return self.apply(
func="REGEXP_REPLACE({}, '{}', '{}')".format(
"{}", to_replace.replace("'", "''"), value.replace("'", "''")
)
)
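    # Usage sketch (hypothetical column name): vdf["phone"].str_replace("[^0-9]", "")
    # replaces non-digit matches with an empty string in each record.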
# ---#
def str_slice(self, start: int, step: int):
"""
---------------------------------------------------------------------------
Slices the vColumn. The vColumn will be transformed.
Parameters
----------
    start: int
        Start position of the slice.
    step: int
        Length of the slice.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].str_contains : Verifies if the regular expression is in each of the
vColumn records.
vDataFrame[].str_count : Computes the number of matches for the regular expression
in each record of the vColumn.
vDataFrame[].extract : Extracts the regular expression in each record of the
vColumn.
vDataFrame[].str_replace : Replaces the regular expression matches in each of the
vColumn records by an input value.
"""
check_types([("start", start, [int, float]), ("step", step, [int, float])])
return self.apply(func="SUBSTR({}, {}, {})".format("{}", start, step))
# ---#
def sub(self, x: float):
"""
---------------------------------------------------------------------------
Subtracts the input element from the vColumn.
Parameters
----------
x: float
If the vColumn type is date like (date, datetime ...), the parameter 'x'
will represent the number of seconds, otherwise it will represent a number.
Returns
-------
vDataFrame
self.parent
See Also
--------
vDataFrame[].apply : Applies a function to the input vColumn.
"""
check_types([("x", x, [int, float])])
if self.isdate():
return self.apply(func="TIMESTAMPADD(SECOND, -({}), {})".format(x, "{}"))
else:
return self.apply(func="{} - ({})".format("{}", x))
# ---#
def sum(self):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'sum'.
Returns
-------
float
sum
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
return self.aggregate(["sum"]).values[self.alias][0]
# ---#
def tail(self, limit: int = 5):
"""
---------------------------------------------------------------------------
Returns the tail of the vColumn.
Parameters
----------
limit: int, optional
Number of elements to display.
Returns
-------
tablesample
An object containing the result. For more information, see
utilities.tablesample.
See Also
--------
vDataFrame[].head : Returns the head of the vColumn.
"""
return self.iloc(limit=limit, offset=-1)
# ---#
def topk(self, k: int = -1, dropna: bool = True):
"""
---------------------------------------------------------------------------
    Returns the k most frequently occurring elements and their distributions as
    percents.
    Parameters
    ----------
    k: int, optional
        Number of the most frequently occurring elements to return.
dropna: bool, optional
If set to True, NULL values will not be considered during the computation.
Returns
-------
tablesample
An object containing the result. For more information, see
utilities.tablesample.
See Also
--------
vDataFrame[].describe : Computes the vColumn descriptive statistics.
"""
check_types([("k", k, [int, float]), ("dropna", dropna, [bool])])
topk = "" if (k < 1) else "LIMIT {}".format(k)
dropna = " WHERE {} IS NOT NULL".format(self.alias) if (dropna) else ""
query = "SELECT {} AS {}, COUNT(*) AS _verticapy_cnt_, 100 * COUNT(*) / {} AS percent FROM {}{} GROUP BY {} ORDER BY _verticapy_cnt_ DESC {}".format(
bin_spatial_to_str(self.category(), self.alias),
self.alias,
self.parent.shape()[0],
self.parent.__genSQL__(),
dropna,
self.alias,
topk,
)
result = executeSQL(
query,
title="Computing the top{} categories of {}.".format(
k if k > 0 else "", self.alias
),
method="fetchall",
)
values = {
"index": [item[0] for item in result],
"count": [int(item[1]) for item in result],
"percent": [float(round(item[2], 3)) for item in result],
}
return tablesample(values)
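    # Usage sketch (hypothetical column name): vdf["color"].topk(3) returns the
    # three most frequent categories with their counts and percentages.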
# ---#
def value_counts(self, k: int = 30):
"""
---------------------------------------------------------------------------
    Returns the k most frequently occurring elements, how often they occur, and
    other statistical information.
    Parameters
    ----------
    k: int, optional
        Number of the most frequently occurring elements to return.
Returns
-------
tablesample
An object containing the result. For more information, see
utilities.tablesample.
See Also
--------
vDataFrame[].describe : Computes the vColumn descriptive statistics.
"""
return self.describe(method="categorical", max_cardinality=k)
# ---#
def var(self):
"""
---------------------------------------------------------------------------
Aggregates the vColumn using 'var' (Variance).
Returns
-------
float
var
See Also
--------
vDataFrame.aggregate : Computes the vDataFrame input aggregations.
"""
return self.aggregate(["variance"]).values[self.alias][0]
variance = var
| [((21802, 21871), 'verticapy.plot.bar', 'bar', (['self', 'method', 'of', 'max_cardinality', 'nbins', 'h'], {'ax': 'ax'}), '(self, method, of, max_cardinality, nbins, h, ax=ax, **style_kwds)\n', (21805, 21871), False, 'from verticapy.plot import bar\n'), ((23663, 23735), 'verticapy.plot.boxplot', 'boxplot', (['self', 'by', 'h', 'max_cardinality', 'cat_priority'], {'ax': 'ax'}), '(self, by, h, max_cardinality, cat_priority, ax=ax, **style_kwds)\n', (23670, 23735), False, 'from verticapy.plot import boxplot\n'), ((34053, 34154), 'verticapy.learn.neighbors.KernelDensity', 'KernelDensity', (['name'], {'bandwidth': 'bandwidth', 'kernel': 'kernel', 'nbins': 'nbins', 'xlim': 'xlim_tmp', 'store': '(False)'}), '(name, bandwidth=bandwidth, kernel=kernel, nbins=nbins, xlim=\n xlim_tmp, store=False)\n', (34066, 34154), False, 'from verticapy.learn.neighbors import KernelDensity\n'), ((78840, 78910), 'verticapy.plot.hist', 'hist', (['self', 'method', 'of', 'max_cardinality', 'nbins', 'h'], {'ax': 'ax'}), '(self, method, of, max_cardinality, nbins, h, ax=ax, **style_kwds)\n', (78844, 78910), False, 'from verticapy.plot import hist\n'), ((116534, 116611), 'verticapy.plot.pie', 'pie', (['self', 'method', 'of', 'max_cardinality', 'h', 'donut', 'rose'], {'ax': 'None'}), '(self, method, of, max_cardinality, h, donut, rose, ax=None, **style_kwds)\n', (116537, 116611), False, 'from verticapy.plot import pie\n'), ((118679, 118755), 'verticapy.plot.ts_plot', 'ts_plot', (['self', 'ts', 'by', 'start_date', 'end_date', 'area', 'step'], {'ax': 'ax'}), '(self, ts, by, start_date, end_date, area, step, ax=ax, **style_kwds)\n', (118686, 118755), False, 'from verticapy.plot import ts_plot\n'), ((122295, 122384), 'verticapy.plot.range_curve_vdf', 'range_curve_vdf', (['self', 'ts', 'q', 'start_date', 'end_date', 'plot_median'], {'ax': 'ax'}), '(self, ts, q, start_date, end_date, plot_median, ax=ax, **\n style_kwds)\n', (122310, 122384), False, 'from verticapy.plot import range_curve_vdf\n'), ((128550, 128641), 'verticapy.plot.spider', 'spider_plot', (['self.parent', 'columns', 'method', 'of', 'max_cardinality', 'h'], {'ax': 'ax'}), '(self.parent, columns, method, of, max_cardinality, h, ax=ax, **\n style_kwds)\n', (128561, 128641), True, 'from verticapy.plot import spider as spider_plot\n'), ((32354, 32366), 'verticapy.plot.gen_colors', 'gen_colors', ([], {}), '()\n', (32364, 32366), False, 'from verticapy.plot import gen_colors\n'), ((86794, 86833), 'warnings.warn', 'warnings.warn', (['warning_message', 'Warning'], {}), '(warning_message, Warning)\n', (86807, 86833), False, 'import math, re, decimal, warnings, datetime\n'), ((91370, 91397), 'sys.getsizeof', 'sys.getsizeof', (['self.catalog'], {}), '(self.catalog)\n', (91383, 91397), False, 'import sys\n'), ((91463, 91482), 'sys.getsizeof', 'sys.getsizeof', (['elem'], {}), '(elem)\n', (91476, 91482), False, 'import sys\n'), ((96570, 96609), 'warnings.warn', 'warnings.warn', (['warning_message', 'Warning'], {}), '(warning_message, Warning)\n', (96583, 96609), False, 'import math, re, decimal, warnings, datetime\n'), ((46007, 46044), 'verticapy.learn.ensemble.RandomForestRegressor', 'RandomForestRegressor', (['tmp_model_name'], {}), '(tmp_model_name)\n', (46028, 46044), False, 'from verticapy.learn.ensemble import RandomForestClassifier, RandomForestRegressor\n'), ((46087, 46125), 'verticapy.learn.ensemble.RandomForestClassifier', 'RandomForestClassifier', (['tmp_model_name'], {}), '(tmp_model_name)\n', (46109, 46125), False, 'from verticapy.learn.ensemble import 
RandomForestClassifier, RandomForestRegressor\n'), ((64488, 64527), 'warnings.warn', 'warnings.warn', (['warning_message', 'Warning'], {}), '(warning_message, Warning)\n', (64501, 64527), False, 'import math, re, decimal, warnings, datetime\n'), ((91320, 91355), 'sys.getsizeof', 'sys.getsizeof', (['self.transformations'], {}), '(self.transformations)\n', (91333, 91355), False, 'import sys\n'), ((113257, 113275), 'math.floor', 'math.floor', (['best_h'], {}), '(best_h)\n', (113267, 113275), False, 'import math, re, decimal, warnings, datetime\n'), ((71394, 71404), 'verticapy.plot.gen_cmap', 'gen_cmap', ([], {}), '()\n', (71402, 71404), False, 'from verticapy.plot import gen_cmap\n'), ((71551, 71563), 'verticapy.plot.gen_colors', 'gen_colors', ([], {}), '()\n', (71561, 71563), False, 'from verticapy.plot import gen_colors\n'), ((91246, 91265), 'sys.getsizeof', 'sys.getsizeof', (['self'], {}), '(self)\n', (91259, 91265), False, 'import sys\n'), ((91280, 91305), 'sys.getsizeof', 'sys.getsizeof', (['self.alias'], {}), '(self.alias)\n', (91293, 91305), False, 'import sys\n'), ((97070, 97109), 'warnings.warn', 'warnings.warn', (['warning_message', 'Warning'], {}), '(warning_message, Warning)\n', (97083, 97109), False, 'import math, re, decimal, warnings, datetime\n'), ((101480, 101519), 'warnings.warn', 'warnings.warn', (['warning_message', 'Warning'], {}), '(warning_message, Warning)\n', (101493, 101519), False, 'import math, re, decimal, warnings, datetime\n'), ((102343, 102382), 'warnings.warn', 'warnings.warn', (['warning_message', 'Warning'], {}), '(warning_message, Warning)\n', (102356, 102382), False, 'import math, re, decimal, warnings, datetime\n'), ((112765, 112783), 'math.log', 'math.log', (['count', '(2)'], {}), '(count, 2)\n', (112773, 112783), False, 'import math, re, decimal, warnings, datetime\n'), ((102841, 102880), 'warnings.warn', 'warnings.warn', (['warning_message', 'Warning'], {}), '(warning_message, Warning)\n', (102854, 102880), False, 'import math, re, decimal, warnings, datetime\n'), ((50296, 50309), 'math.floor', 'math.floor', (['h'], {}), '(h)\n', (50306, 50309), False, 'import math, re, decimal, warnings, datetime\n'), ((97966, 97990), 'math.isnan', 'math.isnan', (['result[i][2]'], {}), '(result[i][2])\n', (97976, 97990), False, 'import math, re, decimal, warnings, datetime\n')] |
MagicSword/Booktags | booktags/flaskapp/book/views.py | 44142e19aec5ce75266233964d7ab21503bbe57c | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
example.py
~~~~~~~~~
A simple command line application to run flask apps.
:copyright: 2019 Miller
:license: BSD-3-Clause
"""
# Known bugs that can't be fixed here:
# - synopsis() cannot be prevented from clobbering existing
# loaded modules.
# - If the __file__ attribute on a module is a relative path and
# the current directory is changed with os.chdir(), an incorrect
# path will be displayed.
from flask import render_template, redirect, request, url_for, flash,jsonify,current_app
from flask_login import login_user, logout_user, login_required, current_user
from . import book
from flask_sqlalchemy import get_debug_queries
from sqlalchemy.sql.expression import cast
from datatables import ColumnDT, DataTables
from .. import auth
from .. import db
from .forms import EditBookForm, HackmdMeta
# from booktags.db.basemodels import Book
from booktags.flaskapp.model.models import BookMain
# --------------------------------------------------------- common routines
@book.after_app_request
def after_request(response):
for query in get_debug_queries():
if query.duration >= current_app.config['PROJECT_SLOW_DB_QUERY_TIME']:
current_app.logger.warning(
'Slow query: %s\nParameters: %s\nDuration: %fs\nContext: %s\n'
% (query.statement, query.parameters, query.duration,
query.context))
return response
@book.route('/', methods=['GET', 'POST'])
def index():
# books=BookMain.get_all_book()
query = BookMain.query
page = request.args.get('page', 1, type=int)
pagination = query.order_by(cast(BookMain.id, db.Integer)).paginate(
page, per_page=current_app.config['PROJECT_BOOKS_PER_PAGE'],
error_out=False)
books = pagination.items
return render_template('book/index.html',books=books,pagination=pagination)
# @book.route('/list/', methods=['GET', 'POST'])
# def list_book():
# """
#
# :param field: col name
# :param order: asc or desc
# :return: renew query
# """
# books = BookMain.get_all_book()
# return render_template('book/list_book.html',books=books)
@book.route("/list")
def list_book():
"""List users with DataTables <= 1.10.x."""
return render_template('book/list_book.html')
@book.route('/data', methods=['GET', 'POST'])
def data():
"""Return server side data."""
# defining columns
# - explicitly cast date to string, so string searching the date
# will search a date formatted equal to how it is presented
# in the table
columns = [
# ColumnDT(cast(BookMain.id, db.Integer)),
ColumnDT(BookMain.id),
ColumnDT(BookMain.isbn),
ColumnDT(BookMain.title_short),
ColumnDT(BookMain.title),
ColumnDT(BookMain.catalogue),
ColumnDT(BookMain.cutter),
ColumnDT(BookMain.pub_year),
ColumnDT(BookMain.copy_info)
# ColumnDT(BookMain.get_link),
# ColumnDT(BookMain.note),
# ColumnDT(BookMain.reprint),
# ColumnDT(BookMain.removed),
# ColumnDT(BookMain.keepsite)
]
# defining the initial query depending on your purpose
query = db.session.query().select_from(BookMain)
# GET parameters
params = request.args.to_dict()
# instantiating a DataTable for the query and table needed
rowTable = DataTables(params, query, columns)
# returns what is needed by DataTable
return jsonify(rowTable.output_result())
@book.route('/get/<int:id>', methods=['GET', 'POST'])
def get_book(id):
return f"Hello book index : {id}"
@book.route('/post/', methods=['GET', 'POST'])
def post_book():
"""
post new book entry
:return:
"""
book = BookMain.query.all()
id = int(book[-1].id) + 1
print(f"id is : {id}")
form = EditBookForm()
    if form.validate_on_submit():
        book = BookMain()  # create a fresh record; reusing the query result list above was a bug
        book.id = form.id.data
book.isbn = form.isbn.data
book.title_short = form.title_short.data
book.title = form.title.data
book.catalogue = form.catalogue.data
book.cutter = form.cutter.data
book.pub_year = form.pub_year.data
book.copy_info = form.copy_info.data
book.get_link = form.get_link.data
book.note = form.note.data
book.reprint = form.reprint.data
book.removed = form.removed.data
book.keepsite = form.keepsite.data
db.session.add(book)
db.session.commit()
flash('Your book data has been added.', 'success')
return redirect(url_for('book.index'))
form.id.data = id
return render_template('book/edit_book.html', form=form)
@book.route('/edit/<int:id>', methods=['GET', 'POST'])
def edit_book(id):
"""
edit , put book data
:param id:
:return:
"""
form = EditBookForm()
book = BookMain.query.filter_by(id=id).first_or_404()
if form.validate_on_submit():
# book.id = form.id.data
book.isbn = form.isbn.data
book.title_short = form.title_short.data
book.title = form.title.data
book.catalogue = form.catalogue.data
book.cutter = form.cutter.data
book.pub_year = form.pub_year.data
book.copy_info = form.copy_info.data
book.get_link = form.get_link.data
book.note = form.note.data
book.reprint = form.reprint.data
book.removed = form.removed.data
book.keepsite = form.keepsite.data
db.session.add(book)
db.session.commit()
flash('Your book data has been updated.', 'success')
return redirect(url_for('book.index'))
form.id.data = book.id
form.isbn.data = book.isbn
form.title_short.data = book.title_short
form.title.data = book.title
form.catalogue.data = book.catalogue
form.cutter.data = book.cutter
form.pub_year.data = book.pub_year
form.copy_info.data = book.copy_info
form.get_link.data = book.get_link
form.note.data = book.note
form.reprint.data = book.reprint
form.removed.data = book.removed
form.keepsite.data = book.keepsite
return render_template('book/edit_book.html', form=form)
@book.route('/del/<int:id>', methods=['GET', 'POST'])
def del_book(id):
return f"Hello book index: del {id}"
@book.route('/hackmdmeta', methods=['GET', 'POST'])
def hackmd_meta():
"""
:return:
"""
from booktags.vendor.hackmd_meta import get_hackmdmeta
form = HackmdMeta()
if form.validate_on_submit():
booksn = str(form.booksn.data)
# print(f"booksn is : {booksn}")
temp = get_hackmdmeta(booksn)
# print(temp)
form.body.data = temp
# flash('Your book data has been updated.', 'success')
# return redirect(url_for('book.hackmd_meta'))
return render_template('book/hackmd_meta.html',form=form)
if __name__ == '__main__':
pass
| [((1136, 1155), 'flask_sqlalchemy.get_debug_queries', 'get_debug_queries', ([], {}), '()\n', (1153, 1155), False, 'from flask_sqlalchemy import get_debug_queries\n'), ((1611, 1648), 'flask.request.args.get', 'request.args.get', (['"""page"""', '(1)'], {'type': 'int'}), "('page', 1, type=int)\n", (1627, 1648), False, 'from flask import render_template, redirect, request, url_for, flash, jsonify, current_app\n'), ((1856, 1926), 'flask.render_template', 'render_template', (['"""book/index.html"""'], {'books': 'books', 'pagination': 'pagination'}), "('book/index.html', books=books, pagination=pagination)\n", (1871, 1926), False, 'from flask import render_template, redirect, request, url_for, flash, jsonify, current_app\n'), ((2304, 2342), 'flask.render_template', 'render_template', (['"""book/list_book.html"""'], {}), "('book/list_book.html')\n", (2319, 2342), False, 'from flask import render_template, redirect, request, url_for, flash, jsonify, current_app\n'), ((3315, 3337), 'flask.request.args.to_dict', 'request.args.to_dict', ([], {}), '()\n', (3335, 3337), False, 'from flask import render_template, redirect, request, url_for, flash, jsonify, current_app\n'), ((3418, 3452), 'datatables.DataTables', 'DataTables', (['params', 'query', 'columns'], {}), '(params, query, columns)\n', (3428, 3452), False, 'from datatables import ColumnDT, DataTables\n'), ((3782, 3802), 'booktags.flaskapp.model.models.BookMain.query.all', 'BookMain.query.all', ([], {}), '()\n', (3800, 3802), False, 'from booktags.flaskapp.model.models import BookMain\n'), ((4644, 4693), 'flask.render_template', 'render_template', (['"""book/edit_book.html"""'], {'form': 'form'}), "('book/edit_book.html', form=form)\n", (4659, 4693), False, 'from flask import render_template, redirect, request, url_for, flash, jsonify, current_app\n'), ((6139, 6188), 'flask.render_template', 'render_template', (['"""book/edit_book.html"""'], {'form': 'form'}), "('book/edit_book.html', form=form)\n", (6154, 6188), False, 'from flask import render_template, redirect, request, url_for, flash, jsonify, current_app\n'), ((6829, 6880), 'flask.render_template', 'render_template', (['"""book/hackmd_meta.html"""'], {'form': 'form'}), "('book/hackmd_meta.html', form=form)\n", (6844, 6880), False, 'from flask import render_template, redirect, request, url_for, flash, jsonify, current_app\n'), ((2695, 2716), 'datatables.ColumnDT', 'ColumnDT', (['BookMain.id'], {}), '(BookMain.id)\n', (2703, 2716), False, 'from datatables import ColumnDT, DataTables\n'), ((2726, 2749), 'datatables.ColumnDT', 'ColumnDT', (['BookMain.isbn'], {}), '(BookMain.isbn)\n', (2734, 2749), False, 'from datatables import ColumnDT, DataTables\n'), ((2759, 2789), 'datatables.ColumnDT', 'ColumnDT', (['BookMain.title_short'], {}), '(BookMain.title_short)\n', (2767, 2789), False, 'from datatables import ColumnDT, DataTables\n'), ((2799, 2823), 'datatables.ColumnDT', 'ColumnDT', (['BookMain.title'], {}), '(BookMain.title)\n', (2807, 2823), False, 'from datatables import ColumnDT, DataTables\n'), ((2833, 2861), 'datatables.ColumnDT', 'ColumnDT', (['BookMain.catalogue'], {}), '(BookMain.catalogue)\n', (2841, 2861), False, 'from datatables import ColumnDT, DataTables\n'), ((2871, 2896), 'datatables.ColumnDT', 'ColumnDT', (['BookMain.cutter'], {}), '(BookMain.cutter)\n', (2879, 2896), False, 'from datatables import ColumnDT, DataTables\n'), ((2906, 2933), 'datatables.ColumnDT', 'ColumnDT', (['BookMain.pub_year'], {}), '(BookMain.pub_year)\n', (2914, 2933), False, 'from datatables import ColumnDT, 
DataTables\n'), ((2943, 2971), 'datatables.ColumnDT', 'ColumnDT', (['BookMain.copy_info'], {}), '(BookMain.copy_info)\n', (2951, 2971), False, 'from datatables import ColumnDT, DataTables\n'), ((4513, 4563), 'flask.flash', 'flash', (['"""Your book data has been added."""', '"""success"""'], {}), "('Your book data has been added.', 'success')\n", (4518, 4563), False, 'from flask import render_template, redirect, request, url_for, flash, jsonify, current_app\n'), ((5553, 5605), 'flask.flash', 'flash', (['"""Your book data has been updated."""', '"""success"""'], {}), "('Your book data has been updated.', 'success')\n", (5558, 5605), False, 'from flask import render_template, redirect, request, url_for, flash, jsonify, current_app\n'), ((6622, 6644), 'booktags.vendor.hackmd_meta.get_hackmdmeta', 'get_hackmdmeta', (['booksn'], {}), '(booksn)\n', (6636, 6644), False, 'from booktags.vendor.hackmd_meta import get_hackmdmeta\n'), ((1248, 1417), 'flask.current_app.logger.warning', 'current_app.logger.warning', (['("""Slow query: %s\nParameters: %s\nDuration: %fs\nContext: %s\n""" % (query.\n statement, query.parameters, query.duration, query.context))'], {}), '(\n """Slow query: %s\nParameters: %s\nDuration: %fs\nContext: %s\n""" % (query\n .statement, query.parameters, query.duration, query.context))\n', (1274, 1417), False, 'from flask import render_template, redirect, request, url_for, flash, jsonify, current_app\n'), ((4588, 4609), 'flask.url_for', 'url_for', (['"""book.index"""'], {}), "('book.index')\n", (4595, 4609), False, 'from flask import render_template, redirect, request, url_for, flash, jsonify, current_app\n'), ((4877, 4908), 'booktags.flaskapp.model.models.BookMain.query.filter_by', 'BookMain.query.filter_by', ([], {'id': 'id'}), '(id=id)\n', (4901, 4908), False, 'from booktags.flaskapp.model.models import BookMain\n'), ((5630, 5651), 'flask.url_for', 'url_for', (['"""book.index"""'], {}), "('book.index')\n", (5637, 5651), False, 'from flask import render_template, redirect, request, url_for, flash, jsonify, current_app\n'), ((1681, 1710), 'sqlalchemy.sql.expression.cast', 'cast', (['BookMain.id', 'db.Integer'], {}), '(BookMain.id, db.Integer)\n', (1685, 1710), False, 'from sqlalchemy.sql.expression import cast\n')] |
Snider/narwhallet | narwhallet/core/kws/http/enumerations/mediatypes.py | 0d528763c735f1e68b8264e302854d41e7cf1956 | from enum import Enum
class content_type(Enum):
# https://www.iana.org/assignments/media-types/media-types.xhtml
css = 'text/css'
gif = 'image/gif'
htm = 'text/html'
html = 'text/html'
ico = 'image/bmp'
jpg = 'image/jpeg'
jpeg = 'image/jpeg'
js = 'application/javascript'
png = 'image/png'
txt = 'text/plain; charset=us-ascii'
json = 'application/json'
svg = 'image/svg+xml'
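    # Example lookup: content_type['json'].value evaluates to 'application/json'.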
| [] |
drix00/ElectronDiffraction | electrondiffraction/__init__.py | 9dc258d90d0b73745b904b1bb6e1e3e794403a27 | # -*- coding: utf-8 -*-
__author__ = """Hendrix Demers"""
__email__ = '[email protected]'
__version__ = '0.1.0'
| [] |
markembling/storelet | storelet.py | 9951368e2f143855d2c14509bdb8cf796d6e54b8 | import os
import logging
from tempfile import mkstemp, mkdtemp
from shutil import rmtree
from zipfile import ZipFile, ZIP_DEFLATED
from datetime import datetime
from boto.s3.connection import S3Connection
from boto.s3.key import Key
__version__ = "0.1.8"
__author__ = "Mark Embling"
__email__ = "[email protected]"
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
LOGGING_DEFAULTS = {"level": logging.INFO,
"format": "%(asctime)s [%(levelname)s]: %(message)s"}
def setup_logging(**kwargs):
"""Convenience function for setting up some sane logging defaults"""
    opts = dict(LOGGING_DEFAULTS, **kwargs)  # merge defaults with caller overrides
logging.basicConfig(**opts)
class ZipBackup(object):
"""
A compressed ZIP file backup
Note: large inclusion operations can sometimes take time as files
are compressed on the fly. This prevents all the files being copied
to a temporary location (and using unnecessary extra space) and
storing up the need for a potentially large compression at the end.
"""
def __init__(self, name):
self.name = name
_, self._path = mkstemp()
logger.debug("Created temporary file %s" % self._path)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def close(self):
os.remove(self._path)
logger.debug("Removed temporary file %s" % self._path)
def include_directory(self, path, preserve_paths=False, name=None):
"""Add the contents of a directory to the backup"""
path = os.path.abspath(path)
logger.debug("Adding directory %s" % path)
with ZipFile(self._path, 'a', ZIP_DEFLATED, allowZip64=True) as zipfile:
for base,dirs,files in os.walk(path):
logger.debug("Walking directory %s" % path)
for file in files:
filename = os.path.join(base, file)
try:
zipfile.write(filename,
self._get_filename_for_archive(
path, filename, preserve_paths, name))
logger.info("Added file %s" % filename)
except:
logger.warn("Could not add file %s" % file, exc_info=True)
logger.debug("Finished directory %s" % path)
def save_to_s3(self, bucket, access_key, secret_key, **kwargs):
"""Save the backup to Amazon S3"""
logger.info("Saving to S3 in '%s' bucket" % bucket)
conn = S3Connection(access_key, secret_key, **kwargs)
bucket = conn.get_bucket(bucket)
key = Key(bucket)
key.key = '%s_%s.zip' % \
(self.name, datetime.now().strftime("%Y%m%d%H%M%S"))
key.set_contents_from_filename(self._path)
logger.info("Saving to S3 done %s" % key.key)
def include_new_dir(self, name):
"""Add a new empty directory to the backup"""
return BackupIncludedDirectory(name, self)
def _get_filename_for_archive(self, directory, filename,
preserve_paths, name):
if not preserve_paths:
filename = filename.replace(directory, "")
if name is not None:
filename = name + os.sep + filename
return filename
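# Usage sketch (bucket name and credentials below are placeholders):
#
#     with ZipBackup("site-backup") as backup:
#         backup.include_directory("/var/www", preserve_paths=True)
#         with backup.include_new_dir("db-dump") as dump_dir:
#             pass  # write database dump files into str(dump_dir)
#         backup.save_to_s3("my-bucket", "ACCESS_KEY", "SECRET_KEY")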
class BackupIncludedDirectory(object):
"""A new directory which is subsequently added to the backup"""
def __init__(self, name, owner):
self.name = name
self.path = mkdtemp()
self._owner = owner
logger.debug("Created temporary directory %s" % self.path)
def __str__(self):
return self.path
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self._owner.include_directory(self.path, preserve_paths=False,
name=self.name)
rmtree(self.path)
logger.debug("Removed temporary directory %s" % self.path)
| [((330, 357), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (347, 357), False, 'import logging\n'), ((376, 397), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (395, 397), False, 'import logging\n'), ((683, 710), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '(**opts)\n', (702, 710), False, 'import logging\n'), ((1153, 1162), 'tempfile.mkstemp', 'mkstemp', ([], {}), '()\n', (1160, 1162), False, 'from tempfile import mkstemp, mkdtemp\n'), ((1392, 1413), 'os.remove', 'os.remove', (['self._path'], {}), '(self._path)\n', (1401, 1413), False, 'import os\n'), ((1629, 1650), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (1644, 1650), False, 'import os\n'), ((2611, 2657), 'boto.s3.connection.S3Connection', 'S3Connection', (['access_key', 'secret_key'], {}), '(access_key, secret_key, **kwargs)\n', (2623, 2657), False, 'from boto.s3.connection import S3Connection\n'), ((2713, 2724), 'boto.s3.key.Key', 'Key', (['bucket'], {}), '(bucket)\n', (2716, 2724), False, 'from boto.s3.key import Key\n'), ((3587, 3596), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (3594, 3596), False, 'from tempfile import mkstemp, mkdtemp\n'), ((3997, 4014), 'shutil.rmtree', 'rmtree', (['self.path'], {}), '(self.path)\n', (4003, 4014), False, 'from shutil import rmtree\n'), ((1715, 1770), 'zipfile.ZipFile', 'ZipFile', (['self._path', '"""a"""', 'ZIP_DEFLATED'], {'allowZip64': '(True)'}), "(self._path, 'a', ZIP_DEFLATED, allowZip64=True)\n", (1722, 1770), False, 'from zipfile import ZipFile, ZIP_DEFLATED\n'), ((1818, 1831), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (1825, 1831), False, 'import os\n'), ((1959, 1983), 'os.path.join', 'os.path.join', (['base', 'file'], {}), '(base, file)\n', (1971, 1983), False, 'import os\n'), ((2783, 2797), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2795, 2797), False, 'from datetime import datetime\n')] |
jimforit/lagou | apps/delivery/migrations/0001_initial.py | 165593a15597012092b5e0ba34158fbc1d1c213d | # Generated by Django 2.0.2 on 2019-03-08 13:03
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Delivery',
fields=[
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
('id', models.AutoField(primary_key=True, serialize=False, verbose_name='投递ID')),
('delivery_status', models.CharField(choices=[('DD', '待定'), ('YQ', '邀请面试'), ('WJ', '婉拒')], default='DD', max_length=2, verbose_name='投递状态')),
],
options={
'verbose_name': '面试',
'verbose_name_plural': '面试',
},
),
]
| [((313, 373), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""创建时间"""'}), "(auto_now_add=True, verbose_name='创建时间')\n", (333, 373), False, 'from django.db import migrations, models\n'), ((408, 464), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""更新时间"""'}), "(auto_now=True, verbose_name='更新时间')\n", (428, 464), False, 'from django.db import migrations, models\n'), ((497, 552), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""删除标记"""'}), "(default=False, verbose_name='删除标记')\n", (516, 552), False, 'from django.db import migrations, models\n'), ((578, 650), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""投递ID"""'}), "(primary_key=True, serialize=False, verbose_name='投递ID')\n", (594, 650), False, 'from django.db import migrations, models\n'), ((689, 812), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('DD', '待定'), ('YQ', '邀请面试'), ('WJ', '婉拒')]", 'default': '"""DD"""', 'max_length': '(2)', 'verbose_name': '"""投递状态"""'}), "(choices=[('DD', '待定'), ('YQ', '邀请面试'), ('WJ', '婉拒')],\n default='DD', max_length=2, verbose_name='投递状态')\n", (705, 812), False, 'from django.db import migrations, models\n')] |
vargad/exercises | elementary/date-and-time-convertor.py | 1a2fc2557672749d590ebdf596f99f53405320a1 | #!/usr/bin/env python3
def date_time(time):
months = ["January", "February", "March", "April", "May", "June",
"July", "August", "September", "October", "November", "December"]
hour, minute = int(time[11:13]), int(time[14:16])
return f"{int(time[0:2])} {months[int(time[3:5])-1]} {time[6:10]} year {hour} hour{'s' if hour!=1 else ''} {minute} minute{'s' if minute!=1 else ''}"
if __name__ == '__main__':
print(date_time("01.01.2018 00:00"))
assert date_time("01.01.2018 00:00") == "1 January 2018 year 0 hours 0 minutes"
assert date_time("04.08.1984 08:15") == "4 August 1984 year 8 hours 15 minutes"
assert date_time("17.12.1990 07:42") == "17 December 1990 year 7 hours 42 minutes"
| [] |
snapperVibes/lbry-sdk | lbry/wallet/server/peer.py | 77a51d1ad43404e5dc52af715a7bebfaeb3fee16 | # Copyright (c) 2017, Neil Booth
#
# All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Representation of a peer server."""
from ipaddress import ip_address
from lbry.wallet.server import util
from lbry.wallet.server.util import cachedproperty
from typing import Dict
class Peer:
# Protocol version
ATTRS = ('host', 'features',
# metadata
'source', 'ip_addr',
'last_good', 'last_try', 'try_count')
FEATURES = ('pruning', 'server_version', 'protocol_min', 'protocol_max',
'ssl_port', 'tcp_port')
# This should be set by the application
DEFAULT_PORTS: Dict[str, int] = {}
def __init__(self, host, features, source='unknown', ip_addr=None,
last_good=0, last_try=0, try_count=0):
"""Create a peer given a host name (or IP address as a string),
a dictionary of features, and a record of the source."""
assert isinstance(host, str)
assert isinstance(features, dict)
assert host in features.get('hosts', {})
self.host = host
self.features = features.copy()
# Canonicalize / clean-up
for feature in self.FEATURES:
self.features[feature] = getattr(self, feature)
# Metadata
self.source = source
self.ip_addr = ip_addr
# last_good represents the last connection that was
# successful *and* successfully verified, at which point
# try_count is set to 0. Failure to connect or failure to
# verify increment the try_count.
self.last_good = last_good
self.last_try = last_try
self.try_count = try_count
# Transient, non-persisted metadata
self.bad = False
self.other_port_pairs = set()
self.status = 2
@classmethod
def peers_from_features(cls, features, source):
peers = []
if isinstance(features, dict):
hosts = features.get('hosts')
if isinstance(hosts, dict):
peers = [Peer(host, features, source=source)
for host in hosts if isinstance(host, str)]
return peers
@classmethod
def deserialize(cls, item):
"""Deserialize from a dictionary."""
return cls(**item)
def matches(self, peers):
"""Return peers whose host matches our hostname or IP address.
Additionally include all peers whose IP address matches our
hostname if that is an IP address.
"""
candidates = (self.host.lower(), self.ip_addr)
return [peer for peer in peers
if peer.host.lower() in candidates
or peer.ip_addr == self.host]
def __str__(self):
return self.host
def update_features(self, features):
"""Update features in-place."""
try:
tmp = Peer(self.host, features)
except Exception:
pass
else:
self.update_features_from_peer(tmp)
def update_features_from_peer(self, peer):
if peer != self:
self.features = peer.features
for feature in self.FEATURES:
setattr(self, feature, getattr(peer, feature))
def connection_port_pairs(self):
"""Return a list of (kind, port) pairs to try when making a
connection."""
# Use a list not a set - it's important to try the registered
# ports first.
pairs = [('SSL', self.ssl_port), ('TCP', self.tcp_port)]
while self.other_port_pairs:
pairs.append(self.other_port_pairs.pop())
return [pair for pair in pairs if pair[1]]
def mark_bad(self):
"""Mark as bad to avoid reconnects but also to remember for a
while."""
self.bad = True
def check_ports(self, other):
"""Remember differing ports in case server operator changed them
or removed one."""
if other.ssl_port != self.ssl_port:
self.other_port_pairs.add(('SSL', other.ssl_port))
if other.tcp_port != self.tcp_port:
self.other_port_pairs.add(('TCP', other.tcp_port))
return bool(self.other_port_pairs)
@cachedproperty
def is_tor(self):
return self.host.endswith('.onion')
@cachedproperty
def is_valid(self):
ip = self.ip_address
if ip:
return ((ip.is_global or ip.is_private)
and not (ip.is_multicast or ip.is_unspecified))
return util.is_valid_hostname(self.host)
@cachedproperty
def is_public(self):
ip = self.ip_address
if ip:
return self.is_valid and not ip.is_private
else:
return self.is_valid and self.host != 'localhost'
@cachedproperty
def ip_address(self):
"""The host as a python ip_address object, or None."""
try:
return ip_address(self.host)
except ValueError:
return None
def bucket(self):
if self.is_tor:
return 'onion'
if not self.ip_addr:
return ''
return tuple(self.ip_addr.split('.')[:2])
def serialize(self):
"""Serialize to a dictionary."""
return {attr: getattr(self, attr) for attr in self.ATTRS}
def _port(self, key):
hosts = self.features.get('hosts')
if isinstance(hosts, dict):
host = hosts.get(self.host)
port = self._integer(key, host)
if port and 0 < port < 65536:
return port
return None
def _integer(self, key, d=None):
d = d or self.features
result = d.get(key) if isinstance(d, dict) else None
if isinstance(result, str):
try:
result = int(result)
except ValueError:
pass
return result if isinstance(result, int) else None
def _string(self, key):
result = self.features.get(key)
return result if isinstance(result, str) else None
@cachedproperty
def genesis_hash(self):
"""Returns None if no SSL port, otherwise the port as an integer."""
return self._string('genesis_hash')
@cachedproperty
def ssl_port(self):
"""Returns None if no SSL port, otherwise the port as an integer."""
return self._port('ssl_port')
@cachedproperty
def tcp_port(self):
"""Returns None if no TCP port, otherwise the port as an integer."""
return self._port('tcp_port')
@cachedproperty
def server_version(self):
"""Returns the server version as a string if known, otherwise None."""
return self._string('server_version')
@cachedproperty
def pruning(self):
"""Returns the pruning level as an integer. None indicates no
pruning."""
pruning = self._integer('pruning')
if pruning and pruning > 0:
return pruning
return None
def _protocol_version_string(self, key):
version_str = self.features.get(key)
ptuple = util.protocol_tuple(version_str)
return util.version_string(ptuple)
@cachedproperty
def protocol_min(self):
"""Minimum protocol version as a string, e.g., 1.0"""
return self._protocol_version_string('protocol_min')
@cachedproperty
def protocol_max(self):
"""Maximum protocol version as a string, e.g., 1.1"""
return self._protocol_version_string('protocol_max')
def to_tuple(self):
"""The tuple ((ip, host, details) expected in response
to a peers subscription."""
details = self.real_name().split()[1:]
return (self.ip_addr or self.host, self.host, details)
def real_name(self):
"""Real name of this peer as used on IRC."""
def port_text(letter, port):
if port == self.DEFAULT_PORTS.get(letter):
return letter
else:
return letter + str(port)
parts = [self.host, 'v' + self.protocol_max]
if self.pruning:
parts.append(f'p{self.pruning:d}')
for letter, port in (('s', self.ssl_port), ('t', self.tcp_port)):
if port:
parts.append(port_text(letter, port))
return ' '.join(parts)
@classmethod
def from_real_name(cls, real_name, source):
"""Real name is a real name as on IRC, such as
"erbium1.sytes.net v1.0 s t"
Returns an instance of this Peer class.
"""
host = 'nohost'
features = {}
ports = {}
for n, part in enumerate(real_name.split()):
if n == 0:
host = part
continue
if part[0] in ('s', 't'):
if len(part) == 1:
port = cls.DEFAULT_PORTS[part[0]]
else:
port = part[1:]
if part[0] == 's':
ports['ssl_port'] = port
else:
ports['tcp_port'] = port
elif part[0] == 'v':
features['protocol_max'] = features['protocol_min'] = part[1:]
elif part[0] == 'p':
features['pruning'] = part[1:]
features.update(ports)
features['hosts'] = {host: ports}
return cls(host, features, source)
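    # Round-trip sketch (assumes the application has populated Peer.DEFAULT_PORTS):
    # Peer.from_real_name('erbium1.sytes.net v1.0 s t', 'irc').real_name() yields
    # the same 'erbium1.sytes.net v1.0 s t' string.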
| [((5528, 5561), 'lbry.wallet.server.util.is_valid_hostname', 'util.is_valid_hostname', (['self.host'], {}), '(self.host)\n', (5550, 5561), False, 'from lbry.wallet.server import util\n'), ((8080, 8112), 'lbry.wallet.server.util.protocol_tuple', 'util.protocol_tuple', (['version_str'], {}), '(version_str)\n', (8099, 8112), False, 'from lbry.wallet.server import util\n'), ((8128, 8155), 'lbry.wallet.server.util.version_string', 'util.version_string', (['ptuple'], {}), '(ptuple)\n', (8147, 8155), False, 'from lbry.wallet.server import util\n'), ((5925, 5946), 'ipaddress.ip_address', 'ip_address', (['self.host'], {}), '(self.host)\n', (5935, 5946), False, 'from ipaddress import ip_address\n')] |
aomann/core | tests/components/deconz/test_diagnostics.py | 5e71e7b775461cd4849c36075c6a1459a7d0ad22 | """Test deCONZ diagnostics."""
from unittest.mock import patch
from pydeconz.websocket import STATE_RUNNING
from homeassistant.const import Platform
from .test_gateway import DECONZ_CONFIG, setup_deconz_integration
from tests.components.diagnostics import get_diagnostics_for_config_entry
async def test_entry_diagnostics(
hass, hass_client, aioclient_mock, mock_deconz_websocket
):
"""Test config entry diagnostics."""
config_entry = await setup_deconz_integration(hass, aioclient_mock)
await mock_deconz_websocket(state=STATE_RUNNING)
await hass.async_block_till_done()
with patch(
"homeassistant.helpers.system_info.async_get_system_info",
return_value={"get_system_info": "fake data"},
):
assert await get_diagnostics_for_config_entry(
hass, hass_client, config_entry
) == {
"home_assistant": {"get_system_info": "fake data"},
"config_entry": dict(config_entry.data),
"deconz_config": DECONZ_CONFIG,
"websocket_state": STATE_RUNNING,
"deconz_ids": {},
"entities": {
str(Platform.ALARM_CONTROL_PANEL): [],
str(Platform.BINARY_SENSOR): [],
str(Platform.CLIMATE): [],
str(Platform.COVER): [],
str(Platform.FAN): [],
str(Platform.LIGHT): [],
str(Platform.LOCK): [],
str(Platform.NUMBER): [],
str(Platform.SENSOR): [],
str(Platform.SIREN): [],
str(Platform.SWITCH): [],
},
"events": {},
"alarm_systems": {},
"groups": {},
"lights": {},
"scenes": {},
"sensors": {},
}
| [((610, 725), 'unittest.mock.patch', 'patch', (['"""homeassistant.helpers.system_info.async_get_system_info"""'], {'return_value': "{'get_system_info': 'fake data'}"}), "('homeassistant.helpers.system_info.async_get_system_info',\n return_value={'get_system_info': 'fake data'})\n", (615, 725), False, 'from unittest.mock import patch\n'), ((767, 832), 'tests.components.diagnostics.get_diagnostics_for_config_entry', 'get_diagnostics_for_config_entry', (['hass', 'hass_client', 'config_entry'], {}), '(hass, hass_client, config_entry)\n', (799, 832), False, 'from tests.components.diagnostics import get_diagnostics_for_config_entry\n')] |
l1zp/jax-md | jax_md/partition.py | 2440794aebb1c77116459e2ec2051d537a94ecf4 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Code to transform functions on individual tuples of particles to sets."""
from absl import logging
from functools import reduce, partial
from collections import namedtuple
from enum import Enum
from typing import Any, Callable, Optional, Dict, Tuple, Generator, Union
import math
from operator import mul
import numpy as onp
from jax import lax
from jax import ops
from jax import jit, vmap, eval_shape
from jax.abstract_arrays import ShapedArray
from jax.interpreters import partial_eval as pe
import jax.numpy as jnp
from jax_md import quantity, space, dataclasses, util
import jraph
# Types
Array = util.Array
f32 = util.f32
f64 = util.f64
i32 = util.i32
i64 = util.i64
Box = space.Box
DisplacementOrMetricFn = space.DisplacementOrMetricFn
MetricFn = space.MetricFn
# Cell List
@dataclasses.dataclass
class CellList:
"""Stores the spatial partition of a system into a cell list.
See cell_list(...) for details on the construction / specification.
Cell list buffers all have a common shape, S, where
* `S = [cell_count_x, cell_count_y, cell_capacity]`
* `S = [cell_count_x, cell_count_y, cell_count_z, cell_capacity]`
in two- and three-dimensions respectively. It is assumed that each cell has
the same capacity.
Attributes:
position_buffer: An ndarray of floating point positions with shape
S + [spatial_dimension].
id_buffer: An ndarray of int32 particle ids of shape S. Note that empty
slots are specified by id = N where N is the number of particles in the
system.
kwarg_buffers: A dictionary of ndarrays of shape S + [...]. This contains
side data placed into the cell list.
"""
position_buffer: Array
id_buffer: Array
kwarg_buffers: Dict[str, Array]
def _cell_dimensions(spatial_dimension: int,
box_size: Box,
minimum_cell_size: float) -> Tuple[Box, Array, Array, int]:
"""Compute the number of cells-per-side and total number of cells in a box."""
if isinstance(box_size, int) or isinstance(box_size, float):
box_size = float(box_size)
# NOTE(schsam): Should we auto-cast based on box_size? I can't imagine a case
# in which the box_size would not be accurately represented by an f32.
if (isinstance(box_size, onp.ndarray) and
(box_size.dtype == jnp.int32 or box_size.dtype == jnp.int64)):
box_size = float(box_size)
cells_per_side = onp.floor(box_size / minimum_cell_size)
cell_size = box_size / cells_per_side
cells_per_side = onp.array(cells_per_side, dtype=jnp.int64)
if isinstance(box_size, onp.ndarray):
if box_size.ndim == 1 or box_size.ndim == 2:
assert box_size.size == spatial_dimension
flat_cells_per_side = onp.reshape(cells_per_side, (-1,))
for cells in flat_cells_per_side:
if cells < 3:
raise ValueError(
('Box must be at least 3x the size of the grid spacing in each '
'dimension.'))
cell_count = reduce(mul, flat_cells_per_side, 1)
elif box_size.ndim == 0:
cell_count = cells_per_side ** spatial_dimension
else:
raise ValueError('Box must either be a scalar or a vector.')
else:
cell_count = cells_per_side ** spatial_dimension
return box_size, cell_size, cells_per_side, int(cell_count)
def count_cell_filling(R: Array,
box_size: Box,
minimum_cell_size: float) -> Array:
"""Counts the number of particles per-cell in a spatial partition."""
dim = int(R.shape[1])
box_size, cell_size, cells_per_side, cell_count = \
_cell_dimensions(dim, box_size, minimum_cell_size)
hash_multipliers = _compute_hash_constants(dim, cells_per_side)
particle_index = jnp.array(R / cell_size, dtype=jnp.int64)
particle_hash = jnp.sum(particle_index * hash_multipliers, axis=1)
filling = ops.segment_sum(jnp.ones_like(particle_hash),
particle_hash,
cell_count)
return filling
def _is_variable_compatible_with_positions(R: Array) -> bool:
if (util.is_array(R) and
len(R.shape) == 2 and
jnp.issubdtype(R.dtype, jnp.floating)):
return True
return False
def _compute_hash_constants(spatial_dimension: int,
cells_per_side: Array) -> Array:
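  # Per-dimension strides used to flatten a d-dimensional cell index into a
  # single integer cell hash.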
if cells_per_side.size == 1:
return jnp.array([[cells_per_side ** d for d in range(spatial_dimension)]],
dtype=jnp.int64)
elif cells_per_side.size == spatial_dimension:
one = jnp.array([[1]], dtype=jnp.int32)
cells_per_side = jnp.concatenate((one, cells_per_side[:, :-1]), axis=1)
return jnp.array(jnp.cumprod(cells_per_side), dtype=jnp.int64)
else:
raise ValueError()
def _neighboring_cells(dimension: int) -> Generator[onp.ndarray, None, None]:
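  # Yields all 3**dimension offsets in {-1, 0, 1}^d: a cell together with each
  # of its immediate neighbors, including diagonals.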
for dindex in onp.ndindex(*([3] * dimension)):
yield onp.array(dindex, dtype=jnp.int64) - 1
def _estimate_cell_capacity(R: Array,
box_size: Box,
cell_size: float,
buffer_size_multiplier: float) -> int:
# TODO(schsam): We might want to do something more sophisticated here or at
# least expose this constant.
spatial_dim = R.shape[-1]
cell_capacity = onp.max(count_cell_filling(R, box_size, cell_size))
return int(cell_capacity * buffer_size_multiplier)
def _unflatten_cell_buffer(arr: Array,
cells_per_side: Array,
dim: int) -> Array:
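  # Reshapes a flat per-cell buffer into the grid shape
  # [cells_x, cells_y(, cells_z), cell_capacity, ...].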
if (isinstance(cells_per_side, int) or
isinstance(cells_per_side, float) or
(util.is_array(cells_per_side) and not cells_per_side.shape)):
cells_per_side = (int(cells_per_side),) * dim
elif util.is_array(cells_per_side) and len(cells_per_side.shape) == 1:
cells_per_side = tuple([int(x) for x in cells_per_side[::-1]])
elif util.is_array(cells_per_side) and len(cells_per_side.shape) == 2:
cells_per_side = tuple([int(x) for x in cells_per_side[0][::-1]])
else:
raise ValueError() # TODO
return jnp.reshape(arr, cells_per_side + (-1,) + arr.shape[1:])
def _shift_array(arr: onp.ndarray, dindex: Array) -> Array:
if len(dindex) == 2:
dx, dy = dindex
dz = 0
elif len(dindex) == 3:
dx, dy, dz = dindex
if dx < 0:
arr = jnp.concatenate((arr[1:], arr[:1]))
elif dx > 0:
arr = jnp.concatenate((arr[-1:], arr[:-1]))
if dy < 0:
arr = jnp.concatenate((arr[:, 1:], arr[:, :1]), axis=1)
elif dy > 0:
arr = jnp.concatenate((arr[:, -1:], arr[:, :-1]), axis=1)
if dz < 0:
arr = jnp.concatenate((arr[:, :, 1:], arr[:, :, :1]), axis=2)
elif dz > 0:
arr = jnp.concatenate((arr[:, :, -1:], arr[:, :, :-1]), axis=2)
return arr
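# Illustrative behaviour: _shift_array(arr, (1, 0)) rolls the cell grid by one
# cell along axis 0 with periodic wrap-around (equivalent to
# jnp.roll(arr, 1, axis=0)); applying it for each offset from _neighboring_cells
# lets every cell gather the contents of its neighbouring cells.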
def _vectorize(f: Callable, dim: int) -> Callable:
if dim == 2:
return vmap(vmap(f, 0, 0), 0, 0)
elif dim == 3:
return vmap(vmap(vmap(f, 0, 0), 0, 0), 0, 0)
raise ValueError('Cell list only supports 2d or 3d.')
def cell_list(box_size: Box,
minimum_cell_size: float,
cell_capacity_or_example_R: Union[int, Array],
buffer_size_multiplier: float=1.1
) -> Callable[[Array], CellList]:
r"""Returns a function that partitions point data spatially.
Given a set of points {x_i \in R^d} with associated data {k_i \in R^m} it is
often useful to partition the points / data spatially. A simple partitioning
that can be implemented efficiently within XLA is a dense partition into a
uniform grid called a cell list.
Since XLA requires that shapes be statically specified, we allocate fixed
sized buffers for each cell. The size of this buffer can either be specified
manually or it can be estimated automatically from a set of positions. Note,
  if the distribution of points changes significantly it is likely that the
  buffer sizes will have to be adjusted.
This partitioning will likely form the groundwork for parallelizing
simulations over different accelerators.
Args:
box_size: A float or an ndarray of shape [spatial_dimension] specifying the
size of the system. Note, this code is written for the case where the
boundaries are periodic. If this is not the case, then the current code
will be slightly less efficient.
minimum_cell_size: A float specifying the minimum side length of each cell.
Cells are enlarged so that they exactly fill the box.
    cell_capacity_or_example_R: Either an integer specifying the maximum
      number of particles that can be stored in each cell or an ndarray of
positions of shape [particle_count, spatial_dimension] that is used to
estimate the cell_capacity.
buffer_size_multiplier: A floating point multiplier that multiplies the
estimated cell capacity to allow for fluctuations in the maximum cell
occupancy.
Returns:
A function `cell_list_fn(R, **kwargs)` that partitions positions, `R`, and
side data specified by kwargs into a cell list. Returns a CellList
containing the partition.
"""
if util.is_array(box_size):
box_size = onp.array(box_size)
if len(box_size.shape) == 1:
box_size = jnp.reshape(box_size, (1, -1))
if util.is_array(minimum_cell_size):
minimum_cell_size = onp.array(minimum_cell_size)
cell_capacity = cell_capacity_or_example_R
if _is_variable_compatible_with_positions(cell_capacity):
cell_capacity = _estimate_cell_capacity(
cell_capacity, box_size, minimum_cell_size, buffer_size_multiplier)
elif not isinstance(cell_capacity, int):
msg = (
'cell_capacity_or_example_positions must either be an integer '
'specifying the cell capacity or a set of positions that will be used '
'to estimate a cell capacity. Found {}.'.format(type(cell_capacity))
)
raise ValueError(msg)
def build_cells(R: Array, extra_capacity: int=0, **kwargs) -> CellList:
N = R.shape[0]
dim = R.shape[1]
_cell_capacity = cell_capacity + extra_capacity
if dim != 2 and dim != 3:
# NOTE(schsam): Do we want to check this in compute_fn as well?
raise ValueError(
'Cell list spatial dimension must be 2 or 3. Found {}'.format(dim))
neighborhood_tile_count = 3 ** dim
_, cell_size, cells_per_side, cell_count = \
_cell_dimensions(dim, box_size, minimum_cell_size)
hash_multipliers = _compute_hash_constants(dim, cells_per_side)
# Create cell list data.
particle_id = lax.iota(jnp.int64, N)
    # NOTE(schsam): We use the convention that particles that are successfully
    # copied have their true id whereas empty slots have id = N.
# Then when we copy data back from the grid, copy it to an array of shape
# [N + 1, output_dimension] and then truncate it to an array of shape
# [N, output_dimension] which ignores the empty slots.
mask_id = jnp.ones((N,), jnp.int64) * N
cell_R = jnp.zeros((cell_count * _cell_capacity, dim), dtype=R.dtype)
cell_id = N * jnp.ones((cell_count * _cell_capacity, 1), dtype=i32)
# It might be worth adding an occupied mask. However, that will involve
# more compute since often we will do a mask for species that will include
# an occupancy test. It seems easier to design around this empty_data_value
# for now and revisit the issue if it comes up later.
empty_kwarg_value = 10 ** 5
cell_kwargs = {}
for k, v in kwargs.items():
if not util.is_array(v):
raise ValueError((
          'Data must be specified as an ndarray. Found "{}" with '
'type {}'.format(k, type(v))))
if v.shape[0] != R.shape[0]:
raise ValueError(
('Data must be specified per-particle (an ndarray with shape '
'(R.shape[0], ...)). Found "{}" with shape {}'.format(k, v.shape)))
kwarg_shape = v.shape[1:] if v.ndim > 1 else (1,)
cell_kwargs[k] = empty_kwarg_value * jnp.ones(
(cell_count * _cell_capacity,) + kwarg_shape, v.dtype)
indices = jnp.array(R / cell_size, dtype=i32)
hashes = jnp.sum(indices * hash_multipliers, axis=1)
# Copy the particle data into the grid. Here we use a trick to allow us to
# copy into all cells simultaneously using a single lax.scatter call. To do
# this we first sort particles by their cell hash. We then assign each
# particle to have a cell id = hash * cell_capacity + grid_id where grid_id
    # is a flat list that repeats 0, ..., cell_capacity - 1. So long as there are
    # fewer than cell_capacity particles per cell, each particle is guaranteed
# to get a cell id that is unique.
sort_map = jnp.argsort(hashes)
sorted_R = R[sort_map]
sorted_hash = hashes[sort_map]
sorted_id = particle_id[sort_map]
sorted_kwargs = {}
for k, v in kwargs.items():
sorted_kwargs[k] = v[sort_map]
sorted_cell_id = jnp.mod(lax.iota(jnp.int64, N), _cell_capacity)
sorted_cell_id = sorted_hash * _cell_capacity + sorted_cell_id
cell_R = ops.index_update(cell_R, sorted_cell_id, sorted_R)
sorted_id = jnp.reshape(sorted_id, (N, 1))
cell_id = ops.index_update(
cell_id, sorted_cell_id, sorted_id)
cell_R = _unflatten_cell_buffer(cell_R, cells_per_side, dim)
cell_id = _unflatten_cell_buffer(cell_id, cells_per_side, dim)
for k, v in sorted_kwargs.items():
if v.ndim == 1:
v = jnp.reshape(v, v.shape + (1,))
cell_kwargs[k] = ops.index_update(cell_kwargs[k], sorted_cell_id, v)
cell_kwargs[k] = _unflatten_cell_buffer(
cell_kwargs[k], cells_per_side, dim)
return CellList(cell_R, cell_id, cell_kwargs) # pytype: disable=wrong-arg-count
return build_cells
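# Minimal usage sketch (hypothetical, not part of the original module);
# `positions` is an assumed (N, 3) array of points inside the box:
#   cell_fn = cell_list(box_size=10.0, minimum_cell_size=1.5,
#                       cell_capacity_or_example_R=positions)
#   cl = cell_fn(positions)
#   # cl.position_buffer has shape (nx, ny, nz, cell_capacity, 3) and
#   # cl.id_buffer has shape (nx, ny, nz, cell_capacity, 1); empty slots hold id N.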
def _displacement_or_metric_to_metric_sq(
displacement_or_metric: DisplacementOrMetricFn) -> MetricFn:
"""Checks whether or not a displacement or metric was provided."""
for dim in range(1, 4):
try:
R = ShapedArray((dim,), f32)
dR_or_dr = eval_shape(displacement_or_metric, R, R, t=0)
if len(dR_or_dr.shape) == 0:
return lambda Ra, Rb, **kwargs: \
displacement_or_metric(Ra, Rb, **kwargs) ** 2
else:
return lambda Ra, Rb, **kwargs: space.square_distance(
displacement_or_metric(Ra, Rb, **kwargs))
except TypeError:
continue
except ValueError:
continue
raise ValueError(
    'Canonicalize displacement not implemented for spatial dimension larger '
    'than 3.')
class NeighborListFormat(Enum):
"""An enum listing the different neighbor list formats.
Attributes:
    Dense: A dense neighbor list where the ids are a matrix
of shape `(N, max_neighbors_per_atom)`. Here the capacity of the neighbor
list must scale with the highest connectivity neighbor.
Sparse: A sparse neighbor list where the ids are a rectangular
matrix of shape `(2, max_neighbors)` specifying the start / end particle
of each neighbor pair.
OrderedSparse: A sparse neighbor list whose format is the same as `Sparse`
where only bonds with i < j are included.
"""
Dense = 0
Sparse = 1
OrderedSparse = 2
def is_sparse(format: NeighborListFormat) -> bool:
return (format is NeighborListFormat.Sparse or
format is NeighborListFormat.OrderedSparse)
def is_format_valid(format: NeighborListFormat):
  if format not in list(NeighborListFormat):
    raise ValueError((
        'Neighbor list format must be a member of NeighborListFormat;'
        f' found {format}.'))
@dataclasses.dataclass
class NeighborList(object):
"""A struct containing the state of a Neighbor List.
Attributes:
idx: For an N particle system this is an `[N, max_occupancy]` array of
integers such that `idx[i, j]` is the jth neighbor of particle i.
reference_position: The positions of particles when the neighbor list was
constructed. This is used to decide whether the neighbor list ought to be
updated.
did_buffer_overflow: A boolean that starts out False. If there are ever
      more neighbors than max_occupancy this is set to True to indicate that
there was a buffer overflow. If this happens, it means that the results
of the simulation will be incorrect and the simulation needs to be rerun
using a larger buffer.
max_occupancy: A static integer specifying the maximum size of the
neighbor list. Changing this will invoke a recompilation.
format: A NeighborListFormat enum specifying the format of the neighbor
list.
cell_list_fn: A static python callable that is used to construct a cell
list used in an intermediate step of the neighbor list calculation.
update_fn: A static python function used to update the neighbor list.
"""
idx: Array
reference_position: Array
did_buffer_overflow: Array
max_occupancy: int = dataclasses.static_field()
format: NeighborListFormat = dataclasses.static_field()
cell_list_fn: Callable[[Array], CellList] = dataclasses.static_field()
update_fn: Callable[[Array, 'NeighborList'],
'NeighborList'] = dataclasses.static_field()
def update(self, R, **kwargs):
return self.update_fn(R, self, **kwargs)
@dataclasses.dataclass
class NeighborListFns:
"""A struct containing functions to allocate and update neighbor lists.
Attributes:
allocate: A function to allocate a new neighbor list. This function cannot
be compiled, since it uses the values of positions to infer the shapes.
update: A function to update a neighbor list given a new set of positions
and a new neighbor list.
"""
allocate: Callable[..., NeighborList] = dataclasses.static_field()
update: Callable[[Array, NeighborList],
NeighborList] = dataclasses.static_field()
def __call__(self,
R: Array,
neighbor_list: Optional[NeighborList]=None,
extra_capacity: int=0,
**kwargs) -> NeighborList:
"""A function for backward compatibility with previous neighbor lists.
    Args:
      R: An `(N, dim)` array of particle positions.
      neighbor_list: An optional neighbor list object. If it is provided then
the function updates the neighbor list, otherwise it allocates a new
neighbor list.
extra_capacity: Extra capacity to add if allocating the neighbor list.
"""
    logging.warning('Using a deprecated code path to create / update neighbor '
'lists. It will be removed in a later version of JAX MD. '
'Using `neighbor_fn.allocate` and `neighbor_fn.update` '
'is preferred.')
if neighbor_list is None:
return self.allocate(R, extra_capacity, **kwargs)
return self.update(R, neighbor_list, **kwargs)
def __iter__(self):
return iter((self.allocate, self.update))
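# Because NeighborListFns defines __iter__, it can also be unpacked as
# `allocate_fn, update_fn = neighbor_fn` (illustrative usage, not from the source).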
NeighborFn = Callable[[Array, Optional[NeighborList], Optional[int]],
NeighborList]
def neighbor_list(displacement_or_metric: DisplacementOrMetricFn,
box_size: Box,
r_cutoff: float,
dr_threshold: float,
capacity_multiplier: float=1.25,
disable_cell_list: bool=False,
mask_self: bool=True,
fractional_coordinates: bool=False,
format: NeighborListFormat=NeighborListFormat.Dense,
**static_kwargs) -> NeighborFn:
"""Returns a function that builds a list neighbors for collections of points.
Neighbor lists must balance the need to be jit compatable with the fact that
under a jit the maximum number of neighbors cannot change (owing to static
shape requirements). To deal with this, our `neighbor_list` returns a
`NeighborListFns` object that contains two functions: 1)
`neighbor_fn.allocate` create a new neighbor list and 2) `neighbor_fn.update`
updates an existing neighbor list. Neighbor lists themselves additionally
have a convenience `update` member function.
Note that allocation of a new neighbor list cannot be jit compiled since it
uses the positions to infer the maximum number of neighbors (along with
additional space specified by the `capacity_multiplier`). Updating the
neighbor list can be jit compiled; if the neighbor list capacity is not
sufficient to store all the neighbors, the `did_buffer_overflow` bit
will be set to `True` and a new neighbor list will need to be reallocated.
Here is a typical example of a simulation loop with neighbor lists:
>>> init_fn, apply_fn = simulate.nve(energy_fn, shift, 1e-3)
>>> exact_init_fn, exact_apply_fn = simulate.nve(exact_energy_fn, shift, 1e-3)
>>>
>>> nbrs = neighbor_fn.allocate(R)
>>> state = init_fn(random.PRNGKey(0), R, neighbor_idx=nbrs.idx)
>>>
>>> def body_fn(i, state):
>>> state, nbrs = state
>>> nbrs = nbrs.update(state.position)
>>> state = apply_fn(state, neighbor_idx=nbrs.idx)
>>> return state, nbrs
>>>
>>> step = 0
>>> for _ in range(20):
>>> new_state, nbrs = lax.fori_loop(0, 100, body_fn, (state, nbrs))
>>> if nbrs.did_buffer_overflow:
>>> nbrs = neighbor_fn.allocate(state.position)
>>> else:
>>> state = new_state
>>> step += 1
Args:
    displacement_or_metric: A function `d(R_a, R_b)` that computes the
      displacement (or distance) between pairs of points.
box_size: Either a float specifying the size of the box or an array of
shape [spatial_dim] specifying the box size in each spatial dimension.
r_cutoff: A scalar specifying the neighborhood radius.
dr_threshold: A scalar specifying the maximum distance particles can move
before rebuilding the neighbor list.
capacity_multiplier: A floating point scalar specifying the fractional
increase in maximum neighborhood occupancy we allocate compared with the
maximum in the example positions.
disable_cell_list: An optional boolean. If set to True then the neighbor
list is constructed using only distances. This can be useful for
debugging but should generally be left as False.
mask_self: An optional boolean. Determines whether points can consider
themselves to be their own neighbors.
fractional_coordinates: An optional boolean. Specifies whether positions
will be supplied in fractional coordinates in the unit cube, [0, 1]^d.
If this is set to True then the box_size will be set to 1.0 and the
cell size used in the cell list will be set to cutoff / box_size.
format: The format of the neighbor list; see the NeighborListFormat enum
for details about the different choices for formats. Defaults to `Dense`.
**static_kwargs: kwargs that get threaded through the calculation of
example positions.
Returns:
    A NeighborListFns object with two functions. `neighbor_fn.allocate(R)`
    constructs a new neighbor list whose capacity is inferred from R and
    therefore cannot be `jit` compiled. `neighbor_fn.update(R, neighbor_list)`
    updates an existing neighbor list (with fixed capacity) whenever any
    particle has moved more than dr_threshold / 2; it keeps array shapes fixed
    and so can be `jit` compiled. If the capacity is ever exceeded,
    `did_buffer_overflow` is set to True and the list must be reallocated.
"""
is_format_valid(format)
box_size = lax.stop_gradient(box_size)
r_cutoff = lax.stop_gradient(r_cutoff)
dr_threshold = lax.stop_gradient(dr_threshold)
box_size = f32(box_size)
cutoff = r_cutoff + dr_threshold
cutoff_sq = cutoff ** 2
threshold_sq = (dr_threshold / f32(2)) ** 2
metric_sq = _displacement_or_metric_to_metric_sq(displacement_or_metric)
cell_size = cutoff
if fractional_coordinates:
cell_size = cutoff / box_size
box_size = f32(1)
use_cell_list = jnp.all(cell_size < box_size / 3.) and not disable_cell_list
@jit
def candidate_fn(R, **kwargs):
return jnp.broadcast_to(jnp.reshape(jnp.arange(R.shape[0]), (1, R.shape[0])),
(R.shape[0], R.shape[0]))
@jit
def cell_list_candidate_fn(cl, R, **kwargs):
N, dim = R.shape
R = cl.position_buffer
idx = cl.id_buffer
cell_idx = [idx]
for dindex in _neighboring_cells(dim):
if onp.all(dindex == 0):
continue
cell_idx += [_shift_array(idx, dindex)]
cell_idx = jnp.concatenate(cell_idx, axis=-2)
cell_idx = cell_idx[..., jnp.newaxis, :, :]
cell_idx = jnp.broadcast_to(cell_idx, idx.shape[:-1] + cell_idx.shape[-2:])
def copy_values_from_cell(value, cell_value, cell_id):
scatter_indices = jnp.reshape(cell_id, (-1,))
cell_value = jnp.reshape(cell_value, (-1,) + cell_value.shape[-2:])
return ops.index_update(value, scatter_indices, cell_value)
# NOTE(schsam): Currently, this makes a verlet list that is larger than
    # needed since the idx buffer inherits its size from the cell-list. In
# three-dimensions this seems to translate into an occupancy of ~70%. We
# can make this more efficient by shrinking the verlet list at the cost of
# another sort. However, this seems possibly less efficient than just
# computing everything.
neighbor_idx = jnp.zeros((N + 1,) + cell_idx.shape[-2:], jnp.int32)
neighbor_idx = copy_values_from_cell(neighbor_idx, cell_idx, idx)
return neighbor_idx[:-1, :, 0]
@jit
def mask_self_fn(idx):
self_mask = idx == jnp.reshape(jnp.arange(idx.shape[0]), (idx.shape[0], 1))
return jnp.where(self_mask, idx.shape[0], idx)
@jit
def prune_neighbor_list_dense(R, idx, **kwargs):
d = partial(metric_sq, **kwargs)
d = space.map_neighbor(d)
N = R.shape[0]
neigh_R = R[idx]
dR = d(R, neigh_R)
mask = (dR < cutoff_sq) & (idx < N)
out_idx = N * jnp.ones(idx.shape, jnp.int32)
cumsum = jnp.cumsum(mask, axis=1)
index = jnp.where(mask, cumsum - 1, idx.shape[1] - 1)
p_index = jnp.arange(idx.shape[0])[:, None]
out_idx = out_idx.at[p_index, index].set(idx)
max_occupancy = jnp.max(cumsum[:, -1])
return out_idx[:, :-1], max_occupancy
@jit
def prune_neighbor_list_sparse(R, idx, **kwargs):
d = partial(metric_sq, **kwargs)
d = space.map_bond(d)
N = R.shape[0]
sender_idx = jnp.broadcast_to(jnp.arange(N)[:, None], idx.shape)
sender_idx = jnp.reshape(sender_idx, (-1,))
receiver_idx = jnp.reshape(idx, (-1,))
dR = d(R[sender_idx], R[receiver_idx])
mask = (dR < cutoff_sq) & (receiver_idx < N)
if format is NeighborListFormat.OrderedSparse:
mask = mask & (receiver_idx < sender_idx)
out_idx = N * jnp.ones(receiver_idx.shape, jnp.int32)
cumsum = jnp.cumsum(mask)
index = jnp.where(mask, cumsum - 1, len(receiver_idx) - 1)
receiver_idx = out_idx.at[index].set(receiver_idx)
sender_idx = out_idx.at[index].set(sender_idx)
max_occupancy = cumsum[-1]
return jnp.stack((receiver_idx[:-1], sender_idx[:-1])), max_occupancy
def neighbor_list_fn(R: Array,
neighbor_list: Optional[NeighborList]=None,
extra_capacity: int=0,
**kwargs) -> NeighborList:
nbrs = neighbor_list
def neighbor_fn(R_and_overflow, max_occupancy=None):
R, overflow = R_and_overflow
N = R.shape[0]
if cell_list_fn is not None:
cl = cell_list_fn(R)
idx = cell_list_candidate_fn(cl, R, **kwargs)
else:
idx = candidate_fn(R, **kwargs)
if mask_self:
idx = mask_self_fn(idx)
if is_sparse(format):
idx, occupancy = prune_neighbor_list_sparse(R, idx, **kwargs)
else:
idx, occupancy = prune_neighbor_list_dense(R, idx, **kwargs)
if max_occupancy is None:
_extra_capacity = (extra_capacity if not is_sparse(format)
else N * extra_capacity)
max_occupancy = int(occupancy * capacity_multiplier + _extra_capacity)
if max_occupancy > R.shape[0] and not is_sparse(format):
max_occupancy = R.shape[0]
padding = max_occupancy - occupancy
if max_occupancy > occupancy:
idx = jnp.concatenate(
[idx, N * jnp.ones((idx.shape[0], padding), dtype=idx.dtype)],
axis=1)
idx = idx[:, :max_occupancy]
update_fn = (neighbor_list_fn if neighbor_list is None else
neighbor_list.update_fn)
return NeighborList(
idx,
R,
jnp.logical_or(overflow, (max_occupancy < occupancy)),
max_occupancy,
format,
cell_list_fn,
update_fn) # pytype: disable=wrong-arg-count
if nbrs is None:
cell_list_fn = (cell_list(box_size, cell_size, R, capacity_multiplier) if
use_cell_list else None)
return neighbor_fn((R, False))
else:
cell_list_fn = nbrs.cell_list_fn
neighbor_fn = partial(neighbor_fn, max_occupancy=nbrs.max_occupancy)
d = partial(metric_sq, **kwargs)
d = vmap(d)
return lax.cond(
jnp.any(d(R, nbrs.reference_position) > threshold_sq),
(R, nbrs.did_buffer_overflow), neighbor_fn,
nbrs, lambda x: x)
return NeighborListFns(lambda R, extra_capacity=0, **kwargs:
neighbor_list_fn(R,
extra_capacity=extra_capacity,
**kwargs),
lambda R, nbrs, **kwargs: # pytype: disable=wrong-arg-count
neighbor_list_fn(R, nbrs, **kwargs))
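# Hypothetical allocate / update sketch (names such as `displacement` and `R`
# are assumed to be defined elsewhere):
#   neighbor_fn = neighbor_list(displacement, box_size=10.0,
#                               r_cutoff=1.5, dr_threshold=0.5)
#   nbrs = neighbor_fn.allocate(R)           # infers capacity; cannot be jit
#   nbrs = jit(neighbor_fn.update)(R, nbrs)  # fixed capacity; safe to jit
#   # If nbrs.did_buffer_overflow is True, reallocate via neighbor_fn.allocate.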
def neighbor_list_mask(neighbor: NeighborList, mask_self: bool=False) -> Array:
"""Compute a mask for neighbor list."""
if is_sparse(neighbor.format):
mask = neighbor.idx[0] < len(neighbor.reference_position)
if mask_self:
mask = mask & (neighbor.idx[0] != neighbor.idx[1])
return mask
mask = neighbor.idx < len(neighbor.idx)
if mask_self:
N = len(neighbor.reference_position)
self_mask = neighbor.idx != jnp.reshape(jnp.arange(N), (N, 1))
mask = mask & self_mask
return mask
def to_jraph(neighbor: NeighborList, mask: Array=None) -> jraph.GraphsTuple:
"""Convert a sparse neighbor list to a `jraph.GraphsTuple`.
  As in jraph, padding here is accomplished by adding a fictitious graph with a
single node.
Args:
neighbor: A neighbor list that we will convert to the jraph format. Must be
sparse.
mask: An optional mask on the edges.
Returns:
A `jraph.GraphsTuple` that contains the topology of the neighbor list.
"""
if not is_sparse(neighbor.format):
raise ValueError('Cannot convert a dense neighbor list to jraph format. '
'Please use either NeighborListFormat.Sparse or '
'NeighborListFormat.OrderedSparse.')
receivers, senders = neighbor.idx
N = len(neighbor.reference_position)
_mask = neighbor_list_mask(neighbor)
if mask is not None:
_mask = _mask & mask
cumsum = jnp.cumsum(_mask)
index = jnp.where(_mask, cumsum - 1, len(receivers))
ordered = N * jnp.ones((len(receivers) + 1,), jnp.int32)
receivers = ordered.at[index].set(receivers)[:-1]
senders = ordered.at[index].set(senders)[:-1]
mask = receivers < N
return jraph.GraphsTuple(
nodes=None,
edges=None,
receivers=receivers,
senders=senders,
globals=None,
n_node=jnp.array([N, 1]),
n_edge=jnp.array([jnp.sum(_mask), jnp.sum(~_mask)]),
)
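# Hypothetical usage sketch; requires a neighbor list built with format=Sparse
# or OrderedSparse:
#   graph = to_jraph(nbrs)
#   # graph.senders / graph.receivers are padded edge lists; padding edges point
#   # at the fictitious node added by n_node = [N, 1].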
def to_dense(neighbor: NeighborList) -> Array:
"""Converts a sparse neighbor list to dense ids. Cannot be JIT."""
if neighbor.format is not Sparse:
raise ValueError('Can only convert sparse neighbor lists to dense ones.')
receivers, senders = neighbor.idx
mask = neighbor_list_mask(neighbor)
receivers = receivers[mask]
senders = senders[mask]
N = len(neighbor.reference_position)
count = ops.segment_sum(jnp.ones(len(receivers), jnp.int32), receivers, N)
max_count = jnp.max(count)
offset = jnp.tile(jnp.arange(max_count), N)[:len(senders)]
hashes = senders * max_count + offset
dense_idx = N * jnp.ones((N * max_count,), jnp.int32)
dense_idx = dense_idx.at[hashes].set(receivers).reshape((N, max_count))
return dense_idx
Dense = NeighborListFormat.Dense
Sparse = NeighborListFormat.Sparse
OrderedSparse = NeighborListFormat.OrderedSparse
| [((2979, 3018), 'numpy.floor', 'onp.floor', (['(box_size / minimum_cell_size)'], {}), '(box_size / minimum_cell_size)\n', (2988, 3018), True, 'import numpy as onp\n'), ((3078, 3120), 'numpy.array', 'onp.array', (['cells_per_side'], {'dtype': 'jnp.int64'}), '(cells_per_side, dtype=jnp.int64)\n', (3087, 3120), True, 'import numpy as onp\n'), ((4283, 4324), 'jax.numpy.array', 'jnp.array', (['(R / cell_size)'], {'dtype': 'jnp.int64'}), '(R / cell_size, dtype=jnp.int64)\n', (4292, 4324), True, 'import jax.numpy as jnp\n'), ((4343, 4393), 'jax.numpy.sum', 'jnp.sum', (['(particle_index * hash_multipliers)'], {'axis': '(1)'}), '(particle_index * hash_multipliers, axis=1)\n', (4350, 4393), True, 'import jax.numpy as jnp\n'), ((5377, 5408), 'numpy.ndindex', 'onp.ndindex', (['*([3] * dimension)'], {}), '(*([3] * dimension))\n', (5388, 5408), True, 'import numpy as onp\n'), ((6587, 6643), 'jax.numpy.reshape', 'jnp.reshape', (['arr', '(cells_per_side + (-1,) + arr.shape[1:])'], {}), '(arr, cells_per_side + (-1,) + arr.shape[1:])\n', (6598, 6643), True, 'import jax.numpy as jnp\n'), ((9571, 9594), 'jax_md.util.is_array', 'util.is_array', (['box_size'], {}), '(box_size)\n', (9584, 9594), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((9718, 9750), 'jax_md.util.is_array', 'util.is_array', (['minimum_cell_size'], {}), '(minimum_cell_size)\n', (9731, 9750), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((17308, 17334), 'jax_md.dataclasses.static_field', 'dataclasses.static_field', ([], {}), '()\n', (17332, 17334), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((17367, 17393), 'jax_md.dataclasses.static_field', 'dataclasses.static_field', ([], {}), '()\n', (17391, 17393), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((17440, 17466), 'jax_md.dataclasses.static_field', 'dataclasses.static_field', ([], {}), '()\n', (17464, 17466), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((17554, 17580), 'jax_md.dataclasses.static_field', 'dataclasses.static_field', ([], {}), '()\n', (17578, 17580), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((18111, 18137), 'jax_md.dataclasses.static_field', 'dataclasses.static_field', ([], {}), '()\n', (18135, 18137), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((18215, 18241), 'jax_md.dataclasses.static_field', 'dataclasses.static_field', ([], {}), '()\n', (18239, 18241), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((23877, 23904), 'jax.lax.stop_gradient', 'lax.stop_gradient', (['box_size'], {}), '(box_size)\n', (23894, 23904), False, 'from jax import lax\n'), ((23918, 23945), 'jax.lax.stop_gradient', 'lax.stop_gradient', (['r_cutoff'], {}), '(r_cutoff)\n', (23935, 23945), False, 'from jax import lax\n'), ((23963, 23994), 'jax.lax.stop_gradient', 'lax.stop_gradient', (['dr_threshold'], {}), '(dr_threshold)\n', (23980, 23994), False, 'from jax import lax\n'), ((32436, 32450), 'jax.numpy.max', 'jnp.max', (['count'], {}), '(count)\n', (32443, 32450), True, 'import jax.numpy as jnp\n'), ((4423, 4451), 'jax.numpy.ones_like', 'jnp.ones_like', (['particle_hash'], {}), '(particle_hash)\n', (4436, 4451), True, 'import jax.numpy as jnp\n'), ((4623, 4639), 'jax_md.util.is_array', 'util.is_array', (['R'], {}), '(R)\n', (4636, 4639), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((4678, 4715), 'jax.numpy.issubdtype', 'jnp.issubdtype', (['R.dtype', 'jnp.floating'], {}), '(R.dtype, jnp.floating)\n', (4692, 
4715), True, 'import jax.numpy as jnp\n'), ((6833, 6868), 'jax.numpy.concatenate', 'jnp.concatenate', (['(arr[1:], arr[:1])'], {}), '((arr[1:], arr[:1]))\n', (6848, 6868), True, 'import jax.numpy as jnp\n'), ((6956, 7005), 'jax.numpy.concatenate', 'jnp.concatenate', (['(arr[:, 1:], arr[:, :1])'], {'axis': '(1)'}), '((arr[:, 1:], arr[:, :1]), axis=1)\n', (6971, 7005), True, 'import jax.numpy as jnp\n'), ((7107, 7162), 'jax.numpy.concatenate', 'jnp.concatenate', (['(arr[:, :, 1:], arr[:, :, :1])'], {'axis': '(2)'}), '((arr[:, :, 1:], arr[:, :, :1]), axis=2)\n', (7122, 7162), True, 'import jax.numpy as jnp\n'), ((9611, 9630), 'numpy.array', 'onp.array', (['box_size'], {}), '(box_size)\n', (9620, 9630), True, 'import numpy as onp\n'), ((9776, 9804), 'numpy.array', 'onp.array', (['minimum_cell_size'], {}), '(minimum_cell_size)\n', (9785, 9804), True, 'import numpy as onp\n'), ((10987, 11009), 'jax.lax.iota', 'lax.iota', (['jnp.int64', 'N'], {}), '(jnp.int64, N)\n', (10995, 11009), False, 'from jax import lax\n'), ((11433, 11493), 'jax.numpy.zeros', 'jnp.zeros', (['(cell_count * _cell_capacity, dim)'], {'dtype': 'R.dtype'}), '((cell_count * _cell_capacity, dim), dtype=R.dtype)\n', (11442, 11493), True, 'import jax.numpy as jnp\n'), ((12510, 12545), 'jax.numpy.array', 'jnp.array', (['(R / cell_size)'], {'dtype': 'i32'}), '(R / cell_size, dtype=i32)\n', (12519, 12545), True, 'import jax.numpy as jnp\n'), ((12559, 12602), 'jax.numpy.sum', 'jnp.sum', (['(indices * hash_multipliers)'], {'axis': '(1)'}), '(indices * hash_multipliers, axis=1)\n', (12566, 12602), True, 'import jax.numpy as jnp\n'), ((13128, 13147), 'jax.numpy.argsort', 'jnp.argsort', (['hashes'], {}), '(hashes)\n', (13139, 13147), True, 'import jax.numpy as jnp\n'), ((13492, 13542), 'jax.ops.index_update', 'ops.index_update', (['cell_R', 'sorted_cell_id', 'sorted_R'], {}), '(cell_R, sorted_cell_id, sorted_R)\n', (13508, 13542), False, 'from jax import ops\n'), ((13559, 13589), 'jax.numpy.reshape', 'jnp.reshape', (['sorted_id', '(N, 1)'], {}), '(sorted_id, (N, 1))\n', (13570, 13589), True, 'import jax.numpy as jnp\n'), ((13604, 13656), 'jax.ops.index_update', 'ops.index_update', (['cell_id', 'sorted_cell_id', 'sorted_id'], {}), '(cell_id, sorted_cell_id, sorted_id)\n', (13620, 13656), False, 'from jax import ops\n'), ((18838, 19047), 'absl.logging.warning', 'logging.warning', (['"""Using a depricated code path to create / update neighbor lists. It will be removed in a later version of JAX MD. Using `neighbor_fn.allocate` and `neighbor_fn.update` is preferred."""'], {}), "(\n 'Using a depricated code path to create / update neighbor lists. It will be removed in a later version of JAX MD. 
Using `neighbor_fn.allocate` and `neighbor_fn.update` is preferred.'\n )\n", (18853, 19047), False, 'from absl import logging\n'), ((24332, 24367), 'jax.numpy.all', 'jnp.all', (['(cell_size < box_size / 3.0)'], {}), '(cell_size < box_size / 3.0)\n', (24339, 24367), True, 'import jax.numpy as jnp\n'), ((24873, 24907), 'jax.numpy.concatenate', 'jnp.concatenate', (['cell_idx'], {'axis': '(-2)'}), '(cell_idx, axis=-2)\n', (24888, 24907), True, 'import jax.numpy as jnp\n'), ((24971, 25035), 'jax.numpy.broadcast_to', 'jnp.broadcast_to', (['cell_idx', '(idx.shape[:-1] + cell_idx.shape[-2:])'], {}), '(cell_idx, idx.shape[:-1] + cell_idx.shape[-2:])\n', (24987, 25035), True, 'import jax.numpy as jnp\n'), ((25718, 25770), 'jax.numpy.zeros', 'jnp.zeros', (['((N + 1,) + cell_idx.shape[-2:])', 'jnp.int32'], {}), '((N + 1,) + cell_idx.shape[-2:], jnp.int32)\n', (25727, 25770), True, 'import jax.numpy as jnp\n'), ((26000, 26039), 'jax.numpy.where', 'jnp.where', (['self_mask', 'idx.shape[0]', 'idx'], {}), '(self_mask, idx.shape[0], idx)\n', (26009, 26039), True, 'import jax.numpy as jnp\n'), ((26107, 26135), 'functools.partial', 'partial', (['metric_sq'], {}), '(metric_sq, **kwargs)\n', (26114, 26135), False, 'from functools import reduce, partial\n'), ((26144, 26165), 'jax_md.space.map_neighbor', 'space.map_neighbor', (['d'], {}), '(d)\n', (26162, 26165), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((26334, 26358), 'jax.numpy.cumsum', 'jnp.cumsum', (['mask'], {'axis': '(1)'}), '(mask, axis=1)\n', (26344, 26358), True, 'import jax.numpy as jnp\n'), ((26371, 26416), 'jax.numpy.where', 'jnp.where', (['mask', '(cumsum - 1)', '(idx.shape[1] - 1)'], {}), '(mask, cumsum - 1, idx.shape[1] - 1)\n', (26380, 26416), True, 'import jax.numpy as jnp\n'), ((26535, 26559), 'jax.numpy.max', 'jnp.max', (['cumsum[:, (-1)]'], {}), '(cumsum[:, (-1)])\n', (26542, 26559), True, 'import jax.numpy as jnp\n'), ((26669, 26697), 'functools.partial', 'partial', (['metric_sq'], {}), '(metric_sq, **kwargs)\n', (26676, 26697), False, 'from functools import reduce, partial\n'), ((26706, 26723), 'jax_md.space.map_bond', 'space.map_bond', (['d'], {}), '(d)\n', (26720, 26723), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((26831, 26861), 'jax.numpy.reshape', 'jnp.reshape', (['sender_idx', '(-1,)'], {}), '(sender_idx, (-1,))\n', (26842, 26861), True, 'import jax.numpy as jnp\n'), ((26881, 26904), 'jax.numpy.reshape', 'jnp.reshape', (['idx', '(-1,)'], {}), '(idx, (-1,))\n', (26892, 26904), True, 'import jax.numpy as jnp\n'), ((27170, 27186), 'jax.numpy.cumsum', 'jnp.cumsum', (['mask'], {}), '(mask)\n', (27180, 27186), True, 'import jax.numpy as jnp\n'), ((29447, 29475), 'functools.partial', 'partial', (['metric_sq'], {}), '(metric_sq, **kwargs)\n', (29454, 29475), False, 'from functools import reduce, partial\n'), ((29484, 29491), 'jax.vmap', 'vmap', (['d'], {}), '(d)\n', (29488, 29491), False, 'from jax import jit, vmap, eval_shape\n'), ((31446, 31463), 'jax.numpy.cumsum', 'jnp.cumsum', (['_mask'], {}), '(_mask)\n', (31456, 31463), True, 'import jax.numpy as jnp\n'), ((32570, 32607), 'jax.numpy.ones', 'jnp.ones', (['(N * max_count,)', 'jnp.int32'], {}), '((N * max_count,), jnp.int32)\n', (32578, 32607), True, 'import jax.numpy as jnp\n'), ((3287, 3321), 'numpy.reshape', 'onp.reshape', (['cells_per_side', '(-1,)'], {}), '(cells_per_side, (-1,))\n', (3298, 3321), True, 'import numpy as onp\n'), ((3536, 3571), 'functools.reduce', 'reduce', (['mul', 'flat_cells_per_side', '(1)'], {}), '(mul, 
flat_cells_per_side, 1)\n', (3542, 3571), False, 'from functools import reduce, partial\n'), ((5073, 5106), 'jax.numpy.array', 'jnp.array', (['[[1]]'], {'dtype': 'jnp.int32'}), '([[1]], dtype=jnp.int32)\n', (5082, 5106), True, 'import jax.numpy as jnp\n'), ((5128, 5182), 'jax.numpy.concatenate', 'jnp.concatenate', (['(one, cells_per_side[:, :-1])'], {'axis': '(1)'}), '((one, cells_per_side[:, :-1]), axis=1)\n', (5143, 5182), True, 'import jax.numpy as jnp\n'), ((6145, 6174), 'jax_md.util.is_array', 'util.is_array', (['cells_per_side'], {}), '(cells_per_side)\n', (6158, 6174), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((6264, 6293), 'jax_md.util.is_array', 'util.is_array', (['cells_per_side'], {}), '(cells_per_side)\n', (6277, 6293), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((6894, 6931), 'jax.numpy.concatenate', 'jnp.concatenate', (['(arr[-1:], arr[:-1])'], {}), '((arr[-1:], arr[:-1]))\n', (6909, 6931), True, 'import jax.numpy as jnp\n'), ((7031, 7082), 'jax.numpy.concatenate', 'jnp.concatenate', (['(arr[:, -1:], arr[:, :-1])'], {'axis': '(1)'}), '((arr[:, -1:], arr[:, :-1]), axis=1)\n', (7046, 7082), True, 'import jax.numpy as jnp\n'), ((7188, 7245), 'jax.numpy.concatenate', 'jnp.concatenate', (['(arr[:, :, -1:], arr[:, :, :-1])'], {'axis': '(2)'}), '((arr[:, :, -1:], arr[:, :, :-1]), axis=2)\n', (7203, 7245), True, 'import jax.numpy as jnp\n'), ((7344, 7357), 'jax.vmap', 'vmap', (['f', '(0)', '(0)'], {}), '(f, 0, 0)\n', (7348, 7357), False, 'from jax import jit, vmap, eval_shape\n'), ((9681, 9711), 'jax.numpy.reshape', 'jnp.reshape', (['box_size', '(1, -1)'], {}), '(box_size, (1, -1))\n', (9692, 9711), True, 'import jax.numpy as jnp\n'), ((11390, 11415), 'jax.numpy.ones', 'jnp.ones', (['(N,)', 'jnp.int64'], {}), '((N,), jnp.int64)\n', (11398, 11415), True, 'import jax.numpy as jnp\n'), ((11512, 11565), 'jax.numpy.ones', 'jnp.ones', (['(cell_count * _cell_capacity, 1)'], {'dtype': 'i32'}), '((cell_count * _cell_capacity, 1), dtype=i32)\n', (11520, 11565), True, 'import jax.numpy as jnp\n'), ((13371, 13393), 'jax.lax.iota', 'lax.iota', (['jnp.int64', 'N'], {}), '(jnp.int64, N)\n', (13379, 13393), False, 'from jax import lax\n'), ((13926, 13977), 'jax.ops.index_update', 'ops.index_update', (['cell_kwargs[k]', 'sorted_cell_id', 'v'], {}), '(cell_kwargs[k], sorted_cell_id, v)\n', (13942, 13977), False, 'from jax import ops\n'), ((14400, 14424), 'jax.abstract_arrays.ShapedArray', 'ShapedArray', (['(dim,)', 'f32'], {}), '((dim,), f32)\n', (14411, 14424), False, 'from jax.abstract_arrays import ShapedArray\n'), ((14442, 14487), 'jax.eval_shape', 'eval_shape', (['displacement_or_metric', 'R', 'R'], {'t': '(0)'}), '(displacement_or_metric, R, R, t=0)\n', (14452, 14487), False, 'from jax import jit, vmap, eval_shape\n'), ((24772, 24792), 'numpy.all', 'onp.all', (['(dindex == 0)'], {}), '(dindex == 0)\n', (24779, 24792), True, 'import numpy as onp\n'), ((25120, 25147), 'jax.numpy.reshape', 'jnp.reshape', (['cell_id', '(-1,)'], {}), '(cell_id, (-1,))\n', (25131, 25147), True, 'import jax.numpy as jnp\n'), ((25167, 25221), 'jax.numpy.reshape', 'jnp.reshape', (['cell_value', '((-1,) + cell_value.shape[-2:])'], {}), '(cell_value, (-1,) + cell_value.shape[-2:])\n', (25178, 25221), True, 'import jax.numpy as jnp\n'), ((25235, 25287), 'jax.ops.index_update', 'ops.index_update', (['value', 'scatter_indices', 'cell_value'], {}), '(value, scatter_indices, cell_value)\n', (25251, 25287), False, 'from jax import ops\n'), ((26289, 26319), 'jax.numpy.ones', 
'jnp.ones', (['idx.shape', 'jnp.int32'], {}), '(idx.shape, jnp.int32)\n', (26297, 26319), True, 'import jax.numpy as jnp\n'), ((26431, 26455), 'jax.numpy.arange', 'jnp.arange', (['idx.shape[0]'], {}), '(idx.shape[0])\n', (26441, 26455), True, 'import jax.numpy as jnp\n'), ((27116, 27155), 'jax.numpy.ones', 'jnp.ones', (['receiver_idx.shape', 'jnp.int32'], {}), '(receiver_idx.shape, jnp.int32)\n', (27124, 27155), True, 'import jax.numpy as jnp\n'), ((27399, 27446), 'jax.numpy.stack', 'jnp.stack', (['(receiver_idx[:-1], sender_idx[:-1])'], {}), '((receiver_idx[:-1], sender_idx[:-1]))\n', (27408, 27446), True, 'import jax.numpy as jnp\n'), ((29383, 29437), 'functools.partial', 'partial', (['neighbor_fn'], {'max_occupancy': 'nbrs.max_occupancy'}), '(neighbor_fn, max_occupancy=nbrs.max_occupancy)\n', (29390, 29437), False, 'from functools import reduce, partial\n'), ((31859, 31876), 'jax.numpy.array', 'jnp.array', (['[N, 1]'], {}), '([N, 1])\n', (31868, 31876), True, 'import jax.numpy as jnp\n'), ((32471, 32492), 'jax.numpy.arange', 'jnp.arange', (['max_count'], {}), '(max_count)\n', (32481, 32492), True, 'import jax.numpy as jnp\n'), ((5204, 5231), 'jax.numpy.cumprod', 'jnp.cumprod', (['cells_per_side'], {}), '(cells_per_side)\n', (5215, 5231), True, 'import jax.numpy as jnp\n'), ((5420, 5454), 'numpy.array', 'onp.array', (['dindex'], {'dtype': 'jnp.int64'}), '(dindex, dtype=jnp.int64)\n', (5429, 5454), True, 'import numpy as onp\n'), ((6404, 6433), 'jax_md.util.is_array', 'util.is_array', (['cells_per_side'], {}), '(cells_per_side)\n', (6417, 6433), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((11958, 11974), 'jax_md.util.is_array', 'util.is_array', (['v'], {}), '(v)\n', (11971, 11974), False, 'from jax_md import quantity, space, dataclasses, util\n'), ((12422, 12485), 'jax.numpy.ones', 'jnp.ones', (['((cell_count * _cell_capacity,) + kwarg_shape)', 'v.dtype'], {}), '((cell_count * _cell_capacity,) + kwarg_shape, v.dtype)\n', (12430, 12485), True, 'import jax.numpy as jnp\n'), ((13872, 13902), 'jax.numpy.reshape', 'jnp.reshape', (['v', '(v.shape + (1,))'], {}), '(v, v.shape + (1,))\n', (13883, 13902), True, 'import jax.numpy as jnp\n'), ((24474, 24496), 'jax.numpy.arange', 'jnp.arange', (['R.shape[0]'], {}), '(R.shape[0])\n', (24484, 24496), True, 'import jax.numpy as jnp\n'), ((25944, 25968), 'jax.numpy.arange', 'jnp.arange', (['idx.shape[0]'], {}), '(idx.shape[0])\n', (25954, 25968), True, 'import jax.numpy as jnp\n'), ((26778, 26791), 'jax.numpy.arange', 'jnp.arange', (['N'], {}), '(N)\n', (26788, 26791), True, 'import jax.numpy as jnp\n'), ((28950, 29001), 'jax.numpy.logical_or', 'jnp.logical_or', (['overflow', '(max_occupancy < occupancy)'], {}), '(overflow, max_occupancy < occupancy)\n', (28964, 29001), True, 'import jax.numpy as jnp\n'), ((30485, 30498), 'jax.numpy.arange', 'jnp.arange', (['N'], {}), '(N)\n', (30495, 30498), True, 'import jax.numpy as jnp\n'), ((7403, 7416), 'jax.vmap', 'vmap', (['f', '(0)', '(0)'], {}), '(f, 0, 0)\n', (7407, 7416), False, 'from jax import jit, vmap, eval_shape\n'), ((31902, 31916), 'jax.numpy.sum', 'jnp.sum', (['_mask'], {}), '(_mask)\n', (31909, 31916), True, 'import jax.numpy as jnp\n'), ((31918, 31933), 'jax.numpy.sum', 'jnp.sum', (['(~_mask)'], {}), '(~_mask)\n', (31925, 31933), True, 'import jax.numpy as jnp\n'), ((28667, 28717), 'jax.numpy.ones', 'jnp.ones', (['(idx.shape[0], padding)'], {'dtype': 'idx.dtype'}), '((idx.shape[0], padding), dtype=idx.dtype)\n', (28675, 28717), True, 'import jax.numpy as jnp\n')] |
sebascuri/rhucrl | rhucrl_experiments/evaluate/launch_evaluate_mass.py | 27663e1302f3bbc636dff28495c6f2667bb7c1da | """Run from rhucrl_experiments.evaluate folder."""
import socket
from lsf_runner import init_runner, make_commands
from rhucrl_experiments.evaluate.utilities import ENVIRONMENTS
RARL_DIR = "../../runs/RARLAgent"
ZERO_SUM_DIR = "../../runs/ZeroSumAgent"
SCRIPT = "evaluate_mass_change.py"
EXPERIMENTS = {
"supermodularity": {"algorithm": "RARL_MF", "base-dir": RARL_DIR},
"shallow": {"algorithm": "RHUCRL", "base-dir": ZERO_SUM_DIR},
"greedy": {"algorithm": "RHUCRL", "base-dir": ZERO_SUM_DIR},
"lazy": {"algorithm": "HUCRL", "base-dir": RARL_DIR},
}.get(socket.gethostname(), {"algorithm": "RARL", "base-dir": RARL_DIR})
runner = init_runner("EvaluateMassChange.", num_threads=4)
for seed in [0, 1, 2, 3, 4]:
base_args = {"num-runs": 10, "seed": seed}
base_args.update(**EXPERIMENTS)
commands = make_commands(
SCRIPT, base_args=base_args, common_hyper_args={"environment": ENVIRONMENTS}
)
runner.run_batch(commands)
| [((650, 699), 'lsf_runner.init_runner', 'init_runner', (['"""EvaluateMassChange."""'], {'num_threads': '(4)'}), "('EvaluateMassChange.', num_threads=4)\n", (661, 699), False, 'from lsf_runner import init_runner, make_commands\n'), ((573, 593), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (591, 593), False, 'import socket\n'), ((827, 922), 'lsf_runner.make_commands', 'make_commands', (['SCRIPT'], {'base_args': 'base_args', 'common_hyper_args': "{'environment': ENVIRONMENTS}"}), "(SCRIPT, base_args=base_args, common_hyper_args={'environment':\n ENVIRONMENTS})\n", (840, 922), False, 'from lsf_runner import init_runner, make_commands\n')] |
seukjung/sentry-custom | src/sentry/api/endpoints/project_tags.py | c5f6bb2019aef3caff7f3e2b619f7a70f2b9b963 | from __future__ import absolute_import
import six
from rest_framework.response import Response
from sentry.api.bases.project import ProjectEndpoint
from sentry.models import TagKey, TagKeyStatus
class ProjectTagsEndpoint(ProjectEndpoint):
def get(self, request, project):
tag_keys = TagKey.objects.filter(
project=project,
status=TagKeyStatus.VISIBLE,
)
data = []
for tag_key in tag_keys:
data.append({
'id': six.text_type(tag_key.id),
'key': TagKey.get_standardized_key(tag_key.key),
'name': tag_key.get_label(),
'uniqueValues': tag_key.values_seen,
})
return Response(data)
| [((300, 367), 'sentry.models.TagKey.objects.filter', 'TagKey.objects.filter', ([], {'project': 'project', 'status': 'TagKeyStatus.VISIBLE'}), '(project=project, status=TagKeyStatus.VISIBLE)\n', (321, 367), False, 'from sentry.models import TagKey, TagKeyStatus\n'), ((724, 738), 'rest_framework.response.Response', 'Response', (['data'], {}), '(data)\n', (732, 738), False, 'from rest_framework.response import Response\n'), ((503, 528), 'six.text_type', 'six.text_type', (['tag_key.id'], {}), '(tag_key.id)\n', (516, 528), False, 'import six\n'), ((553, 593), 'sentry.models.TagKey.get_standardized_key', 'TagKey.get_standardized_key', (['tag_key.key'], {}), '(tag_key.key)\n', (580, 593), False, 'from sentry.models import TagKey, TagKeyStatus\n')] |
TensorTom/async-Eel | examples/02 - callbacks/callbacks.py | d6484b6c5c9f89b64f5119d908fcdf29b173bd57 | from __future__ import print_function # For Py2/3 compatibility
import async_eel
import random
import asyncio
loop = asyncio.get_event_loop()
@async_eel.expose
async def py_random():
return random.random()
async def print_num(n):
"""callback of js_random"""
print('Got this from Javascript:', n)
async def main():
try:
async_eel.init('web')
await async_eel.start('callbacks.html', size=(400, 300))
# Call Javascript function, and pass explicit callback function
await async_eel.js_random()(print_num)
# Do the same with an inline callback
await async_eel.js_random()(lambda n: print('2Got this from Javascript:', n))
except Exception:
import traceback
traceback.print_exc()
if __name__ == '__main__':
asyncio.run_coroutine_threadsafe(main(), loop)
loop.run_forever()
| [((119, 143), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (141, 143), False, 'import asyncio\n'), ((198, 213), 'random.random', 'random.random', ([], {}), '()\n', (211, 213), False, 'import random\n'), ((351, 372), 'async_eel.init', 'async_eel.init', (['"""web"""'], {}), "('web')\n", (365, 372), False, 'import async_eel\n'), ((387, 437), 'async_eel.start', 'async_eel.start', (['"""callbacks.html"""'], {'size': '(400, 300)'}), "('callbacks.html', size=(400, 300))\n", (402, 437), False, 'import async_eel\n'), ((746, 767), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (765, 767), False, 'import traceback\n'), ((525, 546), 'async_eel.js_random', 'async_eel.js_random', ([], {}), '()\n', (544, 546), False, 'import async_eel\n'), ((619, 640), 'async_eel.js_random', 'async_eel.js_random', ([], {}), '()\n', (638, 640), False, 'import async_eel\n')] |
AMA-AC/datacube-core | datacube/index/_api.py | 0d2fe0792cb9298cc93d1a97bbb921cfa59d6f2d | # coding=utf-8
"""
Access methods for indexing datasets & products.
"""
import logging
from datacube.config import LocalConfig
from datacube.drivers import index_driver_by_name, index_drivers
from .index import Index
_LOG = logging.getLogger(__name__)
def index_connect(local_config=None, application_name=None, validate_connection=True):
# type: (LocalConfig, str, bool) -> Index
"""
    Create a Data Cube Index that can connect to a PostgreSQL server.
It contains all the required connection parameters, but doesn't actually
check that the server is available.
:param application_name: A short, alphanumeric name to identify this application.
:param datacube.config.LocalConfig local_config: Config object to use. (optional)
:param validate_connection: Validate database connection and schema immediately
:rtype: datacube.index.index.Index
:raises datacube.drivers.postgres._connections.IndexSetupError:
"""
if local_config is None:
local_config = LocalConfig.find()
driver_name = local_config.get('index_driver', 'default')
index_driver = index_driver_by_name(driver_name)
if not index_driver:
raise RuntimeError(
"No index driver found for %r. %s available: %s" % (
driver_name, len(index_drivers()), ', '.join(index_drivers())
)
)
return index_driver.connect_to_index(local_config,
application_name=application_name,
validate_connection=validate_connection)
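# Hypothetical usage sketch (not part of the original module); configuration is
# discovered via LocalConfig.find() as above:
#   index = index_connect(application_name='my-tool')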
| [((227, 254), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (244, 254), False, 'import logging\n'), ((1110, 1143), 'datacube.drivers.index_driver_by_name', 'index_driver_by_name', (['driver_name'], {}), '(driver_name)\n', (1130, 1143), False, 'from datacube.drivers import index_driver_by_name, index_drivers\n'), ((1009, 1027), 'datacube.config.LocalConfig.find', 'LocalConfig.find', ([], {}), '()\n', (1025, 1027), False, 'from datacube.config import LocalConfig\n'), ((1295, 1310), 'datacube.drivers.index_drivers', 'index_drivers', ([], {}), '()\n', (1308, 1310), False, 'from datacube.drivers import index_driver_by_name, index_drivers\n'), ((1323, 1338), 'datacube.drivers.index_drivers', 'index_drivers', ([], {}), '()\n', (1336, 1338), False, 'from datacube.drivers import index_driver_by_name, index_drivers\n')] |
WeilerWebServices/PostgreSQL | pgarchives/loader/load_message.py | ae594ed077bebbad1be3c1d95c38b7c2c2683e8c | #!/usr/bin/env python3
#
# load_message.py - takes a single email or mbox formatted
# file on stdin or in a file and reads it into the database.
#
import os
import sys
from optparse import OptionParser
from configparser import ConfigParser
import psycopg2
from lib.storage import ArchivesParserStorage
from lib.mbox import MailboxBreakupParser
from lib.exception import IgnorableException
from lib.log import log, opstatus
from lib.varnish import VarnishPurger
def log_failed_message(listid, srctype, src, msg, err):
try:
msgid = msg.msgid
except Exception:
msgid = "<unknown>"
log.error("Failed to load message (msgid %s) from %s, spec %s: %s" % (msgid, srctype, src, err))
# We also put the data in the db. This happens in the main transaction
# so if the whole script dies, it goes away...
conn.cursor().execute("INSERT INTO loaderrors (listid, msgid, srctype, src, err) VALUES (%(listid)s, %(msgid)s, %(srctype)s, %(src)s, %(err)s)", {
'listid': listid,
'msgid': msgid,
'srctype': srctype,
'src': src,
        'err': str(err),
})
if __name__ == "__main__":
optparser = OptionParser()
optparser.add_option('-l', '--list', dest='list', help='Name of list to load message for')
optparser.add_option('-d', '--directory', dest='directory', help='Load all messages in directory')
optparser.add_option('-m', '--mbox', dest='mbox', help='Load all messages in mbox')
optparser.add_option('-i', '--interactive', dest='interactive', action='store_true', help='Prompt after each message')
optparser.add_option('-v', '--verbose', dest='verbose', action='store_true', help='Verbose output')
optparser.add_option('--force-date', dest='force_date', help='Override date (used for dates that can\'t be parsed)')
optparser.add_option('--filter-msgid', dest='filter_msgid', help='Only process message with given msgid')
(opt, args) = optparser.parse_args()
if (len(args)):
print("No bare arguments accepted")
optparser.print_usage()
sys.exit(1)
if not opt.list:
print("List must be specified")
optparser.print_usage()
sys.exit(1)
if opt.directory and opt.mbox:
print("Can't specify both directory and mbox!")
optparser.print_usage()
sys.exit(1)
if opt.force_date and (opt.directory or opt.mbox) and not opt.filter_msgid:
print("Can't use force_date with directory or mbox - only individual messages")
optparser.print_usage()
sys.exit(1)
if opt.filter_msgid and not (opt.directory or opt.mbox):
print("filter_msgid makes no sense without directory or mbox!")
optparser.print_usage()
sys.exit(1)
log.set(opt.verbose)
cfg = ConfigParser()
cfg.read('%s/archives.ini' % os.path.realpath(os.path.dirname(sys.argv[0])))
try:
connstr = cfg.get('db', 'connstr')
except Exception:
connstr = 'need_connstr'
conn = psycopg2.connect(connstr)
curs = conn.cursor()
# Take an advisory lock to force serialization.
# We could do this "properly" by reordering operations and using ON CONFLICT,
# but concurrency is not that important and this is easier...
try:
curs.execute("SET statement_timeout='30s'")
curs.execute("SELECT pg_advisory_xact_lock(8059944559669076)")
except Exception as e:
print(("Failed to wait on advisory lock: %s" % e))
sys.exit(1)
# Get the listid we're working on
curs.execute("SELECT listid FROM lists WHERE listname=%(list)s", {
'list': opt.list
})
r = curs.fetchall()
if len(r) != 1:
log.error("List %s not found" % opt.list)
conn.close()
sys.exit(1)
listid = r[0][0]
purges = set()
if opt.directory:
# Parse all files in directory
for x in os.listdir(opt.directory):
log.status("Parsing file %s" % x)
with open(os.path.join(opt.directory, x)) as f:
ap = ArchivesParserStorage()
ap.parse(f)
if opt.filter_msgid and not ap.is_msgid(opt.filter_msgid):
continue
try:
ap.analyze(date_override=opt.force_date)
except IgnorableException as e:
log_failed_message(listid, "directory", os.path.join(opt.directory, x), ap, e)
opstatus.failed += 1
continue
ap.store(conn, listid)
purges.update(ap.purges)
if opt.interactive:
print("Interactive mode, committing transaction")
conn.commit()
print("Proceed to next message with Enter, or input a period (.) to stop processing")
x = input()
if x == '.':
print("Ok, aborting!")
break
print("---------------------------------")
elif opt.mbox:
if not os.path.isfile(opt.mbox):
print("File %s does not exist" % opt.mbox)
sys.exit(1)
mboxparser = MailboxBreakupParser(opt.mbox)
while not mboxparser.EOF:
ap = ArchivesParserStorage()
msg = next(mboxparser)
if not msg:
break
ap.parse(msg)
if opt.filter_msgid and not ap.is_msgid(opt.filter_msgid):
continue
try:
ap.analyze(date_override=opt.force_date)
except IgnorableException as e:
log_failed_message(listid, "mbox", opt.mbox, ap, e)
opstatus.failed += 1
continue
ap.store(conn, listid)
purges.update(ap.purges)
if mboxparser.returncode():
log.error("Failed to parse mbox:")
log.error(mboxparser.stderr_output())
sys.exit(1)
else:
# Parse single message on stdin
ap = ArchivesParserStorage()
ap.parse(sys.stdin.buffer)
try:
ap.analyze(date_override=opt.force_date)
except IgnorableException as e:
log_failed_message(listid, "stdin", "", ap, e)
conn.close()
sys.exit(1)
ap.store(conn, listid)
purges.update(ap.purges)
if opstatus.stored:
log.log("Stored message with message-id %s" % ap.msgid)
conn.commit()
conn.close()
opstatus.print_status()
VarnishPurger(cfg).purge(purges)
| [((612, 712), 'lib.log.log.error', 'log.error', (["('Failed to load message (msgid %s) from %s, spec %s: %s' % (msgid, srctype,\n src, err))"], {}), "('Failed to load message (msgid %s) from %s, spec %s: %s' % (msgid,\n srctype, src, err))\n", (621, 712), False, 'from lib.log import log, opstatus\n'), ((1190, 1204), 'optparse.OptionParser', 'OptionParser', ([], {}), '()\n', (1202, 1204), False, 'from optparse import OptionParser\n'), ((2778, 2798), 'lib.log.log.set', 'log.set', (['opt.verbose'], {}), '(opt.verbose)\n', (2785, 2798), False, 'from lib.log import log, opstatus\n'), ((2810, 2824), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (2822, 2824), False, 'from configparser import ConfigParser\n'), ((3025, 3050), 'psycopg2.connect', 'psycopg2.connect', (['connstr'], {}), '(connstr)\n', (3041, 3050), False, 'import psycopg2\n'), ((6498, 6521), 'lib.log.opstatus.print_status', 'opstatus.print_status', ([], {}), '()\n', (6519, 6521), False, 'from lib.log import log, opstatus\n'), ((2096, 2107), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2104, 2107), False, 'import sys\n'), ((2210, 2221), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2218, 2221), False, 'import sys\n'), ((2354, 2365), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2362, 2365), False, 'import sys\n'), ((2575, 2586), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2583, 2586), False, 'import sys\n'), ((2761, 2772), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2769, 2772), False, 'import sys\n'), ((3709, 3750), 'lib.log.log.error', 'log.error', (["('List %s not found' % opt.list)"], {}), "('List %s not found' % opt.list)\n", (3718, 3750), False, 'from lib.log import log, opstatus\n'), ((3780, 3791), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3788, 3791), False, 'import sys\n'), ((3912, 3937), 'os.listdir', 'os.listdir', (['opt.directory'], {}), '(opt.directory)\n', (3922, 3937), False, 'import os\n'), ((3503, 3514), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3511, 3514), False, 'import sys\n'), ((3951, 3984), 'lib.log.log.status', 'log.status', (["('Parsing file %s' % x)"], {}), "('Parsing file %s' % x)\n", (3961, 3984), False, 'from lib.log import log, opstatus\n'), ((5176, 5206), 'lib.mbox.MailboxBreakupParser', 'MailboxBreakupParser', (['opt.mbox'], {}), '(opt.mbox)\n', (5196, 5206), False, 'from lib.mbox import MailboxBreakupParser\n'), ((6025, 6048), 'lib.storage.ArchivesParserStorage', 'ArchivesParserStorage', ([], {}), '()\n', (6046, 6048), False, 'from lib.storage import ArchivesParserStorage\n'), ((6527, 6545), 'lib.varnish.VarnishPurger', 'VarnishPurger', (['cfg'], {}), '(cfg)\n', (6540, 6545), False, 'from lib.varnish import VarnishPurger\n'), ((2875, 2903), 'os.path.dirname', 'os.path.dirname', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (2890, 2903), False, 'import os\n'), ((4066, 4089), 'lib.storage.ArchivesParserStorage', 'ArchivesParserStorage', ([], {}), '()\n', (4087, 4089), False, 'from lib.storage import ArchivesParserStorage\n'), ((5050, 5074), 'os.path.isfile', 'os.path.isfile', (['opt.mbox'], {}), '(opt.mbox)\n', (5064, 5074), False, 'import os\n'), ((5143, 5154), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5151, 5154), False, 'import sys\n'), ((5258, 5281), 'lib.storage.ArchivesParserStorage', 'ArchivesParserStorage', ([], {}), '()\n', (5279, 5281), False, 'from lib.storage import ArchivesParserStorage\n'), ((5853, 5887), 'lib.log.log.error', 'log.error', (['"""Failed to parse mbox:"""'], {}), "('Failed to parse mbox:')\n", (5862, 5887), False, 'from 
lib.log import log, opstatus\n'), ((5950, 5961), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5958, 5961), False, 'import sys\n'), ((6402, 6457), 'lib.log.log.log', 'log.log', (["('Stored message with message-id %s' % ap.msgid)"], {}), "('Stored message with message-id %s' % ap.msgid)\n", (6409, 6457), False, 'from lib.log import log, opstatus\n'), ((4007, 4037), 'os.path.join', 'os.path.join', (['opt.directory', 'x'], {}), '(opt.directory, x)\n', (4019, 4037), False, 'import os\n'), ((6286, 6297), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6294, 6297), False, 'import sys\n'), ((4412, 4442), 'os.path.join', 'os.path.join', (['opt.directory', 'x'], {}), '(opt.directory, x)\n', (4424, 4442), False, 'import os\n')] |
manson800819/test | shop/migrations/0009_auto_20200310_1430.py | 6df7d92eababe76a54585cb8102a00a6d79ca467 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-03-10 14:30
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('shop', '0008_auto_20200310_1134'),
]
operations = [
migrations.RemoveField(
model_name='category',
name='id',
),
migrations.AlterField(
model_name='category',
name='name',
field=models.CharField(db_index=True, max_length=200, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='product',
name='type1',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='products', to='shop.Type1'),
),
]
| [((331, 387), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""category"""', 'name': '"""id"""'}), "(model_name='category', name='id')\n", (353, 387), False, 'from django.db import migrations, models\n'), ((533, 620), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(200)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=200, primary_key=True, serialize\n =False)\n', (549, 620), False, 'from django.db import migrations, models\n'), ((737, 846), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""products"""', 'to': '"""shop.Type1"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='products', to='shop.Type1')\n", (754, 846), False, 'from django.db import migrations, models\n')] |
jrcai/ACE | lib/dataset/iNaturalist.py | 1e2b04d1cf4bb517f107664ac489a1a96e95a4c1 | from dataset.baseset import BaseSet
import random, cv2
import numpy as np
class iNaturalist(BaseSet):
def __init__(self, mode='train', cfg=None, transform=None):
super(iNaturalist, self).__init__(mode, cfg, transform)
random.seed(0)
self.class_dict = self._get_class_dict()
def __getitem__(self, index):
if self.cfg.TRAIN.SAMPLER.TYPE == "weighted sampler" and self.mode == 'train':
assert self.cfg.TRAIN.SAMPLER.WEIGHTED_SAMPLER.TYPE in ["balance", 'square', 'progressive']
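            # "balance" samples every class with equal probability; "square" and
            # "progressive" draw classes from the precomputed self.square_p /
            # self.progress_p distributions (presumably prepared by BaseSet) to
            # soften the long-tailed class frequencies.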
if self.cfg.TRAIN.SAMPLER.WEIGHTED_SAMPLER.TYPE == "balance":
sample_class = random.randint(0, self.num_classes - 1)
elif self.cfg.TRAIN.SAMPLER.WEIGHTED_SAMPLER.TYPE == "square":
sample_class = np.random.choice(np.arange(self.num_classes), p=self.square_p)
else:
sample_class = np.random.choice(np.arange(self.num_classes), p=self.progress_p)
sample_indexes = self.class_dict[sample_class]
index = random.choice(sample_indexes)
now_info = self.data[index]
img = self._get_image(now_info)
image = self.transform(img)
meta = dict()
image_label = now_info['category_id'] # 0-index
return image, image_label, meta
| [((248, 262), 'random.seed', 'random.seed', (['(0)'], {}), '(0)\n', (259, 262), False, 'import random, cv2\n'), ((1059, 1088), 'random.choice', 'random.choice', (['sample_indexes'], {}), '(sample_indexes)\n', (1072, 1088), False, 'import random, cv2\n'), ((651, 690), 'random.randint', 'random.randint', (['(0)', '(self.num_classes - 1)'], {}), '(0, self.num_classes - 1)\n', (665, 690), False, 'import random, cv2\n'), ((816, 843), 'numpy.arange', 'np.arange', (['self.num_classes'], {}), '(self.num_classes)\n', (825, 843), True, 'import numpy as np\n'), ((930, 957), 'numpy.arange', 'np.arange', (['self.num_classes'], {}), '(self.num_classes)\n', (939, 957), True, 'import numpy as np\n')] |
mattclark/osf.io | tests/test_conferences.py | 7a362ceb6af3393d3d0423aafef336ee13277303 | # -*- coding: utf-8 -*-
import mock
from nose.tools import * # noqa (PEP8 asserts)
import hmac
import hashlib
from StringIO import StringIO
from django.core.exceptions import ValidationError
from django.db import IntegrityError
import furl
from framework.auth import get_or_create_user
from framework.auth.core import Auth
from osf.models import OSFUser, AbstractNode
from addons.wiki.models import WikiVersion
from osf.exceptions import BlacklistedEmailError
from website import settings
from website.conferences import views
from website.conferences import utils, message
from website.util import api_url_for, web_url_for
from tests.base import OsfTestCase, fake
from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory
def assert_absolute(url):
parsed_domain = furl.furl(settings.DOMAIN)
parsed_url = furl.furl(url)
assert_equal(parsed_domain.host, parsed_url.host)
def assert_equal_urls(first, second):
parsed_first = furl.furl(first)
parsed_first.port = None
parsed_second = furl.furl(second)
parsed_second.port = None
assert_equal(parsed_first, parsed_second)
def create_fake_conference_nodes(n, conference):
nodes = []
for i in range(n):
node = ProjectFactory(is_public=True)
conference.submissions.add(node)
node.save()
nodes.append(node)
return nodes
def create_fake_conference_nodes_bad_data(conference, n, bad_n, endpoint):
nodes = []
for i in range(n):
node = ProjectFactory(is_public=True)
conference.submissions.add(node)
# inject bad data
if i < bad_n:
# Delete only contributor
node.contributor_set.filter(user=node.contributors.first()).delete()
node.save()
nodes.append(node)
return nodes
class TestConferenceUtils(OsfTestCase):
def test_get_or_create_user_exists(self):
user = UserFactory()
fetched, created = get_or_create_user(user.fullname, user.username, is_spam=True)
assert_false(created)
assert_equal(user._id, fetched._id)
assert_false('is_spam' in fetched.system_tags)
def test_get_or_create_user_not_exists(self):
fullname = 'Roger Taylor'
username = '[email protected]'
fetched, created = get_or_create_user(fullname, username, is_spam=False)
fetched.save() # in order to access m2m fields, e.g. tags
assert_true(created)
assert_equal(fetched.fullname, fullname)
assert_equal(fetched.username, username)
assert_false('is_spam' in fetched.system_tags)
def test_get_or_create_user_is_spam(self):
fullname = 'John Deacon'
username = '[email protected]'
fetched, created = get_or_create_user(fullname, username, is_spam=True)
fetched.save() # in order to access m2m fields, e.g. tags
assert_true(created)
assert_equal(fetched.fullname, fullname)
assert_equal(fetched.username, username)
assert_true('is_spam' in fetched.system_tags)
def test_get_or_create_user_with_blacklisted_domain(self):
fullname = 'Kanye West'
username = '[email protected]'
with assert_raises(BlacklistedEmailError) as e:
get_or_create_user(fullname, username, is_spam=True)
assert_equal(e.exception.message, 'Invalid Email')
class ContextTestCase(OsfTestCase):
MAILGUN_API_KEY = 'mailkimp'
@classmethod
def setUpClass(cls):
super(ContextTestCase, cls).setUpClass()
settings.MAILGUN_API_KEY, cls._MAILGUN_API_KEY = cls.MAILGUN_API_KEY, settings.MAILGUN_API_KEY
@classmethod
def tearDownClass(cls):
super(ContextTestCase, cls).tearDownClass()
settings.MAILGUN_API_KEY = cls._MAILGUN_API_KEY
def make_context(self, method='POST', **kwargs):
data = {
'X-Mailgun-Sscore': 0,
'timestamp': '123',
'token': 'secret',
'signature': hmac.new(
key=settings.MAILGUN_API_KEY,
msg='{}{}'.format('123', 'secret'),
digestmod=hashlib.sha256,
).hexdigest(),
}
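        # The signature above mirrors Mailgun's webhook signing scheme
        # (HMAC-SHA256 over timestamp + token, keyed with the API key), so
        # ConferenceMessage.verify_signature() accepts the fabricated request.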
data.update(kwargs.pop('data', {}))
data = {
key: value
for key, value in data.items()
if value is not None
}
return self.app.app.test_request_context(method=method, data=data, **kwargs)
class TestProvisionNode(ContextTestCase):
def setUp(self):
super(TestProvisionNode, self).setUp()
self.node = ProjectFactory()
self.user = self.node.creator
self.conference = ConferenceFactory()
self.body = 'dragon on my back'
self.content = 'dragon attack'
self.attachment = StringIO(self.content)
self.recipient = '{0}{1}[email protected]'.format(
'test-' if settings.DEV_MODE else '',
self.conference.endpoint,
)
def make_context(self, **kwargs):
data = {
'attachment-count': '1',
'attachment-1': (self.attachment, 'attachment-1'),
'X-Mailgun-Sscore': 0,
'recipient': self.recipient,
'stripped-text': self.body,
}
data.update(kwargs.pop('data', {}))
return super(TestProvisionNode, self).make_context(data=data, **kwargs)
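    # The tests below post this simulated inbound e-mail: one attachment plus a
    # recipient address derived from the conference endpoint, which is how a
    # message gets routed to a particular conference.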
def test_provision(self):
with self.make_context():
msg = message.ConferenceMessage()
utils.provision_node(self.conference, msg, self.node, self.user)
assert_true(self.node.is_public)
assert_in(self.conference.admins.first(), self.node.contributors)
assert_in('emailed', self.node.system_tags)
assert_in(self.conference.endpoint, self.node.system_tags)
assert self.node in self.conference.submissions.all()
assert_not_in('spam', self.node.system_tags)
def test_provision_private(self):
self.conference.public_projects = False
self.conference.save()
with self.make_context():
msg = message.ConferenceMessage()
utils.provision_node(self.conference, msg, self.node, self.user)
assert_false(self.node.is_public)
assert_in(self.conference.admins.first(), self.node.contributors)
assert_in('emailed', self.node.system_tags)
assert_not_in('spam', self.node.system_tags)
def test_provision_spam(self):
with self.make_context(data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE + 1}):
msg = message.ConferenceMessage()
utils.provision_node(self.conference, msg, self.node, self.user)
assert_false(self.node.is_public)
assert_in(self.conference.admins.first(), self.node.contributors)
assert_in('emailed', self.node.system_tags)
assert_in('spam', self.node.system_tags)
@mock.patch('website.conferences.utils.waterbutler_api_url_for')
@mock.patch('website.conferences.utils.requests.put')
def test_upload(self, mock_put, mock_get_url):
mock_get_url.return_value = 'http://queen.com/'
file_name = 'hammer-to-fall'
self.attachment.filename = file_name
self.attachment.content_type = 'application/json'
utils.upload_attachment(self.user, self.node, self.attachment)
mock_get_url.assert_called_with(
self.node._id,
'osfstorage',
_internal=True,
base_url=self.node.osfstorage_region.waterbutler_url,
cookie=self.user.get_or_create_cookie(),
name=file_name
)
mock_put.assert_called_with(
mock_get_url.return_value,
data=self.content,
)
@mock.patch('website.conferences.utils.waterbutler_api_url_for')
@mock.patch('website.conferences.utils.requests.put')
def test_upload_no_file_name(self, mock_put, mock_get_url):
mock_get_url.return_value = 'http://queen.com/'
self.attachment.filename = ''
self.attachment.content_type = 'application/json'
utils.upload_attachment(self.user, self.node, self.attachment)
mock_get_url.assert_called_with(
self.node._id,
'osfstorage',
_internal=True,
base_url=self.node.osfstorage_region.waterbutler_url,
cookie=self.user.get_or_create_cookie(),
name=settings.MISSING_FILE_NAME,
)
mock_put.assert_called_with(
mock_get_url.return_value,
data=self.content,
)
@mock.patch('website.conferences.utils.upload_attachments')
def test_add_poster_by_email(self, mock_upload_attachments):
conference = ConferenceFactory()
with self.make_context(data={'from': '[email protected]', 'subject': 'It\'s PARTY TIME!'}):
msg = message.ConferenceMessage()
views.add_poster_by_email(conference, msg)
user = OSFUser.objects.get(username='[email protected]')
assert user.email == '[email protected]'
        assert user.fullname == user._id  # users shouldn't be able to use an email as fullname, so the guid is used instead.
class TestMessage(ContextTestCase):
PUSH_CONTEXT = False
def test_verify_signature_valid(self):
with self.make_context():
msg = message.ConferenceMessage()
msg.verify_signature()
def test_verify_signature_invalid(self):
with self.make_context(data={'signature': 'fake'}):
self.app.app.preprocess_request()
msg = message.ConferenceMessage()
with assert_raises(message.ConferenceError):
msg.verify_signature()
def test_is_spam_false_missing_headers(self):
ctx = self.make_context(
method='POST',
data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE - 1},
)
with ctx:
msg = message.ConferenceMessage()
assert not msg.is_spam
def test_is_spam_false_all_headers(self):
ctx = self.make_context(
method='POST',
data={
'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE - 1,
'X-Mailgun-Dkim-Check-Result': message.DKIM_PASS_VALUES[0],
'X-Mailgun-Spf': message.SPF_PASS_VALUES[0],
},
)
with ctx:
msg = message.ConferenceMessage()
assert not msg.is_spam
def test_is_spam_true_sscore(self):
ctx = self.make_context(
method='POST',
data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE + 1},
)
with ctx:
msg = message.ConferenceMessage()
assert msg.is_spam
def test_is_spam_true_dkim(self):
ctx = self.make_context(
method='POST',
data={'X-Mailgun-Dkim-Check-Result': message.DKIM_PASS_VALUES[0][::-1]},
)
with ctx:
msg = message.ConferenceMessage()
assert msg.is_spam
def test_is_spam_true_spf(self):
ctx = self.make_context(
method='POST',
data={'X-Mailgun-Spf': message.SPF_PASS_VALUES[0][::-1]},
)
with ctx:
msg = message.ConferenceMessage()
assert msg.is_spam
def test_subject(self):
ctx = self.make_context(
method='POST',
data={'subject': 'RE: Hip Hopera'},
)
with ctx:
msg = message.ConferenceMessage()
assert_equal(msg.subject, 'Hip Hopera')
def test_recipient(self):
address = '[email protected]'
ctx = self.make_context(
method='POST',
data={'recipient': address},
)
with ctx:
msg = message.ConferenceMessage()
assert_equal(msg.recipient, address)
def test_text(self):
text = 'welcome to my nuclear family'
ctx = self.make_context(
method='POST',
data={'stripped-text': text},
)
with ctx:
msg = message.ConferenceMessage()
assert_equal(msg.text, text)
def test_sender_name(self):
names = [
(' Fred', 'Fred'),
(u'Me䬟', u'Me䬟'),
(u'[email protected]', u'[email protected]'),
(u'Fred <[email protected]>', u'Fred'),
(u'"Fred" <[email protected]>', u'Fred'),
]
for name in names:
with self.make_context(data={'from': name[0]}):
msg = message.ConferenceMessage()
assert_equal(msg.sender_name, name[1])
def test_sender_email(self):
emails = [
(u'[email protected]', u'[email protected]'),
(u'[email protected]', u'[email protected]')
]
for email in emails:
with self.make_context(data={'from': email[0]}):
msg = message.ConferenceMessage()
assert_equal(msg.sender_email, email[1])
def test_route_invalid_pattern(self):
with self.make_context(data={'recipient': '[email protected]'}):
self.app.app.preprocess_request()
msg = message.ConferenceMessage()
with assert_raises(message.ConferenceError):
msg.route
def test_route_invalid_test(self):
recipient = '{0}[email protected]'.format('' if settings.DEV_MODE else 'stage-')
with self.make_context(data={'recipient': recipient}):
self.app.app.preprocess_request()
msg = message.ConferenceMessage()
with assert_raises(message.ConferenceError):
msg.route
def test_route_valid_alternate(self):
conf = ConferenceFactory(endpoint='chocolate', active=True)
conf.name = 'Chocolate Conference'
conf.field_names['submission2'] = 'data'
conf.save()
recipient = '{0}[email protected]'.format('test-' if settings.DEV_MODE else '')
with self.make_context(data={'recipient': recipient}):
self.app.app.preprocess_request()
msg = message.ConferenceMessage()
assert_equal(msg.conference_name, 'chocolate')
assert_equal(msg.conference_category, 'data')
conf.__class__.delete(conf)
def test_route_valid_b(self):
recipient = '{0}[email protected]'.format('test-' if settings.DEV_MODE else '')
with self.make_context(data={'recipient': recipient}):
self.app.app.preprocess_request()
msg = message.ConferenceMessage()
assert_equal(msg.conference_name, 'conf')
assert_equal(msg.conference_category, 'poster')
def test_alternate_route_invalid(self):
recipient = '{0}[email protected]'.format('test-' if settings.DEV_MODE else '')
with self.make_context(data={'recipient': recipient}):
self.app.app.preprocess_request()
msg = message.ConferenceMessage()
with assert_raises(message.ConferenceError):
msg.route
def test_attachments_count_zero(self):
with self.make_context(data={'attachment-count': '0'}):
msg = message.ConferenceMessage()
assert_equal(msg.attachments, [])
def test_attachments_count_one(self):
content = 'slightly mad'
sio = StringIO(content)
ctx = self.make_context(
method='POST',
data={
'attachment-count': 1,
'attachment-1': (sio, 'attachment-1'),
},
)
with ctx:
msg = message.ConferenceMessage()
assert_equal(len(msg.attachments), 1)
assert_equal(msg.attachments[0].read(), content)
class TestConferenceEmailViews(OsfTestCase):
def test_redirect_to_meetings_url(self):
url = '/presentations/'
res = self.app.get(url)
assert_equal(res.status_code, 302)
res = res.follow()
assert_equal(res.request.path, '/meetings/')
def test_conference_submissions(self):
AbstractNode.objects.all().delete()
conference1 = ConferenceFactory()
conference2 = ConferenceFactory()
# Create conference nodes
create_fake_conference_nodes(
3,
conference1,
)
create_fake_conference_nodes(
2,
conference2,
)
url = api_url_for('conference_submissions')
res = self.app.get(url)
assert_equal(res.json['success'], True)
def test_conference_plain_returns_200(self):
conference = ConferenceFactory()
url = web_url_for('conference_results__plain', meeting=conference.endpoint)
res = self.app.get(url)
assert_equal(res.status_code, 200)
def test_conference_data(self):
conference = ConferenceFactory()
# Create conference nodes
n_conference_nodes = 3
create_fake_conference_nodes(
n_conference_nodes,
conference,
)
# Create a non-conference node
ProjectFactory()
url = api_url_for('conference_data', meeting=conference.endpoint)
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(len(res.json), n_conference_nodes)
    # Regression test for OSF-8864: confirm that bad project data does not break the whole conference view
def test_conference_bad_data(self):
conference = ConferenceFactory()
# Create conference nodes
n_conference_nodes = 3
n_conference_nodes_bad = 1
create_fake_conference_nodes_bad_data(
conference,
n_conference_nodes,
n_conference_nodes_bad,
conference,
)
# Create a non-conference node
ProjectFactory()
url = api_url_for('conference_data', meeting=conference.endpoint)
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(len(res.json), n_conference_nodes - n_conference_nodes_bad)
def test_conference_data_url_upper(self):
conference = ConferenceFactory()
# Create conference nodes
n_conference_nodes = 3
create_fake_conference_nodes(
n_conference_nodes,
conference,
)
# Create a non-conference node
ProjectFactory()
url = api_url_for('conference_data', meeting=conference.endpoint.upper())
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(len(res.json), n_conference_nodes)
def test_conference_data_tag_upper(self):
conference = ConferenceFactory()
# Create conference nodes
n_conference_nodes = 3
create_fake_conference_nodes(
n_conference_nodes,
conference,
)
# Create a non-conference node
ProjectFactory()
url = api_url_for('conference_data', meeting=conference.endpoint)
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(len(res.json), n_conference_nodes)
def test_conference_results(self):
conference = ConferenceFactory()
url = web_url_for('conference_results', meeting=conference.endpoint)
res = self.app.get(url)
assert_equal(res.status_code, 200)
def test_confererence_results_endpoint_is_case_insensitive(self):
ConferenceFactory(endpoint='StudySwap')
url = web_url_for('conference_results', meeting='studyswap')
res = self.app.get(url)
assert_equal(res.status_code, 200)
class TestConferenceModel(OsfTestCase):
def test_endpoint_is_required(self):
with assert_raises(IntegrityError):
ConferenceFactory(endpoint=None, name=fake.company()).save()
def test_name_is_required(self):
with assert_raises(IntegrityError):
ConferenceFactory(endpoint='spsp2014', name=None).save()
def test_default_field_names(self):
conf = ConferenceFactory(endpoint='cookie', name='Cookies Conference')
conf.save()
assert_equal(conf.field_names['submission1'], 'poster')
assert_equal(conf.field_names['mail_subject'], 'Presentation title')
def test_conference_valid_submissions(self):
conf = ConferenceFactory(endpoint='Hamburgers', name='Hamburger conference')
conf.save()
# 3 good nodes added
create_fake_conference_nodes(3, conf)
# Deleted node added
deleted_node = ProjectFactory(is_public=True)
deleted_node.is_deleted = True
deleted_node.save()
conf.submissions.add(deleted_node)
# Private node added
private_node = ProjectFactory(is_public=False)
conf.submissions.add(private_node)
assert_equal(conf.submissions.count(), 5)
assert_equal(conf.valid_submissions.count(), 3)
class TestConferenceIntegration(ContextTestCase):
@mock.patch('website.conferences.views.send_mail')
@mock.patch('website.conferences.utils.upload_attachments')
def test_integration(self, mock_upload, mock_send_mail):
fullname = 'John Deacon'
username = '[email protected]'
title = 'good songs'
conference = ConferenceFactory()
body = 'dragon on my back'
content = 'dragon attack'
recipient = '{0}{1}[email protected]'.format(
'test-' if settings.DEV_MODE else '',
conference.endpoint,
)
self.app.post(
api_url_for('meeting_hook'),
{
'X-Mailgun-Sscore': 0,
'timestamp': '123',
'token': 'secret',
'signature': hmac.new(
key=settings.MAILGUN_API_KEY,
msg='{}{}'.format('123', 'secret'),
digestmod=hashlib.sha256,
).hexdigest(),
'attachment-count': '1',
'X-Mailgun-Sscore': 0,
'from': '{0} <{1}>'.format(fullname, username),
'recipient': recipient,
'subject': title,
'stripped-text': body,
},
upload_files=[
('attachment-1', 'attachment-1', content),
],
)
assert_true(mock_upload.called)
users = OSFUser.objects.filter(username=username)
assert_equal(users.count(), 1)
nodes = AbstractNode.objects.filter(title=title)
assert_equal(nodes.count(), 1)
node = nodes[0]
assert_equal(WikiVersion.objects.get_for_node(node, 'home').content, body)
assert_true(mock_send_mail.called)
call_args, call_kwargs = mock_send_mail.call_args
assert_absolute(call_kwargs['conf_view_url'])
assert_absolute(call_kwargs['set_password_url'])
assert_absolute(call_kwargs['profile_url'])
assert_absolute(call_kwargs['file_url'])
assert_absolute(call_kwargs['node_url'])
@mock.patch('website.conferences.views.send_mail')
def test_integration_inactive(self, mock_send_mail):
conference = ConferenceFactory(active=False)
fullname = 'John Deacon'
username = '[email protected]'
title = 'good songs'
body = 'dragon on my back'
recipient = '{0}{1}[email protected]'.format(
'test-' if settings.DEV_MODE else '',
conference.endpoint,
)
res = self.app.post(
api_url_for('meeting_hook'),
{
'X-Mailgun-Sscore': 0,
'timestamp': '123',
'token': 'secret',
'signature': hmac.new(
key=settings.MAILGUN_API_KEY,
msg='{}{}'.format('123', 'secret'),
digestmod=hashlib.sha256,
).hexdigest(),
'attachment-count': '1',
'X-Mailgun-Sscore': 0,
'from': '{0} <{1}>'.format(fullname, username),
'recipient': recipient,
'subject': title,
'stripped-text': body,
},
expect_errors=True,
)
assert_equal(res.status_code, 406)
call_args, call_kwargs = mock_send_mail.call_args
assert_equal(call_args, (username, views.CONFERENCE_INACTIVE))
assert_equal(call_kwargs['fullname'], fullname)
assert_equal_urls(
call_kwargs['presentations_url'],
web_url_for('conference_view', _absolute=True),
)
@mock.patch('website.conferences.views.send_mail')
@mock.patch('website.conferences.utils.upload_attachments')
def test_integration_wo_full_name(self, mock_upload, mock_send_mail):
username = '[email protected]'
title = 'no full name only email'
conference = ConferenceFactory()
body = 'dragon on my back'
content = 'dragon attack'
recipient = '{0}{1}[email protected]'.format(
'test-' if settings.DEV_MODE else '',
conference.endpoint,
)
self.app.post(
api_url_for('meeting_hook'),
{
'X-Mailgun-Sscore': 0,
'timestamp': '123',
'token': 'secret',
'signature': hmac.new(
key=settings.MAILGUN_API_KEY,
msg='{}{}'.format('123', 'secret'),
digestmod=hashlib.sha256,
).hexdigest(),
'attachment-count': '1',
'X-Mailgun-Sscore': 0,
'from': username,
'recipient': recipient,
'subject': title,
'stripped-text': body,
},
upload_files=[
('attachment-1', 'attachment-1', content),
],
)
assert_true(mock_upload.called)
users = OSFUser.objects.filter(username=username)
assert_equal(users.count(), 1)
nodes = AbstractNode.objects.filter(title=title)
assert_equal(nodes.count(), 1)
node = nodes[0]
assert_equal(WikiVersion.objects.get_for_node(node, 'home').content, body)
assert_true(mock_send_mail.called)
call_args, call_kwargs = mock_send_mail.call_args
assert_absolute(call_kwargs['conf_view_url'])
assert_absolute(call_kwargs['set_password_url'])
assert_absolute(call_kwargs['profile_url'])
assert_absolute(call_kwargs['file_url'])
assert_absolute(call_kwargs['node_url'])
@mock.patch('website.conferences.views.send_mail')
@mock.patch('website.conferences.utils.upload_attachments')
def test_create_conference_node_with_same_name_as_existing_node(self, mock_upload, mock_send_mail):
conference = ConferenceFactory()
user = UserFactory()
title = 'Long Live Greg'
ProjectFactory(creator=user, title=title)
body = 'Greg is a good plant'
content = 'Long may they reign.'
recipient = '{0}{1}[email protected]'.format(
'test-' if settings.DEV_MODE else '',
conference.endpoint,
)
self.app.post(
api_url_for('meeting_hook'),
{
'X-Mailgun-Sscore': 0,
'timestamp': '123',
'token': 'secret',
'signature': hmac.new(
key=settings.MAILGUN_API_KEY,
msg='{}{}'.format('123', 'secret'),
digestmod=hashlib.sha256,
).hexdigest(),
'attachment-count': '1',
'X-Mailgun-Sscore': 0,
'from': '{0} <{1}>'.format(user.fullname, user.username),
'recipient': recipient,
'subject': title,
'stripped-text': body,
},
upload_files=[
('attachment-1', 'attachment-1', content),
],
)
assert AbstractNode.objects.filter(title=title, creator=user).count() == 2
assert mock_upload.called
assert mock_send_mail.called
| [((799, 825), 'furl.furl', 'furl.furl', (['settings.DOMAIN'], {}), '(settings.DOMAIN)\n', (808, 825), False, 'import furl\n'), ((843, 857), 'furl.furl', 'furl.furl', (['url'], {}), '(url)\n', (852, 857), False, 'import furl\n'), ((971, 987), 'furl.furl', 'furl.furl', (['first'], {}), '(first)\n', (980, 987), False, 'import furl\n'), ((1037, 1054), 'furl.furl', 'furl.furl', (['second'], {}), '(second)\n', (1046, 1054), False, 'import furl\n'), ((6839, 6902), 'mock.patch', 'mock.patch', (['"""website.conferences.utils.waterbutler_api_url_for"""'], {}), "('website.conferences.utils.waterbutler_api_url_for')\n", (6849, 6902), False, 'import mock\n'), ((6908, 6960), 'mock.patch', 'mock.patch', (['"""website.conferences.utils.requests.put"""'], {}), "('website.conferences.utils.requests.put')\n", (6918, 6960), False, 'import mock\n'), ((7680, 7743), 'mock.patch', 'mock.patch', (['"""website.conferences.utils.waterbutler_api_url_for"""'], {}), "('website.conferences.utils.waterbutler_api_url_for')\n", (7690, 7743), False, 'import mock\n'), ((7749, 7801), 'mock.patch', 'mock.patch', (['"""website.conferences.utils.requests.put"""'], {}), "('website.conferences.utils.requests.put')\n", (7759, 7801), False, 'import mock\n'), ((8508, 8566), 'mock.patch', 'mock.patch', (['"""website.conferences.utils.upload_attachments"""'], {}), "('website.conferences.utils.upload_attachments')\n", (8518, 8566), False, 'import mock\n'), ((20837, 20886), 'mock.patch', 'mock.patch', (['"""website.conferences.views.send_mail"""'], {}), "('website.conferences.views.send_mail')\n", (20847, 20886), False, 'import mock\n'), ((20892, 20950), 'mock.patch', 'mock.patch', (['"""website.conferences.utils.upload_attachments"""'], {}), "('website.conferences.utils.upload_attachments')\n", (20902, 20950), False, 'import mock\n'), ((22867, 22916), 'mock.patch', 'mock.patch', (['"""website.conferences.views.send_mail"""'], {}), "('website.conferences.views.send_mail')\n", (22877, 22916), False, 'import mock\n'), ((24413, 24462), 'mock.patch', 'mock.patch', (['"""website.conferences.views.send_mail"""'], {}), "('website.conferences.views.send_mail')\n", (24423, 24462), False, 'import mock\n'), ((24468, 24526), 'mock.patch', 'mock.patch', (['"""website.conferences.utils.upload_attachments"""'], {}), "('website.conferences.utils.upload_attachments')\n", (24478, 24526), False, 'import mock\n'), ((26411, 26460), 'mock.patch', 'mock.patch', (['"""website.conferences.views.send_mail"""'], {}), "('website.conferences.views.send_mail')\n", (26421, 26460), False, 'import mock\n'), ((26466, 26524), 'mock.patch', 'mock.patch', (['"""website.conferences.utils.upload_attachments"""'], {}), "('website.conferences.utils.upload_attachments')\n", (26476, 26524), False, 'import mock\n'), ((1235, 1265), 'osf_tests.factories.ProjectFactory', 'ProjectFactory', ([], {'is_public': '(True)'}), '(is_public=True)\n', (1249, 1265), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((1500, 1530), 'osf_tests.factories.ProjectFactory', 'ProjectFactory', ([], {'is_public': '(True)'}), '(is_public=True)\n', (1514, 1530), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((1906, 1919), 'osf_tests.factories.UserFactory', 'UserFactory', ([], {}), '()\n', (1917, 1919), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((1947, 2009), 'framework.auth.get_or_create_user', 'get_or_create_user', (['user.fullname', 'user.username'], {'is_spam': 
'(True)'}), '(user.fullname, user.username, is_spam=True)\n', (1965, 2009), False, 'from framework.auth import get_or_create_user\n'), ((2288, 2341), 'framework.auth.get_or_create_user', 'get_or_create_user', (['fullname', 'username'], {'is_spam': '(False)'}), '(fullname, username, is_spam=False)\n', (2306, 2341), False, 'from framework.auth import get_or_create_user\n'), ((2737, 2789), 'framework.auth.get_or_create_user', 'get_or_create_user', (['fullname', 'username'], {'is_spam': '(True)'}), '(fullname, username, is_spam=True)\n', (2755, 2789), False, 'from framework.auth import get_or_create_user\n'), ((4546, 4562), 'osf_tests.factories.ProjectFactory', 'ProjectFactory', ([], {}), '()\n', (4560, 4562), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((4627, 4646), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {}), '()\n', (4644, 4646), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((4752, 4774), 'StringIO.StringIO', 'StringIO', (['self.content'], {}), '(self.content)\n', (4760, 4774), False, 'from StringIO import StringIO\n'), ((7216, 7278), 'website.conferences.utils.upload_attachment', 'utils.upload_attachment', (['self.user', 'self.node', 'self.attachment'], {}), '(self.user, self.node, self.attachment)\n', (7239, 7278), False, 'from website.conferences import utils, message\n'), ((8026, 8088), 'website.conferences.utils.upload_attachment', 'utils.upload_attachment', (['self.user', 'self.node', 'self.attachment'], {}), '(self.user, self.node, self.attachment)\n', (8049, 8088), False, 'from website.conferences import utils, message\n'), ((8653, 8672), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {}), '()\n', (8670, 8672), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((8894, 8946), 'osf.models.OSFUser.objects.get', 'OSFUser.objects.get', ([], {'username': '"""[email protected]"""'}), "(username='[email protected]')\n", (8913, 8946), False, 'from osf.models import OSFUser, AbstractNode\n'), ((13602, 13654), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {'endpoint': '"""chocolate"""', 'active': '(True)'}), "(endpoint='chocolate', active=True)\n", (13619, 13654), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((15225, 15242), 'StringIO.StringIO', 'StringIO', (['content'], {}), '(content)\n', (15233, 15242), False, 'from StringIO import StringIO\n'), ((16006, 16025), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {}), '()\n', (16023, 16025), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((16048, 16067), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {}), '()\n', (16065, 16067), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((16293, 16330), 'website.util.api_url_for', 'api_url_for', (['"""conference_submissions"""'], {}), "('conference_submissions')\n", (16304, 16330), False, 'from website.util import api_url_for, web_url_for\n'), ((16482, 16501), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {}), '()\n', (16499, 16501), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((16516, 16585), 'website.util.web_url_for', 'web_url_for', (['"""conference_results__plain"""'], {'meeting': 'conference.endpoint'}), "('conference_results__plain', 
meeting=conference.endpoint)\n", (16527, 16585), False, 'from website.util import api_url_for, web_url_for\n'), ((16719, 16738), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {}), '()\n', (16736, 16738), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((16956, 16972), 'osf_tests.factories.ProjectFactory', 'ProjectFactory', ([], {}), '()\n', (16970, 16972), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((16988, 17047), 'website.util.api_url_for', 'api_url_for', (['"""conference_data"""'], {'meeting': 'conference.endpoint'}), "('conference_data', meeting=conference.endpoint)\n", (16999, 17047), False, 'from website.util import api_url_for, web_url_for\n'), ((17336, 17355), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {}), '()\n', (17353, 17355), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((17677, 17693), 'osf_tests.factories.ProjectFactory', 'ProjectFactory', ([], {}), '()\n', (17691, 17693), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((17709, 17768), 'website.util.api_url_for', 'api_url_for', (['"""conference_data"""'], {'meeting': 'conference.endpoint'}), "('conference_data', meeting=conference.endpoint)\n", (17720, 17768), False, 'from website.util import api_url_for, web_url_for\n'), ((17993, 18012), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {}), '()\n', (18010, 18012), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((18230, 18246), 'osf_tests.factories.ProjectFactory', 'ProjectFactory', ([], {}), '()\n', (18244, 18246), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((18529, 18548), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {}), '()\n', (18546, 18548), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((18766, 18782), 'osf_tests.factories.ProjectFactory', 'ProjectFactory', ([], {}), '()\n', (18780, 18782), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((18798, 18857), 'website.util.api_url_for', 'api_url_for', (['"""conference_data"""'], {'meeting': 'conference.endpoint'}), "('conference_data', meeting=conference.endpoint)\n", (18809, 18857), False, 'from website.util import api_url_for, web_url_for\n'), ((19050, 19069), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {}), '()\n', (19067, 19069), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((19085, 19147), 'website.util.web_url_for', 'web_url_for', (['"""conference_results"""'], {'meeting': 'conference.endpoint'}), "('conference_results', meeting=conference.endpoint)\n", (19096, 19147), False, 'from website.util import api_url_for, web_url_for\n'), ((19302, 19341), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {'endpoint': '"""StudySwap"""'}), "(endpoint='StudySwap')\n", (19319, 19341), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((19356, 19410), 'website.util.web_url_for', 'web_url_for', (['"""conference_results"""'], {'meeting': '"""studyswap"""'}), "('conference_results', meeting='studyswap')\n", (19367, 19410), False, 'from website.util import api_url_for, web_url_for\n'), ((19894, 19957), 'osf_tests.factories.ConferenceFactory', 
'ConferenceFactory', ([], {'endpoint': '"""cookie"""', 'name': '"""Cookies Conference"""'}), "(endpoint='cookie', name='Cookies Conference')\n", (19911, 19957), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((20184, 20253), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {'endpoint': '"""Hamburgers"""', 'name': '"""Hamburger conference"""'}), "(endpoint='Hamburgers', name='Hamburger conference')\n", (20201, 20253), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((20403, 20433), 'osf_tests.factories.ProjectFactory', 'ProjectFactory', ([], {'is_public': '(True)'}), '(is_public=True)\n', (20417, 20433), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((20597, 20628), 'osf_tests.factories.ProjectFactory', 'ProjectFactory', ([], {'is_public': '(False)'}), '(is_public=False)\n', (20611, 20628), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((21133, 21152), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {}), '()\n', (21150, 21152), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((22215, 22256), 'osf.models.OSFUser.objects.filter', 'OSFUser.objects.filter', ([], {'username': 'username'}), '(username=username)\n', (22237, 22256), False, 'from osf.models import OSFUser, AbstractNode\n'), ((22312, 22352), 'osf.models.AbstractNode.objects.filter', 'AbstractNode.objects.filter', ([], {'title': 'title'}), '(title=title)\n', (22339, 22352), False, 'from osf.models import OSFUser, AbstractNode\n'), ((22995, 23026), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {'active': '(False)'}), '(active=False)\n', (23012, 23026), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((24707, 24726), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {}), '()\n', (24724, 24726), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((25759, 25800), 'osf.models.OSFUser.objects.filter', 'OSFUser.objects.filter', ([], {'username': 'username'}), '(username=username)\n', (25781, 25800), False, 'from osf.models import OSFUser, AbstractNode\n'), ((25856, 25896), 'osf.models.AbstractNode.objects.filter', 'AbstractNode.objects.filter', ([], {'title': 'title'}), '(title=title)\n', (25883, 25896), False, 'from osf.models import OSFUser, AbstractNode\n'), ((26650, 26669), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {}), '()\n', (26667, 26669), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((26685, 26698), 'osf_tests.factories.UserFactory', 'UserFactory', ([], {}), '()\n', (26696, 26698), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((26740, 26781), 'osf_tests.factories.ProjectFactory', 'ProjectFactory', ([], {'creator': 'user', 'title': 'title'}), '(creator=user, title=title)\n', (26754, 26781), False, 'from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory\n'), ((3244, 3296), 'framework.auth.get_or_create_user', 'get_or_create_user', (['fullname', 'username'], {'is_spam': '(True)'}), '(fullname, username, is_spam=True)\n', (3262, 3296), False, 'from framework.auth import get_or_create_user\n'), ((5418, 5445), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (5443, 5445), 
False, 'from website.conferences import utils, message\n'), ((5458, 5522), 'website.conferences.utils.provision_node', 'utils.provision_node', (['self.conference', 'msg', 'self.node', 'self.user'], {}), '(self.conference, msg, self.node, self.user)\n', (5478, 5522), False, 'from website.conferences import utils, message\n'), ((6042, 6069), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (6067, 6069), False, 'from website.conferences import utils, message\n'), ((6082, 6146), 'website.conferences.utils.provision_node', 'utils.provision_node', (['self.conference', 'msg', 'self.node', 'self.user'], {}), '(self.conference, msg, self.node, self.user)\n', (6102, 6146), False, 'from website.conferences import utils, message\n'), ((6511, 6538), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (6536, 6538), False, 'from website.conferences import utils, message\n'), ((6551, 6615), 'website.conferences.utils.provision_node', 'utils.provision_node', (['self.conference', 'msg', 'self.node', 'self.user'], {}), '(self.conference, msg, self.node, self.user)\n', (6571, 6615), False, 'from website.conferences import utils, message\n'), ((8795, 8822), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (8820, 8822), False, 'from website.conferences import utils, message\n'), ((8835, 8877), 'website.conferences.views.add_poster_by_email', 'views.add_poster_by_email', (['conference', 'msg'], {}), '(conference, msg)\n', (8860, 8877), False, 'from website.conferences import views\n'), ((9274, 9301), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (9299, 9301), False, 'from website.conferences import utils, message\n'), ((9507, 9534), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (9532, 9534), False, 'from website.conferences import utils, message\n'), ((9857, 9884), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (9882, 9884), False, 'from website.conferences import utils, message\n'), ((10310, 10337), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (10335, 10337), False, 'from website.conferences import utils, message\n'), ((10589, 10616), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (10614, 10616), False, 'from website.conferences import utils, message\n'), ((10878, 10905), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (10903, 10905), False, 'from website.conferences import utils, message\n'), ((11151, 11178), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (11176, 11178), False, 'from website.conferences import utils, message\n'), ((11393, 11420), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (11418, 11420), False, 'from website.conferences import utils, message\n'), ((11694, 11721), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (11719, 11721), False, 'from website.conferences import utils, message\n'), ((11991, 12018), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (12016, 12018), False, 'from website.conferences import utils, message\n'), ((13067, 13094), 
'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (13092, 13094), False, 'from website.conferences import utils, message\n'), ((13433, 13460), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (13458, 13460), False, 'from website.conferences import utils, message\n'), ((13986, 14013), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (14011, 14013), False, 'from website.conferences import utils, message\n'), ((14418, 14445), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (14443, 14445), False, 'from website.conferences import utils, message\n'), ((14824, 14851), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (14849, 14851), False, 'from website.conferences import utils, message\n'), ((15061, 15088), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (15086, 15088), False, 'from website.conferences import utils, message\n'), ((15477, 15504), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (15502, 15504), False, 'from website.conferences import utils, message\n'), ((21401, 21428), 'website.util.api_url_for', 'api_url_for', (['"""meeting_hook"""'], {}), "('meeting_hook')\n", (21412, 21428), False, 'from website.util import api_url_for, web_url_for\n'), ((23347, 23374), 'website.util.api_url_for', 'api_url_for', (['"""meeting_hook"""'], {}), "('meeting_hook')\n", (23358, 23374), False, 'from website.util import api_url_for, web_url_for\n'), ((24349, 24395), 'website.util.web_url_for', 'web_url_for', (['"""conference_view"""'], {'_absolute': '(True)'}), "('conference_view', _absolute=True)\n", (24360, 24395), False, 'from website.util import api_url_for, web_url_for\n'), ((24975, 25002), 'website.util.api_url_for', 'api_url_for', (['"""meeting_hook"""'], {}), "('meeting_hook')\n", (24986, 25002), False, 'from website.util import api_url_for, web_url_for\n'), ((27041, 27068), 'website.util.api_url_for', 'api_url_for', (['"""meeting_hook"""'], {}), "('meeting_hook')\n", (27052, 27068), False, 'from website.util import api_url_for, web_url_for\n'), ((12447, 12474), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (12472, 12474), False, 'from website.conferences import utils, message\n'), ((12808, 12835), 'website.conferences.message.ConferenceMessage', 'message.ConferenceMessage', ([], {}), '()\n', (12833, 12835), False, 'from website.conferences import utils, message\n'), ((15948, 15974), 'osf.models.AbstractNode.objects.all', 'AbstractNode.objects.all', ([], {}), '()\n', (15972, 15974), False, 'from osf.models import OSFUser, AbstractNode\n'), ((22437, 22483), 'addons.wiki.models.WikiVersion.objects.get_for_node', 'WikiVersion.objects.get_for_node', (['node', '"""home"""'], {}), "(node, 'home')\n", (22469, 22483), False, 'from addons.wiki.models import WikiVersion\n'), ((25981, 26027), 'addons.wiki.models.WikiVersion.objects.get_for_node', 'WikiVersion.objects.get_for_node', (['node', '"""home"""'], {}), "(node, 'home')\n", (26013, 26027), False, 'from addons.wiki.models import WikiVersion\n'), ((19781, 19830), 'osf_tests.factories.ConferenceFactory', 'ConferenceFactory', ([], {'endpoint': '"""spsp2014"""', 'name': 'None'}), "(endpoint='spsp2014', name=None)\n", (19798, 19830), False, 'from osf_tests.factories import 
ConferenceFactory, ProjectFactory, UserFactory\n'), ((27825, 27879), 'osf.models.AbstractNode.objects.filter', 'AbstractNode.objects.filter', ([], {'title': 'title', 'creator': 'user'}), '(title=title, creator=user)\n', (27852, 27879), False, 'from osf.models import OSFUser, AbstractNode\n'), ((19664, 19678), 'tests.base.fake.company', 'fake.company', ([], {}), '()\n', (19676, 19678), False, 'from tests.base import OsfTestCase, fake\n')] |
innovationgarage/socket-tentacles | socket_tentacles/__init__.py | 1cfbf7649017493fafacfcbc96cd05f3c4c5d6b6 | import socketserver
import socket
import sys
import threading
import json
import queue
import time
import datetime
import traceback
class TCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
def server_bind(self):
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind(self.server_address)
class Listener(threading.Thread):
def run(self):
kwargs = self._kwargs
print("Listener: Started: %s" % kwargs)
Handler = self._kwargs["handler"]
server = self._kwargs["server"]
class Server(socketserver.BaseRequestHandler):
def handle(self):
print("Listener: Connection request received: %s" % kwargs)
Handler(server, self.request)
self.server = TCPServer((kwargs["host"], kwargs["port"]), Server)
self.server.serve_forever()
def stop(self):
self.server.shutdown()
self.server.server_close()
class Connector(threading.Thread):
def __init__(self, *arg, **kw):
self.is_stopping = False
threading.Thread.__init__(self, *arg, **kw)
def run(self):
print("Connector: Started: %s" % self._kwargs)
while not self.is_stopping:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
try:
sock.connect((self._kwargs["host"], self._kwargs["port"]))
print("Connector: Connected: %s" % self._kwargs)
self._kwargs["handler"](self._kwargs["server"], sock)
except Exception as e:
print(e)
traceback.print_exc()
finally:
sock.close()
time.sleep(1)
def stop(self):
self.is_stopping = True
class Handler(object):
encoding = "utf-8"
binary = False
filemode = "r"
def __init__(self, server, conn):
self.server = server
self.conn = conn
self.makefile()
self.handle()
def makefile(self):
args = {"mode": self.filemode + ["", "b"][self.binary]}
if not self.binary:
args["encoding"] = self.encoding
self.file = self.conn.makefile(**args)
def handle(self):
"""self.conn is a socket object, self.file a file wrapper for that
socket"""
def __hash__(self):
return id(self)
class ReceiveHandler(Handler):
filemode = "r"
class SendHandler(Handler):
filemode = "w"
class Server(object):
def __init__(self, handlers):
self.handlers = handlers
self.config = None
self.servers = {}
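    # configure() reconciles running listeners/connectors with the desired
    # config: connections new to the config are started, connections that have
    # disappeared from it are stopped. Keys are the JSON-serialized connection
    # dicts, so any change to a connection's settings recreates it.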
def configure(self, config):
self.config = config
connections = {self.connection_key(connection): connection for connection in config["connections"]}
to_create = connections.keys() - self.servers.keys()
to_destroy = self.servers.keys() - connections.keys()
for key in to_create:
server = self.start_connection(connections[key])
server.start()
self.servers[key] = server
for key in to_destroy:
server = self.servers.pop(key)
server.stop()
def connection_key(self, connection):
return json.dumps(connection, sort_keys=True, separators=(',', ':'))
def start_connection(self, connection):
handler = self.handlers[connection["handler"]]
addr = connection["address"].split(":")
assert addr[0] == "tcp"
host = "0.0.0.0"
port = 1024
if len(addr) == 2:
port = addr[1]
if len(addr) == 3:
host, port = addr[1:]
port = int(port)
connhandler = {"listen": Listener, "connect": Connector}[connection["type"]]
return connhandler(kwargs={"server": self, "host": host, "port": port, "handler": handler})
def run(config, handlers):
server = Server(handlers)
server.configure(config)
return server
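# Minimal usage sketch (illustrative only, not part of the original module;
# EchoReceiver and the port are made up): a handler class plus a config dict is
# all run() needs.
#
#   class EchoReceiver(ReceiveHandler):
#       def handle(self):
#           for line in self.file:
#               print("received:", line.rstrip())
#
#   server = run(
#       {"connections": [{"type": "listen",
#                         "address": "tcp:0.0.0.0:9000",
#                         "handler": "echo"}]},
#       {"echo": EchoReceiver},
#   )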
| [((1097, 1140), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self', '*arg'], {}), '(self, *arg, **kw)\n', (1122, 1140), False, 'import threading\n'), ((3331, 3392), 'json.dumps', 'json.dumps', (['connection'], {'sort_keys': '(True)', 'separators': "(',', ':')"}), "(connection, sort_keys=True, separators=(',', ':'))\n", (3341, 3392), False, 'import json\n'), ((1270, 1319), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (1283, 1319), False, 'import socket\n'), ((1752, 1765), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1762, 1765), False, 'import time\n'), ((1668, 1689), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (1687, 1689), False, 'import traceback\n')] |
shooking/ZoomPedalFun | G5/DerivedData/ParameterProbing/checkme.py | 7b9f5f4441cfe42e988e06cf6b98603c21ac2466 | # -*- coding: ascii -*-
import sys
import json
def check(data):
OnOffstart = data.find(b"OnOff")
if OnOffstart != -1:
fxName=""
OnOffblockSize = 0x30
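        # Each parameter lives in a 0x30-byte block following "OnOff", read
        # below as: bytes 0x00-0x0B zero-terminated name, 0x0C-0x0D little-endian
        # maximum, 0x10-0x11 little-endian default, byte 0x2C pedal-assignable flag.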
for j in range(12):
if data[OnOffstart + j + OnOffblockSize] == 0x00:
break
fxName = fxName + chr(data[OnOffstart + j + OnOffblockSize])
tD = {
"fxname" :fxName
}
mmax = []
mdefault = []
name = []
mpedal = []
numParameters = 0
#print("OnOffStart at {}".format(OnOffstart))
try:
            # the range is intentionally oversized; the except clause below ends the loop
for j in range(0, 2000):
"""
if not ( data[OnOffstart + (j+1) * OnOffblockSize - 1] == 0x00
and data[OnOffstart + (j+1) * OnOffblockSize - 2] == 0x00):
# ZD2 format has a length and PRME offset. ZDL has none of this.
print("End of the parameters")
break;
if not ( data[OnOffstart + (j) * OnOffblockSize + 0x18 ] == 0x00
and data[OnOffstart + (j) * OnOffblockSize + 0x19] == 0x00
and data[OnOffstart + (j) * OnOffblockSize + 0x1A] == 0x00
and data[OnOffstart + (j) * OnOffblockSize + 0x1B] == 0x00 ):
print("Empty next slot")
break
"""
currName = ""
for i in range(12):
if data[OnOffstart + j * OnOffblockSize + i] == 0x00:
break
currName = currName + chr(data[OnOffstart + j * OnOffblockSize + i])
if data[OnOffstart + j * OnOffblockSize + i] & 0x80:
raise Exception("Non binary char")
if currName == "":
break
name.append(currName)
mmax.append( data[OnOffstart + j * OnOffblockSize + 12] +
data[OnOffstart + j * OnOffblockSize + 13] * 256)
mdefault.append(data[OnOffstart + j * OnOffblockSize + 16] +
data[OnOffstart + j * OnOffblockSize + 17] * 256);
if data[OnOffstart + j * OnOffblockSize + 0x2C]:
mpedal.append(True)
else:
mpedal.append(False)
#print(mmax[j])
#print(mdefault[j])
"""
print("[{}] {} {} {} {}".format(
OnOffstart + (j+1) * OnOffblockSize,
hex(data[OnOffstart + (j+1) * OnOffblockSize]),
hex(data[OnOffstart + (j+1) * OnOffblockSize + 1]),
hex(data[OnOffstart + (j+1) * OnOffblockSize + 2]),
hex(data[OnOffstart + (j+1) * OnOffblockSize + 3])) )
"""
#print("increment params")
numParameters = numParameters + 1
except:
pass
#print("Found {} parameters.".format(numParameters))
tD['Parameters'] = []
# 0 is the OnOff state
# 1 is the name
        # so the actual parameters start at index 2, i.e. there are two fewer of them
for i in range(numParameters - 2):
#print(i)
tD['Parameters'].append({'name': name[i+2], 'mmax': mmax[i + 2], 'mdefault': mdefault[i + 2], 'pedal': mpedal[i+2]})
#json.dump(tD, sys.stdout, indent=4)
f = open(fxName+'.json', "w")
json.dump(tD, f, indent=4)
f.close()
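        # The emitted <fxname>.json has the shape
        # {"fxname": "...", "Parameters": [{"name", "mmax", "mdefault", "pedal"}, ...]}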
return fxName+'.OnOff'
# Command-line entry point: read a Zoom firmware dump and extract its effect parameters
if __name__ == "__main__":
if len(sys.argv) == 2:
f = open(sys.argv[1], "rb")
data = f.read()
f.close()
check(data)
| [((3647, 3673), 'json.dump', 'json.dump', (['tD', 'f'], {'indent': '(4)'}), '(tD, f, indent=4)\n', (3656, 3673), False, 'import json\n')] |
pilihaotian/pythonlearning | leehao/learn63.py | e84b7766cc9ea8131e9720fb1f06761c9581d0da | # 随机6位密码 a-zA-Z0-9下划线
import random
source = ''
lower_char = [chr(x) for x in range(ord('a'), ord('z') + 1)]
upper_char = [chr(x) for x in range(ord('A'), ord('Z') + 1)]
number_char = [chr(x) for x in range(ord('0'), ord('9') + 1)]
source += "".join(lower_char)
source += "".join(upper_char)
source += "".join(number_char)
source += "_"
print(source)
# Randomly draw a 20-character string; it must contain an underscore
while True:
s = "".join(random.sample(source, 20))
if '_' in s:
print(s)
break
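# Note: random.sample picks 20 distinct characters without replacement, so the
# loop simply retries until the single underscore happens to be among them.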
| [((400, 425), 'random.sample', 'random.sample', (['source', '(20)'], {}), '(source, 20)\n', (413, 425), False, 'import random\n')] |
fga-eps-mds/2017.2-SiGI-Op_API | gbic/tests.py | 4532019c15414fd17e06bb3aa78501886e00da1d | from django.test import TestCase
from rest_framework.test import APIRequestFactory
from .models import GBIC, GBICType
from .views import GBICListViewSet
# Create your tests here.
class GBICTest(TestCase):
def test_gbic_view_set(self):
request = APIRequestFactory().get("")
gbic_detail = GBICListViewSet.as_view(actions={'get': 'retrieve'})
gbic_type_test = GBICType.objects.create(description='muito_bom')
gbic_test = GBIC.objects.create(
serial='showdaxuxa',
patrimony_number='666',
gbic_type=gbic_type_test
)
response = gbic_detail(request, pk=gbic_test.pk)
self.assertEqual(response.status_code, 200)
def test_deleted_gbic_view_set(self):
request = APIRequestFactory().get("")
gbic_detail = GBICListViewSet.as_view(actions={'get': 'retrieve'})
gbic_type_test = GBICType.objects.create(description='muitoruim')
gbic_test = GBIC.objects.create(
serial='showdomilhao',
patrimony_number='777',
gbic_type=gbic_type_test
)
gbic_test.delete()
response = gbic_detail(request, pk=gbic_test.pk)
self.assertEqual(response.status_code, 404)
| [((260, 279), 'rest_framework.test.APIRequestFactory', 'APIRequestFactory', ([], {}), '()\n', (277, 279), False, 'from rest_framework.test import APIRequestFactory\n'), ((765, 784), 'rest_framework.test.APIRequestFactory', 'APIRequestFactory', ([], {}), '()\n', (782, 784), False, 'from rest_framework.test import APIRequestFactory\n')] |
alienkrieg/fruits | fruits/core/fruit.py | b3b4b6afd7f97d2d4060909689f9811dc97981ed | import inspect
from typing import List, Union, Set, Any
import numpy as np
from fruits.cache import Cache, CoquantileCache
from fruits.scope import force_input_shape, FitTransform
from fruits.core.callback import AbstractCallback
from fruits.signature.iss import SignatureCalculator, CachePlan
from fruits.words.word import Word
from fruits.sieving.abstract import FeatureSieve
from fruits.preparation.abstract import DataPreparateur
class Fruit:
"""Feature Extractor using iterated sums.
A Fruit consists of a number of
:class:`~fruits.core.fruit.FruitBranch` objects.
    At the end of the pipeline, each branch returns its own features
and they will be concatenated by this class.
A simple example (using two branches):
.. code-block:: python
fruit = fruits.Fruit("My Fruit")
# optional: add preparateurs for preprocessing
fruit.add(fruits.preparation.INC)
# add words for iterated sums calculation
fruit.add(fruits.words.creation.simplewords_by_weight(4))
# choose sieves
fruit.add(fruits.sieving.PPV(0.5))
fruit.add(fruits.sieving.END)
# add a new branch without INC
fruit.fork()
fruit.add(fruits.words.creation.simplewords_by_weight(4))
fruit.add(fruits.sieving.PPV(0.5))
fruit.add(fruits.sieving.END)
# configure the fruit
fruit.configure(mode="extended")
# fit the fruit on a time series dataset
fruit.fit(X_train)
# transform the dataset
X_train_transformed = fruit.transform(X_train)
        X_test_transformed = fruit.transform(X_test)
# use the transformed results (features) in a classifier
...
The ``fruit`` above will result in ``2*8*2=32`` features per time
series.
"""
def __init__(self, name: str = ""):
self.name: str = name
# list of FruitBranches
self._branches: List[FruitBranch] = []
# pointer for the current branch index
self._cbi: int = 0
self._fitted: bool = False
@property
def name(self) -> str:
"""Simple identifier for the Fruit object."""
return self._name
@name.setter
def name(self, name: str):
self._name = name
def fork(self, branch: "FruitBranch" = None):
"""Adds a new branch to the pipeline. If none is given, an
empty FruitBranch will be created and switched to.
:type branch: FruitBranch, optional
"""
if branch is None:
branch = FruitBranch()
self._branches.append(branch)
self._cbi = len(self._branches) - 1
self._fitted = False
def branch(self, index: int = None):
"""Returns the currently selected branch or the branch with the
given index.
:rtype: FruitBranch
"""
if index is None:
return self._branches[self._cbi]
return self._branches[index]
def branches(self) -> list:
"""Returns all branches of this Fruit object.
:rtype: list
"""
return self._branches
def switch_branch(self, index: int):
"""Switches to the branch with the given index.
:param index: Integer in ``[0, 1, ..., len(self.branches())-1]``
:type index: int
"""
if not (0 <= index < len(self._branches)):
raise IndexError("Index has to be in [0, len(self.branches()))")
self._cbi = index
def add(self, *objects: Union[FitTransform, Word, type]):
"""Adds one or multiple object(s) to the currently selected
branch.
:param objects: One or more objects of the following types:
- :class:`~fruits.preparation.abstract.DataPreparateur`
- :class:`~fruits.words.word.Word`
- :class:`~fruits.sieving.abstract.FeatureSieve`
:type objects: Union[FitTransform, Word]
"""
if len(self._branches) == 0:
self.fork()
self._branches[self._cbi].add(*objects)
self._fitted = False
def nfeatures(self) -> int:
"""Returns the total number of features of all branches
combined.
:rtype: int
"""
return sum([branch.nfeatures() for branch in self._branches])
    def configure(self, **kwargs: Any):
        """Makes changes to the default configuration of all branches
if arguments differ from ``None``.
:param kwargs: For possible options, have a look at
:meth:`fruits.core.fruit.FruitBranch.configure`.
:type kwargs: Any
"""
for branch in self._branches:
branch.configure(**kwargs)
def fit(self, X: np.ndarray):
"""Fits all branches to the given data.
:param X: (Multidimensional) time series dataset as an array
of three dimensions. Have a look at
:meth:`~fruits.scope.force_input_shape`.
:type X: np.ndarray
"""
for branch in self._branches:
branch.fit(X)
self._fitted = True
def transform(self, X: np.ndarray,
callbacks: List[AbstractCallback] = []) -> np.ndarray:
"""Returns a two dimensional array of all features from all
branches this Fruit object contains.
:param X: (Multidimensional) time series dataset as an array
of three dimensions. Have a look at
:meth:`~fruits.scope.force_input_shape`.
:type X: np.ndarray
:param callbacks: List of callbacks. To write your own callback,
override the class
:class:`~fruits.core.callback.AbstractCallback`.,
            defaults to []
:type callbacks: List[AbstractCallback], optional
:rtype: np.ndarray
:raises: RuntimeError if Fruit.fit wasn't called
"""
if not self._fitted:
raise RuntimeError("Missing call of self.fit")
result = np.zeros((X.shape[0], self.nfeatures()))
index = 0
for branch in self._branches:
for callback in callbacks:
callback.on_next_branch()
k = branch.nfeatures()
result[:, index:index+k] = branch.transform(X, callbacks)
index += k
result = np.nan_to_num(result, copy=False, nan=0.0)
return result
def fit_transform(self, X: np.ndarray) -> np.ndarray:
"""Fits all branches to the given dataset and returns the
transformed results of X from all branches.
:param X: (Multidimensional) time series dataset
:type X: np.ndarray
:returns: Two dimensional feature array
:rtype: np.ndarray
"""
self.fit(X)
return self.transform(X)
def summary(self) -> str:
"""Returns a summary of this object. The summary contains a
summary for each FruitBranch in this Fruit object.
:rtype: str
"""
summary = "{:=^80}".format(f"Summary of fruits.Fruit: '{self.name}'")
summary += f"\nBranches: {len(self.branches())}"
summary += f"\nFeatures: {self.nfeatures()}"
for branch in self.branches():
summary += "\n\n" + branch.summary()
        summary += "\n{:=^80}".format("End of Summary")
return summary
def copy(self) -> "Fruit":
"""Creates a shallow copy of this Fruit object.
This also creates shallow copies of all branches in this object.
:rtype: Fruit
"""
copy_ = Fruit(self.name+" (Copy)")
for branch in self._branches:
copy_.fork(branch.copy())
return copy_
def deepcopy(self) -> "Fruit":
"""Creates a deep copy of this Fruit object.
This also creates deep copies of all branches in this object.
:rtype: Fruit
"""
copy_ = Fruit(self.name+" (Copy)")
for branch in self._branches:
copy_.fork(branch.deepcopy())
return copy_
class FruitBranch:
"""One branch of a Fruit object.
A FruitBranch object extracts values from time series data that are
somehow representative of the input data.
The user can customize any of the following three steps.
- Preparing data:
Apply functions at the start of the extraction procedure.
There are many so called
:class:`~fruits.preparation.abstract.DataPreparateur`
objects in fruits available for preprocessing. The
preparateurs will be applied sequentially to the input data.
- Calculating Iterated Sums:
The preprocessed data is now used to calculate the iterated
sums signature for different
:class:`~fruits.words.word.Word` objects the user can
specify.
- Extracting Features:
Each :class:`~fruits.sieving.abstract.FeatureSieve` added to
the branch will be fitted on the iterated sums from the
previous step. The branch then returns an array of numbers
(the transformed results from those sieves), i.e. the
features for each time series.
"""
def __init__(self):
# lists of used classes for data processing
self._preparateurs: list = []
self._words: list = []
self._sieves: list = []
# calculator options used in the ISS calculation
self._calculator_options: dict = {"batch_size": 1, "mode": "single"}
# list with inner lists containing sieves
# all sieves in one list are trained on one specific output
# of an ISS-result
self._sieves_extended: list = []
# configurations for fitting
self._fitted: bool = False
self._fit_sample_size: Union[float, int] = 1
# cache that is calculated at fitting and also used in the
# transformation process
self._cache: Cache
def configure(self,
mode: str = None,
batch_size: int = None,
fit_sample_size: Union[float, int] = None):
"""Makes changes to the default configuration of a fruit branch
if arguments differ from ``None``.
:param mode: See
:meth:`fruits.signature.iss.SignatureCalculator.transform`,
defaults to None
:type mode: str, optional
:param batch_size: See
            :meth:`~fruits.signature.iss.SignatureCalculator.transform`,
defaults to None
:type batch_size: int, optional
:param fit_sample_size: Size of the random time series sample
that is used for fitting. This is represented as a float
which will be multiplied by ``X.shape[0]`` or ``1`` for one
random time series., defaults to 1
:type fit_sample_size: Union[float, int]
"""
if mode is not None:
self._calculator_options["mode"] = mode
if batch_size is not None:
self._calculator_options["batch_size"] = batch_size
if fit_sample_size is not None:
self._fit_sample_size = fit_sample_size
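        # Illustration of the options (not additional behaviour): for a dataset with
        # X.shape == (100, d, n), fit_sample_size=0.1 makes ``fit`` use
        # int(0.1 * 100) == 10 randomly chosen series, while the default of 1 uses a
        # single random series; ``mode`` and ``batch_size`` are handed straight to
        # the SignatureCalculator during fit/transform.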
def add_preparateur(self, preparateur: DataPreparateur):
"""Adds a preparateur to the branch.
:type preparateur: DataPreparateur
"""
if not isinstance(preparateur, DataPreparateur):
raise TypeError
self._preparateurs.append(preparateur)
self._fitted = False
def get_preparateurs(self) -> List[DataPreparateur]:
"""Returns a list of all preparateurs added to the
branch.
:rtype: List[DataPreparateur]
"""
return self._preparateurs
def clear_preparateurs(self):
"""Removes all preparateurs that were added to this branch."""
self._preparateurs = []
self._fitted = False
def add_word(self, word: Word):
"""Adds a word to the branch.
:type word: Word
"""
if not isinstance(word, Word):
raise TypeError
self._words.append(word)
self._fitted = False
def get_words(self) -> List[Word]:
"""Returns a list of all words in the branch.
:rtype: List[Word]
"""
return self._words
def clear_words(self):
"""Removes all words that were added to this branch."""
self._words = []
self._sieves_extended = []
self._fitted = False
def add_sieve(self, sieve: FeatureSieve):
"""Appends a new feature sieve to the FruitBranch.
:type sieve: FeatureSieve
"""
if not isinstance(sieve, FeatureSieve):
raise TypeError
self._sieves.append(sieve)
self._fitted = False
def get_sieves(self) -> List[FeatureSieve]:
"""Returns a list of all feature sieves added to the branch.
:rtype: List[FeatureSieve]
"""
return self._sieves
def clear_sieves(self):
"""Removes all feature sieves that were added to this branch."""
self._sieves = []
self._sieve_prerequisites = None
self._sieves_extended = []
self._fitted = False
def add(self, *objects: Union[FitTransform, Word, type]):
"""Adds one or multiple object(s) to the branch.
:type objects: One or more objects of the following types:
- :class:`~fruits.preparation.abstract.DataPreparateur`
- :class:`~fruits.words.word.Word`
- :class:`~fruits.sieving.abstract.FeatureSieve`
"""
objects_flattened = np.array(objects, dtype=object).flatten()
for obj in objects_flattened:
if inspect.isclass(obj):
obj = obj()
if isinstance(obj, DataPreparateur):
self.add_preparateur(obj)
elif isinstance(obj, Word):
self.add_word(obj)
elif isinstance(obj, FeatureSieve):
self.add_sieve(obj)
else:
raise TypeError("Cannot add variable of type"+str(type(obj)))
def clear(self):
"""Clears all settings, configurations and calculated results
the branch has.
After the branch is cleared, it has the same settings as a newly
created FruitBranch object.
"""
self.clear_preparateurs()
self.clear_words()
self.clear_sieves()
self._calculator_options = {"batch_size": 1, "mode": "single"}
def nfeatures(self) -> int:
"""Returns the total number of features the current
configuration produces.
:rtype: int
"""
if self._calculator_options["mode"] == "extended":
return (
sum([s.nfeatures() for s in self._sieves])
* CachePlan(self._words).n_iterated_sums(
list(range(len(self._words)))
)
)
else:
return (
sum([s.nfeatures() for s in self._sieves])
* len(self._words)
)
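        # Worked example with illustrative numbers: two sieves returning one feature
        # each and three words give 2 * 3 = 6 features in "single" mode; in
        # "extended" mode the same sieve count is instead multiplied by the number
        # of iterated sums that the CachePlan reports for those words.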
def _compile(self):
# checks if the FruitBranch is configured correctly and ready
# for fitting
if not self._words:
raise RuntimeError("No words specified for ISS calculation")
if not self._sieves:
raise RuntimeError("No FeatureSieve objects specified")
def _collect_cache_keys(self) -> Set[str]:
# collects cache keys of all FitTransformers in the branch
keys: Set[str] = set()
for prep in self._preparateurs:
prep_keys = prep._get_cache_keys()
if 'coquantile' in prep_keys:
keys = keys.union(prep_keys['coquantile'])
for sieve in self._sieves:
sieve_keys = sieve._get_cache_keys()
if 'coquantile' in sieve_keys:
keys = keys.union(sieve_keys['coquantile'])
return keys
def _get_cache(self, X: np.ndarray):
# returns the already processed cache needed in this branch
self._cache = CoquantileCache()
self._cache.process(X, list(self._collect_cache_keys()))
def _select_fit_sample(self, X: np.ndarray) -> np.ndarray:
# returns a sample of the data used for fitting
if (isinstance(self._fit_sample_size, int)
and self._fit_sample_size == 1):
ind = np.random.randint(0, X.shape[0])
return X[ind:ind+1, :, :]
else:
s = int(self._fit_sample_size * X.shape[0])
if s < 1:
s = 1
indices = np.random.choice(X.shape[0], size=s, replace=False)
return X[indices, :, :]
def fit(self, X: np.ndarray):
"""Fits the branch to the given dataset. What this action
explicitly does depends on the FruitBranch configuration.
:param X: (Multidimensional) time series dataset as an array
of three dimensions. Have a look at
:meth:`~fruits.scope.force_input_shape`.
:type X: np.ndarray
"""
self._compile()
self._get_cache(X)
prepared_data = self._select_fit_sample(X)
for prep in self._preparateurs:
prep.fit(prepared_data)
prepared_data = prep.transform(prepared_data, cache=self._cache)
self._sieves_extended = []
iss_calculations = SignatureCalculator().transform(
prepared_data,
words=self._words,
**self._calculator_options
)[0]
for iterated_data in iss_calculations:
iterated_data = iterated_data.reshape(iterated_data.shape[0]
* iterated_data.shape[1],
iterated_data.shape[2])
sieves_copy = [sieve.copy() for sieve in self._sieves]
for sieve in sieves_copy:
sieve.fit(iterated_data[:, :])
self._sieves_extended.append(sieves_copy)
self._fitted = True
def transform(self, X: np.ndarray,
callbacks: List[AbstractCallback] = []) -> np.ndarray:
"""Transforms the given time series dataset. The results are
the calculated features for the different time series.
:param X: (Multidimensional) time series dataset as an array
of three dimensions. Have a look at
:meth:`~fruits.scope.force_input_shape`.
:type X: np.ndarray
:param callbacks: List of callbacks. To write your own callback,
override the class
:class:`~fruits.core.callback.AbstractCallback`.,
defaults to []
:type callbacks: List[AbstractCallback], optional
:rtype: np.ndarray
:raises: RuntimeError if ``self.fit`` wasn't called
"""
if not self._fitted:
raise RuntimeError("Missing call of self.fit")
self._get_cache(X)
prepared_data = force_input_shape(X)
for prep in self._preparateurs:
prepared_data = prep.transform(prepared_data, cache=self._cache)
for callback in callbacks:
callback.on_preparateur(prepared_data)
for callback in callbacks:
callback.on_preparation_end(prepared_data)
sieved_data = np.zeros((prepared_data.shape[0],
self.nfeatures()))
k = 0
iss_calculations = SignatureCalculator().transform(
prepared_data,
words=self._words,
**self._calculator_options
)[0]
for i, iterated_data in enumerate(iss_calculations):
for callback in callbacks:
callback.on_iterated_sum(iterated_data)
for sieve in self._sieves_extended[i]:
nf = sieve.nfeatures()
new_features = nf * iterated_data.shape[1]
for it in range(iterated_data.shape[1]):
sieved_data[:, k+it*nf:k+(it+1)*nf] = sieve.transform(
iterated_data[:, it, :],
cache=self._cache,
)
for callback in callbacks:
callback.on_sieve(sieved_data[k:k+new_features])
k += new_features
for callback in callbacks:
callback.on_sieving_end(sieved_data)
return sieved_data
    def fit_transform(self, X: np.ndarray) -> np.ndarray:
        """This function does the same as calling ``self.fit(X)`` and
``self.transform(X)`` consecutively does.
:param X: (Multidimensional) time series dataset as an array
of three dimensions. Have a look at
            :meth:`~fruits.scope.force_input_shape`.
:type X: np.ndarray
:returns: Array of features.
:rtype: np.ndarray
"""
self.fit(X)
return self.transform(X)
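        # A minimal end-to-end sketch of one branch (the preparateur, word and sieve
        # below mirror the example in the Fruit docstring and are illustrative
        # choices, not required ones):
        #
        #     branch = FruitBranch()
        #     branch.add(fruits.preparation.INC)
        #     branch.add(fruits.words.creation.simplewords_by_weight(4))
        #     branch.add(fruits.sieving.PPV(0.5))
        #     branch.configure(mode="extended")
        #     features = branch.fit_transform(X_train)  # (n_series, branch.nfeatures())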
def summary(self) -> str:
"""Returns a summary of this object. The summary contains all
added preparateurs, words and sieves.
:rtype: str
"""
summary = "{:-^80}".format("fruits.FruitBranch")
summary += f"\nNumber of features: {self.nfeatures()}"
summary += f"\n\nPreparateurs ({len(self._preparateurs)}): "
if len(self._preparateurs) == 0:
summary += "-"
else:
summary += "\n\t+ " + \
"\n\t+ ".join([str(x) for x in self._preparateurs])
summary += f"\nIterators ({len(self._words)}): "
if len(self._words) == 0:
summary += "-"
elif len(self._words) > 10:
summary += "\n\t+ " + \
"\n\t+ ".join([str(x) for x in self._words[:9]])
summary += "\n\t..."
else:
summary += "\n\t+ " + \
"\n\t+ ".join([str(x) for x in self._words])
summary += f"\nSieves ({len(self._sieves)}): "
if len(self._sieves) == 0:
summary += "-"
else:
for x in self._sieves:
lines = x.summary().split("\n")
summary += "\n\t+ " + lines[0]
summary += "\n\t "
summary += "\n\t ".join(lines[1:])
return summary
def copy(self) -> "FruitBranch":
"""Returns a shallow copy of this FruitBranch object.
        :returns: Copy of the branch with the same settings but with all
            previous calculations erased.
:rtype: FruitBranch
"""
copy_ = FruitBranch()
for preparateur in self._preparateurs:
copy_.add(preparateur)
for iterator in self._words:
copy_.add(iterator)
for sieve in self._sieves:
copy_.add(sieve)
return copy_
def deepcopy(self) -> "FruitBranch":
"""Returns a deep copy of this FruitBranch object.
        :returns: Deep copy of the branch with the same settings but with all
            previous calculations erased.
:rtype: FruitBranch
"""
copy_ = FruitBranch()
for preparateur in self._preparateurs:
copy_.add(preparateur.copy())
for iterator in self._words:
copy_.add(iterator.copy())
for sieve in self._sieves:
copy_.add(sieve.copy())
copy_._calculator_options = self._calculator_options.copy()
return copy_
| [((6257, 6299), 'numpy.nan_to_num', 'np.nan_to_num', (['result'], {'copy': '(False)', 'nan': '(0.0)'}), '(result, copy=False, nan=0.0)\n', (6270, 6299), True, 'import numpy as np\n'), ((15954, 15971), 'fruits.cache.CoquantileCache', 'CoquantileCache', ([], {}), '()\n', (15969, 15971), False, 'from fruits.cache import Cache, CoquantileCache\n'), ((18848, 18868), 'fruits.scope.force_input_shape', 'force_input_shape', (['X'], {}), '(X)\n', (18865, 18868), False, 'from fruits.scope import force_input_shape, FitTransform\n'), ((13590, 13610), 'inspect.isclass', 'inspect.isclass', (['obj'], {}), '(obj)\n', (13605, 13610), False, 'import inspect\n'), ((16275, 16307), 'numpy.random.randint', 'np.random.randint', (['(0)', 'X.shape[0]'], {}), '(0, X.shape[0])\n', (16292, 16307), True, 'import numpy as np\n'), ((16482, 16533), 'numpy.random.choice', 'np.random.choice', (['X.shape[0]'], {'size': 's', 'replace': '(False)'}), '(X.shape[0], size=s, replace=False)\n', (16498, 16533), True, 'import numpy as np\n'), ((13495, 13526), 'numpy.array', 'np.array', (['objects'], {'dtype': 'object'}), '(objects, dtype=object)\n', (13503, 13526), True, 'import numpy as np\n'), ((17267, 17288), 'fruits.signature.iss.SignatureCalculator', 'SignatureCalculator', ([], {}), '()\n', (17286, 17288), False, 'from fruits.signature.iss import SignatureCalculator, CachePlan\n'), ((19319, 19340), 'fruits.signature.iss.SignatureCalculator', 'SignatureCalculator', ([], {}), '()\n', (19338, 19340), False, 'from fruits.signature.iss import SignatureCalculator, CachePlan\n'), ((14699, 14721), 'fruits.signature.iss.CachePlan', 'CachePlan', (['self._words'], {}), '(self._words)\n', (14708, 14721), False, 'from fruits.signature.iss import SignatureCalculator, CachePlan\n')] |
pa3kDaWae/workoutizer | workoutizer/__main__.py | 15501d0060711bbd8308642bc89b45c1442d4d0f | import os
import argparse
import subprocess
import socket
import sys
import click
from django.core.management import execute_from_command_line
from workoutizer.settings import WORKOUTIZER_DIR, WORKOUTIZER_DB_PATH, TRACKS_DIR
from workoutizer import __version__
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SETUP_DIR = os.path.join(BASE_DIR, 'setup')
os.environ["DJANGO_SETTINGS_MODULE"] = "workoutizer.settings"
example_rpi_cmd = "wkz --setup_rpi vendor_id=091e product_id=4b48"
url_help = 'specify ip address and port pair, like: address:port'
@click.group()
def cli():
pass
@click.command(help='Mandatory command to initialize workoutizer. This fetches the static files, creates the database '
'and applies the required migrations.')
def init():
_build_home()
execute_from_command_line(["manage.py", "collectstatic", "--noinput"])
execute_from_command_line(["manage.py", "migrate"])
execute_from_command_line(["manage.py", "check"])
click.echo(f"database and track files are stored in: {WORKOUTIZER_DIR}")
@click.option('--ip', default="", help=url_help)
@click.option('--product_id', help="product id of your device", required=True)
@click.option('--vendor_id', help="vendor id of your device", required=True)
@click.command(help='Configure Raspberry Pi to auto mount devices. Passing vendor and product id is required. Passing '
                    f'the local ip address and port is optional. E.g.: {example_rpi_cmd}')
def setup_rpi(ip, vendor_id, product_id):
if not ip:
ip = _get_local_ip_address()
answer = input(f"Are you sure you want to setup your Raspberry Pi?\n\n"
f"This will copy the required udev rule and systemd service file\n"
f"to your system to enable automated mounting of your device.\n"
f"This might take a while...\n\n"
f"Start setup? [Y/n] ")
if answer.lower() == 'y':
click.echo(f"installing ansible...")
_pip_install('ansible==2.9.10')
click.echo(f"starting setup using ansible...")
_setup_rpi(
vendor_id=vendor_id,
product_id=product_id,
ip_port=f"{ip}:8000"
)
_run_ansible(playbook='install_packages.yml')
click.echo(f"Successfully configured to automatically mount your device when plugged in. Note: These changes "
f"require a system restart to take effect.")
else:
click.echo(f"Aborted.")
@click.argument('url', default="")
@click.command(help="Run workoutizer. Passing the local ip address and port is optional. If no ip address "
                    "is passed, it will be determined automatically. Usage, e.g.: 'wkz run 0.0.0.0:8000'.")
def run(url):
if not url:
url = f"{_get_local_ip_address()}:8000"
execute_from_command_line(["manage.py", "runserver", url])
@click.argument('url', default="")
@click.command(help='Configure workoutizer to run as systemd service. Passing the local ip address and port is '
                    'optional. If no ip address is passed, it will be determined automatically.')
def wkz_as_service(url):
_pip_install('ansible==2.9.10')
_wkz_as_service(url=url)
@click.argument('cmd', nargs=1)
@click.command(help="Pass commands to django's manage.py. Convenience function to access all django commands which are "
"not yet covered with the given set of workoutizer commands. Usage, e.g.: "
"wkz manage 'runserver 0.0.0.0:8000 --noreload'.")
def manage(cmd):
execute_from_command_line(["manage.py"] + cmd.split(' '))
@click.command(help='Show the version of currently installed workoutizer.')
def version():
click.echo(__version__)
@click.command(help='Check for a newer version and install if there is any.')
def upgrade():
_upgrade()
cli.add_command(upgrade)
cli.add_command(version)
cli.add_command(init)
cli.add_command(setup_rpi)
cli.add_command(run)
cli.add_command(manage)
cli.add_command(wkz_as_service)
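# Typical invocations of the CLI assembled above (the address/port is the
# example used in the help texts, not a required value):
#
#     wkz init
#     wkz run 0.0.0.0:8000
#     wkz manage 'runserver 0.0.0.0:8000 --noreload'
#     wkz version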
def _upgrade():
latest_version = _get_latest_version_of("workoutizer")
from workoutizer import __version__ as current_version
if latest_version:
click.echo(f"found newer version: {latest_version}, you have {current_version} installed")
_pip_install('workoutizer', upgrade=True)
execute_from_command_line(["manage.py", "collectstatic", "--noinput"])
execute_from_command_line(["manage.py", "migrate"])
execute_from_command_line(["manage.py", "check"])
click.echo(f"Successfully upgraded from {current_version} to {latest_version}")
else:
click.echo(f"No update available. You are running the latest version: {current_version}")
def _get_latest_version_of(package: str):
outdated = str(
subprocess.check_output([sys.executable, "-m", "pip", "list", '--outdated', '--disable-pip-version-check']))
if package in outdated:
output = str(subprocess.check_output([sys.executable, "-m", "pip", "search", package]))
latest_version = output[output.find('LATEST'):].split('\\n')[0].split(' ')[-1]
return latest_version
else:
return False
def _setup_rpi(vendor_id: str, product_id: str, ip_port: str = None):
if not ip_port:
ip_port = f"{_get_local_ip_address()}:8000"
result = _run_ansible(
playbook='setup_on_rpi.yml',
variables={
'vendor_id': vendor_id,
'product_id': product_id,
'address_plus_port': ip_port,
}
)
if result == 0:
pass
else:
click.echo(f"ERROR: Could not configure Raspberry Pi, see above errors.")
quit()
return result
def _wkz_as_service(url: str):
click.echo(f"configuring workoutizer to run as system service")
if not url:
url = f"{_get_local_ip_address()}:8000"
env_binaries = sys.executable
wkz_executable = env_binaries[:env_binaries.find('python')] + "wkz"
result = _run_ansible(
playbook='wkz_as_service.yml',
variables={
'address_plus_port': url,
'wkz_executable': wkz_executable,
}
)
if result == 0:
click.echo(f"Successfully configured workoutizer as systemd service. Run it with: systemctl start wkz.service")
else:
click.echo(f"ERROR: Could not configure workoutizer as systemd service, see above errors.")
return result
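# The helper below opens a UDP socket towards a public address only so the OS
# selects the outgoing interface; connect() on a datagram socket sends no packet,
# and getsockname() then yields the machine's local IP address.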
def _get_local_ip_address():
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip_address = s.getsockname()[0]
s.close()
return ip_address
def _build_home():
if os.path.isdir(WORKOUTIZER_DIR):
if os.path.isfile(WORKOUTIZER_DB_PATH):
click.echo(f"Found existing workoutizer database at: {WORKOUTIZER_DB_PATH}\n")
answer = input(f"Workoutizer could try to use the existing database instead of creating a new one.\n"
f"Note that this could lead to faulty behaviour because of mismatching applied\n"
f"migrations on this database.\n\n"
f"Do you want to use the existing database instead of creating a new one? [Y/n] ")
if answer.lower() == 'y':
click.echo(f"keeping existing database at {WORKOUTIZER_DB_PATH}")
return
else:
click.echo(f"removed database at {WORKOUTIZER_DB_PATH}")
os.remove(WORKOUTIZER_DB_PATH)
_make_tracks_dir(TRACKS_DIR)
else:
os.mkdir(WORKOUTIZER_DIR)
_make_tracks_dir(TRACKS_DIR)
def _make_tracks_dir(path):
if not os.path.isdir(path):
os.mkdir(path)
class ParseDict(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
d = {}
if values:
for item in values:
split_items = item.split("=", 1)
key = split_items[0].strip() # we remove blanks around keys, as is logical
value = split_items[1]
d[key] = value
setattr(namespace, self.dest, d)
def _pip_install(package, upgrade: bool = False):
if upgrade:
subprocess.check_call([sys.executable, "-m", "pip", "install", package, '--upgrade'])
else:
subprocess.check_call([sys.executable, "-m", "pip", "install", package])
def _run_ansible(playbook: str, variables: dict = None):
if variables is None:
variables = {}
from ansible import context
from ansible.cli import CLI
from ansible.module_utils.common.collections import ImmutableDict
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.parsing.dataloader import DataLoader
from ansible.inventory.manager import InventoryManager
from ansible.vars.manager import VariableManager
loader = DataLoader()
context.CLIARGS = ImmutableDict(
tags={}, listtags=False, listtasks=False, listhosts=False, syntax=False, connection='ssh', module_path=None,
forks=100, remote_user='xxx', private_key_file=None, ssh_common_args=None, ssh_extra_args=None,
sftp_extra_args=None, scp_extra_args=None, become=True, become_method='sudo', become_user='root',
verbosity=True, check=False, start_at_task=None
)
inventory = InventoryManager(loader=loader, sources=())
variable_manager = VariableManager(loader=loader, inventory=inventory, version_info=CLI.version_info(gitinfo=False))
variable_manager._extra_vars = variables
pbex = PlaybookExecutor(playbooks=[os.path.join(SETUP_DIR, 'ansible', playbook)], inventory=inventory,
variable_manager=variable_manager,
loader=loader, passwords={})
return pbex.run()
if __name__ == '__main__':
cli()
| [((330, 361), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""setup"""'], {}), "(BASE_DIR, 'setup')\n", (342, 361), False, 'import os\n'), ((561, 574), 'click.group', 'click.group', ([], {}), '()\n', (572, 574), False, 'import click\n'), ((598, 763), 'click.command', 'click.command', ([], {'help': '"""Mandatory command to initialize workoutizer. This fetches the static files, creates the database and applies the required migrations."""'}), "(help=\n 'Mandatory command to initialize workoutizer. This fetches the static files, creates the database and applies the required migrations.'\n )\n", (611, 763), False, 'import click\n'), ((1072, 1119), 'click.option', 'click.option', (['"""--ip"""'], {'default': '""""""', 'help': 'url_help'}), "('--ip', default='', help=url_help)\n", (1084, 1119), False, 'import click\n'), ((1121, 1198), 'click.option', 'click.option', (['"""--product_id"""'], {'help': '"""product ip of your device"""', 'required': '(True)'}), "('--product_id', help='product ip of your device', required=True)\n", (1133, 1198), False, 'import click\n'), ((1200, 1275), 'click.option', 'click.option', (['"""--vendor_id"""'], {'help': '"""vendor ip of your device"""', 'required': '(True)'}), "('--vendor_id', help='vendor ip of your device', required=True)\n", (1212, 1275), False, 'import click\n'), ((1277, 1475), 'click.command', 'click.command', ([], {'help': 'f"""Configure Raspberry Pi to auto mount devices. Passing vendor and product id is required. Passing the local ip address and port is optionally. E.g.: {example_rpi_cmd}"""'}), "(help=\n f'Configure Raspberry Pi to auto mount devices. Passing vendor and product id is required. Passing the local ip address and port is optionally. E.g.: {example_rpi_cmd}'\n )\n", (1290, 1475), False, 'import click\n'), ((2509, 2542), 'click.argument', 'click.argument', (['"""url"""'], {'default': '""""""'}), "('url', default='')\n", (2523, 2542), False, 'import click\n'), ((2544, 2758), 'click.command', 'click.command', ([], {'help': '"""Run workoutizer. Passing the local ip address and port is optionally. In case of no ip address being passed, it will be determined automatically. Usage, e.g.: \'wkz run 0.0.0.0:8000\'."""'}), '(help=\n "Run workoutizer. Passing the local ip address and port is optionally. In case of no ip address being passed, it will be determined automatically. Usage, e.g.: \'wkz run 0.0.0.0:8000\'."\n )\n', (2557, 2758), False, 'import click\n'), ((2916, 2949), 'click.argument', 'click.argument', (['"""url"""'], {'default': '""""""'}), "('url', default='')\n", (2930, 2949), False, 'import click\n'), ((2951, 3160), 'click.command', 'click.command', ([], {'help': '"""Configure workoutizer to run as systemd service. Passing the local ip address and port is optionally. In case of no ip address being passed, it will be determined automatically."""'}), "(help=\n 'Configure workoutizer to run as systemd service. Passing the local ip address and port is optionally. In case of no ip address being passed, it will be determined automatically.'\n )\n", (2964, 3160), False, 'import click\n'), ((3267, 3297), 'click.argument', 'click.argument', (['"""cmd"""'], {'nargs': '(1)'}), "('cmd', nargs=1)\n", (3281, 3297), False, 'import click\n'), ((3299, 3549), 'click.command', 'click.command', ([], {'help': '"""Pass commands to django\'s manage.py. Convenience function to access all django commands which are not yet covered with the given set of workoutizer commands. 
Usage, e.g.: wkz manage \'runserver 0.0.0.0:8000 --noreload\'."""'}), '(help=\n "Pass commands to django\'s manage.py. Convenience function to access all django commands which are not yet covered with the given set of workoutizer commands. Usage, e.g.: wkz manage \'runserver 0.0.0.0:8000 --noreload\'."\n )\n', (3312, 3549), False, 'import click\n'), ((3668, 3742), 'click.command', 'click.command', ([], {'help': '"""Show the version of currently installed workoutizer."""'}), "(help='Show the version of currently installed workoutizer.')\n", (3681, 3742), False, 'import click\n'), ((3789, 3865), 'click.command', 'click.command', ([], {'help': '"""Check for a newer version and install if there is any."""'}), "(help='Check for a newer version and install if there is any.')\n", (3802, 3865), False, 'import click\n'), ((291, 316), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (306, 316), False, 'import os\n'), ((811, 881), 'django.core.management.execute_from_command_line', 'execute_from_command_line', (["['manage.py', 'collectstatic', '--noinput']"], {}), "(['manage.py', 'collectstatic', '--noinput'])\n", (836, 881), False, 'from django.core.management import execute_from_command_line\n'), ((886, 937), 'django.core.management.execute_from_command_line', 'execute_from_command_line', (["['manage.py', 'migrate']"], {}), "(['manage.py', 'migrate'])\n", (911, 937), False, 'from django.core.management import execute_from_command_line\n'), ((942, 991), 'django.core.management.execute_from_command_line', 'execute_from_command_line', (["['manage.py', 'check']"], {}), "(['manage.py', 'check'])\n", (967, 991), False, 'from django.core.management import execute_from_command_line\n'), ((996, 1068), 'click.echo', 'click.echo', (['f"""database and track files are stored in: {WORKOUTIZER_DIR}"""'], {}), "(f'database and track files are stored in: {WORKOUTIZER_DIR}')\n", (1006, 1068), False, 'import click\n'), ((2854, 2912), 'django.core.management.execute_from_command_line', 'execute_from_command_line', (["['manage.py', 'runserver', url]"], {}), "(['manage.py', 'runserver', url])\n", (2879, 2912), False, 'from django.core.management import execute_from_command_line\n'), ((3762, 3785), 'click.echo', 'click.echo', (['__version__'], {}), '(__version__)\n', (3772, 3785), False, 'import click\n'), ((5783, 5846), 'click.echo', 'click.echo', (['f"""configuring workoutizer to run as system service"""'], {}), "(f'configuring workoutizer to run as system service')\n", (5793, 5846), False, 'import click\n'), ((6510, 6558), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (6523, 6558), False, 'import socket\n'), ((6690, 6720), 'os.path.isdir', 'os.path.isdir', (['WORKOUTIZER_DIR'], {}), '(WORKOUTIZER_DIR)\n', (6703, 6720), False, 'import os\n'), ((8910, 8922), 'ansible.parsing.dataloader.DataLoader', 'DataLoader', ([], {}), '()\n', (8920, 8922), False, 'from ansible.parsing.dataloader import DataLoader\n'), ((8945, 9332), 'ansible.module_utils.common.collections.ImmutableDict', 'ImmutableDict', ([], {'tags': '{}', 'listtags': '(False)', 'listtasks': '(False)', 'listhosts': '(False)', 'syntax': '(False)', 'connection': '"""ssh"""', 'module_path': 'None', 'forks': '(100)', 'remote_user': '"""xxx"""', 'private_key_file': 'None', 'ssh_common_args': 'None', 'ssh_extra_args': 'None', 'sftp_extra_args': 'None', 'scp_extra_args': 'None', 'become': '(True)', 'become_method': '"""sudo"""', 'become_user': '"""root"""', 'verbosity': 
'(True)', 'check': '(False)', 'start_at_task': 'None'}), "(tags={}, listtags=False, listtasks=False, listhosts=False,\n syntax=False, connection='ssh', module_path=None, forks=100,\n remote_user='xxx', private_key_file=None, ssh_common_args=None,\n ssh_extra_args=None, sftp_extra_args=None, scp_extra_args=None, become=\n True, become_method='sudo', become_user='root', verbosity=True, check=\n False, start_at_task=None)\n", (8958, 9332), False, 'from ansible.module_utils.common.collections import ImmutableDict\n'), ((9365, 9408), 'ansible.inventory.manager.InventoryManager', 'InventoryManager', ([], {'loader': 'loader', 'sources': '()'}), '(loader=loader, sources=())\n', (9381, 9408), False, 'from ansible.inventory.manager import InventoryManager\n'), ((1964, 2000), 'click.echo', 'click.echo', (['f"""installing ansible..."""'], {}), "(f'installing ansible...')\n", (1974, 2000), False, 'import click\n'), ((2049, 2095), 'click.echo', 'click.echo', (['f"""starting setup using ansible..."""'], {}), "(f'starting setup using ansible...')\n", (2059, 2095), False, 'import click\n'), ((2289, 2450), 'click.echo', 'click.echo', (['f"""Successfully configured to automatically mount your device when plugged in. Note: These changes require a system restart to take effect."""'], {}), "(\n f'Successfully configured to automatically mount your device when plugged in. Note: These changes require a system restart to take effect.'\n )\n", (2299, 2450), False, 'import click\n'), ((2482, 2505), 'click.echo', 'click.echo', (['f"""Aborted."""'], {}), "(f'Aborted.')\n", (2492, 2505), False, 'import click\n'), ((4241, 4341), 'click.echo', 'click.echo', (['f"""found newer version: {latest_version}, you have {current_version} installed"""'], {}), "(\n f'found newer version: {latest_version}, you have {current_version} installed'\n )\n", (4251, 4341), False, 'import click\n'), ((4390, 4460), 'django.core.management.execute_from_command_line', 'execute_from_command_line', (["['manage.py', 'collectstatic', '--noinput']"], {}), "(['manage.py', 'collectstatic', '--noinput'])\n", (4415, 4460), False, 'from django.core.management import execute_from_command_line\n'), ((4469, 4520), 'django.core.management.execute_from_command_line', 'execute_from_command_line', (["['manage.py', 'migrate']"], {}), "(['manage.py', 'migrate'])\n", (4494, 4520), False, 'from django.core.management import execute_from_command_line\n'), ((4529, 4578), 'django.core.management.execute_from_command_line', 'execute_from_command_line', (["['manage.py', 'check']"], {}), "(['manage.py', 'check'])\n", (4554, 4578), False, 'from django.core.management import execute_from_command_line\n'), ((4587, 4666), 'click.echo', 'click.echo', (['f"""Successfully upgraded from {current_version} to {latest_version}"""'], {}), "(f'Successfully upgraded from {current_version} to {latest_version}')\n", (4597, 4666), False, 'import click\n'), ((4685, 4784), 'click.echo', 'click.echo', (['f"""No update available. You are running the latest version: {current_version}"""'], {}), "(\n f'No update available. 
You are running the latest version: {current_version}'\n )\n", (4695, 4784), False, 'import click\n'), ((4847, 4958), 'subprocess.check_output', 'subprocess.check_output', (["[sys.executable, '-m', 'pip', 'list', '--outdated',\n '--disable-pip-version-check']"], {}), "([sys.executable, '-m', 'pip', 'list', '--outdated',\n '--disable-pip-version-check'])\n", (4870, 4958), False, 'import subprocess\n'), ((5639, 5712), 'click.echo', 'click.echo', (['f"""ERROR: Could not configure Raspberry Pi, see above errors."""'], {}), "(f'ERROR: Could not configure Raspberry Pi, see above errors.')\n", (5649, 5712), False, 'import click\n'), ((6231, 6352), 'click.echo', 'click.echo', (['f"""Successfully configured workoutizer as systemd service. Run it with: systemctl start wkz.service"""'], {}), "(\n f'Successfully configured workoutizer as systemd service. Run it with: systemctl start wkz.service'\n )\n", (6241, 6352), False, 'import click\n'), ((6361, 6462), 'click.echo', 'click.echo', (['f"""ERROR: Could not configure workoutizer as systemd service, see above errors."""'], {}), "(\n f'ERROR: Could not configure workoutizer as systemd service, see above errors.'\n )\n", (6371, 6462), False, 'import click\n'), ((6733, 6768), 'os.path.isfile', 'os.path.isfile', (['WORKOUTIZER_DB_PATH'], {}), '(WORKOUTIZER_DB_PATH)\n', (6747, 6768), False, 'import os\n'), ((7593, 7618), 'os.mkdir', 'os.mkdir', (['WORKOUTIZER_DIR'], {}), '(WORKOUTIZER_DIR)\n', (7601, 7618), False, 'import os\n'), ((7697, 7716), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (7710, 7716), False, 'import os\n'), ((7726, 7740), 'os.mkdir', 'os.mkdir', (['path'], {}), '(path)\n', (7734, 7740), False, 'import os\n'), ((8243, 8332), 'subprocess.check_call', 'subprocess.check_call', (["[sys.executable, '-m', 'pip', 'install', package, '--upgrade']"], {}), "([sys.executable, '-m', 'pip', 'install', package,\n '--upgrade'])\n", (8264, 8332), False, 'import subprocess\n'), ((8347, 8419), 'subprocess.check_call', 'subprocess.check_call', (["[sys.executable, '-m', 'pip', 'install', package]"], {}), "([sys.executable, '-m', 'pip', 'install', package])\n", (8368, 8419), False, 'import subprocess\n'), ((5005, 5078), 'subprocess.check_output', 'subprocess.check_output', (["[sys.executable, '-m', 'pip', 'search', package]"], {}), "([sys.executable, '-m', 'pip', 'search', package])\n", (5028, 5078), False, 'import subprocess\n'), ((6782, 6860), 'click.echo', 'click.echo', (['f"""Found existing workoutizer database at: {WORKOUTIZER_DB_PATH}\n"""'], {}), "(f'Found existing workoutizer database at: {WORKOUTIZER_DB_PATH}\\n')\n", (6792, 6860), False, 'import click\n'), ((9497, 9528), 'ansible.cli.CLI.version_info', 'CLI.version_info', ([], {'gitinfo': '(False)'}), '(gitinfo=False)\n', (9513, 9528), False, 'from ansible.cli import CLI\n'), ((7311, 7376), 'click.echo', 'click.echo', (['f"""keeping existing database at {WORKOUTIZER_DB_PATH}"""'], {}), "(f'keeping existing database at {WORKOUTIZER_DB_PATH}')\n", (7321, 7376), False, 'import click\n'), ((7434, 7490), 'click.echo', 'click.echo', (['f"""removed database at {WORKOUTIZER_DB_PATH}"""'], {}), "(f'removed database at {WORKOUTIZER_DB_PATH}')\n", (7444, 7490), False, 'import click\n'), ((7507, 7537), 'os.remove', 'os.remove', (['WORKOUTIZER_DB_PATH'], {}), '(WORKOUTIZER_DB_PATH)\n', (7516, 7537), False, 'import os\n'), ((9614, 9658), 'os.path.join', 'os.path.join', (['SETUP_DIR', '"""ansible"""', 'playbook'], {}), "(SETUP_DIR, 'ansible', playbook)\n", (9626, 9658), False, 'import os\n')] |
matanhofree/bcbio-nextgen | bcbio/bam/trim.py | e6938cedb20ff3b7632165105941d71189e46aac | """Provide trimming of input reads from Fastq or BAM files.
"""
import os
import sys
import tempfile
from bcbio.utils import (file_exists, safe_makedir,
replace_suffix, append_stem, is_pair,
replace_directory, map_wrap)
from bcbio.log import logger
from bcbio.bam import fastq
from bcbio.provenance import do
from Bio.Seq import Seq
from itertools import izip, repeat
from bcbio.distributed.transaction import file_transaction
from bcbio.pipeline import config_utils
SUPPORTED_ADAPTERS = {
"illumina": ["AACACTCTTTCCCT", "AGATCGGAAGAGCG"],
"truseq": ["AGATCGGAAGAG"],
"polya": ["AAAAAAAAAAAAA"],
"nextera": ["AATGATACGGCGA", "CAAGCAGAAGACG"]}
QUALITY_FLAGS = {5: ['"E"', '"&"'],
20: ['"T"', '"5"']}
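# The flag characters above are the FASTQ encodings of the quality cutoff:
# phred+64 (older Illumina) in the first slot and phred+33 (Sanger) in the
# second, e.g. chr(5 + 64) == 'E', chr(5 + 33) == '&', chr(20 + 64) == 'T',
# chr(20 + 33) == '5'.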
def trim_adapters(fastq_files, dirs, config):
QUALITY_CUTOFF = 5
to_trim = _get_sequences_to_trim(config)
resources = config_utils.get_resources("AlienTrimmer", config)
try:
jarpath = config_utils.get_program("AlienTrimmer", config, "dir")
# fall back on Cutadapt if AlienTrimmer is not installed
# XXX: remove after it has been live for a while
except:
return trim_read_through(fastq_files, dirs, config)
jarfile = config_utils.get_jar("AlienTrimmer", jarpath)
jvm_opts = " ".join(resources.get("jvm_opts", ["-Xms750m", "-Xmx2g"]))
base_cmd = ("java -jar {jvm_opts} {jarfile} -k 10 ")
fastq1 = fastq_files[0]
supplied_quality_format = _get_quality_format(config)
cores = config["algorithm"].get("num_cores", 0)
out_files = _get_read_through_trimmed_outfiles(fastq_files, dirs)
fastq1_out = out_files[0]
if supplied_quality_format == "illumina":
quality_flag = QUALITY_FLAGS[QUALITY_CUTOFF][0]
else:
quality_flag = QUALITY_FLAGS[QUALITY_CUTOFF][1]
quality_flag = '-q ' + quality_flag
if len(fastq_files) == 1:
if file_exists(fastq1_out):
return [fastq1_out]
base_cmd += ("-i {fastq1} -o {tx_fastq1_out} -c {temp_file} "
"{quality_flag}")
message = "Trimming %s from %s with AlienTrimmer." % (to_trim, fastq1)
else:
fastq2 = fastq_files[1]
fastq2_out = out_files[1]
if all(map(file_exists, [fastq1_out, fastq2_out])):
return [fastq1_out, fastq2_out]
base_cmd += ("-if {fastq1} -ir {fastq2} -of {tx_fastq1_out} "
"-or {tx_fastq2_out} -c {temp_file} {quality_flag}")
message = ("Trimming %s from %s and %s with AlienTrimmer."
% (to_trim, fastq1, fastq2))
with tempfile.NamedTemporaryFile(delete=False) as temp:
temp_file = temp.name
for adapter in to_trim:
temp.write(adapter + "\n")
temp.close()
if len(fastq_files) == 1:
with file_transaction(fastq1_out) as tx_fastq1_out:
do.run(base_cmd.format(**locals()), message)
return [fastq1_out]
else:
with file_transaction([fastq1_out, fastq2_out]) as tx_out_files:
tx_fastq1_out = tx_out_files[0]
tx_fastq2_out = tx_out_files[1]
do.run(base_cmd.format(**locals()), message)
return [fastq1_out, fastq2_out]
def trim_read_through(fastq_files, dirs, lane_config):
"""
for small insert sizes, the read length can be longer than the insert
resulting in the reverse complement of the 3' adapter being sequenced.
    this takes adapter sequences and trims only the reverse complement
of the adapter
MYSEQUENCEAAAARETPADA -> MYSEQUENCEAAAA (no polyA trim)
"""
quality_format = _get_quality_format(lane_config)
to_trim = _get_sequences_to_trim(lane_config)
out_files = _get_read_through_trimmed_outfiles(fastq_files, dirs)
fixed_files = append_stem(out_files, ".fixed")
if all(map(file_exists, fixed_files)):
return fixed_files
logger.info("Trimming %s from the 3' end of reads in %s using "
"cutadapt." % (", ".join(to_trim),
", ".join(fastq_files)))
cores = lane_config["algorithm"].get("num_cores", 1)
out_files = _cutadapt_trim(fastq_files, quality_format,
to_trim, out_files, cores)
fixed_files = remove_short_reads(out_files, dirs, lane_config)
return fixed_files
def remove_short_reads(fastq_files, dirs, lane_config):
"""
remove reads from a single or pair of fastq files which fall below
    a length threshold (default: 20 bases)
"""
min_length = int(lane_config["algorithm"].get("min_read_length", 20))
supplied_quality_format = _get_quality_format(lane_config)
if supplied_quality_format == "illumina":
quality_format = "fastq-illumina"
else:
quality_format = "fastq-sanger"
if is_pair(fastq_files):
fastq1, fastq2 = fastq_files
out_files = fastq.filter_reads_by_length(fastq1, fastq2, quality_format, min_length)
else:
out_files = [fastq.filter_single_reads_by_length(fastq_files[0],
quality_format, min_length)]
map(os.remove, fastq_files)
return out_files
def _get_read_through_trimmed_outfiles(fastq_files, dirs):
out_dir = os.path.join(dirs["work"], "trim")
safe_makedir(out_dir)
out_files = replace_directory(append_stem(fastq_files, "_trimmed"),
out_dir)
return out_files
def _get_sequences_to_trim(lane_config):
builtin_adapters = _get_builtin_adapters(lane_config)
polya = builtin_adapters.get("polya", [None])[0]
# allow for trimming of custom sequences for advanced users
custom_trim = lane_config["algorithm"].get("custom_trim", [])
builtin_adapters = {k: v for k, v in builtin_adapters.items() if
k != "polya"}
trim_sequences = custom_trim
# for unstranded RNA-seq, libraries, both polyA and polyT can appear
# at the 3' end as well
if polya:
trim_sequences += [polya, str(Seq(polya).reverse_complement())]
# also trim the reverse complement of the adapters
for _, v in builtin_adapters.items():
trim_sequences += [str(Seq(sequence)) for sequence in v]
trim_sequences += [str(Seq(sequence).reverse_complement()) for
sequence in v]
return trim_sequences
def _cutadapt_trim(fastq_files, quality_format, adapters, out_files, cores):
"""Trimming with cutadapt, using version installed with bcbio-nextgen.
Uses the system executable to find the version next to our Anaconda Python.
TODO: Could we use cutadapt as a library to avoid this?
"""
if quality_format == "illumina":
quality_base = "64"
else:
quality_base = "33"
# --times=2 tries twice remove adapters which will allow things like:
# realsequenceAAAAAAadapter to remove both the poly-A and the adapter
# this behavior might not be what we want; we could also do two or
# more passes of cutadapt
cutadapt = os.path.join(os.path.dirname(sys.executable), "cutadapt")
base_cmd = [cutadapt, "--times=" + "2", "--quality-base=" + quality_base,
"--quality-cutoff=5", "--format=fastq", "--minimum-length=0"]
adapter_cmd = map(lambda x: "--adapter=" + x, adapters)
base_cmd.extend(adapter_cmd)
if all(map(file_exists, out_files)):
return out_files
with file_transaction(out_files) as tmp_out_files:
if isinstance(tmp_out_files, basestring):
tmp_out_files = [tmp_out_files]
map(_run_cutadapt_on_single_file, izip(repeat(base_cmd), fastq_files,
tmp_out_files))
return out_files
@map_wrap
def _run_cutadapt_on_single_file(base_cmd, fastq_file, out_file):
stat_file = replace_suffix(out_file, ".trim_stats.txt")
with open(stat_file, "w") as stat_handle:
cmd = list(base_cmd)
cmd.extend(["--output=" + out_file, fastq_file])
do.run(cmd, "Running cutadapt on %s." % (fastq_file), None)
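# Putting the two helpers above together, the command issued for one file looks
# roughly like this (paths and the adapter are placeholders, not fixed values):
#
#     <env-bin>/cutadapt --times=2 --quality-base=33 --quality-cutoff=5 \
#         --format=fastq --minimum-length=0 --adapter=AGATCGGAAGAG \
#         --output=sample_trimmed.fastq sample.fastq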
def _get_quality_format(lane_config):
SUPPORTED_FORMATS = ["illumina", "standard"]
quality_format = lane_config["algorithm"].get("quality_format",
"standard").lower()
if quality_format not in SUPPORTED_FORMATS:
logger.error("quality_format is set to an unsupported format. "
"Supported formats are %s."
% (", ".join(SUPPORTED_FORMATS)))
exit(1)
return quality_format
def _get_builtin_adapters(lane_config):
chemistries = lane_config["algorithm"].get("adapters", [])
adapters = {chemistry: SUPPORTED_ADAPTERS[chemistry] for
chemistry in chemistries if chemistry in SUPPORTED_ADAPTERS}
return adapters
| [((917, 967), 'bcbio.pipeline.config_utils.get_resources', 'config_utils.get_resources', (['"""AlienTrimmer"""', 'config'], {}), "('AlienTrimmer', config)\n", (943, 967), False, 'from bcbio.pipeline import config_utils\n'), ((1251, 1296), 'bcbio.pipeline.config_utils.get_jar', 'config_utils.get_jar', (['"""AlienTrimmer"""', 'jarpath'], {}), "('AlienTrimmer', jarpath)\n", (1271, 1296), False, 'from bcbio.pipeline import config_utils\n'), ((3797, 3829), 'bcbio.utils.append_stem', 'append_stem', (['out_files', '""".fixed"""'], {}), "(out_files, '.fixed')\n", (3808, 3829), False, 'from bcbio.utils import file_exists, safe_makedir, replace_suffix, append_stem, is_pair, replace_directory, map_wrap\n'), ((4803, 4823), 'bcbio.utils.is_pair', 'is_pair', (['fastq_files'], {}), '(fastq_files)\n', (4810, 4823), False, 'from bcbio.utils import file_exists, safe_makedir, replace_suffix, append_stem, is_pair, replace_directory, map_wrap\n'), ((5251, 5285), 'os.path.join', 'os.path.join', (["dirs['work']", '"""trim"""'], {}), "(dirs['work'], 'trim')\n", (5263, 5285), False, 'import os\n'), ((5290, 5311), 'bcbio.utils.safe_makedir', 'safe_makedir', (['out_dir'], {}), '(out_dir)\n', (5302, 5311), False, 'from bcbio.utils import file_exists, safe_makedir, replace_suffix, append_stem, is_pair, replace_directory, map_wrap\n'), ((7808, 7851), 'bcbio.utils.replace_suffix', 'replace_suffix', (['out_file', '""".trim_stats.txt"""'], {}), "(out_file, '.trim_stats.txt')\n", (7822, 7851), False, 'from bcbio.utils import file_exists, safe_makedir, replace_suffix, append_stem, is_pair, replace_directory, map_wrap\n'), ((995, 1050), 'bcbio.pipeline.config_utils.get_program', 'config_utils.get_program', (['"""AlienTrimmer"""', 'config', '"""dir"""'], {}), "('AlienTrimmer', config, 'dir')\n", (1019, 1050), False, 'from bcbio.pipeline import config_utils\n'), ((1916, 1939), 'bcbio.utils.file_exists', 'file_exists', (['fastq1_out'], {}), '(fastq1_out)\n', (1927, 1939), False, 'from bcbio.utils import file_exists, safe_makedir, replace_suffix, append_stem, is_pair, replace_directory, map_wrap\n'), ((2609, 2650), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (2636, 2650), False, 'import tempfile\n'), ((4882, 4954), 'bcbio.bam.fastq.filter_reads_by_length', 'fastq.filter_reads_by_length', (['fastq1', 'fastq2', 'quality_format', 'min_length'], {}), '(fastq1, fastq2, quality_format, min_length)\n', (4910, 4954), False, 'from bcbio.bam import fastq\n'), ((5346, 5382), 'bcbio.utils.append_stem', 'append_stem', (['fastq_files', '"""_trimmed"""'], {}), "(fastq_files, '_trimmed')\n", (5357, 5382), False, 'from bcbio.utils import file_exists, safe_makedir, replace_suffix, append_stem, is_pair, replace_directory, map_wrap\n'), ((7044, 7075), 'os.path.dirname', 'os.path.dirname', (['sys.executable'], {}), '(sys.executable)\n', (7059, 7075), False, 'import os\n'), ((7413, 7440), 'bcbio.distributed.transaction.file_transaction', 'file_transaction', (['out_files'], {}), '(out_files)\n', (7429, 7440), False, 'from bcbio.distributed.transaction import file_transaction\n'), ((7992, 8049), 'bcbio.provenance.do.run', 'do.run', (['cmd', "('Running cutadapt on %s.' % fastq_file)", 'None'], {}), "(cmd, 'Running cutadapt on %s.' 
% fastq_file, None)\n", (7998, 8049), False, 'from bcbio.provenance import do\n'), ((2827, 2855), 'bcbio.distributed.transaction.file_transaction', 'file_transaction', (['fastq1_out'], {}), '(fastq1_out)\n', (2843, 2855), False, 'from bcbio.distributed.transaction import file_transaction\n'), ((2982, 3024), 'bcbio.distributed.transaction.file_transaction', 'file_transaction', (['[fastq1_out, fastq2_out]'], {}), '([fastq1_out, fastq2_out])\n', (2998, 3024), False, 'from bcbio.distributed.transaction import file_transaction\n'), ((4986, 5065), 'bcbio.bam.fastq.filter_single_reads_by_length', 'fastq.filter_single_reads_by_length', (['fastq_files[0]', 'quality_format', 'min_length'], {}), '(fastq_files[0], quality_format, min_length)\n', (5021, 5065), False, 'from bcbio.bam import fastq\n'), ((6187, 6200), 'Bio.Seq.Seq', 'Seq', (['sequence'], {}), '(sequence)\n', (6190, 6200), False, 'from Bio.Seq import Seq\n'), ((7600, 7616), 'itertools.repeat', 'repeat', (['base_cmd'], {}), '(base_cmd)\n', (7606, 7616), False, 'from itertools import izip, repeat\n'), ((6024, 6034), 'Bio.Seq.Seq', 'Seq', (['polya'], {}), '(polya)\n', (6027, 6034), False, 'from Bio.Seq import Seq\n'), ((6252, 6265), 'Bio.Seq.Seq', 'Seq', (['sequence'], {}), '(sequence)\n', (6255, 6265), False, 'from Bio.Seq import Seq\n')] |
nasserarbabi/FEniCSUI-dev | FEniCSUI/AnalysesHub/views.py | f8f161e1b49932843e01301212e7d031fff4f6c8 | from rest_framework.response import Response
from rest_framework.views import APIView
from django.shortcuts import get_object_or_404
from dashboard.models import projects
from .models import AnalysisConfig, SolverResults, SolverProgress, DockerLogs
from rest_framework.parsers import FormParser, JSONParser, MultiPartParser, FileUploadParser
from rest_framework import status
import docker
import os
import json
from zipfile import ZipFile
from django.http import HttpResponse
from threading import Thread
from time import sleep
from datetime import datetime
class solverConfig(APIView):
parser_classes = [FormParser, MultiPartParser]
def get(self, request, *args, **kwargs):
"""
return a list of entries within a given category
"""
project = get_object_or_404(projects, id=kwargs['project_id'])
category = request.query_params.get('category')
parentConfig = AnalysisConfig.objects.get(project=project)
jsonHelper = json.loads(parentConfig.config)
if category in jsonHelper:
return Response(data=jsonHelper[category], status=status.HTTP_200_OK)
else:
return Response(data="The category {} does not exist".format(category), status=status.HTTP_204_NO_CONTENT)
def post(self, request, *args, **kwargs):
"""
create a new category for solver configuration
"""
project = get_object_or_404(projects, id=kwargs['project_id'])
data = request.data.dict()
category = request.query_params.get('category')
parentConfig = AnalysisConfig.objects.get(project=project)
jsonHelper = json.loads(parentConfig.config)
# if request does not contain a name
if not "Name" in data:
return Response(data="Please provide a 'Name' for the entry", status=400)
# if there is no category similar to the user request
if category not in jsonHelper:
jsonHelper[category] = []
jsonHelper[category].append(data)
# check if the entry with the same name exists
elif not list(filter(lambda name: name["Name"] == data["Name"], jsonHelper[category])):
jsonHelper[category].append(data)
else:
return Response(data="an entry with the same name exists", status=400)
parentConfig.config = json.dumps(jsonHelper)
parentConfig.save()
return Response(data=jsonHelper[category], status=status.HTTP_201_CREATED)
def put(self, request, *args, **kwargs):
"""
Edit an existing category entry's data
"""
project = get_object_or_404(projects, id=kwargs['project_id'])
data = request.data.dict()
# if request does not contain a name
if not "Name" in data:
return Response(data="Please provide a 'Name' for the entry", status=400)
category = request.query_params.get('category')
list_id = int(request.query_params.get('id'))
parentConfig = AnalysisConfig.objects.get(project=project)
jsonHelper = json.loads(parentConfig.config)
if category in jsonHelper:
if list_id >= 0 and list_id < len(jsonHelper[category]):
# check if an entry with the same name exists
if not list(filter(lambda name: name["Name"] == data["Name"], jsonHelper[category])) or jsonHelper[category][list_id]["Name"] == data["Name"]:
jsonHelper[category][list_id] = data
parentConfig.config = json.dumps(jsonHelper)
parentConfig.save()
return Response(data=jsonHelper[category], status=status.HTTP_200_OK)
else:
return Response(data="an entry with the same name exists", status=400)
else:
return Response(data="No entry with the id={}".format(list_id), status=status.HTTP_204_NO_CONTENT)
else:
return Response(data="The category {} does not exist".format(category), status=status.HTTP_204_NO_CONTENT)
def delete(self, request, *args, **kwargs):
"""
Delete an entry from the category
"""
project = get_object_or_404(projects, id=kwargs['project_id'])
category = request.query_params.get('category')
list_id = int(request.query_params.get('id'))
parentConfig = AnalysisConfig.objects.get(project=project)
jsonHelper = json.loads(parentConfig.config)
if jsonHelper[category]:
if list_id >= 0 and list_id < len(jsonHelper[category]):
jsonHelper[category].pop(int(list_id))
parentConfig.config = json.dumps(jsonHelper)
parentConfig.save()
return Response(data=jsonHelper[category], status=status.HTTP_200_OK)
else:
return Response(data="No entry with the id={}".format(list_id), status=status.HTTP_204_NO_CONTENT)
else:
return Response(data="The category {} does not exist".format(category), status=status.HTTP_204_NO_CONTENT)
class Categories(APIView):
parser_classes = [FormParser, MultiPartParser]
def get(self, request, *args, **kwargs):
"""
Return the existing categories in the solver config
"""
project = get_object_or_404(projects, id=kwargs['project_id'])
config = json.loads(AnalysisConfig.objects.get(
project=project).config).keys()
return Response(data=config, status=status.HTTP_200_OK)
def delete(self, request, *args, **kwargs):
"""
        Delete an existing category from the solver config
"""
project = get_object_or_404(projects, id=kwargs['project_id'])
category = request.query_params.get('category')
parentConfig = AnalysisConfig.objects.get(project=project)
jsonHelper = json.loads(parentConfig.config)
if category in jsonHelper:
del jsonHelper[category]
parentConfig.config = json.dumps(jsonHelper)
parentConfig.save()
return Response(data=jsonHelper, status=status.HTTP_410_GONE)
else:
return Response(data="The category {} does not exist!".format(category), status=status.HTTP_404_NOT_FOUND)
class getConfiguration(APIView):
parser_classes = [FormParser, MultiPartParser]
def get(self, request, *args, **kwargs):
"""
Get the solver config to be submitted to the analysis
"""
project = get_object_or_404(projects, id=kwargs['project_id'])
config = AnalysisConfig.objects.filter(project=project).values()[0]
return Response(data=config["config"], status=status.HTTP_200_OK)
def streamDockerLog(container, project):
for line in container.logs(stream=True):
logs = get_object_or_404(DockerLogs, project=project)
now = datetime.now()
current_time = now.strftime("[%H:%M:%S]: ")
logs.log = current_time + str(line.strip(), 'utf-8') + "\n" + logs.log
logs.save()
class solvers(APIView):
parser_classes = [FormParser, MultiPartParser]
def get(self, request, *args, **kwargs):
"""
Runs the related solver defined in url parameters
"""
project = get_object_or_404(projects, id=kwargs['project_id'])
# set progress to initial
SolverProgress.objects.get_or_create(
            project=project,
            defaults={'progress': json.dumps({"status": "", "message": ""})})
        progress = SolverProgress.objects.get(project=project)
        progress.progress = json.dumps({"state": {"status": "RECEIVED", "message": {"progress": "0.0"}}, "logs": ""})
progress.save()
# initiate related solver
solver = request.query_params.get('solver')
client = docker.from_env()
solverPath = os.path.abspath('./solvers')
if DockerLogs.objects.filter(project=project).exists():
DockerLogs.objects.filter(project=project).delete()
        DockerLogs.objects.create(project=project, log="")
try:
container = client.containers.run(
"quay.io/fenicsproject/stable:current",
volumes={solverPath: {
'bind': '/home/fenics/shared', 'mode': 'rw'}},
working_dir="/home/fenics/shared",
# runs solver.py with two arguments to be passed in to python file
command=["`sudo pip3 install requests \n python3 solverHub.py {} {}`".format(
project.id, solver)],
name="FEniCSDocker",
auto_remove=False,
detach=True)
thread = Thread(target=streamDockerLog, args=(container, project))
thread.start()
except:
            message = '''Please check that Docker is running and that a container named FEniCSDocker does not already exist.
            If you are using Docker for Windows, make sure the file sharing setting for the main folder directory is on.
            If you are working with WSL, make sure it has access to the Windows Docker daemon.
            Instructions can be found at: https://nickjanetakis.com/blog/setting-up-docker-for-windows-and-wsl-to-work-flawlessly'''
print(message)
return Response(data=message, status=500)
return Response(data="submitted to analysis", status=status.HTTP_200_OK)
def delete(self, request, *args, **kwargs):
"""
        Kill the running Docker container
"""
client = docker.from_env()
try:
container = client.containers.get("FEniCSDocker")
container.stop()
return Response(data="container stopped successfully", status=200)
except:
return Response(data="No container running", status=404)
class saveResults(APIView):
parser_classes = [FileUploadParser]
def put(self, request, filename, format=None, *args, **kwargs):
"""
        Save results to the media folder. A database record is created so the results are available for download.
"""
project = get_object_or_404(projects, id=kwargs['project_id'])
fileType = request.query_params.get('fileType')
data = request.data['file']
folderPath = os.path.abspath(
"../FEniCSUI/media/{}/results/".format(kwargs['project_id']))
os.makedirs(folderPath, exist_ok=True)
filePath = '{}/{}.{}'.format(folderPath, filename, fileType)
with open(filePath, 'wb+') as destination:
for chunk in data.chunks():
destination.write(chunk)
if not SolverResults.objects.filter(project=project).exists():
SolverResults.objects.create(project=project, path=folderPath)
return Response(data="results updated at {}".format(filePath), status=status.HTTP_201_CREATED)
class downloadResults(APIView):
def get(self, request, *args, **kwargs):
"""
Get the results saved in the database
"""
project = get_object_or_404(projects, id=kwargs['project_id'])
if (SolverResults.objects.filter(project=project).exists()):
            results = SolverResults.objects.filter(project=project).values()[0]
            folderPath = results['path']
# create a ZipFile object
with ZipFile('{}/results.zip'.format(folderPath), 'w') as zipObj:
# Iterate over all the files in directory
for folderName, subfolders, filenames in os.walk(folderPath):
for filename in filenames:
if not filename == 'results.zip':
filePath = os.path.join(folderName, filename)
# Add file to zip
zipObj.write(filePath, os.path.basename(filePath))
zipFile = open('{}/results.zip'.format(folderPath), 'rb')
            response = HttpResponse(zipFile, content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename=results.zip'
return response
else:
return Response(data="not found", status=404)
class solverProgress(APIView):
parser_classes = [JSONParser]
def get(self, request, *args, **kwargs):
"""
Get the progress
"""
project = get_object_or_404(projects, id=kwargs['project_id'])
if (SolverProgress.objects.filter(project=project).exists()):
progress = json.loads(get_object_or_404(SolverProgress, project=project).progress)
logs = get_object_or_404(DockerLogs, project=project).log
else:
progress = "null"
logs=""
return Response(data=json.dumps({"state":progress,"logs":logs}), status=status.HTTP_200_OK)
def post(self, request, *args, **kwargs):
"""
Update the progress from solver
"""
project = get_object_or_404(projects, id=kwargs['project_id'])
data = request.data
if SolverProgress.objects.filter(project=project).exists():
progress = get_object_or_404(SolverProgress, project=project)
progress.progress = json.dumps(data)
progress.save()
else:
SolverProgress.objects.create(project=project, progress=data)
return Response(data=get_object_or_404(SolverProgress, project=project).progress, status=status.HTTP_201_CREATED)
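# Illustrative sketch of how a solver process could report progress back to solverProgress.post().
# The route below is an assumption: the actual URL depends on the project's urls.py (not shown here);
# only the JSON payload shape mirrors what the view stores.
#
#   import requests
#   requests.post(
#       "http://localhost:8000/AnalysesHub/<project_id>/progress",  # hypothetical route
#       json={"status": "RUNNING", "message": {"progress": "42.0"}},
#   )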
| [((787, 839), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['projects'], {'id': "kwargs['project_id']"}), "(projects, id=kwargs['project_id'])\n", (804, 839), False, 'from django.shortcuts import get_object_or_404\n'), ((984, 1015), 'json.loads', 'json.loads', (['parentConfig.config'], {}), '(parentConfig.config)\n', (994, 1015), False, 'import json\n'), ((1411, 1463), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['projects'], {'id': "kwargs['project_id']"}), "(projects, id=kwargs['project_id'])\n", (1428, 1463), False, 'from django.shortcuts import get_object_or_404\n'), ((1643, 1674), 'json.loads', 'json.loads', (['parentConfig.config'], {}), '(parentConfig.config)\n', (1653, 1674), False, 'import json\n'), ((2350, 2372), 'json.dumps', 'json.dumps', (['jsonHelper'], {}), '(jsonHelper)\n', (2360, 2372), False, 'import json\n'), ((2416, 2483), 'rest_framework.response.Response', 'Response', ([], {'data': 'jsonHelper[category]', 'status': 'status.HTTP_201_CREATED'}), '(data=jsonHelper[category], status=status.HTTP_201_CREATED)\n', (2424, 2483), False, 'from rest_framework.response import Response\n'), ((2619, 2671), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['projects'], {'id': "kwargs['project_id']"}), "(projects, id=kwargs['project_id'])\n", (2636, 2671), False, 'from django.shortcuts import get_object_or_404\n'), ((3069, 3100), 'json.loads', 'json.loads', (['parentConfig.config'], {}), '(parentConfig.config)\n', (3079, 3100), False, 'import json\n'), ((4191, 4243), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['projects'], {'id': "kwargs['project_id']"}), "(projects, id=kwargs['project_id'])\n", (4208, 4243), False, 'from django.shortcuts import get_object_or_404\n'), ((4442, 4473), 'json.loads', 'json.loads', (['parentConfig.config'], {}), '(parentConfig.config)\n', (4452, 4473), False, 'import json\n'), ((5310, 5362), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['projects'], {'id': "kwargs['project_id']"}), "(projects, id=kwargs['project_id'])\n", (5327, 5362), False, 'from django.shortcuts import get_object_or_404\n'), ((5478, 5526), 'rest_framework.response.Response', 'Response', ([], {'data': 'config', 'status': 'status.HTTP_200_OK'}), '(data=config, status=status.HTTP_200_OK)\n', (5486, 5526), False, 'from rest_framework.response import Response\n'), ((5678, 5730), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['projects'], {'id': "kwargs['project_id']"}), "(projects, id=kwargs['project_id'])\n", (5695, 5730), False, 'from django.shortcuts import get_object_or_404\n'), ((5875, 5906), 'json.loads', 'json.loads', (['parentConfig.config'], {}), '(parentConfig.config)\n', (5885, 5906), False, 'import json\n'), ((6513, 6565), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['projects'], {'id': "kwargs['project_id']"}), "(projects, id=kwargs['project_id'])\n", (6530, 6565), False, 'from django.shortcuts import get_object_or_404\n'), ((6657, 6715), 'rest_framework.response.Response', 'Response', ([], {'data': "config['config']", 'status': 'status.HTTP_200_OK'}), "(data=config['config'], status=status.HTTP_200_OK)\n", (6665, 6715), False, 'from rest_framework.response import Response\n'), ((6818, 6864), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['DockerLogs'], {'project': 'project'}), '(DockerLogs, project=project)\n', (6835, 6864), False, 'from django.shortcuts import get_object_or_404\n'), ((6879, 6893), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', 
(6891, 6893), False, 'from datetime import datetime\n'), ((7271, 7323), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['projects'], {'id': "kwargs['project_id']"}), "(projects, id=kwargs['project_id'])\n", (7288, 7323), False, 'from django.shortcuts import get_object_or_404\n'), ((7587, 7680), 'json.dumps', 'json.dumps', (["{'state': {'status': 'RECEIVED', 'message': {'progress': '0.0'}}, 'logs': ''}"], {}), "({'state': {'status': 'RECEIVED', 'message': {'progress': '0.0'}},\n 'logs': ''})\n", (7597, 7680), False, 'import json\n'), ((7804, 7821), 'docker.from_env', 'docker.from_env', ([], {}), '()\n', (7819, 7821), False, 'import docker\n'), ((7843, 7871), 'os.path.abspath', 'os.path.abspath', (['"""./solvers"""'], {}), "('./solvers')\n", (7858, 7871), False, 'import os\n'), ((9335, 9400), 'rest_framework.response.Response', 'Response', ([], {'data': '"""submitted to analysis"""', 'status': 'status.HTTP_200_OK'}), "(data='submitted to analysis', status=status.HTTP_200_OK)\n", (9343, 9400), False, 'from rest_framework.response import Response\n'), ((9534, 9551), 'docker.from_env', 'docker.from_env', ([], {}), '()\n', (9549, 9551), False, 'import docker\n'), ((10097, 10149), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['projects'], {'id': "kwargs['project_id']"}), "(projects, id=kwargs['project_id'])\n", (10114, 10149), False, 'from django.shortcuts import get_object_or_404\n'), ((10362, 10400), 'os.makedirs', 'os.makedirs', (['folderPath'], {'exist_ok': '(True)'}), '(folderPath, exist_ok=True)\n', (10373, 10400), False, 'import os\n'), ((11021, 11073), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['projects'], {'id': "kwargs['project_id']"}), "(projects, id=kwargs['project_id'])\n", (11038, 11073), False, 'from django.shortcuts import get_object_or_404\n'), ((12327, 12379), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['projects'], {'id': "kwargs['project_id']"}), "(projects, id=kwargs['project_id'])\n", (12344, 12379), False, 'from django.shortcuts import get_object_or_404\n'), ((12908, 12960), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['projects'], {'id': "kwargs['project_id']"}), "(projects, id=kwargs['project_id'])\n", (12925, 12960), False, 'from django.shortcuts import get_object_or_404\n'), ((1071, 1133), 'rest_framework.response.Response', 'Response', ([], {'data': 'jsonHelper[category]', 'status': 'status.HTTP_200_OK'}), '(data=jsonHelper[category], status=status.HTTP_200_OK)\n', (1079, 1133), False, 'from rest_framework.response import Response\n'), ((1771, 1837), 'rest_framework.response.Response', 'Response', ([], {'data': '"""Please provide a \'Name\' for the entry"""', 'status': '(400)'}), '(data="Please provide a \'Name\' for the entry", status=400)\n', (1779, 1837), False, 'from rest_framework.response import Response\n'), ((2803, 2869), 'rest_framework.response.Response', 'Response', ([], {'data': '"""Please provide a \'Name\' for the entry"""', 'status': '(400)'}), '(data="Please provide a \'Name\' for the entry", status=400)\n', (2811, 2869), False, 'from rest_framework.response import Response\n'), ((6013, 6035), 'json.dumps', 'json.dumps', (['jsonHelper'], {}), '(jsonHelper)\n', (6023, 6035), False, 'import json\n'), ((6087, 6141), 'rest_framework.response.Response', 'Response', ([], {'data': 'jsonHelper', 'status': 'status.HTTP_410_GONE'}), '(data=jsonHelper, status=status.HTTP_410_GONE)\n', (6095, 6141), False, 'from rest_framework.response import Response\n'), ((8672, 8729), 
'threading.Thread', 'Thread', ([], {'target': 'streamDockerLog', 'args': '(container, project)'}), '(target=streamDockerLog, args=(container, project))\n', (8678, 8729), False, 'from threading import Thread\n'), ((9675, 9734), 'rest_framework.response.Response', 'Response', ([], {'data': '"""container stopped successfully"""', 'status': '(200)'}), "(data='container stopped successfully', status=200)\n", (9683, 9734), False, 'from rest_framework.response import Response\n'), ((11912, 11965), 'django.http.HttpResponse', 'HttpResponse', (['zipFile'], {'content_type': '"""application/zip"""'}), "(zipFile, content_type='application/zip')\n", (11924, 11965), False, 'from django.http import HttpResponse\n'), ((12107, 12145), 'rest_framework.response.Response', 'Response', ([], {'data': '"""not found"""', 'status': '(404)'}), "(data='not found', status=404)\n", (12115, 12145), False, 'from rest_framework.response import Response\n'), ((13080, 13130), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['SolverProgress'], {'project': 'project'}), '(SolverProgress, project=project)\n', (13097, 13130), False, 'from django.shortcuts import get_object_or_404\n'), ((13163, 13179), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (13173, 13179), False, 'import json\n'), ((2255, 2318), 'rest_framework.response.Response', 'Response', ([], {'data': '"""an entry with the same name exists"""', 'status': '(400)'}), "(data='an entry with the same name exists', status=400)\n", (2263, 2318), False, 'from rest_framework.response import Response\n'), ((4669, 4691), 'json.dumps', 'json.dumps', (['jsonHelper'], {}), '(jsonHelper)\n', (4679, 4691), False, 'import json\n'), ((4751, 4813), 'rest_framework.response.Response', 'Response', ([], {'data': 'jsonHelper[category]', 'status': 'status.HTTP_200_OK'}), '(data=jsonHelper[category], status=status.HTTP_200_OK)\n', (4759, 4813), False, 'from rest_framework.response import Response\n'), ((9285, 9319), 'rest_framework.response.Response', 'Response', ([], {'data': 'message', 'status': '(500)'}), '(data=message, status=500)\n', (9293, 9319), False, 'from rest_framework.response import Response\n'), ((9770, 9819), 'rest_framework.response.Response', 'Response', ([], {'data': '"""No container running"""', 'status': '(404)'}), "(data='No container running', status=404)\n", (9778, 9819), False, 'from rest_framework.response import Response\n'), ((11495, 11514), 'os.walk', 'os.walk', (['folderPath'], {}), '(folderPath)\n', (11502, 11514), False, 'import os\n'), ((12564, 12610), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['DockerLogs'], {'project': 'project'}), '(DockerLogs, project=project)\n', (12581, 12610), False, 'from django.shortcuts import get_object_or_404\n'), ((12708, 12753), 'json.dumps', 'json.dumps', (["{'state': progress, 'logs': logs}"], {}), "({'state': progress, 'logs': logs})\n", (12718, 12753), False, 'import json\n'), ((3526, 3548), 'json.dumps', 'json.dumps', (['jsonHelper'], {}), '(jsonHelper)\n', (3536, 3548), False, 'import json\n'), ((3616, 3678), 'rest_framework.response.Response', 'Response', ([], {'data': 'jsonHelper[category]', 'status': 'status.HTTP_200_OK'}), '(data=jsonHelper[category], status=status.HTTP_200_OK)\n', (3624, 3678), False, 'from rest_framework.response import Response\n'), ((3728, 3791), 'rest_framework.response.Response', 'Response', ([], {'data': '"""an entry with the same name exists"""', 'status': '(400)'}), "(data='an entry with the same name exists', status=400)\n", (3736, 3791), False, 'from 
rest_framework.response import Response\n'), ((7450, 7491), 'json.dumps', 'json.dumps', (["{'status': '', 'message': ''}"], {}), "({'status': '', 'message': ''})\n", (7460, 7491), False, 'import json\n'), ((12484, 12534), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['SolverProgress'], {'project': 'project'}), '(SolverProgress, project=project)\n', (12501, 12534), False, 'from django.shortcuts import get_object_or_404\n'), ((13326, 13376), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['SolverProgress'], {'project': 'project'}), '(SolverProgress, project=project)\n', (13343, 13376), False, 'from django.shortcuts import get_object_or_404\n'), ((11660, 11694), 'os.path.join', 'os.path.join', (['folderName', 'filename'], {}), '(folderName, filename)\n', (11672, 11694), False, 'import os\n'), ((11792, 11818), 'os.path.basename', 'os.path.basename', (['filePath'], {}), '(filePath)\n', (11808, 11818), False, 'import os\n')] |
EnjoyLifeFund/macHighSierra-py36-pkgs | fs/opener/appfs.py | 5668b5785296b314ea1321057420bcd077dba9ea | # coding: utf-8
"""``AppFS`` opener definition.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .base import Opener
from .errors import OpenerError
from ..subfs import ClosingSubFS
from .. import appfs
class AppFSOpener(Opener):
"""``AppFS`` opener.
"""
protocols = [
'userdata',
'userconf',
'sitedata',
'siteconf',
'usercache',
'userlog'
]
_protocol_mapping = {
'userdata': appfs.UserDataFS,
'userconf': appfs.UserConfigFS,
'sitedata': appfs.SiteDataFS,
'siteconf': appfs.SiteConfigFS,
'usercache': appfs.UserCacheFS,
'userlog': appfs.UserLogFS
}
def open_fs(self, fs_url, parse_result, writeable, create, cwd):
fs_class = self._protocol_mapping[parse_result.protocol]
resource, delim, path = parse_result.resource.partition('/')
tokens = resource.split(':', 3)
if len(tokens) == 2:
appname, author = tokens
version = None
elif len(tokens) == 3:
appname, author, version = tokens
else:
raise OpenerError(
'resource should be <appname>:<author> '
'or <appname>:<author>:<version>'
)
app_fs = fs_class(
appname,
author=author,
version=version,
create=create
)
app_fs = (
app_fs.opendir(path, factory=ClosingSubFS)
if delim
else app_fs
)
return app_fs
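# Illustrative usage through PyFilesystem's ``fs.open_fs`` entry point (the app name, author and
# version below are hypothetical):
#
#   import fs
#   config_fs = fs.open_fs("userconf://myapp:myauthor")        # <appname>:<author>
#   data_fs = fs.open_fs("userdata://myapp:myauthor:1.0")       # <appname>:<author>:<version>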
| [] |
cyberj0g/verification-classifier | scripts/modeling_toolbox/evaluation.py | efb19a3864e27a7f149a1c27ee8e13eaa19f96eb | import numpy as np
from sklearn.metrics import fbeta_score, roc_curve, auc, confusion_matrix
from sklearn.decomposition import PCA
from sklearn import random_projection
from sklearn import svm
from sklearn.ensemble import IsolationForest
import matplotlib.pyplot as plt
from keras.layers import Dense, Input, Dropout
from keras.models import Model
from keras import regularizers
from keras.models import Sequential
from keras.optimizers import Adam
from keras.regularizers import l2
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import AdaBoostClassifier
import xgboost as xgb
def one_class_svm(x_train, x_test, x_attacks, svm_results):
# SVM Hyper-parameters
nus = [0.01]
gammas = ['auto']
dimensions = [int(i*x_test.shape[1]) for i in [0.25, 0.35, 0.5, 0.75, 0.9, 1]]
dimensions = list(filter(lambda x: x > 0, dimensions))
for n in dimensions:
x_reduced_pca, test_reduced_pca, attack_reduced_pca = reduce_dimensionality(n, x_train, x_test, 'PCA',
attack=x_attacks)
for nu in nus:
for gamma in gammas:
# Fit classifier with PCA reduced data
classifier = svm.OneClassSVM(kernel='rbf', gamma=gamma, nu=nu, cache_size=7000)
classifier.fit(x_reduced_pca)
fb, area, tnr, tpr_train, tpr_test = unsupervised_evaluation(classifier, x_reduced_pca,
test_reduced_pca,
attack_reduced_pca)
svm_results = svm_results.append({'nu': nu, 'gamma': gamma, 'n_components': n, 'TPR_train': tpr_train,
'TPR_test': tpr_test, 'TNR': tnr, 'model': 'svm', 'auc': area,
'f_beta': fb, 'projection': 'PCA'}, ignore_index=True)
# Fit classifier with RP reduced data
classifier = svm.OneClassSVM(kernel='rbf', gamma=gamma, nu=nu, cache_size=7000)
classifier.fit(x_train)
fb, area, tnr, tpr_train, tpr_test = unsupervised_evaluation(classifier, x_train,
x_test, x_attacks)
svm_results = svm_results.append({'nu': nu, 'gamma': gamma, 'n_components': x_test.shape[1],
'TPR_train': tpr_train,
'TPR_test': tpr_test, 'TNR': tnr, 'model': 'svm', 'auc': area,
'f_beta': fb, 'projection': 'None'}, ignore_index=True)
return svm_results
def isolation_forest(x_train, x_test, x_attacks, isolation_results):
# Isolation Forest Hyper-parameters
estimators = [200, 100]
contaminations = [0.01]
dimensions = [int(i*x_test.shape[1]) for i in [0.25, 0.5, 0.9, 1]]
dimensions = list(filter(lambda x: x > 0, dimensions))
for n in dimensions:
x_reduced_pca, test_reduced_pca, attack_reduced_pca = reduce_dimensionality(n, x_train, x_test, 'PCA',
attack=x_attacks)
x_reduced_rp, test_reduced_rp, attack_reduced_rp = reduce_dimensionality(n, x_train, x_test, 'RP',
attack=x_attacks)
max_features = list(range(1, n + 1, 4))
for estimator in estimators:
for contamination in contaminations:
for max_feature in max_features:
classifier = IsolationForest(n_estimators=estimator,
contamination=contamination,
max_features=max_feature,
n_jobs=7)
classifier.fit(x_reduced_pca)
fb, area, tnr, tpr_train, tpr_test = unsupervised_evaluation(classifier, x_reduced_pca,
test_reduced_pca, attack_reduced_pca)
isolation_results = isolation_results.append({'estimators': estimator, 'contamination': contamination,
'n_components': n, 'max_features': max_feature,
'TPR_train': tpr_train,
'TPR_test': tpr_test,
'TNR': tnr,
'model': 'isolation_forest',
'auc': area,
'f_beta': fb,
'projection': 'PCA'}, ignore_index=True)
classifier = IsolationForest(n_estimators=estimator,
contamination=contamination,
max_features=max_feature,
n_jobs=7)
classifier.fit(x_reduced_rp)
fb, area, tnr, tpr_train, tpr_test = unsupervised_evaluation(classifier, x_reduced_rp,
test_reduced_rp, attack_reduced_rp)
isolation_results = isolation_results.append({'estimators': estimator, 'contamination': contamination,
'n_components': n, 'max_features': max_feature,
'TPR_train': tpr_train,
'TPR_test': tpr_test,
'TNR': tnr,
'model': 'isolation_forest',
'auc': area,
'f_beta': fb,
'projection': 'RP'}, ignore_index=True)
return isolation_results
def autoencoder(x_train, x_test, x_attacks, ae_svm_results):
latent_dim = 3
input_vector = Input(shape=(x_train.shape[1],))
encoded = Dense(latent_dim, activation='relu')(input_vector)
decoded = Dense(x_train.shape[1], activity_regularizer=regularizers.l1(10e-5))(encoded)
autoencoder = Model(input_vector, decoded)
encoder = Model(input_vector, encoded)
autoencoder.compile(optimizer=Adam(lr=0.001), loss='mse')
network_history = autoencoder.fit(x_train, x_train, shuffle=True, batch_size=16, epochs=10,
validation_data=(x_test, x_test), verbose=True)
plot_history(network_history, 'AE history')
print('Mean loss on train: {}'.format(autoencoder.evaluate(x_train, x_train, batch_size=8, verbose=False)))
print('Mean loss on test: {}'.format(autoencoder.evaluate(x_test, x_test, batch_size=8, verbose=False)))
print('Mean loss on attacks: {}'.format(autoencoder.evaluate(x_attacks, x_attacks, batch_size=8, verbose=False)))
x_train_red = encoder.predict(x_train, batch_size=8)
x_test_red = encoder.predict(x_test, batch_size=8)
x_attacks_red = encoder.predict(x_attacks, batch_size=8)
nus = [0.01]
gammas = [x_train_red.shape[1], 2*x_train_red.shape[1], x_train_red.shape[1]/2, 'auto']
for nu in nus:
for gamma in gammas:
classifier = svm.OneClassSVM(kernel='rbf', gamma=gamma, nu=nu, cache_size=7000)
classifier.fit(x_train_red)
fb, area, tnr, tpr_train, tpr_test = unsupervised_evaluation(classifier, x_train_red,
x_test_red, x_attacks_red)
ae_svm_results = ae_svm_results.append({'nu': nu, 'gamma': gamma, 'n_components': latent_dim,
'TPR_train': tpr_train, 'TPR_test': tpr_test, 'TNR': tnr,
'model': 'ae-svm', 'auc': area, 'f_beta': fb}, ignore_index=True)
return ae_svm_results
def unsupervised_evaluation(classifier, train_set, test_set, attack_set, beta=20):
y_pred_train = classifier.predict(train_set)
y_pred_test = classifier.predict(test_set)
y_pred_outliers = classifier.predict(attack_set)
n_accurate_train = y_pred_train[y_pred_train == 1].size
n_accurate_test = y_pred_test[y_pred_test == 1].size
n_accurate_outliers = y_pred_outliers[y_pred_outliers == -1].size
fpr, tpr, _ = roc_curve(np.concatenate([np.ones(y_pred_test.shape[0]), -1*np.ones(y_pred_outliers.shape[0])]),
np.concatenate([y_pred_test, y_pred_outliers]), pos_label=1)
fb = fbeta_score(np.concatenate([np.ones(y_pred_test.shape[0]), -1*np.ones(y_pred_outliers.shape[0])]),
np.concatenate([y_pred_test, y_pred_outliers]), beta=beta, pos_label=1)
tnr = n_accurate_outliers/attack_set.shape[0]
tpr_test = n_accurate_test/test_set.shape[0]
tpr_train = n_accurate_train/train_set.shape[0]
area = auc(fpr, tpr)
return fb, area, tnr, tpr_train, tpr_test
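# Note on the conventions used above: the one-class models (OneClassSVM, IsolationForest) predict +1
# for inliers and -1 for outliers, so TPR counts legitimate train/test samples kept as +1 and TNR
# counts attack samples flagged as -1; beta=20 makes the F-beta score weight recall of the positive
# (+1) class much more heavily than precision.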
def neural_network(x_train, y_train, x_test, y_test):
model = Sequential()
model.add(Dense(128, input_shape=(x_train.shape[1],), activation='relu', kernel_regularizer=l2(0.01)))
model.add(Dropout(0.1))
model.add(Dense(64, activation='relu', kernel_regularizer=l2(0.01)))
model.add(Dropout(0.2))
model.add(Dense(128, kernel_initializer='glorot_uniform', activation='sigmoid'))
model.add(Dropout(0.4))
model.add(Dense(64, kernel_initializer='glorot_uniform', activation='tanh'))
model.add(Dropout(0.5))
model.add(Dense(32, kernel_initializer='glorot_uniform', activation='tanh'))
model.add(Dropout(0.4))
model.add(Dense(128, kernel_initializer='glorot_uniform', activation='tanh'))
model.add(Dropout(0.3))
model.add(Dense(1, kernel_initializer='normal', activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='sgd', metrics=['accuracy'])
network_history = model.fit(x_train, y_train, batch_size=128, epochs=10, verbose=0,
validation_data=(x_test, y_test))
plot_history_with_acc(network_history)
return model
def random_forest(x_train, y_train, x_test, y_test, random_forest_results):
# Random forest Hyper-parameters
estimators = [150, 200]
dimensions = [int(i*x_test.shape[1]) for i in [1]]
for estimator in estimators:
for n in dimensions:
x_reduced_pca, test_reduced_pca = reduce_dimensionality(n, x_train, x_test, 'PCA')
x_reduced_rp, test_reduced_rp = reduce_dimensionality(n, x_train, x_test, 'RP')
classifier = RandomForestClassifier(n_estimators=estimator, n_jobs=7)
classifier.fit(x_reduced_pca, y_train)
fb, area, tnr, tpr = supervised_evaluation(classifier, test_reduced_pca, y_test)
random_forest_results = random_forest_results.append({'estimators': estimator,
'n_components': n,
'TPR': tpr,
'TNR': tnr,
'model': 'random_forest',
'auc': area,
'f_beta': fb,
'projection': 'PCA'}, ignore_index=True)
classifier = RandomForestClassifier(n_estimators=estimator, n_jobs=7)
classifier.fit(x_reduced_rp, y_train)
fb, area, tnr, tpr = supervised_evaluation(classifier, test_reduced_rp, y_test)
random_forest_results = random_forest_results.append({'estimators': estimator,
'n_components': n,
'TPR': tpr,
'TNR': tnr,
'model': 'random_forest',
'auc': area,
'f_beta': fb,
'projection': 'RP'}, ignore_index=True)
classifier = RandomForestClassifier(n_estimators=estimator, n_jobs=7)
classifier.fit(x_train, y_train)
fb, area, tnr, tpr = supervised_evaluation(classifier, x_test, y_test)
random_forest_results = random_forest_results.append({'estimators': estimator,
'n_components': x_test.shape[1],
'TPR': tpr,
'TNR': tnr,
'model': 'random_forest',
'auc': area,
'f_beta': fb,
'projection': 'None'}, ignore_index=True)
return random_forest_results
def ada_boost(x_train, y_train, x_test, y_test, ada_boost_results):
# AdaBoost Hyper-parameters
learning_rates = [0.55]
dimensions = [int(i*x_test.shape[1]) for i in [1]]
for n in dimensions:
x_reduced_pca, test_reduced_pca = reduce_dimensionality(n, x_train, x_test, 'PCA')
x_reduced_rp, test_reduced_rp = reduce_dimensionality(n, x_train, x_test, 'RP')
for lr in learning_rates:
classifier = AdaBoostClassifier(learning_rate=lr)
classifier.fit(x_reduced_pca, y_train)
fb, area, tnr, tpr = supervised_evaluation(classifier, test_reduced_pca, y_test)
ada_boost_results = ada_boost_results.append({'LR': lr,
'n_components': n,
'TPR': tpr,
'TNR': tnr,
'model': 'ada_boost',
'auc': area,
'f_beta': fb,
'projection': 'PCA'}, ignore_index=True)
classifier = AdaBoostClassifier(learning_rate=lr)
classifier.fit(x_reduced_rp, y_train)
fb, area, tnr, tpr = supervised_evaluation(classifier, test_reduced_rp, y_test)
ada_boost_results = ada_boost_results.append({'LR': lr,
'n_components': n,
'TPR': tpr,
'TNR': tnr,
'model': 'ada_boost',
'auc': area,
'f_beta': fb,
'projection': 'RP'}, ignore_index=True)
return ada_boost_results
def svm_classifier(x_train, y_train, x_test, y_test, svm_results):
# SVC Hyper-parameters
dimensions = [int(i*x_test.shape[1]) for i in [1]]
for n in dimensions:
x_reduced_pca, test_reduced_pca = reduce_dimensionality(n, x_train, x_test, 'PCA')
x_reduced_rp, test_reduced_rp = reduce_dimensionality(n, x_train, x_test, 'RP')
classifier = svm.SVC(gamma='auto', cache_size=7000)
classifier.fit(x_reduced_pca, y_train)
fb, area, tnr, tpr = supervised_evaluation(classifier, test_reduced_pca, y_test)
svm_results = svm_results.append({
'n_components': n,
'TPR': tpr,
'TNR': tnr,
'model': 'svm',
'auc': area,
'f_beta': fb,
'projection': 'PCA'}, ignore_index=True)
classifier = svm.SVC(gamma='auto', cache_size=7000)
classifier.fit(x_reduced_rp, y_train)
fb, area, tnr, tpr = supervised_evaluation(classifier, test_reduced_rp, y_test)
svm_results = svm_results.append({
'n_components': n,
'TPR': tpr,
'TNR': tnr,
'model': 'svm',
'auc': area,
'f_beta': fb,
'projection': 'RP'}, ignore_index=True)
return svm_results
def xg_boost(x_train, y_train, x_test, y_test, xg_boost_results):
# XGBoost Hyper-parameters
dimensions = [int(i*x_test.shape[1]) for i in [1]]
for n in dimensions:
x_reduced_pca, test_reduced_pca = reduce_dimensionality(n, x_train, x_test, 'PCA')
x_reduced_rp, test_reduced_rp = reduce_dimensionality(n, x_train, x_test, 'RP')
classifier = xgb.XGBClassifier()
grid = {'max_depth': 10}
classifier.set_params(**grid)
classifier.fit(x_reduced_pca, y_train)
fb, area, tnr, tpr = supervised_evaluation(classifier, test_reduced_pca, y_test)
xg_boost_results = xg_boost_results.append({
'n_components': n,
'TPR': tpr,
'TNR': tnr,
'model': 'xgboost',
'auc': area,
'f_beta': fb,
'projection': 'PCA'}, ignore_index=True)
classifier = xgb.XGBClassifier()
grid = {'max_depth': 10}
classifier.set_params(**grid)
classifier.fit(x_reduced_rp, y_train)
fb, area, tnr, tpr = supervised_evaluation(classifier, test_reduced_rp, y_test)
xg_boost_results = xg_boost_results.append({
'n_components': n,
'TPR': tpr,
'TNR': tnr,
'model': 'xgboost',
'auc': area,
'f_beta': fb,
'projection': 'RP'}, ignore_index=True)
classifier = xgb.XGBClassifier()
grid = {'max_depth': 10}
classifier.set_params(**grid)
classifier.fit(x_train, y_train)
fb, area, tnr, tpr = supervised_evaluation(classifier, x_test, y_test)
xg_boost_results = xg_boost_results.append({
'n_components': x_test.shape[1],
'TPR': tpr,
'TNR': tnr,
'model': 'xgboost',
'auc': area,
'f_beta': fb,
'projection': 'None'}, ignore_index=True)
return xg_boost_results
def supervised_evaluation(classifier, x_test, y_test, beta=20, nn=False):
    y_pred = classifier.predict(x_test)
    if nn:
        # Keras models return probabilities; threshold them to hard class labels first
        y_pred = [round(x[0]) for x in y_pred]
    confusion_matrix(y_test, y_pred)
    fpr, tpr, _ = roc_curve(y_test, y_pred)
fb = fbeta_score(y_test, y_pred, beta=beta, pos_label=1)
area = auc(fpr, tpr)
tpr = tpr[1]
tnr = 1 - fpr[1]
return fb, area, tnr, tpr
def plot_roc(classifier, test, attacks, title):
y_pred_test = classifier.predict(test)
y_pred_outliers = classifier.predict(attacks)
fpr, tpr, _ = roc_curve(np.concatenate([np.ones(y_pred_test.shape[0]),
-1*np.ones(y_pred_outliers.shape[0])]),
np.concatenate([y_pred_test, y_pred_outliers]), pos_label=1)
roc_auc = auc(fpr, tpr)
plt.figure()
lw = 2
plt.plot(fpr, tpr, color='darkorange',
lw=lw, label='ROC curve (area = %0.2f)' % roc_auc)
plt.plot([0, 1], [0, 1], color='navy', lw=lw, linestyle='--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver operating characteristic: {}'.format(title))
plt.legend(loc='lower right')
plt.show()
def plot_roc_supervised(classifier, x_test, y_test, title, nn=False):
y_pred = classifier.predict(x_test)
fpr, tpr, _ = roc_curve(y_test, y_pred)
if nn:
y_pred = [round(x[0]) for x in y_pred]
print(confusion_matrix(y_test, y_pred))
roc_auc = auc(fpr, tpr)
plt.figure()
lw = 2
plt.plot(fpr, tpr, color='darkorange',
lw=lw, label='ROC curve (area = %0.2f)' % roc_auc)
plt.plot([0, 1], [0, 1], color='navy', lw=lw, linestyle='--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver operating characteristic {}'.format(title))
plt.legend(loc='lower right')
plt.show()
def plot_history(network_history, title):
plt.figure(figsize=(10, 5))
plt.title(title)
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.semilogy(network_history.history['loss'])
plt.semilogy(network_history.history['val_loss'])
plt.legend(['Training', 'Validation'])
plt.show()
def plot_history_with_acc(network_history, title='Loss and Accuracy'):
plt.figure(figsize=(15, 10))
plt.subplot(211)
plt.title(title)
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.semilogy(network_history.history['loss'])
plt.semilogy(network_history.history['val_loss'])
plt.legend(['Training', 'Validation'])
plt.subplot(212)
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.plot(network_history.history['acc'])
plt.plot(network_history.history['val_acc'])
plt.legend(['Training', 'Validation'], loc='lower right')
plt.show()
def reduce_dimensionality(n_components, train, test, method, attack=None):
if method == 'PCA':
matrix = PCA(n_components=n_components)
elif method == 'RP':
matrix = random_projection.SparseRandomProjection(n_components=n_components, random_state=7)
else:
print('unknown projection method, choose either RP or PCA')
return None
train = matrix.fit_transform(train)
test = matrix.transform(test)
if attack is None:
return train, test
attack = matrix.transform(attack)
return train, test, attack
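# Illustrative usage of reduce_dimensionality (the arrays below are synthetic placeholders):
#
#   rng = np.random.RandomState(0)
#   x_train, x_test = rng.rand(100, 20), rng.rand(30, 20)
#   train_pca, test_pca = reduce_dimensionality(5, x_train, x_test, 'PCA')
#   train_rp, test_rp = reduce_dimensionality(5, x_train, x_test, 'RP')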
| [((6712, 6744), 'keras.layers.Input', 'Input', ([], {'shape': '(x_train.shape[1],)'}), '(shape=(x_train.shape[1],))\n', (6717, 6744), False, 'from keras.layers import Dense, Input, Dropout\n'), ((6920, 6948), 'keras.models.Model', 'Model', (['input_vector', 'decoded'], {}), '(input_vector, decoded)\n', (6925, 6948), False, 'from keras.models import Model\n'), ((6963, 6991), 'keras.models.Model', 'Model', (['input_vector', 'encoded'], {}), '(input_vector, encoded)\n', (6968, 6991), False, 'from keras.models import Model\n'), ((9640, 9653), 'sklearn.metrics.auc', 'auc', (['fpr', 'tpr'], {}), '(fpr, tpr)\n', (9643, 9653), False, 'from sklearn.metrics import fbeta_score, roc_curve, auc, confusion_matrix\n'), ((9768, 9780), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (9778, 9780), False, 'from keras.models import Sequential\n'), ((19241, 19260), 'xgboost.XGBClassifier', 'xgb.XGBClassifier', ([], {}), '()\n', (19258, 19260), True, 'import xgboost as xgb\n'), ((20524, 20537), 'sklearn.metrics.auc', 'auc', (['fpr', 'tpr'], {}), '(fpr, tpr)\n', (20527, 20537), False, 'from sklearn.metrics import fbeta_score, roc_curve, auc, confusion_matrix\n'), ((20542, 20554), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (20552, 20554), True, 'import matplotlib.pyplot as plt\n'), ((20570, 20664), 'matplotlib.pyplot.plot', 'plt.plot', (['fpr', 'tpr'], {'color': '"""darkorange"""', 'lw': 'lw', 'label': "('ROC curve (area = %0.2f)' % roc_auc)"}), "(fpr, tpr, color='darkorange', lw=lw, label=\n 'ROC curve (area = %0.2f)' % roc_auc)\n", (20578, 20664), True, 'import matplotlib.pyplot as plt\n'), ((20677, 20738), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, 1]', '[0, 1]'], {'color': '"""navy"""', 'lw': 'lw', 'linestyle': '"""--"""'}), "([0, 1], [0, 1], color='navy', lw=lw, linestyle='--')\n", (20685, 20738), True, 'import matplotlib.pyplot as plt\n'), ((20743, 20763), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[0.0, 1.0]'], {}), '([0.0, 1.0])\n', (20751, 20763), True, 'import matplotlib.pyplot as plt\n'), ((20768, 20789), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0.0, 1.05]'], {}), '([0.0, 1.05])\n', (20776, 20789), True, 'import matplotlib.pyplot as plt\n'), ((20794, 20827), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""False Positive Rate"""'], {}), "('False Positive Rate')\n", (20804, 20827), True, 'import matplotlib.pyplot as plt\n'), ((20832, 20864), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""True Positive Rate"""'], {}), "('True Positive Rate')\n", (20842, 20864), True, 'import matplotlib.pyplot as plt\n'), ((20938, 20967), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""lower right"""'}), "(loc='lower right')\n", (20948, 20967), True, 'import matplotlib.pyplot as plt\n'), ((20972, 20982), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (20980, 20982), True, 'import matplotlib.pyplot as plt\n'), ((21114, 21139), 'sklearn.metrics.roc_curve', 'roc_curve', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (21123, 21139), False, 'from sklearn.metrics import fbeta_score, roc_curve, auc, confusion_matrix\n'), ((21258, 21271), 'sklearn.metrics.auc', 'auc', (['fpr', 'tpr'], {}), '(fpr, tpr)\n', (21261, 21271), False, 'from sklearn.metrics import fbeta_score, roc_curve, auc, confusion_matrix\n'), ((21276, 21288), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (21286, 21288), True, 'import matplotlib.pyplot as plt\n'), ((21304, 21398), 'matplotlib.pyplot.plot', 'plt.plot', (['fpr', 'tpr'], {'color': '"""darkorange"""', 'lw': 'lw', 'label': "('ROC curve 
(area = %0.2f)' % roc_auc)"}), "(fpr, tpr, color='darkorange', lw=lw, label=\n 'ROC curve (area = %0.2f)' % roc_auc)\n", (21312, 21398), True, 'import matplotlib.pyplot as plt\n'), ((21411, 21472), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, 1]', '[0, 1]'], {'color': '"""navy"""', 'lw': 'lw', 'linestyle': '"""--"""'}), "([0, 1], [0, 1], color='navy', lw=lw, linestyle='--')\n", (21419, 21472), True, 'import matplotlib.pyplot as plt\n'), ((21477, 21497), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[0.0, 1.0]'], {}), '([0.0, 1.0])\n', (21485, 21497), True, 'import matplotlib.pyplot as plt\n'), ((21502, 21523), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0.0, 1.05]'], {}), '([0.0, 1.05])\n', (21510, 21523), True, 'import matplotlib.pyplot as plt\n'), ((21528, 21561), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""False Positive Rate"""'], {}), "('False Positive Rate')\n", (21538, 21561), True, 'import matplotlib.pyplot as plt\n'), ((21566, 21598), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""True Positive Rate"""'], {}), "('True Positive Rate')\n", (21576, 21598), True, 'import matplotlib.pyplot as plt\n'), ((21671, 21700), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""lower right"""'}), "(loc='lower right')\n", (21681, 21700), True, 'import matplotlib.pyplot as plt\n'), ((21705, 21715), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (21713, 21715), True, 'import matplotlib.pyplot as plt\n'), ((21764, 21791), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 5)'}), '(figsize=(10, 5))\n', (21774, 21791), True, 'import matplotlib.pyplot as plt\n'), ((21796, 21812), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (21805, 21812), True, 'import matplotlib.pyplot as plt\n'), ((21817, 21837), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epochs"""'], {}), "('Epochs')\n", (21827, 21837), True, 'import matplotlib.pyplot as plt\n'), ((21842, 21860), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Loss"""'], {}), "('Loss')\n", (21852, 21860), True, 'import matplotlib.pyplot as plt\n'), ((21865, 21910), 'matplotlib.pyplot.semilogy', 'plt.semilogy', (["network_history.history['loss']"], {}), "(network_history.history['loss'])\n", (21877, 21910), True, 'import matplotlib.pyplot as plt\n'), ((21915, 21964), 'matplotlib.pyplot.semilogy', 'plt.semilogy', (["network_history.history['val_loss']"], {}), "(network_history.history['val_loss'])\n", (21927, 21964), True, 'import matplotlib.pyplot as plt\n'), ((21969, 22007), 'matplotlib.pyplot.legend', 'plt.legend', (["['Training', 'Validation']"], {}), "(['Training', 'Validation'])\n", (21979, 22007), True, 'import matplotlib.pyplot as plt\n'), ((22012, 22022), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (22020, 22022), True, 'import matplotlib.pyplot as plt\n'), ((22100, 22128), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(15, 10)'}), '(figsize=(15, 10))\n', (22110, 22128), True, 'import matplotlib.pyplot as plt\n'), ((22133, 22149), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(211)'], {}), '(211)\n', (22144, 22149), True, 'import matplotlib.pyplot as plt\n'), ((22154, 22170), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (22163, 22170), True, 'import matplotlib.pyplot as plt\n'), ((22175, 22195), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epochs"""'], {}), "('Epochs')\n", (22185, 22195), True, 'import matplotlib.pyplot as plt\n'), ((22200, 22218), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Loss"""'], {}), "('Loss')\n", (22210, 22218), True, 
'import matplotlib.pyplot as plt\n'), ((22223, 22268), 'matplotlib.pyplot.semilogy', 'plt.semilogy', (["network_history.history['loss']"], {}), "(network_history.history['loss'])\n", (22235, 22268), True, 'import matplotlib.pyplot as plt\n'), ((22273, 22322), 'matplotlib.pyplot.semilogy', 'plt.semilogy', (["network_history.history['val_loss']"], {}), "(network_history.history['val_loss'])\n", (22285, 22322), True, 'import matplotlib.pyplot as plt\n'), ((22327, 22365), 'matplotlib.pyplot.legend', 'plt.legend', (["['Training', 'Validation']"], {}), "(['Training', 'Validation'])\n", (22337, 22365), True, 'import matplotlib.pyplot as plt\n'), ((22371, 22387), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(212)'], {}), '(212)\n', (22382, 22387), True, 'import matplotlib.pyplot as plt\n'), ((22392, 22412), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epochs"""'], {}), "('Epochs')\n", (22402, 22412), True, 'import matplotlib.pyplot as plt\n'), ((22417, 22439), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Accuracy"""'], {}), "('Accuracy')\n", (22427, 22439), True, 'import matplotlib.pyplot as plt\n'), ((22444, 22484), 'matplotlib.pyplot.plot', 'plt.plot', (["network_history.history['acc']"], {}), "(network_history.history['acc'])\n", (22452, 22484), True, 'import matplotlib.pyplot as plt\n'), ((22489, 22533), 'matplotlib.pyplot.plot', 'plt.plot', (["network_history.history['val_acc']"], {}), "(network_history.history['val_acc'])\n", (22497, 22533), True, 'import matplotlib.pyplot as plt\n'), ((22538, 22595), 'matplotlib.pyplot.legend', 'plt.legend', (["['Training', 'Validation']"], {'loc': '"""lower right"""'}), "(['Training', 'Validation'], loc='lower right')\n", (22548, 22595), True, 'import matplotlib.pyplot as plt\n'), ((22600, 22610), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (22608, 22610), True, 'import matplotlib.pyplot as plt\n'), ((6759, 6795), 'keras.layers.Dense', 'Dense', (['latent_dim'], {'activation': '"""relu"""'}), "(latent_dim, activation='relu')\n", (6764, 6795), False, 'from keras.layers import Dense, Input, Dropout\n'), ((9214, 9260), 'numpy.concatenate', 'np.concatenate', (['[y_pred_test, y_pred_outliers]'], {}), '([y_pred_test, y_pred_outliers])\n', (9228, 9260), True, 'import numpy as np\n'), ((9404, 9450), 'numpy.concatenate', 'np.concatenate', (['[y_pred_test, y_pred_outliers]'], {}), '([y_pred_test, y_pred_outliers])\n', (9418, 9450), True, 'import numpy as np\n'), ((9903, 9915), 'keras.layers.Dropout', 'Dropout', (['(0.1)'], {}), '(0.1)\n', (9910, 9915), False, 'from keras.layers import Dense, Input, Dropout\n'), ((10005, 10017), 'keras.layers.Dropout', 'Dropout', (['(0.2)'], {}), '(0.2)\n', (10012, 10017), False, 'from keras.layers import Dense, Input, Dropout\n'), ((10034, 10103), 'keras.layers.Dense', 'Dense', (['(128)'], {'kernel_initializer': '"""glorot_uniform"""', 'activation': '"""sigmoid"""'}), "(128, kernel_initializer='glorot_uniform', activation='sigmoid')\n", (10039, 10103), False, 'from keras.layers import Dense, Input, Dropout\n'), ((10119, 10131), 'keras.layers.Dropout', 'Dropout', (['(0.4)'], {}), '(0.4)\n', (10126, 10131), False, 'from keras.layers import Dense, Input, Dropout\n'), ((10148, 10213), 'keras.layers.Dense', 'Dense', (['(64)'], {'kernel_initializer': '"""glorot_uniform"""', 'activation': '"""tanh"""'}), "(64, kernel_initializer='glorot_uniform', activation='tanh')\n", (10153, 10213), False, 'from keras.layers import Dense, Input, Dropout\n'), ((10229, 10241), 'keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), 
'(0.5)\n', (10236, 10241), False, 'from keras.layers import Dense, Input, Dropout\n'), ((10258, 10323), 'keras.layers.Dense', 'Dense', (['(32)'], {'kernel_initializer': '"""glorot_uniform"""', 'activation': '"""tanh"""'}), "(32, kernel_initializer='glorot_uniform', activation='tanh')\n", (10263, 10323), False, 'from keras.layers import Dense, Input, Dropout\n'), ((10339, 10351), 'keras.layers.Dropout', 'Dropout', (['(0.4)'], {}), '(0.4)\n', (10346, 10351), False, 'from keras.layers import Dense, Input, Dropout\n'), ((10368, 10434), 'keras.layers.Dense', 'Dense', (['(128)'], {'kernel_initializer': '"""glorot_uniform"""', 'activation': '"""tanh"""'}), "(128, kernel_initializer='glorot_uniform', activation='tanh')\n", (10373, 10434), False, 'from keras.layers import Dense, Input, Dropout\n'), ((10450, 10462), 'keras.layers.Dropout', 'Dropout', (['(0.3)'], {}), '(0.3)\n', (10457, 10462), False, 'from keras.layers import Dense, Input, Dropout\n'), ((10479, 10538), 'keras.layers.Dense', 'Dense', (['(1)'], {'kernel_initializer': '"""normal"""', 'activation': '"""sigmoid"""'}), "(1, kernel_initializer='normal', activation='sigmoid')\n", (10484, 10538), False, 'from keras.layers import Dense, Input, Dropout\n'), ((16503, 16541), 'sklearn.svm.SVC', 'svm.SVC', ([], {'gamma': '"""auto"""', 'cache_size': '(7000)'}), "(gamma='auto', cache_size=7000)\n", (16510, 16541), False, 'from sklearn import svm\n'), ((17166, 17204), 'sklearn.svm.SVC', 'svm.SVC', ([], {'gamma': '"""auto"""', 'cache_size': '(7000)'}), "(gamma='auto', cache_size=7000)\n", (17173, 17204), False, 'from sklearn import svm\n'), ((18210, 18229), 'xgboost.XGBClassifier', 'xgb.XGBClassifier', ([], {}), '()\n', (18227, 18229), True, 'import xgboost as xgb\n'), ((18729, 18748), 'xgboost.XGBClassifier', 'xgb.XGBClassifier', ([], {}), '()\n', (18746, 18748), True, 'import xgboost as xgb\n'), ((19862, 19894), 'sklearn.metrics.confusion_matrix', 'confusion_matrix', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (19878, 19894), False, 'from sklearn.metrics import fbeta_score, roc_curve, auc, confusion_matrix\n'), ((19917, 19942), 'sklearn.metrics.roc_curve', 'roc_curve', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (19926, 19942), False, 'from sklearn.metrics import fbeta_score, roc_curve, auc, confusion_matrix\n'), ((19956, 20007), 'sklearn.metrics.fbeta_score', 'fbeta_score', (['y_test', 'y_pred'], {'beta': 'beta', 'pos_label': '(1)'}), '(y_test, y_pred, beta=beta, pos_label=1)\n', (19967, 20007), False, 'from sklearn.metrics import fbeta_score, roc_curve, auc, confusion_matrix\n'), ((20023, 20036), 'sklearn.metrics.auc', 'auc', (['fpr', 'tpr'], {}), '(fpr, tpr)\n', (20026, 20036), False, 'from sklearn.metrics import fbeta_score, roc_curve, auc, confusion_matrix\n'), ((20449, 20495), 'numpy.concatenate', 'np.concatenate', (['[y_pred_test, y_pred_outliers]'], {}), '([y_pred_test, y_pred_outliers])\n', (20463, 20495), True, 'import numpy as np\n'), ((21209, 21241), 'sklearn.metrics.confusion_matrix', 'confusion_matrix', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (21225, 21241), False, 'from sklearn.metrics import fbeta_score, roc_curve, auc, confusion_matrix\n'), ((22729, 22759), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': 'n_components'}), '(n_components=n_components)\n', (22732, 22759), False, 'from sklearn.decomposition import PCA\n'), ((7026, 7040), 'keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.001)'}), '(lr=0.001)\n', (7030, 7040), False, 'from keras.optimizers import Adam\n'), ((7980, 8046), 
'sklearn.svm.OneClassSVM', 'svm.OneClassSVM', ([], {'kernel': '"""rbf"""', 'gamma': 'gamma', 'nu': 'nu', 'cache_size': '(7000)'}), "(kernel='rbf', gamma=gamma, nu=nu, cache_size=7000)\n", (7995, 8046), False, 'from sklearn import svm\n'), ((11317, 11373), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {'n_estimators': 'estimator', 'n_jobs': '(7)'}), '(n_estimators=estimator, n_jobs=7)\n', (11339, 11373), False, 'from sklearn.ensemble import RandomForestClassifier\n'), ((12236, 12292), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {'n_estimators': 'estimator', 'n_jobs': '(7)'}), '(n_estimators=estimator, n_jobs=7)\n', (12258, 12292), False, 'from sklearn.ensemble import RandomForestClassifier\n'), ((13152, 13208), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {'n_estimators': 'estimator', 'n_jobs': '(7)'}), '(n_estimators=estimator, n_jobs=7)\n', (13174, 13208), False, 'from sklearn.ensemble import RandomForestClassifier\n'), ((14491, 14527), 'sklearn.ensemble.AdaBoostClassifier', 'AdaBoostClassifier', ([], {'learning_rate': 'lr'}), '(learning_rate=lr)\n', (14509, 14527), False, 'from sklearn.ensemble import AdaBoostClassifier\n'), ((15307, 15343), 'sklearn.ensemble.AdaBoostClassifier', 'AdaBoostClassifier', ([], {'learning_rate': 'lr'}), '(learning_rate=lr)\n', (15325, 15343), False, 'from sklearn.ensemble import AdaBoostClassifier\n'), ((22802, 22889), 'sklearn.random_projection.SparseRandomProjection', 'random_projection.SparseRandomProjection', ([], {'n_components': 'n_components', 'random_state': '(7)'}), '(n_components=n_components,\n random_state=7)\n', (22842, 22889), False, 'from sklearn import random_projection\n'), ((1260, 1326), 'sklearn.svm.OneClassSVM', 'svm.OneClassSVM', ([], {'kernel': '"""rbf"""', 'gamma': 'gamma', 'nu': 'nu', 'cache_size': '(7000)'}), "(kernel='rbf', gamma=gamma, nu=nu, cache_size=7000)\n", (1275, 1326), False, 'from sklearn import svm\n'), ((2091, 2157), 'sklearn.svm.OneClassSVM', 'svm.OneClassSVM', ([], {'kernel': '"""rbf"""', 'gamma': 'gamma', 'nu': 'nu', 'cache_size': '(7000)'}), "(kernel='rbf', gamma=gamma, nu=nu, cache_size=7000)\n", (2106, 2157), False, 'from sklearn import svm\n'), ((6869, 6892), 'keras.regularizers.l1', 'regularizers.l1', (['(0.0001)'], {}), '(0.0001)\n', (6884, 6892), False, 'from keras import regularizers\n'), ((9115, 9144), 'numpy.ones', 'np.ones', (['y_pred_test.shape[0]'], {}), '(y_pred_test.shape[0])\n', (9122, 9144), True, 'import numpy as np\n'), ((9312, 9341), 'numpy.ones', 'np.ones', (['y_pred_test.shape[0]'], {}), '(y_pred_test.shape[0])\n', (9319, 9341), True, 'import numpy as np\n'), ((9878, 9886), 'keras.regularizers.l2', 'l2', (['(0.01)'], {}), '(0.01)\n', (9880, 9886), False, 'from keras.regularizers import l2\n'), ((9980, 9988), 'keras.regularizers.l2', 'l2', (['(0.01)'], {}), '(0.01)\n', (9982, 9988), False, 'from keras.regularizers import l2\n'), ((20306, 20335), 'numpy.ones', 'np.ones', (['y_pred_test.shape[0]'], {}), '(y_pred_test.shape[0])\n', (20313, 20335), True, 'import numpy as np\n'), ((3784, 3892), 'sklearn.ensemble.IsolationForest', 'IsolationForest', ([], {'n_estimators': 'estimator', 'contamination': 'contamination', 'max_features': 'max_feature', 'n_jobs': '(7)'}), '(n_estimators=estimator, contamination=contamination,\n max_features=max_feature, n_jobs=7)\n', (3799, 3892), False, 'from sklearn.ensemble import IsolationForest\n'), ((5203, 5311), 'sklearn.ensemble.IsolationForest', 'IsolationForest', ([], 
{'n_estimators': 'estimator', 'contamination': 'contamination', 'max_features': 'max_feature', 'n_jobs': '(7)'}), '(n_estimators=estimator, contamination=contamination,\n max_features=max_feature, n_jobs=7)\n', (5218, 5311), False, 'from sklearn.ensemble import IsolationForest\n'), ((9149, 9182), 'numpy.ones', 'np.ones', (['y_pred_outliers.shape[0]'], {}), '(y_pred_outliers.shape[0])\n', (9156, 9182), True, 'import numpy as np\n'), ((9346, 9379), 'numpy.ones', 'np.ones', (['y_pred_outliers.shape[0]'], {}), '(y_pred_outliers.shape[0])\n', (9353, 9379), True, 'import numpy as np\n'), ((20384, 20417), 'numpy.ones', 'np.ones', (['y_pred_outliers.shape[0]'], {}), '(y_pred_outliers.shape[0])\n', (20391, 20417), True, 'import numpy as np\n')] |
mrtazz/notifo.py | tests/test_notifo_message.py | 26079db3b40c26661155af20a9f16a0eca06dbde | # encoding: utf-8
import unittest
import os
import sys
sys.path.append(os.getcwd())
from notifo import Notifo, send_message
class TestNotifyUser(unittest.TestCase):
def setUp(self):
self.provider = "test_provider"
self.provider_banned = "test_provider_msg_banned"
self.user = "test_user"
self.sender = "test_user2"
self.banned = "test_user_banned"
self.banned_token = "x128302fd34a60bf7e5670d003d858e6fb06ce6bf"
self.sender_token = "x633a05b18f7f65bf461ffb3900c6eb70eaafb0ed"
self.provider_token = "74515bc044df6594fbdb761b12a42f8028e14588"
self.provider_banned_token = "e34e447385fb4ff9084204cba19731d29c2afd78"
self.user_token = "xbb8b3cba22a5f3d64fd404a07e84cdbb0c3566e5"
def test_message(self):
res = send_message(self.sender, self.sender_token,
to=self.user, msg="foo test")
self.assertEqual(2201, res["response_code"])
def test_message_with_object(self):
res = Notifo(self.sender, self.sender_token).send_message(
to=self.user, msg="foo test")
self.assertEqual(2201, res["response_code"])
def test_message_banned(self):
res = send_message(self.banned, self.banned_token,
to=self.user, msg="foo test")
self.assertEqual(403, res["response_code"])
def test_message_provider(self):
res = send_message(self.provider, self.provider_token,
to=self.user, msg="foo test")
self.assertEqual(2201, res["response_code"])
def test_message_provider_banned(self):
res = send_message(self.provider_banned, self.provider_banned_token,
to=self.user, msg="foo test")
self.assertEqual(403, res["response_code"])
if __name__ == '__main__':
unittest.main()
| [((71, 82), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (80, 82), False, 'import os\n'), ((1880, 1895), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1893, 1895), False, 'import unittest\n'), ((804, 878), 'notifo.send_message', 'send_message', (['self.sender', 'self.sender_token'], {'to': 'self.user', 'msg': '"""foo test"""'}), "(self.sender, self.sender_token, to=self.user, msg='foo test')\n", (816, 878), False, 'from notifo import Notifo, send_message\n'), ((1237, 1311), 'notifo.send_message', 'send_message', (['self.banned', 'self.banned_token'], {'to': 'self.user', 'msg': '"""foo test"""'}), "(self.banned, self.banned_token, to=self.user, msg='foo test')\n", (1249, 1311), False, 'from notifo import Notifo, send_message\n'), ((1448, 1526), 'notifo.send_message', 'send_message', (['self.provider', 'self.provider_token'], {'to': 'self.user', 'msg': '"""foo test"""'}), "(self.provider, self.provider_token, to=self.user, msg='foo test')\n", (1460, 1526), False, 'from notifo import Notifo, send_message\n'), ((1671, 1767), 'notifo.send_message', 'send_message', (['self.provider_banned', 'self.provider_banned_token'], {'to': 'self.user', 'msg': '"""foo test"""'}), "(self.provider_banned, self.provider_banned_token, to=self.user,\n msg='foo test')\n", (1683, 1767), False, 'from notifo import Notifo, send_message\n'), ((1019, 1057), 'notifo.Notifo', 'Notifo', (['self.sender', 'self.sender_token'], {}), '(self.sender, self.sender_token)\n', (1025, 1057), False, 'from notifo import Notifo, send_message\n')] |
wqqpp007/geoist | geoist/cattools/Smoothing.py | 116b674eae3da4ee706902ce7f5feae1f61f43a5 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import numpy as np
from . import Selection as Sel
from . import Exploration as Exp
from . import CatUtils as CU
#-----------------------------------------------------------------------------------------
def GaussWin (Dis, Sig):
    """Gaussian smoothing kernel: weight assigned to an event at distance Dis for bandwidth Sig."""
    return np.exp(-(Dis**2)/(Sig**2.))
#-----------------------------------------------------------------------------------------
def SmoothMFD (Db, a, Wkt, Window=GaussWin, Par=50.,
Delta=0.1, SphereGrid=False,
Box=[], Buffer=[], Grid=[],
Threshold=-100, Unwrap=False,
ZeroRates=False):
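    """
    Smooths a magnitude-frequency distribution over the polygon Wkt: the total
    rate 10**a of the catalogue Db is redistributed over a grid of nodes, each
    event being weighted by the distance-decay Window (Gaussian by default,
    with bandwidth Par) and the weights normalised so that the total rate is
    preserved. Nodes whose rate falls below Threshold are dropped unless
    ZeroRates is True. Returns the node longitudes X, latitudes Y and the
    smoothed a-values A.
    """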
if Par <= 0:
Par = np.inf
# Catalogue selection
DbS = Sel.AreaSelect(Db, Wkt, Owrite=0, Buffer=Buffer, Unwrap=Unwrap)
x,y,z = Exp.GetHypocenter(DbS)
# Creating the mesh grid
P = CU.Polygon()
P.Load(Wkt)
# Unwrapping coordinates
if Unwrap:
x = [i if i > 0. else i+360. for i in x]
P.Unwrap()
if Grid:
XY = [G for G in Grid if P.IsInside(G[0], G[1])]
else:
if SphereGrid:
XY = P.SphereGrid(Delta=Delta, Unwrap=Unwrap)
else:
XY = P.CartGrid(Dx=Delta, Dy=Delta, Bounds=Box)
Win = []
for xyP in XY:
Win.append(0)
for xyE in zip(x,y):
Dis = CU.WgsDistance(xyP[1], xyP[0], xyE[1], xyE[0])
Win[-1] += Window(Dis, Par)
# Scaling and normalising the rates
Norm = np.sum(Win)
A = []; X = []; Y = []
for I,W in enumerate(Win):
aT = -np.inf
if Norm > 0. and W > 0.:
aT = a + np.log10(W/Norm)
if aT < Threshold:
# Filter below threshold
aT = -np.inf
if ZeroRates:
A.append(aT)
X.append(XY[I][0])
Y.append(XY[I][1])
else:
if aT > -np.inf:
A.append(aT)
X.append(XY[I][0])
Y.append(XY[I][1])
if Unwrap:
# Wrap back longitudes
X = [x if x < 180. else x-360. for x in X]
return X, Y, A | [] |
dkarmon/HebSafeHarbor | hebsafeharbor/identifier/signals/lexicon_based_recognizer.py | fdad7481c74feb78f8c3265c327eae7712cf16ce | from typing import List
from presidio_analyzer import EntityRecognizer, RecognizerResult, AnalysisExplanation
from presidio_analyzer.nlp_engine import NlpArtifacts
from hebsafeharbor.common.terms_recognizer import TermsRecognizer
class LexiconBasedRecognizer(EntityRecognizer):
"""
    A class that extends the EntityRecognizer (@Presidio) and recognizes entities based on a lexicon
"""
DEFAULT_CONFIDENCE_LEVEL = 0.7 # expected confidence level for this recognizer
def __init__(self, name: str, supported_entity: str, phrase_list: List[str], supported_language: str = "he",
allowed_prepositions: List[str] = None):
"""
Initializes Hebrew LexiconBasedRecognizer
:param name: recognizer's name
:param supported_entity: entity type to be associated with the entities recognized by the lexicon based
recognizer
:param phrase_list: lexicon's phrases
:param supported_language: the language that the recognizer supports. Hebrew is the default
        :param allowed_prepositions: prepositions that are allowed to be recognized as part of the entity (in addition to
the lexicon phrase itself). Empty list (which means prepositions are not allowed) is the default
"""
super().__init__(name=name, supported_entities=[supported_entity], supported_language=supported_language)
self.terms_recognizer = TermsRecognizer(phrase_list)
self.allowed_prepositions = allowed_prepositions if allowed_prepositions else []
def load(self) -> None:
"""No loading is required."""
pass
def analyze(
self, text: str, entities: List[str], nlp_artifacts: NlpArtifacts
) -> List[RecognizerResult]:
"""
Recognize entities based on lexicon
:param text: text for recognition
:param entities: supported entities
:param nlp_artifacts: artifacts of the nlp engine
:return list of entities recognized based on the lexicon
"""
results = []
terms_offsets = self.terms_recognizer(text, prefixes=self.allowed_prepositions)
# Iterate over the Automaton offsets and create Recognizer result for each of them
for start_offset, length in terms_offsets:
result = RecognizerResult(
entity_type=self.supported_entities[0],
start=start_offset,
end=start_offset + length,
score=self.DEFAULT_CONFIDENCE_LEVEL,
analysis_explanation=AnalysisExplanation(self.name, self.DEFAULT_CONFIDENCE_LEVEL),
recognition_metadata={RecognizerResult.RECOGNIZER_NAME_KEY: self.name}
)
results.append(result)
return results
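# Usage sketch (illustrative only; the entity type, phrases and prepositions
# below are made-up placeholders, not values from this project):
#
#   recognizer = LexiconBasedRecognizer(name="city_recognizer",
#                                        supported_entity="CITY",
#                                        phrase_list=["תל אביב", "חיפה"],
#                                        allowed_prepositions=["ב", "ל", "מ"])
#   results = recognizer.analyze(text="נסעתי לחיפה", entities=["CITY"],
#                                nlp_artifacts=None)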
| [((1415, 1443), 'hebsafeharbor.common.terms_recognizer.TermsRecognizer', 'TermsRecognizer', (['phrase_list'], {}), '(phrase_list)\n', (1430, 1443), False, 'from hebsafeharbor.common.terms_recognizer import TermsRecognizer\n'), ((2536, 2597), 'presidio_analyzer.AnalysisExplanation', 'AnalysisExplanation', (['self.name', 'self.DEFAULT_CONFIDENCE_LEVEL'], {}), '(self.name, self.DEFAULT_CONFIDENCE_LEVEL)\n', (2555, 2597), False, 'from presidio_analyzer import EntityRecognizer, RecognizerResult, AnalysisExplanation\n')] |
cyx233/vim_config | my_plugins/YouCompleteMe/third_party/ycmd/ycmd/tests/clangd/subcommands_test.py | f09c9206344c17df20a05dd2c08a02f098a7e873 | # encoding: utf-8
#
# Copyright (C) 2018 ycmd contributors
#
# This file is part of ycmd.
#
# ycmd is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ycmd is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ycmd. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from hamcrest.core.base_matcher import BaseMatcher
from hamcrest import ( assert_that,
contains,
contains_string,
equal_to,
has_entries,
has_entry,
matches_regexp )
from pprint import pprint
import requests
import os.path
from ycmd.tests.clangd import ( IsolatedYcmd,
SharedYcmd,
PathToTestFile,
RunAfterInitialized )
from ycmd.tests.test_utils import ( BuildRequest,
ChunkMatcher,
CombineRequest,
LineColMatcher,
LocationMatcher,
ErrorMatcher,
WithRetry,
WaitUntilCompleterServerReady )
from ycmd.utils import ReadFile
# This test is isolated to trigger objcpp hooks, rather than fetching completer
# from cache.
@IsolatedYcmd()
def Subcommands_DefinedSubcommands_test( app ):
file_path = PathToTestFile( 'GoTo_Clang_ZeroBasedLineAndColumn_test.cc' )
RunAfterInitialized( app, {
'request': {
'completer_target': 'filetype_default',
'line_num': 10,
'column_num': 3,
'filetype': 'objcpp',
'filepath': file_path
},
'expect': {
'response': requests.codes.ok,
'data': contains( *sorted( [ 'ExecuteCommand',
'FixIt',
'Format',
'GetDoc',
'GetDocImprecise',
'GetType',
'GetTypeImprecise',
'GoTo',
'GoToDeclaration',
'GoToDefinition',
'GoToImprecise',
'GoToInclude',
'GoToReferences',
'RefactorRename',
'RestartServer' ] ) )
},
'route': '/defined_subcommands',
} )
@SharedYcmd
def Subcommands_GoTo_ZeroBasedLineAndColumn_test( app ):
file_path = PathToTestFile( 'GoTo_Clang_ZeroBasedLineAndColumn_test.cc' )
RunAfterInitialized( app, {
'request': {
'contents': ReadFile( file_path ),
'completer_target': 'filetype_default',
'command_arguments': [ 'GoToDefinition' ],
'line_num': 10,
'column_num': 3,
'filetype': 'cpp',
'filepath': file_path
},
'expect': {
'response': requests.codes.ok,
'data': {
'filepath': os.path.abspath( file_path ),
'line_num': 2,
'column_num': 8
}
},
'route': '/run_completer_command',
} )
@SharedYcmd
def RunGoToTest_all( app, folder, command, test ):
filepath = PathToTestFile( folder, test[ 'req' ][ 0 ] )
common_request = {
'completer_target' : 'filetype_default',
'filepath' : filepath,
'command_arguments': [ command ],
'contents' : ReadFile( filepath ),
'filetype' : 'cpp'
}
request = common_request
request.update( {
'line_num' : test[ 'req' ][ 1 ],
'column_num': test[ 'req' ][ 2 ],
} )
response = test[ 'res' ]
if isinstance( response, list ):
expect = {
'response': requests.codes.ok,
'data': contains( *[
LocationMatcher(
PathToTestFile( folder, os.path.normpath( location[ 0 ] ) ),
location[ 1 ],
location[ 2 ]
) for location in response
] )
}
elif isinstance( response, tuple ):
expect = {
'response': requests.codes.ok,
'data': LocationMatcher(
PathToTestFile( folder, os.path.normpath( response[ 0 ] ) ),
response[ 1 ],
response[ 2 ]
)
}
else:
expect = {
'response': requests.codes.internal_server_error,
'data': ErrorMatcher( RuntimeError, test[ 'res' ] )
}
RunAfterInitialized( app, {
'request': request,
'route' : '/run_completer_command',
'expect' : expect
} )
def Subcommands_GoTo_all_test():
tests = [
# Local::x -> definition/declaration of x
{ 'req': ( 'goto.cc', 23, 21 ), 'res': ( 'goto.cc', 4, 9 ) },
# Local::in_line -> definition/declaration of Local::in_line
{ 'req': ( 'goto.cc', 24, 26 ), 'res': ( 'goto.cc', 6, 10 ) },
# Local -> definition/declaration of Local
{ 'req': ( 'goto.cc', 24, 16 ), 'res': ( 'goto.cc', 2, 11 ) },
# Local::out_of_line -> definition of Local::out_of_line
{ 'req': ( 'goto.cc', 25, 27 ), 'res': ( 'goto.cc', 14, 13 ) },
# GoToDeclaration alternates between definition and declaration
{ 'req': ( 'goto.cc', 14, 13 ), 'res': ( 'goto.cc', 11, 10 ) },
{ 'req': ( 'goto.cc', 11, 10 ), 'res': ( 'goto.cc', 14, 13 ) },
# test -> definition and declaration of test
{ 'req': ( 'goto.cc', 21, 5 ), 'res': ( 'goto.cc', 19, 5 ) },
{ 'req': ( 'goto.cc', 19, 5 ), 'res': ( 'goto.cc', 21, 5 ) },
# Unicøde
{ 'req': ( 'goto.cc', 34, 9 ), 'res': ( 'goto.cc', 32, 26 ) },
# Another_Unicøde
{ 'req': ( 'goto.cc', 36, 17 ), 'res': ( 'goto.cc', 32, 54 ) },
{ 'req': ( 'goto.cc', 36, 25 ), 'res': ( 'goto.cc', 32, 54 ) },
{ 'req': ( 'goto.cc', 38, 3 ), 'res': ( 'goto.cc', 36, 28 ) },
# Expected failures
{ 'req': ( 'goto.cc', 13, 1 ), 'res': 'Cannot jump to location' },
{ 'req': ( 'goto.cc', 16, 6 ), 'res': 'Cannot jump to location' },
]
for test in tests:
for cmd in [ 'GoToDefinition', 'GoTo', 'GoToImprecise' ]:
yield RunGoToTest_all, '', cmd, test
def Subcommands_GoToDeclaration_all_test():
tests = [
# Local::x -> definition/declaration of x
{ 'req': ( 'goto.cc', 23, 21 ), 'res': ( 'goto.cc', 4, 9 ) },
# Local::in_line -> definition/declaration of Local::in_line
{ 'req': ( 'goto.cc', 24, 26 ), 'res': ( 'goto.cc', 6, 10 ) },
# Local -> definition/declaration of Local
{ 'req': ( 'goto.cc', 24, 16 ), 'res': ( 'goto.cc', 2, 11 ) },
# Local::out_of_line -> declaration of Local::out_of_line
{ 'req': ( 'goto.cc', 25, 27 ), 'res': ( 'goto.cc', 11, 10 ) },
# GoToDeclaration alternates between definition and declaration
{ 'req': ( 'goto.cc', 14, 13 ), 'res': ( 'goto.cc', 11, 10 ) },
{ 'req': ( 'goto.cc', 11, 10 ), 'res': ( 'goto.cc', 14, 13 ) },
# test -> definition and declaration of test
{ 'req': ( 'goto.cc', 21, 5 ), 'res': ( 'goto.cc', 19, 5 ) },
{ 'req': ( 'goto.cc', 19, 5 ), 'res': ( 'goto.cc', 21, 5 ) },
# Unicøde
{ 'req': ( 'goto.cc', 34, 9 ), 'res': ( 'goto.cc', 32, 26 ) },
# Another_Unicøde
{ 'req': ( 'goto.cc', 36, 17 ), 'res': ( 'goto.cc', 32, 54 ) },
{ 'req': ( 'goto.cc', 36, 25 ), 'res': ( 'goto.cc', 32, 54 ) },
{ 'req': ( 'goto.cc', 38, 3 ), 'res': ( 'goto.cc', 36, 28 ) },
# Expected failures
{ 'req': ( 'goto.cc', 13, 1 ), 'res': 'Cannot jump to location' },
{ 'req': ( 'goto.cc', 16, 6 ), 'res': 'Cannot jump to location' },
]
for test in tests:
yield RunGoToTest_all, '', 'GoToDeclaration', test
def Subcommands_GoToInclude_test():
tests = [
{ 'req': ( 'main.cpp', 1, 6 ), 'res': ( 'a.hpp', 1, 1 ) },
{ 'req': ( 'main.cpp', 2, 14 ), 'res': ( 'system/a.hpp', 1, 1 ) },
{ 'req': ( 'main.cpp', 3, 1 ), 'res': ( 'quote/b.hpp', 1, 1 ) },
# FIXME: should fail since b.hpp is included with angled brackets but its
# folder is added with -iquote.
{ 'req': ( 'main.cpp', 4, 10 ), 'res': ( 'quote/b.hpp', 1, 1 ) },
{ 'req': ( 'main.cpp', 5, 11 ), 'res': ( 'system/c.hpp', 1, 1 ) },
{ 'req': ( 'main.cpp', 6, 11 ), 'res': ( 'system/c.hpp', 1, 1 ) },
# Expected failures
{ 'req': ( 'main.cpp', 7, 1 ), 'res': 'Cannot jump to location' },
{ 'req': ( 'main.cpp', 10, 13 ), 'res': 'Cannot jump to location' },
]
for test in tests:
for cmd in [ 'GoToInclude', 'GoTo', 'GoToImprecise' ]:
yield RunGoToTest_all, 'test-include', cmd, test
def Subcommands_GoToReferences_test():
tests = [
# Function
{ 'req': ( 'goto.cc', 14, 21 ), 'res': [ ( 'goto.cc', 11, 10 ),
( 'goto.cc', 14, 13 ),
( 'goto.cc', 25, 22 ) ] },
# Namespace
{ 'req': ( 'goto.cc', 24, 17 ), 'res': [ ( 'goto.cc', 2, 11 ),
( 'goto.cc', 14, 6 ),
( 'goto.cc', 23, 14 ),
( 'goto.cc', 24, 15 ),
( 'goto.cc', 25, 15 ) ] },
# Expected failure
{ 'req': ( 'goto.cc', 27, 8 ), 'res': 'Cannot jump to location' },
]
for test in tests:
yield RunGoToTest_all, '', 'GoToReferences', test
@SharedYcmd
def RunGetSemanticTest( app,
filepath,
filetype,
test,
command,
response = requests.codes.ok ):
contents = ReadFile( filepath )
common_args = {
'completer_target' : 'filetype_default',
'command_arguments': command,
'line_num' : 10,
'column_num' : 3,
'filepath' : filepath,
'contents' : contents,
'filetype' : filetype
}
args = test[ 0 ]
if response == requests.codes.ok:
if not isinstance( test[ 1 ], BaseMatcher ):
expected = has_entry( 'message', contains_string( test[ 1 ] ) )
else:
expected = has_entry( 'message', test[ 1 ] )
else:
expected = test[ 1 ]
request = common_args
request.update( args )
test = { 'request': request,
'route': '/run_completer_command',
'expect': { 'response': response,
'data': expected } }
RunAfterInitialized( app, test )
def Subcommands_GetType_test():
tests = [
# Basic pod types
[ { 'line_num': 24, 'column_num': 3 }, 'Foo' ],
# [ { 'line_num': 12, 'column_num': 2 }, 'Foo' ],
[ { 'line_num': 12, 'column_num': 8 }, 'Foo' ],
[ { 'line_num': 12, 'column_num': 9 }, 'Foo' ],
[ { 'line_num': 12, 'column_num': 10 }, 'Foo' ],
# [ { 'line_num': 13, 'column_num': 3 }, 'int' ],
[ { 'line_num': 13, 'column_num': 7 }, 'int' ],
# [ { 'line_num': 15, 'column_num': 7 }, 'char' ],
# Function
# [ { 'line_num': 22, 'column_num': 2 }, 'int main()' ],
[ { 'line_num': 22, 'column_num': 6 }, 'int main()' ],
# Declared and canonical type
# On Ns::
[ { 'line_num': 25, 'column_num': 3 }, 'namespace Ns' ],
# On Type (Type)
# [ { 'line_num': 25, 'column_num': 8 },
# 'Ns::Type => Ns::BasicType<char>' ],
# On "a" (Ns::Type)
# [ { 'line_num': 25, 'column_num': 15 },
# 'Ns::Type => Ns::BasicType<char>' ],
# [ { 'line_num': 26, 'column_num': 13 },
# 'Ns::Type => Ns::BasicType<char>' ],
# Cursor on decl for refs & pointers
[ { 'line_num': 39, 'column_num': 3 }, 'Foo' ],
[ { 'line_num': 39, 'column_num': 11 }, 'Foo &' ],
[ { 'line_num': 39, 'column_num': 15 }, 'Foo' ],
[ { 'line_num': 40, 'column_num': 3 }, 'Foo' ],
[ { 'line_num': 40, 'column_num': 11 }, 'Foo *' ],
[ { 'line_num': 40, 'column_num': 18 }, 'Foo' ],
# [ { 'line_num': 42, 'column_num': 3 }, 'const Foo &' ],
[ { 'line_num': 42, 'column_num': 16 }, 'const struct Foo &' ],
# [ { 'line_num': 43, 'column_num': 3 }, 'const Foo *' ],
[ { 'line_num': 43, 'column_num': 16 }, 'const struct Foo *' ],
# Cursor on usage
[ { 'line_num': 45, 'column_num': 13 }, 'const struct Foo' ],
# [ { 'line_num': 45, 'column_num': 19 }, 'const int' ],
[ { 'line_num': 46, 'column_num': 13 }, 'const struct Foo *' ],
# [ { 'line_num': 46, 'column_num': 20 }, 'const int' ],
[ { 'line_num': 47, 'column_num': 12 }, 'Foo' ],
[ { 'line_num': 47, 'column_num': 17 }, 'int' ],
[ { 'line_num': 48, 'column_num': 12 }, 'Foo *' ],
[ { 'line_num': 48, 'column_num': 18 }, 'int' ],
# Auto in declaration
# [ { 'line_num': 28, 'column_num': 3 }, 'struct Foo &' ],
# [ { 'line_num': 28, 'column_num': 11 }, 'struct Foo &' ],
[ { 'line_num': 28, 'column_num': 18 }, 'struct Foo' ],
# [ { 'line_num': 29, 'column_num': 3 }, 'Foo *' ],
# [ { 'line_num': 29, 'column_num': 11 }, 'Foo *' ],
[ { 'line_num': 29, 'column_num': 18 }, 'Foo' ],
# [ { 'line_num': 31, 'column_num': 3 }, 'const Foo &' ],
# [ { 'line_num': 31, 'column_num': 16 }, 'const Foo &' ],
# [ { 'line_num': 32, 'column_num': 3 }, 'const Foo *' ],
# [ { 'line_num': 32, 'column_num': 16 }, 'const Foo *' ],
# Auto in usage
# [ { 'line_num': 34, 'column_num': 14 }, 'const Foo' ],
# [ { 'line_num': 34, 'column_num': 21 }, 'const int' ],
# [ { 'line_num': 35, 'column_num': 14 }, 'const Foo *' ],
# [ { 'line_num': 35, 'column_num': 22 }, 'const int' ],
[ { 'line_num': 36, 'column_num': 13 }, 'Foo' ],
[ { 'line_num': 36, 'column_num': 19 }, 'int' ],
# [ { 'line_num': 37, 'column_num': 13 }, 'Foo *' ],
[ { 'line_num': 37, 'column_num': 20 }, 'int' ],
# Unicode
[ { 'line_num': 51, 'column_num': 13 }, 'Unicøde *' ],
# Bound methods
# On Win32, methods pick up an __attribute__((thiscall)) to annotate their
# calling convention. This shows up in the type, which isn't ideal, but
# also prohibitively complex to try and strip out.
[ { 'line_num': 53, 'column_num': 15 },
matches_regexp(
r'int bar\(int i\)(?: __attribute__\(\(thiscall\)\))?' ) ],
[ { 'line_num': 54, 'column_num': 18 },
matches_regexp(
r'int bar\(int i\)(?: __attribute__\(\(thiscall\)\))?' ) ],
]
for subcommand in [ 'GetType', 'GetTypeImprecise' ]:
for test in tests:
yield ( RunGetSemanticTest,
PathToTestFile( 'GetType_Clang_test.cc' ),
'cpp',
test,
[ subcommand ] )
def Subcommands_GetDoc_test():
tests = [
# from local file
[ { 'line_num': 5, 'column_num': 10 }, 'docstring', requests.codes.ok ],
# from header
[ { 'line_num': 6, 'column_num': 10 }, 'docstring', requests.codes.ok ],
# no docstring
[ { 'line_num': 7, 'column_num': 7 }, 'int x = 3', requests.codes.ok ],
# no hover
[ { 'line_num': 8, 'column_num': 1 },
ErrorMatcher( RuntimeError, 'No hover information.' ),
requests.codes.server_error ]
]
for subcommand in [ 'GetDoc', 'GetDocImprecise' ]:
for test in tests:
yield ( RunGetSemanticTest,
PathToTestFile( 'GetDoc_Clang_test.cc' ),
'cpp',
test,
[ subcommand ],
test[ 2 ] )
@SharedYcmd
def RunFixItTest( app, line, column, lang, file_path, check ):
contents = ReadFile( file_path )
language_options = {
'cpp11': {
'filetype' : 'cpp',
},
'cuda': {
'filetype' : 'cuda',
},
'objective-c': {
'filetype' : 'objc',
},
}
args = {
'completer_target' : 'filetype_default',
'contents' : contents,
'filepath' : file_path,
'command_arguments': [ 'FixIt' ],
'line_num' : line,
'column_num' : column,
}
args.update( language_options[ lang ] )
test = { 'request': args, 'route': '/detailed_diagnostic' }
# First get diags.
diags = RunAfterInitialized( app, test )
while 'message' in diags and 'diagnostics' in diags[ 'message' ].lower():
receive_diags = { 'request': args, 'route': '/receive_messages' }
RunAfterInitialized( app, receive_diags )
diags = RunAfterInitialized( app, test )
results = app.post_json( '/run_completer_command',
BuildRequest( **args ) ).json
pprint( results )
check( results )
def FixIt_Check_cpp11_Ins( results ):
# First fixit
# switch(A()) { // expected-error{{explicit conversion to}}
assert_that( results, has_entries( {
'fixits': contains( has_entries( {
'chunks': contains(
has_entries( {
'replacement_text': equal_to( 'static_cast<int>(' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 16, 'column_num': 10 } ),
'end' : has_entries( { 'line_num': 16, 'column_num': 10 } ),
} ),
} ),
has_entries( {
'replacement_text': equal_to( ')' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 16, 'column_num': 13 } ),
'end' : has_entries( { 'line_num': 16, 'column_num': 13 } ),
} ),
} )
),
'location': has_entries( { 'line_num': 16, 'column_num': 0 } )
} ) )
} ) )
def FixIt_Check_cpp11_InsMultiLine( results ):
# Similar to FixIt_Check_cpp11_1 but inserts split across lines
#
assert_that( results, has_entries( {
'fixits': contains( has_entries( {
'chunks': contains(
has_entries( {
'replacement_text': equal_to( 'static_cast<int>(' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 26, 'column_num': 7 } ),
'end' : has_entries( { 'line_num': 26, 'column_num': 7 } ),
} ),
} ),
has_entries( {
'replacement_text': equal_to( ')' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 28, 'column_num': 2 } ),
'end' : has_entries( { 'line_num': 28, 'column_num': 2 } ),
} ),
} )
),
'location': has_entries( { 'line_num': 25, 'column_num': 14 } )
} ) )
} ) )
def FixIt_Check_cpp11_Del( results ):
# Removal of ::
assert_that( results, has_entries( {
'fixits': contains( has_entries( {
'chunks': contains(
has_entries( {
'replacement_text': equal_to( '' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 35, 'column_num': 7 } ),
'end' : has_entries( { 'line_num': 35, 'column_num': 9 } ),
} ),
} )
),
'location': has_entries( { 'line_num': 35, 'column_num': 7 } )
} ) )
} ) )
def FixIt_Check_cpp11_Repl( results ):
assert_that( results, has_entries( {
'fixits': contains( has_entries( {
'chunks': contains(
has_entries( {
'replacement_text': equal_to( 'foo' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 40, 'column_num': 6 } ),
'end' : has_entries( { 'line_num': 40, 'column_num': 9 } ),
} ),
} )
),
'location': has_entries( { 'line_num': 40, 'column_num': 6 } )
} ) )
} ) )
def FixIt_Check_cpp11_DelAdd( results ):
assert_that( results, has_entries( {
'fixits': contains(
has_entries( {
'chunks': contains(
has_entries( {
'replacement_text': equal_to( '' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 48, 'column_num': 3 } ),
'end' : has_entries( { 'line_num': 48, 'column_num': 4 } ),
} ),
} ),
has_entries( {
'replacement_text': equal_to( '~' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 48, 'column_num': 9 } ),
'end' : has_entries( { 'line_num': 48, 'column_num': 9 } ),
} ),
} ),
),
'location': has_entries( { 'line_num': 48, 'column_num': 3 } )
} ),
has_entries( {
'chunks': contains(
has_entries( {
'replacement_text': equal_to( '= default;' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 48, 'column_num': 15 } ),
'end' : has_entries( { 'line_num': 48, 'column_num': 17 } ),
} ),
} ),
),
'location': has_entries( { 'line_num': 48, 'column_num': 3 } )
} ),
)
} ) )
def FixIt_Check_objc( results ):
assert_that( results, has_entries( {
'fixits': contains( has_entries( {
'chunks': contains(
has_entries( {
'replacement_text': equal_to( 'id' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 5, 'column_num': 3 } ),
'end' : has_entries( { 'line_num': 5, 'column_num': 3 } ),
} ),
} )
),
'location': has_entries( { 'line_num': 5, 'column_num': 3 } )
} ) )
} ) )
def FixIt_Check_objc_NoFixIt( results ):
# and finally, a warning with no fixits
assert_that( results, equal_to( { 'fixits': [] } ) )
def FixIt_Check_cpp11_MultiFirst( results ):
assert_that( results, has_entries( {
'fixits': contains(
# first fix-it at 54,16
has_entries( {
'chunks': contains(
has_entries( {
'replacement_text': equal_to( 'foo' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 54, 'column_num': 16 } ),
'end' : has_entries( { 'line_num': 54, 'column_num': 19 } ),
} ),
} )
),
'location': has_entries( { 'line_num': 54, 'column_num': 15 } )
} ),
# second fix-it at 54,52
has_entries( {
'chunks': contains(
has_entries( {
'replacement_text': equal_to( '' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 54, 'column_num': 52 } ),
'end' : has_entries( { 'line_num': 54, 'column_num': 53 } ),
} ),
} ),
has_entries( {
'replacement_text': equal_to( '~' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 54, 'column_num': 58 } ),
'end' : has_entries( { 'line_num': 54, 'column_num': 58 } ),
} ),
} ),
),
'location': has_entries( { 'line_num': 54, 'column_num': 15 } )
} ),
has_entries( {
'chunks': contains(
has_entries( {
'replacement_text': equal_to( '= default;' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 54, 'column_num': 64 } ),
'end' : has_entries( { 'line_num': 54, 'column_num': 67 } ),
} ),
} )
),
'location': has_entries( { 'line_num': 54, 'column_num': 15 } )
} ),
)
} ) )
def FixIt_Check_cpp11_MultiSecond( results ):
assert_that( results, has_entries( {
'fixits': contains(
# first fix-it at 54,16
has_entries( {
'chunks': contains(
has_entries( {
'replacement_text': equal_to( 'foo' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 54, 'column_num': 16 } ),
'end' : has_entries( { 'line_num': 54, 'column_num': 19 } ),
} ),
} )
),
'location': has_entries( { 'line_num': 54, 'column_num': 51 } )
} ),
# second fix-it at 54,52
has_entries( {
'chunks': contains(
has_entries( {
'replacement_text': equal_to( '' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 54, 'column_num': 52 } ),
'end' : has_entries( { 'line_num': 54, 'column_num': 53 } ),
} ),
} ),
has_entries( {
'replacement_text': equal_to( '~' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 54, 'column_num': 58 } ),
'end' : has_entries( { 'line_num': 54, 'column_num': 58 } ),
} ),
} ),
),
'location': has_entries( { 'line_num': 54, 'column_num': 51 } )
} ),
has_entries( {
'chunks': contains(
has_entries( {
'replacement_text': equal_to( '= default;' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 54, 'column_num': 64 } ),
'end' : has_entries( { 'line_num': 54, 'column_num': 67 } ),
} ),
} )
),
'location': has_entries( { 'line_num': 54, 'column_num': 51 } )
} ),
)
} ) )
def FixIt_Check_unicode_Ins( results ):
assert_that( results, has_entries( {
'fixits': contains( has_entries( {
'chunks': contains(
has_entries( {
'replacement_text': equal_to( '=' ),
'range': has_entries( {
'start': has_entries( { 'line_num': 21, 'column_num': 9 } ),
'end' : has_entries( { 'line_num': 21, 'column_num': 11 } ),
} ),
} )
),
'location': has_entries( { 'line_num': 21, 'column_num': 16 } )
} ) )
} ) )
def FixIt_Check_cpp11_Note( results ):
assert_that( results, has_entries( {
'fixits': contains(
# First note: put parens around it
has_entries( {
'text': contains_string( 'parentheses around the assignment' ),
'chunks': contains(
ChunkMatcher( '(',
LineColMatcher( 59, 8 ),
LineColMatcher( 59, 8 ) ),
ChunkMatcher( ')',
LineColMatcher( 61, 12 ),
LineColMatcher( 61, 12 ) )
),
'location': LineColMatcher( 60, 1 ),
} ),
# Second note: change to ==
has_entries( {
'text': contains_string( '==' ),
'chunks': contains(
ChunkMatcher( '==',
LineColMatcher( 60, 8 ),
LineColMatcher( 60, 9 ) )
),
'location': LineColMatcher( 60, 1 ),
} ),
# Unresolved, requires /resolve_fixit request
has_entries( {
'text': 'Extract subexpression to variable',
'resolve': True,
'command': has_entries( { 'command': 'clangd.applyTweak' } )
} )
)
} ) )
def FixIt_Check_cpp11_SpellCheck( results ):
assert_that( results, has_entries( {
'fixits': contains(
# Change to SpellingIsNotMyStrongPoint
has_entries( {
'text': contains_string( "change 'SpellingIsNotMyStringPiont' to "
"'SpellingIsNotMyStrongPoint'" ),
'chunks': contains(
ChunkMatcher( 'SpellingIsNotMyStrongPoint',
LineColMatcher( 72, 9 ),
LineColMatcher( 72, 35 ) )
),
'location': LineColMatcher( 72, 9 ),
} ) )
} ) )
def FixIt_Check_cuda( results ):
assert_that( results, has_entries( {
'fixits': contains(
has_entries( {
'text': contains_string(
"change 'int' to 'void'" ),
'chunks': contains(
ChunkMatcher( 'void',
LineColMatcher( 3, 12 ),
LineColMatcher( 3, 15 ) )
),
'location': LineColMatcher( 3, 12 ),
} ) )
} ) )
def FixIt_Check_SubexprExtract_Resolved( results ):
assert_that( results, has_entries( {
'fixits': contains( has_entries( {
'text': 'Extract subexpression to variable',
'chunks': contains(
ChunkMatcher( 'auto dummy = foo(i + 3);\n ',
LineColMatcher( 84, 3 ),
LineColMatcher( 84, 3 ) ),
ChunkMatcher( 'dummy',
LineColMatcher( 84, 10 ),
LineColMatcher( 84, 22 ) ),
)
} ) )
} ) )
def FixIt_Check_RawStringReplace_Resolved( results ):
assert_that( results, has_entries( {
'fixits': contains( has_entries( {
'text': 'Convert to raw string',
'chunks': contains(
ChunkMatcher( 'R"(\\\\r\\asd\n\\v)"',
LineColMatcher( 80, 19 ),
LineColMatcher( 80, 36 ) ),
)
} ) )
} ) )
def FixIt_Check_MacroExpand_Resolved( results ):
assert_that( results, has_entries( {
'fixits': contains( has_entries( {
'text': "Expand macro 'DECLARE_INT'",
'chunks': contains(
ChunkMatcher( 'int i',
LineColMatcher( 83, 3 ),
LineColMatcher( 83, 17 ) ),
)
} ) )
} ) )
def FixIt_Check_AutoExpand_Resolved( results ):
assert_that( results, has_entries( {
'fixits': contains( has_entries( {
'text': "Expand auto type",
'chunks': contains(
ChunkMatcher( 'const char *',
LineColMatcher( 80, 1 ),
LineColMatcher( 80, 6 ) ),
)
} ) )
} ) )
def Subcommands_FixIt_all_test():
cfile = PathToTestFile( 'FixIt_Clang_cpp11.cpp' )
mfile = PathToTestFile( 'objc', 'FixIt_Clang_objc.m' )
cufile = PathToTestFile( 'cuda', 'fixit_test.cu' )
ufile = PathToTestFile( 'unicode.cc' )
tests = [
# L
# i C
# n o
# e l Lang File, Checker
[ 16, 0, 'cpp11', cfile, FixIt_Check_cpp11_Ins ],
[ 25, 14, 'cpp11', cfile, FixIt_Check_cpp11_InsMultiLine ],
[ 35, 7, 'cpp11', cfile, FixIt_Check_cpp11_Del ],
[ 40, 6, 'cpp11', cfile, FixIt_Check_cpp11_Repl ],
[ 48, 3, 'cpp11', cfile, FixIt_Check_cpp11_DelAdd ],
[ 5, 3, 'objective-c', mfile, FixIt_Check_objc ],
[ 7, 1, 'objective-c', mfile, FixIt_Check_objc_NoFixIt ],
[ 3, 12, 'cuda', cufile, FixIt_Check_cuda ],
# multiple errors on a single line; both with fixits
[ 54, 15, 'cpp11', cfile, FixIt_Check_cpp11_MultiFirst ],
# should put closest fix-it first?
[ 54, 51, 'cpp11', cfile, FixIt_Check_cpp11_MultiSecond ],
# unicode in line for fixit
[ 21, 16, 'cpp11', ufile, FixIt_Check_unicode_Ins ],
# FixIt attached to a "child" diagnostic (i.e. a Note)
[ 60, 1, 'cpp11', cfile, FixIt_Check_cpp11_Note ],
# FixIt due to forced spell checking
[ 72, 9, 'cpp11', cfile, FixIt_Check_cpp11_SpellCheck ],
]
for test in tests:
yield RunFixItTest, test[ 0 ], test[ 1 ], test[ 2 ], test[ 3 ], test[ 4 ]
@WithRetry
@SharedYcmd
def RunRangedFixItTest( app, rng, expected ):
contents = ReadFile( PathToTestFile( 'FixIt_Clang_cpp11.cpp' ) )
args = {
'completer_target' : 'filetype_default',
'contents' : contents,
'filepath' : PathToTestFile( 'FixIt_Clang_cpp11.cpp' ),
'command_arguments': [ 'FixIt' ],
'range' : rng,
'filetype' : 'cpp'
}
app.post_json( '/event_notification',
CombineRequest( args, {
'event_name': 'FileReadyToParse',
} ),
expect_errors = True )
WaitUntilCompleterServerReady( app, 'cpp' )
response = app.post_json( '/run_completer_command',
BuildRequest( **args ) ).json
args[ 'fixit' ] = response[ 'fixits' ][ 0 ]
response = app.post_json( '/resolve_fixit',
BuildRequest( **args ) ).json
print( 'Resolved fixit response = ' )
print( response )
expected( response )
def Subcommands_FixIt_Ranged_test():
expand_auto_range = {
'start': { 'line_num': 80, 'column_num': 1 },
'end': { 'line_num': 80, 'column_num': 4 },
}
subexpression_extract_range = {
'start': { 'line_num': 84, 'column_num': 14 },
'end': { 'line_num': 84, 'column_num': 20 },
}
macro_expand_range = {
'start': { 'line_num': 83, 'column_num': 3 },
'end': { 'line_num': 83, 'column_num': 13 },
}
raw_string_range = {
'start': { 'line_num': 80, 'column_num': 19 },
'end': { 'line_num': 80, 'column_num': 35 },
}
tests = [
[ expand_auto_range, FixIt_Check_AutoExpand_Resolved ],
[ macro_expand_range, FixIt_Check_MacroExpand_Resolved ],
[ subexpression_extract_range, FixIt_Check_SubexprExtract_Resolved ],
[ raw_string_range, FixIt_Check_RawStringReplace_Resolved ],
]
for test in tests:
yield RunRangedFixItTest, test[ 0 ], test[ 1 ]
@WithRetry
@SharedYcmd
def Subcommands_FixIt_AlreadyResolved_test( app ):
filename = PathToTestFile( 'FixIt_Clang_cpp11.cpp' )
request = {
'completer_target' : 'filetype_default',
'contents' : ReadFile( filename ),
'filepath' : filename,
'command_arguments': [ 'FixIt' ],
'line_num' : 16,
'column_num' : 1,
'filetype' : 'cpp'
}
app.post_json( '/event_notification',
CombineRequest( request, {
'event_name': 'FileReadyToParse',
} ),
expect_errors = True )
WaitUntilCompleterServerReady( app, 'cpp' )
expected = app.post_json( '/run_completer_command',
BuildRequest( **request ) ).json
print( 'expected = ' )
print( expected )
request[ 'fixit' ] = expected[ 'fixits' ][ 0 ]
actual = app.post_json( '/resolve_fixit',
BuildRequest( **request ) ).json
print( 'actual = ' )
print( actual )
assert_that( actual, equal_to( expected ) )
@SharedYcmd
def Subcommands_RefactorRename_test( app ):
test = {
'request': {
'filetype': 'cpp',
'completer_target': 'filetype_default',
'contents': ReadFile( PathToTestFile( 'basic.cpp' ) ),
'filepath': PathToTestFile( 'basic.cpp' ),
'command_arguments': [ 'RefactorRename', 'Bar' ],
'line_num': 17,
'column_num': 4,
},
'expect': {
'response': requests.codes.ok,
'data': has_entries( {
'fixits': contains( has_entries( {
'chunks': contains(
ChunkMatcher( 'Bar',
LineColMatcher( 1, 8 ),
LineColMatcher( 1, 11 ) ),
ChunkMatcher( 'Bar',
LineColMatcher( 9, 3 ),
LineColMatcher( 9, 6 ) ),
ChunkMatcher( '\n\n',
LineColMatcher( 12, 2 ),
LineColMatcher( 15, 1 ) ),
ChunkMatcher( 'Bar',
LineColMatcher( 15, 8 ),
LineColMatcher( 15, 11 ) ),
ChunkMatcher( ' ',
LineColMatcher( 15, 46 ),
LineColMatcher( 16, 1 ) ),
ChunkMatcher( 'Bar',
LineColMatcher( 17, 3 ),
LineColMatcher( 17, 6 ) ),
ChunkMatcher( '',
LineColMatcher( 17, 14 ),
LineColMatcher( 17, 15 ) ),
ChunkMatcher( ' ',
LineColMatcher( 17, 17 ),
LineColMatcher( 17, 17 ) ),
ChunkMatcher( ' ',
LineColMatcher( 17, 19 ),
LineColMatcher( 17, 19 ) ),
)
} ) )
} )
},
'route': '/run_completer_command'
}
RunAfterInitialized( app, test )
| [((1957, 1971), 'ycmd.tests.clangd.IsolatedYcmd', 'IsolatedYcmd', ([], {}), '()\n', (1969, 1971), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((2034, 2093), 'ycmd.tests.clangd.PathToTestFile', 'PathToTestFile', (['"""GoTo_Clang_ZeroBasedLineAndColumn_test.cc"""'], {}), "('GoTo_Clang_ZeroBasedLineAndColumn_test.cc')\n", (2048, 2093), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((3294, 3353), 'ycmd.tests.clangd.PathToTestFile', 'PathToTestFile', (['"""GoTo_Clang_ZeroBasedLineAndColumn_test.cc"""'], {}), "('GoTo_Clang_ZeroBasedLineAndColumn_test.cc')\n", (3308, 3353), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((4016, 4054), 'ycmd.tests.clangd.PathToTestFile', 'PathToTestFile', (['folder', "test['req'][0]"], {}), "(folder, test['req'][0])\n", (4030, 4054), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((5146, 5249), 'ycmd.tests.clangd.RunAfterInitialized', 'RunAfterInitialized', (['app', "{'request': request, 'route': '/run_completer_command', 'expect': expect}"], {}), "(app, {'request': request, 'route':\n '/run_completer_command', 'expect': expect})\n", (5165, 5249), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((10245, 10263), 'ycmd.utils.ReadFile', 'ReadFile', (['filepath'], {}), '(filepath)\n', (10253, 10263), False, 'from ycmd.utils import ReadFile\n'), ((11015, 11045), 'ycmd.tests.clangd.RunAfterInitialized', 'RunAfterInitialized', (['app', 'test'], {}), '(app, test)\n', (11034, 11045), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((16013, 16032), 'ycmd.utils.ReadFile', 'ReadFile', (['file_path'], {}), '(file_path)\n', (16021, 16032), False, 'from ycmd.utils import ReadFile\n'), ((16607, 16637), 'ycmd.tests.clangd.RunAfterInitialized', 'RunAfterInitialized', (['app', 'test'], {}), '(app, test)\n', (16626, 16637), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((16991, 17006), 'pprint.pprint', 'pprint', (['results'], {}), '(results)\n', (16997, 17006), False, 'from pprint import pprint\n'), ((29770, 29809), 'ycmd.tests.clangd.PathToTestFile', 'PathToTestFile', (['"""FixIt_Clang_cpp11.cpp"""'], {}), "('FixIt_Clang_cpp11.cpp')\n", (29784, 29809), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((29822, 29866), 'ycmd.tests.clangd.PathToTestFile', 'PathToTestFile', (['"""objc"""', '"""FixIt_Clang_objc.m"""'], {}), "('objc', 'FixIt_Clang_objc.m')\n", (29836, 29866), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((29880, 29919), 'ycmd.tests.clangd.PathToTestFile', 'PathToTestFile', (['"""cuda"""', '"""fixit_test.cu"""'], {}), "('cuda', 'fixit_test.cu')\n", (29894, 29919), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((29932, 29960), 'ycmd.tests.clangd.PathToTestFile', 'PathToTestFile', (['"""unicode.cc"""'], {}), "('unicode.cc')\n", (29946, 29960), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((31741, 31782), 'ycmd.tests.test_utils.WaitUntilCompleterServerReady', 'WaitUntilCompleterServerReady', (['app', '"""cpp"""'], {}), "(app, 'cpp')\n", 
(31770, 31782), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((33126, 33165), 'ycmd.tests.clangd.PathToTestFile', 'PathToTestFile', (['"""FixIt_Clang_cpp11.cpp"""'], {}), "('FixIt_Clang_cpp11.cpp')\n", (33140, 33165), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((33640, 33681), 'ycmd.tests.test_utils.WaitUntilCompleterServerReady', 'WaitUntilCompleterServerReady', (['app', '"""cpp"""'], {}), "(app, 'cpp')\n", (33669, 33681), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((35933, 35963), 'ycmd.tests.clangd.RunAfterInitialized', 'RunAfterInitialized', (['app', 'test'], {}), '(app, test)\n', (35952, 35963), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((4225, 4243), 'ycmd.utils.ReadFile', 'ReadFile', (['filepath'], {}), '(filepath)\n', (4233, 4243), False, 'from ycmd.utils import ReadFile\n'), ((16790, 16829), 'ycmd.tests.clangd.RunAfterInitialized', 'RunAfterInitialized', (['app', 'receive_diags'], {}), '(app, receive_diags)\n', (16809, 16829), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((16844, 16874), 'ycmd.tests.clangd.RunAfterInitialized', 'RunAfterInitialized', (['app', 'test'], {}), '(app, test)\n', (16863, 16874), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((21777, 21801), 'hamcrest.equal_to', 'equal_to', (["{'fixits': []}"], {}), "({'fixits': []})\n", (21785, 21801), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((31237, 31276), 'ycmd.tests.clangd.PathToTestFile', 'PathToTestFile', (['"""FixIt_Clang_cpp11.cpp"""'], {}), "('FixIt_Clang_cpp11.cpp')\n", (31251, 31276), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((31397, 31436), 'ycmd.tests.clangd.PathToTestFile', 'PathToTestFile', (['"""FixIt_Clang_cpp11.cpp"""'], {}), "('FixIt_Clang_cpp11.cpp')\n", (31411, 31436), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((31600, 31656), 'ycmd.tests.test_utils.CombineRequest', 'CombineRequest', (['args', "{'event_name': 'FileReadyToParse'}"], {}), "(args, {'event_name': 'FileReadyToParse'})\n", (31614, 31656), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((33252, 33270), 'ycmd.utils.ReadFile', 'ReadFile', (['filename'], {}), '(filename)\n', (33260, 33270), False, 'from ycmd.utils import ReadFile\n'), ((33496, 33555), 'ycmd.tests.test_utils.CombineRequest', 'CombineRequest', (['request', "{'event_name': 'FileReadyToParse'}"], {}), "(request, {'event_name': 'FileReadyToParse'})\n", (33510, 33555), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((34060, 34078), 'hamcrest.equal_to', 'equal_to', (['expected'], {}), '(expected)\n', (34068, 34078), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, 
has_entry, matches_regexp\n'), ((10730, 10759), 'hamcrest.has_entry', 'has_entry', (['"""message"""', 'test[1]'], {}), "('message', test[1])\n", (10739, 10759), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((14700, 14775), 'hamcrest.matches_regexp', 'matches_regexp', (['"""int bar\\\\(int i\\\\)(?: __attribute__\\\\(\\\\(thiscall\\\\)\\\\))?"""'], {}), "('int bar\\\\(int i\\\\)(?: __attribute__\\\\(\\\\(thiscall\\\\)\\\\))?')\n", (14714, 14775), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((14836, 14911), 'hamcrest.matches_regexp', 'matches_regexp', (['"""int bar\\\\(int i\\\\)(?: __attribute__\\\\(\\\\(thiscall\\\\)\\\\))?"""'], {}), "('int bar\\\\(int i\\\\)(?: __attribute__\\\\(\\\\(thiscall\\\\)\\\\))?')\n", (14850, 14911), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((15565, 15616), 'ycmd.tests.test_utils.ErrorMatcher', 'ErrorMatcher', (['RuntimeError', '"""No hover information."""'], {}), "(RuntimeError, 'No hover information.')\n", (15577, 15616), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((16958, 16978), 'ycmd.tests.test_utils.BuildRequest', 'BuildRequest', ([], {}), '(**args)\n', (16970, 16978), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((31867, 31887), 'ycmd.tests.test_utils.BuildRequest', 'BuildRequest', ([], {}), '(**args)\n', (31879, 31887), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((32017, 32037), 'ycmd.tests.test_utils.BuildRequest', 'BuildRequest', ([], {}), '(**args)\n', (32029, 32037), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((33766, 33789), 'ycmd.tests.test_utils.BuildRequest', 'BuildRequest', ([], {}), '(**request)\n', (33778, 33789), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((33963, 33986), 'ycmd.tests.test_utils.BuildRequest', 'BuildRequest', ([], {}), '(**request)\n', (33975, 33986), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((34319, 34346), 'ycmd.tests.clangd.PathToTestFile', 'PathToTestFile', (['"""basic.cpp"""'], {}), "('basic.cpp')\n", (34333, 34346), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((3427, 3446), 'ycmd.utils.ReadFile', 'ReadFile', (['file_path'], {}), '(file_path)\n', (3435, 3446), False, 'from ycmd.utils import ReadFile\n'), ((5093, 5132), 'ycmd.tests.test_utils.ErrorMatcher', 'ErrorMatcher', (['RuntimeError', "test['res']"], {}), "(RuntimeError, test['res'])\n", (5105, 5132), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, 
WaitUntilCompleterServerReady\n'), ((10672, 10696), 'hamcrest.contains_string', 'contains_string', (['test[1]'], {}), '(test[1])\n', (10687, 10696), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((34268, 34295), 'ycmd.tests.clangd.PathToTestFile', 'PathToTestFile', (['"""basic.cpp"""'], {}), "('basic.cpp')\n", (34282, 34295), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((15053, 15092), 'ycmd.tests.clangd.PathToTestFile', 'PathToTestFile', (['"""GetType_Clang_test.cc"""'], {}), "('GetType_Clang_test.cc')\n", (15067, 15092), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((15784, 15822), 'ycmd.tests.clangd.PathToTestFile', 'PathToTestFile', (['"""GetDoc_Clang_test.cc"""'], {}), "('GetDoc_Clang_test.cc')\n", (15798, 15822), False, 'from ycmd.tests.clangd import IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized\n'), ((17854, 17900), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 16, 'column_num': 0}"], {}), "({'line_num': 16, 'column_num': 0})\n", (17865, 17900), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((18744, 18791), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 25, 'column_num': 14}"], {}), "({'line_num': 25, 'column_num': 14})\n", (18755, 18791), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((19279, 19325), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 35, 'column_num': 7}"], {}), "({'line_num': 35, 'column_num': 7})\n", (19290, 19325), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((19799, 19845), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 40, 'column_num': 6}"], {}), "({'line_num': 40, 'column_num': 6})\n", (19810, 19845), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((20637, 20683), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 48, 'column_num': 3}"], {}), "({'line_num': 48, 'column_num': 3})\n", (20648, 20683), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((21082, 21128), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 48, 'column_num': 3}"], {}), "({'line_num': 48, 'column_num': 3})\n", (21093, 21128), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((21600, 21645), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 5, 'column_num': 3}"], {}), "({'line_num': 5, 'column_num': 3})\n", (21611, 21645), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((22323, 22370), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 15}"], {}), "({'line_num': 54, 'column_num': 15})\n", (22334, 22370), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((23084, 23131), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 15}"], {}), "({'line_num': 54, 'column_num': 15})\n", (23095, 23131), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, 
matches_regexp\n'), ((23529, 23576), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 15}"], {}), "({'line_num': 54, 'column_num': 15})\n", (23540, 23576), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((24122, 24169), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 51}"], {}), "({'line_num': 54, 'column_num': 51})\n", (24133, 24169), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((24883, 24930), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 51}"], {}), "({'line_num': 54, 'column_num': 51})\n", (24894, 24930), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((25328, 25375), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 51}"], {}), "({'line_num': 54, 'column_num': 51})\n", (25339, 25375), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((25856, 25903), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 21, 'column_num': 16}"], {}), "({'line_num': 21, 'column_num': 16})\n", (25867, 25903), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((26108, 26160), 'hamcrest.contains_string', 'contains_string', (['"""parentheses around the assignment"""'], {}), "('parentheses around the assignment')\n", (26123, 26160), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((26482, 26503), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(60)', '(1)'], {}), '(60, 1)\n', (26496, 26503), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((26590, 26611), 'hamcrest.contains_string', 'contains_string', (['"""=="""'], {}), "('==')\n", (26605, 26611), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((26803, 26824), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(60)', '(1)'], {}), '(60, 1)\n', (26817, 26824), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((27009, 27054), 'hamcrest.has_entries', 'has_entries', (["{'command': 'clangd.applyTweak'}"], {}), "({'command': 'clangd.applyTweak'})\n", (27020, 27054), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((27275, 27366), 'hamcrest.contains_string', 'contains_string', (['"""change \'SpellingIsNotMyStringPiont\' to \'SpellingIsNotMyStrongPoint\'"""'], {}), '(\n "change \'SpellingIsNotMyStringPiont\' to \'SpellingIsNotMyStrongPoint\'")\n', (27290, 27366), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((27614, 27635), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(72)', '(9)'], {}), '(72, 9)\n', (27628, 27635), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((27794, 
27835), 'hamcrest.contains_string', 'contains_string', (['"""change \'int\' to \'void\'"""'], {}), '("change \'int\' to \'void\'")\n', (27809, 27835), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((28040, 28061), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(3)', '(12)'], {}), '(3, 12)\n', (28054, 28061), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((26245, 26266), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(59)', '(8)'], {}), '(59, 8)\n', (26259, 26266), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((26294, 26315), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(59)', '(8)'], {}), '(59, 8)\n', (26308, 26315), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((26374, 26396), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(61)', '(12)'], {}), '(61, 12)\n', (26388, 26396), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((26424, 26446), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(61)', '(12)'], {}), '(61, 12)\n', (26438, 26446), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((26697, 26718), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(60)', '(8)'], {}), '(60, 8)\n', (26711, 26718), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((26746, 26767), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(60)', '(9)'], {}), '(60, 9)\n', (26760, 26767), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((27507, 27528), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(72)', '(9)'], {}), '(72, 9)\n', (27521, 27528), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((27556, 27578), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(72)', '(35)'], {}), '(72, 35)\n', (27570, 27578), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((27934, 27955), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(3)', '(12)'], {}), '(3, 12)\n', (27948, 27955), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((27983, 28004), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(3)', '(15)'], {}), '(3, 15)\n', (27997, 28004), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, 
LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((28378, 28399), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(84)', '(3)'], {}), '(84, 3)\n', (28392, 28399), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((28427, 28448), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(84)', '(3)'], {}), '(84, 3)\n', (28441, 28448), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((28511, 28533), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(84)', '(10)'], {}), '(84, 10)\n', (28525, 28533), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((28561, 28583), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(84)', '(22)'], {}), '(84, 22)\n', (28575, 28583), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((28892, 28914), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(80)', '(19)'], {}), '(80, 19)\n', (28906, 28914), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((28942, 28964), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(80)', '(36)'], {}), '(80, 36)\n', (28956, 28964), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((29258, 29279), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(83)', '(3)'], {}), '(83, 3)\n', (29272, 29279), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((29308, 29330), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(83)', '(17)'], {}), '(83, 17)\n', (29322, 29330), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((29620, 29641), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(80)', '(1)'], {}), '(80, 1)\n', (29634, 29641), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((29669, 29690), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(80)', '(6)'], {}), '(80, 6)\n', (29683, 29690), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((17305, 17334), 'hamcrest.equal_to', 'equal_to', (['"""static_cast<int>("""'], {}), "('static_cast<int>(')\n", (17313, 17334), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((17601, 17614), 'hamcrest.equal_to', 'equal_to', (['""")"""'], {}), "(')')\n", (17609, 17614), False, 'from hamcrest import assert_that, 
contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((18199, 18228), 'hamcrest.equal_to', 'equal_to', (['"""static_cast<int>("""'], {}), "('static_cast<int>(')\n", (18207, 18228), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((18493, 18506), 'hamcrest.equal_to', 'equal_to', (['""")"""'], {}), "(')')\n", (18501, 18506), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((19029, 19041), 'hamcrest.equal_to', 'equal_to', (['""""""'], {}), "('')\n", (19037, 19041), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((19546, 19561), 'hamcrest.equal_to', 'equal_to', (['"""foo"""'], {}), "('foo')\n", (19554, 19561), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((20080, 20092), 'hamcrest.equal_to', 'equal_to', (['""""""'], {}), "('')\n", (20088, 20092), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((20371, 20384), 'hamcrest.equal_to', 'equal_to', (['"""~"""'], {}), "('~')\n", (20379, 20384), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((20805, 20827), 'hamcrest.equal_to', 'equal_to', (['"""= default;"""'], {}), "('= default;')\n", (20813, 20827), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((21350, 21364), 'hamcrest.equal_to', 'equal_to', (['"""id"""'], {}), "('id')\n", (21358, 21364), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((22054, 22069), 'hamcrest.equal_to', 'equal_to', (['"""foo"""'], {}), "('foo')\n", (22062, 22069), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((22523, 22535), 'hamcrest.equal_to', 'equal_to', (['""""""'], {}), "('')\n", (22531, 22535), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((22816, 22829), 'hamcrest.equal_to', 'equal_to', (['"""~"""'], {}), "('~')\n", (22824, 22829), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((23253, 23275), 'hamcrest.equal_to', 'equal_to', (['"""= default;"""'], {}), "('= default;')\n", (23261, 23275), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((23853, 23868), 'hamcrest.equal_to', 'equal_to', (['"""foo"""'], {}), "('foo')\n", (23861, 23868), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((24322, 24334), 'hamcrest.equal_to', 'equal_to', (['""""""'], {}), "('')\n", (24330, 24334), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((24615, 24628), 'hamcrest.equal_to', 'equal_to', (['"""~"""'], {}), "('~')\n", (24623, 24628), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((25052, 25074), 'hamcrest.equal_to', 'equal_to', (['"""= default;"""'], {}), "('= default;')\n", (25060, 25074), 
False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((25604, 25617), 'hamcrest.equal_to', 'equal_to', (['"""="""'], {}), "('=')\n", (25612, 25617), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((34672, 34692), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(1)', '(8)'], {}), '(1, 8)\n', (34686, 34692), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((34722, 34743), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(1)', '(11)'], {}), '(1, 11)\n', (34736, 34743), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((34808, 34828), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(9)', '(3)'], {}), '(9, 3)\n', (34822, 34828), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((34858, 34878), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(9)', '(6)'], {}), '(9, 6)\n', (34872, 34878), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((34944, 34965), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(12)', '(2)'], {}), '(12, 2)\n', (34958, 34965), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((34995, 35016), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(15)', '(1)'], {}), '(15, 1)\n', (35009, 35016), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((35081, 35102), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(15)', '(8)'], {}), '(15, 8)\n', (35095, 35102), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((35133, 35155), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(15)', '(11)'], {}), '(15, 11)\n', (35147, 35155), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((35218, 35240), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(15)', '(46)'], {}), '(15, 46)\n', (35232, 35240), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((35271, 35292), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(16)', '(1)'], {}), '(16, 1)\n', (35285, 35292), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((35358, 35379), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(17)', '(3)'], {}), '(17, 3)\n', (35372, 35379), False, 'from 
ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((35409, 35430), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(17)', '(6)'], {}), '(17, 6)\n', (35423, 35430), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((35492, 35514), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(17)', '(14)'], {}), '(17, 14)\n', (35506, 35514), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((35544, 35566), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(17)', '(15)'], {}), '(17, 15)\n', (35558, 35566), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((35629, 35651), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(17)', '(17)'], {}), '(17, 17)\n', (35643, 35651), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((35681, 35703), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(17)', '(17)'], {}), '(17, 17)\n', (35695, 35703), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((35766, 35788), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(17)', '(19)'], {}), '(17, 19)\n', (35780, 35788), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((35818, 35840), 'ycmd.tests.test_utils.LineColMatcher', 'LineColMatcher', (['(17)', '(19)'], {}), '(17, 19)\n', (35832, 35840), False, 'from ycmd.tests.test_utils import BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady\n'), ((17393, 17440), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 16, 'column_num': 10}"], {}), "({'line_num': 16, 'column_num': 10})\n", (17404, 17440), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((17467, 17514), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 16, 'column_num': 10}"], {}), "({'line_num': 16, 'column_num': 10})\n", (17478, 17514), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((17673, 17720), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 16, 'column_num': 13}"], {}), "({'line_num': 16, 'column_num': 13})\n", (17684, 17720), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((17747, 17794), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 16, 'column_num': 13}"], {}), "({'line_num': 16, 'column_num': 13})\n", (17758, 17794), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((18287, 18333), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 26, 'column_num': 7}"], {}), 
"({'line_num': 26, 'column_num': 7})\n", (18298, 18333), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((18360, 18406), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 26, 'column_num': 7}"], {}), "({'line_num': 26, 'column_num': 7})\n", (18371, 18406), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((18565, 18611), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 28, 'column_num': 2}"], {}), "({'line_num': 28, 'column_num': 2})\n", (18576, 18611), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((18638, 18684), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 28, 'column_num': 2}"], {}), "({'line_num': 28, 'column_num': 2})\n", (18649, 18684), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((19100, 19146), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 35, 'column_num': 7}"], {}), "({'line_num': 35, 'column_num': 7})\n", (19111, 19146), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((19173, 19219), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 35, 'column_num': 9}"], {}), "({'line_num': 35, 'column_num': 9})\n", (19184, 19219), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((19620, 19666), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 40, 'column_num': 6}"], {}), "({'line_num': 40, 'column_num': 6})\n", (19631, 19666), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((19693, 19739), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 40, 'column_num': 9}"], {}), "({'line_num': 40, 'column_num': 9})\n", (19704, 19739), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((20155, 20201), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 48, 'column_num': 3}"], {}), "({'line_num': 48, 'column_num': 3})\n", (20166, 20201), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((20230, 20276), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 48, 'column_num': 4}"], {}), "({'line_num': 48, 'column_num': 4})\n", (20241, 20276), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((20447, 20493), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 48, 'column_num': 9}"], {}), "({'line_num': 48, 'column_num': 9})\n", (20458, 20493), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((20522, 20568), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 48, 'column_num': 9}"], {}), "({'line_num': 48, 'column_num': 9})\n", (20533, 20568), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((20890, 20937), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 48, 'column_num': 15}"], {}), "({'line_num': 48, 'column_num': 15})\n", (20901, 20937), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), 
((20966, 21013), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 48, 'column_num': 17}"], {}), "({'line_num': 48, 'column_num': 17})\n", (20977, 21013), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((21423, 21468), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 5, 'column_num': 3}"], {}), "({'line_num': 5, 'column_num': 3})\n", (21434, 21468), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((21495, 21540), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 5, 'column_num': 3}"], {}), "({'line_num': 5, 'column_num': 3})\n", (21506, 21540), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((22132, 22179), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 16}"], {}), "({'line_num': 54, 'column_num': 16})\n", (22143, 22179), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((22208, 22255), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 19}"], {}), "({'line_num': 54, 'column_num': 19})\n", (22219, 22255), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((22598, 22645), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 52}"], {}), "({'line_num': 54, 'column_num': 52})\n", (22609, 22645), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((22674, 22721), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 53}"], {}), "({'line_num': 54, 'column_num': 53})\n", (22685, 22721), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((22892, 22939), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 58}"], {}), "({'line_num': 54, 'column_num': 58})\n", (22903, 22939), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((22968, 23015), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 58}"], {}), "({'line_num': 54, 'column_num': 58})\n", (22979, 23015), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((23338, 23385), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 64}"], {}), "({'line_num': 54, 'column_num': 64})\n", (23349, 23385), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((23414, 23461), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 67}"], {}), "({'line_num': 54, 'column_num': 67})\n", (23425, 23461), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((23931, 23978), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 16}"], {}), "({'line_num': 54, 'column_num': 16})\n", (23942, 23978), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((24007, 24054), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 19}"], {}), "({'line_num': 54, 'column_num': 19})\n", (24018, 24054), False, 
'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((24397, 24444), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 52}"], {}), "({'line_num': 54, 'column_num': 52})\n", (24408, 24444), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((24473, 24520), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 53}"], {}), "({'line_num': 54, 'column_num': 53})\n", (24484, 24520), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((24691, 24738), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 58}"], {}), "({'line_num': 54, 'column_num': 58})\n", (24702, 24738), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((24767, 24814), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 58}"], {}), "({'line_num': 54, 'column_num': 58})\n", (24778, 24814), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((25137, 25184), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 64}"], {}), "({'line_num': 54, 'column_num': 64})\n", (25148, 25184), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((25213, 25260), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 54, 'column_num': 67}"], {}), "({'line_num': 54, 'column_num': 67})\n", (25224, 25260), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((25676, 25722), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 21, 'column_num': 9}"], {}), "({'line_num': 21, 'column_num': 9})\n", (25687, 25722), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n'), ((25749, 25796), 'hamcrest.has_entries', 'has_entries', (["{'line_num': 21, 'column_num': 11}"], {}), "({'line_num': 21, 'column_num': 11})\n", (25760, 25796), False, 'from hamcrest import assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp\n')] |
TheRensselaerIDEA/covid19_tweet_ids | sentiment/config.py | fee7d951b11cf2650e48668614c30672179ab3af | """
Config class containing all the settings for running the sentiment scoring tool
"""
import jsonpickle
class Config(object):
"""Container for sentiment scoring tool settings.
"""
def __init__(self):
"""Initializes the Config instance.
"""
#Elasticsearch settings
self.elasticsearch_host = ""
self.elasticsearch_verify_certs = False
self.elasticsearch_index_name = ""
self.elasticsearch_batch_size = 500
self.elasticsearch_timeout_secs = 30
#Processing settings
self.sentiment_modelpath = ""
self.sentiment_max_seq_length = 512
self.sleep_idle_secs = 5
self.sleep_not_idle_secs = 0.01
self.log_level = "ERROR"
@staticmethod
def load(filepath):
"""Loads the config from a JSON file.
Args:
filepath: path of the JSON file.
"""
with open(filepath, "r") as file:
json = file.read()
config = jsonpickle.decode(json)
return config | [((987, 1010), 'jsonpickle.decode', 'jsonpickle.decode', (['json'], {}), '(json)\n', (1004, 1010), False, 'import jsonpickle\n')] |
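# --- Illustrative usage sketch (not part of the original module) ---
# Shows how the Config class above could be round-tripped with jsonpickle:
# Config.load() decodes a JSON file, and jsonpickle.encode() is its natural
# counterpart for writing one. The file path and host value are hypothetical.
import jsonpickle

def save_config(config, filepath):
    """Serializes a Config instance to a JSON file (inverse of Config.load)."""
    with open(filepath, "w") as file:
        file.write(jsonpickle.encode(config))

example_config = Config()
example_config.elasticsearch_host = "localhost:9200"  # hypothetical host
save_config(example_config, "sentiment_config.json")    # hypothetical path
reloaded = Config.load("sentiment_config.json")
assert reloaded.elasticsearch_host == "localhost:9200"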
GuoSuiming/mindspore | tests/ut/python/dataset/test_invert.py | 48afc4cfa53d970c0b20eedfb46e039db2a133d5 | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
Testing Invert op in DE
"""
import numpy as np
import mindspore.dataset as ds
import mindspore.dataset.transforms.py_transforms
import mindspore.dataset.vision.py_transforms as F
import mindspore.dataset.vision.c_transforms as C
from mindspore import log as logger
from util import visualize_list, save_and_check_md5, diff_mse
DATA_DIR = "../data/dataset/testImageNetData/train/"
GENERATE_GOLDEN = False
def test_invert_py(plot=False):
"""
Test Invert python op
"""
logger.info("Test Invert Python op")
# Original Images
data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
transforms_original = mindspore.dataset.transforms.py_transforms.Compose([F.Decode(),
F.Resize((224, 224)),
F.ToTensor()])
ds_original = data_set.map(operations=transforms_original, input_columns="image")
ds_original = ds_original.batch(512)
for idx, (image, _) in enumerate(ds_original):
if idx == 0:
images_original = np.transpose(image.asnumpy(), (0, 2, 3, 1))
else:
images_original = np.append(images_original,
np.transpose(image.asnumpy(), (0, 2, 3, 1)),
axis=0)
# Color Inverted Images
data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
transforms_invert = mindspore.dataset.transforms.py_transforms.Compose([F.Decode(),
F.Resize((224, 224)),
F.Invert(),
F.ToTensor()])
ds_invert = data_set.map(operations=transforms_invert, input_columns="image")
ds_invert = ds_invert.batch(512)
for idx, (image, _) in enumerate(ds_invert):
if idx == 0:
images_invert = np.transpose(image.asnumpy(), (0, 2, 3, 1))
else:
images_invert = np.append(images_invert,
np.transpose(image.asnumpy(), (0, 2, 3, 1)),
axis=0)
num_samples = images_original.shape[0]
mse = np.zeros(num_samples)
for i in range(num_samples):
mse[i] = np.mean((images_invert[i] - images_original[i]) ** 2)
logger.info("MSE= {}".format(str(np.mean(mse))))
if plot:
visualize_list(images_original, images_invert)
def test_invert_c(plot=False):
"""
Test Invert Cpp op
"""
logger.info("Test Invert cpp op")
# Original Images
data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
transforms_original = [C.Decode(), C.Resize(size=[224, 224])]
ds_original = data_set.map(operations=transforms_original, input_columns="image")
ds_original = ds_original.batch(512)
for idx, (image, _) in enumerate(ds_original):
if idx == 0:
images_original = image.asnumpy()
else:
images_original = np.append(images_original,
image.asnumpy(),
axis=0)
# Invert Images
data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
transform_invert = [C.Decode(), C.Resize(size=[224, 224]),
C.Invert()]
ds_invert = data_set.map(operations=transform_invert, input_columns="image")
ds_invert = ds_invert.batch(512)
for idx, (image, _) in enumerate(ds_invert):
if idx == 0:
images_invert = image.asnumpy()
else:
images_invert = np.append(images_invert,
image.asnumpy(),
axis=0)
if plot:
visualize_list(images_original, images_invert)
num_samples = images_original.shape[0]
mse = np.zeros(num_samples)
for i in range(num_samples):
mse[i] = diff_mse(images_invert[i], images_original[i])
logger.info("MSE= {}".format(str(np.mean(mse))))
def test_invert_py_c(plot=False):
"""
Test Invert Cpp op and python op
"""
logger.info("Test Invert cpp and python op")
# Invert Images in cpp
data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
data_set = data_set.map(operations=[C.Decode(), C.Resize((224, 224))], input_columns=["image"])
ds_c_invert = data_set.map(operations=C.Invert(), input_columns="image")
ds_c_invert = ds_c_invert.batch(512)
for idx, (image, _) in enumerate(ds_c_invert):
if idx == 0:
images_c_invert = image.asnumpy()
else:
images_c_invert = np.append(images_c_invert,
image.asnumpy(),
axis=0)
# invert images in python
data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
data_set = data_set.map(operations=[C.Decode(), C.Resize((224, 224))], input_columns=["image"])
transforms_p_invert = mindspore.dataset.transforms.py_transforms.Compose([lambda img: img.astype(np.uint8),
F.ToPIL(),
F.Invert(),
np.array])
ds_p_invert = data_set.map(operations=transforms_p_invert, input_columns="image")
ds_p_invert = ds_p_invert.batch(512)
for idx, (image, _) in enumerate(ds_p_invert):
if idx == 0:
images_p_invert = image.asnumpy()
else:
images_p_invert = np.append(images_p_invert,
image.asnumpy(),
axis=0)
num_samples = images_c_invert.shape[0]
mse = np.zeros(num_samples)
for i in range(num_samples):
mse[i] = diff_mse(images_p_invert[i], images_c_invert[i])
logger.info("MSE= {}".format(str(np.mean(mse))))
if plot:
visualize_list(images_c_invert, images_p_invert, visualize_mode=2)
def test_invert_one_channel():
"""
Test Invert cpp op with one channel image
"""
logger.info("Test Invert C Op With One Channel Images")
c_op = C.Invert()
try:
data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
data_set = data_set.map(operations=[C.Decode(), C.Resize((224, 224)),
lambda img: np.array(img[:, :, 0])], input_columns=["image"])
data_set.map(operations=c_op, input_columns="image")
except RuntimeError as e:
logger.info("Got an exception in DE: {}".format(str(e)))
assert "The shape" in str(e)
def test_invert_md5_py():
"""
Test Invert python op with md5 check
"""
logger.info("Test Invert python op with md5 check")
# Generate dataset
data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
transforms_invert = mindspore.dataset.transforms.py_transforms.Compose([F.Decode(),
F.Invert(),
F.ToTensor()])
data = data_set.map(operations=transforms_invert, input_columns="image")
# Compare with expected md5 from images
filename = "invert_01_result_py.npz"
save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)
def test_invert_md5_c():
"""
Test Invert cpp op with md5 check
"""
logger.info("Test Invert cpp op with md5 check")
# Generate dataset
data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
transforms_invert = [C.Decode(),
C.Resize(size=[224, 224]),
C.Invert(),
F.ToTensor()]
data = data_set.map(operations=transforms_invert, input_columns="image")
# Compare with expected md5 from images
filename = "invert_01_result_c.npz"
save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)
if __name__ == "__main__":
test_invert_py(plot=False)
test_invert_c(plot=False)
test_invert_py_c(plot=False)
test_invert_one_channel()
test_invert_md5_py()
test_invert_md5_c()
| [((1160, 1196), 'mindspore.log.info', 'logger.info', (['"""Test Invert Python op"""'], {}), "('Test Invert Python op')\n", (1171, 1196), True, 'from mindspore import log as logger\n'), ((1235, 1293), 'mindspore.dataset.ImageFolderDataset', 'ds.ImageFolderDataset', ([], {'dataset_dir': 'DATA_DIR', 'shuffle': '(False)'}), '(dataset_dir=DATA_DIR, shuffle=False)\n', (1256, 1293), True, 'import mindspore.dataset as ds\n'), ((2102, 2160), 'mindspore.dataset.ImageFolderDataset', 'ds.ImageFolderDataset', ([], {'dataset_dir': 'DATA_DIR', 'shuffle': '(False)'}), '(dataset_dir=DATA_DIR, shuffle=False)\n', (2123, 2160), True, 'import mindspore.dataset as ds\n'), ((3041, 3062), 'numpy.zeros', 'np.zeros', (['num_samples'], {}), '(num_samples)\n', (3049, 3062), True, 'import numpy as np\n'), ((3365, 3398), 'mindspore.log.info', 'logger.info', (['"""Test Invert cpp op"""'], {}), "('Test Invert cpp op')\n", (3376, 3398), True, 'from mindspore import log as logger\n'), ((3437, 3495), 'mindspore.dataset.ImageFolderDataset', 'ds.ImageFolderDataset', ([], {'dataset_dir': 'DATA_DIR', 'shuffle': '(False)'}), '(dataset_dir=DATA_DIR, shuffle=False)\n', (3458, 3495), True, 'import mindspore.dataset as ds\n'), ((4023, 4081), 'mindspore.dataset.ImageFolderDataset', 'ds.ImageFolderDataset', ([], {'dataset_dir': 'DATA_DIR', 'shuffle': '(False)'}), '(dataset_dir=DATA_DIR, shuffle=False)\n', (4044, 4081), True, 'import mindspore.dataset as ds\n'), ((4707, 4728), 'numpy.zeros', 'np.zeros', (['num_samples'], {}), '(num_samples)\n', (4715, 4728), True, 'import numpy as np\n'), ((4972, 5016), 'mindspore.log.info', 'logger.info', (['"""Test Invert cpp and python op"""'], {}), "('Test Invert cpp and python op')\n", (4983, 5016), True, 'from mindspore import log as logger\n'), ((5060, 5118), 'mindspore.dataset.ImageFolderDataset', 'ds.ImageFolderDataset', ([], {'dataset_dir': 'DATA_DIR', 'shuffle': '(False)'}), '(dataset_dir=DATA_DIR, shuffle=False)\n', (5081, 5118), True, 'import mindspore.dataset as ds\n'), ((5680, 5738), 'mindspore.dataset.ImageFolderDataset', 'ds.ImageFolderDataset', ([], {'dataset_dir': 'DATA_DIR', 'shuffle': '(False)'}), '(dataset_dir=DATA_DIR, shuffle=False)\n', (5701, 5738), True, 'import mindspore.dataset as ds\n'), ((6698, 6719), 'numpy.zeros', 'np.zeros', (['num_samples'], {}), '(num_samples)\n', (6706, 6719), True, 'import numpy as np\n'), ((7062, 7117), 'mindspore.log.info', 'logger.info', (['"""Test Invert C Op With One Channel Images"""'], {}), "('Test Invert C Op With One Channel Images')\n", (7073, 7117), True, 'from mindspore import log as logger\n'), ((7130, 7140), 'mindspore.dataset.vision.c_transforms.Invert', 'C.Invert', ([], {}), '()\n', (7138, 7140), True, 'import mindspore.dataset.vision.c_transforms as C\n'), ((7697, 7748), 'mindspore.log.info', 'logger.info', (['"""Test Invert python op with md5 check"""'], {}), "('Test Invert python op with md5 check')\n", (7708, 7748), True, 'from mindspore import log as logger\n'), ((7788, 7846), 'mindspore.dataset.ImageFolderDataset', 'ds.ImageFolderDataset', ([], {'dataset_dir': 'DATA_DIR', 'shuffle': '(False)'}), '(dataset_dir=DATA_DIR, shuffle=False)\n', (7809, 7846), True, 'import mindspore.dataset as ds\n'), ((8282, 8349), 'util.save_and_check_md5', 'save_and_check_md5', (['data', 'filename'], {'generate_golden': 'GENERATE_GOLDEN'}), '(data, filename, generate_golden=GENERATE_GOLDEN)\n', (8300, 8349), False, 'from util import visualize_list, save_and_check_md5, diff_mse\n'), ((8435, 8483), 'mindspore.log.info', 'logger.info', (['"""Test 
Invert cpp op with md5 check"""'], {}), "('Test Invert cpp op with md5 check')\n", (8446, 8483), True, 'from mindspore import log as logger\n'), ((8523, 8581), 'mindspore.dataset.ImageFolderDataset', 'ds.ImageFolderDataset', ([], {'dataset_dir': 'DATA_DIR', 'shuffle': '(False)'}), '(dataset_dir=DATA_DIR, shuffle=False)\n', (8544, 8581), True, 'import mindspore.dataset as ds\n'), ((8914, 8981), 'util.save_and_check_md5', 'save_and_check_md5', (['data', 'filename'], {'generate_golden': 'GENERATE_GOLDEN'}), '(data, filename, generate_golden=GENERATE_GOLDEN)\n', (8932, 8981), False, 'from util import visualize_list, save_and_check_md5, diff_mse\n'), ((3113, 3166), 'numpy.mean', 'np.mean', (['((images_invert[i] - images_original[i]) ** 2)'], {}), '((images_invert[i] - images_original[i]) ** 2)\n', (3120, 3166), True, 'import numpy as np\n'), ((3242, 3288), 'util.visualize_list', 'visualize_list', (['images_original', 'images_invert'], {}), '(images_original, images_invert)\n', (3256, 3288), False, 'from util import visualize_list, save_and_check_md5, diff_mse\n'), ((3524, 3534), 'mindspore.dataset.vision.c_transforms.Decode', 'C.Decode', ([], {}), '()\n', (3532, 3534), True, 'import mindspore.dataset.vision.c_transforms as C\n'), ((3536, 3561), 'mindspore.dataset.vision.c_transforms.Resize', 'C.Resize', ([], {'size': '[224, 224]'}), '(size=[224, 224])\n', (3544, 3561), True, 'import mindspore.dataset.vision.c_transforms as C\n'), ((4107, 4117), 'mindspore.dataset.vision.c_transforms.Decode', 'C.Decode', ([], {}), '()\n', (4115, 4117), True, 'import mindspore.dataset.vision.c_transforms as C\n'), ((4119, 4144), 'mindspore.dataset.vision.c_transforms.Resize', 'C.Resize', ([], {'size': '[224, 224]'}), '(size=[224, 224])\n', (4127, 4144), True, 'import mindspore.dataset.vision.c_transforms as C\n'), ((4170, 4180), 'mindspore.dataset.vision.c_transforms.Invert', 'C.Invert', ([], {}), '()\n', (4178, 4180), True, 'import mindspore.dataset.vision.c_transforms as C\n'), ((4606, 4652), 'util.visualize_list', 'visualize_list', (['images_original', 'images_invert'], {}), '(images_original, images_invert)\n', (4620, 4652), False, 'from util import visualize_list, save_and_check_md5, diff_mse\n'), ((4779, 4825), 'util.diff_mse', 'diff_mse', (['images_invert[i]', 'images_original[i]'], {}), '(images_invert[i], images_original[i])\n', (4787, 4825), False, 'from util import visualize_list, save_and_check_md5, diff_mse\n'), ((6770, 6818), 'util.diff_mse', 'diff_mse', (['images_p_invert[i]', 'images_c_invert[i]'], {}), '(images_p_invert[i], images_c_invert[i])\n', (6778, 6818), False, 'from util import visualize_list, save_and_check_md5, diff_mse\n'), ((6894, 6960), 'util.visualize_list', 'visualize_list', (['images_c_invert', 'images_p_invert'], {'visualize_mode': '(2)'}), '(images_c_invert, images_p_invert, visualize_mode=2)\n', (6908, 6960), False, 'from util import visualize_list, save_and_check_md5, diff_mse\n'), ((7170, 7228), 'mindspore.dataset.ImageFolderDataset', 'ds.ImageFolderDataset', ([], {'dataset_dir': 'DATA_DIR', 'shuffle': '(False)'}), '(dataset_dir=DATA_DIR, shuffle=False)\n', (7191, 7228), True, 'import mindspore.dataset as ds\n'), ((8608, 8618), 'mindspore.dataset.vision.c_transforms.Decode', 'C.Decode', ([], {}), '()\n', (8616, 8618), True, 'import mindspore.dataset.vision.c_transforms as C\n'), ((8645, 8670), 'mindspore.dataset.vision.c_transforms.Resize', 'C.Resize', ([], {'size': '[224, 224]'}), '(size=[224, 224])\n', (8653, 8670), True, 'import mindspore.dataset.vision.c_transforms as 
C\n'), ((8697, 8707), 'mindspore.dataset.vision.c_transforms.Invert', 'C.Invert', ([], {}), '()\n', (8705, 8707), True, 'import mindspore.dataset.vision.c_transforms as C\n'), ((8734, 8746), 'mindspore.dataset.vision.py_transforms.ToTensor', 'F.ToTensor', ([], {}), '()\n', (8744, 8746), True, 'import mindspore.dataset.vision.py_transforms as F\n'), ((1373, 1383), 'mindspore.dataset.vision.py_transforms.Decode', 'F.Decode', ([], {}), '()\n', (1381, 1383), True, 'import mindspore.dataset.vision.py_transforms as F\n'), ((1463, 1483), 'mindspore.dataset.vision.py_transforms.Resize', 'F.Resize', (['(224, 224)'], {}), '((224, 224))\n', (1471, 1483), True, 'import mindspore.dataset.vision.py_transforms as F\n'), ((1563, 1575), 'mindspore.dataset.vision.py_transforms.ToTensor', 'F.ToTensor', ([], {}), '()\n', (1573, 1575), True, 'import mindspore.dataset.vision.py_transforms as F\n'), ((2238, 2248), 'mindspore.dataset.vision.py_transforms.Decode', 'F.Decode', ([], {}), '()\n', (2246, 2248), True, 'import mindspore.dataset.vision.py_transforms as F\n'), ((2326, 2346), 'mindspore.dataset.vision.py_transforms.Resize', 'F.Resize', (['(224, 224)'], {}), '((224, 224))\n', (2334, 2346), True, 'import mindspore.dataset.vision.py_transforms as F\n'), ((2424, 2434), 'mindspore.dataset.vision.py_transforms.Invert', 'F.Invert', ([], {}), '()\n', (2432, 2434), True, 'import mindspore.dataset.vision.py_transforms as F\n'), ((2512, 2524), 'mindspore.dataset.vision.py_transforms.ToTensor', 'F.ToTensor', ([], {}), '()\n', (2522, 2524), True, 'import mindspore.dataset.vision.py_transforms as F\n'), ((5262, 5272), 'mindspore.dataset.vision.c_transforms.Invert', 'C.Invert', ([], {}), '()\n', (5270, 5272), True, 'import mindspore.dataset.vision.c_transforms as C\n'), ((6030, 6039), 'mindspore.dataset.vision.py_transforms.ToPIL', 'F.ToPIL', ([], {}), '()\n', (6037, 6039), True, 'import mindspore.dataset.vision.py_transforms as F\n'), ((6119, 6129), 'mindspore.dataset.vision.py_transforms.Invert', 'F.Invert', ([], {}), '()\n', (6127, 6129), True, 'import mindspore.dataset.vision.py_transforms as F\n'), ((7924, 7934), 'mindspore.dataset.vision.py_transforms.Decode', 'F.Decode', ([], {}), '()\n', (7932, 7934), True, 'import mindspore.dataset.vision.py_transforms as F\n'), ((8012, 8022), 'mindspore.dataset.vision.py_transforms.Invert', 'F.Invert', ([], {}), '()\n', (8020, 8022), True, 'import mindspore.dataset.vision.py_transforms as F\n'), ((8100, 8112), 'mindspore.dataset.vision.py_transforms.ToTensor', 'F.ToTensor', ([], {}), '()\n', (8110, 8112), True, 'import mindspore.dataset.vision.py_transforms as F\n'), ((3204, 3216), 'numpy.mean', 'np.mean', (['mse'], {}), '(mse)\n', (3211, 3216), True, 'import numpy as np\n'), ((4863, 4875), 'numpy.mean', 'np.mean', (['mse'], {}), '(mse)\n', (4870, 4875), True, 'import numpy as np\n'), ((5159, 5169), 'mindspore.dataset.vision.c_transforms.Decode', 'C.Decode', ([], {}), '()\n', (5167, 5169), True, 'import mindspore.dataset.vision.c_transforms as C\n'), ((5171, 5191), 'mindspore.dataset.vision.c_transforms.Resize', 'C.Resize', (['(224, 224)'], {}), '((224, 224))\n', (5179, 5191), True, 'import mindspore.dataset.vision.c_transforms as C\n'), ((5779, 5789), 'mindspore.dataset.vision.c_transforms.Decode', 'C.Decode', ([], {}), '()\n', (5787, 5789), True, 'import mindspore.dataset.vision.c_transforms as C\n'), ((5791, 5811), 'mindspore.dataset.vision.c_transforms.Resize', 'C.Resize', (['(224, 224)'], {}), '((224, 224))\n', (5799, 5811), True, 'import 
mindspore.dataset.vision.c_transforms as C\n'), ((6856, 6868), 'numpy.mean', 'np.mean', (['mse'], {}), '(mse)\n', (6863, 6868), True, 'import numpy as np\n'), ((7273, 7283), 'mindspore.dataset.vision.c_transforms.Decode', 'C.Decode', ([], {}), '()\n', (7281, 7283), True, 'import mindspore.dataset.vision.c_transforms as C\n'), ((7285, 7305), 'mindspore.dataset.vision.c_transforms.Resize', 'C.Resize', (['(224, 224)'], {}), '((224, 224))\n', (7293, 7305), True, 'import mindspore.dataset.vision.c_transforms as C\n'), ((7363, 7387), 'numpy.array', 'np.array', (['img[:, :, (0)]'], {}), '(img[:, :, (0)])\n', (7371, 7387), True, 'import numpy as np\n')] |
aliciawyy/dmining | puzzle/tests/test_candy.py | 513f6f036f8f258281e1282fef052a74bf9cc3d3 | from parameterized import parameterized
from numpy.testing import TestCase
from .. import candy
class TestCollectCandies(TestCase):
@parameterized.expand(
[(5, 5, 12,
[[2, 1, 1, 1, 1], [2, 2, 1, 1, 1], [1, 2, 1, 1, 1],
[2, 2, 1, 1, 3], [2, 2, 2, 2, 2]])]
)
def test_candy(self, n, m, t, candies):
collector = candy.CollectCandies(n, m, t, candies)
for pos, expected in [[(1, 1), [(0, 1), (2, 1), (1, 0), (1, 2)]],
[(0, 0), [(1, 0), (0, 1)]],
[(4, 4), [(3, 4), (4, 3)]]]:
self.assertListEqual(
collector.get_next_positions(pos), expected + [pos])
self.assertEqual(collector.get_max_sum(), 27)
| [((140, 266), 'parameterized.parameterized.expand', 'parameterized.expand', (['[(5, 5, 12, [[2, 1, 1, 1, 1], [2, 2, 1, 1, 1], [1, 2, 1, 1, 1], [2, 2, 1, 1,\n 3], [2, 2, 2, 2, 2]])]'], {}), '([(5, 5, 12, [[2, 1, 1, 1, 1], [2, 2, 1, 1, 1], [1, 2, \n 1, 1, 1], [2, 2, 1, 1, 3], [2, 2, 2, 2, 2]])])\n', (160, 266), False, 'from parameterized import parameterized\n')] |
fernandoq/quiz-show | audio.py | 6e130db7923d14cf1976e1c522c58f848e48f2af | import time
import subprocess
import os
print os.uname()
if not os.uname()[0].startswith("Darw"):
import pygame
pygame.mixer.init()
# Plays a song
def playSong(filename):
print "play song"
if not os.uname()[0].startswith("Darw"):
pygame.mixer.music.fadeout(1000) #fadeout current music over 1 sec.
pygame.mixer.music.load("music/" + filename)
pygame.mixer.music.play()
else:
subprocess.call(["afplay", "music/" + filename]) | [] |
unfoldingWord-dev/python-aws-tools | tests/test_dynamodbHandler.py | 8e856697ab07c5c33e60cde2d82ac805dec3ddf3 | from __future__ import absolute_import, unicode_literals, print_function
import mock
import unittest
import d43_aws_tools as aws_tools
from boto3.dynamodb.conditions import Attr
class DynamoDBHandlerTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
with mock.patch("d43_aws_tools.dynamodb_handler.boto3", mock.MagicMock()):
cls.handler = aws_tools.dynamodb_handler.DynamoDBHandler("table_name")
cls.handler.table = mock.MagicMock()
def setUp(self):
self.handler.table.reset_mock()
def test_get_item(self):
"""Test a successful invocation of `get_item`."""
expected = dict(field1="1", field2="2")
self.handler.table.get_item.return_value = {
"Item": expected
}
self.assertEqual(self.handler.get_item("key"), expected)
def test_get_item_malformed(self):
"""Test an unsuccessful invocation of `get_item`."""
self.handler.table.get_item.return_value = {
"TheWrongKey": dict(field1="1", field2="2")
}
self.assertIsNone(self.handler.get_item("key"))
def test_insert_item(self):
"""Test a successful invocation of `insert_item`."""
data = dict(x="x", y="y", three=3)
self.handler.insert_item(data)
self.handler.table.put_item.assert_called_once_with(Item=data)
def test_update_item(self):
"""Test a successful invocation of `update_item`."""
key = {"id": 1}
data = {"age": 40, "name": "John Doe"}
self.handler.update_item(key, data)
self.handler.table.update_item.assert_called_once()
_, kwargs = self.handler.table.update_item.call_args
self.assertIn("Key", kwargs)
self.assertEqual(kwargs["Key"], key)
self.assertIn("UpdateExpression", kwargs)
# ignore whitespace and order of assignments
expr = kwargs["UpdateExpression"].replace(" ", "")
self.assertTrue(expr.startswith("SET"))
self.assertIn("age=:age", expr)
self.assertIn("#item_name=:name", expr)
self.assertIn("ExpressionAttributeValues", kwargs)
self.assertEqual(kwargs["ExpressionAttributeValues"],
{":age": 40, ":name": "John Doe"})
self.assertIn("ExpressionAttributeNames", kwargs)
self.assertEqual(kwargs["ExpressionAttributeNames"],
{"#item_name": "name"})
def test_delete_item(self):
"""Test a successful invocation of `delete_item`."""
key = {"id": 1234}
self.handler.delete_item(key)
self.handler.table.delete_item.assert_called_once_with(Key=key)
def test_query_item(self):
""" Test a successful invocation of `query_item`."""
for cond in ("ne", "lt", "lte", "gt", "gte",
"begins_with", "is_in", "contains"):
self.handler.table.reset_mock()
query = {
"age": {
"condition": "eq",
"value": 25
},
"full_name": {
"condition": cond,
"value": "John Doe"
}
}
data = {"age": 30, "full_name": "John Doe"}
self.handler.table.scan.return_value = {"Items": data}
self.assertEqual(self.handler.query_items(query), data)
self.handler.table.scan.assert_called_once()
def test_query_bool_item(self):
""" Test a successful invocation of `query_item`. with a False boolean query"""
for cond in ("ne", "lt", "lte", "gt", "gte",
"begins_with", "is_in", "contains"):
self.handler.table.reset_mock()
query = {
"ready": False
}
data = {"age": 30, "full_name": "John Doe", "ready": False}
self.handler.table.scan.return_value = {"Items": data}
self.assertEqual(self.handler.query_items(query), data)
self.handler.table.scan.assert_called_once()
            err_msg = 'query_items: Expecting FilterExpression parameter for table.scan() but none found'
try:
self.handler.table.scan.assert_called_once_with()
# If the scan ran without an argument this is a failure
self.assertTrue(False, err_msg)
except Exception as e:
if err_msg in str(e):
raise e
def test_query_item_no_query(self):
"""Test a invocation of `query_item` with no query."""
data = {"age": 30, "full_name": "John Doe"}
self.handler.table.scan.return_value = {"Items": data}
self.assertEqual(self.handler.query_items(), data)
self.handler.table.scan.assert_called_once_with()
| [((463, 479), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (477, 479), False, 'import mock\n'), ((378, 434), 'd43_aws_tools.dynamodb_handler.DynamoDBHandler', 'aws_tools.dynamodb_handler.DynamoDBHandler', (['"""table_name"""'], {}), "('table_name')\n", (420, 434), True, 'import d43_aws_tools as aws_tools\n'), ((333, 349), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (347, 349), False, 'import mock\n')] |
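# --- Illustrative usage sketch (not part of the original test module) ---
# The tests above exercise d43_aws_tools.dynamodb_handler.DynamoDBHandler
# against mocks; a minimal real-world flow might look like this. The table
# name, keys and attribute values are hypothetical, and AWS credentials must
# already be configured for boto3.
from d43_aws_tools.dynamodb_handler import DynamoDBHandler

def example_handler_flow():
    handler = DynamoDBHandler("example-table")  # hypothetical table name
    handler.insert_item({"id": 1, "name": "John Doe", "age": 40})
    item = handler.get_item({"id": 1})  # the stored item, or None if the response is malformed
    handler.update_item({"id": 1}, {"age": 41, "name": "John Doe"})
    adults = handler.query_items({"age": {"condition": "gte", "value": 18}})
    handler.delete_item({"id": 1})
    return item, adults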
sbustamante/heroku_app | app.py | 6c8ff0b570750f3fe53ec67e24b71641167d53ce | from dash import Dash, html, dcc
import plotly.express as px
import pandas as pd
app = Dash(__name__)
server = app.server
# assume you have a "long-form" data frame
# see https://plotly.com/python/px-arguments/ for more options
df = pd.DataFrame({
"Fruit": ["Apples", "Oranges", "Bananas", "Apples", "Oranges", "Bananas"],
"Amount": [4, 1, 2, 2, 4, 5],
"City": ["SF", "SF", "SF", "Montreal", "Montreal", "Montreal"]
})
fig = px.bar(df, x="Fruit", y="Amount", color="City", barmode="group")
app.layout = html.Div(children=[
html.H1(children='Hello Dash'),
html.Div(children='''
Dash: A web application framework for your data.
'''),
dcc.Graph(
id='example-graph',
figure=fig
)
])
if __name__ == '__main__':
app.run_server(debug=True) | [((88, 102), 'dash.Dash', 'Dash', (['__name__'], {}), '(__name__)\n', (92, 102), False, 'from dash import Dash, html, dcc\n'), ((235, 426), 'pandas.DataFrame', 'pd.DataFrame', (["{'Fruit': ['Apples', 'Oranges', 'Bananas', 'Apples', 'Oranges', 'Bananas'],\n 'Amount': [4, 1, 2, 2, 4, 5], 'City': ['SF', 'SF', 'SF', 'Montreal',\n 'Montreal', 'Montreal']}"], {}), "({'Fruit': ['Apples', 'Oranges', 'Bananas', 'Apples', 'Oranges',\n 'Bananas'], 'Amount': [4, 1, 2, 2, 4, 5], 'City': ['SF', 'SF', 'SF',\n 'Montreal', 'Montreal', 'Montreal']})\n", (247, 426), True, 'import pandas as pd\n'), ((440, 504), 'plotly.express.bar', 'px.bar', (['df'], {'x': '"""Fruit"""', 'y': '"""Amount"""', 'color': '"""City"""', 'barmode': '"""group"""'}), "(df, x='Fruit', y='Amount', color='City', barmode='group')\n", (446, 504), True, 'import plotly.express as px\n'), ((543, 573), 'dash.html.H1', 'html.H1', ([], {'children': '"""Hello Dash"""'}), "(children='Hello Dash')\n", (550, 573), False, 'from dash import Dash, html, dcc\n'), ((580, 672), 'dash.html.Div', 'html.Div', ([], {'children': '"""\n Dash: A web application framework for your data.\n """'}), '(children=\n """\n Dash: A web application framework for your data.\n """)\n', (588, 672), False, 'from dash import Dash, html, dcc\n'), ((674, 715), 'dash.dcc.Graph', 'dcc.Graph', ([], {'id': '"""example-graph"""', 'figure': 'fig'}), "(id='example-graph', figure=fig)\n", (683, 715), False, 'from dash import Dash, html, dcc\n')] |
aptise/peter_sslers | peter_sslers/web/lib/form_utils.py | 1dcae3fee0c1f4c67ae8a614aed7e2a3121e88b0 | # pypi
import six
# local
from ...lib import db as lib_db
from ...lib import utils
from ...model import objects as model_objects
from ...model import utils as model_utils
from . import formhandling
# ==============================================================================
def decode_args(getcreate_args):
"""
    support for Python 2/3: on Python 3, decode any bytes values into utf-8 strings
"""
if six.PY3:
for (k, v) in list(getcreate_args.items()):
if isinstance(v, bytes):
getcreate_args[k] = v.decode("utf8")
return getcreate_args
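# Illustrative sketch (not part of the original module): on Python 3,
# decode_args() replaces any bytes values (e.g. key material slurped from an
# upload) with utf-8 strings; on Python 2 the dict is returned unchanged.
# The sample values below are hypothetical.
def _example_decode_args():
    sample = {"contact": b"user@example.com", "key_pem": b"pem-material"}
    decoded = decode_args(sample)
    # on Python 3: {"contact": "user@example.com", "key_pem": "pem-material"}
    return decoded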
# standardized mapping for `model_utils.DomainsChallenged` to a formStash
DOMAINS_CHALLENGED_FIELDS = {
"http-01": "domain_names_http01",
"dns-01": "domain_names_dns01",
}
class AcmeAccountUploadParser(object):
"""
An AcmeAccount may be uploaded multiple ways:
* a single PEM file
* an intra-associated three file triplet from a Certbot installation
This parser operates on a validated FormEncode results object (via `pyramid_formencode_classic`)
"""
# overwritten in __init__
getcreate_args = None
formStash = None
# tracked
acme_account_provider_id = None
account_key_pem = None
le_meta_jsons = None
le_pkey_jsons = None
le_reg_jsons = None
private_key_cycle_id = None
private_key_technology_id = None
upload_type = None # pem OR letsencrypt
def __init__(self, formStash):
self.formStash = formStash
self.getcreate_args = {}
def require_new(self, require_contact=None, require_technology=True):
"""
routine for creating a NEW AcmeAccount (peter_sslers generates the credentials)
:param require_contact: ``True`` if required; ``False`` if not; ``None`` for conditional logic
:param require_technology: ``True`` if required; ``False`` if not; ``None`` for conditional logic
"""
formStash = self.formStash
acme_account_provider_id = formStash.results.get(
"acme_account_provider_id", None
)
if acme_account_provider_id is None:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="acme_account_provider_id", message="No provider submitted."
)
private_key_cycle = formStash.results.get("account__private_key_cycle", None)
if private_key_cycle is None:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="account__private_key_cycle",
message="No PrivateKey cycle submitted.",
)
private_key_cycle_id = model_utils.PrivateKeyCycle.from_string(
private_key_cycle
)
private_key_technology_id = None
private_key_technology = formStash.results.get(
"account__private_key_technology", None
)
if private_key_technology:
private_key_technology_id = model_utils.KeyTechnology.from_string(
private_key_technology
)
if not private_key_technology_id and require_technology:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="account__private_key_technology",
message="No PrivateKey technology submitted.",
)
contact = formStash.results.get("account__contact", None)
if not contact and require_contact:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="account__contact",
message="`account__contact` is required.",
)
getcreate_args = {}
self.contact = getcreate_args["contact"] = contact
self.acme_account_provider_id = getcreate_args[
"acme_account_provider_id"
] = acme_account_provider_id
self.private_key_cycle_id = getcreate_args[
"private_key_cycle_id"
] = private_key_cycle_id
self.private_key_technology_id = getcreate_args[
"private_key_technology_id"
] = private_key_technology_id
self.getcreate_args = decode_args(getcreate_args)
def require_upload(self, require_contact=None, require_technology=None):
"""
        routine for uploading an existing AcmeAccount+AcmeAccountKey
:param require_contact: ``True`` if required; ``False`` if not; ``None`` for conditional logic
:param require_technology: ``True`` if required; ``False`` if not; ``None`` for conditional logic
"""
formStash = self.formStash
# -------------------
# do a quick parse...
requirements_either_or = (
(
"account_key_file_pem",
# "acme_account_provider_id",
),
(
"account_key_file_le_meta",
"account_key_file_le_pkey",
"account_key_file_le_reg",
),
)
failures = []
passes = []
for idx, option_set in enumerate(requirements_either_or):
option_set_results = [
True if formStash.results[option_set_item] is not None else False
for option_set_item in option_set
]
# if we have any item, we need all of them
if any(option_set_results):
if not all(option_set_results):
failures.append(
"If any of %s is provided, all must be provided."
% str(option_set)
)
else:
passes.append(idx)
if (len(passes) != 1) or failures:
# `formStash.fatal_form()` will raise `FormInvalid()`
formStash.fatal_form(
"You must upload `account_key_file_pem` or all of (`account_key_file_le_meta`, `account_key_file_le_pkey`, `account_key_file_le_reg`)."
)
# -------------------
# validate the provider option
# will be None unless a pem is uploaded
# required for PEM, ignored otherwise
acme_account_provider_id = formStash.results.get(
"acme_account_provider_id", None
)
private_key_cycle = formStash.results.get("account__private_key_cycle", None)
if private_key_cycle is None:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="account__private_key_cycle",
message="No PrivateKey cycle submitted.",
)
private_key_cycle_id = model_utils.PrivateKeyCycle.from_string(
private_key_cycle
)
private_key_technology_id = None
private_key_technology = formStash.results.get(
"account__private_key_technology", None
)
if private_key_technology is not None:
private_key_technology_id = model_utils.KeyTechnology.from_string(
private_key_technology
)
if not private_key_technology_id and require_technology:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="account__private_key_technology",
message="No PrivateKey technology submitted.",
)
# require `contact` when uploading a PEM file
if formStash.results["account_key_file_pem"] is not None:
require_contact = True
contact = formStash.results.get("account__contact")
if not contact and require_contact:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="account__contact",
message="`account__contact` is required.",
)
getcreate_args = {}
self.contact = getcreate_args["contact"] = contact
self.private_key_cycle_id = getcreate_args[
"private_key_cycle_id"
] = private_key_cycle_id
self.private_key_technology_id = getcreate_args[
"private_key_technology_id"
] = private_key_technology_id
if formStash.results["account_key_file_pem"] is not None:
if acme_account_provider_id is None:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="acme_account_provider_id", message="No provider submitted."
)
self.upload_type = "pem"
self.acme_account_provider_id = getcreate_args[
"acme_account_provider_id"
] = acme_account_provider_id
self.account_key_pem = getcreate_args[
"key_pem"
] = formhandling.slurp_file_field(formStash, "account_key_file_pem")
else:
# note that we use `jsonS` to indicate a string
self.le_meta_jsons = getcreate_args[
"le_meta_jsons"
] = formhandling.slurp_file_field(formStash, "account_key_file_le_meta")
self.le_pkey_jsons = getcreate_args[
"le_pkey_jsons"
] = formhandling.slurp_file_field(formStash, "account_key_file_le_pkey")
self.le_reg_jsons = getcreate_args[
"le_reg_jsons"
] = formhandling.slurp_file_field(formStash, "account_key_file_le_reg")
self.getcreate_args = decode_args(getcreate_args)
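# Illustrative sketch (not part of the original module): typical flow for the
# parser above inside a view, assuming `formStash` is an already-validated
# pyramid_formencode_classic FormStash for the upload form.
def _example_account_upload(formStash):
    parser = AcmeAccountUploadParser(formStash)
    # raises via formStash.fatal_field()/fatal_form() on bad or missing input
    parser.require_upload(require_contact=None, require_technology=None)
    # normalized keyword arguments for a downstream getcreate routine
    return parser.getcreate_args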
class _PrivateKeyUploadParser(object):
"""
A PrivateKey is not a complex upload to parse itself
This code exists to mimic the AcmeAccount uploading.
"""
# overwritten in __init__
getcreate_args = None
formStash = None
# tracked
private_key_pem = None
upload_type = None # pem
def __init__(self, formStash):
self.formStash = formStash
self.getcreate_args = {}
def require_upload(self):
"""
        routine for uploading an existing PrivateKey
"""
formStash = self.formStash
getcreate_args = {}
if formStash.results["private_key_file_pem"] is not None:
self.upload_type = "pem"
self.private_key_pem = getcreate_args[
"key_pem"
] = formhandling.slurp_file_field(formStash, "private_key_file_pem")
self.getcreate_args = decode_args(getcreate_args)
class _AcmeAccountSelection(object):
"""
Class used to manage an uploaded AcmeAccount
"""
selection = None
upload_parsed = None # instance of AcmeAccountUploadParser or None
AcmeAccount = None
class _PrivateKeySelection(object):
selection = None
upload_parsed = None # instance of AcmeAccountUploadParser or None
private_key_strategy__requested = None
PrivateKey = None
@property
def private_key_strategy_id__requested(self):
return model_utils.PrivateKeyStrategy.from_string(
self.private_key_strategy__requested
)
def parse_AcmeAccountSelection(
request,
formStash,
account_key_option=None,
allow_none=None,
require_contact=None,
):
"""
:param formStash: an instance of `pyramid_formencode_classic.FormStash`
:param account_key_option:
:param allow_none:
:param require_contact: ``True`` if required; ``False`` if not; ``None`` for conditional logic
"""
account_key_pem = None
account_key_pem_md5 = None
dbAcmeAccount = None
is_global_default = None
# handle the explicit-option
acmeAccountSelection = _AcmeAccountSelection()
if account_key_option == "account_key_file":
# this will handle form validation and raise errors.
parser = AcmeAccountUploadParser(formStash)
# this will have: `contact`, `private_key_cycle`, `private_key_technology`
parser.require_upload(require_contact=require_contact)
# update our object
acmeAccountSelection.selection = "upload"
acmeAccountSelection.upload_parsed = parser
return acmeAccountSelection
else:
if account_key_option == "account_key_global_default":
acmeAccountSelection.selection = "global_default"
account_key_pem_md5 = formStash.results["account_key_global_default"]
is_global_default = True
elif account_key_option == "account_key_existing":
acmeAccountSelection.selection = "existing"
account_key_pem_md5 = formStash.results["account_key_existing"]
elif account_key_option == "account_key_reuse":
acmeAccountSelection.selection = "reuse"
account_key_pem_md5 = formStash.results["account_key_reuse"]
elif account_key_option == "none":
if not allow_none:
# `formStash.fatal_form()` will raise `FormInvalid()`
formStash.fatal_form(
"This form does not support no AcmeAccount selection."
)
# note the lowercase "none"; this is an explicit "no item" selection
# only certain routes allow this
acmeAccountSelection.selection = "none"
account_key_pem_md5 = None
return acmeAccountSelection
else:
formStash.fatal_form(
message="Invalid `account_key_option`",
)
if not account_key_pem_md5:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field=account_key_option, message="You did not provide a value"
)
dbAcmeAccount = lib_db.get.get__AcmeAccount__by_pemMd5(
request.api_context, account_key_pem_md5, is_active=True
)
if not dbAcmeAccount:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field=account_key_option,
message="The selected AcmeAccount is not enrolled in the system.",
)
if is_global_default and not dbAcmeAccount.is_global_default:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field=account_key_option,
message="The selected AcmeAccount is not the current default.",
)
acmeAccountSelection.AcmeAccount = dbAcmeAccount
return acmeAccountSelection
# `formStash.fatal_form()` will raise `FormInvalid()`
formStash.fatal_form("There was an error validating your form.")
def parse_PrivateKeySelection(request, formStash, private_key_option=None):
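    # Accepted `private_key_option` values (taken from the branches below):
    #   "private_key_file", "private_key_existing", "private_key_reuse",
    #   "private_key_generate", "private_key_for_account_key".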
private_key_pem = None
private_key_pem_md5 = None
PrivateKey = None # :class:`model.objects.PrivateKey`
# handle the explicit-option
privateKeySelection = _PrivateKeySelection()
if private_key_option == "private_key_file":
# this will handle form validation and raise errors.
parser = _PrivateKeyUploadParser(formStash)
parser.require_upload()
# update our object
privateKeySelection.selection = "upload"
privateKeySelection.upload_parsed = parser
privateKeySelection.private_key_strategy__requested = (
model_utils.PrivateKeySelection_2_PrivateKeyStrategy["upload"]
)
return privateKeySelection
else:
if private_key_option == "private_key_existing":
privateKeySelection.selection = "existing"
privateKeySelection.private_key_strategy__requested = (
model_utils.PrivateKeySelection_2_PrivateKeyStrategy["existing"]
)
private_key_pem_md5 = formStash.results["private_key_existing"]
elif private_key_option == "private_key_reuse":
privateKeySelection.selection = "reuse"
privateKeySelection.private_key_strategy__requested = (
model_utils.PrivateKeySelection_2_PrivateKeyStrategy["reuse"]
)
private_key_pem_md5 = formStash.results["private_key_reuse"]
elif private_key_option in (
"private_key_generate",
"private_key_for_account_key",
):
dbPrivateKey = lib_db.get.get__PrivateKey__by_id(request.api_context, 0)
if not dbPrivateKey:
formStash.fatal_field(
field=private_key_option,
message="Could not load the placeholder PrivateKey.",
)
privateKeySelection.PrivateKey = dbPrivateKey
if private_key_option == "private_key_generate":
privateKeySelection.selection = "generate"
privateKeySelection.private_key_strategy__requested = (
model_utils.PrivateKeySelection_2_PrivateKeyStrategy["generate"]
)
elif private_key_option == "private_key_for_account_key":
privateKeySelection.selection = "private_key_for_account_key"
privateKeySelection.private_key_strategy__requested = (
model_utils.PrivateKeySelection_2_PrivateKeyStrategy[
"private_key_for_account_key"
]
)
return privateKeySelection
else:
# `formStash.fatal_form()` will raise `FormInvalid()`
formStash.fatal_form("Invalid `private_key_option`")
if not private_key_pem_md5:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field=private_key_option, message="You did not provide a value"
)
dbPrivateKey = lib_db.get.get__PrivateKey__by_pemMd5(
request.api_context, private_key_pem_md5, is_active=True
)
if not dbPrivateKey:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field=private_key_option,
message="The selected PrivateKey is not enrolled in the system.",
)
privateKeySelection.PrivateKey = dbPrivateKey
return privateKeySelection
# `formStash.fatal_form()` will raise `FormInvalid()`
formStash.fatal_form("There was an error validating your form.")
def form_key_selection(request, formStash, require_contact=None):
"""
:param formStash: an instance of `pyramid_formencode_classic.FormStash`
:param require_contact: ``True`` if required; ``False`` if not; ``None`` for conditional logic
"""
acmeAccountSelection = parse_AcmeAccountSelection(
request,
formStash,
account_key_option=formStash.results["account_key_option"],
require_contact=require_contact,
)
if acmeAccountSelection.selection == "upload":
key_create_args = acmeAccountSelection.upload_parsed.getcreate_args
key_create_args["event_type"] = "AcmeAccount__insert"
key_create_args[
"acme_account_key_source_id"
] = model_utils.AcmeAccountKeySource.from_string("imported")
(dbAcmeAccount, _is_created,) = lib_db.getcreate.getcreate__AcmeAccount(
request.api_context, **key_create_args
)
acmeAccountSelection.AcmeAccount = dbAcmeAccount
privateKeySelection = parse_PrivateKeySelection(
request,
formStash,
private_key_option=formStash.results["private_key_option"],
)
if privateKeySelection.selection == "upload":
key_create_args = privateKeySelection.upload_parsed.getcreate_args
key_create_args["event_type"] = "PrivateKey__insert"
key_create_args[
"private_key_source_id"
] = model_utils.PrivateKeySource.from_string("imported")
key_create_args["private_key_type_id"] = model_utils.PrivateKeyType.from_string(
"standard"
)
(
dbPrivateKey,
_is_created,
) = lib_db.getcreate.getcreate__PrivateKey__by_pem_text(
request.api_context, **key_create_args
)
privateKeySelection.PrivateKey = dbPrivateKey
elif privateKeySelection.selection == "generate":
dbPrivateKey = lib_db.get.get__PrivateKey__by_id(request.api_context, 0)
if not dbPrivateKey:
formStash.fatal_field(
field="private_key_option",
message="Could not load the placeholder PrivateKey for autogeneration.",
)
privateKeySelection.PrivateKey = dbPrivateKey
return (acmeAccountSelection, privateKeySelection)
def form_domains_challenge_typed(request, formStash, http01_only=False):
domains_challenged = model_utils.DomainsChallenged()
domain_names_all = []
try:
# 1: iterate over the submitted domains by segment
for (target_, source_) in DOMAINS_CHALLENGED_FIELDS.items():
submitted_ = formStash.results.get(source_)
if submitted_:
# this function checks the domain names match a simple regex
# it will raise a `ValueError("invalid domain")` on the first invalid domain
submitted_ = utils.domains_from_string(submitted_)
if submitted_:
domain_names_all.extend(submitted_)
domains_challenged[target_] = submitted_
# 2: ensure there are domains
if not domain_names_all:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="Error_Main",
message="no domain names submitted",
)
# 3: ensure there is no overlap
domain_names_all_set = set(domain_names_all)
if len(domain_names_all) != len(domain_names_all_set):
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="Error_Main",
message="a domain name can only be associated to one challenge type",
)
# 4: maybe we only want http01 domains submitted?
if http01_only:
for (k, v) in domains_challenged.items():
if k == "http-01":
continue
if v:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="Error_Main",
message="only http-01 domains are accepted by this form",
)
except ValueError as exc:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="Error_Main", message="invalid domain names detected"
)
return domains_challenged
def form_single_domain_challenge_typed(request, formStash, challenge_type="http-01"):
domains_challenged = model_utils.DomainsChallenged()
# this function checks the domain names match a simple regex
domain_names = utils.domains_from_string(formStash.results["domain_name"])
if not domain_names:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(field="domain_name", message="Found no domain names")
if len(domain_names) != 1:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="domain_name",
message="This endpoint currently supports only 1 domain name",
)
domains_challenged[challenge_type] = domain_names
return domains_challenged
| [] |
jlamperez/Vitis-Tutorials | AI_Engine_Development/Feature_Tutorials/07-AI-Engine-Floating-Point/Utils/GenerationLib.py | 9a5b611caabb5656bbb2879116e032227b164bfd | #
# Copyright 2020–2021 Xilinx, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
from math import *
import random
def GenerateTestVector(dtval,pliow,NPhases_s,NStreams_s,NSamples_s,NFrames_s,SeqType_s,Basename_s):
print('DtVal : ',dtval.get())
print('PLIO width : ',pliow.get())
print('NPhases : ',NPhases_s.get())
print('NStreams : ',NStreams_s.get())
print('NSamples : ',NSamples_s.get())
print('NFrames : ',NFrames_s.get())
print('Type of Sequence : ',SeqType_s.get())
print('Base filename : ',Basename_s.get())
NPhases = int(NPhases_s.get())
NStreams = int(NStreams_s.get())
LFrame = int(NSamples_s.get())
NFrames = int(NFrames_s.get())
SequenceType = SeqType_s.get()
Basename = Basename_s.get()
#parameters that should be in the GUI
# SequenceType ='Linear' # 'SinCos' 'Linear' 'Random' 'Dirac'
# Basename = 'PhaseIn'
NSamples = NPhases*NStreams*LFrame*NFrames;
NSamples1 = NPhases*NStreams*LFrame*(NFrames+1); # A little longer to allow for delay in streams
NBitsData = 32;
if( dtval.get() == 'int16'):
NBitsData = 16
HasImag = 0
if (dtval.get() == 'cint16'):
HasImag = 1
if(SequenceType != 'SinCos' and SequenceType != 'Linear' and SequenceType != 'Random' and SequenceType != 'Dirac'):
print ('Unknown Sequence Type')
return
# Create the overall signal that will be distributed over all streams
# it is already separated in phases
S = np.zeros((NPhases,int(NSamples1/NPhases),1+HasImag))
for i in range(int(NSamples1/NPhases)):
for p in range (NPhases):
k = i*NPhases+p
if (SequenceType == 'SinCos'):
vr = int(5000*cos(6.28*5/(NPhases*NStreams*LFrame)*k))
vi = int(5000*sin(6.28*5/(NPhases*NStreams*LFrame)*k))
elif (SequenceType == 'Linear'):
vr = k
vi = -k
elif (SequenceType == 'Random'):
vr = random.randint(-5000,5000)
vi = random.randint(-5000,5000)
elif (SequenceType == 'Dirac'):
vr = 0
vi = 0
if(k%151 == 1):
vr = 1
elif(k%151 == 40):
vi = 1
elif(k%151 == 81):
vr = 2
elif(k%151 == 115):
vi = -2
# if(k%311 == 50):
# vr = 1
# S[p,i,0] =
# if(HasImag==1):
# S[p,i,1] = int(5000*sin(6.28*5/(NPhases*NStreams*LFrame)*k))
S[p,i,0] = vr
if (HasImag == 1 ):
S[p,i,1] = vi
PLIOwidth = int(pliow.get())
NSamplesPerLine = int(PLIOwidth/NBitsData) # Data are read in blocks of 128 bits (4 data in cint16)
# Create an Input test Vector in TestInputS.txt
FileNames = [];
# Easiest case: 1 stream per AI Engine
if (NStreams == 1):
#Creates list of filenames
for Phi in range(NPhases):
FileNames.append(Basename+'_'+str(Phi)+'.txt')
#Open all files
fds = [open(path, 'w') for path in FileNames]
#Fill all files with the right data
for p in range(NPhases):
fd = fds[p]
for s in range(int(NSamples1/NPhases/NSamplesPerLine)):
for d in range(NSamplesPerLine):
index = s*NSamplesPerLine + d
fd.write(str(int(S[p,index,0]))+' ')
if(HasImag):
fd.write(str(int(S[p,index,1]))+' ')
fd.write('\n')
for fd in fds:
fd.close()
if (NStreams == 2):
#Creates list of filenames
for Phi in range(NPhases):
for Stream in range(NStreams):
FileNames.append('PhaseIn_'+str(Phi)+'_'+str(Stream)+'.txt')
# Hash table to associate data to streams
NSamplesIn128bits = int(128/NBitsData )
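        # The two streams share a phase's samples in alternating 128-bit blocks:
        # block k of the phase goes to stream k % 2, and H[row, stream] records
        # which original sample index lands at each position of that stream.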
H = np.zeros((int(NSamples1/NPhases/2),2))
H = H.astype('int32')
index = np.zeros(2)
index = index.astype('int32')
for s in range(int(NSamples1/NPhases)):
k = int(s/NSamplesIn128bits) # Block order
i = k%2 # Which streams
H[index[i],i] = s
index[i] = index[i]+1
#Open all files
fds = [open(path, 'w') for path in FileNames]
#Fill all files with the right data
for p in range(NPhases):
for stream in range(2):
fd = fds[2*p+stream]
for s in range(int(NSamples1/NPhases/NSamplesPerLine/NStreams)):
for d in range(NSamplesPerLine):
index = s*NSamplesPerLine + d
fd.write(str(int(S[p,H[index,stream],0]))+' ')
if(HasImag):
fd.write(str(int(S[p,H[index,stream],1]))+' ')
fd.write('\n')
for fd in fds:
fd.close()
| [((4629, 4640), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (4637, 4640), True, 'import numpy as np\n'), ((2525, 2552), 'random.randint', 'random.randint', (['(-5000)', '(5000)'], {}), '(-5000, 5000)\n', (2539, 2552), False, 'import random\n'), ((2573, 2600), 'random.randint', 'random.randint', (['(-5000)', '(5000)'], {}), '(-5000, 5000)\n', (2587, 2600), False, 'import random\n')] |
ashish-ad/Python-Projects | Object Oriented Programming/Lecture 01/Intro.py | 7f49476b6945189165d536629109030f10603556 | item1='phone'
item1_price = 100
item1_quantity = 5
item1_price_total = item1_price * item1_quantity
print(type(item1)) # str
print(type(item1_price)) # int
print(type(item1_quantity)) # int
print(type(item1_price_total)) # int
# output:
# <class 'str'>
# <class 'int'>
# <class 'int'>
# <class 'int'> | [] |
THU-DA-6D-Pose-Group/self6dpp | configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_16_36WoodBlock.py | c267cfa55e440e212136a5e9940598720fa21d16 | _base_ = "./FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_01_02MasterChefCan.py"
OUTPUT_DIR = "output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/16_36WoodBlock"
DATASETS = dict(TRAIN=("ycbv_036_wood_block_train_pbr",))
| [] |
jhihruei/sqlakeyset | sqlakeyset/__init__.py | 0aa0f6e041dc37bc5f918303578875ad334cad6c |
from .columns import OC
from .paging import get_page, select_page, process_args
from .results import serialize_bookmark, unserialize_bookmark, Page, Paging
__all__ = [
'OC',
'get_page',
'select_page',
'serialize_bookmark',
'unserialize_bookmark',
'Page',
'Paging',
'process_args'
]
| [] |
shaun95/google-research | low_rank_local_connectivity/models/simple_model.py | d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5 | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple model for image classification.
The model consists of multiple
conv/locally_connected/wide_conv/low_rank_locally_connected layers followed
by a fully connected layer. Changes to the model architecture can be made by
modifying the simple_model_config.py file.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import os
import tensorflow.compat.v1 as tf
from low_rank_local_connectivity import layers
from low_rank_local_connectivity import utils
MOMENTUM = 0.9
EPS = 1e-5
class SimpleNetwork(tf.keras.Model):
"""Locally Connected Network."""
def __init__(self, config, variable_scope='simple_network'):
super(SimpleNetwork, self).__init__()
self.variable_scope = variable_scope
self.config = copy.deepcopy(config)
filters_list = self.config.num_filters_list
depth = len(filters_list)
self.pass_is_training_list = []
self.layers_list = []
if self.config.num_channels < 1:
raise ValueError('num_channels should be > 0')
input_channels = self.config.num_channels
if self.config.coord_conv:
# Add two coordinate conv channels.
input_channels = input_channels + 2
if len(self.config.layer_types) < depth:
self.config.layer_types.extend(
['conv2d'] * (depth - len(self.config.layer_types)))
chin = input_channels
for i, (kernel_size, num_filters, strides, layer_type) in enumerate(zip(
self.config.kernel_size_list,
filters_list,
self.config.strides_list,
self.config.layer_types)):
padding = 'valid'
if layer_type == 'conv2d':
chout = num_filters
layer = tf.keras.layers.Conv2D(
filters=chout,
kernel_size=kernel_size,
strides=(strides, strides),
padding=padding,
activation=None,
use_bias=not self.config.batch_norm,
kernel_initializer=self.config.kernel_initializer,
name=os.path.join(self.variable_scope, 'layer%d' %i, layer_type))
elif layer_type == 'wide_conv2d':
# Conv. layer with equivalent params to low rank locally connected.
if self.config.rank < 1:
raise ValueError('rank should be > 0 for %s layer.' % layer_type)
chout = int((self.config.rank * chin + num_filters) / float(
chin + num_filters) * num_filters)
layer = tf.keras.layers.Conv2D(
filters=chout if i < (depth-1)
else int(num_filters * self.config.rank),
kernel_size=kernel_size, strides=(strides, strides),
padding=padding,
activation=None,
use_bias=not self.config.batch_norm,
kernel_initializer=self.config.kernel_initializer,
name=os.path.join(self.variable_scope, 'layer%d' %i, layer_type))
elif layer_type == 'locally_connected2d':
# Full locally connected layer.
chout = num_filters
layer = tf.keras.layers.LocallyConnected2D(
filters=chout,
kernel_size=(kernel_size, kernel_size),
strides=(strides, strides),
padding=padding,
activation=None,
use_bias=True, # not self.config.batch_norm,
name=os.path.join(self.variable_scope, 'layer%d' %i, layer_type),
kernel_initializer=self.config.kernel_initializer)
elif layer_type == 'low_rank_locally_connected2d':
if self.config.rank < 1:
raise ValueError('rank should be > 0 for %s layer.' % layer_type)
chout = num_filters
layer = layers.LowRankLocallyConnected2D(
filters=chout,
kernel_size=(kernel_size, kernel_size),
strides=(strides, strides),
padding=padding,
activation=None,
use_bias=not self.config.batch_norm,
name=os.path.join(self.variable_scope, 'layer%d' %i, layer_type),
kernel_initializer=self.config.kernel_initializer,
combining_weights_initializer=(
self.config.combining_weights_initializer),
spatial_rank=self.config.rank,
normalize_weights=self.config.normalize_weights,
input_dependent=config.input_dependent,
share_row_combining_weights=self.config.share_row_combining_weights,
share_col_combining_weights=self.config.share_col_combining_weights)
else:
raise ValueError('Can not recognize layer %s type.' % layer_type)
chin = chout
self.layers_list.append(layer)
self.pass_is_training_list.append(False)
if self.config.batch_norm:
layer = tf.keras.layers.BatchNormalization(
trainable=True, momentum=MOMENTUM, epsilon=EPS)
self.layers_list.append(layer)
self.pass_is_training_list.append(True)
layer = tf.keras.layers.ReLU()
self.layers_list.append(layer)
self.pass_is_training_list.append(False)
if self.config.global_avg_pooling:
self.layers_list.append(tf.keras.layers.GlobalAveragePooling2D())
else:
self.layers_list.append(tf.keras.layers.Flatten())
self.pass_is_training_list.append(False)
self.layers_list.append(tf.keras.layers.Dense(
units=self.config.num_classes, activation=None, use_bias=True,
name='logits'))
self.pass_is_training_list.append(False)
def __call__(self, images, is_training):
endpoints = {}
if self.config.coord_conv:
# Append position channels.
net = tf.concat([images, utils.position_channels(images)], axis=3)
else:
net = images
for i, (pass_is_training, layer) in enumerate(
zip(self.pass_is_training_list, self.layers_list)):
net = layer(net, training=is_training) if pass_is_training else layer(net)
endpoints['layer%d' % i] = net
tf.add_to_collection(tf.GraphKeys.UPDATE_OPS, layer.updates)
self.add_update(layer.updates)
logits = net
return logits, endpoints
| [((1396, 1417), 'copy.deepcopy', 'copy.deepcopy', (['config'], {}), '(config)\n', (1409, 1417), False, 'import copy\n'), ((5475, 5497), 'tensorflow.compat.v1.keras.layers.ReLU', 'tf.keras.layers.ReLU', ([], {}), '()\n', (5495, 5497), True, 'import tensorflow.compat.v1 as tf\n'), ((5836, 5939), 'tensorflow.compat.v1.keras.layers.Dense', 'tf.keras.layers.Dense', ([], {'units': 'self.config.num_classes', 'activation': 'None', 'use_bias': '(True)', 'name': '"""logits"""'}), "(units=self.config.num_classes, activation=None,\n use_bias=True, name='logits')\n", (5857, 5939), True, 'import tensorflow.compat.v1 as tf\n'), ((6465, 6525), 'tensorflow.compat.v1.add_to_collection', 'tf.add_to_collection', (['tf.GraphKeys.UPDATE_OPS', 'layer.updates'], {}), '(tf.GraphKeys.UPDATE_OPS, layer.updates)\n', (6485, 6525), True, 'import tensorflow.compat.v1 as tf\n'), ((5276, 5362), 'tensorflow.compat.v1.keras.layers.BatchNormalization', 'tf.keras.layers.BatchNormalization', ([], {'trainable': '(True)', 'momentum': 'MOMENTUM', 'epsilon': 'EPS'}), '(trainable=True, momentum=MOMENTUM,\n epsilon=EPS)\n', (5310, 5362), True, 'import tensorflow.compat.v1 as tf\n'), ((5652, 5692), 'tensorflow.compat.v1.keras.layers.GlobalAveragePooling2D', 'tf.keras.layers.GlobalAveragePooling2D', ([], {}), '()\n', (5690, 5692), True, 'import tensorflow.compat.v1 as tf\n'), ((5734, 5759), 'tensorflow.compat.v1.keras.layers.Flatten', 'tf.keras.layers.Flatten', ([], {}), '()\n', (5757, 5759), True, 'import tensorflow.compat.v1 as tf\n'), ((6158, 6189), 'low_rank_local_connectivity.utils.position_channels', 'utils.position_channels', (['images'], {}), '(images)\n', (6181, 6189), False, 'from low_rank_local_connectivity import utils\n'), ((2606, 2666), 'os.path.join', 'os.path.join', (['self.variable_scope', "('layer%d' % i)", 'layer_type'], {}), "(self.variable_scope, 'layer%d' % i, layer_type)\n", (2618, 2666), False, 'import os\n'), ((3397, 3457), 'os.path.join', 'os.path.join', (['self.variable_scope', "('layer%d' % i)", 'layer_type'], {}), "(self.variable_scope, 'layer%d' % i, layer_type)\n", (3409, 3457), False, 'import os\n'), ((3879, 3939), 'os.path.join', 'os.path.join', (['self.variable_scope', "('layer%d' % i)", 'layer_type'], {}), "(self.variable_scope, 'layer%d' % i, layer_type)\n", (3891, 3939), False, 'import os\n'), ((4490, 4550), 'os.path.join', 'os.path.join', (['self.variable_scope', "('layer%d' % i)", 'layer_type'], {}), "(self.variable_scope, 'layer%d' % i, layer_type)\n", (4502, 4550), False, 'import os\n')] |
jacoblb64/pico_rgb_keypad_hid | adafruit_circuitpython_libs/adafruit-circuitpython-bundle-py-20210214/examples/icm20x_icm20948_gyro_data_rate_test.py | 3251ca6a98ef86d9f98c54f639c4d61810601a0b | # SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries
# SPDX-License-Identifier: MIT
import time
import board
import busio
from adafruit_icm20x import ICM20948
cycles = 200
i2c = busio.I2C(board.SCL, board.SDA)
icm = ICM20948(i2c)
# Cycle between two data rates
# Best viewed in the Mu serial plotter where you can see how
# the data rate affects the resolution of the data
while True:
icm.gyro_data_rate_divisor = 0 # minimum
print("Data Rate:", icm.gyro_data_rate)
time.sleep(2)
for i in range(cycles):
print(icm.gyro)
icm.gyro_data_rate_divisor = 255 # maximum
print("Data Rate:", icm.gyro_data_rate)
time.sleep(2)
for i in range(cycles):
print(icm.gyro)
| [((190, 221), 'busio.I2C', 'busio.I2C', (['board.SCL', 'board.SDA'], {}), '(board.SCL, board.SDA)\n', (199, 221), False, 'import busio\n'), ((228, 241), 'adafruit_icm20x.ICM20948', 'ICM20948', (['i2c'], {}), '(i2c)\n', (236, 241), False, 'from adafruit_icm20x import ICM20948\n'), ((492, 505), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (502, 505), False, 'import time\n'), ((655, 668), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (665, 668), False, 'import time\n')] |
TheLurkingCat/TIOJ | 1186.py | 077e1cd22239d8f6bc1cd7561f27c68143e80263 | a = int(input())
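# For each size a read from input (stopping at 0), the loop below prints a
# (2a-1) x (2a-1) figure of '*': both diagonals, the middle column, and the
# middle row, i.e. an eight-armed asterisk.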
while a:
for x in range(a-1):
out = '*' + ' ' * (a-x-2) + '*' + ' ' * (a-x-2) + '*'
print(out.center(2*a-1))
print('*' * (2 * a - 1))
for x in range(a-1):
out = '*' + ' ' * x + '*' + ' ' * x + '*'
print(out.center(2*a-1))
a = int(input())
| [] |
initialed85/eds-cctv-system | utils/common.py | fcdb7e7e23327bf3a901d23d506b3915833027d1 | import datetime
import json
import os
from pathlib import Path
from types import SimpleNamespace
from typing import List
from typing import NamedTuple, Union, Optional, Callable
from uuid import uuid3, NAMESPACE_DNS
from dateutil.parser import parse
_VIDEO_SUFFIXES = [".mkv", ".mp4"]
_IMAGE_SUFFIXES = [".jpg"]
_PERMITTED_EXTENSIONS = _VIDEO_SUFFIXES + _IMAGE_SUFFIXES
class PathDetails(NamedTuple):
path: Path
event_id: Optional[int]
camera_id: Optional[int]
timestamp: datetime.datetime
camera_name: str
is_image: bool
is_lowres: bool
class Event(SimpleNamespace):
event_id: str
timestamp: Union[datetime.datetime, str]
camera_name: str
high_res_image_path: str
low_res_image_path: str
high_res_video_path: str
low_res_video_path: str
def get_sorted_paths(path: Path) -> List[Path]:
return sorted(Path(path).iterdir(), key=os.path.getmtime)
def format_timestamp_for_go(timestamp: Union[datetime.datetime, str]) -> str:
if isinstance(timestamp, str):
timestamp = parse(timestamp)
us = timestamp.strftime("%f")
tz_raw = timestamp.strftime("%z")
tz = "{}:{}".format(tz_raw[0:3], tz_raw[3:])
return timestamp.strftime(f"%Y-%m-%dT%H:%M:%S.{us}00{tz}")
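# Worked example (illustrative input, not from the original module): for an
# aware UTC datetime 2021-01-02 03:04:05.123456 the function above yields
# "2021-01-02T03:04:05.12345600+00:00" -- the %f microseconds plus two literal
# zeros, with a colon spliced into the numeric UTC offset.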
def parse_paths(paths: List[Path], tzinfo: datetime.tzinfo, parse_method: Callable) -> List[PathDetails]:
return [
y
for y in [parse_method(path=x, tzinfo=tzinfo) for x in paths if x is not None]
if y is not None
]
def build_event_for_some_path_details(some_path_details: List[PathDetails], path: Path):
if len(some_path_details) != 4:
raise ValueError(
f"expected some_path_details to be 4 long (and related); instead it was {len(some_path_details)} long"
)
event_ids = list(set([x.event_id for x in some_path_details]))
if len(event_ids) != 1:
raise ValueError(
f"expected all PathDetails to have a common event_id; instead they were {event_ids}"
)
camera_ids = list(set([x.camera_id for x in some_path_details]))
if len(camera_ids) != 1:
raise ValueError(
f"expected all PathDetails to have a common camera_id; instead they were {camera_ids}"
)
camera_names = list(set([x.camera_name for x in some_path_details]))
if len(camera_names) != 1:
raise ValueError(
f"expected all PathDetails to have a common camera_name; instead they were {camera_names}"
)
high_res_image_paths = list(
set([x.path for x in some_path_details if x.is_image and not x.is_lowres])
)
if len(high_res_image_paths) != 1:
raise ValueError(
f"expected to find 1 high_res_image_path from PathDetails; instead found {high_res_image_paths}"
)
low_res_image_paths = list(
set([x.path for x in some_path_details if x.is_image and x.is_lowres])
)
if len(low_res_image_paths) != 1:
raise ValueError(
f"expected to find 1 low_res_image_path from PathDetails; instead found {low_res_image_paths}"
)
high_res_video_paths = list(
set([x.path for x in some_path_details if not x.is_image and not x.is_lowres])
)
if len(high_res_video_paths) != 1:
raise ValueError(
f"expected to find 1 high_res_video_path from PathDetails; instead found {high_res_video_paths}"
)
low_res_video_paths = list(
set([x.path for x in some_path_details if not x.is_image and x.is_lowres])
)
if len(low_res_video_paths) != 1:
raise ValueError(
f"expected to find 1 low_res_video_path from PathDetails; instead found {low_res_video_paths}"
)
timestamp = sorted([x.timestamp for x in some_path_details])[0]
high_res_image_path = high_res_image_paths[0]
low_res_image_path = low_res_image_paths[0]
high_res_video_path = high_res_video_paths[0]
low_res_video_path = low_res_video_paths[0]
# in Go:
# eventId := uuid.NewSHA1(
# uuid.NameSpaceDNS,
# []byte(fmt.Sprintf("%v, %v, %v, %v, %v", timestamp, highResImagePath, lowResImagePath, highResVideoPath, lowResVideoPath)),
# )
event_id = uuid3(
NAMESPACE_DNS,
f"{format_timestamp_for_go(timestamp)}, {high_res_image_path}, {low_res_image_path}, {high_res_video_path}, {low_res_video_path}",
)
return Event(
event_id=str(event_id),
timestamp=timestamp,
camera_name=camera_names[0],
high_res_image_path=str(path / high_res_image_path),
low_res_image_path=str(path / low_res_image_path),
high_res_video_path=str(path / high_res_video_path),
low_res_video_path=str(path / low_res_video_path),
)
def relate_path_details(
some_path_details: List[PathDetails],
get_key_methods: List[Callable]
) -> List[List[PathDetails]]:
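    # Group the PathDetails by every key produced by `get_key_methods`, keep only
    # the groups containing exactly 4 related entries (downstream: one high/low-res
    # image plus one high/low-res video per event), and deduplicate before returning.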
some_path_details_by_key = {}
for path_details in some_path_details:
keys = [x(path_details) for x in get_key_methods]
for key in keys:
some_path_details_by_key.setdefault(key, [])
some_path_details_by_key[key] += [path_details]
viable_some_path_details_by_key = {
k: v for k, v in some_path_details_by_key.items() if len(v) == 4
}
deduplicated_path_details = []
for some_path_details in viable_some_path_details_by_key.values():
if some_path_details not in deduplicated_path_details:
deduplicated_path_details += [some_path_details]
return deduplicated_path_details
def build_events_for_related_path_details(
related_path_details: List[List[PathDetails]], path: Path
) -> List[Event]:
events: List[Event] = []
for some_path_details in related_path_details:
events += [
build_event_for_some_path_details(
some_path_details=some_path_details, path=path
)
]
sorted_events = sorted(events, key=lambda x: x.timestamp)
for event in sorted_events:
event.timestamp = format_timestamp_for_go(timestamp=event.timestamp)
return sorted_events
def build_json_lines_from_events(events: List[Event]) -> str:
return "\n".join(
[
json.dumps(
{
"event_id": x.event_id,
"timestamp": x.timestamp,
"camera_name": x.camera_name,
"high_res_image_path": x.high_res_image_path,
"low_res_image_path": x.low_res_image_path,
"high_res_video_path": x.high_res_video_path,
"low_res_video_path": x.low_res_video_path,
}
)
for x in events
]
)
def write_to_file(path: Path, data: str):
with open(str(path), "w") as f:
f.write(data)
def rebuild_event_store(root_path: Path, tzinfo: datetime.tzinfo, json_path: Path, parse_method: Callable, get_key_methods: List[Callable]):
print(f"getting sorted paths from {root_path}...")
sorted_paths = get_sorted_paths(path=root_path)
print(f"got {len(sorted_paths)} sorted paths")
print("parsing sorted paths...")
some_path_details = parse_paths(paths=sorted_paths, tzinfo=tzinfo, parse_method=parse_method)
print(f"got {len(some_path_details)} parsed paths")
print("relating parsed paths...")
related_path_details = relate_path_details(some_path_details=some_path_details,
get_key_methods=get_key_methods)
print(f"got {len(related_path_details)} related paths")
print("building events...")
events = build_events_for_related_path_details(
related_path_details=related_path_details, path=root_path
)
print(f"built {len(events)} events")
print("building json lines...")
json_lines = build_json_lines_from_events(events=events)
print(f"built {len(json_lines)} bytes")
print(f"writing to {json_path}")
write_to_file(path=json_path, data=json_lines)
print("done.")
| [((1047, 1063), 'dateutil.parser.parse', 'parse', (['timestamp'], {}), '(timestamp)\n', (1052, 1063), False, 'from dateutil.parser import parse\n'), ((6220, 6505), 'json.dumps', 'json.dumps', (["{'event_id': x.event_id, 'timestamp': x.timestamp, 'camera_name': x.\n camera_name, 'high_res_image_path': x.high_res_image_path,\n 'low_res_image_path': x.low_res_image_path, 'high_res_video_path': x.\n high_res_video_path, 'low_res_video_path': x.low_res_video_path}"], {}), "({'event_id': x.event_id, 'timestamp': x.timestamp, 'camera_name':\n x.camera_name, 'high_res_image_path': x.high_res_image_path,\n 'low_res_image_path': x.low_res_image_path, 'high_res_video_path': x.\n high_res_video_path, 'low_res_video_path': x.low_res_video_path})\n", (6230, 6505), False, 'import json\n'), ((868, 878), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (872, 878), False, 'from pathlib import Path\n')] |
schoolio-co/schoolio_site | schoolio/migrations/0005_auto_20190927_1423.py | a616807c504c7a7cab3b9f7c3dab42f827cb0580 | # Generated by Django 2.2.1 on 2019-09-27 14:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('schoolio', '0004_auto_20190927_0405'),
]
operations = [
migrations.AlterField(
model_name='student_assessment',
name='assessment_mark',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='student_assessment',
name='assessment_score',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='student_assessment',
name='understanding_level',
field=models.CharField(blank=True, max_length=150, null=True),
),
]
| [((358, 400), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (377, 400), False, 'from django.db import migrations, models\n'), ((544, 586), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (563, 586), False, 'from django.db import migrations, models\n'), ((733, 788), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(150)', 'null': '(True)'}), '(blank=True, max_length=150, null=True)\n', (749, 788), False, 'from django.db import migrations, models\n')] |
linerxliner/ValCAT | taattack/_datasets/dataset.py | e62985c6c64f6415bb2bb4716bd02d9686badd47 | class Dataset:
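    """Iterable wrapper around a DataFrame-like table of text/label examples.

    Subclasses set ``_data`` and the column-name attributes; iteration and
    indexing yield ``(text, label)`` or ``(text, second_text, label)`` tuples.
    """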
_data = None
_first_text_col = 'text'
_second_text_col = None
_label_col = 'label'
def __init__(self):
self._idx = 0
if self._data is None:
raise Exception('Dataset is not loaded')
def __iter__(self):
return self
def __next__(self):
if self._idx >= len(self._data):
raise StopIteration
else:
item = self._data.iloc[self._idx]
self._idx += 1
if self._second_text_col:
return item[self._first_text_col], item[self._second_text_col], int(item[self._label_col])
else:
return item[self._first_text_col], int(item[self._label_col])
def __getitem__(self, item):
if isinstance(item, int):
item = self._data.iloc[item]
if self._second_text_col:
return item[self._first_text_col], item[self._second_text_col], int(item[self._label_col])
else:
return item[self._first_text_col], int(item[self._label_col])
elif isinstance(item, slice):
start = item.start if item.start else 0
stop = item.stop if item.stop else len(self._data)
step = item.step if item.step else 1
items = self._data.iloc[start:stop:step]
if self._second_text_col:
return [(item[self._first_text_col], item[self._second_text_col], int(item[self._label_col])) for _, item in items.iterrows()]
else:
return [(item[self._first_text_col], int(item[self._label_col])) for _, item in items.iterrows()]
else:
raise KeyError
def __str__(self):
return str(self._data) | [] |
akhilpandey95/scholarlyimpact | scripts/extract_gs_citations.py | 215ae832c90f0564fa0301e4c3f1c99525617625 | #!/usr/bin/env python3
# This Source Code Form is subject to the terms of the MIT
# License. If a copy of the same was not distributed with this
# file, You can obtain one at
# https://github.com/akhilpandey95/scholarlyimpact/blob/master/LICENSE.
import os
import csv
import time
import glob
import json
import requests
import subprocess
import numpy as np
import pandas as pd
from tqdm import tqdm
from ast import literal_eval
from fp.fp import FreeProxy
from torrequest import TorRequest
from scholarly import scholarly
from collections import Counter, OrderedDict, deque
from operator import attrgetter
# class definition for Rate limiting
class RateLimiter:
"""
Class object for putting a rate limit on the
number of requests made
Parameters
----------
No arguments
Returns
-------
Nothing
"""
def __init__(self, maxRate=5, timeUnit=1):
self.timeUnit = timeUnit
self.deque = deque(maxlen=maxRate)
def __call__(self):
if self.deque.maxlen == len(self.deque):
cTime = time.time()
if cTime - self.deque[0] > self.timeUnit:
self.deque.append(cTime)
return False
else:
return True
self.deque.append(time.time())
return False
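# A minimal usage sketch for RateLimiter (illustration only, never called by
# this script); `_rate_limited_fetch_example` and `url` are hypothetical names.
def _rate_limited_fetch_example(url):
    limiter = RateLimiter(maxRate=5, timeUnit=1)
    while limiter():          # True while the rolling window is still full
        time.sleep(0.1)       # back off until a slot frees up
    return requests.get(url)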
# function for obtaining the citations using the dimensions web url
def get_gs_citations_web(title):
"""
Use the google scholar web URL and requests API to obtain the citations
for a given title of a scholarly article
Parameters
----------
arg1 | title: str
The title of a scholarly article
Returns
-------
Dictionary
dict
"""
while True:
try:
# call the lumproxy object
scholarly.use_lum_proxy()
# make the query
query = scholarly.search_pubs(title)
# come out
break
except Exception as e:
            # bail out of the loop if the query fails
break
# return the response dict
return next(query)
# function for assigning new IP address
def assign_new_ip(text=False):
"""
Reset the identity using TorRequest
Parameters
----------
arg1 [OPTIONAL]| text: bool
A boolean flag to return the IP address tuple (old, morphed)
Returns
-------
boolean
True/False
"""
try:
# pass the hashed password
req = TorRequest(password='scholarly_password')
# return the ip address
normal_identity = requests.get('http://ipecho.net/plain')
# reset the identity using Tor
req.reset_identity()
# make a request now
morphed_identity = req.get('http://ipecho.net/plain')
# return the status depending on the flag
if morphed_identity != normal_identity:
if text == True:
# return the ip address pairs as a tuple
return (normal_identity.text, morphed_identity.text)
else:
return True
else:
# return just the status
return False
except:
return False
# function for assigning a new proxy
def set_new_proxy(text=True):
"""
Reset the identity using FreeProxy
Parameters
----------
arg1 [OPTIONAL]| text: bool
A boolean flag to return the IP address tuple (old, morphed)
Returns
-------
Address
fp.fp.FreeProxy
"""
while True:
# call the freeproxy object
proxy = FreeProxy(rand=True, timeout=1).get()
# allocate the proxy address to scholarly
proxy_works = scholarly.use_proxy(http=proxy, https=proxy)
        # check if the proxy address works
if proxy_works:
# come out
break
# print the ip address depending on the text argument
if text:
# print the working ip
print("Working proxy:", proxy)
# return the proxy details
return proxy
# function for connecting tor to scholarly
def scholarly_init_connection():
"""
Bind TorRequest to Scholarly service
Parameters
----------
No arguments
Returns
-------
Nothing
"""
while True:
# assign new tor identity
ips = assign_new_ip(text=True)
# use the tor request for scholarly
tor_req = scholarly.use_tor(tor_sock_port=9050, \
tor_control_port=9051, \
tor_pw="scholarly_password")
if tor_req:
# come out of the loop, when successful
break
# print the tor identity
print("Working Tor identity:", ips[1])
# function for restarting the system tor service
def restart_tor_system_service(text=False):
"""
Use the os module to restart the tor service
Parameters
----------
arg1 [OPTIONAL]| text: bool
A boolean flag to return the status of the command
Returns
-------
Boolean
bool
"""
# subprocess command for stopping the tor service
tor_stop = subprocess.Popen(['service', 'tor', 'stop'])
# subprocess command for restarting the tor service
tor_restart = subprocess.Popen(['service', 'tor', 'restart'])
# subprocess command for restarting the tor service
tor_status = subprocess.Popen(['service', 'tor', 'status'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True)
# if the label is set to true then print the output
if text:
for output in tor_status.stdout.readlines():
print(output.strip())
# pipe out the stdout, stderr for the subprocess
stdout, stderr = tor_status.communicate()
if len(stderr) > 0:
# return False
return False
else:
# return true if successful
return True
def get_articleInfo(title):
"""
Use the google scholar web URL and requests API to obtain the citations
for a given title of a scholarly article
Parameters
----------
arg1 | title: str
The title of a scholarly article
Returns
-------
Dictionary
dict
"""
while True:
try:
# init the connection with scholarly and tor
scholarly_init_connection()
# search for the query
search_query = scholarly.search_pubs(title)
# print success
print("Got the results of the query")
# come out of the loop
break
except Exception as e:
# print error message
print("Attempt Failed, patching new tor identity")
# restart the system tor service
restart_tor_system_service(text=False)
# assign new connection again
scholarly_init_connection()
# obtain the bib entry of the scholarly article
pub = next(search_query)
# return the bib entry
return pub
if __name__ == '__main__':
    # count the number of titles to process
    length_of_file = len(open('paper_titles.txt').readlines())
    # read all of the titles into a list
    alt_list = open('paper_titles.txt').readlines()
# iterate over the length of the file
# write the results to a file
for i in tqdm(range(length_of_file)):
alt_info = open('paper_titles.txt', 'r+')
cit_info = open('citations_gs.csv', 'a')
cit_info.write(str(alt_list[i].strip(
).split('\t')[0]) + ',' + str(get_articleInfo(alt_list[i].strip().split('\t')[1])))
cit_info.write('\n')
cit_info.close()
alt_info.seek(0)
alt_info.truncate()
alt_info.writelines(alt_list[i+1:])
alt_info.close()
| [((5053, 5097), 'subprocess.Popen', 'subprocess.Popen', (["['service', 'tor', 'stop']"], {}), "(['service', 'tor', 'stop'])\n", (5069, 5097), False, 'import subprocess\n'), ((5173, 5220), 'subprocess.Popen', 'subprocess.Popen', (["['service', 'tor', 'restart']"], {}), "(['service', 'tor', 'restart'])\n", (5189, 5220), False, 'import subprocess\n'), ((5295, 5418), 'subprocess.Popen', 'subprocess.Popen', (["['service', 'tor', 'status']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'universal_newlines': '(True)'}), "(['service', 'tor', 'status'], stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, universal_newlines=True)\n", (5311, 5418), False, 'import subprocess\n'), ((2409, 2450), 'torrequest.TorRequest', 'TorRequest', ([], {'password': '"""scholarly_password"""'}), "(password='scholarly_password')\n", (2419, 2450), False, 'from torrequest import TorRequest\n'), ((2510, 2549), 'requests.get', 'requests.get', (['"""http://ipecho.net/plain"""'], {}), "('http://ipecho.net/plain')\n", (2522, 2549), False, 'import requests\n'), ((3613, 3657), 'scholarly.scholarly.use_proxy', 'scholarly.use_proxy', ([], {'http': 'proxy', 'https': 'proxy'}), '(http=proxy, https=proxy)\n', (3632, 3657), False, 'from scholarly import scholarly\n'), ((4325, 4419), 'scholarly.scholarly.use_tor', 'scholarly.use_tor', ([], {'tor_sock_port': '(9050)', 'tor_control_port': '(9051)', 'tor_pw': '"""scholarly_password"""'}), "(tor_sock_port=9050, tor_control_port=9051, tor_pw=\n 'scholarly_password')\n", (4342, 4419), False, 'from scholarly import scholarly\n'), ((1746, 1771), 'scholarly.scholarly.use_lum_proxy', 'scholarly.use_lum_proxy', ([], {}), '()\n', (1769, 1771), False, 'from scholarly import scholarly\n'), ((1822, 1850), 'scholarly.scholarly.search_pubs', 'scholarly.search_pubs', (['title'], {}), '(title)\n', (1843, 1850), False, 'from scholarly import scholarly\n'), ((6382, 6410), 'scholarly.scholarly.search_pubs', 'scholarly.search_pubs', (['title'], {}), '(title)\n', (6403, 6410), False, 'from scholarly import scholarly\n'), ((3502, 3533), 'fp.fp.FreeProxy', 'FreeProxy', ([], {'rand': '(True)', 'timeout': '(1)'}), '(rand=True, timeout=1)\n', (3511, 3533), False, 'from fp.fp import FreeProxy\n')] |
Jianwei-Wang/python2.7_lib | dist-packages/reportlab/pdfgen/pathobject.py | 911b8e81512e5ac5f13e669ab46f7693ed897378 | #Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/pdfgen/pathobject.py
__version__=''' $Id$ '''
__doc__="""
PDFPathObject is an efficient way to draw paths on a Canvas. Do not
instantiate directly, obtain one from the Canvas instead.
Progress Reports:
8.83, 2000-01-13, gmcm: created from pdfgen.py
"""
from reportlab.pdfgen import pdfgeom
from reportlab.lib.rl_accel import fp_str
class PDFPathObject:
"""Represents a graphic path. There are certain 'modes' to PDF
drawing, and making a separate object to expose Path operations
ensures they are completed with no run-time overhead. Ask
the Canvas for a PDFPath with getNewPathObject(); moveto/lineto/
curveto wherever you want; add whole shapes; and then add it back
into the canvas with one of the relevant operators.
    Path objects are probably not long, so we pack the code onto one line.
    The code argument allows a canvas to have the operations appended to it
    directly, avoiding the final getCode call.
"""
def __init__(self,code=None):
self._code = (code,[])[code is None]
self._code_append = self._init_code_append
def _init_code_append(self,c):
assert c.endswith(' m') or c.endswith(' re'), 'path must start with a moveto or rect'
code_append = self._code.append
code_append('n')
code_append(c)
self._code_append = code_append
def getCode(self):
"pack onto one line; used internally"
return ' '.join(self._code)
def moveTo(self, x, y):
self._code_append('%s m' % fp_str(x,y))
def lineTo(self, x, y):
self._code_append('%s l' % fp_str(x,y))
def curveTo(self, x1, y1, x2, y2, x3, y3):
self._code_append('%s c' % fp_str(x1, y1, x2, y2, x3, y3))
def arc(self, x1,y1, x2,y2, startAng=0, extent=90):
"""Contributed to piddlePDF by Robert Kern, 28/7/99.
Draw a partial ellipse inscribed within the rectangle x1,y1,x2,y2,
starting at startAng degrees and covering extent degrees. Angles
start with 0 to the right (+x) and increase counter-clockwise.
These should have x1<x2 and y1<y2.
The algorithm is an elliptical generalization of the formulae in
Jim Fitzsimmon's TeX tutorial <URL: http://www.tinaja.com/bezarc1.pdf>."""
self._curves(pdfgeom.bezierArc(x1,y1, x2,y2, startAng, extent))
def arcTo(self, x1,y1, x2,y2, startAng=0, extent=90):
"""Like arc, but draws a line from the current point to
the start if the start is not the current point."""
self._curves(pdfgeom.bezierArc(x1,y1, x2,y2, startAng, extent),'lineTo')
def rect(self, x, y, width, height):
"""Adds a rectangle to the path"""
self._code_append('%s re' % fp_str((x, y, width, height)))
def ellipse(self, x, y, width, height):
"""adds an ellipse to the path"""
self._curves(pdfgeom.bezierArc(x, y, x + width,y + height, 0, 360))
def _curves(self,curves,initial='moveTo'):
getattr(self,initial)(*curves[0][:2])
for curve in curves:
self.curveTo(*curve[2:])
def circle(self, x_cen, y_cen, r):
"""adds a circle to the path"""
x1 = x_cen - r
y1 = y_cen - r
width = height = 2*r
self.ellipse(x1, y1, width, height)
def roundRect(self, x, y, width, height, radius):
"""Draws a rectangle with rounded corners. The corners are
approximately quadrants of a circle, with the given radius."""
#use a precomputed set of factors for the bezier approximation
#to a circle. There are six relevant points on the x axis and y axis.
#sketch them and it should all make sense!
t = 0.4472 * radius
x0 = x
x1 = x0 + t
x2 = x0 + radius
x3 = x0 + width - radius
x4 = x0 + width - t
x5 = x0 + width
y0 = y
y1 = y0 + t
y2 = y0 + radius
y3 = y0 + height - radius
y4 = y0 + height - t
y5 = y0 + height
self.moveTo(x2, y0)
self.lineTo(x3, y0) #bottom row
self.curveTo(x4, y0, x5, y1, x5, y2) #bottom right
self.lineTo(x5, y3) #right edge
self.curveTo(x5, y4, x4, y5, x3, y5) #top right
self.lineTo(x2, y5) #top row
self.curveTo(x1, y5, x0, y4, x0, y3) #top left
self.lineTo(x0, y2) #left edge
self.curveTo(x0, y1, x1, y0, x2, y0) #bottom left
self.close()
def close(self):
"draws a line back to where it started"
self._code_append('h')
| [((2432, 2483), 'reportlab.pdfgen.pdfgeom.bezierArc', 'pdfgeom.bezierArc', (['x1', 'y1', 'x2', 'y2', 'startAng', 'extent'], {}), '(x1, y1, x2, y2, startAng, extent)\n', (2449, 2483), False, 'from reportlab.pdfgen import pdfgeom\n'), ((2687, 2738), 'reportlab.pdfgen.pdfgeom.bezierArc', 'pdfgeom.bezierArc', (['x1', 'y1', 'x2', 'y2', 'startAng', 'extent'], {}), '(x1, y1, x2, y2, startAng, extent)\n', (2704, 2738), False, 'from reportlab.pdfgen import pdfgeom\n'), ((3007, 3061), 'reportlab.pdfgen.pdfgeom.bezierArc', 'pdfgeom.bezierArc', (['x', 'y', '(x + width)', '(y + height)', '(0)', '(360)'], {}), '(x, y, x + width, y + height, 0, 360)\n', (3024, 3061), False, 'from reportlab.pdfgen import pdfgeom\n'), ((1666, 1678), 'reportlab.lib.rl_accel.fp_str', 'fp_str', (['x', 'y'], {}), '(x, y)\n', (1672, 1678), False, 'from reportlab.lib.rl_accel import fp_str\n'), ((1743, 1755), 'reportlab.lib.rl_accel.fp_str', 'fp_str', (['x', 'y'], {}), '(x, y)\n', (1749, 1755), False, 'from reportlab.lib.rl_accel import fp_str\n'), ((1839, 1869), 'reportlab.lib.rl_accel.fp_str', 'fp_str', (['x1', 'y1', 'x2', 'y2', 'x3', 'y3'], {}), '(x1, y1, x2, y2, x3, y3)\n', (1845, 1869), False, 'from reportlab.lib.rl_accel import fp_str\n'), ((2868, 2897), 'reportlab.lib.rl_accel.fp_str', 'fp_str', (['(x, y, width, height)'], {}), '((x, y, width, height))\n', (2874, 2897), False, 'from reportlab.lib.rl_accel import fp_str\n')] |
devagul93/Jarvis-System | client/modules/Wikipedia.py | 8d1865b19bb8530831c868147c3b27a1c3bad59b | import wikipedia
import re
import TCPclient as client
WORDS = ["WIKIPEDIA","SEARCH","INFORMATION"]
def handle(text,mic,profile):
# SEARCH ON WIKIPEDIA
# ny = wikipedia.summary("New York",sentences=3);
# mic.say("%s"% ny)
#mic.say("What you want to search about")
#text = mic.activeListen()
print "entering wiki term"
text = client.grab_input()
while text.upper()=="WIKIPEDIA":
print "entering while"
text = client.grab_input()
print text
answer = wikipedia.summary(text,sentences=3)
answer +="\n"
print answer
client.send_out(answer)
#mic.say(answer)
def isValid(text):
return bool(re.search(r'\bwikipedia\b',text, re.IGNORECASE))
| [] |
nikhil-amin/python-mini-project | Madlibs/madlibs.py | cd70a6a43408ce74cff501ce4d4658ab82260c2d | import random
print("Title : Eat, Drink, And Be Sick")
noun = []
for i in range(4):
n = input("Enter noun : ")
noun.append(n)
plural = []
for i in range(6):
pn = input("Enter plural noun : ")
plural.append(pn)
adjective = []
for i in range(2):
a = input("Enter adjective : ")
adjective.append(a)
adverb = input("Enter adverb : ")
letter = input("Enter any letter : ")
body_part = input("Enter any body part : ")
print("An inspector from the Department of Health and ", random.choice(noun) , " Services paid a surprise visit to our " , random.choice(adjective) , " school cafeteria.")
print("The lunch special, prepared by our " , random.choice(adjective) , "dietician, was spaghetti and " , random.choice(noun) , " balls with a choice of either a " , random.choice(noun) , " salad or French " , random.choice(plural) , ".")
print("The inspector found the meat-" , random.choice(plural) , " to be overcooked and discovered a live " , random.choice(noun) , " in the fries,causing him to have a " + body_part + " ache.")
print("In response, he threw up all over his " , random.choice(plural) , ".")
print("In his report, the inspector " + adverb + " recommended that the school cafeteria serve only nutritious " , random.choice(plural) , " as well as low-calorie " , random.choice(plural) , " and that all of the saturated " , random.choice(plural) , " be eliminated.")
print("He rated the cafeteria a " + letter + "-minus.")
| [((493, 512), 'random.choice', 'random.choice', (['noun'], {}), '(noun)\n', (506, 512), False, 'import random\n'), ((559, 583), 'random.choice', 'random.choice', (['adjective'], {}), '(adjective)\n', (572, 583), False, 'import random\n'), ((654, 678), 'random.choice', 'random.choice', (['adjective'], {}), '(adjective)\n', (667, 678), False, 'import random\n'), ((715, 734), 'random.choice', 'random.choice', (['noun'], {}), '(noun)\n', (728, 734), False, 'import random\n'), ((775, 794), 'random.choice', 'random.choice', (['noun'], {}), '(noun)\n', (788, 794), False, 'import random\n'), ((819, 840), 'random.choice', 'random.choice', (['plural'], {}), '(plural)\n', (832, 840), False, 'import random\n'), ((888, 909), 'random.choice', 'random.choice', (['plural'], {}), '(plural)\n', (901, 909), False, 'import random\n'), ((957, 976), 'random.choice', 'random.choice', (['noun'], {}), '(noun)\n', (970, 976), False, 'import random\n'), ((1091, 1112), 'random.choice', 'random.choice', (['plural'], {}), '(plural)\n', (1104, 1112), False, 'import random\n'), ((1235, 1256), 'random.choice', 'random.choice', (['plural'], {}), '(plural)\n', (1248, 1256), False, 'import random\n'), ((1288, 1309), 'random.choice', 'random.choice', (['plural'], {}), '(plural)\n', (1301, 1309), False, 'import random\n'), ((1348, 1369), 'random.choice', 'random.choice', (['plural'], {}), '(plural)\n', (1361, 1369), False, 'import random\n')] |
saikrishnarallabandi/python_connectionist | scripts/transpose.py | e3f8f92c8de865190ad727951eb2b0e514248afc | import numpy
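# Read one .mgc feature file as a text matrix, transpose it, and write the
# result to the output path opened below.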
g = open('/home/srallaba/mgc/transposed/arctic_a0404.mgc','w')
x = numpy.loadtxt('/home/srallaba/mgc_spaces/arctic_a0404.mgc')
numpy.savetxt(g, numpy.transpose(x))
g.close()
| [((82, 141), 'numpy.loadtxt', 'numpy.loadtxt', (['"""/home/srallaba/mgc_spaces/arctic_a0404.mgc"""'], {}), "('/home/srallaba/mgc_spaces/arctic_a0404.mgc')\n", (95, 141), False, 'import numpy\n'), ((159, 177), 'numpy.transpose', 'numpy.transpose', (['x'], {}), '(x)\n', (174, 177), False, 'import numpy\n')] |
yfaway/zone-apis | tests/zone_api_test/core/zone_manager_test.py | 4aa4120cb4a66812dac1d32e86e825bbafe652b8 | from zone_api.core.zone_manager import ZoneManager
from zone_api import platform_encapsulator as pe
from zone_api.core.zone import Zone
from zone_api.core.zone_event import ZoneEvent
from zone_api.core.devices.dimmer import Dimmer
from zone_api.core.devices.switch import Fan, Light, Switch
from zone_api.core.devices.illuminance_sensor import IlluminanceSensor
from zone_api.core.devices.motion_sensor import MotionSensor
from zone_api.core.actions.turn_on_switch import TurnOnSwitch
from zone_api_test.core.device_test import DeviceTest
ILLUMINANCE_THRESHOLD_IN_LUX = 8
INVALID_ITEM_NAME = 'invalid item name'
class ZoneManagerTest(DeviceTest):
""" Unit tests for zone_manager.py. """
def setUp(self):
items = [pe.create_switch_item('TestLightName'),
pe.create_switch_item('TestMotionSensorName'),
pe.create_number_item('IlluminanceSensorName'),
pe.create_string_item('AstroSensorName'),
pe.create_dimmer_item('TestDimmerName'),
pe.create_switch_item('TestFanName'),
]
self.set_items(items)
super(ZoneManagerTest, self).setUp()
[self.lightItem, self.motionSensorItem,
self.illuminanceSensorItem, self.astroSensorItem, self.dimmerItem,
self.fanItem] = items
self.illuminanceSensor = IlluminanceSensor(self.illuminanceSensorItem)
self.light = Light(self.lightItem, 2,
ILLUMINANCE_THRESHOLD_IN_LUX)
self.motionSensor = MotionSensor(self.motionSensorItem)
self.dimmer = Dimmer(self.dimmerItem, 2, 100, "0-23:59")
self.fan = Fan(self.fanItem, 2)
self.zm = ZoneManager()
def tearDown(self):
self.zm.stop_auto_report_watch_dog()
self.fan._cancel_timer()
self.dimmer._cancel_timer()
self.light._cancel_timer()
super(ZoneManagerTest, self).tearDown()
def testAddZone_validZone_zoneAdded(self):
zone1 = Zone('ff')
self.zm.add_zone(zone1)
self.assertEqual(1, len(self.zm.get_zones()))
zone2 = Zone('2f')
self.zm.add_zone(zone2)
self.assertEqual(2, len(self.zm.get_zones()))
def testGetZoneById_validZoneId_returnValidZone(self):
zone1 = Zone('ff')
self.zm.add_zone(zone1)
zone2 = Zone('2f')
self.zm.add_zone(zone2)
self.assertEqual(zone1.get_name(),
self.zm.get_zone_by_id(zone1.get_id()).get_name())
self.assertEqual(zone2.get_name(),
self.zm.get_zone_by_id(zone2.get_id()).get_name())
def testGetZoneById_invalidZoneId_returnNone(self):
self.assertTrue(self.zm.get_zone_by_id('invalid zone id') is None)
def testRemoveZone_validZone_zoneRemoved(self):
zone1 = Zone('ff')
self.zm.add_zone(zone1)
zone2 = Zone('2f')
self.zm.add_zone(zone2)
self.assertEqual(2, len(self.zm.get_zones()))
self.zm.remove_zone(zone1)
self.assertEqual(1, len(self.zm.get_zones()))
self.zm.remove_zone(zone2)
self.assertEqual(0, len(self.zm.get_zones()))
def testContainingZone_validDevice_returnsCorrectZone(self):
zone1 = Zone('ff').add_device(self.light)
zone2 = Zone('sf').add_device(self.fan)
self.zm.add_zone(zone1)
self.zm.add_zone(zone2)
self.assertEqual(zone1,
self.zm.get_immutable_instance().get_containing_zone(self.light))
self.assertEqual(zone2,
self.zm.get_immutable_instance().get_containing_zone(self.fan))
def testContainingZone_invalidDevice_returnsNone(self):
zone1 = Zone('ff').add_device(self.light)
self.zm.add_zone(zone1)
self.assertEqual(None,
self.zm.get_immutable_instance().get_containing_zone(self.fan))
def testGetDevicesByType_variousScenarios_returnsCorrectList(self):
zone1 = Zone('ff').add_device(self.light)
zone2 = Zone('sf').add_device(self.fan)
self.zm.add_zone(zone1)
self.zm.add_zone(zone2)
self.assertEqual(2, len(self.zm.get_zones()))
self.assertEqual(1, len(self.zm.get_devices_by_type(Light)))
self.assertEqual(2, len(self.zm.get_devices_by_type(Switch)))
self.assertEqual(0, len(self.zm.get_devices_by_type(Dimmer)))
def testOnMotionSensorTurnedOn_noZone_returnsFalse(self):
self.assertFalse(self.zm.get_immutable_instance().dispatch_event(
ZoneEvent.MOTION, pe.get_event_dispatcher(), self.motionSensor, pe.create_string_item(INVALID_ITEM_NAME)))
def testOnMotionSensorTurnedOn_withNonApplicableZone_returnsFalse(self):
zone = Zone('ff', [self.light, self.motionSensor])
self.zm.add_zone(zone)
self.assertFalse(self.zm.get_immutable_instance().dispatch_event(
ZoneEvent.MOTION, pe.get_event_dispatcher(), self.motionSensor, pe.create_string_item(INVALID_ITEM_NAME)))
def testOnMotionSensorTurnedOn_withApplicableZone_returnsTrue(self):
self.assertFalse(self.light.is_on())
pe.set_number_value(self.illuminanceSensorItem, ILLUMINANCE_THRESHOLD_IN_LUX - 1)
zone = Zone('ff', [self.light, self.motionSensor, self.illuminanceSensor])
zone = zone.add_action(TurnOnSwitch())
self.zm.add_zone(zone)
self.assertTrue(self.zm.get_immutable_instance().dispatch_event(
ZoneEvent.MOTION, pe.get_event_dispatcher(), self.motionSensor, self.motionSensor.get_item()))
def testOnSwitchTurnedOn_noZone_returnsFalse(self):
self.assertFalse(self.zm.get_immutable_instance().on_switch_turned_on(
pe.get_event_dispatcher(), self.light, pe.create_string_item(INVALID_ITEM_NAME)))
def testOnSwitchTurnedOn_withNonApplicableZone_returnsFalse(self):
zone = Zone('ff', [self.light, self.motionSensor])
self.zm.add_zone(zone)
self.assertFalse(self.zm.get_immutable_instance().on_switch_turned_on(
pe.get_event_dispatcher(), self.light, pe.create_string_item(INVALID_ITEM_NAME)))
def testOnSwitchTurnedOn_withApplicableZone_returnsTrue(self):
zone = Zone('ff', [self.light, self.motionSensor])
self.zm.add_zone(zone)
self.assertTrue(self.zm.get_immutable_instance().on_switch_turned_on(
pe.get_event_dispatcher(), self.light, self.light.get_item()))
def testOnSwitchTurnedOff_noZone_returnsFalse(self):
self.assertFalse(self.zm.get_immutable_instance().on_switch_turned_off(
pe.get_event_dispatcher(), self.light, pe.create_string_item(INVALID_ITEM_NAME)))
def testOnSwitchTurnedOff_withNonApplicableZone_returnsFalse(self):
zone = Zone('ff', [self.light, self.motionSensor])
self.zm.add_zone(zone)
self.assertFalse(self.zm.get_immutable_instance().on_switch_turned_off(
pe.get_event_dispatcher(), self.light, pe.create_string_item(INVALID_ITEM_NAME)))
def testOnSwitchTurnedOff_withApplicableZone_returnsTrue(self):
zone = Zone('ff', [self.light, self.motionSensor])
self.zm.add_zone(zone)
self.assertTrue(self.zm.get_immutable_instance().on_switch_turned_off(
pe.get_event_dispatcher(), self.light, self.light.get_item()))
| [((1361, 1406), 'zone_api.core.devices.illuminance_sensor.IlluminanceSensor', 'IlluminanceSensor', (['self.illuminanceSensorItem'], {}), '(self.illuminanceSensorItem)\n', (1378, 1406), False, 'from zone_api.core.devices.illuminance_sensor import IlluminanceSensor\n'), ((1428, 1482), 'zone_api.core.devices.switch.Light', 'Light', (['self.lightItem', '(2)', 'ILLUMINANCE_THRESHOLD_IN_LUX'], {}), '(self.lightItem, 2, ILLUMINANCE_THRESHOLD_IN_LUX)\n', (1433, 1482), False, 'from zone_api.core.devices.switch import Fan, Light, Switch\n'), ((1538, 1573), 'zone_api.core.devices.motion_sensor.MotionSensor', 'MotionSensor', (['self.motionSensorItem'], {}), '(self.motionSensorItem)\n', (1550, 1573), False, 'from zone_api.core.devices.motion_sensor import MotionSensor\n'), ((1596, 1638), 'zone_api.core.devices.dimmer.Dimmer', 'Dimmer', (['self.dimmerItem', '(2)', '(100)', '"""0-23:59"""'], {}), "(self.dimmerItem, 2, 100, '0-23:59')\n", (1602, 1638), False, 'from zone_api.core.devices.dimmer import Dimmer\n'), ((1658, 1678), 'zone_api.core.devices.switch.Fan', 'Fan', (['self.fanItem', '(2)'], {}), '(self.fanItem, 2)\n', (1661, 1678), False, 'from zone_api.core.devices.switch import Fan, Light, Switch\n'), ((1698, 1711), 'zone_api.core.zone_manager.ZoneManager', 'ZoneManager', ([], {}), '()\n', (1709, 1711), False, 'from zone_api.core.zone_manager import ZoneManager\n'), ((1999, 2009), 'zone_api.core.zone.Zone', 'Zone', (['"""ff"""'], {}), "('ff')\n", (2003, 2009), False, 'from zone_api.core.zone import Zone\n'), ((2113, 2123), 'zone_api.core.zone.Zone', 'Zone', (['"""2f"""'], {}), "('2f')\n", (2117, 2123), False, 'from zone_api.core.zone import Zone\n'), ((2286, 2296), 'zone_api.core.zone.Zone', 'Zone', (['"""ff"""'], {}), "('ff')\n", (2290, 2296), False, 'from zone_api.core.zone import Zone\n'), ((2346, 2356), 'zone_api.core.zone.Zone', 'Zone', (['"""2f"""'], {}), "('2f')\n", (2350, 2356), False, 'from zone_api.core.zone import Zone\n'), ((2829, 2839), 'zone_api.core.zone.Zone', 'Zone', (['"""ff"""'], {}), "('ff')\n", (2833, 2839), False, 'from zone_api.core.zone import Zone\n'), ((2889, 2899), 'zone_api.core.zone.Zone', 'Zone', (['"""2f"""'], {}), "('2f')\n", (2893, 2899), False, 'from zone_api.core.zone import Zone\n'), ((4754, 4797), 'zone_api.core.zone.Zone', 'Zone', (['"""ff"""', '[self.light, self.motionSensor]'], {}), "('ff', [self.light, self.motionSensor])\n", (4758, 4797), False, 'from zone_api.core.zone import Zone\n'), ((5150, 5236), 'zone_api.platform_encapsulator.set_number_value', 'pe.set_number_value', (['self.illuminanceSensorItem', '(ILLUMINANCE_THRESHOLD_IN_LUX - 1)'], {}), '(self.illuminanceSensorItem, \n ILLUMINANCE_THRESHOLD_IN_LUX - 1)\n', (5169, 5236), True, 'from zone_api import platform_encapsulator as pe\n'), ((5248, 5315), 'zone_api.core.zone.Zone', 'Zone', (['"""ff"""', '[self.light, self.motionSensor, self.illuminanceSensor]'], {}), "('ff', [self.light, self.motionSensor, self.illuminanceSensor])\n", (5252, 5315), False, 'from zone_api.core.zone import Zone\n'), ((5892, 5935), 'zone_api.core.zone.Zone', 'Zone', (['"""ff"""', '[self.light, self.motionSensor]'], {}), "('ff', [self.light, self.motionSensor])\n", (5896, 5935), False, 'from zone_api.core.zone import Zone\n'), ((6224, 6267), 'zone_api.core.zone.Zone', 'Zone', (['"""ff"""', '[self.light, self.motionSensor]'], {}), "('ff', [self.light, self.motionSensor])\n", (6228, 6267), False, 'from zone_api.core.zone import Zone\n'), ((6773, 6816), 'zone_api.core.zone.Zone', 'Zone', (['"""ff"""', '[self.light, 
self.motionSensor]'], {}), "('ff', [self.light, self.motionSensor])\n", (6777, 6816), False, 'from zone_api.core.zone import Zone\n'), ((7107, 7150), 'zone_api.core.zone.Zone', 'Zone', (['"""ff"""', '[self.light, self.motionSensor]'], {}), "('ff', [self.light, self.motionSensor])\n", (7111, 7150), False, 'from zone_api.core.zone import Zone\n'), ((735, 773), 'zone_api.platform_encapsulator.create_switch_item', 'pe.create_switch_item', (['"""TestLightName"""'], {}), "('TestLightName')\n", (756, 773), True, 'from zone_api import platform_encapsulator as pe\n'), ((792, 837), 'zone_api.platform_encapsulator.create_switch_item', 'pe.create_switch_item', (['"""TestMotionSensorName"""'], {}), "('TestMotionSensorName')\n", (813, 837), True, 'from zone_api import platform_encapsulator as pe\n'), ((856, 902), 'zone_api.platform_encapsulator.create_number_item', 'pe.create_number_item', (['"""IlluminanceSensorName"""'], {}), "('IlluminanceSensorName')\n", (877, 902), True, 'from zone_api import platform_encapsulator as pe\n'), ((921, 961), 'zone_api.platform_encapsulator.create_string_item', 'pe.create_string_item', (['"""AstroSensorName"""'], {}), "('AstroSensorName')\n", (942, 961), True, 'from zone_api import platform_encapsulator as pe\n'), ((980, 1019), 'zone_api.platform_encapsulator.create_dimmer_item', 'pe.create_dimmer_item', (['"""TestDimmerName"""'], {}), "('TestDimmerName')\n", (1001, 1019), True, 'from zone_api import platform_encapsulator as pe\n'), ((1038, 1074), 'zone_api.platform_encapsulator.create_switch_item', 'pe.create_switch_item', (['"""TestFanName"""'], {}), "('TestFanName')\n", (1059, 1074), True, 'from zone_api import platform_encapsulator as pe\n'), ((5347, 5361), 'zone_api.core.actions.turn_on_switch.TurnOnSwitch', 'TurnOnSwitch', ([], {}), '()\n', (5359, 5361), False, 'from zone_api.core.actions.turn_on_switch import TurnOnSwitch\n'), ((3249, 3259), 'zone_api.core.zone.Zone', 'Zone', (['"""ff"""'], {}), "('ff')\n", (3253, 3259), False, 'from zone_api.core.zone import Zone\n'), ((3299, 3309), 'zone_api.core.zone.Zone', 'Zone', (['"""sf"""'], {}), "('sf')\n", (3303, 3309), False, 'from zone_api.core.zone import Zone\n'), ((3717, 3727), 'zone_api.core.zone.Zone', 'Zone', (['"""ff"""'], {}), "('ff')\n", (3721, 3727), False, 'from zone_api.core.zone import Zone\n'), ((3993, 4003), 'zone_api.core.zone.Zone', 'Zone', (['"""ff"""'], {}), "('ff')\n", (3997, 4003), False, 'from zone_api.core.zone import Zone\n'), ((4043, 4053), 'zone_api.core.zone.Zone', 'Zone', (['"""sf"""'], {}), "('sf')\n", (4047, 4053), False, 'from zone_api.core.zone import Zone\n'), ((4572, 4597), 'zone_api.platform_encapsulator.get_event_dispatcher', 'pe.get_event_dispatcher', ([], {}), '()\n', (4595, 4597), True, 'from zone_api import platform_encapsulator as pe\n'), ((4618, 4658), 'zone_api.platform_encapsulator.create_string_item', 'pe.create_string_item', (['INVALID_ITEM_NAME'], {}), '(INVALID_ITEM_NAME)\n', (4639, 4658), True, 'from zone_api import platform_encapsulator as pe\n'), ((4934, 4959), 'zone_api.platform_encapsulator.get_event_dispatcher', 'pe.get_event_dispatcher', ([], {}), '()\n', (4957, 4959), True, 'from zone_api import platform_encapsulator as pe\n'), ((4980, 5020), 'zone_api.platform_encapsulator.create_string_item', 'pe.create_string_item', (['INVALID_ITEM_NAME'], {}), '(INVALID_ITEM_NAME)\n', (5001, 5020), True, 'from zone_api import platform_encapsulator as pe\n'), ((5498, 5523), 'zone_api.platform_encapsulator.get_event_dispatcher', 'pe.get_event_dispatcher', ([], {}), '()\n', 
(5521, 5523), True, 'from zone_api import platform_encapsulator as pe\n'), ((5723, 5748), 'zone_api.platform_encapsulator.get_event_dispatcher', 'pe.get_event_dispatcher', ([], {}), '()\n', (5746, 5748), True, 'from zone_api import platform_encapsulator as pe\n'), ((5762, 5802), 'zone_api.platform_encapsulator.create_string_item', 'pe.create_string_item', (['INVALID_ITEM_NAME'], {}), '(INVALID_ITEM_NAME)\n', (5783, 5802), True, 'from zone_api import platform_encapsulator as pe\n'), ((6059, 6084), 'zone_api.platform_encapsulator.get_event_dispatcher', 'pe.get_event_dispatcher', ([], {}), '()\n', (6082, 6084), True, 'from zone_api import platform_encapsulator as pe\n'), ((6098, 6138), 'zone_api.platform_encapsulator.create_string_item', 'pe.create_string_item', (['INVALID_ITEM_NAME'], {}), '(INVALID_ITEM_NAME)\n', (6119, 6138), True, 'from zone_api import platform_encapsulator as pe\n'), ((6390, 6415), 'zone_api.platform_encapsulator.get_event_dispatcher', 'pe.get_event_dispatcher', ([], {}), '()\n', (6413, 6415), True, 'from zone_api import platform_encapsulator as pe\n'), ((6603, 6628), 'zone_api.platform_encapsulator.get_event_dispatcher', 'pe.get_event_dispatcher', ([], {}), '()\n', (6626, 6628), True, 'from zone_api import platform_encapsulator as pe\n'), ((6642, 6682), 'zone_api.platform_encapsulator.create_string_item', 'pe.create_string_item', (['INVALID_ITEM_NAME'], {}), '(INVALID_ITEM_NAME)\n', (6663, 6682), True, 'from zone_api import platform_encapsulator as pe\n'), ((6941, 6966), 'zone_api.platform_encapsulator.get_event_dispatcher', 'pe.get_event_dispatcher', ([], {}), '()\n', (6964, 6966), True, 'from zone_api import platform_encapsulator as pe\n'), ((6980, 7020), 'zone_api.platform_encapsulator.create_string_item', 'pe.create_string_item', (['INVALID_ITEM_NAME'], {}), '(INVALID_ITEM_NAME)\n', (7001, 7020), True, 'from zone_api import platform_encapsulator as pe\n'), ((7274, 7299), 'zone_api.platform_encapsulator.get_event_dispatcher', 'pe.get_event_dispatcher', ([], {}), '()\n', (7297, 7299), True, 'from zone_api import platform_encapsulator as pe\n')] |
Zasling/meiduo_mall33 | meiduo_mall/meiduo_mall/apps/orders/views.py | ec55597758d5052b311d65aee44533b001f6ddd8 | from rest_framework.response import Response
from rest_framework.views import APIView
from django_redis import get_redis_connection
from goods.models import SKU
from decimal import Decimal
from rest_framework.generics import CreateAPIView,ListAPIView
from rest_framework.mixins import ListModelMixin
from orders.serializers import OrderShowSerializer, OrderSaveSerializer, OrderListSerializer, CommentSerializers, \
CommentSaveSerializers, CommentShowSerializers
from users.models import User
from orders.models import OrderInfo,OrderGoods
from orders.utils import PageNum
from rest_framework.filters import OrderingFilter
# Display order information
class OrdersShowView(APIView):
def get(self, request):
        # Get the user object
user = request.user
        # Establish a redis connection
conn = get_redis_connection('cart')
        # Get the hash data: sku_id, count
sku_id_count = conn.hgetall('cart_%s' %user.id) # {10:1}
        # Convert the byte-type data to integers
cart = {}
for sku_id, count in sku_id_count.items():
cart[int(sku_id)] = int(count)
        # Get the set data (selected sku ids)
sku_ids = conn.smembers('cart_selected_%s' %user.id)
        # Query all SKU objects that are in the selected state
skus = SKU.objects.filter(id__in=sku_ids)
        # Add a count attribute to each SKU object (the SKU table has no count field, so it must be added manually)
for sku in skus:
sku.count = cart[sku.id]
        # Generate the freight charge
freight = Decimal(10.00)
        # Serialize and return the SKU objects
ser = OrderShowSerializer({'freight': freight, 'skus': skus})
return Response(ser.data)
# Save the order information
class OrderSaveView(ListModelMixin, CreateAPIView):
serializer_class = OrderSaveSerializer
# Fetch the order list data
class OrderListView(ListAPIView):
pagination_class = PageNum
serializer_class = OrderListSerializer
def get_queryset(self):
user = self.request.user
order = OrderInfo.objects.filter(user = user)
return order
# Comments - get the product information for an order
class OrderComment(ListAPIView):
serializer_class = CommentSerializers
def get_queryset(self):
order_id = self.kwargs['order_id']
skus = OrderGoods.objects.filter(order_id = order_id, is_commented=False)
return skus
# Save a comment
class SaveSkuComment(CreateAPIView):
serializer_class = CommentSaveSerializers
# Display comments on the product detail page
class ShowComment(ListAPIView):
serializer_class = CommentShowSerializers
def get_queryset(self):
        # Get sku_id from kwargs
sku_id = self.kwargs['sku_id']
        # Get the product's commented order records
orders = OrderGoods.objects.filter(sku_id=sku_id, is_commented = True)
for sku in orders:
skuinfo = OrderInfo.objects.get(order_id=sku.order_id)
user = User.objects.get(id = skuinfo.user_id)
            # Get the username and check whether the comment is anonymous
sku.username = user.username
if sku.is_anonymous == True:
sku.username = '****'
return orders
| [((777, 805), 'django_redis.get_redis_connection', 'get_redis_connection', (['"""cart"""'], {}), "('cart')\n", (797, 805), False, 'from django_redis import get_redis_connection\n'), ((1156, 1190), 'goods.models.SKU.objects.filter', 'SKU.objects.filter', ([], {'id__in': 'sku_ids'}), '(id__in=sku_ids)\n', (1174, 1190), False, 'from goods.models import SKU\n'), ((1334, 1347), 'decimal.Decimal', 'Decimal', (['(10.0)'], {}), '(10.0)\n', (1341, 1347), False, 'from decimal import Decimal\n'), ((1383, 1438), 'orders.serializers.OrderShowSerializer', 'OrderShowSerializer', (["{'freight': freight, 'skus': skus}"], {}), "({'freight': freight, 'skus': skus})\n", (1402, 1438), False, 'from orders.serializers import OrderShowSerializer, OrderSaveSerializer, OrderListSerializer, CommentSerializers, CommentSaveSerializers, CommentShowSerializers\n'), ((1454, 1472), 'rest_framework.response.Response', 'Response', (['ser.data'], {}), '(ser.data)\n', (1462, 1472), False, 'from rest_framework.response import Response\n'), ((1778, 1813), 'orders.models.OrderInfo.objects.filter', 'OrderInfo.objects.filter', ([], {'user': 'user'}), '(user=user)\n', (1802, 1813), False, 'from orders.models import OrderInfo, OrderGoods\n'), ((2012, 2076), 'orders.models.OrderGoods.objects.filter', 'OrderGoods.objects.filter', ([], {'order_id': 'order_id', 'is_commented': '(False)'}), '(order_id=order_id, is_commented=False)\n', (2037, 2076), False, 'from orders.models import OrderInfo, OrderGoods\n'), ((2410, 2469), 'orders.models.OrderGoods.objects.filter', 'OrderGoods.objects.filter', ([], {'sku_id': 'sku_id', 'is_commented': '(True)'}), '(sku_id=sku_id, is_commented=True)\n', (2435, 2469), False, 'from orders.models import OrderInfo, OrderGoods\n'), ((2521, 2565), 'orders.models.OrderInfo.objects.get', 'OrderInfo.objects.get', ([], {'order_id': 'sku.order_id'}), '(order_id=sku.order_id)\n', (2542, 2565), False, 'from orders.models import OrderInfo, OrderGoods\n'), ((2585, 2621), 'users.models.User.objects.get', 'User.objects.get', ([], {'id': 'skuinfo.user_id'}), '(id=skuinfo.user_id)\n', (2601, 2621), False, 'from users.models import User\n')] |
melmorabity/streamlink | src/streamlink_cli/main.py | 24c59a23103922977991acc28741a323d8efa7a1 | import argparse
import errno
import logging
import os
import platform
import signal
import sys
from collections import OrderedDict
from contextlib import closing
from distutils.version import StrictVersion
from functools import partial
from gettext import gettext
from itertools import chain
from pathlib import Path
from time import sleep
from typing import List
import requests
from socks import __version__ as socks_version
from websocket import __version__ as websocket_version
import streamlink.logger as logger
from streamlink import NoPluginError, PluginError, StreamError, Streamlink, __version__ as streamlink_version
from streamlink.cache import Cache
from streamlink.exceptions import FatalPluginError
from streamlink.plugin import Plugin, PluginOptions
from streamlink.stream import StreamIO, StreamProcess
from streamlink.utils.named_pipe import NamedPipe
from streamlink_cli.argparser import build_parser
from streamlink_cli.compat import DeprecatedPath, is_win32, stdout
from streamlink_cli.console import ConsoleOutput, ConsoleUserInputRequester
from streamlink_cli.constants import CONFIG_FILES, DEFAULT_STREAM_METADATA, LOG_DIR, PLUGIN_DIRS, STREAM_SYNONYMS
from streamlink_cli.output import FileOutput, Output, PlayerOutput
from streamlink_cli.utils import Formatter, HTTPServer, datetime, ignored, progress, stream_to_url
ACCEPTABLE_ERRNO = (errno.EPIPE, errno.EINVAL, errno.ECONNRESET)
try:
ACCEPTABLE_ERRNO += (errno.WSAECONNABORTED,)
except AttributeError:
pass # Not windows
QUIET_OPTIONS = ("json", "stream_url", "subprocess_cmdline", "quiet")
args = None
console: ConsoleOutput = None
output: Output = None
plugin: Plugin = None
stream_fd: StreamIO = None
streamlink: Streamlink = None
log = logging.getLogger("streamlink.cli")
def get_formatter(plugin: Plugin):
return Formatter(
{
"url": lambda: args.url,
"author": lambda: plugin.get_author(),
"category": lambda: plugin.get_category(),
"game": lambda: plugin.get_category(),
"title": lambda: plugin.get_title(),
"time": lambda: datetime.now()
},
{
"time": lambda dt, fmt: dt.strftime(fmt)
}
)
def check_file_output(filename, force):
"""Checks if file already exists and ask the user if it should
be overwritten if it does."""
log.debug("Checking file output")
if os.path.isfile(filename) and not force:
if sys.stdin.isatty():
answer = console.ask(f"File {filename} already exists! Overwrite it? [y/N] ")
if answer.lower() != "y":
sys.exit()
else:
log.error(f"File {filename} already exists, use --force to overwrite it.")
sys.exit()
return FileOutput(filename)
def create_output(formatter: Formatter):
"""Decides where to write the stream.
Depending on arguments it can be one of these:
- The stdout pipe
- A subprocess' stdin pipe
- A named pipe that the subprocess reads from
- A regular file
"""
if (args.output or args.stdout) and (args.record or args.record_and_pipe):
console.exit("Cannot use record options with other file output options.")
if args.output:
if args.output == "-":
out = FileOutput(fd=stdout)
else:
out = check_file_output(formatter.filename(args.output, args.fs_safe_rules), args.force)
elif args.stdout:
out = FileOutput(fd=stdout)
elif args.record_and_pipe:
record = check_file_output(formatter.filename(args.record_and_pipe, args.fs_safe_rules), args.force)
out = FileOutput(fd=stdout, record=record)
else:
http = namedpipe = record = None
if not args.player:
console.exit("The default player (VLC) does not seem to be "
"installed. You must specify the path to a player "
"executable with --player.")
if args.player_fifo:
try:
namedpipe = NamedPipe()
except OSError as err:
console.exit(f"Failed to create pipe: {err}")
elif args.player_http:
http = create_http_server()
if args.record:
record = check_file_output(formatter.filename(args.record, args.fs_safe_rules), args.force)
log.info(f"Starting player: {args.player}")
out = PlayerOutput(
args.player,
args=args.player_args,
quiet=not args.verbose_player,
kill=not args.player_no_close,
namedpipe=namedpipe,
http=http,
record=record,
title=formatter.title(args.title, defaults=DEFAULT_STREAM_METADATA) if args.title else args.url
)
return out
def create_http_server(*_args, **_kwargs):
"""Creates a HTTP server listening on a given host and port.
If host is empty, listen on all available interfaces, and if port is 0,
listen on a random high port.
"""
try:
http = HTTPServer()
http.bind(*_args, **_kwargs)
except OSError as err:
console.exit(f"Failed to create HTTP server: {err}")
return http
def iter_http_requests(server, player):
"""Repeatedly accept HTTP connections on a server.
    Forever when serving externally, or only while the player is running when a
    player instance is given.
"""
while not player or player.running:
try:
yield server.open(timeout=2.5)
except OSError:
continue
def output_stream_http(plugin, initial_streams, formatter: Formatter, external=False, port=0):
"""Continuously output the stream over HTTP."""
global output
if not external:
if not args.player:
console.exit("The default player (VLC) does not seem to be "
"installed. You must specify the path to a player "
"executable with --player.")
server = create_http_server()
player = output = PlayerOutput(
args.player,
args=args.player_args,
filename=server.url,
quiet=not args.verbose_player,
title=formatter.title(args.title, defaults=DEFAULT_STREAM_METADATA) if args.title else args.url
)
try:
log.info(f"Starting player: {args.player}")
if player:
player.open()
except OSError as err:
console.exit(f"Failed to start player: {args.player} ({err})")
else:
server = create_http_server(host=None, port=port)
player = None
log.info("Starting server, access with one of:")
for url in server.urls:
log.info(" " + url)
for req in iter_http_requests(server, player):
user_agent = req.headers.get("User-Agent") or "unknown player"
log.info(f"Got HTTP request from {user_agent}")
stream_fd = prebuffer = None
while not stream_fd and (not player or player.running):
try:
streams = initial_streams or fetch_streams(plugin)
initial_streams = None
for stream_name in (resolve_stream_name(streams, s) for s in args.stream):
if stream_name in streams:
stream = streams[stream_name]
break
else:
log.info("Stream not available, will re-fetch streams in 10 sec")
sleep(10)
continue
except PluginError as err:
log.error(f"Unable to fetch new streams: {err}")
continue
try:
log.info(f"Opening stream: {stream_name} ({type(stream).shortname()})")
stream_fd, prebuffer = open_stream(stream)
except StreamError as err:
log.error(err)
if stream_fd and prebuffer:
log.debug("Writing stream to player")
read_stream(stream_fd, server, prebuffer, formatter)
server.close(True)
player.close()
server.close()
def output_stream_passthrough(stream, formatter: Formatter):
"""Prepares a filename to be passed to the player."""
global output
filename = f'"{stream_to_url(stream)}"'
output = PlayerOutput(
args.player,
args=args.player_args,
filename=filename,
call=True,
quiet=not args.verbose_player,
title=formatter.title(args.title, defaults=DEFAULT_STREAM_METADATA) if args.title else args.url
)
try:
log.info(f"Starting player: {args.player}")
output.open()
except OSError as err:
console.exit(f"Failed to start player: {args.player} ({err})")
return False
return True
def open_stream(stream):
"""Opens a stream and reads 8192 bytes from it.
This is useful to check if a stream actually has data
before opening the output.
"""
global stream_fd
# Attempts to open the stream
try:
stream_fd = stream.open()
except StreamError as err:
raise StreamError(f"Could not open stream: {err}")
# Read 8192 bytes before proceeding to check for errors.
# This is to avoid opening the output unnecessarily.
try:
log.debug("Pre-buffering 8192 bytes")
prebuffer = stream_fd.read(8192)
except OSError as err:
stream_fd.close()
raise StreamError(f"Failed to read data from stream: {err}")
if not prebuffer:
stream_fd.close()
raise StreamError("No data returned from stream")
return stream_fd, prebuffer
def output_stream(stream, formatter: Formatter):
"""Open stream, create output and finally write the stream to output."""
global output
success_open = False
for i in range(args.retry_open):
try:
stream_fd, prebuffer = open_stream(stream)
success_open = True
break
except StreamError as err:
log.error(f"Try {i + 1}/{args.retry_open}: Could not open stream {stream} ({err})")
if not success_open:
console.exit(f"Could not open stream {stream}, tried {args.retry_open} times, exiting")
output = create_output(formatter)
try:
output.open()
except OSError as err:
if isinstance(output, PlayerOutput):
console.exit(f"Failed to start player: {args.player} ({err})")
else:
console.exit(f"Failed to open output: {output.filename} ({err})")
with closing(output):
log.debug("Writing stream to output")
read_stream(stream_fd, output, prebuffer, formatter)
return True
def read_stream(stream, output, prebuffer, formatter: Formatter, chunk_size=8192):
"""Reads data from stream and then writes it to the output."""
is_player = isinstance(output, PlayerOutput)
is_http = isinstance(output, HTTPServer)
is_fifo = is_player and output.namedpipe
show_progress = (
isinstance(output, FileOutput)
and output.fd is not stdout
and (sys.stdout.isatty() or args.force_progress)
)
show_record_progress = (
hasattr(output, "record")
and isinstance(output.record, FileOutput)
and output.record.fd is not stdout
and (sys.stdout.isatty() or args.force_progress)
)
stream_iterator = chain(
[prebuffer],
iter(partial(stream.read, chunk_size), b"")
)
if show_progress:
stream_iterator = progress(
stream_iterator,
prefix=os.path.basename(output.filename)
)
elif show_record_progress:
stream_iterator = progress(
stream_iterator,
prefix=os.path.basename(output.record.filename)
)
try:
for data in stream_iterator:
# We need to check if the player process still exists when
# using named pipes on Windows since the named pipe is not
# automatically closed by the player.
if is_win32 and is_fifo:
output.player.poll()
if output.player.returncode is not None:
log.info("Player closed")
break
try:
output.write(data)
except OSError as err:
if is_player and err.errno in ACCEPTABLE_ERRNO:
log.info("Player closed")
elif is_http and err.errno in ACCEPTABLE_ERRNO:
log.info("HTTP connection closed")
else:
console.exit(f"Error when writing to output: {err}, exiting")
break
except OSError as err:
console.exit(f"Error when reading from stream: {err}, exiting")
finally:
stream.close()
log.info("Stream ended")
def handle_stream(plugin, streams, stream_name):
"""Decides what to do with the selected stream.
Depending on arguments it can be one of these:
- Output internal command-line
     - Output JSON representation
- Continuously output the stream over HTTP
- Output stream data to selected output
"""
stream_name = resolve_stream_name(streams, stream_name)
stream = streams[stream_name]
# Print internal command-line if this stream
# uses a subprocess.
if args.subprocess_cmdline:
if isinstance(stream, StreamProcess):
try:
cmdline = stream.cmdline()
except StreamError as err:
console.exit(err)
console.msg(cmdline)
else:
console.exit("The stream specified cannot be translated to a command")
# Print JSON representation of the stream
elif args.json:
console.msg_json(
stream,
metadata=plugin.get_metadata()
)
elif args.stream_url:
try:
console.msg(stream.to_url())
except TypeError:
console.exit("The stream specified cannot be translated to a URL")
# Output the stream
else:
# Find any streams with a '_alt' suffix and attempt
# to use these in case the main stream is not usable.
alt_streams = list(filter(lambda k: stream_name + "_alt" in k,
sorted(streams.keys())))
file_output = args.output or args.stdout
formatter = get_formatter(plugin)
for stream_name in [stream_name] + alt_streams:
stream = streams[stream_name]
stream_type = type(stream).shortname()
if stream_type in args.player_passthrough and not file_output:
log.info(f"Opening stream: {stream_name} ({stream_type})")
success = output_stream_passthrough(stream, formatter)
elif args.player_external_http:
return output_stream_http(plugin, streams, formatter, external=True,
port=args.player_external_http_port)
elif args.player_continuous_http and not file_output:
return output_stream_http(plugin, streams, formatter)
else:
log.info(f"Opening stream: {stream_name} ({stream_type})")
success = output_stream(stream, formatter)
if success:
break
def fetch_streams(plugin):
"""Fetches streams using correct parameters."""
return plugin.streams(stream_types=args.stream_types,
sorting_excludes=args.stream_sorting_excludes)
def fetch_streams_with_retry(plugin, interval, count):
"""Attempts to fetch streams repeatedly
    until some are returned or the retry limit is hit."""
try:
streams = fetch_streams(plugin)
except PluginError as err:
log.error(err)
streams = None
if not streams:
log.info(f"Waiting for streams, retrying every {interval} second(s)")
attempts = 0
while not streams:
sleep(interval)
try:
streams = fetch_streams(plugin)
except FatalPluginError:
raise
except PluginError as err:
log.error(err)
if count > 0:
attempts += 1
if attempts >= count:
break
return streams
def resolve_stream_name(streams, stream_name):
"""Returns the real stream name of a synonym."""
if stream_name in STREAM_SYNONYMS and stream_name in streams:
for name, stream in streams.items():
if stream is streams[stream_name] and name not in STREAM_SYNONYMS:
return name
return stream_name
def format_valid_streams(plugin, streams):
"""Formats a dict of streams.
Filters out synonyms and displays them next to
the stream they point to.
Streams are sorted according to their quality
(based on plugin.stream_weight).
"""
delimiter = ", "
validstreams = []
for name, stream in sorted(streams.items(),
key=lambda stream: plugin.stream_weight(stream[0])):
if name in STREAM_SYNONYMS:
continue
def synonymfilter(n):
return stream is streams[n] and n is not name
synonyms = list(filter(synonymfilter, streams.keys()))
if len(synonyms) > 0:
joined = delimiter.join(synonyms)
name = f"{name} ({joined})"
validstreams.append(name)
return delimiter.join(validstreams)
def handle_url():
"""The URL handler.
Attempts to resolve the URL to a plugin and then attempts
to fetch a list of available streams.
Proceeds to handle stream if user specified a valid one,
otherwise output list of valid streams.
"""
try:
plugin = streamlink.resolve_url(args.url)
setup_plugin_options(streamlink, plugin)
log.info(f"Found matching plugin {plugin.module} for URL {args.url}")
if args.retry_max or args.retry_streams:
retry_streams = 1
retry_max = 0
if args.retry_streams:
retry_streams = args.retry_streams
if args.retry_max:
retry_max = args.retry_max
streams = fetch_streams_with_retry(plugin, retry_streams,
retry_max)
else:
streams = fetch_streams(plugin)
except NoPluginError:
console.exit(f"No plugin can handle URL: {args.url}")
except PluginError as err:
console.exit(err)
if not streams:
console.exit(f"No playable streams found on this URL: {args.url}")
if args.default_stream and not args.stream and not args.json:
args.stream = args.default_stream
if args.stream:
validstreams = format_valid_streams(plugin, streams)
for stream_name in args.stream:
if stream_name in streams:
log.info(f"Available streams: {validstreams}")
handle_stream(plugin, streams, stream_name)
return
err = f"The specified stream(s) '{', '.join(args.stream)}' could not be found"
if args.json:
console.msg_json(
plugin=plugin.module,
metadata=plugin.get_metadata(),
streams=streams,
error=err
)
else:
console.exit(f"{err}.\n Available streams: {validstreams}")
elif args.json:
console.msg_json(
plugin=plugin.module,
metadata=plugin.get_metadata(),
streams=streams
)
elif args.stream_url:
try:
console.msg(streams[list(streams)[-1]].to_manifest_url())
except TypeError:
console.exit("The stream specified cannot be translated to a URL")
else:
validstreams = format_valid_streams(plugin, streams)
console.msg(f"Available streams: {validstreams}")
def print_plugins():
"""Outputs a list of all plugins Streamlink has loaded."""
pluginlist = list(streamlink.get_plugins().keys())
pluginlist_formatted = ", ".join(sorted(pluginlist))
if args.json:
console.msg_json(pluginlist)
else:
console.msg(f"Loaded plugins: {pluginlist_formatted}")
def load_plugins(dirs: List[Path], showwarning: bool = True):
"""Attempts to load plugins from a list of directories."""
for directory in dirs:
if directory.is_dir():
success = streamlink.load_plugins(str(directory))
if success and type(directory) is DeprecatedPath:
log.info(f"Loaded plugins from deprecated path, see CLI docs for how to migrate: {directory}")
elif showwarning:
log.warning(f"Plugin path {directory} does not exist or is not a directory!")
def setup_args(parser: argparse.ArgumentParser, config_files: List[Path] = None, ignore_unknown: bool = False):
"""Parses arguments."""
global args
arglist = sys.argv[1:]
# Load arguments from config files
configs = [f"@{config_file}" for config_file in config_files or []]
args, unknown = parser.parse_known_args(configs + arglist)
if unknown and not ignore_unknown:
msg = gettext("unrecognized arguments: %s")
parser.error(msg % " ".join(unknown))
# Force lowercase to allow case-insensitive lookup
if args.stream:
args.stream = [stream.lower() for stream in args.stream]
if not args.url and args.url_param:
args.url = args.url_param
def setup_config_args(parser, ignore_unknown=False):
config_files = []
if args.config:
# We want the config specified last to get highest priority
for config_file in map(lambda path: Path(path).expanduser(), reversed(args.config)):
if config_file.is_file():
config_files.append(config_file)
else:
# Only load first available default config
for config_file in filter(lambda path: path.is_file(), CONFIG_FILES):
if type(config_file) is DeprecatedPath:
log.info(f"Loaded config from deprecated path, see CLI docs for how to migrate: {config_file}")
config_files.append(config_file)
break
if streamlink and args.url:
# Only load first available plugin config
with ignored(NoPluginError):
plugin = streamlink.resolve_url(args.url)
for config_file in CONFIG_FILES:
config_file = config_file.with_name(f"{config_file.name}.{plugin.module}")
if not config_file.is_file():
continue
if type(config_file) is DeprecatedPath:
log.info(f"Loaded plugin config from deprecated path, see CLI docs for how to migrate: {config_file}")
config_files.append(config_file)
break
if config_files:
setup_args(parser, config_files, ignore_unknown=ignore_unknown)
def setup_signals():
# Handle SIGTERM just like SIGINT
signal.signal(signal.SIGTERM, signal.default_int_handler)
def setup_http_session():
"""Sets the global HTTP settings, such as proxy and headers."""
if args.http_proxy:
streamlink.set_option("http-proxy", args.http_proxy)
if args.https_proxy:
streamlink.set_option("https-proxy", args.https_proxy)
if args.http_cookie:
streamlink.set_option("http-cookies", dict(args.http_cookie))
if args.http_header:
streamlink.set_option("http-headers", dict(args.http_header))
if args.http_query_param:
streamlink.set_option("http-query-params", dict(args.http_query_param))
if args.http_ignore_env:
streamlink.set_option("http-trust-env", False)
if args.http_no_ssl_verify:
streamlink.set_option("http-ssl-verify", False)
if args.http_disable_dh:
streamlink.set_option("http-disable-dh", True)
if args.http_ssl_cert:
streamlink.set_option("http-ssl-cert", args.http_ssl_cert)
if args.http_ssl_cert_crt_key:
streamlink.set_option("http-ssl-cert", tuple(args.http_ssl_cert_crt_key))
if args.http_timeout:
streamlink.set_option("http-timeout", args.http_timeout)
def setup_plugins(extra_plugin_dir=None):
"""Loads any additional plugins."""
load_plugins(PLUGIN_DIRS, showwarning=False)
if extra_plugin_dir:
load_plugins([Path(path).expanduser() for path in extra_plugin_dir])
def setup_streamlink():
"""Creates the Streamlink session."""
global streamlink
streamlink = Streamlink({"user-input-requester": ConsoleUserInputRequester(console)})
def setup_options():
"""Sets Streamlink options."""
if args.interface:
streamlink.set_option("interface", args.interface)
if args.ipv4:
streamlink.set_option("ipv4", args.ipv4)
if args.ipv6:
streamlink.set_option("ipv6", args.ipv6)
if args.ringbuffer_size:
streamlink.set_option("ringbuffer-size", args.ringbuffer_size)
if args.mux_subtitles:
streamlink.set_option("mux-subtitles", args.mux_subtitles)
if args.hds_live_edge:
streamlink.set_option("hds-live-edge", args.hds_live_edge)
if args.hls_live_edge:
streamlink.set_option("hls-live-edge", args.hls_live_edge)
if args.hls_playlist_reload_attempts:
streamlink.set_option("hls-playlist-reload-attempts", args.hls_playlist_reload_attempts)
if args.hls_playlist_reload_time:
streamlink.set_option("hls-playlist-reload-time", args.hls_playlist_reload_time)
if args.hls_segment_ignore_names:
streamlink.set_option("hls-segment-ignore-names", args.hls_segment_ignore_names)
if args.hls_segment_key_uri:
streamlink.set_option("hls-segment-key-uri", args.hls_segment_key_uri)
if args.hls_audio_select:
streamlink.set_option("hls-audio-select", args.hls_audio_select)
if args.hls_start_offset:
streamlink.set_option("hls-start-offset", args.hls_start_offset)
if args.hls_duration:
streamlink.set_option("hls-duration", args.hls_duration)
if args.hls_live_restart:
streamlink.set_option("hls-live-restart", args.hls_live_restart)
if args.rtmp_rtmpdump:
streamlink.set_option("rtmp-rtmpdump", args.rtmp_rtmpdump)
elif args.rtmpdump:
streamlink.set_option("rtmp-rtmpdump", args.rtmpdump)
if args.rtmp_proxy:
streamlink.set_option("rtmp-proxy", args.rtmp_proxy)
# deprecated
if args.hds_segment_attempts:
streamlink.set_option("hds-segment-attempts", args.hds_segment_attempts)
if args.hds_segment_threads:
streamlink.set_option("hds-segment-threads", args.hds_segment_threads)
if args.hds_segment_timeout:
streamlink.set_option("hds-segment-timeout", args.hds_segment_timeout)
if args.hds_timeout:
streamlink.set_option("hds-timeout", args.hds_timeout)
if args.hls_segment_attempts:
streamlink.set_option("hls-segment-attempts", args.hls_segment_attempts)
if args.hls_segment_threads:
streamlink.set_option("hls-segment-threads", args.hls_segment_threads)
if args.hls_segment_timeout:
streamlink.set_option("hls-segment-timeout", args.hls_segment_timeout)
if args.hls_timeout:
streamlink.set_option("hls-timeout", args.hls_timeout)
if args.http_stream_timeout:
streamlink.set_option("http-stream-timeout", args.http_stream_timeout)
if args.rtmp_timeout:
streamlink.set_option("rtmp-timeout", args.rtmp_timeout)
# generic stream- arguments take precedence over deprecated stream-type arguments
if args.stream_segment_attempts:
streamlink.set_option("stream-segment-attempts", args.stream_segment_attempts)
if args.stream_segment_threads:
streamlink.set_option("stream-segment-threads", args.stream_segment_threads)
if args.stream_segment_timeout:
streamlink.set_option("stream-segment-timeout", args.stream_segment_timeout)
if args.stream_timeout:
streamlink.set_option("stream-timeout", args.stream_timeout)
if args.ffmpeg_ffmpeg:
streamlink.set_option("ffmpeg-ffmpeg", args.ffmpeg_ffmpeg)
if args.ffmpeg_verbose:
streamlink.set_option("ffmpeg-verbose", args.ffmpeg_verbose)
if args.ffmpeg_verbose_path:
streamlink.set_option("ffmpeg-verbose-path", args.ffmpeg_verbose_path)
if args.ffmpeg_fout:
streamlink.set_option("ffmpeg-fout", args.ffmpeg_fout)
if args.ffmpeg_video_transcode:
streamlink.set_option("ffmpeg-video-transcode", args.ffmpeg_video_transcode)
if args.ffmpeg_audio_transcode:
streamlink.set_option("ffmpeg-audio-transcode", args.ffmpeg_audio_transcode)
if args.ffmpeg_copyts:
streamlink.set_option("ffmpeg-copyts", args.ffmpeg_copyts)
if args.ffmpeg_start_at_zero:
streamlink.set_option("ffmpeg-start-at-zero", args.ffmpeg_start_at_zero)
streamlink.set_option("subprocess-errorlog", args.subprocess_errorlog)
streamlink.set_option("subprocess-errorlog-path", args.subprocess_errorlog_path)
streamlink.set_option("locale", args.locale)
def setup_plugin_args(session, parser):
"""Sets Streamlink plugin options."""
plugin_args = parser.add_argument_group("Plugin options")
for pname, plugin in session.plugins.items():
defaults = {}
group = plugin_args.add_argument_group(pname.capitalize())
for parg in plugin.arguments:
if not parg.is_global:
group.add_argument(parg.argument_name(pname), **parg.options)
defaults[parg.dest] = parg.default
else:
pargdest = parg.dest
for action in parser._actions:
# find matching global argument
if pargdest != action.dest:
continue
defaults[pargdest] = action.default
# add plugin to global argument
plugins = getattr(action, "plugins", [])
plugins.append(pname)
setattr(action, "plugins", plugins)
plugin.options = PluginOptions(defaults)
def setup_plugin_options(session, plugin):
"""Sets Streamlink plugin options."""
pname = plugin.module
required = OrderedDict({})
for parg in plugin.arguments:
if parg.options.get("help") == argparse.SUPPRESS:
continue
value = getattr(args, parg.dest if parg.is_global else parg.namespace_dest(pname))
session.set_plugin_option(pname, parg.dest, value)
if not parg.is_global:
if parg.required:
required[parg.name] = parg
# if the value is set, check to see if any of the required arguments are not set
if parg.required or value:
try:
for rparg in plugin.arguments.requires(parg.name):
required[rparg.name] = rparg
except RuntimeError:
log.error(f"{pname} plugin has a configuration error and the arguments cannot be parsed")
break
if required:
for req in required.values():
if not session.get_plugin_option(pname, req.dest):
prompt = f"{req.prompt or f'Enter {pname} {req.name}'}: "
session.set_plugin_option(
pname,
req.dest,
console.askpass(prompt) if req.sensitive else console.ask(prompt)
)
def log_root_warning():
if hasattr(os, "getuid"):
if os.geteuid() == 0:
log.info("streamlink is running as root! Be careful!")
def log_current_versions():
"""Show current installed versions"""
if not logger.root.isEnabledFor(logging.DEBUG):
return
# macOS
if sys.platform == "darwin":
os_version = f"macOS {platform.mac_ver()[0]}"
# Windows
elif sys.platform == "win32":
os_version = f"{platform.system()} {platform.release()}"
# Linux / other
else:
os_version = platform.platform()
log.debug(f"OS: {os_version}")
log.debug(f"Python: {platform.python_version()}")
log.debug(f"Streamlink: {streamlink_version}")
log.debug(f"Requests({requests.__version__}), "
f"Socks({socks_version}), "
f"Websocket({websocket_version})")
def log_current_arguments(session, parser):
global args
if not logger.root.isEnabledFor(logging.DEBUG):
return
sensitive = set()
for pname, plugin in session.plugins.items():
for parg in plugin.arguments:
if parg.sensitive:
sensitive.add(parg.argument_name(pname))
log.debug("Arguments:")
for action in parser._actions:
if not hasattr(args, action.dest):
continue
value = getattr(args, action.dest)
if action.default != value:
name = next( # pragma: no branch
(option for option in action.option_strings if option.startswith("--")),
action.option_strings[0]
) if action.option_strings else action.dest
log.debug(f" {name}={value if name not in sensitive else '*' * 8}")
def check_version(force=False):
cache = Cache(filename="cli.json")
latest_version = cache.get("latest_version")
if force or not latest_version:
res = requests.get("https://pypi.python.org/pypi/streamlink/json")
data = res.json()
latest_version = data.get("info").get("version")
cache.set("latest_version", latest_version, (60 * 60 * 24))
version_info_printed = cache.get("version_info_printed")
if not force and version_info_printed:
return
installed_version = StrictVersion(streamlink.version)
latest_version = StrictVersion(latest_version)
if latest_version > installed_version:
log.info(f"A new version of Streamlink ({latest_version}) is available!")
cache.set("version_info_printed", True, (60 * 60 * 6))
elif force:
log.info(f"Your Streamlink version ({installed_version}) is up to date!")
if force:
sys.exit()
def setup_logger_and_console(stream=sys.stdout, filename=None, level="info", json=False):
global console
if filename == "-":
filename = LOG_DIR / f"{datetime.now()}.log"
elif filename:
filename = Path(filename).expanduser().resolve()
if filename:
filename.parent.mkdir(parents=True, exist_ok=True)
streamhandler = logger.basicConfig(
stream=stream,
filename=filename,
level=level,
style="{",
format=("[{asctime}]" if level == "trace" else "") + "[{name}][{levelname}] {message}",
datefmt="%H:%M:%S" + (".%f" if level == "trace" else "")
)
console = ConsoleOutput(streamhandler.stream, json)
def main():
error_code = 0
parser = build_parser()
setup_args(parser, ignore_unknown=True)
# call argument set up as early as possible to load args from config files
setup_config_args(parser, ignore_unknown=True)
# Console output should be on stderr if we are outputting
# a stream to stdout.
if args.stdout or args.output == "-" or args.record_and_pipe:
console_out = sys.stderr
else:
console_out = sys.stdout
# We don't want log output when we are printing JSON or a command-line.
silent_log = any(getattr(args, attr) for attr in QUIET_OPTIONS)
log_level = args.loglevel if not silent_log else "none"
log_file = args.logfile if log_level != "none" else None
setup_logger_and_console(console_out, log_file, log_level, args.json)
setup_signals()
setup_streamlink()
# load additional plugins
setup_plugins(args.plugin_dirs)
setup_plugin_args(streamlink, parser)
# call setup args again once the plugin specific args have been added
setup_args(parser)
setup_config_args(parser)
# update the logging level if changed by a plugin specific config
log_level = args.loglevel if not silent_log else "none"
logger.root.setLevel(log_level)
setup_http_session()
log_root_warning()
log_current_versions()
log_current_arguments(streamlink, parser)
if args.version_check or args.auto_version_check:
with ignored(Exception):
check_version(force=args.version_check)
if args.plugins:
print_plugins()
elif args.can_handle_url:
try:
streamlink.resolve_url(args.can_handle_url)
except NoPluginError:
error_code = 1
except KeyboardInterrupt:
error_code = 130
elif args.can_handle_url_no_redirect:
try:
streamlink.resolve_url_no_redirect(args.can_handle_url_no_redirect)
except NoPluginError:
error_code = 1
except KeyboardInterrupt:
error_code = 130
elif args.url:
try:
setup_options()
handle_url()
except KeyboardInterrupt:
# Close output
if output:
output.close()
console.msg("Interrupted! Exiting...")
error_code = 130
finally:
if stream_fd:
try:
log.info("Closing currently open stream...")
stream_fd.close()
except KeyboardInterrupt:
error_code = 130
elif args.help:
parser.print_help()
else:
usage = parser.format_usage()
console.msg(
f"{usage}\n"
f"Use -h/--help to see the available options or read the manual at https://streamlink.github.io"
)
sys.exit(error_code)
def parser_helper():
session = Streamlink()
parser = build_parser()
setup_plugin_args(session, parser)
return parser
| [((1732, 1767), 'logging.getLogger', 'logging.getLogger', (['"""streamlink.cli"""'], {}), "('streamlink.cli')\n", (1749, 1767), False, 'import logging\n'), ((2765, 2785), 'streamlink_cli.output.FileOutput', 'FileOutput', (['filename'], {}), '(filename)\n', (2775, 2785), False, 'from streamlink_cli.output import FileOutput, Output, PlayerOutput\n'), ((22981, 23038), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'signal.default_int_handler'], {}), '(signal.SIGTERM, signal.default_int_handler)\n', (22994, 23038), False, 'import signal\n'), ((30271, 30286), 'collections.OrderedDict', 'OrderedDict', (['{}'], {}), '({})\n', (30282, 30286), False, 'from collections import OrderedDict\n'), ((33269, 33295), 'streamlink.cache.Cache', 'Cache', ([], {'filename': '"""cli.json"""'}), "(filename='cli.json')\n", (33274, 33295), False, 'from streamlink.cache import Cache\n'), ((33753, 33786), 'distutils.version.StrictVersion', 'StrictVersion', (['streamlink.version'], {}), '(streamlink.version)\n', (33766, 33786), False, 'from distutils.version import StrictVersion\n'), ((33808, 33837), 'distutils.version.StrictVersion', 'StrictVersion', (['latest_version'], {}), '(latest_version)\n', (33821, 33837), False, 'from distutils.version import StrictVersion\n'), ((34522, 34756), 'streamlink.logger.basicConfig', 'logger.basicConfig', ([], {'stream': 'stream', 'filename': 'filename', 'level': 'level', 'style': '"""{"""', 'format': "(('[{asctime}]' if level == 'trace' else '') +\n '[{name}][{levelname}] {message}')", 'datefmt': "('%H:%M:%S' + ('.%f' if level == 'trace' else ''))"}), "(stream=stream, filename=filename, level=level, style='{',\n format=('[{asctime}]' if level == 'trace' else '') +\n '[{name}][{levelname}] {message}', datefmt='%H:%M:%S' + ('.%f' if level ==\n 'trace' else ''))\n", (34540, 34756), True, 'import streamlink.logger as logger\n'), ((34814, 34855), 'streamlink_cli.console.ConsoleOutput', 'ConsoleOutput', (['streamhandler.stream', 'json'], {}), '(streamhandler.stream, json)\n', (34827, 34855), False, 'from streamlink_cli.console import ConsoleOutput, ConsoleUserInputRequester\n'), ((34902, 34916), 'streamlink_cli.argparser.build_parser', 'build_parser', ([], {}), '()\n', (34914, 34916), False, 'from streamlink_cli.argparser import build_parser\n'), ((36078, 36109), 'streamlink.logger.root.setLevel', 'logger.root.setLevel', (['log_level'], {}), '(log_level)\n', (36098, 36109), True, 'import streamlink.logger as logger\n'), ((37685, 37705), 'sys.exit', 'sys.exit', (['error_code'], {}), '(error_code)\n', (37693, 37705), False, 'import sys\n'), ((37743, 37755), 'streamlink.Streamlink', 'Streamlink', ([], {}), '()\n', (37753, 37755), False, 'from streamlink import NoPluginError, PluginError, StreamError, Streamlink, __version__ as streamlink_version\n'), ((37769, 37783), 'streamlink_cli.argparser.build_parser', 'build_parser', ([], {}), '()\n', (37781, 37783), False, 'from streamlink_cli.argparser import build_parser\n'), ((2403, 2427), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (2417, 2427), False, 'import os\n'), ((2454, 2472), 'sys.stdin.isatty', 'sys.stdin.isatty', ([], {}), '()\n', (2470, 2472), False, 'import sys\n'), ((5045, 5057), 'streamlink_cli.utils.HTTPServer', 'HTTPServer', ([], {}), '()\n', (5055, 5057), False, 'from streamlink_cli.utils import Formatter, HTTPServer, datetime, ignored, progress, stream_to_url\n'), ((9557, 9600), 'streamlink.StreamError', 'StreamError', (['"""No data returned from stream"""'], {}), "('No data returned from 
stream')\n", (9568, 9600), False, 'from streamlink import NoPluginError, PluginError, StreamError, Streamlink, __version__ as streamlink_version\n'), ((10534, 10549), 'contextlib.closing', 'closing', (['output'], {}), '(output)\n', (10541, 10549), False, 'from contextlib import closing\n'), ((15946, 15961), 'time.sleep', 'sleep', (['interval'], {}), '(interval)\n', (15951, 15961), False, 'from time import sleep\n'), ((21175, 21212), 'gettext.gettext', 'gettext', (['"""unrecognized arguments: %s"""'], {}), "('unrecognized arguments: %s')\n", (21182, 21212), False, 'from gettext import gettext\n'), ((30119, 30142), 'streamlink.plugin.PluginOptions', 'PluginOptions', (['defaults'], {}), '(defaults)\n', (30132, 30142), False, 'from streamlink.plugin import Plugin, PluginOptions\n'), ((31740, 31779), 'streamlink.logger.root.isEnabledFor', 'logger.root.isEnabledFor', (['logging.DEBUG'], {}), '(logging.DEBUG)\n', (31764, 31779), True, 'import streamlink.logger as logger\n'), ((32449, 32488), 'streamlink.logger.root.isEnabledFor', 'logger.root.isEnabledFor', (['logging.DEBUG'], {}), '(logging.DEBUG)\n', (32473, 32488), True, 'import streamlink.logger as logger\n'), ((33396, 33456), 'requests.get', 'requests.get', (['"""https://pypi.python.org/pypi/streamlink/json"""'], {}), "('https://pypi.python.org/pypi/streamlink/json')\n", (33408, 33456), False, 'import requests\n'), ((34148, 34158), 'sys.exit', 'sys.exit', ([], {}), '()\n', (34156, 34158), False, 'import sys\n'), ((2742, 2752), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2750, 2752), False, 'import sys\n'), ((3292, 3313), 'streamlink_cli.output.FileOutput', 'FileOutput', ([], {'fd': 'stdout'}), '(fd=stdout)\n', (3302, 3313), False, 'from streamlink_cli.output import FileOutput, Output, PlayerOutput\n'), ((3465, 3486), 'streamlink_cli.output.FileOutput', 'FileOutput', ([], {'fd': 'stdout'}), '(fd=stdout)\n', (3475, 3486), False, 'from streamlink_cli.output import FileOutput, Output, PlayerOutput\n'), ((8271, 8292), 'streamlink_cli.utils.stream_to_url', 'stream_to_url', (['stream'], {}), '(stream)\n', (8284, 8292), False, 'from streamlink_cli.utils import Formatter, HTTPServer, datetime, ignored, progress, stream_to_url\n'), ((9112, 9156), 'streamlink.StreamError', 'StreamError', (['f"""Could not open stream: {err}"""'], {}), "(f'Could not open stream: {err}')\n", (9123, 9156), False, 'from streamlink import NoPluginError, PluginError, StreamError, Streamlink, __version__ as streamlink_version\n'), ((9439, 9493), 'streamlink.StreamError', 'StreamError', (['f"""Failed to read data from stream: {err}"""'], {}), "(f'Failed to read data from stream: {err}')\n", (9450, 9493), False, 'from streamlink import NoPluginError, PluginError, StreamError, Streamlink, __version__ as streamlink_version\n'), ((11076, 11095), 'sys.stdout.isatty', 'sys.stdout.isatty', ([], {}), '()\n', (11093, 11095), False, 'import sys\n'), ((11295, 11314), 'sys.stdout.isatty', 'sys.stdout.isatty', ([], {}), '()\n', (11312, 11314), False, 'import sys\n'), ((11409, 11441), 'functools.partial', 'partial', (['stream.read', 'chunk_size'], {}), '(stream.read, chunk_size)\n', (11416, 11441), False, 'from functools import partial\n'), ((22283, 22305), 'streamlink_cli.utils.ignored', 'ignored', (['NoPluginError'], {}), '(NoPluginError)\n', (22290, 22305), False, 'from streamlink_cli.utils import Formatter, HTTPServer, datetime, ignored, progress, stream_to_url\n'), ((24556, 24590), 'streamlink_cli.console.ConsoleUserInputRequester', 'ConsoleUserInputRequester', (['console'], {}), 
'(console)\n', (24581, 24590), False, 'from streamlink_cli.console import ConsoleOutput, ConsoleUserInputRequester\n'), ((31571, 31583), 'os.geteuid', 'os.geteuid', ([], {}), '()\n', (31581, 31583), False, 'import os\n'), ((32060, 32079), 'platform.platform', 'platform.platform', ([], {}), '()\n', (32077, 32079), False, 'import platform\n'), ((36301, 36319), 'streamlink_cli.utils.ignored', 'ignored', (['Exception'], {}), '(Exception)\n', (36308, 36319), False, 'from streamlink_cli.utils import Formatter, HTTPServer, datetime, ignored, progress, stream_to_url\n'), ((2108, 2122), 'streamlink_cli.utils.datetime.now', 'datetime.now', ([], {}), '()\n', (2120, 2122), False, 'from streamlink_cli.utils import Formatter, HTTPServer, datetime, ignored, progress, stream_to_url\n'), ((2618, 2628), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2626, 2628), False, 'import sys\n'), ((3641, 3677), 'streamlink_cli.output.FileOutput', 'FileOutput', ([], {'fd': 'stdout', 'record': 'record'}), '(fd=stdout, record=record)\n', (3651, 3677), False, 'from streamlink_cli.output import FileOutput, Output, PlayerOutput\n'), ((11560, 11593), 'os.path.basename', 'os.path.basename', (['output.filename'], {}), '(output.filename)\n', (11576, 11593), False, 'import os\n'), ((32153, 32178), 'platform.python_version', 'platform.python_version', ([], {}), '()\n', (32176, 32178), False, 'import platform\n'), ((7490, 7499), 'time.sleep', 'sleep', (['(10)'], {}), '(10)\n', (7495, 7499), False, 'from time import sleep\n'), ((11719, 11759), 'os.path.basename', 'os.path.basename', (['output.record.filename'], {}), '(output.record.filename)\n', (11735, 11759), False, 'import os\n'), ((31872, 31890), 'platform.mac_ver', 'platform.mac_ver', ([], {}), '()\n', (31888, 31890), False, 'import platform\n'), ((31968, 31985), 'platform.system', 'platform.system', ([], {}), '()\n', (31983, 31985), False, 'import platform\n'), ((31988, 32006), 'platform.release', 'platform.release', ([], {}), '()\n', (32004, 32006), False, 'import platform\n'), ((34327, 34341), 'streamlink_cli.utils.datetime.now', 'datetime.now', ([], {}), '()\n', (34339, 34341), False, 'from streamlink_cli.utils import Formatter, HTTPServer, datetime, ignored, progress, stream_to_url\n'), ((4037, 4048), 'streamlink.utils.named_pipe.NamedPipe', 'NamedPipe', ([], {}), '()\n', (4046, 4048), False, 'from streamlink.utils.named_pipe import NamedPipe\n'), ((21685, 21695), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (21689, 21695), False, 'from pathlib import Path\n'), ((24357, 24367), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (24361, 24367), False, 'from pathlib import Path\n'), ((34386, 34400), 'pathlib.Path', 'Path', (['filename'], {}), '(filename)\n', (34390, 34400), False, 'from pathlib import Path\n')] |
Koushik-ks/FlaskAPP | dbaccesslibUserMailInfo.py | 6f1bd98450bc8f33c3896aa7ec690c51dc414d19 | from io import BytesIO
from io import StringIO
import json
from bson.dbref import DBRef
import datetime
from bson import json_util
import logging
import base64
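# Static map of buildings -> divisions -> departments -> floors; generateqrcode() reads the
# building/division ids from it when composing a location code.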
jsonCode ={
"building":{
"Essae Vaishnavi Solitaire": {
"id": "B1",
"division": {
"SS": {
"id": "D1",
"dept":{
"Semicon":{
"id":"DEP1",
"floor":{"0":"0",
"1":"1",
"2":"2",
"3":"3",
"4":"4",
"5":"5",
"6":"6"
}
},
"RND":{
"id":"DEP2",
"floor":{"0":"0",
"1":"1",
"2":"2",
"3":"3",
"4":"4",
"5":"5",
"6":"6"
}
},
"Mobile":{
"id":"DEP3",
"floor":{"0":"0",
"1":"1",
"2":"2",
"3":"3",
"4":"4",
"5":"5",
"6":"6"
}
}
}
},
"TTEC": {
"id": "D2",
"dept":{
"TTEC-AL":{
"id":"DEP1",
"floor":{"0":"0",
"1":"1",
"2":"2",
"3":"3",
"4":"4",
"5":"5",
"6":"6"
}
},
"TTEC-SL":{
"id":"DEP2",
"floor":{"0":"0",
"1":"1",
"2":"2",
"3":"3",
"4":"4",
"5":"5",
"6":"6"
}
},
"TTEC-DL":{
"id":"DEP3",
"floor":{"0":"0",
"1":"1",
"2":"2",
"3":"3",
"4":"4",
"5":"5",
"6":"6"
}
},
"TTEC-CI":{
"id":"DEP4",
"floor":{"0":"0",
"1":"1",
"2":"2",
"3":"3",
"4":"4",
"5":"5",
"6":"6"
}
}
}
}
}
},
"Fortune Summit": {
"id": "B2",
"division": {
"TMSC": {
"id": "D1",
"dept":{
"Medical":{
"id":"DEP1",
"floor":{"0":"0",
"1":"1",
"2":"2",
"3":"3",
"4":"4",
"5":"5",
"6":"6"
}
},
"RND":{
"id":"DEP2",
"floor":{"0":"0",
"1":"1",
"2":"2",
"3":"3",
"4":"4",
"5":"5",
"6":"6"
}
},
"Imaging":{
"id":"DEP3",
"floor":{"0":"0",
"1":"1",
"2":"2",
"3":"3",
"4":"4",
"5":"5",
"6":"6"
}
}
}
},
"tmc": {
"id": "D2",
"dept":{
"tmc-1":{
"id":"DEP1",
"floor":{"0":"0",
"1":"1",
"2":"2",
"3":"3",
"4":"4",
"5":"5",
"6":"6"
}
},
"tmc-2":{
"id":"DEP2",
"floor":{"0":"0",
"1":"1",
"2":"2",
"3":"3",
"4":"4",
"5":"5",
"6":"6"
}
},
"tmc-3":{
"id":"DEP3",
"floor":{"0":"0",
"1":"1",
"2":"2",
"3":"3",
"4":"4",
"5":"5",
"6":"6"
}
}
}
}
}
}
}
}
#Create and configure logger
logging.basicConfig(filename="server.log",
format='%(asctime)s %(message)s',
filemode='a')
#Creating an object
logger=logging.getLogger()
#Setting the threshold of logger to DEBUG
logger.setLevel(logging.DEBUG)
import pymongo
uri = "mongodb://218ffa09-0ee0-4-231-b9ee:zTV4cwDG0vM49J2GFsw72JzwOD79Bv3dPU8fbVLb5pbh3p0CmTBYcvhrFKTjtl1s7hgYSfRbMOrsVve6hfvhag==@218ffa09-0ee0-4-231-b9ee.documents.azure.com:10255/?ssl=true&replicaSet=globaldb"
client = pymongo.MongoClient(uri)
print("Obtained the client")
mydb = client.test
def sortingReq(item):
new_thrash_date = datetime.datetime.strptime(item["scan_date"], '%d-%m-%Y').date()
return new_thrash_date
def checkIfAutoThrashed(jsonData,tags):
    """Return True when at least 10 of the user's previously trashed mails share 3 or more tags with this one."""
    if(len(tags) < 3):
        return False
    a = mydb.userInfo.find_one({"name":jsonData["name"]})
    newDbref = DBRef("mydb.userInfo",a["_id"])
    foundMailsList = list(mydb.mltable.find({"otherdbref":newDbref,"status":"trash"}))
    if(len(foundMailsList) < 10):
        return False
    thrashcount = 0
    for item in foundMailsList:
        # Count how many of the new mail's tags also appear on this previously trashed mail
        tagcount = 0
        for tag in tags:
            if(tag in item["tags"]):
                tagcount+=1
        if(tagcount >= 3):
            thrashcount+=1
    if(thrashcount >= 10):
        return True
    return False
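# Composes a location code from the building/division ids plus a timestamp, renders it as a
# QR image, e-mails it to the recipient via sendEmail.execute(), then records it with addEntry().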
def generateqrcode(jsonData,filenameJPG,tags,fromMFP):
logger.debug("Received data for generating color code = ")
logger.debug(jsonData)
ilocation=1
today = datetime.datetime.now()
date = str(today.day)
time = str(today.hour) + ":" + str(today.minute) + ":" + str(today.second)+":"+str(today.microsecond)
dateTimeNow = date+':'+time
logger.debug("Current Datetime - "+dateTimeNow)
dateTimeNow = str(today.day)+str(today.hour)+str(today.minute)+str(today.second)+(str(today.microsecond)[:2])
logger.debug("Unique Code - "+dateTimeNow)
if(int(jsonData["cubicle"])>25 and int(jsonData["cubicle"])<=50):
ilocation=2
elif(int(jsonData["cubicle"])>50 and int(jsonData["cubicle"])<=75):
ilocation=3
else:
ilocation=4
logger.debug(jsonData["building"])
colorCode=jsonCode["building"][jsonData["building"]]["id"]+':'+jsonCode["building"][jsonData["building"]]["division"][jsonData["division"]]["id"]+':'+dateTimeNow
logger.debug("ColorCode - "+colorCode)
logger.debug("generateColorCode:: ColorCode value ="+colorCode)
import qrcode
img = qrcode.make(colorCode)
logger.debug(type(img))
autoThrashed = checkIfAutoThrashed(jsonData,tags)
logger.debug("Auto thrashed value is %d" % autoThrashed)
logger.debug("Tags are %s" % tags)
import sendEmail as se
se.execute(str(jsonData["email"]),filenameJPG,str(colorCode),img,autoThrashed,fromMFP)
#img = qrcode.make(colorCode)
#img.save(colorCode+".png")
newjsonData = {"name":jsonData["name"],"code":colorCode,"email":jsonData["email"],"division":jsonData["division"],"department":jsonData["department"],"floor":jsonData["floor"],"cubicle":jsonData["cubicle"],"building":jsonData["building"]}
if(fromMFP):
newjsonData["source"] = "MFP"
else:
newjsonData["source"] = "Mobile"
return addEntry(newjsonData,tags,autoThrashed);
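# Persists a scanned entry: links it to the user through a DBRef, stamps scan/end dates
# (10-day retention, or immediate when auto-thrashed) and feeds the ML table when 3+ tags exist.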
def addEntry(jsonData,tags,autoThrashed):
a = mydb.userInfo.find_one({"name":jsonData["name"]})
newDbref = DBRef("mydb.userInfo",a["_id"])
scan_date = datetime.datetime.today()
scan_date = scan_date + datetime.timedelta(hours=9)
end_date = scan_date + datetime.timedelta(days=10)
scan_date = str(scan_date.day) +"-"+ str(scan_date.month)+"-" + str(scan_date.year)
end_date = str(end_date.day) +"-" +str(end_date.month)+"-" + str(end_date.year)
if(autoThrashed):
end_date = scan_date
if( not autoThrashed and len(tags) >= 3):
#mydb.mltable.insert({"code":jsonData["code"],"tags": tags,"status":"Keep","user_id":1,"otherdbref":newDbref}) Actual Code
mydb.mltable.insert({"code":jsonData["code"],"tags": tags,"status":"Keep","user_id":1,"otherdbref":newDbref})#Test code to be removed
#end_date = scan_date
mydb.userMailInfo.insert({"code":jsonData["code"],"scan_date":scan_date,"end_date":end_date,"otherdbref":newDbref,"userDeleted":False,"user_id":1,"source":jsonData["source"]})
jsonData["autoThrashed"] = autoThrashed
return json.dumps(jsonData)
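# Returns every stored mail entry joined with its owner's userInfo record (resolved via the DBRef).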
def read_fromDB():
new_list = list()
for item in mydb.userMailInfo.find({},{"_id":0,"user_id":0}):
print(item)
otherdbref = item["otherdbref"]
newjson = mydb.userInfo.find_one({"_id":otherdbref.id},{"_id":0,"user_id":0})
dall = {}
item.pop("otherdbref")
dall.update(item)
dall.update(newjson)
print(dall)
new_list.append(dall)
new_list.reverse()
return json.dumps(new_list,default=json_util.default)
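# Paginated listing (10 rows per page): "all" returns every entry, "today" returns entries whose
# end_date has been reached, and any other action returns entries scanned today.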
def getspecificDate(jsonData):
logger.debug(jsonData)
num = int(jsonData['page'])
skips = 10 * (num - 1)
if(jsonData["action"] == "all"):
all_list = list(mydb.userMailInfo.find({"userDeleted":False},{'_id' : 0,'user_id':0}))
all_list.reverse()
totalsize = len(all_list)
all_list = all_list[skips:]
all_list = all_list[:10]
new_list_new = list()
for item in all_list:
otherdbref = item["otherdbref"]
newjson = mydb.userInfo.find_one({"_id":otherdbref.id},{"_id":0,"user_id":0})
dall = {}
item.pop("otherdbref")
dall.update(item)
dall.update(newjson)
print(dall)
new_list_new.append(dall)
new_list_new.append({"totalsize":totalsize})
logger.debug(new_list_new)
#new_list_new.sort(key = lambda x : x["name"])
return json.dumps(new_list_new, default=json_util.default)
elif(jsonData["action"] == "today"):
all_list = list(mydb.userMailInfo.find({"userDeleted":False},{'_id' : 0,'user_id':0}))
thrash_date = datetime.datetime.today()
thrash_date = thrash_date + datetime.timedelta(hours=9)
thrash_date = str(thrash_date.day) + "-" +str(thrash_date.month)+"-" + str(thrash_date.year)
thrash_date = datetime.datetime.strptime(thrash_date, '%d-%m-%Y').date()
new_list = list()
for item in all_list:
if(item['end_date'] == "DONT TRASH"):
continue
db_date = datetime.datetime.strptime(item['end_date'],'%d-%m-%Y').date()
if(db_date <= thrash_date):
new_list.append(item)
new_list.reverse()
totalsize = len(new_list)
new_list = new_list[skips:]
new_list = new_list[:10]
new_list_new = list()
for item in new_list:
otherdbref = item["otherdbref"]
newjson = mydb.userInfo.find_one({"_id":otherdbref.id},{"_id":0,"user_id":0})
dall = {}
item.pop("otherdbref")
dall.update(item)
dall.update(newjson)
print(dall)
new_list_new.append(dall)
new_list_new.append({"totalsize":totalsize})
logger.debug(new_list_new)
#new_list_new.sort(key = lambda x : x["name"])
return json.dumps(new_list_new, default=json_util.default)
else:
all_list = list(mydb.userMailInfo.find({"userDeleted":False},{'_id' : 0,'user_id':0}))
thrash_date = datetime.datetime.today()
thrash_date = thrash_date + datetime.timedelta(hours=9)
thrash_date = str(thrash_date.day) + "-" +str(thrash_date.month)+"-" + str(thrash_date.year)
thrash_date = datetime.datetime.strptime(thrash_date, '%d-%m-%Y').date()
new_list = list()
for item in all_list:
db_date = datetime.datetime.strptime(item['scan_date'],'%d-%m-%Y').date()
if(db_date == thrash_date):
new_list.append(item)
new_list.reverse()
totalsize = len(new_list)
new_list = new_list[skips:]
new_list = new_list[:10]
new_list_new = list()
for item in new_list:
otherdbref = item["otherdbref"]
newjson = mydb.userInfo.find_one({"_id":otherdbref.id},{"_id":0,"user_id":0})
dall = {}
item.pop("otherdbref")
dall.update(item)
dall.update(newjson)
print(dall)
new_list_new.append(dall)
new_list_new.append({"totalsize":totalsize})
logger.debug(new_list_new)
return json.dumps(new_list_new, default=json_util.default)
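# Updates an entry's end_date; unless the new value is "DONT TRASH", the matching ML row is marked as trash.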
def update_DB(jsonData):
logger.debug("DBUMI::Update_db() entry")
logger.debug(jsonData["code"])
logger.debug(jsonData["end_date"])
foundmail = mydb.userMailInfo.find_one({"code":jsonData["code"]},{"_id":1})
logger.debug(foundmail)
foundMl = mydb.mltable.find_one({"code":jsonData["code"]},{"_id":1})
logger.debug(foundMl)
mydb.userMailInfo.update_many({"_id":foundmail["_id"],"user_id":1},{"$set":{'end_date':str(jsonData['end_date'])}})
if(not jsonData['end_date'] == "DONT TRASH"):
mydb.mltable.update_many({"_id":foundMl["_id"],"user_id":1},{"$set":{"status":"trash"}})
return json.dumps({"status": "Success","statusreason": "updateSucess"})
#Clear DB only for testing
def delete_entry(jsonData):
logger.debug("DBUMI::delete_entry() entry")
logger.debug(jsonData["code"])
mydb.userMailInfo.delete_one({"code":jsonData["code"],"user_id":1})
return json.dumps({"status": "Success","statusreason": "updateSucess"})
def clear_db():
mydb.userMailInfo.remove({})
| [((7101, 7195), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': '"""server.log"""', 'format': '"""%(asctime)s %(message)s"""', 'filemode': '"""a"""'}), "(filename='server.log', format='%(asctime)s %(message)s',\n filemode='a')\n", (7120, 7195), False, 'import logging\n'), ((7282, 7301), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (7299, 7301), False, 'import logging\n'), ((7618, 7642), 'pymongo.MongoClient', 'pymongo.MongoClient', (['uri'], {}), '(uri)\n', (7637, 7642), False, 'import pymongo\n'), ((7984, 8016), 'bson.dbref.DBRef', 'DBRef', (['"""mydb.userInfo"""', "a['_id']"], {}), "('mydb.userInfo', a['_id'])\n", (7989, 8016), False, 'from bson.dbref import DBRef\n'), ((8680, 8703), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (8701, 8703), False, 'import datetime\n'), ((9637, 9659), 'qrcode.make', 'qrcode.make', (['colorCode'], {}), '(colorCode)\n', (9648, 9659), False, 'import qrcode\n'), ((10546, 10578), 'bson.dbref.DBRef', 'DBRef', (['"""mydb.userInfo"""', "a['_id']"], {}), "('mydb.userInfo', a['_id'])\n", (10551, 10578), False, 'from bson.dbref import DBRef\n'), ((10594, 10619), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (10617, 10619), False, 'import datetime\n'), ((11539, 11559), 'json.dumps', 'json.dumps', (['jsonData'], {}), '(jsonData)\n', (11549, 11559), False, 'import json\n'), ((12001, 12048), 'json.dumps', 'json.dumps', (['new_list'], {'default': 'json_util.default'}), '(new_list, default=json_util.default)\n', (12011, 12048), False, 'import json\n'), ((16407, 16472), 'json.dumps', 'json.dumps', (["{'status': 'Success', 'statusreason': 'updateSucess'}"], {}), "({'status': 'Success', 'statusreason': 'updateSucess'})\n", (16417, 16472), False, 'import json\n'), ((16693, 16758), 'json.dumps', 'json.dumps', (["{'status': 'Success', 'statusreason': 'updateSucess'}"], {}), "({'status': 'Success', 'statusreason': 'updateSucess'})\n", (16703, 16758), False, 'import json\n'), ((10648, 10675), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(9)'}), '(hours=9)\n', (10666, 10675), False, 'import datetime\n'), ((10703, 10730), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(10)'}), '(days=10)\n', (10721, 10730), False, 'import datetime\n'), ((12971, 13022), 'json.dumps', 'json.dumps', (['new_list_new'], {'default': 'json_util.default'}), '(new_list_new, default=json_util.default)\n', (12981, 13022), False, 'import json\n'), ((7735, 7792), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["item['scan_date']", '"""%d-%m-%Y"""'], {}), "(item['scan_date'], '%d-%m-%Y')\n", (7761, 7792), False, 'import datetime\n'), ((13190, 13215), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (13213, 13215), False, 'import datetime\n'), ((14438, 14489), 'json.dumps', 'json.dumps', (['new_list_new'], {'default': 'json_util.default'}), '(new_list_new, default=json_util.default)\n', (14448, 14489), False, 'import json\n'), ((14617, 14642), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (14640, 14642), False, 'import datetime\n'), ((15726, 15777), 'json.dumps', 'json.dumps', (['new_list_new'], {'default': 'json_util.default'}), '(new_list_new, default=json_util.default)\n', (15736, 15777), False, 'import json\n'), ((13252, 13279), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(9)'}), '(hours=9)\n', (13270, 13279), False, 'import datetime\n'), ((14679, 14706), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(9)'}), 
'(hours=9)\n', (14697, 14706), False, 'import datetime\n'), ((13411, 13462), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['thrash_date', '"""%d-%m-%Y"""'], {}), "(thrash_date, '%d-%m-%Y')\n", (13437, 13462), False, 'import datetime\n'), ((14838, 14889), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['thrash_date', '"""%d-%m-%Y"""'], {}), "(thrash_date, '%d-%m-%Y')\n", (14864, 14889), False, 'import datetime\n'), ((13623, 13679), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["item['end_date']", '"""%d-%m-%Y"""'], {}), "(item['end_date'], '%d-%m-%Y')\n", (13649, 13679), False, 'import datetime\n'), ((14975, 15032), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["item['scan_date']", '"""%d-%m-%Y"""'], {}), "(item['scan_date'], '%d-%m-%Y')\n", (15001, 15032), False, 'import datetime\n')] |
google/cc4d | src/test/dags/bq_to_cm_dag_test.py | 206543832368f96bac7f55c0de93c96e32127779 | # python3
# coding=utf-8
# Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for dags.bq_to_cm_dag."""
import unittest
from airflow.contrib.hooks import bigquery_hook
from airflow.models import baseoperator
from airflow.models import dag
from airflow.models import variable
import mock
from gps_building_blocks.cloud.utils import cloud_auth
from dags import bq_to_cm_dag
from plugins.pipeline_plugins.hooks import monitoring_hook
_DAG_NAME = bq_to_cm_dag._DAG_NAME
AIRFLOW_VARIABLES = {
'dag_name': _DAG_NAME,
f'{_DAG_NAME}_schedule': '@once',
f'{_DAG_NAME}_retries': 0,
f'{_DAG_NAME}_retry_delay': 3,
f'{_DAG_NAME}_is_retry': True,
f'{_DAG_NAME}_is_run': True,
f'{_DAG_NAME}_enable_run_report': False,
f'{_DAG_NAME}_enable_monitoring': True,
f'{_DAG_NAME}_enable_monitoring_cleanup': False,
'monitoring_data_days_to_live': 50,
'monitoring_dataset': 'test_monitoring_dataset',
'monitoring_table': 'test_monitoring_table',
'monitoring_bq_conn_id': 'test_monitoring_conn',
'bq_dataset_id': 'test_dataset',
'bq_table_id': 'test_table',
'cm_profile_id': 'cm_profile_id',
'cm_service_account': 'cm_service_account'
}
class BQToCMDAGTest(unittest.TestCase):
def setUp(self):
super(BQToCMDAGTest, self).setUp()
self.addCleanup(mock.patch.stopall)
self.build_impersonated_client_mock = mock.patch.object(
cloud_auth, 'build_impersonated_client', autospec=True)
self.build_impersonated_client_mock.return_value = mock.Mock()
self.build_impersonated_client_mock.start()
self.mock_variable = mock.patch.object(
variable, 'Variable', autospec=True).start()
# `side_effect` is assigned to `lambda` to dynamically return values
# each time when self.mock_variable is called.
self.mock_variable.get.side_effect = (
lambda key, value: AIRFLOW_VARIABLES[key])
self.original_bigquery_hook_init = bigquery_hook.BigQueryHook.__init__
bigquery_hook.BigQueryHook.__init__ = mock.MagicMock()
self.original_monitoring_hook = monitoring_hook.MonitoringHook
monitoring_hook.MonitoringHook = mock.MagicMock()
def tearDown(self):
super().tearDown()
bigquery_hook.BigQueryHook.__init__ = self.original_bigquery_hook_init
monitoring_hook.MonitoringHook = self.original_monitoring_hook
def test_create_dag(self):
"""Tests that returned DAG contains correct DAG and tasks."""
expected_task_ids = ['bq_to_cm_retry_task', 'bq_to_cm_task']
test_dag = bq_to_cm_dag.BigQueryToCMDag(
AIRFLOW_VARIABLES['dag_name']).create_dag()
self.assertIsInstance(test_dag, dag.DAG)
self.assertEqual(len(test_dag.tasks), len(expected_task_ids))
for task in test_dag.tasks:
self.assertIsInstance(task, baseoperator.BaseOperator)
actual_task_ids = [t.task_id for t in test_dag.tasks]
self.assertListEqual(actual_task_ids, expected_task_ids)
if __name__ == '__main__':
unittest.main()
| [((3473, 3488), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3486, 3488), False, 'import unittest\n'), ((1901, 1974), 'mock.patch.object', 'mock.patch.object', (['cloud_auth', '"""build_impersonated_client"""'], {'autospec': '(True)'}), "(cloud_auth, 'build_impersonated_client', autospec=True)\n", (1918, 1974), False, 'import mock\n'), ((2039, 2050), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2048, 2050), False, 'import mock\n'), ((2532, 2548), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (2546, 2548), False, 'import mock\n'), ((2654, 2670), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (2668, 2670), False, 'import mock\n'), ((2124, 2178), 'mock.patch.object', 'mock.patch.object', (['variable', '"""Variable"""'], {'autospec': '(True)'}), "(variable, 'Variable', autospec=True)\n", (2141, 2178), False, 'import mock\n'), ((3036, 3095), 'dags.bq_to_cm_dag.BigQueryToCMDag', 'bq_to_cm_dag.BigQueryToCMDag', (["AIRFLOW_VARIABLES['dag_name']"], {}), "(AIRFLOW_VARIABLES['dag_name'])\n", (3064, 3095), False, 'from dags import bq_to_cm_dag\n')] |
burgwyn/State-TalentMAP-API | talentmap_api/common/management/commands/load_xml.py | 1f4f3659c5743ebfd558cd87af381f5460f284b3 | from django.core.management.base import BaseCommand
import logging
import re
from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag
from talentmap_api.language.models import Language, Proficiency
from talentmap_api.position.models import Grade, Skill, Position, CapsuleDescription, SkillCone
from talentmap_api.organization.models import Organization, Post, TourOfDuty, Location, Country
class Command(BaseCommand):
help = 'Loads an XML into a supported file'
logger = logging.getLogger(__name__)
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
self.modes = {
'languages': mode_languages,
'proficiencies': mode_proficiencies,
'grades': mode_grades,
'skills': mode_skills,
'organizations': mode_organizations,
'positions': mode_positions,
'tours_of_duty': mode_tour_of_duty,
'posts': mode_post,
'countries': mode_country,
'locations': mode_location,
'capsule_descriptions': mode_capsule_description,
'skill_cone': mode_skill_cone
}
def add_arguments(self, parser):
parser.add_argument('file', nargs=1, type=str, help="The XML file to load")
parser.add_argument('type', nargs=1, type=str, choices=self.modes.keys(), help="The type of data in the XML")
parser.add_argument('--delete', dest='delete', action='store_true', help='Delete collisions')
parser.add_argument('--update', dest='update', action='store_true', help='Update collisions')
parser.add_argument('--skippost', dest='skip_post', action='store_true', help='Skip post load functions')
def handle(self, *args, **options):
model, instance_tag, tag_map, collision_field, post_load_function = self.modes[options['type'][0]]()
# Set / update the collision behavior
collision_behavior = None
if options['delete']:
collision_behavior = "delete"
elif options['update']:
collision_behavior = "update"
else:
collision_behavior = "skip"
loader = XMLloader(model, instance_tag, tag_map, collision_behavior, collision_field)
new_ids, updated_ids = loader.create_models_from_xml(options['file'][0])
# Run the post load function, if it exists
if callable(post_load_function) and not options['skip_post']:
post_load_function(new_ids, updated_ids)
self.logger.info(f"XML Load Report\n\tNew: {len(new_ids)}\n\tUpdated: {len(updated_ids)}\t\t")
def mode_languages():
model = Language
instance_tag = "LANGUAGES:LANGUAGE"
collision_field = "code"
tag_map = {
"LANGUAGES:LANG_CODE": "code",
"LANGUAGES:LANG_LONG_DESC": "long_description",
"LANGUAGES:LANG_SHORT_DESC": "short_description",
"LANGUAGES:LANG_EFFECTIVE_DATE": parse_date("effective_date")
}
return (model, instance_tag, tag_map, collision_field, None)
def mode_proficiencies():
model = Proficiency
instance_tag = "LANGUAGE_PROFICIENCY:LANGUAGE_PROFICIENCY"
collision_field = "code"
tag_map = {
"LANGUAGE_PROFICIENCY:LP_CODE": "code",
"LANGUAGE_PROFICIENCY:LP_DESC": "description"
}
return (model, instance_tag, tag_map, collision_field, None)
def mode_grades():
model = Grade
instance_tag = "GRADES:GRADE"
collision_field = "code"
tag_map = {
"GRADES:GRD_GRADE_CODE": "code"
}
def post_load_function(new_ids, updated_ids):
for pos in Grade.objects.filter(id__in=new_ids + updated_ids):
pos.update_relationships()
return (model, instance_tag, tag_map, collision_field, post_load_function)
def mode_skills():
model = Skill
instance_tag = "SKILLS:SKILL"
collision_field = "code"
tag_map = {
"SKILLS:SKILL_CODE": "code",
"SKILLS:SKILL_DESCRIPTION": "description"
}
return (model, instance_tag, tag_map, collision_field, None)
def mode_organizations():
model = Organization
instance_tag = "DATA_RECORD"
collision_field = "code"
tag_map = {
"ORG_CODE": "code",
"ORG_SHORT_DESC": "short_description",
"ORG_LONG_DESC": strip_extra_spaces("long_description"),
"ORG_PARENT_ORG_CODE": "_parent_organization_code",
"ORG_BUREAU_ORG_CODE": "_parent_bureau_code",
"ORG_LOCATION_CODE": "_location_code"
}
# Update relationships
def post_load_function(new_ids, updated_ids):
for org in Organization.objects.filter(id__in=new_ids + updated_ids):
org.update_relationships()
# Regional code setting is done automatically by DOS Webservices, so
# we now only need this logic when loading from our sample XML files
# Array of regional codes
regional_codes = [
"110000",
"120000",
"130000",
"140000",
"146000",
"150000",
"160000"
]
if org.code in regional_codes:
org.is_regional = True
else:
org.is_regional = False
if org.code == org._parent_bureau_code:
org.is_bureau = True
org.save()
return (model, instance_tag, tag_map, collision_field, post_load_function)
def mode_positions():
model = Position
instance_tag = "POSITIONS:POSITION"
collision_field = "_seq_num"
tag_map = {
"POSITIONS:POS_SEQ_NUM": "_seq_num",
"POSITIONS:POS_NUM_TEXT": "position_number",
"POSITIONS:POS_TITLE_CODE": "_title_code",
"POSITIONS:POS_TITLE_DESC": "title",
"POSITIONS:POS_ORG_CODE": "_org_code",
"POSITIONS:POS_BUREAU_CODE": "_bureau_code",
"POSITIONS:POS_SKILL_CODE": "_skill_code",
"POSITIONS:POS_STAFF_PTRN_SKILL_CODE": "_staff_ptrn_skill_code",
"POSITIONS:POS_OVERSEAS_IND": parse_boolean("is_overseas", ['O']),
"POSITIONS:POS_PAY_PLAN_CODE": "_pay_plan_code",
"POSITIONS:POS_STATUS_CODE": "_status_code",
"POSITIONS:POS_SERVICE_TYPE_CODE": "_service_type_code",
"POSITIONS:POS_GRADE_CODE": "_grade_code",
"POSITIONS:POS_POST_CODE": "_post_code",
"POSITIONS:POS_LANGUAGE_1_CODE": "_language_1_code",
"POSITIONS:POS_LANGUAGE_2_CODE": "_language_2_code",
"POSITIONS:POS_LOCATION_CODE": "_location_code",
"POSITIONS:POS_LANG_REQ_1_CODE": "_language_req_1_code",
"POSITIONS:POS_LANG_REQ_2_CODE": "_language_req_2_code",
"POSITIONS:POS_SPEAK_PROFICIENCY_1_CODE": "_language_1_spoken_proficiency_code",
"POSITIONS:POS_READ_PROFICIENCY_1_CODE": "_language_1_reading_proficiency_code",
"POSITIONS:POS_SPEAK_PROFICIENCY_2_CODE": "_language_2_spoken_proficiency_code",
"POSITIONS:POS_READ_PROFICIENCY_2_CODE": "_language_2_reading_proficiency_code",
"POSITIONS:POS_CREATE_ID": "_create_id",
"POSITIONS:POS_CREATE_DATE": parse_date("create_date"),
"POSITIONS:POS_UPDATE_ID": "_update_id",
"POSITIONS:POS_UPDATE_DATE": parse_date("update_date"),
"POSITIONS:POS_EFFECTIVE_DATE": parse_date("effective_date"),
"POSITIONS:POS_JOBCODE_CODE": "_jobcode_code",
"POSITIONS:POS_OCC_SERIES_CODE": "_occ_series_code",
}
def post_load_function(new_ids, updated_ids):
for pos in Position.objects.filter(id__in=new_ids + updated_ids):
pos.update_relationships()
return (model, instance_tag, tag_map, collision_field, post_load_function)
def mode_tour_of_duty():
model = TourOfDuty
instance_tag = "TOUR_OF_DUTIES:TOUR_OF_DUTY"
collision_field = "code"
tag_map = {
"TOUR_OF_DUTIES:TOD_CODE": "code",
"TOUR_OF_DUTIES:TOD_SHORT_DESC": "short_description",
"TOUR_OF_DUTIES:TOD_DESC_TEXT": lambda instance, item: setattr(instance, "long_description", re.sub('&', '&', item.text).strip()),
"TOUR_OF_DUTIES:TOD_MONTHS_NUM": "months"
}
return (model, instance_tag, tag_map, collision_field, None)
def mode_post():
model = Post
instance_tag = "BIDPOSTS:BIDDING_TOOL"
collision_field = "_location_code"
tag_map = {
"BIDPOSTS:DSC_CD": "_location_code",
"BIDPOSTS:TOD_CODE": "_tod_code",
"BIDPOSTS:BT_COST_OF_LIVING_ADJUST_NUM": "cost_of_living_adjustment",
"BIDPOSTS:BT_DIFFERENTIAL_RATE_NUM": "differential_rate",
"BIDPOSTS:BT_REST_RELAXATION_POINT_TEXT": strip_extra_spaces("rest_relaxation_point"),
"BIDPOSTS:BT_DANGER_PAY_NUM": "danger_pay",
"BIDPOSTS:BT_CONSUMABLE_ALLOWANCE_FLG": parse_boolean("has_consumable_allowance"),
"BIDPOSTS:BT_SERVICE_NEEDS_DIFF_FLG": parse_boolean("has_service_needs_differential"),
}
def post_load_function(new_ids, updated_ids):
for loc in Post.objects.filter(id__in=new_ids + updated_ids):
loc.update_relationships()
return (model, instance_tag, tag_map, collision_field, post_load_function)
def mode_country():
model = Country
instance_tag = "DATA_RECORD"
collision_field = "code"
tag_map = {
"COUNTRY_CODE": "code",
"FULL_NAME": "name",
"SHORT_NAME": "short_name",
"COUNTRY_CODE_2": "short_code",
"LOCATION_PREFIX": "location_prefix"
}
return (model, instance_tag, tag_map, collision_field, None)
def mode_location():
model = Location
instance_tag = "location"
collision_field = "code"
tag_map = {
"code": "code",
"city": strip_extra_spaces("city"),
"state": strip_extra_spaces("state"),
"country": "_country"
}
def post_load_function(new_ids, updated_ids):
# Connect new locations to applicable posts
for loc in Location.objects.filter(id__in=new_ids + updated_ids):
Post.objects.filter(_location_code=loc.code).update(location=loc)
return (model, instance_tag, tag_map, collision_field, post_load_function)
def mode_capsule_description():
model = CapsuleDescription
instance_tag = "position"
collision_field = "_pos_seq_num"
tag_map = {
"POS_SEQ_NUM": "_pos_seq_num",
"capsuleDescription": "content",
}
return (model, instance_tag, tag_map, collision_field, None)
def mode_skill_cone():
model = SkillCone
instance_tag = "jobCategorySkill"
collision_field = None
tag_map = {
"id": "_id",
"name": strip_extra_spaces("name"),
"skill": get_nested_tag("_skill_codes", "code"),
}
return (model, instance_tag, tag_map, collision_field, None)
| [((544, 571), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (561, 571), False, 'import logging\n'), ((2223, 2299), 'talentmap_api.common.xml_helpers.XMLloader', 'XMLloader', (['model', 'instance_tag', 'tag_map', 'collision_behavior', 'collision_field'], {}), '(model, instance_tag, tag_map, collision_behavior, collision_field)\n', (2232, 2299), False, 'from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag\n'), ((2984, 3012), 'talentmap_api.common.xml_helpers.parse_date', 'parse_date', (['"""effective_date"""'], {}), "('effective_date')\n", (2994, 3012), False, 'from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag\n'), ((3653, 3703), 'talentmap_api.position.models.Grade.objects.filter', 'Grade.objects.filter', ([], {'id__in': '(new_ids + updated_ids)'}), '(id__in=new_ids + updated_ids)\n', (3673, 3703), False, 'from talentmap_api.position.models import Grade, Skill, Position, CapsuleDescription, SkillCone\n'), ((4332, 4370), 'talentmap_api.common.xml_helpers.strip_extra_spaces', 'strip_extra_spaces', (['"""long_description"""'], {}), "('long_description')\n", (4350, 4370), False, 'from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag\n'), ((4635, 4692), 'talentmap_api.organization.models.Organization.objects.filter', 'Organization.objects.filter', ([], {'id__in': '(new_ids + updated_ids)'}), '(id__in=new_ids + updated_ids)\n', (4662, 4692), False, 'from talentmap_api.organization.models import Organization, Post, TourOfDuty, Location, Country\n'), ((6084, 6119), 'talentmap_api.common.xml_helpers.parse_boolean', 'parse_boolean', (['"""is_overseas"""', "['O']"], {}), "('is_overseas', ['O'])\n", (6097, 6119), False, 'from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag\n'), ((7147, 7172), 'talentmap_api.common.xml_helpers.parse_date', 'parse_date', (['"""create_date"""'], {}), "('create_date')\n", (7157, 7172), False, 'from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag\n'), ((7260, 7285), 'talentmap_api.common.xml_helpers.parse_date', 'parse_date', (['"""update_date"""'], {}), "('update_date')\n", (7270, 7285), False, 'from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag\n'), ((7327, 7355), 'talentmap_api.common.xml_helpers.parse_date', 'parse_date', (['"""effective_date"""'], {}), "('effective_date')\n", (7337, 7355), False, 'from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag\n'), ((7549, 7602), 'talentmap_api.position.models.Position.objects.filter', 'Position.objects.filter', ([], {'id__in': '(new_ids + updated_ids)'}), '(id__in=new_ids + updated_ids)\n', (7572, 7602), False, 'from talentmap_api.position.models import Grade, Skill, Position, CapsuleDescription, SkillCone\n'), ((8652, 8695), 'talentmap_api.common.xml_helpers.strip_extra_spaces', 'strip_extra_spaces', (['"""rest_relaxation_point"""'], {}), "('rest_relaxation_point')\n", (8670, 8695), False, 'from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag\n'), ((8797, 8838), 'talentmap_api.common.xml_helpers.parse_boolean', 'parse_boolean', (['"""has_consumable_allowance"""'], {}), 
"('has_consumable_allowance')\n", (8810, 8838), False, 'from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag\n'), ((8886, 8933), 'talentmap_api.common.xml_helpers.parse_boolean', 'parse_boolean', (['"""has_service_needs_differential"""'], {}), "('has_service_needs_differential')\n", (8899, 8933), False, 'from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag\n'), ((9011, 9060), 'talentmap_api.organization.models.Post.objects.filter', 'Post.objects.filter', ([], {'id__in': '(new_ids + updated_ids)'}), '(id__in=new_ids + updated_ids)\n', (9030, 9060), False, 'from talentmap_api.organization.models import Organization, Post, TourOfDuty, Location, Country\n'), ((9714, 9740), 'talentmap_api.common.xml_helpers.strip_extra_spaces', 'strip_extra_spaces', (['"""city"""'], {}), "('city')\n", (9732, 9740), False, 'from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag\n'), ((9759, 9786), 'talentmap_api.common.xml_helpers.strip_extra_spaces', 'strip_extra_spaces', (['"""state"""'], {}), "('state')\n", (9777, 9786), False, 'from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag\n'), ((9946, 9999), 'talentmap_api.organization.models.Location.objects.filter', 'Location.objects.filter', ([], {'id__in': '(new_ids + updated_ids)'}), '(id__in=new_ids + updated_ids)\n', (9969, 9999), False, 'from talentmap_api.organization.models import Organization, Post, TourOfDuty, Location, Country\n'), ((10624, 10650), 'talentmap_api.common.xml_helpers.strip_extra_spaces', 'strip_extra_spaces', (['"""name"""'], {}), "('name')\n", (10642, 10650), False, 'from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag\n'), ((10669, 10707), 'talentmap_api.common.xml_helpers.get_nested_tag', 'get_nested_tag', (['"""_skill_codes"""', '"""code"""'], {}), "('_skill_codes', 'code')\n", (10683, 10707), False, 'from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag\n'), ((10013, 10057), 'talentmap_api.organization.models.Post.objects.filter', 'Post.objects.filter', ([], {'_location_code': 'loc.code'}), '(_location_code=loc.code)\n', (10032, 10057), False, 'from talentmap_api.organization.models import Organization, Post, TourOfDuty, Location, Country\n'), ((8073, 8104), 're.sub', 're.sub', (['"""&"""', '"""&"""', 'item.text'], {}), "('&', '&', item.text)\n", (8079, 8104), False, 'import re\n')] |
oscarfonts/web2py | gluon/tests/test_recfile.py | a18e0e489fe7a770c62fca510a4299886b0a9bb7 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Unit tests for gluon.recfile
"""
import unittest
import os
import shutil
import uuid
from .fix_path import fix_sys_path
fix_sys_path(__file__)
from gluon import recfile
class TestRecfile(unittest.TestCase):
def setUp(self):
os.mkdir('tests')
def tearDown(self):
shutil.rmtree('tests')
def test_generation(self):
for k in range(10):
teststring = 'test%s' % k
filename = os.path.join('tests', str(uuid.uuid4()) + '.test')
with recfile.open(filename, "w") as g:
g.write(teststring)
self.assertEqual(recfile.open(filename, "r").read(), teststring)
is_there = recfile.exists(filename)
self.assertTrue(is_there)
recfile.remove(filename)
is_there = recfile.exists(filename)
self.assertFalse(is_there)
for k in range(10):
teststring = 'test%s' % k
filename = str(uuid.uuid4()) + '.test'
with recfile.open(filename, "w", path='tests') as g:
g.write(teststring)
self.assertEqual(recfile.open(filename, "r", path='tests').read(), teststring)
is_there = recfile.exists(filename, path='tests')
self.assertTrue(is_there)
recfile.remove(filename, path='tests')
is_there = recfile.exists(filename, path='tests')
self.assertFalse(is_there)
for k in range(10):
teststring = 'test%s' % k
filename = os.path.join('tests', str(uuid.uuid4()), str(uuid.uuid4()) + '.test')
with recfile.open(filename, "w") as g:
g.write(teststring)
self.assertEqual(recfile.open(filename, "r").read(), teststring)
is_there = recfile.exists(filename)
self.assertTrue(is_there)
recfile.remove(filename)
is_there = recfile.exists(filename)
self.assertFalse(is_there)
def test_existing(self):
filename = os.path.join('tests', str(uuid.uuid4()) + '.test')
with open(filename, 'w') as g:
g.write('this file exists')
self.assertTrue(recfile.exists(filename))
self.assertTrue(hasattr(recfile.open(filename, "r"), 'read'))
recfile.remove(filename, path='tests')
self.assertFalse(recfile.exists(filename))
self.assertRaises(IOError, recfile.remove, filename)
self.assertRaises(IOError, recfile.open, filename, "r")
if __name__ == '__main__':
unittest.main()
| [((2565, 2580), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2578, 2580), False, 'import unittest\n'), ((296, 313), 'os.mkdir', 'os.mkdir', (['"""tests"""'], {}), "('tests')\n", (304, 313), False, 'import os\n'), ((347, 369), 'shutil.rmtree', 'shutil.rmtree', (['"""tests"""'], {}), "('tests')\n", (360, 369), False, 'import shutil\n'), ((2317, 2355), 'gluon.recfile.remove', 'recfile.remove', (['filename'], {'path': '"""tests"""'}), "(filename, path='tests')\n", (2331, 2355), False, 'from gluon import recfile\n'), ((729, 753), 'gluon.recfile.exists', 'recfile.exists', (['filename'], {}), '(filename)\n', (743, 753), False, 'from gluon import recfile\n'), ((804, 828), 'gluon.recfile.remove', 'recfile.remove', (['filename'], {}), '(filename)\n', (818, 828), False, 'from gluon import recfile\n'), ((852, 876), 'gluon.recfile.exists', 'recfile.exists', (['filename'], {}), '(filename)\n', (866, 876), False, 'from gluon import recfile\n'), ((1248, 1286), 'gluon.recfile.exists', 'recfile.exists', (['filename'], {'path': '"""tests"""'}), "(filename, path='tests')\n", (1262, 1286), False, 'from gluon import recfile\n'), ((1337, 1375), 'gluon.recfile.remove', 'recfile.remove', (['filename'], {'path': '"""tests"""'}), "(filename, path='tests')\n", (1351, 1375), False, 'from gluon import recfile\n'), ((1399, 1437), 'gluon.recfile.exists', 'recfile.exists', (['filename'], {'path': '"""tests"""'}), "(filename, path='tests')\n", (1413, 1437), False, 'from gluon import recfile\n'), ((1823, 1847), 'gluon.recfile.exists', 'recfile.exists', (['filename'], {}), '(filename)\n', (1837, 1847), False, 'from gluon import recfile\n'), ((1898, 1922), 'gluon.recfile.remove', 'recfile.remove', (['filename'], {}), '(filename)\n', (1912, 1922), False, 'from gluon import recfile\n'), ((1946, 1970), 'gluon.recfile.exists', 'recfile.exists', (['filename'], {}), '(filename)\n', (1960, 1970), False, 'from gluon import recfile\n'), ((2213, 2237), 'gluon.recfile.exists', 'recfile.exists', (['filename'], {}), '(filename)\n', (2227, 2237), False, 'from gluon import recfile\n'), ((2381, 2405), 'gluon.recfile.exists', 'recfile.exists', (['filename'], {}), '(filename)\n', (2395, 2405), False, 'from gluon import recfile\n'), ((559, 586), 'gluon.recfile.open', 'recfile.open', (['filename', '"""w"""'], {}), "(filename, 'w')\n", (571, 586), False, 'from gluon import recfile\n'), ((1050, 1091), 'gluon.recfile.open', 'recfile.open', (['filename', '"""w"""'], {'path': '"""tests"""'}), "(filename, 'w', path='tests')\n", (1062, 1091), False, 'from gluon import recfile\n'), ((1653, 1680), 'gluon.recfile.open', 'recfile.open', (['filename', '"""w"""'], {}), "(filename, 'w')\n", (1665, 1680), False, 'from gluon import recfile\n'), ((2271, 2298), 'gluon.recfile.open', 'recfile.open', (['filename', '"""r"""'], {}), "(filename, 'r')\n", (2283, 2298), False, 'from gluon import recfile\n'), ((1009, 1021), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1019, 1021), False, 'import uuid\n'), ((1592, 1604), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1602, 1604), False, 'import uuid\n'), ((2085, 2097), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2095, 2097), False, 'import uuid\n'), ((517, 529), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (527, 529), False, 'import uuid\n'), ((658, 685), 'gluon.recfile.open', 'recfile.open', (['filename', '"""r"""'], {}), "(filename, 'r')\n", (670, 685), False, 'from gluon import recfile\n'), ((1163, 1204), 'gluon.recfile.open', 'recfile.open', (['filename', '"""r"""'], {'path': '"""tests"""'}), 
"(filename, 'r', path='tests')\n", (1175, 1204), False, 'from gluon import recfile\n'), ((1611, 1623), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1621, 1623), False, 'import uuid\n'), ((1752, 1779), 'gluon.recfile.open', 'recfile.open', (['filename', '"""r"""'], {}), "(filename, 'r')\n", (1764, 1779), False, 'from gluon import recfile\n')] |
cfrome77/liquid-stats | configLambdas.py | 7a4d751dea215c94b650beb154a90abce7e1592d | import json
import os
import subprocess
from dotenv import load_dotenv
from subprocess import check_output, Popen, PIPE
load_dotenv()
# Accessing variables.
CLIENT_ID = os.environ.get('CLIENT_ID')
CLIENT_SECRET = os.environ.get('CLIENT_SECRET')
USERNAME = os.environ.get('USERNAME')
BUCKET_NAME = os.environ.get('BUCKET_NAME')
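# Queries the AWS CLI for every deployed Lambda function and returns the parsed "Functions" list.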
def get_lambda_functions():
function_dict = {}
res = subprocess.Popen(
["aws", "lambda", "list-functions"],
stdout=subprocess.PIPE
)
output = res.communicate()
function_dict.update(json.loads(output[0]))
return function_dict['Functions']
lambda_functions = get_lambda_functions()
for lambda_function in lambda_functions:
function_name = lambda_function['FunctionName']
subprocess.run([
"aws", "lambda", "update-function-configuration",
"--function-name", f"{function_name}", "--environment",
f"Variables={{CLIENT_ID={CLIENT_ID},CLIENT_SECRET={CLIENT_SECRET},USERNAME={USERNAME},BUCKET_NAME={BUCKET_NAME}}}"
])
| [((122, 135), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (133, 135), False, 'from dotenv import load_dotenv\n'), ((172, 199), 'os.environ.get', 'os.environ.get', (['"""CLIENT_ID"""'], {}), "('CLIENT_ID')\n", (186, 199), False, 'import os\n'), ((216, 247), 'os.environ.get', 'os.environ.get', (['"""CLIENT_SECRET"""'], {}), "('CLIENT_SECRET')\n", (230, 247), False, 'import os\n'), ((259, 285), 'os.environ.get', 'os.environ.get', (['"""USERNAME"""'], {}), "('USERNAME')\n", (273, 285), False, 'import os\n'), ((300, 329), 'os.environ.get', 'os.environ.get', (['"""BUCKET_NAME"""'], {}), "('BUCKET_NAME')\n", (314, 329), False, 'import os\n'), ((393, 470), 'subprocess.Popen', 'subprocess.Popen', (["['aws', 'lambda', 'list-functions']"], {'stdout': 'subprocess.PIPE'}), "(['aws', 'lambda', 'list-functions'], stdout=subprocess.PIPE)\n", (409, 470), False, 'import subprocess\n'), ((753, 1004), 'subprocess.run', 'subprocess.run', (["['aws', 'lambda', 'update-function-configuration', '--function-name',\n f'{function_name}', '--environment',\n f'Variables={{CLIENT_ID={CLIENT_ID},CLIENT_SECRET={CLIENT_SECRET},USERNAME={USERNAME},BUCKET_NAME={BUCKET_NAME}}}'\n ]"], {}), "(['aws', 'lambda', 'update-function-configuration',\n '--function-name', f'{function_name}', '--environment',\n f'Variables={{CLIENT_ID={CLIENT_ID},CLIENT_SECRET={CLIENT_SECRET},USERNAME={USERNAME},BUCKET_NAME={BUCKET_NAME}}}'\n ])\n", (767, 1004), False, 'import subprocess\n'), ((549, 570), 'json.loads', 'json.loads', (['output[0]'], {}), '(output[0])\n', (559, 570), False, 'import json\n')] |
SaitoYutaka/microbitAnim | microbitAnim.py | 6630d5cdb3ae867d3467a035a1c14358944c0367 | # -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version Aug 8 2018)
## http://www.wxformbuilder.org/
##
## PLEASE DO *NOT* EDIT THIS FILE!
###########################################################################
import wx
import wx.xrc
###########################################################################
## Class MyFrame1
###########################################################################
class MyFrame1 ( wx.Frame ):
def __init__( self, parent ):
wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = wx.EmptyString, pos = wx.Point( 0,0 ), size = wx.Size( 767,507 ), style = wx.DEFAULT_FRAME_STYLE|wx.TAB_TRAVERSAL )
self.SetSizeHints( wx.DefaultSize, wx.DefaultSize )
gbSizer1 = wx.GridBagSizer( 0, 0 )
gbSizer1.SetFlexibleDirection( wx.BOTH )
gbSizer1.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )
self.m_button00 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button00.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button00, wx.GBPosition( 0, 0 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button01 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button01.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button01, wx.GBPosition( 0, 1 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button02 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button02.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button02, wx.GBPosition( 0, 2 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button03 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button03.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button03, wx.GBPosition( 0, 3 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button04 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button04.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button04, wx.GBPosition( 0, 4 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button10 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button10.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button10, wx.GBPosition( 1, 0 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button11 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button11.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button11, wx.GBPosition( 1, 1 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button12 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button12.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button12, wx.GBPosition( 1, 2 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button13 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button13.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button13, wx.GBPosition( 1, 3 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button14 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button14.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button14, wx.GBPosition( 1, 4 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button20 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button20.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button20, wx.GBPosition( 2, 0 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button21 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button21.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button21, wx.GBPosition( 2, 1 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button22 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button22.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button22, wx.GBPosition( 2, 2 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button23 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button23.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button23, wx.GBPosition( 2, 3 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button24 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button24.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button24, wx.GBPosition( 2, 4 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button30 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button30.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button30, wx.GBPosition( 3, 0 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button31 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button31.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button31, wx.GBPosition( 3, 1 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button32 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button32.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button32, wx.GBPosition( 3, 2 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button33 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button33.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button33, wx.GBPosition( 3, 3 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button34 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button34.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button34, wx.GBPosition( 3, 4 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button40 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button40.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button40, wx.GBPosition( 4, 0 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button41 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button41.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button41, wx.GBPosition( 4, 1 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button42 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button42.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button42, wx.GBPosition( 4, 2 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button43 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button43.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button43, wx.GBPosition( 4, 3 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.m_button44 = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
self.m_button44.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
gbSizer1.Add( self.m_button44, wx.GBPosition( 4, 4 ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
self.SetSizer( gbSizer1 )
self.Layout()
self.m_menubar1 = wx.MenuBar( 0 )
self.m_menu1 = wx.Menu()
self.m_menuItem3 = wx.MenuItem( self.m_menu1, wx.ID_ANY, u"Open", wx.EmptyString, wx.ITEM_NORMAL )
self.m_menu1.Append( self.m_menuItem3 )
self.m_menuItem1 = wx.MenuItem( self.m_menu1, wx.ID_ANY, u"Save", wx.EmptyString, wx.ITEM_NORMAL )
self.m_menu1.Append( self.m_menuItem1 )
self.m_menuItem2 = wx.MenuItem( self.m_menu1, wx.ID_ANY, u"quit", wx.EmptyString, wx.ITEM_NORMAL )
self.m_menu1.Append( self.m_menuItem2 )
self.m_menubar1.Append( self.m_menu1, u"File" )
self.m_menu2 = wx.Menu()
self.m_menuItem4 = wx.MenuItem( self.m_menu2, wx.ID_ANY, u"python", wx.EmptyString, wx.ITEM_NORMAL )
self.m_menu2.Append( self.m_menuItem4 )
self.m_menubar1.Append( self.m_menu2, u"export" )
self.SetMenuBar( self.m_menubar1 )
self.Centre( wx.BOTH )
# Connect Events
self.m_button00.Bind( wx.EVT_BUTTON, self.onButton00Click )
self.m_button01.Bind( wx.EVT_BUTTON, self.onButton01Click )
self.m_button02.Bind( wx.EVT_BUTTON, self.onButton02Click )
self.m_button03.Bind( wx.EVT_BUTTON, self.onButton03Click )
self.m_button04.Bind( wx.EVT_BUTTON, self.onButton04Click )
self.m_button10.Bind( wx.EVT_BUTTON, self.onButton10Click )
self.m_button11.Bind( wx.EVT_BUTTON, self.onButton11Click )
self.m_button12.Bind( wx.EVT_BUTTON, self.onButton12Click )
self.m_button13.Bind( wx.EVT_BUTTON, self.onButton13Click )
self.m_button14.Bind( wx.EVT_BUTTON, self.onButton14Click )
self.m_button20.Bind( wx.EVT_BUTTON, self.onButton20Click )
self.m_button21.Bind( wx.EVT_BUTTON, self.onButton21Click )
self.m_button22.Bind( wx.EVT_BUTTON, self.onButton22Click )
self.m_button23.Bind( wx.EVT_BUTTON, self.onButton23Click )
self.m_button24.Bind( wx.EVT_BUTTON, self.onButton24Click )
self.m_button30.Bind( wx.EVT_BUTTON, self.onButton30Click )
self.m_button31.Bind( wx.EVT_BUTTON, self.onButton31Click )
self.m_button32.Bind( wx.EVT_BUTTON, self.onButton32Click )
self.m_button33.Bind( wx.EVT_BUTTON, self.onButton33Click )
self.m_button34.Bind( wx.EVT_BUTTON, self.onButton34Click )
self.m_button40.Bind( wx.EVT_BUTTON, self.onButton40Click )
self.m_button41.Bind( wx.EVT_BUTTON, self.onButton41Click )
self.m_button42.Bind( wx.EVT_BUTTON, self.onButton42Click )
self.m_button43.Bind( wx.EVT_BUTTON, self.onButton43Click )
self.m_button44.Bind( wx.EVT_BUTTON, self.onButton44Click )
self.Bind( wx.EVT_MENU, self.OnMenuOpenSelect, id = self.m_menuItem3.GetId() )
self.Bind( wx.EVT_MENU, self.OnMenuSaveSelect, id = self.m_menuItem1.GetId() )
self.Bind( wx.EVT_MENU, self.OnMenuQuitSelect, id = self.m_menuItem2.GetId() )
self.Bind( wx.EVT_MENU, self.OnExportPythonSelect, id = self.m_menuItem4.GetId() )
def __del__( self ):
pass
# Virtual event handlers, overide them in your derived class
def onButton00Click( self, event ):
event.Skip()
def onButton01Click( self, event ):
event.Skip()
def onButton02Click( self, event ):
event.Skip()
def onButton03Click( self, event ):
event.Skip()
def onButton04Click( self, event ):
event.Skip()
def onButton10Click( self, event ):
event.Skip()
def onButton11Click( self, event ):
event.Skip()
def onButton12Click( self, event ):
event.Skip()
def onButton13Click( self, event ):
event.Skip()
def onButton14Click( self, event ):
event.Skip()
def onButton20Click( self, event ):
event.Skip()
def onButton21Click( self, event ):
event.Skip()
def onButton22Click( self, event ):
event.Skip()
def onButton23Click( self, event ):
event.Skip()
def onButton24Click( self, event ):
event.Skip()
def onButton30Click( self, event ):
event.Skip()
def onButton31Click( self, event ):
event.Skip()
def onButton32Click( self, event ):
event.Skip()
def onButton33Click( self, event ):
event.Skip()
def onButton34Click( self, event ):
event.Skip()
def onButton40Click( self, event ):
event.Skip()
def onButton41Click( self, event ):
event.Skip()
def onButton42Click( self, event ):
event.Skip()
def onButton43Click( self, event ):
event.Skip()
def onButton44Click( self, event ):
event.Skip()
def OnMenuOpenSelect( self, event ):
event.Skip()
def OnMenuSaveSelect( self, event ):
event.Skip()
def OnMenuQuitSelect( self, event ):
event.Skip()
def OnExportPythonSelect( self, event ):
event.Skip()
| [((859, 880), 'wx.GridBagSizer', 'wx.GridBagSizer', (['(0)', '(0)'], {}), '(0, 0)\n', (874, 880), False, 'import wx\n'), ((8427, 8440), 'wx.MenuBar', 'wx.MenuBar', (['(0)'], {}), '(0)\n', (8437, 8440), False, 'import wx\n'), ((8466, 8475), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (8473, 8475), False, 'import wx\n'), ((8503, 8580), 'wx.MenuItem', 'wx.MenuItem', (['self.m_menu1', 'wx.ID_ANY', 'u"""Open"""', 'wx.EmptyString', 'wx.ITEM_NORMAL'], {}), "(self.m_menu1, wx.ID_ANY, u'Open', wx.EmptyString, wx.ITEM_NORMAL)\n", (8514, 8580), False, 'import wx\n'), ((8667, 8744), 'wx.MenuItem', 'wx.MenuItem', (['self.m_menu1', 'wx.ID_ANY', 'u"""Save"""', 'wx.EmptyString', 'wx.ITEM_NORMAL'], {}), "(self.m_menu1, wx.ID_ANY, u'Save', wx.EmptyString, wx.ITEM_NORMAL)\n", (8678, 8744), False, 'import wx\n'), ((8831, 8908), 'wx.MenuItem', 'wx.MenuItem', (['self.m_menu1', 'wx.ID_ANY', 'u"""quit"""', 'wx.EmptyString', 'wx.ITEM_NORMAL'], {}), "(self.m_menu1, wx.ID_ANY, u'quit', wx.EmptyString, wx.ITEM_NORMAL)\n", (8842, 8908), False, 'import wx\n'), ((9057, 9066), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (9064, 9066), False, 'import wx\n'), ((9094, 9173), 'wx.MenuItem', 'wx.MenuItem', (['self.m_menu2', 'wx.ID_ANY', 'u"""python"""', 'wx.EmptyString', 'wx.ITEM_NORMAL'], {}), "(self.m_menu2, wx.ID_ANY, u'python', wx.EmptyString, wx.ITEM_NORMAL)\n", (9105, 9173), False, 'import wx\n'), ((1101, 1116), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (1108, 1116), False, 'import wx\n'), ((1168, 1188), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (1177, 1188), False, 'import wx\n'), ((1241, 1260), 'wx.GBPosition', 'wx.GBPosition', (['(0)', '(0)'], {}), '(0, 0)\n', (1254, 1260), False, 'import wx\n'), ((1264, 1279), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (1273, 1279), False, 'import wx\n'), ((1394, 1409), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (1401, 1409), False, 'import wx\n'), ((1461, 1481), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (1470, 1481), False, 'import wx\n'), ((1534, 1553), 'wx.GBPosition', 'wx.GBPosition', (['(0)', '(1)'], {}), '(0, 1)\n', (1547, 1553), False, 'import wx\n'), ((1557, 1572), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (1566, 1572), False, 'import wx\n'), ((1687, 1702), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (1694, 1702), False, 'import wx\n'), ((1754, 1774), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (1763, 1774), False, 'import wx\n'), ((1827, 1846), 'wx.GBPosition', 'wx.GBPosition', (['(0)', '(2)'], {}), '(0, 2)\n', (1840, 1846), False, 'import wx\n'), ((1850, 1865), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (1859, 1865), False, 'import wx\n'), ((1980, 1995), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (1987, 1995), False, 'import wx\n'), ((2047, 2067), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (2056, 2067), False, 'import wx\n'), ((2120, 2139), 'wx.GBPosition', 'wx.GBPosition', (['(0)', '(3)'], {}), '(0, 3)\n', (2133, 2139), False, 'import wx\n'), ((2143, 2158), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (2152, 2158), False, 'import wx\n'), ((2273, 2288), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (2280, 2288), False, 'import wx\n'), ((2340, 2360), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (2349, 2360), False, 'import wx\n'), ((2413, 2432), 'wx.GBPosition', 'wx.GBPosition', 
(['(0)', '(4)'], {}), '(0, 4)\n', (2426, 2432), False, 'import wx\n'), ((2436, 2451), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (2445, 2451), False, 'import wx\n'), ((2566, 2581), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (2573, 2581), False, 'import wx\n'), ((2633, 2653), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (2642, 2653), False, 'import wx\n'), ((2706, 2725), 'wx.GBPosition', 'wx.GBPosition', (['(1)', '(0)'], {}), '(1, 0)\n', (2719, 2725), False, 'import wx\n'), ((2729, 2744), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (2738, 2744), False, 'import wx\n'), ((2859, 2874), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (2866, 2874), False, 'import wx\n'), ((2926, 2946), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (2935, 2946), False, 'import wx\n'), ((2999, 3018), 'wx.GBPosition', 'wx.GBPosition', (['(1)', '(1)'], {}), '(1, 1)\n', (3012, 3018), False, 'import wx\n'), ((3022, 3037), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (3031, 3037), False, 'import wx\n'), ((3152, 3167), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (3159, 3167), False, 'import wx\n'), ((3219, 3239), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (3228, 3239), False, 'import wx\n'), ((3292, 3311), 'wx.GBPosition', 'wx.GBPosition', (['(1)', '(2)'], {}), '(1, 2)\n', (3305, 3311), False, 'import wx\n'), ((3315, 3330), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (3324, 3330), False, 'import wx\n'), ((3445, 3460), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (3452, 3460), False, 'import wx\n'), ((3512, 3532), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (3521, 3532), False, 'import wx\n'), ((3585, 3604), 'wx.GBPosition', 'wx.GBPosition', (['(1)', '(3)'], {}), '(1, 3)\n', (3598, 3604), False, 'import wx\n'), ((3608, 3623), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (3617, 3623), False, 'import wx\n'), ((3738, 3753), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (3745, 3753), False, 'import wx\n'), ((3805, 3825), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (3814, 3825), False, 'import wx\n'), ((3878, 3897), 'wx.GBPosition', 'wx.GBPosition', (['(1)', '(4)'], {}), '(1, 4)\n', (3891, 3897), False, 'import wx\n'), ((3901, 3916), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (3910, 3916), False, 'import wx\n'), ((4031, 4046), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (4038, 4046), False, 'import wx\n'), ((4098, 4118), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (4107, 4118), False, 'import wx\n'), ((4171, 4190), 'wx.GBPosition', 'wx.GBPosition', (['(2)', '(0)'], {}), '(2, 0)\n', (4184, 4190), False, 'import wx\n'), ((4194, 4209), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (4203, 4209), False, 'import wx\n'), ((4324, 4339), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (4331, 4339), False, 'import wx\n'), ((4391, 4411), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (4400, 4411), False, 'import wx\n'), ((4464, 4483), 'wx.GBPosition', 'wx.GBPosition', (['(2)', '(1)'], {}), '(2, 1)\n', (4477, 4483), False, 'import wx\n'), ((4487, 4502), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (4496, 4502), False, 'import wx\n'), ((4617, 4632), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (4624, 
4632), False, 'import wx\n'), ((4684, 4704), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (4693, 4704), False, 'import wx\n'), ((4757, 4776), 'wx.GBPosition', 'wx.GBPosition', (['(2)', '(2)'], {}), '(2, 2)\n', (4770, 4776), False, 'import wx\n'), ((4780, 4795), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (4789, 4795), False, 'import wx\n'), ((4910, 4925), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (4917, 4925), False, 'import wx\n'), ((4977, 4997), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (4986, 4997), False, 'import wx\n'), ((5050, 5069), 'wx.GBPosition', 'wx.GBPosition', (['(2)', '(3)'], {}), '(2, 3)\n', (5063, 5069), False, 'import wx\n'), ((5073, 5088), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (5082, 5088), False, 'import wx\n'), ((5203, 5218), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (5210, 5218), False, 'import wx\n'), ((5270, 5290), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (5279, 5290), False, 'import wx\n'), ((5343, 5362), 'wx.GBPosition', 'wx.GBPosition', (['(2)', '(4)'], {}), '(2, 4)\n', (5356, 5362), False, 'import wx\n'), ((5366, 5381), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (5375, 5381), False, 'import wx\n'), ((5496, 5511), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (5503, 5511), False, 'import wx\n'), ((5563, 5583), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (5572, 5583), False, 'import wx\n'), ((5636, 5655), 'wx.GBPosition', 'wx.GBPosition', (['(3)', '(0)'], {}), '(3, 0)\n', (5649, 5655), False, 'import wx\n'), ((5659, 5674), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (5668, 5674), False, 'import wx\n'), ((5789, 5804), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (5796, 5804), False, 'import wx\n'), ((5856, 5876), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (5865, 5876), False, 'import wx\n'), ((5929, 5948), 'wx.GBPosition', 'wx.GBPosition', (['(3)', '(1)'], {}), '(3, 1)\n', (5942, 5948), False, 'import wx\n'), ((5952, 5967), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (5961, 5967), False, 'import wx\n'), ((6082, 6097), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (6089, 6097), False, 'import wx\n'), ((6149, 6169), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (6158, 6169), False, 'import wx\n'), ((6222, 6241), 'wx.GBPosition', 'wx.GBPosition', (['(3)', '(2)'], {}), '(3, 2)\n', (6235, 6241), False, 'import wx\n'), ((6245, 6260), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (6254, 6260), False, 'import wx\n'), ((6375, 6390), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (6382, 6390), False, 'import wx\n'), ((6442, 6462), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (6451, 6462), False, 'import wx\n'), ((6515, 6534), 'wx.GBPosition', 'wx.GBPosition', (['(3)', '(3)'], {}), '(3, 3)\n', (6528, 6534), False, 'import wx\n'), ((6538, 6553), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (6547, 6553), False, 'import wx\n'), ((6668, 6683), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (6675, 6683), False, 'import wx\n'), ((6735, 6755), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (6744, 6755), False, 'import wx\n'), ((6808, 6827), 'wx.GBPosition', 'wx.GBPosition', (['(3)', '(4)'], {}), '(3, 4)\n', (6821, 6827), False, 
'import wx\n'), ((6831, 6846), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (6840, 6846), False, 'import wx\n'), ((6961, 6976), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (6968, 6976), False, 'import wx\n'), ((7028, 7048), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (7037, 7048), False, 'import wx\n'), ((7101, 7120), 'wx.GBPosition', 'wx.GBPosition', (['(4)', '(0)'], {}), '(4, 0)\n', (7114, 7120), False, 'import wx\n'), ((7124, 7139), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (7133, 7139), False, 'import wx\n'), ((7254, 7269), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (7261, 7269), False, 'import wx\n'), ((7321, 7341), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (7330, 7341), False, 'import wx\n'), ((7394, 7413), 'wx.GBPosition', 'wx.GBPosition', (['(4)', '(1)'], {}), '(4, 1)\n', (7407, 7413), False, 'import wx\n'), ((7417, 7432), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (7426, 7432), False, 'import wx\n'), ((7547, 7562), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (7554, 7562), False, 'import wx\n'), ((7614, 7634), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (7623, 7634), False, 'import wx\n'), ((7687, 7706), 'wx.GBPosition', 'wx.GBPosition', (['(4)', '(2)'], {}), '(4, 2)\n', (7700, 7706), False, 'import wx\n'), ((7710, 7725), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (7719, 7725), False, 'import wx\n'), ((7840, 7855), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (7847, 7855), False, 'import wx\n'), ((7907, 7927), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (7916, 7927), False, 'import wx\n'), ((7980, 7999), 'wx.GBPosition', 'wx.GBPosition', (['(4)', '(3)'], {}), '(4, 3)\n', (7993, 7999), False, 'import wx\n'), ((8003, 8018), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (8012, 8018), False, 'import wx\n'), ((8133, 8148), 'wx.Size', 'wx.Size', (['(50)', '(50)'], {}), '(50, 50)\n', (8140, 8148), False, 'import wx\n'), ((8200, 8220), 'wx.Colour', 'wx.Colour', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (8209, 8220), False, 'import wx\n'), ((8273, 8292), 'wx.GBPosition', 'wx.GBPosition', (['(4)', '(4)'], {}), '(4, 4)\n', (8286, 8292), False, 'import wx\n'), ((8296, 8311), 'wx.GBSpan', 'wx.GBSpan', (['(1)', '(1)'], {}), '(1, 1)\n', (8305, 8311), False, 'import wx\n'), ((668, 682), 'wx.Point', 'wx.Point', (['(0)', '(0)'], {}), '(0, 0)\n', (676, 682), False, 'import wx\n'), ((692, 709), 'wx.Size', 'wx.Size', (['(767)', '(507)'], {}), '(767, 507)\n', (699, 709), False, 'import wx\n')] |
nicoddemus/dependencies | src/dependencies/contrib/celery.py | 74180e2c6098d8ad03bc53c5703bdf8dc61c3ed9 | """
dependencies.contrib.celery
---------------------------
This module implements injectable Celery tasks.
:copyright: (c) 2016-2020 by dry-python team.
:license: BSD, see LICENSE for more details.
"""
from _dependencies.contrib.celery import shared_task
from _dependencies.contrib.celery import task
__all__ = ["shared_task", "task"]
| [] |
gaxu/keras-yolo3 | yolo3/utils.py | 7f6be0fb9a8583401246bfe65d2df2ee40777d72 | """Miscellaneous utility functions."""
from functools import reduce
from PIL import Image
import numpy as np
from matplotlib.colors import rgb_to_hsv, hsv_to_rgb
# cv2 and random are required by the OpenCV-based get_random_data2 defined below
import cv2
import random
def compose(*funcs):
"""Compose arbitrarily many functions, evaluated left to right.
Reference: https://mathieularose.com/function-composition-in-python/
"""
# return lambda x: reduce(lambda v, f: f(v), funcs, x)
if funcs:
return reduce(lambda f, g: lambda *a, **kw: g(f(*a, **kw)), funcs)
else:
raise ValueError('Composition of empty sequence not supported.')
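# Example (illustration only): since evaluation is left to right,
# compose(f, g, h)(x) is equivalent to h(g(f(x))).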
def letterbox_image(image, size):
'''resize image with unchanged aspect ratio using padding'''
iw, ih = image.size
w, h = size
scale = min(w/iw, h/ih)
nw = int(iw*scale)
nh = int(ih*scale)
image = image.resize((nw,nh), Image.BICUBIC)
new_image = Image.new('RGB', size, (128,128,128))
new_image.paste(image, ((w-nw)//2, (h-nh)//2))
return new_image
def rand(a=0, b=1):
return np.random.rand()*(b-a) + a
def get_random_data(annotation_line, input_shape, random=True, max_boxes=20, jitter=.3, hue=.1, sat=1.5, val=1.5, proc_img=True):
'''random preprocessing for real-time data augmentation'''
line = annotation_line.split()
image = Image.open(line[0])
iw, ih = image.size
h, w = input_shape
box = np.array([np.array(list(map(int,box.split(',')))) for box in line[1:]])
if not random:
# resize image
scale = min(w/iw, h/ih)
nw = int(iw*scale)
nh = int(ih*scale)
dx = (w-nw)//2
dy = (h-nh)//2
image_data=0
if proc_img:
image = image.resize((nw,nh), Image.BICUBIC)
new_image = Image.new('RGB', (w,h), (128,128,128))
new_image.paste(image, (dx, dy))
image_data = np.array(new_image)/255.
# correct boxes
box_data = np.zeros((max_boxes,5))
if len(box)>0:
np.random.shuffle(box)
if len(box)>max_boxes: box = box[:max_boxes]
box[:, [0,2]] = box[:, [0,2]]*scale + dx
box[:, [1,3]] = box[:, [1,3]]*scale + dy
box_data[:len(box)] = box
return image_data, box_data
# resize image
new_ar = w/h * rand(1-jitter,1+jitter)/rand(1-jitter,1+jitter)
scale = rand(.25, 2)
if new_ar < 1:
nh = int(scale*h)
nw = int(nh*new_ar)
else:
nw = int(scale*w)
nh = int(nw/new_ar)
image = image.resize((nw,nh), Image.BICUBIC)
# place image
dx = int(rand(0, w-nw))
dy = int(rand(0, h-nh))
new_image = Image.new('RGB', (w,h), (128,128,128))
new_image.paste(image, (dx, dy))
image = new_image
# flip image or not
flip = rand()<.5
if flip: image = image.transpose(Image.FLIP_LEFT_RIGHT)
# distort image
hue = rand(-hue, hue)
sat = rand(1, sat) if rand()<.5 else 1/rand(1, sat)
val = rand(1, val) if rand()<.5 else 1/rand(1, val)
x = rgb_to_hsv(np.array(image)/255.)
x[..., 0] += hue
x[..., 0][x[..., 0]>1] -= 1
x[..., 0][x[..., 0]<0] += 1
x[..., 1] *= sat
x[..., 2] *= val
x[x>1] = 1
x[x<0] = 0
image_data = hsv_to_rgb(x) # numpy array, 0 to 1
# correct boxes
box_data = np.zeros((max_boxes,5))
if len(box)>0:
np.random.shuffle(box)
box[:, [0,2]] = box[:, [0,2]]*nw/iw + dx
box[:, [1,3]] = box[:, [1,3]]*nh/ih + dy
if flip: box[:, [0,2]] = w - box[:, [2,0]]
box[:, 0:2][box[:, 0:2]<0] = 0
box[:, 2][box[:, 2]>w] = w
box[:, 3][box[:, 3]>h] = h
box_w = box[:, 2] - box[:, 0]
box_h = box[:, 3] - box[:, 1]
box = box[np.logical_and(box_w>1, box_h>1)] # discard invalid box
if len(box)>max_boxes: box = box[:max_boxes]
box_data[:len(box)] = box
return image_data, box_data
def get_random_data2(annotation_line, input_shape, random=True, max_boxes=20, jitter=.3, hue=.1, sat=1.5, val=1.5, proc_img=True):
'''random preprocessing for real-time data augmentation'''
line = annotation_line.split()
image = Image.open(line[0])
w, h = image.size #13 14
dx, dy = input_shape
box = np.array([np.array(list(map(int,box.split(',')))) for box in line[1:]])
x_min = w
x_max = 0
y_min = h
y_max = 0
for bbox in box:
x_min = min(x_min, bbox[0])
y_min = min(y_min, bbox[1])
x_max = max(x_max, bbox[2])
y_max = max(y_max, bbox[3])
name = bbox[4]
    # distance from the smallest box containing all target boxes to each image edge
d_to_left = x_min
d_to_right = w - x_max
d_to_top = y_min
d_to_bottom = h - y_max
    # randomly expand this minimal region
crop_x_min = int(x_min - rand(0, d_to_left))
crop_y_min = int(y_min - rand(0, d_to_top))
crop_x_max = int(x_max + rand(0, d_to_right))
crop_y_max = int(y_max + rand(0, d_to_bottom))
    # make sure the crop stays inside the image
crop_x_min = max(0, crop_x_min)
crop_y_min = max(0, crop_y_min)
crop_x_max = min(w, crop_x_max)
crop_y_max = min(h, crop_y_max)
cropped = image.crop((crop_x_min, crop_y_min, crop_x_max, crop_y_max)) # (left, upper, right, lower)
new_image = Image.new('RGB', (w,h), (128,128,128))
new_image.paste(cropped, (dx, dy))
image_data = np.array(new_image)/255.
box_data = np.zeros((max_boxes,5))
if len(box)>0:
np.random.shuffle(box)
if len(box)>max_boxes: box = box[:max_boxes]
        box[:,0] = box[:,0]-crop_x_min  # x coordinates are offset by crop_x_min
box[:,1] = box[:,1]-crop_y_min
box[:,2] = box[:,2]-crop_x_min
box[:,3] = box[:,3]-crop_y_min
box_data[:len(box)] = box
return image_data, box_data
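# NOTE: the function below redefines get_random_data2, so at import time it overrides the
# PIL-based crop version above; this variant performs the random crop with OpenCV (cv2).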
def get_random_data2(annotation_line, input_shape, max_boxes=20, jitter=.3, hue=.1, sat=1.5, val=1.5, proc_img=True):
line = annotation_line.split()
img = cv2.imread(line[0])
h_img, w_img, _ = img.shape
w, h = input_shape
box = np.array([np.array(list(map(int,box.split(',')))) for box in line[1:]])
    max_bbox = np.concatenate([np.min(box[:, 0:2], axis=0), np.max(box[:, 2:4], axis=0)], axis=-1) # the bbox enclosing all bboxes
    # distance from the enclosing bbox to each image edge
max_l_trans = max_bbox[0]
max_u_trans = max_bbox[1]
max_r_trans = w_img - max_bbox[2]
max_d_trans = h_img - max_bbox[3]
    # randomly expand the crop region
crop_xmin = max(0, int(max_bbox[0] - random.uniform(0, max_l_trans)*2))
crop_ymin = max(0, int(max_bbox[1] - random.uniform(0, max_u_trans)*2))
crop_xmax = max(w_img, int(max_bbox[2] + random.uniform(0, max_r_trans)*2))
crop_ymax = max(h_img, int(max_bbox[3] + random.uniform(0, max_d_trans)*2))
    img = img[crop_ymin : crop_ymax, crop_xmin : crop_xmax] # perform the crop
    image = Image.fromarray(cv2.cvtColor(img,cv2.COLOR_BGR2RGB)) # image is currently in cv2 (BGR) format; convert to PIL for pasting
    new_image = Image.new('RGB', (w,h), (128,128,128)) # produce a gray canvas of the target size, e.g. (416, 416)
    new_image.paste(image, (0, 0)) # paste the PIL image onto the gray canvas
    img2 = cv2.cvtColor(np.asarray(new_image),cv2.COLOR_RGB2BGR) # convert back to cv2 (BGR) format
    box_data = np.zeros((max_boxes,5)) # at most max_boxes boxes, i.e. shape -> (20, 5)
    # shift the boxes by the crop offset and drop boxes that become invalid after the transform
if len(box)>0:
np.random.shuffle(box)
if len(box)>max_boxes: box = box[:max_boxes]
box[:, [0, 2]] = box[:, [0, 2]] - crop_xmin
box[:, [1, 3]] = box[:, [1, 3]] - crop_ymin
box[:, 2][box[:, 2]>w] = w
box[:, 3][box[:, 3]>h] = h
box_w = box[:, 2] - box[:, 0]
box_h = box[:, 3] - box[:, 1]
box = box[np.logical_and(box_w>1, box_h>1)] # discard invalid box
if len(box)>max_boxes: box = box[:max_boxes]
box_data[:len(box)] = box
    # draw the boxes (debugging only, disabled)
# light_blue = (255,200,100)
# for boxs in box:
# cv2.rectangle(img2,(boxs[0],boxs[1]),(boxs[2],boxs[3]),light_blue,2)
    # writename=os.path.basename(line[0]) # get the file name
# cv2.imshow('My Image', img2)
# cv2.waitKey(0)
return img2, box_data
| [((847, 886), 'PIL.Image.new', 'Image.new', (['"""RGB"""', 'size', '(128, 128, 128)'], {}), "('RGB', size, (128, 128, 128))\n", (856, 886), False, 'from PIL import Image\n'), ((1257, 1276), 'PIL.Image.open', 'Image.open', (['line[0]'], {}), '(line[0])\n', (1267, 1276), False, 'from PIL import Image\n'), ((2591, 2632), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(w, h)', '(128, 128, 128)'], {}), "('RGB', (w, h), (128, 128, 128))\n", (2600, 2632), False, 'from PIL import Image\n'), ((3169, 3182), 'matplotlib.colors.hsv_to_rgb', 'hsv_to_rgb', (['x'], {}), '(x)\n', (3179, 3182), False, 'from matplotlib.colors import rgb_to_hsv, hsv_to_rgb\n'), ((3241, 3265), 'numpy.zeros', 'np.zeros', (['(max_boxes, 5)'], {}), '((max_boxes, 5))\n', (3249, 3265), True, 'import numpy as np\n'), ((4090, 4109), 'PIL.Image.open', 'Image.open', (['line[0]'], {}), '(line[0])\n', (4100, 4109), False, 'from PIL import Image\n'), ((5109, 5150), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(w, h)', '(128, 128, 128)'], {}), "('RGB', (w, h), (128, 128, 128))\n", (5118, 5150), False, 'from PIL import Image\n'), ((5245, 5269), 'numpy.zeros', 'np.zeros', (['(max_boxes, 5)'], {}), '((max_boxes, 5))\n', (5253, 5269), True, 'import numpy as np\n'), ((6714, 6755), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(w, h)', '(128, 128, 128)'], {}), "('RGB', (w, h), (128, 128, 128))\n", (6723, 6755), False, 'from PIL import Image\n'), ((6921, 6945), 'numpy.zeros', 'np.zeros', (['(max_boxes, 5)'], {}), '((max_boxes, 5))\n', (6929, 6945), True, 'import numpy as np\n'), ((1882, 1906), 'numpy.zeros', 'np.zeros', (['(max_boxes, 5)'], {}), '((max_boxes, 5))\n', (1890, 1906), True, 'import numpy as np\n'), ((3292, 3314), 'numpy.random.shuffle', 'np.random.shuffle', (['box'], {}), '(box)\n', (3309, 3314), True, 'import numpy as np\n'), ((5204, 5223), 'numpy.array', 'np.array', (['new_image'], {}), '(new_image)\n', (5212, 5223), True, 'import numpy as np\n'), ((5296, 5318), 'numpy.random.shuffle', 'np.random.shuffle', (['box'], {}), '(box)\n', (5313, 5318), True, 'import numpy as np\n'), ((6853, 6874), 'numpy.asarray', 'np.asarray', (['new_image'], {}), '(new_image)\n', (6863, 6874), True, 'import numpy as np\n'), ((7052, 7074), 'numpy.random.shuffle', 'np.random.shuffle', (['box'], {}), '(box)\n', (7069, 7074), True, 'import numpy as np\n'), ((989, 1005), 'numpy.random.rand', 'np.random.rand', ([], {}), '()\n', (1003, 1005), True, 'import numpy as np\n'), ((1704, 1745), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(w, h)', '(128, 128, 128)'], {}), "('RGB', (w, h), (128, 128, 128))\n", (1713, 1745), False, 'from PIL import Image\n'), ((1941, 1963), 'numpy.random.shuffle', 'np.random.shuffle', (['box'], {}), '(box)\n', (1958, 1963), True, 'import numpy as np\n'), ((2973, 2988), 'numpy.array', 'np.array', (['image'], {}), '(image)\n', (2981, 2988), True, 'import numpy as np\n'), ((5954, 5981), 'numpy.min', 'np.min', (['box[:, 0:2]'], {'axis': '(0)'}), '(box[:, 0:2], axis=0)\n', (5960, 5981), True, 'import numpy as np\n'), ((5983, 6010), 'numpy.max', 'np.max', (['box[:, 2:4]'], {'axis': '(0)'}), '(box[:, 2:4], axis=0)\n', (5989, 6010), True, 'import numpy as np\n'), ((1813, 1832), 'numpy.array', 'np.array', (['new_image'], {}), '(new_image)\n', (1821, 1832), True, 'import numpy as np\n'), ((3667, 3703), 'numpy.logical_and', 'np.logical_and', (['(box_w > 1)', '(box_h > 1)'], {}), '(box_w > 1, box_h > 1)\n', (3681, 3703), True, 'import numpy as np\n'), ((7397, 7433), 'numpy.logical_and', 'np.logical_and', (['(box_w > 1)', '(box_h > 1)'], {}), 
'(box_w > 1, box_h > 1)\n', (7411, 7433), True, 'import numpy as np\n')] |
MardanovTimur/kaggle | text_classification/config.py | 62392863a07fcc5de9821c28cf9c6dbbf39ced59 | import logging
import pathlib
logging.basicConfig(level=logging.INFO)
# Dirs
ROOT_DIR = pathlib.Path(__file__).parent.absolute()
DUMP_DIR = ROOT_DIR / 'dumps'
| [((31, 70), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (50, 70), False, 'import logging\n'), ((90, 112), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (102, 112), False, 'import pathlib\n')] |
JosephDErwin/sportsreference | sportsreference/ncaaf/rankings.py | f026366bec91fdf4bebef48e3a4bfd7c5bfab4bd | import re
from pyquery import PyQuery as pq
from .. import utils
from .constants import RANKINGS_SCHEME, RANKINGS_URL
from six.moves.urllib.error import HTTPError
class Rankings:
"""
Get all Associated Press (AP) rankings on a week-by-week basis.
Grab a list of the rankings published by the Associated Press to easily
query the hierarchy of teams each week. The results expose the current and
previous rankings as well as the movement for each team in the list.
Parameters
----------
year : string (optional)
A string of the requested year to pull rankings from. Defaults to the
most recent season.
"""
def __init__(self, year=None):
self._rankings = {}
self._find_rankings(year)
def _pull_rankings_page(self, year):
"""
Download the rankings page.
Download the rankings page for the requested year and create a PyQuery
object.
Parameters
----------
year : string
A string of the requested year to pull rankings from.
Returns
-------
PyQuery object
Returns a PyQuery object of the rankings HTML page.
"""
try:
return pq(RANKINGS_URL % year)
except HTTPError:
return None
def _get_team(self, team):
"""
Retrieve team's name and abbreviation.
The team's name and abbreviation are embedded within the 'school_name'
tag and, in the case of the abbreviation, require special parsing as it
is located in the middle of a URI. The name and abbreviation are
returned for the requested school.
Parameters
----------
team : PyQuery object
A PyQuery object representing a single row in a table on the
rankings page.
Returns
-------
tuple (string, string)
Returns a tuple of two strings where the first string is the team's
abbreviation, such as 'PURDUE' and the second string is the team's
name, such as 'Purdue'.
"""
name_tag = team('td[data-stat="school_name"]')
abbreviation = re.sub(r'.*/cfb/schools/', '', str(name_tag('a')))
abbreviation = re.sub(r'/.*', '', abbreviation)
name = team('td[data-stat="school_name"] a').text()
return abbreviation, name
def _find_rankings(self, year):
"""
Retrieve the rankings for each week.
Find and retrieve all AP rankings for the requested year and combine
them on a per-week basis. Each week contains information about the
name, abbreviation, rank, movement, and previous rank for each team
as well as the date and week number the results were published on.
Parameters
----------
year : string
A string of the requested year to pull rankings from.
"""
if not year:
year = utils._find_year_for_season('ncaaf')
page = self._pull_rankings_page(year)
if not page:
output = ("Can't pull rankings page. Ensure the following URL "
"exists: %s" % RANKINGS_URL)
raise ValueError(output)
rankings = page('table#ap tbody tr').items()
weekly_rankings = []
week = 0
for team in rankings:
if 'class="thead"' in str(team):
self._rankings[int(week)] = weekly_rankings
weekly_rankings = []
continue
abbreviation, name = self._get_team(team)
rank = utils._parse_field(RANKINGS_SCHEME, team, 'rank')
week = utils._parse_field(RANKINGS_SCHEME, team, 'week')
date = utils._parse_field(RANKINGS_SCHEME, team, 'date')
previous = utils._parse_field(RANKINGS_SCHEME, team, 'previous')
change = utils._parse_field(RANKINGS_SCHEME, team, 'change')
if 'decrease' in str(team(RANKINGS_SCHEME['change'])):
change = int(change) * -1
elif 'increase' in str(team(RANKINGS_SCHEME['change'])):
try:
change = int(change)
except ValueError:
change = 0
else:
change = 0
rank_details = {
'abbreviation': abbreviation,
'name': name,
'rank': int(rank),
'week': int(week),
'date': date,
'previous': previous,
'change': change
}
weekly_rankings.append(rank_details)
# Add the final rankings which is not terminated with another header
# row and hence will not hit the first if statement in the loop above.
self._rankings[int(week)] = weekly_rankings
@property
def current_extended(self):
"""
Returns a ``list`` of ``dictionaries`` of the most recent AP rankings.
The list is ordered in terms of the ranking so the #1 team will be in
the first element and the #25 team will be the last element. Each
dictionary has the following structure::
{
'abbreviation': Team's abbreviation, such as 'PURDUE' (str),
'name': Team's full name, such as 'Purdue' (str),
'rank': Team's rank for the current week (int),
'week': Week number for the results, such as 19 (int),
'date': Date the rankings were released, such as '2017-03-01'.
Can also be 'Final' for the final rankings or
'Preseason' for preseason rankings (str),
'previous': The team's previous rank, if applicable (str),
'change': The amount the team moved up or down the rankings.
Moves up the ladder have a positive number while
drops yield a negative number and teams that didn't
move have 0 (int)
}
"""
latest_week = max(self._rankings.keys())
ordered_dict = sorted(self._rankings[latest_week],
key=lambda k: k['rank'])
return ordered_dict
@property
def current(self):
"""
Returns a ``dictionary`` of the most recent rankings from the
Associated Press where each key is a ``string`` of the team's
abbreviation and each value is an ``int`` of the team's rank for the
current week.
"""
rankings_dict = {}
for team in self.current_extended:
rankings_dict[team['abbreviation']] = team['rank']
return rankings_dict
@property
def complete(self):
"""
Returns a ``dictionary`` where each key is a week number as an ``int``
and each value is a ``list`` of ``dictionaries`` containing the AP
rankings for each week. Within each list is a dictionary of team
information such as name, abbreviation, rank, and more. Note that the
list might not necessarily be in the same order as the rankings.
The overall dictionary has the following structure::
{
week number, ie 16 (int): [
{
'abbreviation': Team's abbreviation, such as 'PURDUE'
(str),
'name': Team's full name, such as 'Purdue' (str),
'rank': Team's rank for the current week (int),
'week': Week number for the results, such as 16 (int),
'date': Date the rankings were released, such as
'2017-12-03'. Can also be 'Final' for the final
rankings or 'Preseason' for preseason rankings
(str),
'previous': The team's previous rank, if applicable
(str),
'change': The amount the team moved up or down the
rankings. Moves up the ladder have a positive
number while drops yield a negative number
and teams that didn't move have 0 (int)
},
...
],
...
}
"""
return self._rankings
| [((2268, 2299), 're.sub', 're.sub', (['"""/.*"""', '""""""', 'abbreviation'], {}), "('/.*', '', abbreviation)\n", (2274, 2299), False, 'import re\n'), ((1235, 1258), 'pyquery.PyQuery', 'pq', (['(RANKINGS_URL % year)'], {}), '(RANKINGS_URL % year)\n', (1237, 1258), True, 'from pyquery import PyQuery as pq\n')] |
davide97l/DI-engine | ding/hpc_rl/wrapper.py | d48c93bcd5c07c29f2ce4ac1b7756b8bc255c423 | import importlib
from ditk import logging
from collections import OrderedDict
from functools import wraps
import ding
'''
Overview:
`hpc_wrapper` is the wrapper for functions which are supported by hpc. If a function is wrapped by it, we will
search for its hpc type and return the function implemented by hpc.
We will use the following code as a sample to introduce `hpc_wrapper`:
```
@hpc_wrapper(shape_fn=shape_fn_dntd, namedtuple_data=True, include_args=[0,1,2,3],
include_kwargs=['data', 'gamma', 'v_min', 'v_max'], is_cls_method=False)
def dist_nstep_td_error(
data: namedtuple,
gamma: float,
v_min: float,
v_max: float,
n_atom: int,
nstep: int = 1,
) -> torch.Tensor:
...
```
Parameters:
    - shape_fn (:obj:`function`): a function which returns the shape needed by the hpc function. In fact, it returns
        all args that the hpc function needs.
    - namedtuple_data (:obj:`bool`): If True, when the hpc function is called, it will be called as hpc_function(*namedtuple).
        If False, namedtuple data will keep its `namedtuple` type.
    - include_args (:obj:`list`): a list of indices of the args that need to be passed to the hpc function. As shown in the sample,
        include_args=[0,1,2,3], which means `data`, `gamma`, `v_min` and `v_max` will be set in the hpc function.
    - include_kwargs (:obj:`list`): a list of keys of the kwargs that need to be passed to the hpc function. As shown in the sample,
        include_kwargs=['data', 'gamma', 'v_min', 'v_max'], which means `data`, `gamma`, `v_min` and `v_max` will be
        set in the hpc function.
    - is_cls_method (:obj:`bool`): If True, it means the function we wrap is a method of a class. `self` will be put
        into args. We will get rid of `self` in args. Besides, we will use its class name as its fn_name.
        If False, it means the function is a plain function.
Q&A:
    - Q: Do `include_args` and `include_kwargs` need to be set at the same time?
    - A: Yes. Together, `include_args` and `include_kwargs` can deal with all types of input, such as (data, gamma, v_min=v_min,
        v_max=v_max) and (data, gamma, v_min, v_max).
- Q: What is `hpc_fns`?
- A: Here we show a normal `hpc_fns`:
```
hpc_fns = {
'fn_name1': {
'runtime_name1': hpc_fn1,
'runtime_name2': hpc_fn2,
...
},
...
}
```
        Besides, `per_fn_limit` means the max length of `hpc_fns[fn_name]`. When a new function comes, the oldest
        function will be popped from `hpc_fns[fn_name]`.
'''
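# Illustrative sketch only (not part of DI-engine): `shape_fn_dntd` from the sample above
# is assumed to take the wrapped call's (args, kwargs) and return the list of sizes that
# is used both to construct the hpc kernel (cls(*shape)) and to build its runtime_name.
# The attribute access below is a hypothetical example, not the real implementation.
#
#     def shape_fn_dntd(args, kwargs):
#         data = kwargs.get('data', args[0] if args else None)
#         # e.g. the dimensions the DistNStepTD kernel needs at construction time
#         return list(data.dist.shape)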
hpc_fns = {}
per_fn_limit = 3
def register_runtime_fn(fn_name, runtime_name, shape):
fn_name_mapping = {
'gae': ['hpc_rll.rl_utils.gae', 'GAE'],
'dist_nstep_td_error': ['hpc_rll.rl_utils.td', 'DistNStepTD'],
'LSTM': ['hpc_rll.torch_utils.network.rnn', 'LSTM'],
'ppo_error': ['hpc_rll.rl_utils.ppo', 'PPO'],
'q_nstep_td_error': ['hpc_rll.rl_utils.td', 'QNStepTD'],
'q_nstep_td_error_with_rescale': ['hpc_rll.rl_utils.td', 'QNStepTDRescale'],
'ScatterConnection': ['hpc_rll.torch_utils.network.scatter_connection', 'ScatterConnection'],
'td_lambda_error': ['hpc_rll.rl_utils.td', 'TDLambda'],
'upgo_loss': ['hpc_rll.rl_utils.upgo', 'UPGO'],
'vtrace_error': ['hpc_rll.rl_utils.vtrace', 'VTrace'],
}
fn_str = fn_name_mapping[fn_name]
cls = getattr(importlib.import_module(fn_str[0]), fn_str[1])
hpc_fn = cls(*shape).cuda()
if fn_name not in hpc_fns:
hpc_fns[fn_name] = OrderedDict()
hpc_fns[fn_name][runtime_name] = hpc_fn
while len(hpc_fns[fn_name]) > per_fn_limit:
hpc_fns[fn_name].popitem(last=False)
# print(hpc_fns)
return hpc_fn
def hpc_wrapper(shape_fn=None, namedtuple_data=False, include_args=[], include_kwargs=[], is_cls_method=False):
def decorate(fn):
@wraps(fn)
def wrapper(*args, **kwargs):
if ding.enable_hpc_rl:
shape = shape_fn(args, kwargs)
if is_cls_method:
fn_name = args[0].__class__.__name__
else:
fn_name = fn.__name__
runtime_name = '_'.join([fn_name] + [str(s) for s in shape])
if fn_name not in hpc_fns or runtime_name not in hpc_fns[fn_name]:
hpc_fn = register_runtime_fn(fn_name, runtime_name, shape)
else:
hpc_fn = hpc_fns[fn_name][runtime_name]
if is_cls_method:
args = args[1:]
clean_args = []
for i in include_args:
if i < len(args):
clean_args.append(args[i])
nouse_args = list(set(list(range(len(args)))).difference(set(include_args)))
clean_kwargs = {}
for k, v in kwargs.items():
if k in include_kwargs:
if k == 'lambda_':
k = 'lambda'
clean_kwargs[k] = v
nouse_kwargs = list(set(kwargs.keys()).difference(set(include_kwargs)))
if len(nouse_args) > 0 or len(nouse_kwargs) > 0:
logging.warn(
'in {}, index {} of args are dropped, and keys {} of kwargs are dropped.'.format(
runtime_name, nouse_args, nouse_kwargs
)
)
if namedtuple_data:
data = args[0] # args[0] is a namedtuple
return hpc_fn(*data, *clean_args[1:], **clean_kwargs)
else:
return hpc_fn(*clean_args, **clean_kwargs)
else:
return fn(*args, **kwargs)
return wrapper
return decorate
| [((3513, 3547), 'importlib.import_module', 'importlib.import_module', (['fn_str[0]'], {}), '(fn_str[0])\n', (3536, 3547), False, 'import importlib\n'), ((3650, 3663), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (3661, 3663), False, 'from collections import OrderedDict\n'), ((3987, 3996), 'functools.wraps', 'wraps', (['fn'], {}), '(fn)\n', (3992, 3996), False, 'from functools import wraps\n')] |
JLJTECH/TutorialTesting | CodeWars/2016/NumberOfOccurrences-7k.py | f2dbbd49a86b3b086d0fc156ac3369fb74727f86 | #Return the count of int(s) in passed array.
def number_of_occurrences(s, xs):
return xs.count(s) | [] |
islam-200555/Telethon | telethon_generator/parsers/tlobject.py | 85103bcf6de8024c902ede98f0b9bf0f7f47a0aa | import re
from zlib import crc32
from ..utils import snake_to_camel_case
CORE_TYPES = (
0xbc799737, # boolFalse#bc799737 = Bool;
0x997275b5, # boolTrue#997275b5 = Bool;
0x3fedd339, # true#3fedd339 = True;
0x1cb5c415, # vector#1cb5c415 {t:Type} # [ t ] = Vector t;
)
# https://github.com/telegramdesktop/tdesktop/blob/4bf66cb6e93f3965b40084771b595e93d0b11bcd/Telegram/SourceFiles/codegen/scheme/codegen_scheme.py#L57-L62
WHITELISTED_MISMATCHING_IDS = {
# 0 represents any layer
0: {'ipPortSecret', 'accessPointRule', 'help.configSimple'},
77: {'channel'},
78: {'channel'}
}
class TLObject:
def __init__(self, fullname, object_id, args, result, is_function, layer):
"""
Initializes a new TLObject, given its properties.
:param fullname: The fullname of the TL object (namespace.name)
The namespace can be omitted.
:param object_id: The hexadecimal string representing the object ID
:param args: The arguments, if any, of the TL object
:param result: The result type of the TL object
:param is_function: Is the object a function or a type?
:param layer: The layer this TLObject belongs to.
"""
# The name can or not have a namespace
self.fullname = fullname
if '.' in fullname:
self.namespace, self.name = fullname.split('.', maxsplit=1)
else:
self.namespace, self.name = None, fullname
self.args = args
self.result = result
self.is_function = is_function
self.id = None
if object_id is None:
self.id = self.infer_id()
else:
self.id = int(object_id, base=16)
whitelist = WHITELISTED_MISMATCHING_IDS[0] |\
WHITELISTED_MISMATCHING_IDS.get(layer, set())
if self.fullname not in whitelist:
assert self.id == self.infer_id(),\
'Invalid inferred ID for ' + repr(self)
self.class_name = snake_to_camel_case(
self.name, suffix='Request' if self.is_function else '')
self.real_args = list(a for a in self.sorted_args() if not
(a.flag_indicator or a.generic_definition))
def sorted_args(self):
"""Returns the arguments properly sorted and ready to plug-in
into a Python's method header (i.e., flags and those which
can be inferred will go last so they can default =None)
"""
return sorted(self.args,
key=lambda x: x.is_flag or x.can_be_inferred)
def __repr__(self, ignore_id=False):
if self.id is None or ignore_id:
hex_id = ''
else:
hex_id = '#{:08x}'.format(self.id)
if self.args:
args = ' ' + ' '.join([repr(arg) for arg in self.args])
else:
args = ''
return '{}{}{} = {}'.format(self.fullname, hex_id, args, self.result)
def infer_id(self):
representation = self.__repr__(ignore_id=True)
representation = representation\
.replace(':bytes ', ':string ')\
.replace('?bytes ', '?string ')\
.replace('<', ' ').replace('>', '')\
.replace('{', '').replace('}', '')
representation = re.sub(
r' \w+:flags\.\d+\?true',
r'',
representation
)
return crc32(representation.encode('ascii'))
class TLArg:
def __init__(self, name, arg_type, generic_definition):
"""
Initializes a new .tl argument
:param name: The name of the .tl argument
:param arg_type: The type of the .tl argument
:param generic_definition: Is the argument a generic definition?
(i.e. {X:Type})
"""
self.name = 'is_self' if name == 'self' else name
# Default values
self.is_vector = False
self.is_flag = False
self.skip_constructor_id = False
self.flag_index = -1
# Special case: some types can be inferred, which makes it
# less annoying to type. Currently the only type that can
# be inferred is if the name is 'random_id', to which a
# random ID will be assigned if left as None (the default)
self.can_be_inferred = name == 'random_id'
# The type can be an indicator that other arguments will be flags
if arg_type == '#':
self.flag_indicator = True
self.type = None
self.is_generic = False
else:
self.flag_indicator = False
self.is_generic = arg_type.startswith('!')
# Strip the exclamation mark always to have only the name
self.type = arg_type.lstrip('!')
# The type may be a flag (flags.IDX?REAL_TYPE)
# Note that 'flags' is NOT the flags name; this
# is determined by a previous argument
# However, we assume that the argument will always be called 'flags'
flag_match = re.match(r'flags.(\d+)\?([\w<>.]+)', self.type)
if flag_match:
self.is_flag = True
self.flag_index = int(flag_match.group(1))
# Update the type to match the exact type, not the "flagged" one
self.type = flag_match.group(2)
# Then check if the type is a Vector<REAL_TYPE>
vector_match = re.match(r'[Vv]ector<([\w\d.]+)>', self.type)
if vector_match:
self.is_vector = True
# If the type's first letter is not uppercase, then
# it is a constructor and we use (read/write) its ID
# as pinpointed on issue #81.
self.use_vector_id = self.type[0] == 'V'
# Update the type to match the one inside the vector
self.type = vector_match.group(1)
# See use_vector_id. An example of such case is ipPort in
# help.configSpecial
if self.type.split('.')[-1][0].islower():
self.skip_constructor_id = True
# The name may contain "date" in it, if this is the case and the type is "int",
# we can safely assume that this should be treated as a "date" object.
# Note that this is not a valid Telegram object, but it's easier to work with
if self.type == 'int' and (
re.search(r'(\b|_)date\b', name) or
name in ('expires', 'expires_at', 'was_online')):
self.type = 'date'
self.generic_definition = generic_definition
def type_hint(self):
type = self.type
if '.' in type:
type = type.split('.')[1]
result = {
'int': 'int',
'long': 'int',
'int128': 'int',
'int256': 'int',
'string': 'str',
'date': 'Optional[datetime]', # None date = 0 timestamp
'bytes': 'bytes',
'true': 'bool',
}.get(type, "Type{}".format(type))
if self.is_vector:
result = 'List[{}]'.format(result)
if self.is_flag and type != 'date':
result = 'Optional[{}]'.format(result)
return result
def __str__(self):
# Find the real type representation by updating it as required
real_type = self.type
if self.flag_indicator:
real_type = '#'
if self.is_vector:
if self.use_vector_id:
real_type = 'Vector<{}>'.format(real_type)
else:
real_type = 'vector<{}>'.format(real_type)
if self.is_generic:
real_type = '!{}'.format(real_type)
if self.is_flag:
real_type = 'flags.{}?{}'.format(self.flag_index, real_type)
if self.generic_definition:
return '{{{}:{}}}'.format(self.name, real_type)
else:
return '{}:{}'.format(self.name, real_type)
def __repr__(self):
return str(self).replace(':date', ':int').replace('?date', '?int')
def _from_line(line, is_function, layer):
match = re.match(
r'^([\w.]+)' # 'name'
r'(?:#([0-9a-fA-F]+))?' # '#optionalcode'
r'(?:\s{?\w+:[\w\d<>#.?!]+}?)*' # '{args:.0?type}'
r'\s=\s' # ' = '
r'([\w\d<>#.?]+);$', # '<result.type>;'
line
)
if match is None:
# Probably "vector#1cb5c415 {t:Type} # [ t ] = Vector t;"
raise ValueError('Cannot parse TLObject {}'.format(line))
args_match = re.findall(
r'({)?'
r'(\w+)'
r':'
r'([\w\d<>#.?!]+)'
r'}?',
line
)
return TLObject(
fullname=match.group(1),
object_id=match.group(2),
result=match.group(3),
is_function=is_function,
layer=layer,
args=[TLArg(name, arg_type, brace != '')
for brace, name, arg_type in args_match]
)
def parse_tl(file_path, layer, ignore_core=False):
"""This method yields TLObjects from a given .tl file."""
with open(file_path, encoding='utf-8') as file:
is_function = False
for line in file:
comment_index = line.find('//')
if comment_index != -1:
line = line[:comment_index]
line = line.strip()
if not line:
continue
            match = re.match(r'---(\w+)---', line)
if match:
following_types = match.group(1)
is_function = following_types == 'functions'
continue
try:
result = _from_line(line, is_function, layer=layer)
if not ignore_core or result.id not in CORE_TYPES:
yield result
except ValueError as e:
if 'vector#1cb5c415' not in str(e):
raise
def find_layer(file_path):
"""Finds the layer used on the specified scheme.tl file."""
layer_regex = re.compile(r'^//\s*LAYER\s*(\d+)$')
with open(file_path, encoding='utf-8') as file:
for line in file:
match = layer_regex.match(line)
if match:
return int(match.group(1))
| [((8192, 8307), 're.match', 're.match', (['"""^([\\\\w.]+)(?:#([0-9a-fA-F]+))?(?:\\\\s{?\\\\w+:[\\\\w\\\\d<>#.?!]+}?)*\\\\s=\\\\s([\\\\w\\\\d<>#.?]+);$"""', 'line'], {}), "(\n '^([\\\\w.]+)(?:#([0-9a-fA-F]+))?(?:\\\\s{?\\\\w+:[\\\\w\\\\d<>#.?!]+}?)*\\\\s=\\\\s([\\\\w\\\\d<>#.?]+);$'\n , line)\n", (8200, 8307), False, 'import re\n'), ((8671, 8721), 're.findall', 're.findall', (['"""({)?(\\\\w+):([\\\\w\\\\d<>#.?!]+)}?"""', 'line'], {}), "('({)?(\\\\w+):([\\\\w\\\\d<>#.?!]+)}?', line)\n", (8681, 8721), False, 'import re\n'), ((10120, 10157), 're.compile', 're.compile', (['"""^//\\\\s*LAYER\\\\s*(\\\\d+)$"""'], {}), "('^//\\\\s*LAYER\\\\s*(\\\\d+)$')\n", (10130, 10157), False, 'import re\n'), ((3322, 3377), 're.sub', 're.sub', (['""" \\\\w+:flags\\\\.\\\\d+\\\\?true"""', '""""""', 'representation'], {}), "(' \\\\w+:flags\\\\.\\\\d+\\\\?true', '', representation)\n", (3328, 3377), False, 'import re\n'), ((5079, 5128), 're.match', 're.match', (['"""flags.(\\\\d+)\\\\?([\\\\w<>.]+)"""', 'self.type'], {}), "('flags.(\\\\d+)\\\\?([\\\\w<>.]+)', self.type)\n", (5087, 5128), False, 'import re\n'), ((5466, 5512), 're.match', 're.match', (['"""[Vv]ector<([\\\\w\\\\d.]+)>"""', 'self.type'], {}), "('[Vv]ector<([\\\\w\\\\d.]+)>', self.type)\n", (5474, 5512), False, 'import re\n'), ((9522, 9552), 're.match', 're.match', (['"""---(\\\\w+)---"""', 'line'], {}), "('---(\\\\w+)---', line)\n", (9530, 9552), False, 'import re\n'), ((6476, 6509), 're.search', 're.search', (['"""(\\\\b|_)date\\\\b"""', 'name'], {}), "('(\\\\b|_)date\\\\b', name)\n", (6485, 6509), False, 'import re\n')] |
purkhusid/rules_dotnet | dotnet/private/actions/resx_core.bzl | 934e62d65ed3657be20b2ae3a63e032a2de9ff84 | "Actions for compiling resx files"
load(
"@io_bazel_rules_dotnet//dotnet/private:providers.bzl",
"DotnetResourceInfo",
)
def _make_runner_arglist(dotnet, source, output, resgen):
args = dotnet.actions.args()
if type(source) == "Target":
args.add_all(source.files)
else:
args.add(source)
args.add(output)
return args
def emit_resx_core(
dotnet,
name = "",
src = None,
identifier = None,
out = None,
customresgen = None):
"""The function adds an action that compiles a single .resx file into .resources file.
Returns [DotnetResourceInfo](api.md#dotnetresourceinfo).
Args:
dotnet: [DotnetContextInfo](api.md#dotnetcontextinfo).
name: name of the file to generate.
src: The .resx source file that is transformed into .resources file. Only `.resx` files are permitted.
identifier: The logical name for the resource; the name that is used to load the resource. The default is the basename of the file name (no subfolder).
out: An alternative name of the output file (if name should not be used).
customresgen: custom resgen program to use.
Returns:
DotnetResourceInfo: [DotnetResourceInfo](api.md#dotnetresourceinfo).
"""
if name == "" and out == None:
fail("either name or out must be set")
if not out:
result = dotnet.actions.declare_file(name + ".resources")
else:
result = dotnet.actions.declare_file(out)
args = _make_runner_arglist(dotnet, src, result, customresgen.files_to_run.executable.path)
    # We use the command to extract the shell path and force runfiles creation
resolve = dotnet._ctx.resolve_tools(tools = [customresgen])
inputs = src.files.to_list() if type(src) == "Target" else [src]
dotnet.actions.run(
inputs = inputs + resolve[0].to_list(),
tools = customresgen.default_runfiles.files,
outputs = [result],
executable = customresgen.files_to_run,
arguments = [args],
env = {"RUNFILES_MANIFEST_FILE": customresgen.files_to_run.runfiles_manifest.path},
mnemonic = "CoreResxCompile",
input_manifests = resolve[1],
progress_message = (
"Compiling resoources" + dotnet.label.package + ":" + dotnet.label.name
),
)
return DotnetResourceInfo(
name = name,
result = result,
identifier = identifier,
)
| [] |
xiaohanhuang/pytorch | test/jit/test_modules.py | a31aea8eaa99a5ff72b5d002c206cd68d5467a5e | # Owner(s): ["oncall: jit"]
import torch
import os
import sys
from torch.testing._internal.jit_utils import JitTestCase
# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
if __name__ == '__main__':
raise RuntimeError("This test file is not meant to be run directly, use:\n\n"
"\tpython test/test_jit.py TESTNAME\n\n"
"instead.")
class TestModules(JitTestCase):
def test_script_module_with_constants_list(self):
"""
Test that a module that has __constants__ set to something
that is not a set can be scripted.
"""
# torch.nn.Linear has a __constants__ attribute defined
        # and initialized to a list.
class Net(torch.nn.Linear):
x: torch.jit.Final[int]
def __init__(self):
super().__init__(5, 10)
self.x = 0
self.checkModule(Net(), (torch.randn(5),))
| [((246, 279), 'sys.path.append', 'sys.path.append', (['pytorch_test_dir'], {}), '(pytorch_test_dir)\n', (261, 279), False, 'import sys\n'), ((217, 243), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (233, 243), False, 'import os\n'), ((1017, 1031), 'torch.randn', 'torch.randn', (['(5)'], {}), '(5)\n', (1028, 1031), False, 'import torch\n')] |
Swayamshu/Pixelate_Sample_Arena | pixelate_task_1.py | d8e8b4614987f9302a19ec1e20a922618e67b943 | import gym
import pix_sample_arena
import time
import pybullet as p
import pybullet_data
import cv2
if __name__ == "__main__":
env = gym.make("pix_sample_arena-v0")
x = 0
while True:
p.stepSimulation()
time.sleep(100) | [((138, 169), 'gym.make', 'gym.make', (['"""pix_sample_arena-v0"""'], {}), "('pix_sample_arena-v0')\n", (146, 169), False, 'import gym\n'), ((227, 242), 'time.sleep', 'time.sleep', (['(100)'], {}), '(100)\n', (237, 242), False, 'import time\n'), ((204, 222), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (220, 222), True, 'import pybullet as p\n')] |
vgoehler/python-i3-battery-block | tests/test_timeparser.py | e47ce80b315d812d731df84f2a1c8e1155b2469a | from datetime import time
import pytest
from i3_battery_block_vgg.timeparser import __parse_time_manually
from i3_battery_block_vgg.timeparser import parse_time
@pytest.mark.parametrize(
"time_input, expected",
[
("12:13", time(hour=12, minute=13)),
("12:13:14", time(hour=12, minute=13, second=14)),
('00:54:00', time(hour=0, minute=54, second=0))
]
)
def test_manually_time_parsing(time_input: str, expected: time):
assert __parse_time_manually(time_input) == expected, "manual time parsing has gone wrong"
@pytest.mark.parametrize(
"time_input, expected",
[
("12:13", time(hour=12, minute=13)),
("12:13:14", time(hour=12, minute=13, second=14)),
('00:54:00', time(hour=0, minute=54, second=0))
]
)
def test_time_parsing(time_input: str, expected: time):
assert parse_time(time_input) == expected, "time parsing has gone wrong"
| [((469, 502), 'i3_battery_block_vgg.timeparser.__parse_time_manually', '__parse_time_manually', (['time_input'], {}), '(time_input)\n', (490, 502), False, 'from i3_battery_block_vgg.timeparser import __parse_time_manually\n'), ((850, 872), 'i3_battery_block_vgg.timeparser.parse_time', 'parse_time', (['time_input'], {}), '(time_input)\n', (860, 872), False, 'from i3_battery_block_vgg.timeparser import parse_time\n'), ((243, 267), 'datetime.time', 'time', ([], {'hour': '(12)', 'minute': '(13)'}), '(hour=12, minute=13)\n', (247, 267), False, 'from datetime import time\n'), ((291, 326), 'datetime.time', 'time', ([], {'hour': '(12)', 'minute': '(13)', 'second': '(14)'}), '(hour=12, minute=13, second=14)\n', (295, 326), False, 'from datetime import time\n'), ((350, 383), 'datetime.time', 'time', ([], {'hour': '(0)', 'minute': '(54)', 'second': '(0)'}), '(hour=0, minute=54, second=0)\n', (354, 383), False, 'from datetime import time\n'), ((633, 657), 'datetime.time', 'time', ([], {'hour': '(12)', 'minute': '(13)'}), '(hour=12, minute=13)\n', (637, 657), False, 'from datetime import time\n'), ((681, 716), 'datetime.time', 'time', ([], {'hour': '(12)', 'minute': '(13)', 'second': '(14)'}), '(hour=12, minute=13, second=14)\n', (685, 716), False, 'from datetime import time\n'), ((740, 773), 'datetime.time', 'time', ([], {'hour': '(0)', 'minute': '(54)', 'second': '(0)'}), '(hour=0, minute=54, second=0)\n', (744, 773), False, 'from datetime import time\n')] |
fyquah/circt | frontends/PyCDE/test/polynomial.py | cee685bf12dbf27a3f2274e08cd1af6874f70baa | # RUN: %PYTHON% %s 2>&1 | FileCheck %s
from __future__ import annotations
import mlir
import pycde
from pycde import (Input, Output, Parameter, module, externmodule, generator,
types, dim)
from circt.dialects import comb, hw
@module
def PolynomialCompute(coefficients: Coefficients):
class PolynomialCompute:
"""Module to compute ax^3 + bx^2 + cx + d for design-time coefficients"""
# Evaluate polynomial for 'x'.
x = Input(types.i32)
y = Output(types.int(8 * 4))
unused_parameter = Parameter(True)
def __init__(self, name: str):
"""coefficients is in 'd' -> 'a' order."""
self.instanceName = name
@staticmethod
def get_module_name():
return "PolyComputeForCoeff_" + '_'.join(
[str(x) for x in coefficients.coeff])
@generator
def construct(mod):
"""Implement this module for input 'x'."""
x = mod.x
taps = list()
for power, coeff in enumerate(coefficients.coeff):
coeffVal = hw.ConstantOp.create(types.i32, coeff)
if power == 0:
newPartialSum = coeffVal.result
else:
partialSum = taps[-1]
if power == 1:
currPow = x
else:
x_power = [x for i in range(power)]
currPow = comb.MulOp.create(*x_power)
newPartialSum = comb.AddOp.create(
partialSum, comb.MulOp.create(coeffVal, currPow))
taps.append(newPartialSum)
# Final output
return {"y": taps[-1]}
return PolynomialCompute
@externmodule("supercooldevice")
class CoolPolynomialCompute:
x = Input(types.i32)
y = Output(types.i32)
def __init__(self, coefficients):
self.coefficients = coefficients
class Coefficients:
def __init__(self, coeff):
self.coeff = coeff
class Polynomial(pycde.System):
inputs = []
outputs = [('y', types.i32)]
def build(self, top):
i32 = types.i32
x = hw.ConstantOp.create(i32, 23)
poly = PolynomialCompute(Coefficients([62, 42, 6]))("example", x=x)
PolynomialCompute(coefficients=Coefficients([62, 42, 6]))("example2",
x=poly.y)
PolynomialCompute(Coefficients([1, 2, 3, 4, 5]))("example2", x=poly.y)
CoolPolynomialCompute([4, 42], x=x)
return {"y": poly.y}
poly = Polynomial()
poly.graph()
# CHECK-LABEL: digraph "top"
# CHECK: label="top";
# CHECK: [shape=record,label="{hw.constant\ni32\n\nvalue: 23 : i32}"];
poly.print()
# CHECK-LABEL: hw.module @top() -> (%y: i32)
# CHECK: [[REG0:%.+]] = "pycde.PolynomialCompute"(%c23_i32) {instanceName = "example", opNames = ["x"], parameters = {coefficients = {coeff = [62, 42, 6]}, module_name = "PolyComputeForCoeff_62_42_6", unused_parameter = true}, resultNames = ["y"]} : (i32) -> i32
# CHECK: [[REG1:%.+]] = "pycde.PolynomialCompute"([[REG0]]) {instanceName = "example2", opNames = ["x"], parameters = {coefficients = {coeff = [62, 42, 6]}, module_name = "PolyComputeForCoeff_62_42_6", unused_parameter = true}, resultNames = ["y"]} : (i32) -> i32
# CHECK: [[REG2:%.+]] = "pycde.PolynomialCompute"([[REG0]]) {instanceName = "example2", opNames = ["x"], parameters = {coefficients = {coeff = [1, 2, 3, 4, 5]}, module_name = "PolyComputeForCoeff_1_2_3_4_5", unused_parameter = true}, resultNames = ["y"]} : (i32) -> i32
# CHECK: [[REG3:%.+]] = "pycde.CoolPolynomialCompute"(%c23_i32) {coefficients = [4, 42], opNames = ["x"], parameters = {}, resultNames = ["y"]} : (i32) -> i32
# CHECK: hw.output [[REG0]] : i32
poly.generate()
poly.print()
# CHECK-LABEL: hw.module @top
# CHECK: %example.y = hw.instance "example" @PolyComputeForCoeff_62_42_6(%c23_i32) {parameters = {}} : (i32) -> i32
# CHECK: %example2.y = hw.instance "example2" @PolyComputeForCoeff_62_42_6(%example.y) {parameters = {}} : (i32) -> i32
# CHECK: %example2.y_0 = hw.instance "example2" @PolyComputeForCoeff_1_2_3_4_5(%example.y) {parameters = {}} : (i32) -> i32
# CHECK: %pycde.CoolPolynomialCompute.y = hw.instance "pycde.CoolPolynomialCompute" @supercooldevice(%c23_i32) {coefficients = [4, 42], parameters = {}} : (i32) -> i32
# CHECK-LABEL: hw.module @PolyComputeForCoeff_62_42_6(%x: i32) -> (%y: i32)
# CHECK: hw.constant 62
# CHECK: hw.constant 42
# CHECK: hw.constant 6
# CHECK-LABEL: hw.module @PolyComputeForCoeff_1_2_3_4_5(%x: i32) -> (%y: i32)
# CHECK: hw.constant 1
# CHECK: hw.constant 2
# CHECK: hw.constant 3
# CHECK: hw.constant 4
# CHECK: hw.constant 5
# CHECK-NOT: hw.module @pycde.PolynomialCompute
print("\n\n=== Verilog ===")
# CHECK-LABEL: === Verilog ===
poly.print_verilog()
# CHECK-LABEL: module PolyComputeForCoeff_62_42_6(
# CHECK: input [31:0] x,
# CHECK: output [31:0] y);
| [((1548, 1579), 'pycde.externmodule', 'externmodule', (['"""supercooldevice"""'], {}), "('supercooldevice')\n", (1560, 1579), False, 'from pycde import Input, Output, Parameter, module, externmodule, generator, types, dim\n'), ((1615, 1631), 'pycde.Input', 'Input', (['types.i32'], {}), '(types.i32)\n', (1620, 1631), False, 'from pycde import Input, Output, Parameter, module, externmodule, generator, types, dim\n'), ((1638, 1655), 'pycde.Output', 'Output', (['types.i32'], {}), '(types.i32)\n', (1644, 1655), False, 'from pycde import Input, Output, Parameter, module, externmodule, generator, types, dim\n'), ((458, 474), 'pycde.Input', 'Input', (['types.i32'], {}), '(types.i32)\n', (463, 474), False, 'from pycde import Input, Output, Parameter, module, externmodule, generator, types, dim\n'), ((532, 547), 'pycde.Parameter', 'Parameter', (['(True)'], {}), '(True)\n', (541, 547), False, 'from pycde import Input, Output, Parameter, module, externmodule, generator, types, dim\n'), ((1937, 1966), 'circt.dialects.hw.ConstantOp.create', 'hw.ConstantOp.create', (['i32', '(23)'], {}), '(i32, 23)\n', (1957, 1966), False, 'from circt.dialects import comb, hw\n'), ((490, 506), 'pycde.types.int', 'types.int', (['(8 * 4)'], {}), '(8 * 4)\n', (499, 506), False, 'from pycde import Input, Output, Parameter, module, externmodule, generator, types, dim\n'), ((1008, 1046), 'circt.dialects.hw.ConstantOp.create', 'hw.ConstantOp.create', (['types.i32', 'coeff'], {}), '(types.i32, coeff)\n', (1028, 1046), False, 'from circt.dialects import comb, hw\n'), ((1293, 1320), 'circt.dialects.comb.MulOp.create', 'comb.MulOp.create', (['*x_power'], {}), '(*x_power)\n', (1310, 1320), False, 'from circt.dialects import comb, hw\n'), ((1392, 1428), 'circt.dialects.comb.MulOp.create', 'comb.MulOp.create', (['coeffVal', 'currPow'], {}), '(coeffVal, currPow)\n', (1409, 1428), False, 'from circt.dialects import comb, hw\n')] |
davidgcameron/arc | python/examples/service_discovery.py | 9813ef5f45e5089507953239de8fa2248f5ad32c | #! /usr/bin/env python
import arc
import sys
import os
def retrieve(uc, endpoints):
# The ComputingServiceRetriever needs the UserConfig to know which credentials
# to use in case of HTTPS connections
retriever = arc.ComputingServiceRetriever(uc, endpoints)
# the constructor of the ComputingServiceRetriever returns immediately
sys.stdout.write('\n')
sys.stdout.write("ComputingServiceRetriever created with the following endpoints:\n")
for endpoint in endpoints:
sys.stdout.write("- %s\n"%endpoint.str())
# here we want to wait until all the results arrive
sys.stdout.write("Waiting for the results...\n")
retriever.wait()
return retriever
def example():
# Creating a UserConfig object with the user's proxy
# and the path of the trusted CA certificates
uc = arc.UserConfig()
uc.ProxyPath("/tmp/x509up_u%s" % os.getuid())
uc.CACertificatesDirectory("/etc/grid-security/certificates")
# Query two registries (index servers) for Computing Services
registries = [
# for the index1, we specify that it is an EGIIS service
arc.Endpoint("index1.nordugrid.org:2135/Mds-Vo-name=NorduGrid,o=grid", arc.Endpoint.REGISTRY, "org.nordugrid.ldapegiis"),
# for the arc-emi.grid.upjs.sk, we don't specify the type (the InterfaceName)
        # we let the system try all possibilities
arc.Endpoint("arc-emi.grid.upjs.sk/O=Grid/Mds-Vo-Name=ARC-EMI", arc.Endpoint.REGISTRY)
]
retriever = retrieve(uc, registries)
# The retriever acts as a list containing all the discovered ComputingServices:
sys.stdout.write("Discovered ComputingServices: %s\n"%(", ".join([service.Name for service in retriever])))
# Get all the ExecutionTargets on these ComputingServices
targets = retriever.GetExecutionTargets()
sys.stdout.write("Number of ExecutionTargets on these ComputingServices: %d\n"%len(targets))
# Query the local infosys (COMPUTINGINFO) of computing elements
computing_elements = [
# for piff, we specify that we want to query the LDAP GLUE2 tree
arc.Endpoint("piff.hep.lu.se", arc.Endpoint.COMPUTINGINFO, "org.nordugrid.ldapglue2"),
# for pgs03, we don't specify the interface, we let the system try all possibilities
arc.Endpoint("pgs03.grid.upjs.sk", arc.Endpoint.COMPUTINGINFO)
]
retriever2 = retrieve(uc, computing_elements)
# Get all the ExecutionTargets on these ComputingServices
targets2 = retriever2.GetExecutionTargets()
sys.stdout.write("The discovered ExecutionTargets:\n")
for target in targets2:
sys.stdout.write("%s\n"%str(target))
# Query both registries and computing elements at the same time:
endpoints = [
arc.Endpoint("arc-emi.grid.upjs.sk/O=Grid/Mds-Vo-Name=ARC-EMI", arc.Endpoint.REGISTRY),
arc.Endpoint("piff.hep.lu.se", arc.Endpoint.COMPUTINGINFO, "org.nordugrid.ldapglue2")
]
retriever3 = retrieve(uc, endpoints)
sys.stdout.write("Discovered ComputingServices: %s\n"%(", ".join([service.Name for service in retriever3])))
# wait for all the background threads to finish before we destroy the objects they may use
import atexit
@atexit.register
def wait_exit():
arc.ThreadInitializer().waitExit()
# arc.Logger.getRootLogger().addDestination(arc.LogStream(sys.stderr))
# arc.Logger.getRootLogger().setThreshold(arc.DEBUG)
# run the example
example()
| [((226, 270), 'arc.ComputingServiceRetriever', 'arc.ComputingServiceRetriever', (['uc', 'endpoints'], {}), '(uc, endpoints)\n', (255, 270), False, 'import arc\n'), ((350, 372), 'sys.stdout.write', 'sys.stdout.write', (['"""\n"""'], {}), "('\\n')\n", (366, 372), False, 'import sys\n'), ((377, 467), 'sys.stdout.write', 'sys.stdout.write', (['"""ComputingServiceRetriever created with the following endpoints:\n"""'], {}), "(\n 'ComputingServiceRetriever created with the following endpoints:\\n')\n", (393, 467), False, 'import sys\n'), ((604, 652), 'sys.stdout.write', 'sys.stdout.write', (['"""Waiting for the results...\n"""'], {}), "('Waiting for the results...\\n')\n", (620, 652), False, 'import sys\n'), ((827, 843), 'arc.UserConfig', 'arc.UserConfig', ([], {}), '()\n', (841, 843), False, 'import arc\n'), ((2527, 2581), 'sys.stdout.write', 'sys.stdout.write', (['"""The discovered ExecutionTargets:\n"""'], {}), "('The discovered ExecutionTargets:\\n')\n", (2543, 2581), False, 'import sys\n'), ((1119, 1244), 'arc.Endpoint', 'arc.Endpoint', (['"""index1.nordugrid.org:2135/Mds-Vo-name=NorduGrid,o=grid"""', 'arc.Endpoint.REGISTRY', '"""org.nordugrid.ldapegiis"""'], {}), "('index1.nordugrid.org:2135/Mds-Vo-name=NorduGrid,o=grid', arc.\n Endpoint.REGISTRY, 'org.nordugrid.ldapegiis')\n", (1131, 1244), False, 'import arc\n'), ((1388, 1479), 'arc.Endpoint', 'arc.Endpoint', (['"""arc-emi.grid.upjs.sk/O=Grid/Mds-Vo-Name=ARC-EMI"""', 'arc.Endpoint.REGISTRY'], {}), "('arc-emi.grid.upjs.sk/O=Grid/Mds-Vo-Name=ARC-EMI', arc.\n Endpoint.REGISTRY)\n", (1400, 1479), False, 'import arc\n'), ((2103, 2192), 'arc.Endpoint', 'arc.Endpoint', (['"""piff.hep.lu.se"""', 'arc.Endpoint.COMPUTINGINFO', '"""org.nordugrid.ldapglue2"""'], {}), "('piff.hep.lu.se', arc.Endpoint.COMPUTINGINFO,\n 'org.nordugrid.ldapglue2')\n", (2115, 2192), False, 'import arc\n'), ((2291, 2353), 'arc.Endpoint', 'arc.Endpoint', (['"""pgs03.grid.upjs.sk"""', 'arc.Endpoint.COMPUTINGINFO'], {}), "('pgs03.grid.upjs.sk', arc.Endpoint.COMPUTINGINFO)\n", (2303, 2353), False, 'import arc\n'), ((2752, 2843), 'arc.Endpoint', 'arc.Endpoint', (['"""arc-emi.grid.upjs.sk/O=Grid/Mds-Vo-Name=ARC-EMI"""', 'arc.Endpoint.REGISTRY'], {}), "('arc-emi.grid.upjs.sk/O=Grid/Mds-Vo-Name=ARC-EMI', arc.\n Endpoint.REGISTRY)\n", (2764, 2843), False, 'import arc\n'), ((2848, 2937), 'arc.Endpoint', 'arc.Endpoint', (['"""piff.hep.lu.se"""', 'arc.Endpoint.COMPUTINGINFO', '"""org.nordugrid.ldapglue2"""'], {}), "('piff.hep.lu.se', arc.Endpoint.COMPUTINGINFO,\n 'org.nordugrid.ldapglue2')\n", (2860, 2937), False, 'import arc\n'), ((881, 892), 'os.getuid', 'os.getuid', ([], {}), '()\n', (890, 892), False, 'import os\n'), ((3241, 3264), 'arc.ThreadInitializer', 'arc.ThreadInitializer', ([], {}), '()\n', (3262, 3264), False, 'import arc\n')] |
netajik/oppia | core/domain/rights_manager.py | d3780352d615db7438e010c5aa5eb60588bb7de6 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Domain objects and functions that manage rights for various user actions."""
import logging
from constants import constants
from core.domain import activity_services
from core.domain import role_services
from core.domain import subscription_services
from core.domain import user_services
from core.platform import models
import feconf
import utils
current_user_services = models.Registry.import_current_user_services()
(collection_models, exp_models,) = models.Registry.import_models([
models.NAMES.collection, models.NAMES.exploration
])
# IMPORTANT: Ensure that all changes to how these cmds are interpreted preserve
# backward-compatibility with previous exploration snapshots in the datastore.
# Do not modify the definitions of CMD keys that already exist.
CMD_CREATE_NEW = 'create_new'
CMD_CHANGE_ROLE = 'change_role'
CMD_CHANGE_EXPLORATION_STATUS = 'change_exploration_status'
CMD_CHANGE_COLLECTION_STATUS = 'change_collection_status'
CMD_CHANGE_PRIVATE_VIEWABILITY = 'change_private_viewability'
CMD_RELEASE_OWNERSHIP = 'release_ownership'
CMD_UPDATE_FIRST_PUBLISHED_MSEC = 'update_first_published_msec'
ACTIVITY_STATUS_PRIVATE = constants.ACTIVITY_STATUS_PRIVATE
ACTIVITY_STATUS_PUBLIC = constants.ACTIVITY_STATUS_PUBLIC
ROLE_OWNER = 'owner'
ROLE_EDITOR = 'editor'
ROLE_TRANSLATOR = 'translator'
ROLE_VIEWER = 'viewer'
ROLE_NONE = 'none'
ROLE_ADMIN = 'admin'
ROLE_MODERATOR = 'moderator'
class ActivityRights(object):
"""Domain object for the rights/publication status of an activity (an
exploration or a collection).
"""
def __init__(
self, exploration_id, owner_ids, editor_ids, translator_ids,
viewer_ids, community_owned=False, cloned_from=None,
status=ACTIVITY_STATUS_PRIVATE, viewable_if_private=False,
first_published_msec=None):
self.id = exploration_id
self.owner_ids = owner_ids
self.editor_ids = editor_ids
self.translator_ids = translator_ids
self.viewer_ids = viewer_ids
self.community_owned = community_owned
self.cloned_from = cloned_from
self.status = status
self.viewable_if_private = viewable_if_private
self.first_published_msec = first_published_msec
def validate(self):
"""Validates an ActivityRights object.
Raises:
utils.ValidationError: if any of the owners, editors, translators and
viewers lists overlap, or if a community-owned exploration has owners,
editors, translators or viewers specified.
"""
if self.community_owned:
if (self.owner_ids or self.editor_ids or self.translator_ids or
self.viewer_ids):
raise utils.ValidationError(
'Community-owned explorations should have no owners, '
'editors, translators or viewers specified.')
if self.community_owned and self.status == ACTIVITY_STATUS_PRIVATE:
raise utils.ValidationError(
'Community-owned explorations cannot be private.')
if self.status != ACTIVITY_STATUS_PRIVATE and self.viewer_ids:
raise utils.ValidationError(
'Public explorations should have no viewers specified.')
owner_editor = set(self.owner_ids).intersection(set(self.editor_ids))
owner_translator = set(self.owner_ids).intersection(
set(self.translator_ids))
owner_viewer = set(self.owner_ids).intersection(set(self.viewer_ids))
editor_translator = set(self.editor_ids).intersection(
set(self.translator_ids))
editor_viewer = set(self.editor_ids).intersection(set(self.viewer_ids))
        translator_viewer = set(self.translator_ids).intersection(
            set(self.viewer_ids))
if owner_editor:
raise utils.ValidationError(
'A user cannot be both an owner and an editor: %s' %
owner_editor)
if owner_translator:
raise utils.ValidationError(
'A user cannot be both an owner and a translator: %s' %
owner_translator)
if owner_viewer:
raise utils.ValidationError(
'A user cannot be both an owner and a viewer: %s' %
owner_viewer)
if editor_translator:
raise utils.ValidationError(
'A user cannot be both an editor and a translator: %s' %
editor_translator)
if editor_viewer:
raise utils.ValidationError(
'A user cannot be both an editor and a viewer: %s' %
editor_viewer)
if translator_viewer:
raise utils.ValidationError(
'A user cannot be both a translator and a viewer: %s' %
translator_viewer)
def to_dict(self):
"""Returns a dict suitable for use by the frontend.
Returns:
dict. A dict version of ActivityRights suitable for use by the
frontend.
"""
if self.community_owned:
return {
'cloned_from': self.cloned_from,
'status': self.status,
'community_owned': True,
'owner_names': [],
'editor_names': [],
'translator_names': [],
'viewer_names': [],
'viewable_if_private': self.viewable_if_private,
}
else:
return {
'cloned_from': self.cloned_from,
'status': self.status,
'community_owned': False,
'owner_names': user_services.get_human_readable_user_ids(
self.owner_ids),
'editor_names': user_services.get_human_readable_user_ids(
self.editor_ids),
'translator_names': user_services.get_human_readable_user_ids(
self.translator_ids),
'viewer_names': user_services.get_human_readable_user_ids(
self.viewer_ids),
'viewable_if_private': self.viewable_if_private,
}
def is_owner(self, user_id):
"""Checks whether given user is owner of activity.
Args:
user_id: str or None. Id of the user.
Returns:
bool. Whether user is an activity owner.
"""
return bool(user_id in self.owner_ids)
def is_editor(self, user_id):
"""Checks whether given user is editor of activity.
Args:
user_id: str or None. Id of the user.
Returns:
bool. Whether user is an activity editor.
"""
return bool(user_id in self.editor_ids)
def is_translator(self, user_id):
"""Checks whether given user is translator of activity.
Args:
user_id: str or None. Id of the user.
Returns:
bool. Whether user is an activity translator.
"""
return bool(user_id in self.translator_ids)
def is_viewer(self, user_id):
"""Checks whether given user is viewer of activity.
Args:
user_id: str or None. Id of the user.
Returns:
bool. Whether user is an activity viewer.
"""
return bool(user_id in self.viewer_ids)
def is_published(self):
"""Checks whether activity is published.
Returns:
bool. Whether activity is published.
"""
return bool(self.status == ACTIVITY_STATUS_PUBLIC)
def is_private(self):
"""Checks whether activity is private.
Returns:
bool. Whether activity is private.
"""
return bool(self.status == ACTIVITY_STATUS_PRIVATE)
def get_activity_rights_from_model(activity_rights_model, activity_type):
"""Constructs an ActivityRights object from the given activity rights model.
Args:
activity_rights_model: ActivityRightsModel. Activity rights from the
datastore.
activity_type: str. The type of activity. Possible values:
constants.ACTIVITY_TYPE_EXPLORATION
constants.ACTIVITY_TYPE_COLLECTION
Returns:
ActivityRights. The rights object created from the model.
"""
return ActivityRights(
activity_rights_model.id,
activity_rights_model.owner_ids,
activity_rights_model.editor_ids,
activity_rights_model.translator_ids,
activity_rights_model.viewer_ids,
community_owned=activity_rights_model.community_owned,
cloned_from=(
activity_rights_model.cloned_from
if activity_type == constants.ACTIVITY_TYPE_EXPLORATION else None),
status=activity_rights_model.status,
viewable_if_private=activity_rights_model.viewable_if_private,
first_published_msec=activity_rights_model.first_published_msec
)
def _save_activity_rights(
committer_id, activity_rights, activity_type, commit_message,
commit_cmds):
"""Saves an ExplorationRights or CollectionRights domain object to the
datastore.
Args:
committer_id: str. ID of the committer.
activity_rights: ActivityRights. The rights object for the given
activity.
activity_type: str. The type of activity. Possible values:
constants.ACTIVITY_TYPE_EXPLORATION
constants.ACTIVITY_TYPE_COLLECTION
commit_message: str. Descriptive message for the commit.
commit_cmds: list(dict). A list of commands describing what kind of
commit was done.
"""
activity_rights.validate()
if activity_type == constants.ACTIVITY_TYPE_EXPLORATION:
model_cls = exp_models.ExplorationRightsModel
elif activity_type == constants.ACTIVITY_TYPE_COLLECTION:
model_cls = collection_models.CollectionRightsModel
model = model_cls.get(activity_rights.id, strict=False)
model.owner_ids = activity_rights.owner_ids
model.editor_ids = activity_rights.editor_ids
model.viewer_ids = activity_rights.viewer_ids
model.translator_ids = activity_rights.translator_ids
model.community_owned = activity_rights.community_owned
model.status = activity_rights.status
model.viewable_if_private = activity_rights.viewable_if_private
model.first_published_msec = activity_rights.first_published_msec
model.commit(committer_id, commit_message, commit_cmds)
def _update_exploration_summary(activity_rights):
"""Updates the exploration summary for the activity associated with the
given rights object.
The ID of rights object is the same as the ID of associated activity.
Args:
activity_rights: ActivityRights. The rights object for the given
activity.
"""
# TODO(msl): get rid of inline imports by refactoring code.
from core.domain import exp_services
exp_services.update_exploration_summary(
activity_rights.id, None)
def _update_collection_summary(activity_rights):
"""Updates the collection summary for the given activity associated with
the given rights object.
The ID of rights object is the same as the ID of associated activity.
Args:
activity_rights: ActivityRights. The rights object for the given
activity.
"""
from core.domain import collection_services
collection_services.update_collection_summary(
activity_rights.id, None)
def _update_activity_summary(activity_type, activity_rights):
"""Updates the activity summary for the given activity associated with
the given rights object.
The ID of rights object is the same as the ID of associated activity.
Args:
activity_type: str. The type of activity. Possible values:
constants.ACTIVITY_TYPE_EXPLORATION
constants.ACTIVITY_TYPE_COLLECTION
activity_rights: ActivityRights. The rights object for the given
activity.
"""
if activity_type == constants.ACTIVITY_TYPE_EXPLORATION:
_update_exploration_summary(activity_rights)
elif activity_type == constants.ACTIVITY_TYPE_COLLECTION:
_update_collection_summary(activity_rights)
def update_activity_first_published_msec(
activity_type, activity_id, first_published_msec):
"""Updates the first_published_msec field for the given activity.
The caller is responsible for ensuring that this value is not already
set before updating it.
Args:
activity_type: str. The type of activity. Possible values:
constants.ACTIVITY_TYPE_EXPLORATION
constants.ACTIVITY_TYPE_COLLECTION
activity_id: str. ID of the activity.
first_published_msec: float. First publication time in milliseconds
since the Epoch.
"""
activity_rights = _get_activity_rights(activity_type, activity_id)
commit_cmds = [{
'cmd': CMD_UPDATE_FIRST_PUBLISHED_MSEC,
'old_first_published_msec': activity_rights.first_published_msec,
'new_first_published_msec': first_published_msec
}]
activity_rights.first_published_msec = first_published_msec
_save_activity_rights(
feconf.SYSTEM_COMMITTER_ID, activity_rights, activity_type,
'set first published time in msec', commit_cmds)
def create_new_exploration_rights(exploration_id, committer_id):
"""Creates a new exploration rights object and saves it to the datastore.
Subscribes the committer to the new exploration.
Args:
exploration_id: str. ID of the exploration.
committer_id: str. ID of the committer.
"""
exploration_rights = ActivityRights(
exploration_id, [committer_id], [], [], [])
commit_cmds = [{'cmd': CMD_CREATE_NEW}]
exp_models.ExplorationRightsModel(
id=exploration_rights.id,
owner_ids=exploration_rights.owner_ids,
editor_ids=exploration_rights.editor_ids,
translator_ids=exploration_rights.translator_ids,
viewer_ids=exploration_rights.viewer_ids,
community_owned=exploration_rights.community_owned,
status=exploration_rights.status,
viewable_if_private=exploration_rights.viewable_if_private,
first_published_msec=exploration_rights.first_published_msec,
).commit(committer_id, 'Created new exploration', commit_cmds)
subscription_services.subscribe_to_exploration(
committer_id, exploration_id)
def get_exploration_rights(exploration_id, strict=True):
"""Retrieves the rights for this exploration from the datastore.
Args:
exploration_id: str. ID of the exploration.
strict: bool. Whether to raise an error if there is no exploration
matching the given ID.
Returns:
ActivityRights. The rights object for the given exploration.
Raises:
EntityNotFoundError. The exploration with ID exploration_id was not
found in the datastore.
"""
model = exp_models.ExplorationRightsModel.get(
exploration_id, strict=strict)
if model is None:
return None
return get_activity_rights_from_model(
model, constants.ACTIVITY_TYPE_EXPLORATION)
def get_multiple_exploration_rights_by_ids(exp_ids):
"""Returns a list of ActivityRights objects for given exploration ids.
Args:
exp_ids: list(str). List of exploration ids.
Returns:
list(ActivityRights or None). List of rights object containing
ActivityRights object for existing exploration or None.
"""
exp_rights_models = exp_models.ExplorationRightsModel.get_multi(
exp_ids)
exp_models_list = []
for model in exp_rights_models:
if model is None:
exp_models_list.append(None)
else:
exp_models_list.append(
get_activity_rights_from_model(
model, constants.ACTIVITY_TYPE_EXPLORATION))
return exp_models_list
def is_exploration_private(exploration_id):
"""Returns whether exploration is private.
Args:
exploration_id: str. ID of the exploration.
Returns:
bool. Whether the exploration is private or not.
"""
exploration_rights = get_exploration_rights(exploration_id)
return exploration_rights.status == ACTIVITY_STATUS_PRIVATE
def is_exploration_public(exploration_id):
"""Returns whether exploration is public.
Args:
exploration_id: str. ID of the exploration.
Returns:
bool. Whether the exploration is public.
"""
exploration_rights = get_exploration_rights(exploration_id)
return exploration_rights.status == ACTIVITY_STATUS_PUBLIC
def is_exploration_cloned(exploration_id):
"""Returns whether the exploration is a clone of another exploration.
Args:
exploration_id: str. ID of the exploration.
Returns:
bool. Whether the exploration is a clone of another exploration.
"""
exploration_rights = get_exploration_rights(exploration_id)
return bool(exploration_rights.cloned_from)
def create_new_collection_rights(collection_id, committer_id):
"""Creates a new collection rights object and saves it to the datastore.
Subscribes the committer to the new collection.
Args:
collection_id: str. ID of the collection.
committer_id: str. ID of the committer.
"""
collection_rights = ActivityRights(
collection_id, [committer_id], [], [], [])
commit_cmds = [{'cmd': CMD_CREATE_NEW}]
collection_models.CollectionRightsModel(
id=collection_rights.id,
owner_ids=collection_rights.owner_ids,
editor_ids=collection_rights.editor_ids,
translator_ids=collection_rights.translator_ids,
viewer_ids=collection_rights.viewer_ids,
community_owned=collection_rights.community_owned,
status=collection_rights.status,
viewable_if_private=collection_rights.viewable_if_private,
first_published_msec=collection_rights.first_published_msec
).commit(committer_id, 'Created new collection', commit_cmds)
subscription_services.subscribe_to_collection(committer_id, collection_id)
def get_collection_rights(collection_id, strict=True):
"""Retrieves the rights for this collection from the datastore.
Args:
collection_id: str. ID of the collection.
strict: bool. Whether to raise an error if ID is not found.
Returns:
ActivityRights. The rights object for the collection.
Raises:
EntityNotFoundError. The collection with ID collection_id is not found
in the datastore.
"""
model = collection_models.CollectionRightsModel.get(
collection_id, strict=strict)
if model is None:
return None
return get_activity_rights_from_model(
model, constants.ACTIVITY_TYPE_COLLECTION)
def get_collection_owner_names(collection_id):
"""Retrieves the owners for this collection from the datastore.
Args:
collection_id: str. ID of the collection.
Returns:
list(str). Human-readable usernames (or truncated email addresses) of
owners for this collection.
"""
collection_rights = get_collection_rights(collection_id)
return user_services.get_human_readable_user_ids(
collection_rights.owner_ids)
def is_collection_private(collection_id):
"""Returns whether the collection is private.
Args:
collection_id: str. ID of the collection.
Returns:
bool. Whether the collection is private.
"""
collection_rights = get_collection_rights(collection_id)
return collection_rights.status == ACTIVITY_STATUS_PRIVATE
def is_collection_public(collection_id):
"""Returns whether the collection is public.
Args:
collection_id: str. ID of the collection.
Returns:
bool. Whether the collection is public.
"""
collection_rights = get_collection_rights(collection_id)
return collection_rights.status == ACTIVITY_STATUS_PUBLIC
def _get_activity_rights(activity_type, activity_id):
"""Retrieves the rights object for the given activity
based on its type.
Args:
activity_type: str. The type of activity. Possible values:
constants.ACTIVITY_TYPE_EXPLORATION
constants.ACTIVITY_TYPE_COLLECTION
activity_id: str. ID of the activity.
Returns:
ActivityRights. The rights object associated with the given activity.
Raises:
Exception. activity_type provided is unknown.
"""
if activity_type == constants.ACTIVITY_TYPE_EXPLORATION:
return get_exploration_rights(activity_id, strict=False)
elif activity_type == constants.ACTIVITY_TYPE_COLLECTION:
return get_collection_rights(activity_id, strict=False)
else:
raise Exception(
'Cannot get activity rights for unknown activity type: %s' % (
activity_type))
def check_can_access_activity(user, activity_rights):
"""Checks whether the user can access given activity.
Args:
user: UserActionsInfo. Object having user_id, role and actions for
given user.
activity_rights: AcitivityRights or None. Rights object for the given
activity.
Returns:
bool. Whether the given activity can be accessed by the given user.
"""
if activity_rights is None:
return False
elif activity_rights.is_published():
return bool(
role_services.ACTION_PLAY_ANY_PUBLIC_ACTIVITY in user.actions)
elif activity_rights.is_private():
return bool(
(role_services.ACTION_PLAY_ANY_PRIVATE_ACTIVITY in user.actions) or
activity_rights.is_viewer(user.user_id) or
activity_rights.is_owner(user.user_id) or
activity_rights.is_editor(user.user_id) or
activity_rights.is_translator(user.user_id) or
activity_rights.viewable_if_private)
def check_can_edit_activity(user, activity_rights):
"""Checks whether the user can edit given activity.
Args:
user: UserActionsInfo. Object having user_id, role and actions for
given user.
activity_rights: ActivityRights or None. Rights object for the given
activity.
Returns:
bool. Whether the given user can edit this activity.
"""
if activity_rights is None:
return False
if role_services.ACTION_EDIT_OWNED_ACTIVITY not in user.actions:
return False
if (activity_rights.is_owner(user.user_id) or
activity_rights.is_editor(user.user_id)):
return True
if (activity_rights.community_owned or
(role_services.ACTION_EDIT_ANY_ACTIVITY in user.actions)):
return True
if (activity_rights.is_published() and
(role_services.ACTION_EDIT_ANY_PUBLIC_ACTIVITY in
user.actions)):
return True
return False
def check_can_translate_activity(user, activity_rights):
"""Checks whether the user can translate given activity.
Args:
user: UserActionsInfo. Object having user_id, role and actions for
given user.
activity_rights: ActivityRights or None. Rights object for the given
activity.
Returns:
bool. Whether the given user can translate this activity.
"""
if activity_rights is None:
return False
if role_services.ACTION_EDIT_OWNED_ACTIVITY not in user.actions:
return False
if (activity_rights.is_owner(user.user_id) or
activity_rights.is_editor(user.user_id) or
activity_rights.is_translator(user.user_id)):
return True
if (activity_rights.community_owned or
(role_services.ACTION_EDIT_ANY_ACTIVITY in user.actions)):
return True
if (activity_rights.is_published() and
(role_services.ACTION_EDIT_ANY_PUBLIC_ACTIVITY in
user.actions)):
return True
return False
def check_can_delete_activity(user, activity_rights):
"""Checks whether the user can delete given activity.
Args:
user: UserActionsInfo. Object having user_id, role and actions for
given user.
activity_rights: ActivityRights or None. Rights object for the given
activity.
Returns:
bool. Whether the user can delete given activity.
"""
if activity_rights is None:
return False
if role_services.ACTION_DELETE_ANY_ACTIVITY in user.actions:
return True
elif (activity_rights.is_private() and
(role_services.ACTION_DELETE_OWNED_PRIVATE_ACTIVITY in user.actions)
and activity_rights.is_owner(user.user_id)):
return True
elif (activity_rights.is_published() and
(role_services.ACTION_DELETE_ANY_PUBLIC_ACTIVITY in user.actions)):
return True
return False
def check_can_modify_activity_roles(user, activity_rights):
"""Checks whether the user can modify roles for given activity.
Args:
user: UserActionsInfo. Object having user_id, role and actions for
given user.
activity_rights: ActivityRights or None. Rights object for the given
activity.
Returns:
bool. Whether the user can modify roles for given activity.
"""
if activity_rights is None:
return False
if (activity_rights.community_owned or
activity_rights.cloned_from):
return False
if (role_services.ACTION_MODIFY_ROLES_FOR_ANY_ACTIVITY in
user.actions):
return True
if (role_services.ACTION_MODIFY_ROLES_FOR_OWNED_ACTIVITY in
user.actions):
if activity_rights.is_owner(user.user_id):
return True
return False
def check_can_release_ownership(user, activity_rights):
"""Checks whether the user can release ownership for given activity.
Args:
user: UserActionsInfo. Object having user_id, role and actions for
given user.
activity_rights: ActivityRights or None. Rights object for the given
activity.
Returns:
bool. Whether the user can release ownership for given activity.
"""
if activity_rights is None:
return False
if activity_rights.is_private():
return False
return check_can_modify_activity_roles(
user, activity_rights)
def check_can_publish_activity(user, activity_rights):
"""Checks whether the user can publish given activity.
Args:
user: UserActionsInfo. Object having user_id, role and actions for
given user.
activity_rights: ActivityRights or None. Rights object for the given
activity.
Returns:
bool. Whether the user can publish given activity.
"""
if activity_rights is None:
return False
if activity_rights.cloned_from:
return False
if activity_rights.is_published():
return False
if role_services.ACTION_PUBLISH_ANY_ACTIVITY in user.actions:
return True
if role_services.ACTION_PUBLISH_OWNED_ACTIVITY in user.actions:
if activity_rights.is_owner(user.user_id):
return True
return False
def check_can_unpublish_activity(user, activity_rights):
"""Checks whether the user can unpublish given activity.
Args:
user: UserActionsInfo. Object having user_id, role and actions for
given user.
activity_rights: ActivityRights or None. Rights object for the given
activity.
Returns:
bool. Whether the user can unpublish given activity.
"""
if activity_rights is None:
return False
if activity_rights.community_owned:
return False
if activity_rights.is_published():
if role_services.ACTION_UNPUBLISH_ANY_PUBLIC_ACTIVITY in user.actions:
return True
return False
def _assign_role(
committer, assignee_id, new_role, activity_id, activity_type):
"""Assigns a new role to the user.
Args:
committer: UserActionsInfo. UserActionInfo object for the user
who is performing the action.
assignee_id: str. ID of the user whose role is being changed.
new_role: str. The name of the new role: One of
ROLE_OWNER
ROLE_EDITOR
ROLE_TRANSLATOR
ROLE_VIEWER
activity_id: str. ID of the activity.
activity_type: str. The type of activity. Possible values:
constants.ACTIVITY_TYPE_EXPLORATION
constants.ACTIVITY_TYPE_COLLECTION
Raises:
Exception. The committer does not have rights to modify a role.
Exception. The user already owns the activity.
Exception. The user can already edit the activity.
Exception. The user can already translate the activity.
Exception. The activity is already publicly editable.
Exception. The activity is already publicly translatable.
Exception. The user can already view the activity.
Exception. The activity is already publicly viewable.
Exception. The role is invalid.
"""
committer_id = committer.user_id
activity_rights = _get_activity_rights(activity_type, activity_id)
if not check_can_modify_activity_roles(committer, activity_rights):
logging.error(
'User %s tried to allow user %s to be a(n) %s of activity %s '
'but was refused permission.' % (
committer_id, assignee_id, new_role, activity_id))
raise Exception(
'UnauthorizedUserException: Could not assign new role.')
assignee_username = user_services.get_username(assignee_id)
old_role = ROLE_NONE
if new_role == ROLE_OWNER:
if activity_rights.is_owner(assignee_id):
raise Exception('This user already owns this %s.' % activity_type)
activity_rights.owner_ids.append(assignee_id)
if assignee_id in activity_rights.viewer_ids:
activity_rights.viewer_ids.remove(assignee_id)
old_role = ROLE_VIEWER
if assignee_id in activity_rights.editor_ids:
activity_rights.editor_ids.remove(assignee_id)
old_role = ROLE_EDITOR
if assignee_id in activity_rights.translator_ids:
activity_rights.translator_ids.remove(assignee_id)
old_role = ROLE_TRANSLATOR
elif new_role == ROLE_EDITOR:
if (activity_rights.is_editor(assignee_id) or
activity_rights.is_owner(assignee_id)):
raise Exception(
'This user already can edit this %s.' % activity_type)
if activity_rights.community_owned:
raise Exception(
'Community-owned %ss can be edited by anyone.' % activity_type)
activity_rights.editor_ids.append(assignee_id)
if assignee_id in activity_rights.translator_ids:
activity_rights.translator_ids.remove(assignee_id)
old_role = ROLE_TRANSLATOR
if assignee_id in activity_rights.viewer_ids:
activity_rights.viewer_ids.remove(assignee_id)
old_role = ROLE_VIEWER
elif new_role == ROLE_TRANSLATOR:
if (activity_rights.is_editor(assignee_id) or
activity_rights.is_translator(assignee_id) or
activity_rights.is_owner(assignee_id)):
raise Exception(
'This user already can translate this %s.' % activity_type)
if activity_rights.community_owned:
raise Exception(
'Community-owned %ss can be translated by anyone.' %
activity_type)
activity_rights.translator_ids.append(assignee_id)
if assignee_id in activity_rights.viewer_ids:
activity_rights.viewer_ids.remove(assignee_id)
old_role = ROLE_VIEWER
elif new_role == ROLE_VIEWER:
if (activity_rights.is_owner(assignee_id) or
activity_rights.is_editor(assignee_id) or
activity_rights.is_viewer(assignee_id)):
raise Exception(
'This user already can view this %s.' % activity_type)
if activity_rights.status != ACTIVITY_STATUS_PRIVATE:
raise Exception(
'Public %ss can be viewed by anyone.' % activity_type)
activity_rights.viewer_ids.append(assignee_id)
else:
raise Exception('Invalid role: %s' % new_role)
commit_message = 'Changed role of %s from %s to %s' % (
assignee_username, old_role, new_role)
commit_cmds = [{
'cmd': CMD_CHANGE_ROLE,
'assignee_id': assignee_id,
'old_role': old_role,
'new_role': new_role
}]
_save_activity_rights(
committer_id, activity_rights, activity_type,
commit_message, commit_cmds)
_update_activity_summary(activity_type, activity_rights)
def _release_ownership_of_activity(committer, activity_id, activity_type):
"""Releases ownership of the given activity to the community.
Args:
committer: UserActionsInfo. UserActionsInfo object for the user who
is performing the action.
activity_id: str. ID of the activity.
activity_type: str. The type of activity. Possible values:
constants.ACTIVITY_TYPE_EXPLORATION
constants.ACTIVITY_TYPE_COLLECTION
    Raises:
Exception. The committer does not have release rights.
"""
committer_id = committer.user_id
activity_rights = _get_activity_rights(activity_type, activity_id)
if not check_can_release_ownership(committer, activity_rights):
logging.error(
'User %s tried to release ownership of %s %s but was '
'refused permission.' % (committer_id, activity_type, activity_id))
raise Exception(
'The ownership of this %s cannot be released.' % activity_type)
activity_rights.community_owned = True
activity_rights.owner_ids = []
activity_rights.editor_ids = []
activity_rights.viewer_ids = []
commit_cmds = [{
'cmd': CMD_RELEASE_OWNERSHIP,
}]
_save_activity_rights(
committer_id, activity_rights, activity_type,
'%s ownership released to the community.' % activity_type, commit_cmds)
_update_activity_summary(activity_type, activity_rights)
def _change_activity_status(
committer_id, activity_id, activity_type, new_status, commit_message):
"""Changes the status of the given activity.
Args:
committer_id: str. ID of the user who is performing the update action.
activity_id: str. ID of the activity.
activity_type: str. The type of activity. Possible values:
constants.ACTIVITY_TYPE_EXPLORATION
constants.ACTIVITY_TYPE_COLLECTION
new_status: str. The new status of the activity.
commit_message: str. The human-written commit message for this change.
"""
activity_rights = _get_activity_rights(activity_type, activity_id)
old_status = activity_rights.status
activity_rights.status = new_status
if activity_type == constants.ACTIVITY_TYPE_EXPLORATION:
cmd_type = CMD_CHANGE_EXPLORATION_STATUS
elif activity_type == constants.ACTIVITY_TYPE_COLLECTION:
cmd_type = CMD_CHANGE_COLLECTION_STATUS
commit_cmds = [{
'cmd': cmd_type,
'old_status': old_status,
'new_status': new_status
}]
if new_status != ACTIVITY_STATUS_PRIVATE:
activity_rights.viewer_ids = []
if activity_rights.first_published_msec is None:
activity_rights.first_published_msec = (
utils.get_current_time_in_millisecs())
_save_activity_rights(
committer_id, activity_rights, activity_type, commit_message,
commit_cmds)
_update_activity_summary(activity_type, activity_rights)
def _publish_activity(committer, activity_id, activity_type):
"""Publishes the given activity.
Args:
committer: UserActionsInfo. UserActionsInfo object for the committer.
activity_id: str. ID of the activity.
activity_type: str. The type of activity. Possible values:
constants.ACTIVITY_TYPE_EXPLORATION
constants.ACTIVITY_TYPE_COLLECTION
Raises:
Exception. The committer does not have rights to publish the
activity.
"""
committer_id = committer.user_id
activity_rights = _get_activity_rights(activity_type, activity_id)
if not check_can_publish_activity(committer, activity_rights):
logging.error(
'User %s tried to publish %s %s but was refused '
'permission.' % (committer_id, activity_type, activity_id))
raise Exception('This %s cannot be published.' % activity_type)
_change_activity_status(
committer_id, activity_id, activity_type, ACTIVITY_STATUS_PUBLIC,
'%s published.' % activity_type)
def _unpublish_activity(committer, activity_id, activity_type):
"""Unpublishes the given activity.
Args:
committer: UserActionsInfo. UserActionsInfo object for the committer.
activity_id: str. ID of the activity.
activity_type: str. The type of activity. Possible values:
constants.ACTIVITY_TYPE_EXPLORATION
constants.ACTIVITY_TYPE_COLLECTION
Raises:
Exception. The committer does not have rights to unpublish the
activity.
"""
committer_id = committer.user_id
activity_rights = _get_activity_rights(activity_type, activity_id)
if not check_can_unpublish_activity(committer, activity_rights):
logging.error(
'User %s tried to unpublish %s %s but was refused '
'permission.' % (committer_id, activity_type, activity_id))
raise Exception('This %s cannot be unpublished.' % activity_type)
_change_activity_status(
committer_id, activity_id, activity_type, ACTIVITY_STATUS_PRIVATE,
'%s unpublished.' % activity_type)
activity_services.remove_featured_activity(activity_type, activity_id)
# Rights functions for activities.
def assign_role_for_exploration(
committer, exploration_id, assignee_id, new_role):
"""Assigns a user to the given role and subscribes the assignee to future
exploration updates.
The caller should ensure that assignee_id corresponds to a valid user in
the system.
Args:
committer: UserActionsInfo. The UserActionsInfo object for the
committer.
exploration_id: str. ID of the exploration.
assignee_id: str. ID of the user whose role is being changed.
new_role: str. The name of the new role: One of
ROLE_OWNER
ROLE_EDITOR
ROLE_TRANSLATOR
Raises:
Exception. This could potentially throw an exception from
_assign_role.
"""
_assign_role(
committer, assignee_id, new_role, exploration_id,
constants.ACTIVITY_TYPE_EXPLORATION)
if new_role in [ROLE_OWNER, ROLE_EDITOR, ROLE_TRANSLATOR]:
subscription_services.subscribe_to_exploration(
assignee_id, exploration_id)
def release_ownership_of_exploration(committer, exploration_id):
"""Releases ownership of the given exploration to the community.
Args:
committer: UserActionsInfo. UserActionsInfo object for the committer.
exploration_id: str. ID of the exploration.
Raises:
Exception. This could potentially throw an exception from
_release_ownership_of_activity.
"""
_release_ownership_of_activity(
committer, exploration_id, constants.ACTIVITY_TYPE_EXPLORATION)
def set_private_viewability_of_exploration(
committer, exploration_id, viewable_if_private):
"""Sets the viewable_if_private attribute for the given exploration's rights
object.
If viewable_if_private is True, this allows a private exploration
to be viewed by anyone with the link.
Args:
committer: UserActionsInfo. UserActionsInfo object for the committer.
exploration_id: str. ID of the exploration.
viewable_if_private: bool. Whether the exploration should be made
viewable (by anyone with the link).
Raises:
Exception. The committer does not have the permission to perform change
action.
Exception. If the viewable_if_private property is already as desired.
"""
committer_id = committer.user_id
exploration_rights = get_exploration_rights(exploration_id)
    # The user who can publish the activity can change its private viewability.
if not check_can_publish_activity(committer, exploration_rights):
logging.error(
'User %s tried to change private viewability of exploration %s '
'but was refused permission.' % (committer_id, exploration_id))
raise Exception(
'The viewability status of this exploration cannot be changed.')
old_viewable_if_private = exploration_rights.viewable_if_private
if old_viewable_if_private == viewable_if_private:
raise Exception(
'Trying to change viewability status of this exploration to %s, '
'but that is already the current value.' % viewable_if_private)
exploration_rights.viewable_if_private = viewable_if_private
commit_cmds = [{
'cmd': CMD_CHANGE_PRIVATE_VIEWABILITY,
'old_viewable_if_private': old_viewable_if_private,
'new_viewable_if_private': viewable_if_private,
}]
commit_message = (
'Made exploration viewable to anyone with the link.'
if viewable_if_private else
'Made exploration viewable only to invited playtesters.')
_save_activity_rights(
committer_id, exploration_rights, constants.ACTIVITY_TYPE_EXPLORATION,
commit_message, commit_cmds)
_update_exploration_summary(exploration_rights)
def publish_exploration(committer, exploration_id):
"""Publishes the given exploration.
It is the responsibility of the caller to check that the exploration is
valid prior to publication.
Args:
committer: UserActionsInfo. UserActionsInfo object for the committer.
exploration_id: str. ID of the exploration.
Raises:
Exception. This could potentially throw an exception from
_publish_activity.
"""
_publish_activity(
committer, exploration_id, constants.ACTIVITY_TYPE_EXPLORATION)
def unpublish_exploration(committer, exploration_id):
"""Unpublishes the given exploration.
Args:
committer: UserActionsInfo. UserActionsInfo object for the committer.
exploration_id: str. ID of the exploration.
Raises:
Exception. This could potentially throw an exception from
_unpublish_activity.
"""
_unpublish_activity(
committer, exploration_id, constants.ACTIVITY_TYPE_EXPLORATION)
# Rights functions for collections.
def assign_role_for_collection(
committer, collection_id, assignee_id, new_role):
"""Assign the given user to the given role and subscribes the assignee
to future collection updates.
The caller should ensure that assignee_id corresponds to a valid user in
the system.
Args:
committer: UserActionsInfo. UserActionsInfo object for the committer.
collection_id: str. ID of the collection.
assignee_id: str. ID of the user whose role is being changed.
new_role: str. The name of the new role: One of
ROLE_OWNER
ROLE_EDITOR
Raises:
Exception. This could potentially throw an exception from
_assign_role.
"""
_assign_role(
committer, assignee_id, new_role, collection_id,
constants.ACTIVITY_TYPE_COLLECTION)
if new_role in [ROLE_OWNER, ROLE_EDITOR]:
subscription_services.subscribe_to_collection(
assignee_id, collection_id)
def release_ownership_of_collection(committer, collection_id):
"""Releases ownership of the given collection to the community.
Args:
committer: UserActionsInfo. UserActionsInfo object for the committer.
collection_id: str. ID of the collection.
Raises:
Exception. This could potentially throw an exception from
_release_ownership_of_activity.
"""
_release_ownership_of_activity(
committer, collection_id, constants.ACTIVITY_TYPE_COLLECTION)
def publish_collection(committer, collection_id):
"""Publishes the given collection.
It is the responsibility of the caller to check that the collection is
valid prior to publication.
Args:
committer: UserActionsInfo. UserActionsInfo object for the committer.
collection_id: str. ID of the collection.
Raises:
Exception. This could potentially throw an exception from
_publish_activity.
"""
_publish_activity(
committer, collection_id, constants.ACTIVITY_TYPE_COLLECTION)
def unpublish_collection(committer, collection_id):
"""Unpublishes the given collection.
Args:
committer: UserActionsInfo. UserActionsInfo object for the committer.
collection_id: str. ID of the collection.
Raises:
Exception. This could potentially throw an exception from
_unpublish_activity.
"""
_unpublish_activity(
committer, collection_id, constants.ACTIVITY_TYPE_COLLECTION)
| [((1001, 1047), 'core.platform.models.Registry.import_current_user_services', 'models.Registry.import_current_user_services', ([], {}), '()\n', (1045, 1047), False, 'from core.platform import models\n'), ((1083, 1170), 'core.platform.models.Registry.import_models', 'models.Registry.import_models', (['[models.NAMES.collection, models.NAMES.exploration]'], {}), '([models.NAMES.collection, models.NAMES.\n exploration])\n', (1112, 1170), False, 'from core.platform import models\n'), ((11527, 11592), 'core.domain.exp_services.update_exploration_summary', 'exp_services.update_exploration_summary', (['activity_rights.id', 'None'], {}), '(activity_rights.id, None)\n', (11566, 11592), False, 'from core.domain import exp_services\n'), ((12000, 12071), 'core.domain.collection_services.update_collection_summary', 'collection_services.update_collection_summary', (['activity_rights.id', 'None'], {}), '(activity_rights.id, None)\n', (12045, 12071), False, 'from core.domain import collection_services\n'), ((14976, 15052), 'core.domain.subscription_services.subscribe_to_exploration', 'subscription_services.subscribe_to_exploration', (['committer_id', 'exploration_id'], {}), '(committer_id, exploration_id)\n', (15022, 15052), False, 'from core.domain import subscription_services\n'), ((18704, 18778), 'core.domain.subscription_services.subscribe_to_collection', 'subscription_services.subscribe_to_collection', (['committer_id', 'collection_id'], {}), '(committer_id, collection_id)\n', (18749, 18778), False, 'from core.domain import subscription_services\n'), ((19860, 19930), 'core.domain.user_services.get_human_readable_user_ids', 'user_services.get_human_readable_user_ids', (['collection_rights.owner_ids'], {}), '(collection_rights.owner_ids)\n', (19901, 19930), False, 'from core.domain import user_services\n'), ((30286, 30325), 'core.domain.user_services.get_username', 'user_services.get_username', (['assignee_id'], {}), '(assignee_id)\n', (30312, 30325), False, 'from core.domain import user_services\n'), ((38629, 38699), 'core.domain.activity_services.remove_featured_activity', 'activity_services.remove_featured_activity', (['activity_type', 'activity_id'], {}), '(activity_type, activity_id)\n', (38671, 38699), False, 'from core.domain import activity_services\n'), ((29964, 30131), 'logging.error', 'logging.error', (["('User %s tried to allow user %s to be a(n) %s of activity %s but was refused permission.'\n % (committer_id, assignee_id, new_role, activity_id))"], {}), "(\n 'User %s tried to allow user %s to be a(n) %s of activity %s but was refused permission.'\n % (committer_id, assignee_id, new_role, activity_id))\n", (29977, 30131), False, 'import logging\n'), ((34262, 34404), 'logging.error', 'logging.error', (["('User %s tried to release ownership of %s %s but was refused permission.' %\n (committer_id, activity_type, activity_id))"], {}), "(\n 'User %s tried to release ownership of %s %s but was refused permission.' %\n (committer_id, activity_type, activity_id))\n", (34275, 34404), False, 'import logging\n'), ((37183, 37307), 'logging.error', 'logging.error', (["('User %s tried to publish %s %s but was refused permission.' % (\n committer_id, activity_type, activity_id))"], {}), "('User %s tried to publish %s %s but was refused permission.' %\n (committer_id, activity_type, activity_id))\n", (37196, 37307), False, 'import logging\n'), ((38251, 38383), 'logging.error', 'logging.error', (["('User %s tried to unpublish %s %s but was refused permission.' 
% (\n committer_id, activity_type, activity_id))"], {}), "(\n 'User %s tried to unpublish %s %s but was refused permission.' % (\n committer_id, activity_type, activity_id))\n", (38264, 38383), False, 'import logging\n'), ((39689, 39764), 'core.domain.subscription_services.subscribe_to_exploration', 'subscription_services.subscribe_to_exploration', (['assignee_id', 'exploration_id'], {}), '(assignee_id, exploration_id)\n', (39735, 39764), False, 'from core.domain import subscription_services\n'), ((41321, 41470), 'logging.error', 'logging.error', (["('User %s tried to change private viewability of exploration %s but was refused permission.'\n % (committer_id, exploration_id))"], {}), "(\n 'User %s tried to change private viewability of exploration %s but was refused permission.'\n % (committer_id, exploration_id))\n", (41334, 41470), False, 'import logging\n'), ((44476, 44549), 'core.domain.subscription_services.subscribe_to_collection', 'subscription_services.subscribe_to_collection', (['assignee_id', 'collection_id'], {}), '(assignee_id, collection_id)\n', (44521, 44549), False, 'from core.domain import subscription_services\n'), ((3607, 3679), 'utils.ValidationError', 'utils.ValidationError', (['"""Community-owned explorations cannot be private."""'], {}), "('Community-owned explorations cannot be private.')\n", (3628, 3679), False, 'import utils\n'), ((3787, 3865), 'utils.ValidationError', 'utils.ValidationError', (['"""Public explorations should have no viewers specified."""'], {}), "('Public explorations should have no viewers specified.')\n", (3808, 3865), False, 'import utils\n'), ((4460, 4552), 'utils.ValidationError', 'utils.ValidationError', (["('A user cannot be both an owner and an editor: %s' % owner_editor)"], {}), "('A user cannot be both an owner and an editor: %s' %\n owner_editor)\n", (4481, 4552), False, 'import utils\n'), ((4629, 4728), 'utils.ValidationError', 'utils.ValidationError', (["('A user cannot be both an owner and a translator: %s' % owner_translator)"], {}), "('A user cannot be both an owner and a translator: %s' %\n owner_translator)\n", (4650, 4728), False, 'import utils\n'), ((4801, 4892), 'utils.ValidationError', 'utils.ValidationError', (["('A user cannot be both an owner and a viewer: %s' % owner_viewer)"], {}), "('A user cannot be both an owner and a viewer: %s' %\n owner_viewer)\n", (4822, 4892), False, 'import utils\n'), ((4970, 5072), 'utils.ValidationError', 'utils.ValidationError', (["('A user cannot be both an editor and a translator: %s' % editor_translator)"], {}), "(\n 'A user cannot be both an editor and a translator: %s' % editor_translator)\n", (4991, 5072), False, 'import utils\n'), ((5145, 5238), 'utils.ValidationError', 'utils.ValidationError', (["('A user cannot be both an editor and a viewer: %s' % editor_viewer)"], {}), "('A user cannot be both an editor and a viewer: %s' %\n editor_viewer)\n", (5166, 5238), False, 'import utils\n'), ((5316, 5416), 'utils.ValidationError', 'utils.ValidationError', (["('A user cannot be both a translator and a viewer: %s' % translator_viewer)"], {}), "('A user cannot be both a translator and a viewer: %s' %\n translator_viewer)\n", (5337, 5416), False, 'import utils\n'), ((36270, 36307), 'utils.get_current_time_in_millisecs', 'utils.get_current_time_in_millisecs', ([], {}), '()\n', (36305, 36307), False, 'import utils\n'), ((3348, 3477), 'utils.ValidationError', 'utils.ValidationError', (['"""Community-owned explorations should have no owners, editors, translators or viewers specified."""'], {}), 
"(\n 'Community-owned explorations should have no owners, editors, translators or viewers specified.'\n )\n", (3369, 3477), False, 'import utils\n'), ((6266, 6323), 'core.domain.user_services.get_human_readable_user_ids', 'user_services.get_human_readable_user_ids', (['self.owner_ids'], {}), '(self.owner_ids)\n', (6307, 6323), False, 'from core.domain import user_services\n'), ((6378, 6436), 'core.domain.user_services.get_human_readable_user_ids', 'user_services.get_human_readable_user_ids', (['self.editor_ids'], {}), '(self.editor_ids)\n', (6419, 6436), False, 'from core.domain import user_services\n'), ((6495, 6557), 'core.domain.user_services.get_human_readable_user_ids', 'user_services.get_human_readable_user_ids', (['self.translator_ids'], {}), '(self.translator_ids)\n', (6536, 6557), False, 'from core.domain import user_services\n'), ((6612, 6670), 'core.domain.user_services.get_human_readable_user_ids', 'user_services.get_human_readable_user_ids', (['self.viewer_ids'], {}), '(self.viewer_ids)\n', (6653, 6670), False, 'from core.domain import user_services\n')] |
GDGSNF/setuptools | pkg_resources/_vendor/packaging/_typing.py | 3a209029fd2217c039593cd1f6cb378a28527a59 | """For neatly implementing static typing in packaging.
`mypy` - the static type analysis tool we use - uses the `typing` module, which
provides core functionality fundamental to mypy's functioning.
Generally, `typing` would be imported at runtime and used in that fashion -
it acts as a no-op at runtime and does not have any run-time overhead by
design.
As it turns out, `typing` is not vendorable - it uses separate sources for
Python 2/Python 3. Thus, this codebase can not expect it to be present.
To work around this, mypy allows the typing import to be behind a False-y
optional to prevent it from running at runtime and type-comments can be used
to remove the need for the types to be accessible directly during runtime.
This module provides the False-y guard in a nicely named fashion so that a
curious maintainer can reach here to read this.
In packaging, all static-typing related imports should be guarded as follows:
from packaging._typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import ...
Ref: https://github.com/python/mypy/issues/3216
"""
__all__ = ["TYPE_CHECKING", "cast"]
# The TYPE_CHECKING constant defined by the typing module is False at runtime
# but True while type checking.
TYPE_CHECKING = False # pragma: no cover
# typing's cast syntax requires calling typing.cast at runtime, but we don't
# want to import typing at runtime. Here, we inform the type checkers that
# we're importing `typing.cast` as `cast` and re-implement typing.cast's
# runtime behavior in a block that is ignored by type checkers.
if TYPE_CHECKING: # pragma: no cover
# not executed at runtime
from typing import cast
else:
# executed at runtime
def cast(type_, value): # noqa
return value
| [] |
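A hypothetical consumer module (invented for illustration, not part of packaging) shows the guard the docstring describes: the typing import runs only under the type checker, and cast degrades to an identity function at runtime.

from packaging._typing import TYPE_CHECKING, cast

if TYPE_CHECKING:  # False at runtime, True while type checking
    from typing import Optional

def normalize(name):
    # type: (str) -> Optional[str]
    # At runtime cast() simply returns its second argument unchanged.
    return cast("Optional[str]", name.strip().lower() or None)

assert normalize("  Foo ") == "foo"
assert normalize("   ") is None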
aolin480/openpilot | selfdrive/car/toyota/carcontroller.py | 9ac00c3e5e111a05a0bb10018ccd190571dfff4d | from cereal import car
from common.numpy_fast import clip, interp
from selfdrive.car import apply_toyota_steer_torque_limits, create_gas_interceptor_command, make_can_msg
from selfdrive.car.toyota.toyotacan import create_steer_command, create_ui_command, \
create_accel_command, create_acc_cancel_command, \
create_fcw_command, create_lta_steer_command
from selfdrive.car.toyota.values import CAR, STATIC_DSU_MSGS, NO_STOP_TIMER_CAR, TSS2_CAR, \
MIN_ACC_SPEED, PEDAL_TRANSITION, CarControllerParams
from opendbc.can.packer import CANPacker
from common.op_params import opParams
VisualAlert = car.CarControl.HUDControl.VisualAlert
class CarController():
def __init__(self, dbc_name, CP, VM):
self.last_steer = 0
self.alert_active = False
self.last_standstill = False
self.standstill_req = False
self.steer_rate_limited = False
self.standstill_hack = opParams().get('standstill_hack')
self.packer = CANPacker(dbc_name)
self.gas = 0
self.accel = 0
def update(self, enabled, active, CS, frame, actuators, pcm_cancel_cmd, hud_alert,
left_line, right_line, lead, left_lane_depart, right_lane_depart):
# gas and brake
if CS.CP.enableGasInterceptor and enabled:
MAX_INTERCEPTOR_GAS = 0.5
# RAV4 has very sensitive gas pedal
if CS.CP.carFingerprint in [CAR.RAV4, CAR.RAV4H, CAR.HIGHLANDER, CAR.HIGHLANDERH]:
PEDAL_SCALE = interp(CS.out.vEgo, [0.0, MIN_ACC_SPEED, MIN_ACC_SPEED + PEDAL_TRANSITION], [0.15, 0.3, 0.0])
elif CS.CP.carFingerprint in [CAR.COROLLA]:
PEDAL_SCALE = interp(CS.out.vEgo, [0.0, MIN_ACC_SPEED, MIN_ACC_SPEED + PEDAL_TRANSITION], [0.3, 0.4, 0.0])
else:
PEDAL_SCALE = interp(CS.out.vEgo, [0.0, MIN_ACC_SPEED, MIN_ACC_SPEED + PEDAL_TRANSITION], [0.4, 0.5, 0.0])
# offset for creep and windbrake
pedal_offset = interp(CS.out.vEgo, [0.0, 2.3, MIN_ACC_SPEED + PEDAL_TRANSITION], [-.4, 0.0, 0.2])
pedal_command = PEDAL_SCALE * (actuators.accel + pedal_offset)
interceptor_gas_cmd = clip(pedal_command, 0., MAX_INTERCEPTOR_GAS)
else:
interceptor_gas_cmd = 0.
pcm_accel_cmd = clip(actuators.accel, CarControllerParams.ACCEL_MIN, CarControllerParams.ACCEL_MAX)
# steer torque
new_steer = int(round(actuators.steer * CarControllerParams.STEER_MAX))
apply_steer = apply_toyota_steer_torque_limits(new_steer, self.last_steer, CS.out.steeringTorqueEps, CarControllerParams)
self.steer_rate_limited = new_steer != apply_steer
# Cut steering while we're in a known fault state (2s)
if not enabled or CS.steer_state in [9, 25] or abs(CS.out.steeringRateDeg) > 100:
apply_steer = 0
apply_steer_req = 0
else:
apply_steer_req = 1
# TODO: probably can delete this. CS.pcm_acc_status uses a different signal
# than CS.cruiseState.enabled. confirm they're not meaningfully different
if not enabled and CS.pcm_acc_status:
pcm_cancel_cmd = 1
# on entering standstill, send standstill request
if CS.out.standstill and not self.last_standstill and CS.CP.carFingerprint not in NO_STOP_TIMER_CAR and not self.standstill_hack:
self.standstill_req = True
if CS.pcm_acc_status != 8:
# pcm entered standstill or it's disabled
self.standstill_req = False
self.last_steer = apply_steer
self.last_standstill = CS.out.standstill
can_sends = []
#*** control msgs ***
#print("steer {0} {1} {2} {3}".format(apply_steer, min_lim, max_lim, CS.steer_torque_motor)
    # toyota can trace shows this message at 42Hz, with counter adding alternately 1 and 2;
    # sending it at 100Hz seems to allow a higher rate limit, as the rate limit seems imposed
# on consecutive messages
can_sends.append(create_steer_command(self.packer, apply_steer, apply_steer_req, frame))
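    # TSS2 cars also get the LTA steering message; a zeroed one is sent at half rate (every other frame)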
if frame % 2 == 0 and CS.CP.carFingerprint in TSS2_CAR:
can_sends.append(create_lta_steer_command(self.packer, 0, 0, frame // 2))
# LTA mode. Set ret.steerControlType = car.CarParams.SteerControlType.angle and whitelist 0x191 in the panda
# if frame % 2 == 0:
# can_sends.append(create_steer_command(self.packer, 0, 0, frame // 2))
# can_sends.append(create_lta_steer_command(self.packer, actuators.steeringAngleDeg, apply_steer_req, frame // 2))
    # we can spam the cancel command over CAN even if we are only using lateral control
if (frame % 3 == 0 and CS.CP.openpilotLongitudinalControl) or pcm_cancel_cmd:
lead = lead or CS.out.vEgo < 12. # at low speed we always assume the lead is present so ACC can be engaged
# Lexus IS uses a different cancellation message
if pcm_cancel_cmd and CS.CP.carFingerprint in [CAR.LEXUS_IS, CAR.LEXUS_RC]:
can_sends.append(create_acc_cancel_command(self.packer))
elif CS.CP.openpilotLongitudinalControl:
can_sends.append(create_accel_command(self.packer, pcm_accel_cmd, pcm_cancel_cmd, self.standstill_req, lead, CS.acc_type, CS.distance_btn))
self.accel = pcm_accel_cmd
else:
can_sends.append(create_accel_command(self.packer, 0, pcm_cancel_cmd, False, lead, CS.acc_type, CS.distance_btn))
if frame % 2 == 0 and CS.CP.enableGasInterceptor and CS.CP.openpilotLongitudinalControl:
      # send exactly zero if the gas command is zero; the interceptor forwards the maximum of the read pedal value and the gas command.
      # This prevents unexpected pedal range rescaling
can_sends.append(create_gas_interceptor_command(self.packer, interceptor_gas_cmd, frame // 2))
self.gas = interceptor_gas_cmd
    # the UI msg is normally sent once every 100 frames, but we send it immediately if:
    # - there is something to display
    # - there is something to stop displaying
fcw_alert = hud_alert == VisualAlert.fcw
steer_alert = hud_alert in [VisualAlert.steerRequired, VisualAlert.ldw]
send_ui = False
if ((fcw_alert or steer_alert) and not self.alert_active) or \
(not (fcw_alert or steer_alert) and self.alert_active):
send_ui = True
self.alert_active = not self.alert_active
elif pcm_cancel_cmd:
# forcing the pcm to disengage causes a bad fault sound so play a good sound instead
send_ui = True
if (frame % 100 == 0 or send_ui):
can_sends.append(create_ui_command(self.packer, steer_alert, pcm_cancel_cmd, left_line, right_line, left_lane_depart, right_lane_depart, enabled))
if frame % 100 == 0 and CS.CP.enableDsu:
can_sends.append(create_fcw_command(self.packer, fcw_alert))
# *** static msgs ***
for (addr, cars, bus, fr_step, vl) in STATIC_DSU_MSGS:
if frame % fr_step == 0 and CS.CP.enableDsu and CS.CP.carFingerprint in cars:
can_sends.append(make_can_msg(addr, vl, bus))
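    # report back the post-limit commands that were actually sent to the car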
new_actuators = actuators.copy()
new_actuators.steer = apply_steer / CarControllerParams.STEER_MAX
new_actuators.accel = self.accel
new_actuators.gas = self.gas
return new_actuators, can_sends
| [((1057, 1076), 'opendbc.can.packer.CANPacker', 'CANPacker', (['dbc_name'], {}), '(dbc_name)\n', (1066, 1076), False, 'from opendbc.can.packer import CANPacker\n'), ((2264, 2352), 'common.numpy_fast.clip', 'clip', (['actuators.accel', 'CarControllerParams.ACCEL_MIN', 'CarControllerParams.ACCEL_MAX'], {}), '(actuators.accel, CarControllerParams.ACCEL_MIN, CarControllerParams.\n ACCEL_MAX)\n', (2268, 2352), False, 'from common.numpy_fast import clip, interp\n'), ((2462, 2574), 'selfdrive.car.apply_toyota_steer_torque_limits', 'apply_toyota_steer_torque_limits', (['new_steer', 'self.last_steer', 'CS.out.steeringTorqueEps', 'CarControllerParams'], {}), '(new_steer, self.last_steer, CS.out.\n steeringTorqueEps, CarControllerParams)\n', (2494, 2574), False, 'from selfdrive.car import apply_toyota_steer_torque_limits, create_gas_interceptor_command, make_can_msg\n'), ((1978, 2066), 'common.numpy_fast.interp', 'interp', (['CS.out.vEgo', '[0.0, 2.3, MIN_ACC_SPEED + PEDAL_TRANSITION]', '[-0.4, 0.0, 0.2]'], {}), '(CS.out.vEgo, [0.0, 2.3, MIN_ACC_SPEED + PEDAL_TRANSITION], [-0.4, \n 0.0, 0.2])\n', (1984, 2066), False, 'from common.numpy_fast import clip, interp\n'), ((2158, 2203), 'common.numpy_fast.clip', 'clip', (['pedal_command', '(0.0)', 'MAX_INTERCEPTOR_GAS'], {}), '(pedal_command, 0.0, MAX_INTERCEPTOR_GAS)\n', (2162, 2203), False, 'from common.numpy_fast import clip, interp\n'), ((3878, 3948), 'selfdrive.car.toyota.toyotacan.create_steer_command', 'create_steer_command', (['self.packer', 'apply_steer', 'apply_steer_req', 'frame'], {}), '(self.packer, apply_steer, apply_steer_req, frame)\n', (3898, 3948), False, 'from selfdrive.car.toyota.toyotacan import create_steer_command, create_ui_command, create_accel_command, create_acc_cancel_command, create_fcw_command, create_lta_steer_command\n'), ((1004, 1014), 'common.op_params.opParams', 'opParams', ([], {}), '()\n', (1012, 1014), False, 'from common.op_params import opParams\n'), ((1532, 1629), 'common.numpy_fast.interp', 'interp', (['CS.out.vEgo', '[0.0, MIN_ACC_SPEED, MIN_ACC_SPEED + PEDAL_TRANSITION]', '[0.15, 0.3, 0.0]'], {}), '(CS.out.vEgo, [0.0, MIN_ACC_SPEED, MIN_ACC_SPEED + PEDAL_TRANSITION],\n [0.15, 0.3, 0.0])\n', (1538, 1629), False, 'from common.numpy_fast import clip, interp\n'), ((4033, 4088), 'selfdrive.car.toyota.toyotacan.create_lta_steer_command', 'create_lta_steer_command', (['self.packer', '(0)', '(0)', '(frame // 2)'], {}), '(self.packer, 0, 0, frame // 2)\n', (4057, 4088), False, 'from selfdrive.car.toyota.toyotacan import create_steer_command, create_ui_command, create_accel_command, create_acc_cancel_command, create_fcw_command, create_lta_steer_command\n'), ((5555, 5631), 'selfdrive.car.create_gas_interceptor_command', 'create_gas_interceptor_command', (['self.packer', 'interceptor_gas_cmd', '(frame // 2)'], {}), '(self.packer, interceptor_gas_cmd, frame // 2)\n', (5585, 5631), False, 'from selfdrive.car import apply_toyota_steer_torque_limits, create_gas_interceptor_command, make_can_msg\n'), ((6342, 6474), 'selfdrive.car.toyota.toyotacan.create_ui_command', 'create_ui_command', (['self.packer', 'steer_alert', 'pcm_cancel_cmd', 'left_line', 'right_line', 'left_lane_depart', 'right_lane_depart', 'enabled'], {}), '(self.packer, steer_alert, pcm_cancel_cmd, left_line,\n right_line, left_lane_depart, right_lane_depart, enabled)\n', (6359, 6474), False, 'from selfdrive.car.toyota.toyotacan import create_steer_command, create_ui_command, create_accel_command, create_acc_cancel_command, create_fcw_command, create_lta_steer_command\n'), ((6541, 6583), 'selfdrive.car.toyota.toyotacan.create_fcw_command', 'create_fcw_command', (['self.packer', 'fcw_alert'], {}), '(self.packer, fcw_alert)\n', (6559, 6583), False, 'from selfdrive.car.toyota.toyotacan import create_steer_command, create_ui_command, create_accel_command, create_acc_cancel_command, create_fcw_command, create_lta_steer_command\n'), ((1698, 1794), 'common.numpy_fast.interp', 'interp', (['CS.out.vEgo', '[0.0, MIN_ACC_SPEED, MIN_ACC_SPEED + PEDAL_TRANSITION]', '[0.3, 0.4, 0.0]'], {}), '(CS.out.vEgo, [0.0, MIN_ACC_SPEED, MIN_ACC_SPEED + PEDAL_TRANSITION],\n [0.3, 0.4, 0.0])\n', (1704, 1794), False, 'from common.numpy_fast import clip, interp\n'), ((1825, 1921), 'common.numpy_fast.interp', 'interp', (['CS.out.vEgo', '[0.0, MIN_ACC_SPEED, MIN_ACC_SPEED + PEDAL_TRANSITION]', '[0.4, 0.5, 0.0]'], {}), '(CS.out.vEgo, [0.0, MIN_ACC_SPEED, MIN_ACC_SPEED + PEDAL_TRANSITION],\n [0.4, 0.5, 0.0])\n', (1831, 1921), False, 'from common.numpy_fast import clip, interp\n'), ((4871, 4909), 'selfdrive.car.toyota.toyotacan.create_acc_cancel_command', 'create_acc_cancel_command', (['self.packer'], {}), '(self.packer)\n', (4896, 4909), False, 'from selfdrive.car.toyota.toyotacan import create_steer_command, create_ui_command, create_accel_command, create_acc_cancel_command, create_fcw_command, create_lta_steer_command\n'), ((6780, 6807), 'selfdrive.car.make_can_msg', 'make_can_msg', (['addr', 'vl', 'bus'], {}), '(addr, vl, bus)\n', (6792, 6807), False, 'from selfdrive.car import apply_toyota_steer_torque_limits, create_gas_interceptor_command, make_can_msg\n'), ((4983, 5109), 'selfdrive.car.toyota.toyotacan.create_accel_command', 'create_accel_command', (['self.packer', 'pcm_accel_cmd', 'pcm_cancel_cmd', 'self.standstill_req', 'lead', 'CS.acc_type', 'CS.distance_btn'], {}), '(self.packer, pcm_accel_cmd, pcm_cancel_cmd, self.\n standstill_req, lead, CS.acc_type, CS.distance_btn)\n', (5003, 5109), False, 'from selfdrive.car.toyota.toyotacan import create_steer_command, create_ui_command, create_accel_command, create_acc_cancel_command, create_fcw_command, create_lta_steer_command\n'), ((5178, 5278), 'selfdrive.car.toyota.toyotacan.create_accel_command', 'create_accel_command', (['self.packer', '(0)', 'pcm_cancel_cmd', '(False)', 'lead', 'CS.acc_type', 'CS.distance_btn'], {}), '(self.packer, 0, pcm_cancel_cmd, False, lead, CS.\n acc_type, CS.distance_btn)\n', (5198, 5278), False, 'from selfdrive.car.toyota.toyotacan import create_steer_command, create_ui_command, create_accel_command, create_acc_cancel_command, create_fcw_command, create_lta_steer_command\n')]