repo_name (string, 7–94 chars) | repo_path (string, 4–237 chars) | repo_head_hexsha (string, 40 chars) | content (string, 10–680k chars) | apis (string, 2–680k chars)
---|---|---|---|---|
ytyaru0/Python.TemplateFileMaker.20180314204216 | src/backup/template/PositionalArgumentTemplate.py | 4849f982acea5d86b711c5dec4cc046016ab1031 | from string import Template
import re
class PositionalArgumentTemplate(Template):
    # (?i): enables case-insensitive matching mode
    # (?-i): disables case-insensitive matching mode
idpattern_default = Template.idpattern # (?-i:[_a-zA-Z][_a-zA-Z0-9]*)
idpattern = '([0-9]+)'
def find_place_holders(self, template:str):
#for m in re.findall(self.pattern, template):
#for m in re.finditer(self.pattern, template):
for m in self.pattern.finditer(template):
print(m, type(m))
#print(dir(m))
#print(len(m.groups()))
print(m[0])
#print(m.groups())
#print(m, m.groups(), m.group('named'), type(m))
#print(m.group('escaped'))
#print(m.group('named'))
#print(m.group('braced'))
#print(m.group('invalid'))
if __name__ == '__main__':
template_str = '${0} is Aug.'
t = PositionalArgumentTemplate(template_str)
print(template_str)
print(dir(t))
print(t.delimiter)
print(t.idpattern)
print(type(t.idpattern))
print(t.flags)
print(t.pattern)
print(t.substitute(**{'0':'V'}))
t.find_place_holders(template_str)
| [] |
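The row above customizes `string.Template.idpattern` so placeholder names may be purely numeric. A minimal sketch of the same idea with the debug prints stripped (assuming Python 3's `string.Template`, where overriding `idpattern` in a subclass rebuilds the matching pattern):

```python
from string import Template

class PositionalTemplate(Template):
    # Accept purely numeric placeholder names such as ${0}, ${1}, ...
    idpattern = r'([0-9]+)'

t = PositionalTemplate('${0} is Aug.')
print(t.substitute({'0': 'V'}))  # -> "V is Aug."
```

Passing the mapping positionally is cleaner than the `**{'0': ...}` expansion used above, since `'0'` is not a valid keyword identifier.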
kdhaigud/easycla | cla-backend/cla/tests/unit/test_company.py | f913f8dbf658acf4711b601f9312ca5663a4efe8 | # Copyright The Linux Foundation and each contributor to CommunityBridge.
# SPDX-License-Identifier: MIT
import json
import os
import requests
import uuid
import hug
import pytest
from falcon import HTTP_200, HTTP_409
import cla
from cla import routes
ID_TOKEN = os.environ.get('ID_TOKEN')
API_URL = os.environ.get('API_URL')
def test_create_company_duplicate():
"""
Test creating duplicate company names
"""
    # import pdb; pdb.set_trace()  # leftover debugger breakpoint; would hang an automated run
url = f'{API_URL}/v1/company'
company_name = 'test_company_name'
data = {
'company_id' : uuid.uuid4() ,
'company_name' : company_name ,
}
headers = {
'Authorization' : f'Bearer {ID_TOKEN}'
}
response = requests.post(url, data=data, headers=headers)
    assert response.status_code == 200  # requests exposes status_code (int), not falcon's status string
# add duplicate company
data = {
'company_id' : uuid.uuid4(),
'company_name' : company_name
}
req = hug.test.post(routes, url, data=data, headers=headers)
assert req.status == HTTP_409
| [((267, 293), 'os.environ.get', 'os.environ.get', (['"""ID_TOKEN"""'], {}), "('ID_TOKEN')\n", (281, 293), False, 'import os\n'), ((304, 329), 'os.environ.get', 'os.environ.get', (['"""API_URL"""'], {}), "('API_URL')\n", (318, 329), False, 'import os\n'), ((442, 457), 'pdb.set_trace', 'pdb.set_trace', ([], {}), '()\n', (455, 457), False, 'import pdb\n'), ((712, 758), 'requests.post', 'requests.post', (['url'], {'data': 'data', 'headers': 'headers'}), '(url, data=data, headers=headers)\n', (725, 758), False, 'import requests\n'), ((931, 985), 'hug.test.post', 'hug.test.post', (['routes', 'url'], {'data': 'data', 'headers': 'headers'}), '(routes, url, data=data, headers=headers)\n', (944, 985), False, 'import hug\n'), ((567, 579), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (577, 579), False, 'import uuid\n'), ((863, 875), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (873, 875), False, 'import uuid\n')] |
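The duplicate-company test above mixes a live `requests` call with hug's offline test client. A self-contained sketch of the offline pattern, using a hypothetical endpoint rather than easycla's actual route handler:

```python
import sys
import hug
from falcon import HTTP_200

@hug.post('/v1/company')
def create_company(company_name: str):
    # Hypothetical handler: echo the payload back.
    return {'company_name': company_name}

response = hug.test.post(sys.modules[__name__], '/v1/company',
                  {'company_name': 'test_company_name'})
assert response.status == HTTP_200
assert response.data == {'company_name': 'test_company_name'}
```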
mathematicalmichael/SpringNodes | py/WatchDialog.py | 3ff4034b6e57ee6efa55c963e1819f3d30a2c4ab | # Copyright(c) 2017, Dimitar Venkov
# @5devene, [email protected]
# www.badmonkeys.net
import clr
clr.AddReference('System.Windows.Forms')
clr.AddReference('System.Drawing')
from System.Drawing import Point, Color, Font
from System.Windows.Forms import *
from cStringIO import StringIO
str_file = StringIO()
size1 = [30, 23] #height, width
def tolist(obj1):
if hasattr(obj1,"__iter__"): return obj1
else: return [obj1]
def write_str(str1, GCL, str_file=str_file, size1=size1):
ln1 = len(str1)
if ln1 > size1[1]:
size1[1] = ln1
str_file.write("%s%s\n" % ("".join(GCL), str1) )
def list2str(l1, writeInd, GCL=None, GCint=-1, size1=size1):
if GCL is None:
GCL = []
GCint += 1
GCL.append(None)
for i, x in enumerate(l1):
GCL[GCint] = "[%i] " % i if writeInd else " "
if hasattr(x, "Id"): #is element
write_str("%s %i" % (x.ToString(), x.Id), GCL)
elif hasattr(x, "__iter__"):
if not x:
write_str("Empty List", GCL)
else:
list2str(x, writeInd, GCL, GCint, size1)
elif x is None:
write_str("null", GCL)
else:
write_str(x.ToString(), GCL)
size1[0] += 19
GCL.pop(GCint)
GCint -= 1
class WatchBox(Form):
def __init__(self, t1):
self.Text = "SpringNodes: Expandable Watch Window"
self.BackColor = Color.FromArgb(40,40,40)
self.ControlBox = False
self.TopMost = True
self.FormBorderStyle = FormBorderStyle.Sizable
self.StartPosition = FormStartPosition.CenterScreen
self.Resize += self.resize1
self.text1 = None
self.button1 = Button()
self.button1.Text = 'Close'
self.button1.Font = Font("Calibri", 10)
self.button1.AutoSize = True
self.button1.Width = 200
self.button1.ForeColor = Color.FromArgb(234,234,234)
self.button1.Click += self.save
self.Controls.Add(self.button1)
self.box1 = RichTextBox()
self.box1.Multiline = True
self.box1.Location = Point(5, 5)
self.box1.Font = Font("Calibri", 12)
self.box1.BackColor = Color.FromArgb(53,53,53)
self.box1.ForeColor = Color.FromArgb(234,234,234)
self.box1.DetectUrls = True
self.box1.Text = t1
self.Controls.Add(self.box1)
def adjust_controls(self, height1, width1):
if height1 > 800:
height1 = 800
self.box1.ScrollBars = RichTextBoxScrollBars.Vertical
if width1 < 23 : width1 = 23
if width1 > 88: width1 = 88
self.Width = 10 + (width1 + 2) * 9 #character width seems to vary between PCs
self.Height = height1 + 90
self.box1.Width = self.Width - 17
self.box1.Height = self.Height - 80
self.button1.Location = Point(self.Width/2 - 103, self.Height - 70)
def resize1(self, sender, event):
if self.Width < 210: self.Width = 230
if self.Height < 120: self.Height = 120
self.box1.Width = self.Width - 17
self.box1.Height = self.Height - 80
self.button1.Location = Point(self.Width/2 - 103, self.Height - 70)
def save(self, sender, event):
self.text1 = self.box1.Text
self.Close()
l1 = [] if IN[0] is None else tolist(IN[0])
list2str(l1, IN[1])
str_content = str_file.getvalue()
str_file.close()
width1 = 100
form = WatchBox(str_content)
form.adjust_controls(*size1)
Application.Run(form)
OUT = form.text1
Application.Exit()
form.Dispose() | [((104, 144), 'clr.AddReference', 'clr.AddReference', (['"""System.Windows.Forms"""'], {}), "('System.Windows.Forms')\n", (120, 144), False, 'import clr\n'), ((145, 179), 'clr.AddReference', 'clr.AddReference', (['"""System.Drawing"""'], {}), "('System.Drawing')\n", (161, 179), False, 'import clr\n'), ((304, 314), 'cStringIO.StringIO', 'StringIO', ([], {}), '()\n', (312, 314), False, 'from cStringIO import StringIO\n'), ((1263, 1289), 'System.Drawing.Color.FromArgb', 'Color.FromArgb', (['(40)', '(40)', '(40)'], {}), '(40, 40, 40)\n', (1277, 1289), False, 'from System.Drawing import Point, Color, Font\n'), ((1568, 1587), 'System.Drawing.Font', 'Font', (['"""Calibri"""', '(10)'], {}), "('Calibri', 10)\n", (1572, 1587), False, 'from System.Drawing import Point, Color, Font\n'), ((1673, 1702), 'System.Drawing.Color.FromArgb', 'Color.FromArgb', (['(234)', '(234)', '(234)'], {}), '(234, 234, 234)\n', (1687, 1702), False, 'from System.Drawing import Point, Color, Font\n'), ((1852, 1863), 'System.Drawing.Point', 'Point', (['(5)', '(5)'], {}), '(5, 5)\n', (1857, 1863), False, 'from System.Drawing import Point, Color, Font\n'), ((1883, 1902), 'System.Drawing.Font', 'Font', (['"""Calibri"""', '(12)'], {}), "('Calibri', 12)\n", (1887, 1902), False, 'from System.Drawing import Point, Color, Font\n'), ((1927, 1953), 'System.Drawing.Color.FromArgb', 'Color.FromArgb', (['(53)', '(53)', '(53)'], {}), '(53, 53, 53)\n', (1941, 1953), False, 'from System.Drawing import Point, Color, Font\n'), ((1976, 2005), 'System.Drawing.Color.FromArgb', 'Color.FromArgb', (['(234)', '(234)', '(234)'], {}), '(234, 234, 234)\n', (1990, 2005), False, 'from System.Drawing import Point, Color, Font\n'), ((2497, 2542), 'System.Drawing.Point', 'Point', (['(self.Width / 2 - 103)', '(self.Height - 70)'], {}), '(self.Width / 2 - 103, self.Height - 70)\n', (2502, 2542), False, 'from System.Drawing import Point, Color, Font\n'), ((2760, 2805), 'System.Drawing.Point', 'Point', (['(self.Width / 2 - 103)', '(self.Height - 70)'], {}), '(self.Width / 2 - 103, self.Height - 70)\n', (2765, 2805), False, 'from System.Drawing import Point, Color, Font\n')] |
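The `tolist`/`list2str` pair above renders arbitrarily nested lists as indexed text lines for the watch window. A pure-Python sketch of that recursion without the IronPython/WinForms parts (illustrative only; the original also tracks column width and Revit element ids):

```python
def render(obj, prefix=""):
    # Scalars (and None) become single lines; iterables recurse with an index prefix.
    if obj is None:
        return [prefix + "null"]
    if not hasattr(obj, "__iter__") or isinstance(obj, str):
        return [prefix + str(obj)]
    if not obj:
        return [prefix + "Empty List"]
    lines = []
    for i, item in enumerate(obj):
        lines.extend(render(item, prefix + "[%i] " % i))
    return lines

print("\n".join(render([1, [2, None], []])))
```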
mvj3/leetcode | 292-nim-game.py | 3111199beeaefbb3a74173e783ed21c9e53ab203 | """
Question:
Nim Game
You are playing the following Nim Game with your friend: There is a heap of stones on the table, each time one of you take turns to remove 1 to 3 stones. The one who removes the last stone will be the winner. You will take the first turn to remove the stones.
Both of you are very clever and have optimal strategies for the game. Write a function to determine whether you can win the game given the number of stones in the heap.
For example, if there are 4 stones in the heap, then you will never win the game: no matter 1, 2, or 3 stones you remove, the last stone will always be removed by your friend.
Hint:
If there are 5 stones in the heap, could you figure out a way to remove the stones such that you will always be the winner?
Credits:
Special thanks to @jianchao.li.fighter for adding this problem and creating all test cases.
Performance:
1. Total Accepted: 31755 Total Submissions: 63076 Difficulty: Easy
2. Your runtime beats 43.52% of python submissions.
"""
class Solution(object):
def canWinNim(self, n):
"""
:type n: int
:rtype: bool
"""
if n <= 3:
return True
        return n % 4 != 0
assert Solution().canWinNim(0) is True
assert Solution().canWinNim(1) is True
assert Solution().canWinNim(2) is True
assert Solution().canWinNim(3) is True
assert Solution().canWinNim(4) is False
assert Solution().canWinNim(5) is True
assert Solution().canWinNim(6) is True
assert Solution().canWinNim(7) is True
assert Solution().canWinNim(8) is False
| [] |
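The `n % 4` rule in the solution above follows from backward induction: positions 1-3 are wins (take everything), position 4 is a loss (every move hands the opponent a win), and the pattern repeats. A brute-force check of that reasoning with memoized game search (my sketch, not part of the submission; note the submission's `n <= 3` branch also returns True for n == 0, which the game-theoretic search would call a loss):

```python
from functools import lru_cache

@lru_cache(maxsize=None)
def wins(n):
    # The player to move wins if some move of 1-3 stones
    # leaves the opponent in a losing position.
    return any(not wins(n - k) for k in (1, 2, 3) if n - k >= 0)

assert all(wins(n) == (n % 4 != 0) for n in range(1, 200))
```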
lldelisle/bx-python | script_tests/maf_extract_ranges_indexed_tests.py | 19ab41e0905221e3fcaaed4b74faf2d7cda0d15a | import unittest
import base
class Test(base.BaseScriptTest, unittest.TestCase):
command_line = "./scripts/maf_extract_ranges_indexed.py ./test_data/maf_tests/mm8_chr7_tiny.maf -c -m 5 -p mm8."
input_stdin = base.TestFile(filename="./test_data/maf_tests/dcking_ghp074.bed")
output_stdout = base.TestFile(filename="./test_data/maf_tests/dcking_ghp074.maf")
| [((218, 283), 'base.TestFile', 'base.TestFile', ([], {'filename': '"""./test_data/maf_tests/dcking_ghp074.bed"""'}), "(filename='./test_data/maf_tests/dcking_ghp074.bed')\n", (231, 283), False, 'import base\n'), ((304, 369), 'base.TestFile', 'base.TestFile', ([], {'filename': '"""./test_data/maf_tests/dcking_ghp074.maf"""'}), "(filename='./test_data/maf_tests/dcking_ghp074.maf')\n", (317, 369), False, 'import base\n')] |
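The test above relies on a golden-file harness from the project's `base` module: run the script with a recorded stdin and diff its stdout against a stored expectation. A rough stand-in for what such a harness does (an assumption about `base.BaseScriptTest`, not its actual code):

```python
import subprocess

def run_script_test(command_line, stdin_path, expected_stdout_path):
    # Feed the recorded stdin to the script and compare its stdout
    # byte-for-byte against the stored golden file.
    with open(stdin_path, 'rb') as stdin_file:
        result = subprocess.run(command_line.split(), stdin=stdin_file,
                                capture_output=True, check=True)
    with open(expected_stdout_path, 'rb') as expected:
        assert result.stdout == expected.read()
```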
elxavicio/QSTK | qstklearn/1knn.py | 4981506c37227a72404229d5e1e0887f797a5d57 | '''
(c) 2011, 2012 Georgia Tech Research Corporation
This source code is released under the New BSD license. Please see
http://wiki.quantsoftware.org/index.php?title=QSTK_License
for license details.
Created on Feb 20, 2011
@author: John Cornwell
@organization: Georgia Institute of Technology
@contact: [email protected]
@summary: This is an implementation of the 1-KNN algorithm for ranking features quickly.
It uses the knn implementation.
@status: oneKNN functions correctly, optimized to use n^2/2 algorithm.
'''
import matplotlib.pyplot as plt
from pylab import gca
import itertools
import string
import numpy as np
import math
import knn
from time import clock
'''
@summary: Query function for 1KNN, return value is a double between 0 and 1.
@param naData: A 2D numpy array. Each row is a data point with the final column containing the classification.
'''
def oneKnn( naData ):
if naData.ndim != 2:
raise Exception( "Data should have two dimensions" )
lLen = naData.shape[0]
''' # of dimensions, subtract one for classification '''
lDim = naData.shape[1] - 1
''' Start best distances as very large '''
ldDistances = [1E300] * lLen
llIndexes = [-1] * lLen
dDistance = 0.0;
''' Loop through finding closest neighbors '''
for i in range( lLen ):
for j in range( i+1, lLen ):
dDistance = 0.0
for k in range( 0, lDim ):
dDistance += (naData[i][k] - naData[j][k])**2
dDistance = math.sqrt( dDistance )
''' Two distances to check, for i's best, and j's best '''
if dDistance < ldDistances[i]:
ldDistances[i] = dDistance
llIndexes[i] = j
if dDistance < ldDistances[j]:
ldDistances[j] = dDistance
llIndexes[j] = i
lCount = 0
''' Now count # of matching pairs '''
for i in range( lLen ):
if naData[i][-1] == naData[ llIndexes[i] ][-1]:
lCount = lCount + 1
return float(lCount) / lLen
''' Test function to plot results '''
def _plotResults( naDist1, naDist2, lfOneKnn, lf5Knn ):
plt.clf()
plt.subplot(311)
plt.scatter( naDist1[:,0], naDist1[:,1] )
plt.scatter( naDist2[:,0], naDist2[:,1], color='r' )
#plt.ylabel( 'Feature 2' )
#plt.xlabel( 'Feature 1' )
#gca().annotate( '', xy=( .8, 0 ), xytext=( -.3 , 0 ), arrowprops=dict(facecolor='red', shrink=0.05) )
gca().annotate( '', xy=( .7, 0 ), xytext=( 1.5 , 0 ), arrowprops=dict(facecolor='black', shrink=0.05) )
plt.title( 'Data Distribution' )
plt.subplot(312)
plt.plot( range( len(lfOneKnn) ), lfOneKnn )
plt.ylabel( '1-KNN Value' )
#plt.xlabel( 'Distribution Merge' )
plt.title( '1-KNN Performance' )
plt.subplot(313)
plt.plot( range( len(lf5Knn) ), lf5Knn )
plt.ylabel( '% Correct Classification' )
#plt.xlabel( 'Distribution Merge' )
plt.title( '5-KNN Performance' )
plt.subplots_adjust()
plt.show()
''' Function to plot 2 distributions '''
def _plotDist( naDist1, naDist2, i ):
plt.clf()
plt.scatter( naDist1[:,0], naDist1[:,1] )
plt.scatter( naDist2[:,0], naDist2[:,1], color='r' )
plt.ylabel( 'Feature 2' )
plt.xlabel( 'Feature 1' )
plt.title( 'Iteration ' + str(i) )
plt.show()
''' Function to test KNN performance '''
def _knnResult( naData ):
''' Split up data into training/testing '''
    lSplit = int(naData.shape[0] * .7)  # slice indices must be integers
naTrain = naData[:lSplit, :]
naTest = naData[lSplit:, :]
knn.addEvidence( naTrain.astype(float), 1 );
''' Query with last column omitted and 5 nearest neighbors '''
naResults = knn.query( naTest[:,:-1], 5, 'mode')
''' Count returns which are correct '''
lCount = 0
for i, dVal in enumerate(naResults):
if dVal == naTest[i,-1]:
lCount = lCount + 1
dResult = float(lCount) / naResults.size
return dResult
''' Tests performance of 1-KNN '''
def _test1():
''' Generate three random samples to show the value of 1-KNN compared to 5KNN learner performance '''
for i in range(3):
''' Select one of three distributions '''
if i == 0:
naTest1 = np.random.normal( loc=[0,0],scale=.25,size=[500,2] )
naTest1 = np.hstack( (naTest1, np.zeros(500).reshape(-1,1) ) )
naTest2 = np.random.normal( loc=[1.5,0],scale=.25,size=[500,2] )
naTest2 = np.hstack( (naTest2, np.ones(500).reshape(-1,1) ) )
elif i == 1:
naTest1 = np.random.normal( loc=[0,0],scale=.25,size=[500,2] )
naTest1 = np.hstack( (naTest1, np.zeros(500).reshape(-1,1) ) )
naTest2 = np.random.normal( loc=[1.5,0],scale=.1,size=[500,2] )
naTest2 = np.hstack( (naTest2, np.ones(500).reshape(-1,1) ) )
else:
naTest1 = np.random.normal( loc=[0,0],scale=.25,size=[500,2] )
naTest1 = np.hstack( (naTest1, np.zeros(500).reshape(-1,1) ) )
naTest2 = np.random.normal( loc=[1.5,0],scale=.25,size=[250,2] )
naTest2 = np.hstack( (naTest2, np.ones(250).reshape(-1,1) ) )
naOrig = np.vstack( (naTest1, naTest2) )
naBoth = np.vstack( (naTest1, naTest2) )
''' Keep track of runtimes '''
t = clock()
cOneRuntime = t-t;
cKnnRuntime = t-t;
lfResults = []
lfKnnResults = []
for i in range( 15 ):
#_plotDist( naTest1, naBoth[100:,:], i )
t = clock()
lfResults.append( oneKnn( naBoth ) )
cOneRuntime = cOneRuntime + (clock() - t)
t = clock()
lfKnnResults.append( _knnResult( np.random.permutation(naBoth) ) )
cKnnRuntime = cKnnRuntime + (clock() - t)
naBoth[500:,0] = naBoth[500:,0] - .1
        print('Runtime OneKnn:', cOneRuntime)
        print('Runtime 5-KNN:', cKnnRuntime)
_plotResults( naTest1, naTest2, lfResults, lfKnnResults )
''' Tests performance of 1-KNN '''
def _test2():
''' Generate three random samples to show the value of 1-KNN compared to 5KNN learner performance '''
np.random.seed( 12345 )
''' Create 5 distributions for each of the 5 attributes '''
dist1 = np.random.uniform( -1, 1, 1000 ).reshape( -1, 1 )
dist2 = np.random.uniform( -1, 1, 1000 ).reshape( -1, 1 )
dist3 = np.random.uniform( -1, 1, 1000 ).reshape( -1, 1 )
dist4 = np.random.uniform( -1, 1, 1000 ).reshape( -1, 1 )
dist5 = np.random.uniform( -1, 1, 1000 ).reshape( -1, 1 )
lDists = [ dist1, dist2, dist3, dist4, dist5 ]
''' All features used except for distribution 4 '''
distY = np.sin( dist1 ) + np.sin( dist2 ) + np.sin( dist3 ) + np.sin( dist5 )
distY = distY.reshape( -1, 1 )
for i, fVal in enumerate( distY ):
if fVal >= 0:
distY[i] = 1
else:
distY[i] = 0
for i in range( 1, 6 ):
lsNames = []
lf1Vals = []
lfVals = []
for perm in itertools.combinations( '12345', i ):
''' set test distribution to first element '''
naTest = lDists[ int(perm[0]) - 1 ]
sPerm = perm[0]
''' stack other distributions on '''
for j in range( 1, len(perm) ):
sPerm = sPerm + str(perm[j])
naTest = np.hstack( (naTest, lDists[ int(perm[j]) - 1 ] ) )
''' finally stack y values '''
naTest = np.hstack( (naTest, distY) )
lf1Vals.append( oneKnn( naTest ) )
lfVals.append( _knnResult( np.random.permutation(naTest) ) )
lsNames.append( sPerm )
''' Plot results '''
plt1 = plt.bar( np.arange(len(lf1Vals)), lf1Vals, .2, color='r' )
plt2 = plt.bar( np.arange(len(lfVals)) + 0.2, lfVals, .2, color='b' )
plt.legend( (plt1[0], plt2[0]), ('1-KNN', 'KNN, K=5') )
plt.ylabel('1-KNN Value/KNN Classification')
plt.xlabel('Feature Set')
plt.title('Combinations of ' + str(i) + ' Features')
plt.ylim( (0,1) )
if len(lf1Vals) < 2:
plt.xlim( (-1,1) )
gca().xaxis.set_ticks( np.arange(len(lf1Vals)) + .2 )
gca().xaxis.set_ticklabels( lsNames )
plt.show()
if __name__ == '__main__':
_test1()
#_test2()
| [] |
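`oneKnn` above computes, in an O(n²) double loop, the fraction of points whose nearest neighbour carries the same class label. The same statistic vectorized with NumPy (a sketch: it trades the loop for O(n²) memory, and ties may resolve to a different neighbour than the loop picks):

```python
import numpy as np

def one_knn_vectorized(data):
    # data: one point per row, last column holds the class label.
    X, y = data[:, :-1], data[:, -1]
    d2 = ((X[:, None, :] - X[None, :, :]) ** 2).sum(axis=-1)  # pairwise squared distances
    np.fill_diagonal(d2, np.inf)  # a point is not its own neighbour
    nearest = d2.argmin(axis=1)
    return float((y == y[nearest]).mean())
```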
robert-anderson/pyscf | pyscf/nao/m_comp_coulomb_pack.py | cdc56e168cb15f47e8cdc791a92d689fa9b655af | # Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, division
from pyscf.nao.m_coulomb_am import coulomb_am
import numpy as np
try:
import numba as nb
from pyscf.nao.m_numba_utils import fill_triu_v2, fill_tril
use_numba = True
except:
use_numba = False
#
#
#
def comp_coulomb_pack(sv, ao_log=None, funct=coulomb_am, dtype=np.float64, **kvargs):
"""
Computes the matrix elements given by funct, for instance coulomb interaction
Args:
sv : (System Variables), this must have arrays of coordinates and species, etc
ao_log : description of functions (either orbitals or product basis functions)
Returns:
matrix elements for the whole system in packed form (lower triangular part)
"""
from pyscf.nao.m_ao_matelem import ao_matelem_c
from pyscf.nao.m_pack2den import ij2pack_l
aome = ao_matelem_c(sv.ao_log.rr, sv.ao_log.pp)
me = ao_matelem_c(sv.ao_log) if ao_log is None else aome.init_one_set(ao_log)
atom2s = np.zeros((sv.natm+1), dtype=np.int64)
for atom,sp in enumerate(sv.atom2sp): atom2s[atom+1]=atom2s[atom]+me.ao1.sp2norbs[sp]
norbs = atom2s[-1]
res = np.zeros(norbs*(norbs+1)//2, dtype=dtype)
for atom1,[sp1,rv1,s1,f1] in enumerate(zip(sv.atom2sp,sv.atom2coord,atom2s,atom2s[1:])):
#print("atom1 = {0}, rv1 = {1}".format(atom1, rv1))
for atom2,[sp2,rv2,s2,f2] in enumerate(zip(sv.atom2sp,sv.atom2coord,atom2s,atom2s[1:])):
if atom2>atom1: continue # skip
oo2f = funct(me,sp1,rv1,sp2,rv2, **kvargs)
if use_numba:
fill_triu_v2(oo2f, res, s1, f1, s2, f2, norbs)
else:
for i1 in range(s1,f1):
for i2 in range(s2, min(i1+1, f2)):
res[ij2pack_l(i1,i2,norbs)] = oo2f[i1-s1,i2-s2]
#print("number call = ", count)
#print("sum kernel: {0:.6f}".format(np.sum(abs(res))))
#np.savetxt("kernel_pyscf.txt", res)
#import sys
#sys.exit()
return res, norbs
| [((1437, 1477), 'pyscf.nao.m_ao_matelem.ao_matelem_c', 'ao_matelem_c', (['sv.ao_log.rr', 'sv.ao_log.pp'], {}), '(sv.ao_log.rr, sv.ao_log.pp)\n', (1449, 1477), False, 'from pyscf.nao.m_ao_matelem import ao_matelem_c\n'), ((1569, 1606), 'numpy.zeros', 'np.zeros', (['(sv.natm + 1)'], {'dtype': 'np.int64'}), '(sv.natm + 1, dtype=np.int64)\n', (1577, 1606), True, 'import numpy as np\n'), ((1725, 1772), 'numpy.zeros', 'np.zeros', (['(norbs * (norbs + 1) // 2)'], {'dtype': 'dtype'}), '(norbs * (norbs + 1) // 2, dtype=dtype)\n', (1733, 1772), True, 'import numpy as np\n'), ((1485, 1508), 'pyscf.nao.m_ao_matelem.ao_matelem_c', 'ao_matelem_c', (['sv.ao_log'], {}), '(sv.ao_log)\n', (1497, 1508), False, 'from pyscf.nao.m_ao_matelem import ao_matelem_c\n'), ((2126, 2172), 'pyscf.nao.m_numba_utils.fill_triu_v2', 'fill_triu_v2', (['oo2f', 'res', 's1', 'f1', 's2', 'f2', 'norbs'], {}), '(oo2f, res, s1, f1, s2, f2, norbs)\n', (2138, 2172), False, 'from pyscf.nao.m_numba_utils import fill_triu_v2, fill_tril\n'), ((2285, 2309), 'pyscf.nao.m_pack2den.ij2pack_l', 'ij2pack_l', (['i1', 'i2', 'norbs'], {}), '(i1, i2, norbs)\n', (2294, 2309), False, 'from pyscf.nao.m_pack2den import ij2pack_l\n')] |
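`comp_coulomb_pack` stores only the lower triangle of the symmetric matrix, indexed through `ij2pack_l`. One common packing convention for the lower triangle (row-major, i >= j) looks like the sketch below; pyscf's `ij2pack_l` also takes `norbs`, so its exact layout may differ:

```python
def ij2pack(i, j):
    # Row-major packed index for the lower triangle, assuming i >= j:
    # row i contributes i + 1 entries, so row offsets run 0, 1, 3, 6, ...
    return i * (i + 1) // 2 + j

assert [ij2pack(i, j) for i in range(3) for j in range(i + 1)] == [0, 1, 2, 3, 4, 5]
```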
panguan737/nova | nova/tests/unit/test_service_auth.py | 0d177185a439baa228b42c948cab4e934d6ac7b8 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneauth1 import loading as ks_loading
from keystoneauth1 import service_token
import mock
import nova.conf
from nova import context
from nova import service_auth
from nova import test
CONF = nova.conf.CONF
class ServiceAuthTestCase(test.NoDBTestCase):
def setUp(self):
super(ServiceAuthTestCase, self).setUp()
self.ctx = context.RequestContext('fake', 'fake')
self.addCleanup(service_auth.reset_globals)
@mock.patch.object(ks_loading, 'load_auth_from_conf_options')
def test_get_auth_plugin_no_wraps(self, mock_load):
context = mock.MagicMock()
context.get_auth_plugin.return_value = "fake"
result = service_auth.get_auth_plugin(context)
self.assertEqual("fake", result)
mock_load.assert_not_called()
@mock.patch.object(ks_loading, 'load_auth_from_conf_options')
def test_get_auth_plugin_wraps(self, mock_load):
self.flags(send_service_user_token=True, group='service_user')
result = service_auth.get_auth_plugin(self.ctx)
self.assertIsInstance(result, service_token.ServiceTokenAuthWrapper)
@mock.patch.object(ks_loading, 'load_auth_from_conf_options',
return_value=None)
def test_get_auth_plugin_wraps_bad_config(self, mock_load):
"""Tests the case that send_service_user_token is True but there
is some misconfiguration with the [service_user] section which makes
KSA return None for the service user auth.
"""
self.flags(send_service_user_token=True, group='service_user')
result = service_auth.get_auth_plugin(self.ctx)
self.assertEqual(1, mock_load.call_count)
self.assertNotIsInstance(result, service_token.ServiceTokenAuthWrapper)
| [((1027, 1087), 'mock.patch.object', 'mock.patch.object', (['ks_loading', '"""load_auth_from_conf_options"""'], {}), "(ks_loading, 'load_auth_from_conf_options')\n", (1044, 1087), False, 'import mock\n'), ((1375, 1435), 'mock.patch.object', 'mock.patch.object', (['ks_loading', '"""load_auth_from_conf_options"""'], {}), "(ks_loading, 'load_auth_from_conf_options')\n", (1392, 1435), False, 'import mock\n'), ((1701, 1780), 'mock.patch.object', 'mock.patch.object', (['ks_loading', '"""load_auth_from_conf_options"""'], {'return_value': 'None'}), "(ks_loading, 'load_auth_from_conf_options', return_value=None)\n", (1718, 1780), False, 'import mock\n'), ((930, 968), 'nova.context.RequestContext', 'context.RequestContext', (['"""fake"""', '"""fake"""'], {}), "('fake', 'fake')\n", (952, 968), False, 'from nova import context\n'), ((1162, 1178), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1176, 1178), False, 'import mock\n'), ((1251, 1288), 'nova.service_auth.get_auth_plugin', 'service_auth.get_auth_plugin', (['context'], {}), '(context)\n', (1279, 1288), False, 'from nova import service_auth\n'), ((1578, 1616), 'nova.service_auth.get_auth_plugin', 'service_auth.get_auth_plugin', (['self.ctx'], {}), '(self.ctx)\n', (1606, 1616), False, 'from nova import service_auth\n'), ((2169, 2207), 'nova.service_auth.get_auth_plugin', 'service_auth.get_auth_plugin', (['self.ctx'], {}), '(self.ctx)\n', (2197, 2207), False, 'from nova import service_auth\n')] |
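The wrapping these tests exercise comes from keystoneauth1: when `send_service_user_token` is enabled, the user's auth plugin is combined with a service-user plugin so outgoing requests carry both tokens. A sketch of that composition (parameter names as I recall keystoneauth1's API; treat them as an assumption):

```python
from keystoneauth1 import service_token

def wrap_with_service_auth(user_auth, service_auth_plugin):
    # Requests made through the wrapper send the user's token plus an
    # X-Service-Token header derived from the service plugin.
    return service_token.ServiceTokenAuthWrapper(
        user_auth=user_auth, service_auth=service_auth_plugin)
```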
LittleWat/MCD_DA | classification/model/build_gen.py | 37cb1bc38c203702e22c7c0c37e284d0294714fb | import svhn2mnist
import usps
import syn2gtrsb
import syndig2svhn
def Generator(source, target, pixelda=False):
if source == 'usps' or target == 'usps':
return usps.Feature()
elif source == 'svhn':
return svhn2mnist.Feature()
elif source == 'synth':
return syn2gtrsb.Feature()
def Classifier(source, target):
if source == 'usps' or target == 'usps':
return usps.Predictor()
if source == 'svhn':
return svhn2mnist.Predictor()
if source == 'synth':
return syn2gtrsb.Predictor()
| [((173, 187), 'usps.Feature', 'usps.Feature', ([], {}), '()\n', (185, 187), False, 'import usps\n'), ((408, 424), 'usps.Predictor', 'usps.Predictor', ([], {}), '()\n', (422, 424), False, 'import usps\n'), ((465, 487), 'svhn2mnist.Predictor', 'svhn2mnist.Predictor', ([], {}), '()\n', (485, 487), False, 'import svhn2mnist\n'), ((529, 550), 'syn2gtrsb.Predictor', 'syn2gtrsb.Predictor', ([], {}), '()\n', (548, 550), False, 'import syn2gtrsb\n'), ((230, 250), 'svhn2mnist.Feature', 'svhn2mnist.Feature', ([], {}), '()\n', (248, 250), False, 'import svhn2mnist\n'), ((294, 313), 'syn2gtrsb.Feature', 'syn2gtrsb.Feature', ([], {}), '()\n', (311, 313), False, 'import syn2gtrsb\n')] |
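`build_gen` is a small factory: callers name the source/target domains and get matching feature-extractor and classifier networks back. Usage would look roughly like this (assuming the `svhn2mnist` etc. modules are importable):

```python
G = Generator(source='svhn', target='mnist')    # svhn2mnist.Feature()
C1 = Classifier(source='svhn', target='mnist')  # svhn2mnist.Predictor()
C2 = Classifier(source='svhn', target='mnist')  # MCD trains two classifiers
```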
pfnet-research/deep-table | deep_table/nn/models/loss/info_nce_loss.py | a19c0c3048484017d5f24806604c3b3470bcf550 | import torch
from torch import Tensor
from torch.nn.modules.loss import _Loss
class InfoNCELoss(_Loss):
"""Info NCE Loss. A type of contrastive loss function used for self-supervised learning.
References:
A. Oord, Y. Li, and O. Vinyals,
"Representation Learning with Contrastive Predictive Coding,"
ArXiv:1807.03748 [cs.LG], 2018. <https://arxiv.org/abs/1807.03748v2>
"""
def __init__(self, reduction: str = "sum") -> None:
"""
Args:
reduction (str)
"""
super().__init__(reduction=reduction)
self.reduction = reduction
def forward(self, z_origin: Tensor, z_noisy: Tensor, t: float = 0.7) -> Tensor:
sim = cos_sim_matrix(z_origin, z_noisy)
exp_sim = torch.exp(sim / t)
loss = -torch.log(torch.diagonal(exp_sim) / exp_sim.sum(1))
if self.reduction == "sum":
loss = loss.sum()
elif self.reduction == "mean":
loss = loss.mean()
return loss
def cos_sim_matrix(a: Tensor, b: Tensor, eps: float = 1e-8) -> Tensor:
a_n, b_n = a.norm(dim=1), b.norm(dim=1)
a_norm = a / torch.clamp(a_n.unsqueeze(1), min=eps)
b_norm = b / torch.clamp(b_n.unsqueeze(1), min=eps)
sim_matrix = torch.mm(a_norm, b_norm.transpose(0, 1))
return sim_matrix
| [((766, 784), 'torch.exp', 'torch.exp', (['(sim / t)'], {}), '(sim / t)\n', (775, 784), False, 'import torch\n'), ((811, 834), 'torch.diagonal', 'torch.diagonal', (['exp_sim'], {}), '(exp_sim)\n', (825, 834), False, 'import torch\n')] |
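A minimal usage sketch for the loss above: feed it two batches of embeddings, one from the original rows and one from their corrupted views (shapes are illustrative):

```python
import torch

loss_fn = InfoNCELoss(reduction="mean")
z_origin = torch.randn(32, 128, requires_grad=True)  # embeddings of the original samples
z_noisy = torch.randn(32, 128, requires_grad=True)   # embeddings of their noisy views
loss = loss_fn(z_origin, z_noisy, t=0.7)
loss.backward()
```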
korkin25/patroni | patroni/config.py | 333d41d9f039b5a799940c8a6fbc75dcbe0e9a31 | import json
import logging
import os
import shutil
import tempfile
import yaml
from collections import defaultdict
from copy import deepcopy
from patroni import PATRONI_ENV_PREFIX
from patroni.exceptions import ConfigParseError
from patroni.dcs import ClusterConfig
from patroni.postgresql.config import CaseInsensitiveDict, ConfigHandler
from patroni.utils import deep_compare, parse_bool, parse_int, patch_config
logger = logging.getLogger(__name__)
_AUTH_ALLOWED_PARAMETERS = (
'username',
'password',
'sslmode',
'sslcert',
'sslkey',
'sslpassword',
'sslrootcert',
'sslcrl',
'sslcrldir',
'gssencmode',
'channel_binding'
)
def default_validator(conf):
if not conf:
return "Config is empty."
class Config(object):
"""
This class is responsible for:
1) Building and giving access to `effective_configuration` from:
* `Config.__DEFAULT_CONFIG` -- some sane default values
* `dynamic_configuration` -- configuration stored in DCS
* `local_configuration` -- configuration from `config.yml` or environment
2) Saving and loading `dynamic_configuration` into 'patroni.dynamic.json' file
located in local_configuration['postgresql']['data_dir'] directory.
This is necessary to be able to restore `dynamic_configuration`
if DCS was accidentally wiped
3) Loading of configuration file in the old format and converting it into new format
4) Mimicking some of the `dict` interfaces to make it possible
to work with it as with the old `config` object.
"""
PATRONI_CONFIG_VARIABLE = PATRONI_ENV_PREFIX + 'CONFIGURATION'
__CACHE_FILENAME = 'patroni.dynamic.json'
__DEFAULT_CONFIG = {
'ttl': 30, 'loop_wait': 10, 'retry_timeout': 10,
'maximum_lag_on_failover': 1048576,
'maximum_lag_on_syncnode': -1,
'check_timeline': False,
'master_start_timeout': 300,
'master_stop_timeout': 0,
'synchronous_mode': False,
'synchronous_mode_strict': False,
'synchronous_node_count': 1,
'standby_cluster': {
'create_replica_methods': '',
'host': '',
'port': '',
'primary_slot_name': '',
'restore_command': '',
'archive_cleanup_command': '',
'recovery_min_apply_delay': ''
},
'postgresql': {
'bin_dir': '',
'use_slots': True,
'parameters': CaseInsensitiveDict({p: v[0] for p, v in ConfigHandler.CMDLINE_OPTIONS.items()
if p not in ('wal_keep_segments', 'wal_keep_size')})
},
'watchdog': {
'mode': 'automatic',
}
}
def __init__(self, configfile, validator=default_validator):
self._modify_index = -1
self._dynamic_configuration = {}
self.__environment_configuration = self._build_environment_configuration()
# Patroni reads the configuration from the command-line argument if it exists, otherwise from the environment
self._config_file = configfile and os.path.exists(configfile) and configfile
if self._config_file:
self._local_configuration = self._load_config_file()
else:
config_env = os.environ.pop(self.PATRONI_CONFIG_VARIABLE, None)
self._local_configuration = config_env and yaml.safe_load(config_env) or self.__environment_configuration
if validator:
error = validator(self._local_configuration)
if error:
raise ConfigParseError(error)
self.__effective_configuration = self._build_effective_configuration({}, self._local_configuration)
self._data_dir = self.__effective_configuration.get('postgresql', {}).get('data_dir', "")
self._cache_file = os.path.join(self._data_dir, self.__CACHE_FILENAME)
self._load_cache()
self._cache_needs_saving = False
@property
def config_file(self):
return self._config_file
@property
def dynamic_configuration(self):
return deepcopy(self._dynamic_configuration)
def check_mode(self, mode):
return bool(parse_bool(self._dynamic_configuration.get(mode)))
def _load_config_path(self, path):
"""
If path is a file, loads the yml file pointed to by path.
If path is a directory, loads all yml files in that directory in alphabetical order
"""
if os.path.isfile(path):
files = [path]
elif os.path.isdir(path):
files = [os.path.join(path, f) for f in sorted(os.listdir(path))
if (f.endswith('.yml') or f.endswith('.yaml')) and os.path.isfile(os.path.join(path, f))]
else:
logger.error('config path %s is neither directory nor file', path)
raise ConfigParseError('invalid config path')
overall_config = {}
for fname in files:
with open(fname) as f:
config = yaml.safe_load(f)
patch_config(overall_config, config)
return overall_config
def _load_config_file(self):
"""Loads config.yaml from filesystem and applies some values which were set via ENV"""
config = self._load_config_path(self._config_file)
patch_config(config, self.__environment_configuration)
return config
def _load_cache(self):
if os.path.isfile(self._cache_file):
try:
with open(self._cache_file) as f:
self.set_dynamic_configuration(json.load(f))
except Exception:
logger.exception('Exception when loading file: %s', self._cache_file)
def save_cache(self):
if self._cache_needs_saving:
tmpfile = fd = None
try:
(fd, tmpfile) = tempfile.mkstemp(prefix=self.__CACHE_FILENAME, dir=self._data_dir)
with os.fdopen(fd, 'w') as f:
fd = None
json.dump(self.dynamic_configuration, f)
tmpfile = shutil.move(tmpfile, self._cache_file)
self._cache_needs_saving = False
except Exception:
logger.exception('Exception when saving file: %s', self._cache_file)
if fd:
try:
os.close(fd)
except Exception:
logger.error('Can not close temporary file %s', tmpfile)
if tmpfile and os.path.exists(tmpfile):
try:
os.remove(tmpfile)
except Exception:
logger.error('Can not remove temporary file %s', tmpfile)
# configuration could be either ClusterConfig or dict
def set_dynamic_configuration(self, configuration):
if isinstance(configuration, ClusterConfig):
if self._modify_index == configuration.modify_index:
return False # If the index didn't changed there is nothing to do
self._modify_index = configuration.modify_index
configuration = configuration.data
if not deep_compare(self._dynamic_configuration, configuration):
try:
self.__effective_configuration = self._build_effective_configuration(configuration,
self._local_configuration)
self._dynamic_configuration = configuration
self._cache_needs_saving = True
return True
except Exception:
logger.exception('Exception when setting dynamic_configuration')
def reload_local_configuration(self):
if self.config_file:
try:
configuration = self._load_config_file()
if not deep_compare(self._local_configuration, configuration):
new_configuration = self._build_effective_configuration(self._dynamic_configuration, configuration)
self._local_configuration = configuration
self.__effective_configuration = new_configuration
return True
else:
logger.info('No local configuration items changed.')
except Exception:
logger.exception('Exception when reloading local configuration from %s', self.config_file)
@staticmethod
def _process_postgresql_parameters(parameters, is_local=False):
return {name: value for name, value in (parameters or {}).items()
if name not in ConfigHandler.CMDLINE_OPTIONS or
not is_local and ConfigHandler.CMDLINE_OPTIONS[name][1](value)}
def _safe_copy_dynamic_configuration(self, dynamic_configuration):
config = deepcopy(self.__DEFAULT_CONFIG)
for name, value in dynamic_configuration.items():
if name == 'postgresql':
for name, value in (value or {}).items():
if name == 'parameters':
config['postgresql'][name].update(self._process_postgresql_parameters(value))
elif name not in ('connect_address', 'listen', 'data_dir', 'pgpass', 'authentication'):
config['postgresql'][name] = deepcopy(value)
elif name == 'standby_cluster':
for name, value in (value or {}).items():
if name in self.__DEFAULT_CONFIG['standby_cluster']:
config['standby_cluster'][name] = deepcopy(value)
elif name in config: # only variables present in __DEFAULT_CONFIG allowed to be overridden from DCS
if name in ('synchronous_mode', 'synchronous_mode_strict'):
config[name] = value
else:
config[name] = int(value)
return config
@staticmethod
def _build_environment_configuration():
ret = defaultdict(dict)
def _popenv(name):
return os.environ.pop(PATRONI_ENV_PREFIX + name.upper(), None)
for param in ('name', 'namespace', 'scope'):
value = _popenv(param)
if value:
ret[param] = value
def _fix_log_env(name, oldname):
value = _popenv(oldname)
name = PATRONI_ENV_PREFIX + 'LOG_' + name.upper()
if value and name not in os.environ:
os.environ[name] = value
for name, oldname in (('level', 'loglevel'), ('format', 'logformat'), ('dateformat', 'log_datefmt')):
_fix_log_env(name, oldname)
def _set_section_values(section, params):
for param in params:
value = _popenv(section + '_' + param)
if value:
ret[section][param] = value
_set_section_values('restapi', ['listen', 'connect_address', 'certfile', 'keyfile', 'keyfile_password',
'cafile', 'ciphers', 'verify_client', 'http_extra_headers',
'https_extra_headers', 'allowlist', 'allowlist_include_members'])
_set_section_values('ctl', ['insecure', 'cacert', 'certfile', 'keyfile', 'keyfile_password'])
_set_section_values('postgresql', ['listen', 'connect_address', 'config_dir', 'data_dir', 'pgpass', 'bin_dir'])
_set_section_values('log', ['level', 'traceback_level', 'format', 'dateformat', 'max_queue_size',
'dir', 'file_size', 'file_num', 'loggers'])
_set_section_values('raft', ['data_dir', 'self_addr', 'partner_addrs', 'password', 'bind_addr'])
for first, second in (('restapi', 'allowlist_include_members'), ('ctl', 'insecure')):
value = ret.get(first, {}).pop(second, None)
if value:
value = parse_bool(value)
if value is not None:
ret[first][second] = value
for second in ('max_queue_size', 'file_size', 'file_num'):
value = ret.get('log', {}).pop(second, None)
if value:
value = parse_int(value)
if value is not None:
ret['log'][second] = value
def _parse_list(value):
if not (value.strip().startswith('-') or '[' in value):
value = '[{0}]'.format(value)
try:
return yaml.safe_load(value)
except Exception:
logger.exception('Exception when parsing list %s', value)
return None
for first, second in (('raft', 'partner_addrs'), ('restapi', 'allowlist')):
value = ret.get(first, {}).pop(second, None)
if value:
value = _parse_list(value)
if value:
ret[first][second] = value
def _parse_dict(value):
if not value.strip().startswith('{'):
value = '{{{0}}}'.format(value)
try:
return yaml.safe_load(value)
except Exception:
logger.exception('Exception when parsing dict %s', value)
return None
for first, params in (('restapi', ('http_extra_headers', 'https_extra_headers')), ('log', ('loggers',))):
for second in params:
value = ret.get(first, {}).pop(second, None)
if value:
value = _parse_dict(value)
if value:
ret[first][second] = value
def _get_auth(name, params=None):
ret = {}
for param in params or _AUTH_ALLOWED_PARAMETERS[:2]:
value = _popenv(name + '_' + param)
if value:
ret[param] = value
return ret
restapi_auth = _get_auth('restapi')
if restapi_auth:
ret['restapi']['authentication'] = restapi_auth
authentication = {}
for user_type in ('replication', 'superuser', 'rewind'):
entry = _get_auth(user_type, _AUTH_ALLOWED_PARAMETERS)
if entry:
authentication[user_type] = entry
if authentication:
ret['postgresql']['authentication'] = authentication
for param in list(os.environ.keys()):
if param.startswith(PATRONI_ENV_PREFIX):
# PATRONI_(ETCD|CONSUL|ZOOKEEPER|EXHIBITOR|...)_(HOSTS?|PORT|..)
name, suffix = (param[8:].split('_', 1) + [''])[:2]
if suffix in ('HOST', 'HOSTS', 'PORT', 'USE_PROXIES', 'PROTOCOL', 'SRV', 'SRV_SUFFIX', 'URL', 'PROXY',
'CACERT', 'CERT', 'KEY', 'VERIFY', 'TOKEN', 'CHECKS', 'DC', 'CONSISTENCY',
'REGISTER_SERVICE', 'SERVICE_CHECK_INTERVAL', 'NAMESPACE', 'CONTEXT',
'USE_ENDPOINTS', 'SCOPE_LABEL', 'ROLE_LABEL', 'POD_IP', 'PORTS', 'LABELS',
'BYPASS_API_SERVICE', 'KEY_PASSWORD', 'USE_SSL', 'SET_ACLS') and name:
value = os.environ.pop(param)
if suffix == 'PORT':
value = value and parse_int(value)
elif suffix in ('HOSTS', 'PORTS', 'CHECKS'):
value = value and _parse_list(value)
elif suffix in ('LABELS', 'SET_ACLS'):
value = _parse_dict(value)
elif suffix in ('USE_PROXIES', 'REGISTER_SERVICE', 'USE_ENDPOINTS', 'BYPASS_API_SERVICE', 'VERIFY'):
value = parse_bool(value)
if value:
ret[name.lower()][suffix.lower()] = value
for dcs in ('etcd', 'etcd3'):
if dcs in ret:
ret[dcs].update(_get_auth(dcs))
users = {}
for param in list(os.environ.keys()):
if param.startswith(PATRONI_ENV_PREFIX):
name, suffix = (param[8:].rsplit('_', 1) + [''])[:2]
# PATRONI_<username>_PASSWORD=<password>, PATRONI_<username>_OPTIONS=<option1,option2,...>
# CREATE USER "<username>" WITH <OPTIONS> PASSWORD '<password>'
if name and suffix == 'PASSWORD':
password = os.environ.pop(param)
if password:
users[name] = {'password': password}
options = os.environ.pop(param[:-9] + '_OPTIONS', None)
options = options and _parse_list(options)
if options:
users[name]['options'] = options
if users:
ret['bootstrap']['users'] = users
return ret
def _build_effective_configuration(self, dynamic_configuration, local_configuration):
config = self._safe_copy_dynamic_configuration(dynamic_configuration)
for name, value in local_configuration.items():
if name == 'postgresql':
for name, value in (value or {}).items():
if name == 'parameters':
config['postgresql'][name].update(self._process_postgresql_parameters(value, True))
elif name != 'use_slots': # replication slots must be enabled/disabled globally
config['postgresql'][name] = deepcopy(value)
elif name not in config or name in ['watchdog']:
config[name] = deepcopy(value) if value else {}
# restapi server expects to get restapi.auth = 'username:password'
if 'restapi' in config and 'authentication' in config['restapi']:
config['restapi']['auth'] = '{username}:{password}'.format(**config['restapi']['authentication'])
# special treatment for old config
# 'exhibitor' inside 'zookeeper':
if 'zookeeper' in config and 'exhibitor' in config['zookeeper']:
config['exhibitor'] = config['zookeeper'].pop('exhibitor')
config.pop('zookeeper')
pg_config = config['postgresql']
# no 'authentication' in 'postgresql', but 'replication' and 'superuser'
if 'authentication' not in pg_config:
pg_config['use_pg_rewind'] = 'pg_rewind' in pg_config
pg_config['authentication'] = {u: pg_config[u] for u in ('replication', 'superuser') if u in pg_config}
# no 'superuser' in 'postgresql'.'authentication'
if 'superuser' not in pg_config['authentication'] and 'pg_rewind' in pg_config:
pg_config['authentication']['superuser'] = pg_config['pg_rewind']
# handle setting additional connection parameters that may be available
# in the configuration file, such as SSL connection parameters
for name, value in pg_config['authentication'].items():
pg_config['authentication'][name] = {n: v for n, v in value.items() if n in _AUTH_ALLOWED_PARAMETERS}
# no 'name' in config
if 'name' not in config and 'name' in pg_config:
config['name'] = pg_config['name']
updated_fields = (
'name',
'scope',
'retry_timeout',
'synchronous_mode',
'synchronous_mode_strict',
'synchronous_node_count',
'maximum_lag_on_syncnode'
)
pg_config.update({p: config[p] for p in updated_fields if p in config})
return config
def get(self, key, default=None):
return self.__effective_configuration.get(key, default)
def __contains__(self, key):
return key in self.__effective_configuration
def __getitem__(self, key):
return self.__effective_configuration[key]
def copy(self):
return deepcopy(self.__effective_configuration)
| [((426, 453), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (443, 453), False, 'import logging\n'), ((3875, 3926), 'os.path.join', 'os.path.join', (['self._data_dir', 'self.__CACHE_FILENAME'], {}), '(self._data_dir, self.__CACHE_FILENAME)\n', (3887, 3926), False, 'import os\n'), ((4137, 4174), 'copy.deepcopy', 'deepcopy', (['self._dynamic_configuration'], {}), '(self._dynamic_configuration)\n', (4145, 4174), False, 'from copy import deepcopy\n'), ((4512, 4532), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (4526, 4532), False, 'import os\n'), ((5348, 5402), 'patroni.utils.patch_config', 'patch_config', (['config', 'self.__environment_configuration'], {}), '(config, self.__environment_configuration)\n', (5360, 5402), False, 'from patroni.utils import deep_compare, parse_bool, parse_int, patch_config\n'), ((5464, 5496), 'os.path.isfile', 'os.path.isfile', (['self._cache_file'], {}), '(self._cache_file)\n', (5478, 5496), False, 'import os\n'), ((8881, 8912), 'copy.deepcopy', 'deepcopy', (['self.__DEFAULT_CONFIG'], {}), '(self.__DEFAULT_CONFIG)\n', (8889, 8912), False, 'from copy import deepcopy\n'), ((10037, 10054), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (10048, 10054), False, 'from collections import defaultdict\n'), ((19789, 19829), 'copy.deepcopy', 'deepcopy', (['self.__effective_configuration'], {}), '(self.__effective_configuration)\n', (19797, 19829), False, 'from copy import deepcopy\n'), ((3149, 3175), 'os.path.exists', 'os.path.exists', (['configfile'], {}), '(configfile)\n', (3163, 3175), False, 'import os\n'), ((3325, 3375), 'os.environ.pop', 'os.environ.pop', (['self.PATRONI_CONFIG_VARIABLE', 'None'], {}), '(self.PATRONI_CONFIG_VARIABLE, None)\n', (3339, 3375), False, 'import os\n'), ((4574, 4593), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (4587, 4593), False, 'import os\n'), ((7211, 7267), 'patroni.utils.deep_compare', 'deep_compare', (['self._dynamic_configuration', 'configuration'], {}), '(self._dynamic_configuration, configuration)\n', (7223, 7267), False, 'from patroni.utils import deep_compare, parse_bool, parse_int, patch_config\n'), ((14367, 14384), 'os.environ.keys', 'os.environ.keys', ([], {}), '()\n', (14382, 14384), False, 'import os\n'), ((15931, 15948), 'os.environ.keys', 'os.environ.keys', ([], {}), '()\n', (15946, 15948), False, 'import os\n'), ((3617, 3640), 'patroni.exceptions.ConfigParseError', 'ConfigParseError', (['error'], {}), '(error)\n', (3633, 3640), False, 'from patroni.exceptions import ConfigParseError\n'), ((4894, 4933), 'patroni.exceptions.ConfigParseError', 'ConfigParseError', (['"""invalid config path"""'], {}), "('invalid config path')\n", (4910, 4933), False, 'from patroni.exceptions import ConfigParseError\n'), ((5051, 5068), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (5065, 5068), False, 'import yaml\n'), ((5085, 5121), 'patroni.utils.patch_config', 'patch_config', (['overall_config', 'config'], {}), '(overall_config, config)\n', (5097, 5121), False, 'from patroni.utils import deep_compare, parse_bool, parse_int, patch_config\n'), ((5891, 5957), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'prefix': 'self.__CACHE_FILENAME', 'dir': 'self._data_dir'}), '(prefix=self.__CACHE_FILENAME, dir=self._data_dir)\n', (5907, 5957), False, 'import tempfile\n'), ((6121, 6159), 'shutil.move', 'shutil.move', (['tmpfile', 'self._cache_file'], {}), '(tmpfile, self._cache_file)\n', (6132, 6159), False, 'import shutil\n'), ((11929, 11946), 
'patroni.utils.parse_bool', 'parse_bool', (['value'], {}), '(value)\n', (11939, 11946), False, 'from patroni.utils import deep_compare, parse_bool, parse_int, patch_config\n'), ((12203, 12219), 'patroni.utils.parse_int', 'parse_int', (['value'], {}), '(value)\n', (12212, 12219), False, 'from patroni.utils import deep_compare, parse_bool, parse_int, patch_config\n'), ((12492, 12513), 'yaml.safe_load', 'yaml.safe_load', (['value'], {}), '(value)\n', (12506, 12513), False, 'import yaml\n'), ((13097, 13118), 'yaml.safe_load', 'yaml.safe_load', (['value'], {}), '(value)\n', (13111, 13118), False, 'import yaml\n'), ((3431, 3457), 'yaml.safe_load', 'yaml.safe_load', (['config_env'], {}), '(config_env)\n', (3445, 3457), False, 'import yaml\n'), ((4616, 4637), 'os.path.join', 'os.path.join', (['path', 'f'], {}), '(path, f)\n', (4628, 4637), False, 'import os\n'), ((5979, 5997), 'os.fdopen', 'os.fdopen', (['fd', '"""w"""'], {}), "(fd, 'w')\n", (5988, 5997), False, 'import os\n'), ((6054, 6094), 'json.dump', 'json.dump', (['self.dynamic_configuration', 'f'], {}), '(self.dynamic_configuration, f)\n', (6063, 6094), False, 'import json\n'), ((7914, 7968), 'patroni.utils.deep_compare', 'deep_compare', (['self._local_configuration', 'configuration'], {}), '(self._local_configuration, configuration)\n', (7926, 7968), False, 'from patroni.utils import deep_compare, parse_bool, parse_int, patch_config\n'), ((15147, 15168), 'os.environ.pop', 'os.environ.pop', (['param'], {}), '(param)\n', (15161, 15168), False, 'import os\n'), ((16341, 16362), 'os.environ.pop', 'os.environ.pop', (['param'], {}), '(param)\n', (16355, 16362), False, 'import os\n'), ((2544, 2581), 'patroni.postgresql.config.ConfigHandler.CMDLINE_OPTIONS.items', 'ConfigHandler.CMDLINE_OPTIONS.items', ([], {}), '()\n', (2579, 2581), False, 'from patroni.postgresql.config import CaseInsensitiveDict, ConfigHandler\n'), ((5616, 5628), 'json.load', 'json.load', (['f'], {}), '(f)\n', (5625, 5628), False, 'import json\n'), ((6559, 6582), 'os.path.exists', 'os.path.exists', (['tmpfile'], {}), '(tmpfile)\n', (6573, 6582), False, 'import os\n'), ((16491, 16536), 'os.environ.pop', 'os.environ.pop', (["(param[:-9] + '_OPTIONS')", 'None'], {}), "(param[:-9] + '_OPTIONS', None)\n", (16505, 16536), False, 'import os\n'), ((17520, 17535), 'copy.deepcopy', 'deepcopy', (['value'], {}), '(value)\n', (17528, 17535), False, 'from copy import deepcopy\n'), ((4654, 4670), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (4664, 4670), False, 'import os\n'), ((6396, 6408), 'os.close', 'os.close', (['fd'], {}), '(fd)\n', (6404, 6408), False, 'import os\n'), ((6633, 6651), 'os.remove', 'os.remove', (['tmpfile'], {}), '(tmpfile)\n', (6642, 6651), False, 'import os\n'), ((9375, 9390), 'copy.deepcopy', 'deepcopy', (['value'], {}), '(value)\n', (9383, 9390), False, 'from copy import deepcopy\n'), ((9624, 9639), 'copy.deepcopy', 'deepcopy', (['value'], {}), '(value)\n', (9632, 9639), False, 'from copy import deepcopy\n'), ((15252, 15268), 'patroni.utils.parse_int', 'parse_int', (['value'], {}), '(value)\n', (15261, 15268), False, 'from patroni.utils import deep_compare, parse_bool, parse_int, patch_config\n'), ((17412, 17427), 'copy.deepcopy', 'deepcopy', (['value'], {}), '(value)\n', (17420, 17427), False, 'from copy import deepcopy\n'), ((4759, 4780), 'os.path.join', 'os.path.join', (['path', 'f'], {}), '(path, f)\n', (4771, 4780), False, 'import os\n'), ((15658, 15675), 'patroni.utils.parse_bool', 'parse_bool', (['value'], {}), '(value)\n', (15668, 15675), False, 
'from patroni.utils import deep_compare, parse_bool, parse_int, patch_config\n')] |
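The `Config` docstring above describes three layers: built-in defaults, dynamic configuration from the DCS, and local file/environment configuration, each overriding the last. That precedence reduces to a deep merge along the layers; a self-contained sketch (patroni's real merge is `patroni.utils.patch_config`, which this only approximates):

```python
from copy import deepcopy

def deep_merge(base, override):
    # Recursively overlay `override` onto `base` in place.
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(base.get(key), dict):
            deep_merge(base[key], value)
        else:
            base[key] = deepcopy(value)

def build_effective(defaults, dynamic, local):
    cfg = deepcopy(defaults)
    for layer in (dynamic, local):
        deep_merge(cfg, layer)
    return cfg
```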
fdiary/Products.CMFCore | src/Products/CMFCore/tests/test_DirectoryView.py | 361a30e0c72a15a21f88433b8d5fc49331f36728 | ##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
""" Unit tests for DirectoryView module.
"""
import sys
import unittest
import warnings
from os import mkdir
from os import remove
from os.path import join
from tempfile import mktemp
from App.config import getConfiguration
from . import _globals
from .base.dummy import DummyFolder
from .base.testcase import FSDVTest
from .base.testcase import WritableFSDVTest
class DirectoryViewPathTests(unittest.TestCase):
"""
These test that, no matter what is stored in their dirpath,
FSDV's will do their best to find an appropriate skin
and only do nothing in the case where an appropriate skin
can't be found.
"""
def setUp(self):
from Products.CMFCore.DirectoryView import addDirectoryViews
from Products.CMFCore.DirectoryView import registerDirectory
registerDirectory('fake_skins', _globals)
self.ob = DummyFolder()
addDirectoryViews(self.ob, 'fake_skins', _globals)
def test__generateKey(self):
from Products.CMFCore.DirectoryView import _generateKey
key = _generateKey('Products.CMFCore', 'tests')
self.assertEqual(key.split(':')[0], 'Products.CMFCore')
subkey = _generateKey('Products.CMFCore', 'tests\foo')
self.assertTrue(subkey.startswith(key))
def test__findProductForPath(self):
from Products.CMFCore.DirectoryView import _findProductForPath
cmfpath = sys.modules['Products.CMFCore'].__path__[0]
self.assertEqual(_findProductForPath(cmfpath),
('Products.CMFCore', ''))
cmfpath = join(cmfpath, 'tests')
self.assertEqual(_findProductForPath(cmfpath),
('Products.CMFCore', 'tests'))
def test_getDirectoryInfo(self):
skin = self.ob.fake_skin
skin.manage_properties('Products.CMFCore.tests:fake_skins/fake_skin')
self.assertTrue(hasattr(self.ob.fake_skin, 'test1'),
self.ob.fake_skin.getDirPath())
# Test we do nothing if given a really wacky path
def test_UnhandleableExpandPath(self):
file = mktemp()
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
self.ob.fake_skin.manage_properties(file)
self.assertEqual(self.ob.fake_skin.objectIds(), [])
# Check that a warning was raised.
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[-1].category, UserWarning))
text = ('DirectoryView fake_skin refers to a non-existing path %r'
% file)
self.assertTrue(text in str(w[-1].message))
# this test tests that registerDirectory creates keys in the right format.
def test_registerDirectoryKeys(self):
from Products.CMFCore.DirectoryView import _dirreg
dirs = _dirreg._directories
self.assertTrue('Products.CMFCore.tests:fake_skins/fake_skin' in dirs,
dirs.keys())
self.assertEqual(self.ob.fake_skin.getDirPath(),
'Products.CMFCore.tests:fake_skins/fake_skin')
class DirectoryViewTests(FSDVTest):
def setUp(self):
FSDVTest.setUp(self)
self._registerDirectory(self)
def test_addDirectoryViews(self):
# Test addDirectoryViews
# also test registration of directory views doesn't barf
pass
def test_DirectoryViewExists(self):
# Check DirectoryView added by addDirectoryViews
# appears as a DirectoryViewSurrogate due
# to Acquisition hackery.
from Products.CMFCore.DirectoryView import DirectoryViewSurrogate
self.assertTrue(isinstance(self.ob.fake_skin, DirectoryViewSurrogate))
def test_DirectoryViewMethod(self):
# Check if DirectoryView method works
self.assertEqual(self.ob.fake_skin.test1(), 'test1')
def test_properties(self):
# Make sure the directory view is reading properties
self.assertEqual(self.ob.fake_skin.testPT.title, 'Zope Pope')
def test_ignored(self):
# Test that "artifact" files and dirs are ignored
for name in '#test1', 'CVS', '.test1', 'test1~':
self.assertTrue(name not in self.ob.fake_skin.objectIds(),
'%s not ignored' % name)
def test_surrogate_writethrough(self):
# CMF Collector 316: It is possible to cause ZODB writes because
# setting attributes on the non-persistent surrogate writes them
# into the persistent DirectoryView as well. This is bad in situations
# where you only want to store markers and remove them before the
# transaction has ended - they never got removed because there was
# no equivalent __delattr__ on the surrogate that would clean up
# the persistent DirectoryView as well.
fs = self.ob.fake_skin
test_foo = 'My Foovalue'
fs.foo = test_foo
self.assertEqual(fs.foo, test_foo)
self.assertEqual(fs.__dict__['_real'].foo, test_foo)
del fs.foo
self.assertRaises(AttributeError, getattr, fs, 'foo')
self.assertRaises(AttributeError, getattr, fs.__dict__['_real'], 'foo')
class DirectoryViewIgnoreTests(FSDVTest):
def setUp(self):
FSDVTest.setUp(self)
self.manual_ign = ('CVS', 'SVN', 'test_manual_ignore.py')
self._registerDirectory(self, ignore=self.manual_ign)
def test_ignored(self):
# Test that "artifact" files and dirs are ignored,
# even when a custom ignore list is used; and that the
# custom ignore list is also honored
auto_ign = ('#test1', '.test1', 'test1~')
must_ignore = self.manual_ign + auto_ign + ('test_manual_ignore',)
visible = self.ob.fake_skin.objectIds()
for name in must_ignore:
self.assertFalse(name in visible)
class DirectoryViewFolderTests(FSDVTest):
def setUp(self):
FSDVTest.setUp(self)
self._registerDirectory(self)
def tearDown(self):
from Products.CMFCore import DirectoryView
# This is nasty, but there is no way to unregister anything
# right now...
metatype_registry = DirectoryView._dirreg._meta_types
if 'FOLDER' in metatype_registry:
del metatype_registry['FOLDER']
FSDVTest.tearDown(self)
def test_DirectoryViewMetadata(self):
# Test to determine if metadata shows up correctly on a
# FSDV that has a corresponding .metadata file
testfolder = self.ob.fake_skin.test_directory
self.assertEqual(testfolder.title, 'test_directory Title')
def test_DirectoryViewMetadataOnPropertyManager(self):
# Test to determine if metadata shows up correctly on a
# FSDV that has a corresponding .metadata file
testfolder = self.ob.fake_skin.test_directory
self.assertEqual(testfolder.getProperty('title'),
'test_directory Title')
def test_DirectoryViewFolderDefault(self):
# Test that a folder inside the fake skin really is of type
# DirectoryViewSurrogate
from Products.CMFCore.DirectoryView import DirectoryViewSurrogate
testfolder = self.ob.fake_skin.test_directory
self.assertTrue(isinstance(testfolder, DirectoryViewSurrogate))
def test_DirectoryViewFolderCustom(self):
# Now we register a different class under the fake meta_type
# "FOLDER" and test again...
from Products.CMFCore.DirectoryView import DirectoryView
from Products.CMFCore.DirectoryView import registerMetaType
class DummyDirectoryViewSurrogate:
pass
class DummyDirectoryView(DirectoryView):
def __of__(self, parent):
return DummyDirectoryViewSurrogate()
registerMetaType('FOLDER', DummyDirectoryView)
        # In order to regenerate the FSDV data we need to remove and
        # register again; that way the newly registered meta_type is used
self.ob._delObject('fake_skin')
self._registerDirectory(self)
testfolder = self.ob.fake_skin.test_directory
self.assertTrue(isinstance(testfolder, DummyDirectoryViewSurrogate))
class DebugModeTests(WritableFSDVTest):
def setUp(self):
from Products.CMFCore.DirectoryView import _dirreg
WritableFSDVTest.setUp(self)
self.saved_cfg_debug_mode = getConfiguration().debug_mode
getConfiguration().debug_mode = True
# initialise skins
self._registerDirectory(self)
# add a method to the fake skin folder
self._writeFile('test2.py', "return 'test2'")
# edit the test1 method
self._writeFile('test1.py', "return 'new test1'")
# add a new folder
mkdir(join(self.skin_path_name, 'test3'))
info = _dirreg.getDirectoryInfo(self.ob.fake_skin._dirpath)
info.reload()
self.use_dir_mtime = info.use_dir_mtime
def tearDown(self):
getConfiguration().debug_mode = self.saved_cfg_debug_mode
WritableFSDVTest.tearDown(self)
def test_AddNewMethod(self):
# See if a method added to the skin folder can be found
self.assertEqual(self.ob.fake_skin.test2(), 'test2')
def test_EditMethod(self):
# See if an edited method exhibits its new behaviour
self.assertEqual(self.ob.fake_skin.test1(), 'new test1')
def test_DeleteMethod(self):
# Make sure a deleted method goes away
remove(join(self.skin_path_name, 'test2.py'))
self.assertFalse(hasattr(self.ob.fake_skin, 'test2'))
def test_DeleteAddEditMethod(self):
# Check that if we delete a method, then add it back,
# then edit it, the DirectoryView notices.
        # This exercises yet another Win32 mtime weirdness.
remove(join(self.skin_path_name, 'test2.py'))
self.assertFalse(hasattr(self.ob.fake_skin, 'test2'))
# add method back to the fake skin folder
self._writeFile('test2.py', "return 'test2.2'",
self.use_dir_mtime)
# check
self.assertEqual(self.ob.fake_skin.test2(), 'test2.2')
# edit method
self._writeFile('test2.py', "return 'test2.3'",
self.use_dir_mtime)
# check
self.assertEqual(self.ob.fake_skin.test2(), 'test2.3')
def test_NewFolder(self):
# See if a new folder shows up
self.assertFalse(hasattr(self.ob.fake_skin, 'test3'))
def test_DeleteFolder(self):
# Make sure a deleted folder goes away
self.assertTrue(hasattr(self.ob.fake_skin, 'test_directory'))
# It has a file, which we need to delete first.
self.assertTrue(hasattr(self.ob.fake_skin.test_directory,
'README.txt'))
self._deleteFile(join('test_directory', 'README.txt'),
self.use_dir_mtime)
self._deleteDirectory('test_directory', self.use_dir_mtime)
self.assertFalse(hasattr(self.ob.fake_skin, 'test_directory'))
def test_suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(DirectoryViewPathTests))
suite.addTest(unittest.makeSuite(DirectoryViewTests))
suite.addTest(unittest.makeSuite(DirectoryViewIgnoreTests))
suite.addTest(unittest.makeSuite(DirectoryViewFolderTests))
suite.addTest(unittest.makeSuite(DebugModeTests))
return suite
 | [((11700, 11720), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (11718, 11720), False, 'import unittest\n'), ((1418, 1459), 'Products.CMFCore.DirectoryView.registerDirectory', 'registerDirectory', (['"""fake_skins"""', '_globals'], {}), "('fake_skins', _globals)\n", (1435, 1459), False, 'from Products.CMFCore.DirectoryView import registerDirectory\n'), ((1500, 1550), 'Products.CMFCore.DirectoryView.addDirectoryViews', 'addDirectoryViews', (['self.ob', '"""fake_skins"""', '_globals'], {}), "(self.ob, 'fake_skins', _globals)\n", (1517, 1550), False, 'from Products.CMFCore.DirectoryView import addDirectoryViews\n'), ((1664, 1705), 'Products.CMFCore.DirectoryView._generateKey', '_generateKey', (['"""Products.CMFCore"""', '"""tests"""'], {}), "('Products.CMFCore', 'tests')\n", (1676, 1705), False, 'from Products.CMFCore.DirectoryView import _generateKey\n'), ((1788, 1835), 'Products.CMFCore.DirectoryView._generateKey', '_generateKey', (['"""Products.CMFCore"""', '"""tests\x0coo"""'], {}), "('Products.CMFCore', 'tests\\x0coo')\n", (1800, 1835), False, 'from Products.CMFCore.DirectoryView import _generateKey\n'), ((2182, 2204), 'os.path.join', 'join', (['cmfpath', '"""tests"""'], {}), "(cmfpath, 'tests')\n", (2186, 2204), False, 'from os.path import join\n'), ((2695, 2703), 'tempfile.mktemp', 'mktemp', ([], {}), '()\n', (2701, 2703), False, 'from tempfile import mktemp\n'), ((8413, 8459), 'Products.CMFCore.DirectoryView.registerMetaType', 'registerMetaType', (['"""FOLDER"""', 'DummyDirectoryView'], {}), "('FOLDER', DummyDirectoryView)\n", (8429, 8459), False, 'from Products.CMFCore.DirectoryView import registerMetaType\n'), ((9438, 9490), 'Products.CMFCore.DirectoryView._dirreg.getDirectoryInfo', '_dirreg.getDirectoryInfo', (['self.ob.fake_skin._dirpath'], {}), '(self.ob.fake_skin._dirpath)\n', (9462, 9490), False, 'from Products.CMFCore.DirectoryView import _dirreg\n'), ((11739, 11781), 'unittest.makeSuite', 'unittest.makeSuite', (['DirectoryViewPathTests'], {}), '(DirectoryViewPathTests)\n', (11757, 11781), False, 'import unittest\n'), ((11801, 11839), 'unittest.makeSuite', 'unittest.makeSuite', (['DirectoryViewTests'], {}), '(DirectoryViewTests)\n', (11819, 11839), False, 'import unittest\n'), ((11859, 11903), 'unittest.makeSuite', 'unittest.makeSuite', (['DirectoryViewIgnoreTests'], {}), '(DirectoryViewIgnoreTests)\n', (11877, 11903), False, 'import unittest\n'), ((11923, 11967), 'unittest.makeSuite', 'unittest.makeSuite', (['DirectoryViewFolderTests'], {}), '(DirectoryViewFolderTests)\n', (11941, 11967), False, 'import unittest\n'), ((11987, 12021), 'unittest.makeSuite', 'unittest.makeSuite', (['DebugModeTests'], {}), '(DebugModeTests)\n', (12005, 12021), False, 'import unittest\n'), ((2082, 2110), 'Products.CMFCore.DirectoryView._findProductForPath', '_findProductForPath', (['cmfpath'], {}), '(cmfpath)\n', (2101, 2110), False, 'from Products.CMFCore.DirectoryView import _findProductForPath\n'), ((2230, 2258), 'Products.CMFCore.DirectoryView._findProductForPath', '_findProductForPath', (['cmfpath'], {}), '(cmfpath)\n', (2249, 2258), False, 'from Products.CMFCore.DirectoryView import _findProductForPath\n'), ((2717, 2753), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (2740, 2753), False, 'import warnings\n'), ((2772, 2803), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (2793, 2803), False, 'import warnings\n'), ((9010, 9028), 'App.config.getConfiguration', 'getConfiguration', ([], {}), '()\n', (9026, 9028), False, 'from App.config import getConfiguration\n'), ((9048, 9066), 'App.config.getConfiguration', 'getConfiguration', ([], {}), '()\n', (9064, 9066), False, 'from App.config import getConfiguration\n'), ((9386, 9420), 'os.path.join', 'join', (['self.skin_path_name', '"""test3"""'], {}), "(self.skin_path_name, 'test3')\n", (9390, 9420), False, 'from os.path import join\n'), ((9594, 9612), 'App.config.getConfiguration', 'getConfiguration', ([], {}), '()\n', (9610, 9612), False, 'from App.config import getConfiguration\n'), ((10105, 10142), 'os.path.join', 'join', (['self.skin_path_name', '"""test2.py"""'], {}), "(self.skin_path_name, 'test2.py')\n", (10109, 10142), False, 'from os.path import join\n'), ((10434, 10471), 'os.path.join', 'join', (['self.skin_path_name', '"""test2.py"""'], {}), "(self.skin_path_name, 'test2.py')\n", (10438, 10471), False, 'from os.path import join\n'), ((11446, 11482), 'os.path.join', 'join', (['"""test_directory"""', '"""README.txt"""'], {}), "('test_directory', 'README.txt')\n", (11450, 11482), False, 'from os.path import join\n')]
eshendricks/pyCycle | pycycle/elements/flight_conditions.py | 2b7f9c2a60c6d93d5e561c71b27e75566b3baef0 | import openmdao.api as om
from pycycle.thermo.cea import species_data
from pycycle.constants import AIR_ELEMENTS
from pycycle.elements.ambient import Ambient
from pycycle.elements.flow_start import FlowStart
class FlightConditions(om.Group):
"""Determines total and static flow properties given an altitude and Mach number using the input atmosphere model"""
def initialize(self):
self.options.declare('thermo_method', default='CEA', values=('CEA',),
desc='Method for computing thermodynamic properties')
self.options.declare('thermo_data', default=species_data.janaf,
desc='thermodynamic data set', recordable=False)
self.options.declare('elements', default=AIR_ELEMENTS,
desc='set of elements present in the flow')
self.options.declare('use_WAR', default=False, values=[True, False],
desc='If True, includes WAR calculation')
def setup(self):
thermo_method = self.options['thermo_method']
thermo_data = self.options['thermo_data']
elements = self.options['elements']
use_WAR = self.options['use_WAR']
self.add_subsystem('ambient', Ambient(), promotes=('alt', 'dTs')) # inputs
conv = self.add_subsystem('conv', om.Group(), promotes=['*'])
if use_WAR == True:
proms = ['Fl_O:*', 'MN', 'W', 'WAR']
else:
proms = ['Fl_O:*', 'MN', 'W']
conv.add_subsystem('fs', FlowStart(thermo_method=thermo_method,
thermo_data=thermo_data,
elements=elements,
use_WAR=use_WAR),
promotes=proms)
balance = conv.add_subsystem('balance', om.BalanceComp())
balance.add_balance('Tt', val=500.0, lower=1e-4, units='degR', desc='Total temperature', eq_units='degR')
balance.add_balance('Pt', val=14.696, lower=1e-4, units='psi', desc='Total pressure', eq_units='psi')
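        # The two balances treat the (unknown) total temperature and pressure as
        # implicit outputs; the Newton solver below drives them until the computed
        # static T and P (lhs, from Fl_O:stat) match the ambient values (rhs).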
# sub.set_order(['fs','balance'])
newton = conv.nonlinear_solver = om.NewtonSolver()
newton.options['atol'] = 1e-10
newton.options['rtol'] = 1e-10
newton.options['maxiter'] = 10
newton.options['iprint'] = -1
newton.options['solve_subsystems'] = True
newton.options['reraise_child_analysiserror'] = False
newton.linesearch = om.BoundsEnforceLS()
newton.linesearch.options['bound_enforcement'] = 'scalar'
newton.linesearch.options['iprint'] = -1
# newton.linesearch.options['solve_subsystems'] = True
conv.linear_solver = om.DirectSolver(assemble_jac=True)
self.connect('ambient.Ps', 'balance.rhs:Pt')
self.connect('ambient.Ts', 'balance.rhs:Tt')
self.connect('balance.Pt', 'fs.P')
self.connect('balance.Tt', 'fs.T')
self.connect('Fl_O:stat:P', 'balance.lhs:Pt')
self.connect('Fl_O:stat:T', 'balance.lhs:Tt')
# self.set_order(['ambient', 'subgroup'])
if __name__ == "__main__":
p1 = om.Problem()
p1.model = om.Group()
des_vars = p1.model.add_subsystem('des_vars', om.IndepVarComp())
des_vars.add_output('W', 0.0, units='lbm/s')
des_vars.add_output('alt', 1., units='ft')
des_vars.add_output('MN', 0.5)
des_vars.add_output('dTs', 0.0, units='degR')
fc = p1.model.add_subsystem("fc", FlightConditions())
p1.model.connect('des_vars.W', 'fc.W')
p1.model.connect('des_vars.alt', 'fc.alt')
p1.model.connect('des_vars.MN', 'fc.MN')
p1.model.connect('des_vars.dTs', 'fc.dTs')
p1.setup()
# p1.root.list_connections()
p1['des_vars.alt'] = 17868.79060515557
p1['des_vars.MN'] = 2.101070288213628
p1['des_vars.dTs'] = 0.0
p1['des_vars.W'] = 1.0
p1.run_model()
print('Ts_atm: ', p1['fc.ambient.Ts'])
print('Ts_set: ', p1['fc.Fl_O:stat:T'])
print('Ps_atm: ', p1['fc.ambient.Ps'])
print('Ps_set: ', p1['fc.Fl_O:stat:P'])
print('rhos_atm: ', p1['fc.ambient.rhos']*32.175)
print('rhos_set: ', p1['fc.Fl_O:stat:rho'])
print('W', p1['fc.Fl_O:stat:W'])
print('Pt: ', p1['fc.Fl_O:tot:P'])
| [((3148, 3160), 'openmdao.api.Problem', 'om.Problem', ([], {}), '()\n', (3158, 3160), True, 'import openmdao.api as om\n'), ((3176, 3186), 'openmdao.api.Group', 'om.Group', ([], {}), '()\n', (3184, 3186), True, 'import openmdao.api as om\n'), ((2177, 2194), 'openmdao.api.NewtonSolver', 'om.NewtonSolver', ([], {}), '()\n', (2192, 2194), True, 'import openmdao.api as om\n'), ((2490, 2510), 'openmdao.api.BoundsEnforceLS', 'om.BoundsEnforceLS', ([], {}), '()\n', (2508, 2510), True, 'import openmdao.api as om\n'), ((2720, 2754), 'openmdao.api.DirectSolver', 'om.DirectSolver', ([], {'assemble_jac': '(True)'}), '(assemble_jac=True)\n', (2735, 2754), True, 'import openmdao.api as om\n'), ((3238, 3255), 'openmdao.api.IndepVarComp', 'om.IndepVarComp', ([], {}), '()\n', (3253, 3255), True, 'import openmdao.api as om\n'), ((1244, 1253), 'pycycle.elements.ambient.Ambient', 'Ambient', ([], {}), '()\n', (1251, 1253), False, 'from pycycle.elements.ambient import Ambient\n'), ((1333, 1343), 'openmdao.api.Group', 'om.Group', ([], {}), '()\n', (1341, 1343), True, 'import openmdao.api as om\n'), ((1527, 1631), 'pycycle.elements.flow_start.FlowStart', 'FlowStart', ([], {'thermo_method': 'thermo_method', 'thermo_data': 'thermo_data', 'elements': 'elements', 'use_WAR': 'use_WAR'}), '(thermo_method=thermo_method, thermo_data=thermo_data, elements=\n elements, use_WAR=use_WAR)\n', (1536, 1631), False, 'from pycycle.elements.flow_start import FlowStart\n'), ((1851, 1867), 'openmdao.api.BalanceComp', 'om.BalanceComp', ([], {}), '()\n', (1865, 1867), True, 'import openmdao.api as om\n')] |
mashaka/TravelHelper | server/cauth/views.py | 8a216dd13c253e138f241187dee46e6e53281a7b | from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import AdminPasswordChangeForm, PasswordChangeForm, UserCreationForm
from django.contrib.auth import update_session_auth_hash, login, authenticate
from django.contrib import messages
from django.shortcuts import render, redirect
from social_django.models import UserSocialAuth
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, redirect
from rest_framework.authtoken.models import Token
from app.methods import prepare_user
def get_token(request):
    if request.user.is_authenticated:
user = request.user
prepare_user(user)
        token, _ = Token.objects.get_or_create(user=user)
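        # Hand the token back to the mobile client through its custom "travel://"
        # URL scheme via the 302 redirect built below.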
url = "travel://?token=" + token.key + '&id=' + str(user.id)
else:
url = "travel://error"
response = HttpResponse(url, status=302)
response['Location'] = url
return response
@login_required
def get_facebook_token(request):
q = get_object_or_404(UserSocialAuth, user=request.user, provider='facebook')
return HttpResponse(str(q.extra_data))
def signup(request):
return render(request, 'signup.html')
@login_required
def home(request):
return render(request, 'home.html')
@login_required
def settings(request):
user = request.user
try:
github_login = user.social_auth.get(provider='github')
except UserSocialAuth.DoesNotExist:
github_login = None
try:
twitter_login = user.social_auth.get(provider='twitter')
except UserSocialAuth.DoesNotExist:
twitter_login = None
try:
facebook_login = user.social_auth.get(provider='facebook')
except UserSocialAuth.DoesNotExist:
facebook_login = None
can_disconnect = (user.social_auth.count() > 1 or user.has_usable_password())
return render(request, 'settings.html', {
'facebook_login': facebook_login,
'can_disconnect': can_disconnect
})
@login_required
def password(request):
if request.user.has_usable_password():
PasswordForm = PasswordChangeForm
else:
PasswordForm = AdminPasswordChangeForm
if request.method == 'POST':
form = PasswordForm(request.user, request.POST)
if form.is_valid():
form.save()
update_session_auth_hash(request, form.user)
messages.success(request, 'Your password was successfully updated!')
return redirect('password')
else:
messages.error(request, 'Please correct the error below.')
else:
form = PasswordForm(request.user)
return render(request, 'password.html', {'form': form})
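# A minimal sketch of how these views might be wired into a urls.py; the route
# paths and names below are assumptions for illustration, not from this app:
#
#   from django.urls import path
#   from . import views
#
#   urlpatterns = [
#       path('token/', views.get_token, name='get_token'),
#       path('facebook-token/', views.get_facebook_token, name='get_facebook_token'),
#       path('signup/', views.signup, name='signup'),
#       path('', views.home, name='home'),
#       path('settings/', views.settings, name='settings'),
#       path('password/', views.password, name='password'),
#   ]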
| [((870, 899), 'django.http.HttpResponse', 'HttpResponse', (['url'], {'status': '(302)'}), '(url, status=302)\n', (882, 899), False, 'from django.http import HttpResponse\n'), ((1009, 1082), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['UserSocialAuth'], {'user': 'request.user', 'provider': '"""facebook"""'}), "(UserSocialAuth, user=request.user, provider='facebook')\n", (1026, 1082), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((1159, 1189), 'django.shortcuts.render', 'render', (['request', '"""signup.html"""'], {}), "(request, 'signup.html')\n", (1165, 1189), False, 'from django.shortcuts import render, redirect\n'), ((1237, 1265), 'django.shortcuts.render', 'render', (['request', '"""home.html"""'], {}), "(request, 'home.html')\n", (1243, 1265), False, 'from django.shortcuts import render, redirect\n'), ((1855, 1961), 'django.shortcuts.render', 'render', (['request', '"""settings.html"""', "{'facebook_login': facebook_login, 'can_disconnect': can_disconnect}"], {}), "(request, 'settings.html', {'facebook_login': facebook_login,\n 'can_disconnect': can_disconnect})\n", (1861, 1961), False, 'from django.shortcuts import render, redirect\n'), ((2630, 2678), 'django.shortcuts.render', 'render', (['request', '"""password.html"""', "{'form': form}"], {}), "(request, 'password.html', {'form': form})\n", (2636, 2678), False, 'from django.shortcuts import render, redirect\n'), ((669, 687), 'app.methods.prepare_user', 'prepare_user', (['user'], {}), '(user)\n', (681, 687), False, 'from app.methods import prepare_user\n'), ((706, 744), 'rest_framework.authtoken.models.Token.objects.get_or_create', 'Token.objects.get_or_create', ([], {'user': 'user'}), '(user=user)\n', (733, 744), False, 'from rest_framework.authtoken.models import Token\n'), ((2316, 2360), 'django.contrib.auth.update_session_auth_hash', 'update_session_auth_hash', (['request', 'form.user'], {}), '(request, form.user)\n', (2340, 2360), False, 'from django.contrib.auth import update_session_auth_hash, login, authenticate\n'), ((2373, 2441), 'django.contrib.messages.success', 'messages.success', (['request', '"""Your password was successfully updated!"""'], {}), "(request, 'Your password was successfully updated!')\n", (2389, 2441), False, 'from django.contrib import messages\n'), ((2461, 2481), 'django.shortcuts.redirect', 'redirect', (['"""password"""'], {}), "('password')\n", (2469, 2481), False, 'from django.shortcuts import get_object_or_404, redirect\n'), ((2508, 2566), 'django.contrib.messages.error', 'messages.error', (['request', '"""Please correct the error below."""'], {}), "(request, 'Please correct the error below.')\n", (2522, 2566), False, 'from django.contrib import messages\n')] |
lviala-zaack/zephyr | samples/modules/tensorflow/magic_wand/train/data_split_person.py | bf3c6e7ba415dd85f1b68eb69ea2779b234c686f | # Lint as: python3
# coding=utf-8
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Split data into train, validation and test dataset according to person.
That is, use some people's data for training, other people's data for
validation, and the remaining people's data for testing. The split data are
saved separately under "/person_split".
It will generate new files with the following structure:
├──person_split
│ ├── test
│ ├── train
│ └──valid
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import random
from data_split import read_data
from data_split import write_data
def person_split(whole_data, train_names, valid_names, test_names):
"""Split data by person."""
random.seed(30)
random.shuffle(whole_data)
train_data = []
valid_data = []
test_data = []
for idx, data in enumerate(whole_data): # pylint: disable=unused-variable
if data["name"] in train_names:
train_data.append(data)
elif data["name"] in valid_names:
valid_data.append(data)
elif data["name"] in test_names:
test_data.append(data)
print("train_length:" + str(len(train_data)))
print("valid_length:" + str(len(valid_data)))
print("test_length:" + str(len(test_data)))
return train_data, valid_data, test_data
if __name__ == "__main__":
data = read_data("./data/complete_data")
train_names = [
"hyw", "shiyun", "tangsy", "dengyl", "jiangyh", "xunkai", "negative3",
"negative4", "negative5", "negative6"
]
valid_names = ["lsj", "pengxl", "negative2", "negative7"]
test_names = ["liucx", "zhangxy", "negative1", "negative8"]
train_data, valid_data, test_data = person_split(data, train_names,
valid_names, test_names)
if not os.path.exists("./person_split"):
os.makedirs("./person_split")
write_data(train_data, "./person_split/train")
write_data(valid_data, "./person_split/valid")
write_data(test_data, "./person_split/test")
| [((1405, 1420), 'random.seed', 'random.seed', (['(30)'], {}), '(30)\n', (1416, 1420), False, 'import random\n'), ((1425, 1451), 'random.shuffle', 'random.shuffle', (['whole_data'], {}), '(whole_data)\n', (1439, 1451), False, 'import random\n'), ((2055, 2088), 'data_split.read_data', 'read_data', (['"""./data/complete_data"""'], {}), "('./data/complete_data')\n", (2064, 2088), False, 'from data_split import read_data\n'), ((2659, 2705), 'data_split.write_data', 'write_data', (['train_data', '"""./person_split/train"""'], {}), "(train_data, './person_split/train')\n", (2669, 2705), False, 'from data_split import write_data\n'), ((2710, 2756), 'data_split.write_data', 'write_data', (['valid_data', '"""./person_split/valid"""'], {}), "(valid_data, './person_split/valid')\n", (2720, 2756), False, 'from data_split import write_data\n'), ((2761, 2805), 'data_split.write_data', 'write_data', (['test_data', '"""./person_split/test"""'], {}), "(test_data, './person_split/test')\n", (2771, 2805), False, 'from data_split import write_data\n'), ((2583, 2615), 'os.path.exists', 'os.path.exists', (['"""./person_split"""'], {}), "('./person_split')\n", (2597, 2615), False, 'import os\n'), ((2625, 2654), 'os.makedirs', 'os.makedirs', (['"""./person_split"""'], {}), "('./person_split')\n", (2636, 2654), False, 'import os\n')] |
josebalius/go-spacemesh | tests/k8s_handler.py | 7ad61dcbe30f361b348e93c97eb3871ab79f1848 | from datetime import datetime
from kubernetes import client
from kubernetes.client.rest import ApiException
import os
import time
import yaml
from tests import config as conf
import tests.utils as ut
def remove_clusterrole_binding(shipper_name, crb_name):
# remove clusterrolebind
k8s_client = client.RbacAuthorizationV1Api()
try:
k8s_client.delete_cluster_role_binding(crb_name)
print(f"\nsuccessfully deleted: {crb_name}")
except Exception as e:
print(f"\n{shipper_name} cluster role binding deletion has failed, please manually delete {crb_name}:")
print(f"kubectl delete clusterrolebinding {crb_name}")
def filebeat_teardown(namespace):
# remove clusterrolebind
# TODO: find a solution for sharing the name both here and in the kube object
crb_name = f"filebeat-cluster-role-binding-{namespace}"
remove_clusterrole_binding("filebeat", crb_name)
def fluent_bit_teardown(namespace):
# remove clusterrolebind
# TODO: find a solution for sharing the name both here and in the kube object
crb_name = f"fluent-bit-clusterrole-binding-{namespace}"
remove_clusterrole_binding("fluent-bit", crb_name)
def add_elastic_cluster(namespace):
print("\nDeploying ElasticSearch\n")
add_deployment_dir(namespace, conf.ELASTIC_CONF_DIR)
def add_filebeat_cluster(namespace):
print("\nDeploying FileBeat\n")
add_deployment_dir(namespace, conf.FILEBEAT_CONF_DIR)
def add_fluent_bit_cluster(namespace):
print("\nDeploying Fluent-bit\n")
add_deployment_dir(namespace, conf.FLUENT_BIT_CONF_DIR)
def add_kibana_cluster(namespace):
print("\nDeploying Kibana\n")
add_deployment_dir(namespace, conf.KIBANA_CONF_DIR)
def add_logstash_cluster(namespace):
print("\nDeploying LogStash\n")
add_deployment_dir(namespace, conf.LOGSTASH_CONF_DIR)
def add_deployment_dir(namespace, dir_path, delete=False):
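    # dep_order.txt holds a single comma-separated line naming the manifest
    # files in the order they must be applied.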
with open(os.path.join(dir_path, 'dep_order.txt')) as f:
dep_order = f.readline()
dep_lst = [x.strip() for x in dep_order.split(',')]
print(dep_lst)
phrases_to_replace = ["(?<!_)NAMESPACE", "REP_ES_USER", "REP_ES_PASS"]
values_for_replacement = [namespace, conf.ES_USER_LOCAL, conf.ES_PASS_LOCAL]
for filename in dep_lst:
# replace all phrases with the actual values if exists
modified_file_path, is_change = ut.duplicate_file_and_replace_phrases(
dir_path, filename, f"{namespace}_{filename}", phrases_to_replace, values_for_replacement
)
print(f"applying file: {filename}")
with open(modified_file_path) as f:
dep = yaml.safe_load(f)
if modified_file_path != os.path.join(dir_path, filename) and is_change:
# remove modified file
ut.delete_file(modified_file_path)
name = dep["metadata"]["name"]
if dep['kind'] == 'StatefulSet':
k8s_client = client.AppsV1Api()
if not delete:
k8s_client.create_namespaced_stateful_set(body=dep, namespace=namespace)
else:
k8s_client.delete_namespaced_stateful_set(name=name, namespace=namespace)
elif dep['kind'] == 'DaemonSet':
k8s_client = client.AppsV1Api()
k8s_client.create_namespaced_daemon_set(body=dep, namespace=namespace)
elif dep['kind'] == 'Deployment':
k8s_client = client.AppsV1Api()
k8s_client.create_namespaced_deployment(body=dep, namespace=namespace)
elif dep['kind'] == 'Service':
try:
k8s_client = client.CoreV1Api()
k8s_client.create_namespaced_service(body=dep, namespace=namespace)
except ApiException as e:
if e.status == 409:
print(f"Service exists: {dep['metadata']['name']}")
continue
raise e
elif dep['kind'] == 'PodDisruptionBudget':
k8s_client = client.PolicyV1beta1Api()
k8s_client.create_namespaced_pod_disruption_budget(body=dep, namespace=namespace)
elif dep["kind"] == 'Role':
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.create_namespaced_role(body=dep, namespace=namespace)
elif dep["kind"] == 'ClusterRole':
try:
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.create_cluster_role(body=dep)
except ApiException as e:
if e.status == 409:
print(f"cluster role already exists")
continue
raise e
elif dep["kind"] == 'RoleBinding':
k8s_client = client.RbacAuthorizationV1Api()
dep["subjects"][0]["namespace"] = namespace
k8s_client.create_namespaced_role_binding(body=dep, namespace=namespace)
elif dep["kind"] == 'ClusterRoleBinding':
k8s_client = client.RbacAuthorizationV1Api()
try:
k8s_client.create_cluster_role_binding(body=dep)
except ApiException as e:
if e.status == 409:
print(f"cluster role binding already exists")
continue
raise e
elif dep["kind"] == 'ConfigMap':
k8s_client = client.CoreV1Api()
k8s_client.create_namespaced_config_map(body=dep, namespace=namespace)
elif dep["kind"] == 'ServiceAccount':
k8s_client = client.CoreV1Api()
k8s_client.create_namespaced_service_account(body=dep, namespace=namespace)
print("\nDone\n")
def remove_deployment_dir(namespace, dir_path):
with open(os.path.join(dir_path, 'dep_order.txt')) as f:
dep_order = f.readline()
dep_lst = [x.strip() for x in dep_order.split(',')]
print(dep_lst)
for filename in dep_lst:
print(f"deleting {filename}")
with open(os.path.join(dir_path, filename)) as f:
dep = yaml.safe_load(f)
name = dep["metadata"]["name"]
if dep['kind'] == 'StatefulSet':
k8s_client = client.AppsV1Api()
k8s_client.delete_namespaced_stateful_set(name=name, namespace=namespace)
elif dep['kind'] == 'DaemonSet':
k8s_client = client.AppsV1Api()
k8s_client.delete_namespaced_daemon_set(name=name, namespace=namespace)
elif dep['kind'] == 'Deployment':
k8s_client = client.AppsV1Api()
k8s_client.delete_namespaced_deployment(name=name, namespace=namespace)
elif dep['kind'] == 'Service':
k8s_client = client.CoreV1Api()
k8s_client.delete_namespaced_service(name=name, namespace=namespace, grace_period_seconds=0)
delete_func = k8s_client.delete_namespaced_service
list_func = k8s_client.list_namespaced_service
wait_for_namespaced_deletion(name, namespace, delete_func, list_func)
elif dep['kind'] == 'PodDisruptionBudget':
k8s_client = client.PolicyV1beta1Api()
k8s_client.delete_namespaced_pod_disruption_budget(name=name, namespace=namespace)
elif dep["kind"] == 'Role':
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.delete_namespaced_role(name=name, namespace=namespace)
elif dep["kind"] == 'RoleBinding':
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.delete_namespaced_role_binding(name=name, namespace=namespace)
elif dep["kind"] == 'ClusterRoleBinding':
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.delete_cluster_role_binding(name=name)
elif dep["kind"] == 'ConfigMap':
k8s_client = client.CoreV1Api()
k8s_client.delete_namespaced_config_map(name=name, namespace=namespace)
elif dep["kind"] == 'ServiceAccount':
k8s_client = client.CoreV1Api()
k8s_client.delete_namespaced_service_account(name=name, namespace=namespace)
print("\nDone\n")
def wait_for_namespaced_deletion(name, namespace, deletion_func, list_func, timeout=15):
deleted = False
orig_timeout = timeout
while not deleted:
# find by name and delete requested item
for item in list_func(namespace).items:
if item.metadata.name == name:
if timeout < 0:
raise TimeoutError(f"{orig_timeout} was not enough for deleting item:\n{item}\n")
deletion_func(name=name, namespace=namespace)
print(f"service {name} was not deleted, retrying")
time.sleep(1)
timeout -= 1
# validate item was deleted
for item in list_func(namespace).items:
deleted = True
if item.metadata.name == name:
deleted = False
return deleted
def wait_for_daemonset_to_be_ready(name, namespace, timeout=None):
wait_for_to_be_ready("daemonset", name, namespace, timeout=timeout)
def resolve_read_status_func(obj_name):
if obj_name == "daemonset":
return client.AppsV1Api().read_namespaced_daemon_set_status
else:
raise ValueError(f"resolve_read_status_func: {obj_name} is not a valid value")
def wait_for_to_be_ready(obj_name, name, namespace, timeout=None):
start = datetime.now()
while True:
read_func = resolve_read_status_func(obj_name)
resp = read_func(name=name, namespace=namespace)
total_sleep_time = (datetime.now()-start).total_seconds()
number_ready = resp.status.number_ready
updated_number_scheduled = resp.status.updated_number_scheduled
if number_ready and updated_number_scheduled and number_ready == updated_number_scheduled:
print("Total time waiting for {3} {0} [size: {1}]: {2} sec".format(name, number_ready, total_sleep_time,
obj_name))
break
print("{0}/{1} pods ready {2} sec ".format(number_ready, updated_number_scheduled, total_sleep_time), end="\r")
time.sleep(1)
if timeout and total_sleep_time > timeout:
raise Exception(f"Timeout waiting for {obj_name} to be ready")
 | [((305, 336), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (334, 336), False, 'from kubernetes import client\n'), ((9695, 9709), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (9707, 9709), False, 'from datetime import datetime\n'), ((2378, 2510), 'tests.utils.duplicate_file_and_replace_phrases', 'ut.duplicate_file_and_replace_phrases', (['dir_path', 'filename', 'f"""{namespace}_{filename}"""', 'phrases_to_replace', 'values_for_replacement'], {}), "(dir_path, filename,\n f'{namespace}_{filename}', phrases_to_replace, values_for_replacement)\n", (2415, 2510), True, 'import tests.utils as ut\n'), ((10490, 10503), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (10500, 10503), False, 'import time\n'), ((1926, 1965), 'os.path.join', 'os.path.join', (['dir_path', '"""dep_order.txt"""'], {}), "(dir_path, 'dep_order.txt')\n", (1938, 1965), False, 'import os\n'), ((2635, 2652), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (2649, 2652), False, 'import yaml\n'), ((5910, 5949), 'os.path.join', 'os.path.join', (['dir_path', '"""dep_order.txt"""'], {}), "(dir_path, 'dep_order.txt')\n", (5922, 5949), False, 'import os\n'), ((6217, 6234), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (6231, 6234), False, 'import yaml\n'), ((9464, 9482), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (9480, 9482), False, 'from kubernetes import client\n'), ((2793, 2827), 'tests.utils.delete_file', 'ut.delete_file', (['modified_file_path'], {}), '(modified_file_path)\n', (2807, 2827), True, 'import tests.utils as ut\n'), ((2946, 2964), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (2962, 2964), False, 'from kubernetes import client\n'), ((6159, 6191), 'os.path.join', 'os.path.join', (['dir_path', 'filename'], {}), '(dir_path, filename)\n', (6171, 6191), False, 'import os\n'), ((6352, 6370), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (6368, 6370), False, 'from kubernetes import client\n'), ((8986, 8999), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (8996, 8999), False, 'import time\n'), ((2690, 2722), 'os.path.join', 'os.path.join', (['dir_path', 'filename'], {}), '(dir_path, filename)\n', (2702, 2722), False, 'import os\n'), ((3279, 3297), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (3295, 3297), False, 'from kubernetes import client\n'), ((6535, 6553), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (6551, 6553), False, 'from kubernetes import client\n'), ((9866, 9880), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (9878, 9880), False, 'from datetime import datetime\n'), ((3460, 3478), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (3476, 3478), False, 'from kubernetes import client\n'), ((6717, 6735), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (6733, 6735), False, 'from kubernetes import client\n'), ((6896, 6914), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (6912, 6914), False, 'from kubernetes import client\n'), ((3663, 3681), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (3679, 3681), False, 'from kubernetes import client\n'), ((4073, 4098), 'kubernetes.client.PolicyV1beta1Api', 'client.PolicyV1beta1Api', ([], {}), '()\n', (4096, 4098), False, 'from kubernetes import client\n'), ((7324, 7349), 'kubernetes.client.PolicyV1beta1Api', 'client.PolicyV1beta1Api', ([], {}), '()\n', (7347, 7349), False, 'from kubernetes import client\n'), ((4266, 4297), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (4295, 4297), False, 'from kubernetes import client\n'), ((7518, 7549), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (7547, 7549), False, 'from kubernetes import client\n'), ((7708, 7739), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (7737, 7739), False, 'from kubernetes import client\n'), ((4480, 4511), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (4509, 4511), False, 'from kubernetes import client\n'), ((4854, 4885), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (4883, 4885), False, 'from kubernetes import client\n'), ((7913, 7944), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (7942, 7944), False, 'from kubernetes import client\n'), ((5118, 5149), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (5147, 5149), False, 'from kubernetes import client\n'), ((8085, 8103), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (8101, 8103), False, 'from kubernetes import client\n'), ((5527, 5545), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (5543, 5545), False, 'from kubernetes import client\n'), ((8271, 8289), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (8287, 8289), False, 'from kubernetes import client\n'), ((5712, 5730), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (5728, 5730), False, 'from kubernetes import client\n')]
m4rcu5/natlas | natlas-agent/config.py | d1057c5349a5443cecffb3db9a6428f7271b07ad | import os
from dotenv import load_dotenv
class Config:
# Current Version
NATLAS_VERSION = "0.6.10"
BASEDIR = os.path.abspath(os.path.dirname(__file__))
load_dotenv(os.path.join(BASEDIR, '.env'))
def get_int(self, varname):
tmp = os.environ.get(varname)
if tmp:
return int(tmp)
return None
def get_bool(self, varname):
tmp = os.environ.get(varname)
if tmp and tmp.upper() == "TRUE":
return True
if tmp and tmp.upper() == "FALSE":
return False
return None
def __init__(self):
# url of server to get/submit work from/to
self.server = os.environ.get('NATLAS_SERVER_ADDRESS') or 'http://127.0.0.1:5000'
# ignore warnings about SSL connections
# you shouldn't ignore ssl warnings, but I'll give you the option
# Instead, you should put the trusted CA certificate bundle on the agent and use the REQUESTS_CA_BUNDLE env variable
self.ignore_ssl_warn = self.get_bool('NATLAS_IGNORE_SSL_WARN') or False
# maximum number of threads to utilize
self.max_threads = self.get_int('NATLAS_MAX_THREADS') or 3
# Are we allowed to scan local addresses?
# By default, agents protect themselves from scanning their local network
self.scan_local = self.get_bool('NATLAS_SCAN_LOCAL') or False
# default time to wait for the server to respond
self.request_timeout = self.get_int('NATLAS_REQUEST_TIMEOUT') or 15 # seconds
# Maximum value for exponential backoff of requests, 5 minutes default
self.backoff_max = self.get_int('NATLAS_BACKOFF_MAX') or 300 # seconds
# Base value to begin the exponential backoff
self.backoff_base = self.get_int('NATLAS_BACKOFF_BASE') or 1 # seconds
# Maximum number of times to retry submitting data before giving up
# This is useful if a thread is submitting data that the server doesn't understand for some reason
self.max_retries = self.get_int('NATLAS_MAX_RETRIES') or 10
# Identification string that identifies the agent that performed any given scan
# Used for database lookup and stored in scan output
self.agent_id = os.environ.get("NATLAS_AGENT_ID") or None
# Authentication token that agents can use to talk to the server API
# Only needed if the server is configured to require agent authentication
self.auth_token = os.environ.get("NATLAS_AGENT_TOKEN") or None
# Optionally save files that failed to upload
self.save_fails = self.get_bool("NATLAS_SAVE_FAILS") or False
# Allow version overrides for local development
# Necessary to test versioned host data templates before release
self.version_override = os.environ.get("NATLAS_VERSION_OVERRIDE") or None
self.sentry_dsn = os.environ.get("SENTRY_DSN") or None
if self.version_override:
self.NATLAS_VERSION = self.version_override
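# A minimal usage sketch (assumed, not part of the original module):
#
#   config = Config()
#   print(config.server, config.max_threads, config.NATLAS_VERSION)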
| [((132, 157), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (147, 157), False, 'import os\n'), ((172, 201), 'os.path.join', 'os.path.join', (['BASEDIR', '""".env"""'], {}), "(BASEDIR, '.env')\n", (184, 201), False, 'import os\n'), ((241, 264), 'os.environ.get', 'os.environ.get', (['varname'], {}), '(varname)\n', (255, 264), False, 'import os\n'), ((347, 370), 'os.environ.get', 'os.environ.get', (['varname'], {}), '(varname)\n', (361, 370), False, 'import os\n'), ((572, 611), 'os.environ.get', 'os.environ.get', (['"""NATLAS_SERVER_ADDRESS"""'], {}), "('NATLAS_SERVER_ADDRESS')\n", (586, 611), False, 'import os\n'), ((2022, 2055), 'os.environ.get', 'os.environ.get', (['"""NATLAS_AGENT_ID"""'], {}), "('NATLAS_AGENT_ID')\n", (2036, 2055), False, 'import os\n'), ((2232, 2268), 'os.environ.get', 'os.environ.get', (['"""NATLAS_AGENT_TOKEN"""'], {}), "('NATLAS_AGENT_TOKEN')\n", (2246, 2268), False, 'import os\n'), ((2534, 2575), 'os.environ.get', 'os.environ.get', (['"""NATLAS_VERSION_OVERRIDE"""'], {}), "('NATLAS_VERSION_OVERRIDE')\n", (2548, 2575), False, 'import os\n'), ((2605, 2633), 'os.environ.get', 'os.environ.get', (['"""SENTRY_DSN"""'], {}), "('SENTRY_DSN')\n", (2619, 2633), False, 'import os\n')] |
rubind/host_unity | rdr2019/mcmc_lc_jla_fit.py | a1908d80a8b6354e4516cccbf2b1a214cbc7daa9 | import os
import sys
import click
import pickle
import sncosmo
import numpy as np
from astropy.table import Table
DATA_PATH = '/home/samdixon/jla_light_curves/'
def modify_error(lc, error_floor=0.):
"""Add an error floor of `error_floor` times the maximum flux of the band
to each observation
"""
data = sncosmo.photdata.photometric_data(lc).normalized(zp=25., zpsys='ab')
new_lc = {'time': data.time,
'band': data.band,
'flux': data.flux,
'fluxerr': data.fluxerr,
'zp': data.zp,
'zpsys': data.zpsys}
for band in set(data.band):
band_cut = data.band==band
max_flux_in_band = np.max(data.flux[band_cut])
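        # Add the floor (a fraction of the band's peak flux) in quadrature
        # with the reported measurement error.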
new_lc['fluxerr'][band_cut] = np.sqrt((error_floor*max_flux_in_band)**2+data.fluxerr[band_cut]**2)
new_lc = Table(new_lc, meta=lc.meta)
return new_lc
def fit_lc_and_save(lc, model_name, save_dir, no_mc):
name = lc.meta['SN']
model = sncosmo.Model(source=model_name,
effects=[sncosmo.CCM89Dust()],
effect_names=['mw'],
effect_frames=['obs'])
if type(name) is float:
name = int(name)
z = lc.meta['Z_HELIO']
mwebv = lc.meta['MWEBV']
bounds = {}
try:
t0 = float(lc.meta['DayMax'].split()[0])
bounds['t0'] = (t0-5, t0+5)
except KeyError:
try:
t0 = np.mean(lc['Date'])
bounds['t0'] = (min(lc['Date'])-20, max(lc['Date']))
except KeyError:
t0 = np.mean(lc['time'])
bounds['t0'] = (min(lc['time'])-20, max(lc['time']))
bounds['z'] = ((1-1e-4)*z, (1+1e-4)*z)
for param_name in model.source.param_names[1:]:
bounds[param_name] = (-50, 50)
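    # Only the SALT2 source supplies a model covariance, so it is enabled just
    # for SALT2 fits.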
    modelcov = model_name == 'salt2'
    model.set(z=z, t0=t0, mwebv=mwebv)
    phase_range = (-15, 45) if model_name == 'salt2' else (-10, 40)
    wave_range = (3000, 7000) if model_name == 'salt2' else None
save_path = os.path.join(save_dir, '{}.pkl'.format(name))
try:
minuit_result, minuit_fit_model = sncosmo.fit_lc(lc, model, model.param_names[:-2], bounds=bounds,
phase_range=phase_range, wave_range=wave_range,
warn=False, modelcov=modelcov)
if not no_mc:
emcee_result, emcee_fit_model = sncosmo.mcmc_lc(sncosmo.select_data(lc, minuit_result['data_mask']),
minuit_fit_model,
model.param_names[:-2],
guess_t0=False,
bounds=bounds,
warn=False,
nwalkers=40,
modelcov=modelcov)
pickle.dump(emcee_result, open(save_path, 'wb'))
else:
pickle.dump(minuit_result, open(save_path, 'wb'))
    except Exception:
print('Fit to {} failed'.format(name))
sys.stdout.flush()
def main():
model_name, start, finish, err_floor, no_mc = sys.argv[1:]
start = int(start)
finish = int(finish)
err_floor = float(err_floor)
no_mc = bool(int(no_mc))
if no_mc:
save_dir = '/home/samdixon/host_unity/fitting/results_mw_reddening/jla_{}_{:02d}'.format(model_name, int(err_floor*100))
else:
save_dir = '/home/samdixon/host_unity/fitting/results_mw_reddening_mcmc/jla_{}_{:02d}'.format(model_name, int(err_floor*100))
if not os.path.isdir(save_dir):
os.makedirs(save_dir)
lcs = []
for f in os.listdir(DATA_PATH)[int(start):int(finish)]:
if f[:2] == 'lc':
lc = sncosmo.read_lc(os.path.join(DATA_PATH, f), format='salt2', expand_bands=True, read_covmat=True)
lc = modify_error(lc, err_floor)
name = lc.meta['SN']
if type(name) is float:
name = int(name)
load_path = os.path.join(save_dir, '{}.pkl'.format(name))
try:
pickle.load(open(load_path, 'rb'))
print('{}: loaded'.format(name))
sys.stdout.flush()
except IOError:
print('Fitting {}'.format(name))
sys.stdout.flush()
fit_lc_and_save(lc, model_name, save_dir, no_mc)
else:
continue
if __name__=='__main__':
main()
| [((841, 868), 'astropy.table.Table', 'Table', (['new_lc'], {'meta': 'lc.meta'}), '(new_lc, meta=lc.meta)\n', (846, 868), False, 'from astropy.table import Table\n'), ((693, 720), 'numpy.max', 'np.max', (['data.flux[band_cut]'], {}), '(data.flux[band_cut])\n', (699, 720), True, 'import numpy as np\n'), ((759, 835), 'numpy.sqrt', 'np.sqrt', (['((error_floor * max_flux_in_band) ** 2 + data.fluxerr[band_cut] ** 2)'], {}), '((error_floor * max_flux_in_band) ** 2 + data.fluxerr[band_cut] ** 2)\n', (766, 835), True, 'import numpy as np\n'), ((2109, 2261), 'sncosmo.fit_lc', 'sncosmo.fit_lc', (['lc', 'model', 'model.param_names[:-2]'], {'bounds': 'bounds', 'phase_range': 'phase_range', 'wave_range': 'wave_range', 'warn': '(False)', 'modelcov': 'modelcov'}), '(lc, model, model.param_names[:-2], bounds=bounds,\n phase_range=phase_range, wave_range=wave_range, warn=False, modelcov=\n modelcov)\n', (2123, 2261), False, 'import sncosmo\n'), ((3760, 3783), 'os.path.isdir', 'os.path.isdir', (['save_dir'], {}), '(save_dir)\n', (3773, 3783), False, 'import os\n'), ((3793, 3814), 'os.makedirs', 'os.makedirs', (['save_dir'], {}), '(save_dir)\n', (3804, 3814), False, 'import os\n'), ((3842, 3863), 'os.listdir', 'os.listdir', (['DATA_PATH'], {}), '(DATA_PATH)\n', (3852, 3863), False, 'import os\n'), ((328, 365), 'sncosmo.photdata.photometric_data', 'sncosmo.photdata.photometric_data', (['lc'], {}), '(lc)\n', (361, 365), False, 'import sncosmo\n'), ((3243, 3261), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (3259, 3261), False, 'import sys\n'), ((1059, 1078), 'sncosmo.CCM89Dust', 'sncosmo.CCM89Dust', ([], {}), '()\n', (1076, 1078), False, 'import sncosmo\n'), ((1447, 1466), 'numpy.mean', 'np.mean', (["lc['Date']"], {}), "(lc['Date'])\n", (1454, 1466), True, 'import numpy as np\n'), ((2449, 2500), 'sncosmo.select_data', 'sncosmo.select_data', (['lc', "minuit_result['data_mask']"], {}), "(lc, minuit_result['data_mask'])\n", (2468, 2500), False, 'import sncosmo\n'), ((3948, 3974), 'os.path.join', 'os.path.join', (['DATA_PATH', 'f'], {}), '(DATA_PATH, f)\n', (3960, 3974), False, 'import os\n'), ((4379, 4397), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (4395, 4397), False, 'import sys\n'), ((1574, 1593), 'numpy.mean', 'np.mean', (["lc['time']"], {}), "(lc['time'])\n", (1581, 1593), True, 'import numpy as np\n'), ((4491, 4509), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (4507, 4509), False, 'import sys\n')] |
toddrme2178/OpenMDAO | openmdao/core/tests/test_system.py | 379cc6216d13d380e11cb3a46f03960981de4660 | """ Unit tests for the system interface."""
import unittest
from six import assertRaisesRegex
from six.moves import cStringIO
import numpy as np
from openmdao.api import Problem, Group, IndepVarComp, ExecComp
from openmdao.test_suite.components.options_feature_vector import VectorDoublingComp
from openmdao.utils.assert_utils import assert_rel_error, assert_warning
class TestSystem(unittest.TestCase):
def test_vector_context_managers(self):
g1 = Group()
g1.add_subsystem('Indep', IndepVarComp('a', 5.0), promotes=['a'])
g2 = g1.add_subsystem('G2', Group(), promotes=['*'])
g2.add_subsystem('C1', ExecComp('b=2*a'), promotes=['a', 'b'])
model = Group()
model.add_subsystem('G1', g1, promotes=['b'])
model.add_subsystem('Sink', ExecComp('c=2*b'), promotes=['b'])
p = Problem(model=model)
p.set_solver_print(level=0)
# Test pre-setup errors
with self.assertRaises(Exception) as cm:
inputs, outputs, residuals = model.get_nonlinear_vectors()
self.assertEqual(str(cm.exception),
"Group: Cannot get vectors because setup has not yet been called.")
with self.assertRaises(Exception) as cm:
d_inputs, d_outputs, d_residuals = model.get_linear_vectors('vec')
self.assertEqual(str(cm.exception),
"Group: Cannot get vectors because setup has not yet been called.")
p.setup()
p.run_model()
# Test inputs with original values
inputs, outputs, residuals = model.get_nonlinear_vectors()
self.assertEqual(inputs['G1.G2.C1.a'], 5.)
inputs, outputs, residuals = g1.get_nonlinear_vectors()
self.assertEqual(inputs['G2.C1.a'], 5.)
# Test inputs after setting a new value
inputs, outputs, residuals = g2.get_nonlinear_vectors()
inputs['C1.a'] = -1.
inputs, outputs, residuals = model.get_nonlinear_vectors()
self.assertEqual(inputs['G1.G2.C1.a'], -1.)
inputs, outputs, residuals = g1.get_nonlinear_vectors()
self.assertEqual(inputs['G2.C1.a'], -1.)
# Test outputs with original values
inputs, outputs, residuals = model.get_nonlinear_vectors()
self.assertEqual(outputs['G1.G2.C1.b'], 10.)
inputs, outputs, residuals = g2.get_nonlinear_vectors()
# Test outputs after setting a new value
inputs, outputs, residuals = model.get_nonlinear_vectors()
outputs['G1.G2.C1.b'] = 123.
self.assertEqual(outputs['G1.G2.C1.b'], 123.)
inputs, outputs, residuals = g2.get_nonlinear_vectors()
outputs['C1.b'] = 789.
self.assertEqual(outputs['C1.b'], 789.)
# Test residuals
inputs, outputs, residuals = model.get_nonlinear_vectors()
residuals['G1.G2.C1.b'] = 99.0
self.assertEqual(residuals['G1.G2.C1.b'], 99.0)
# Test linear
d_inputs, d_outputs, d_residuals = model.get_linear_vectors('linear')
d_outputs['G1.G2.C1.b'] = 10.
self.assertEqual(d_outputs['G1.G2.C1.b'], 10.)
# Test linear with invalid vec_name
with self.assertRaises(Exception) as cm:
d_inputs, d_outputs, d_residuals = model.get_linear_vectors('bad_name')
self.assertEqual(str(cm.exception),
"Group (<model>): There is no linear vector named %s" % 'bad_name')
def test_set_checks_shape(self):
indep = IndepVarComp()
indep.add_output('a')
indep.add_output('x', shape=(5, 1))
g1 = Group()
g1.add_subsystem('Indep', indep, promotes=['a', 'x'])
g2 = g1.add_subsystem('G2', Group(), promotes=['*'])
g2.add_subsystem('C1', ExecComp('b=2*a'), promotes=['a', 'b'])
g2.add_subsystem('C2', ExecComp('y=2*x',
x=np.zeros((5, 1)),
y=np.zeros((5, 1))),
promotes=['x', 'y'])
model = Group()
model.add_subsystem('G1', g1, promotes=['b', 'y'])
model.add_subsystem('Sink', ExecComp(('c=2*b', 'z=2*y'),
y=np.zeros((5, 1)),
z=np.zeros((5, 1))),
promotes=['b', 'y'])
p = Problem(model=model)
p.setup()
p.set_solver_print(level=0)
p.run_model()
msg = "Incompatible shape for '.*': Expected (.*) but got (.*)"
num_val = -10
arr_val = -10*np.ones((5, 1))
        bad_val = -10*np.ones(10)
inputs, outputs, residuals = g2.get_nonlinear_vectors()
#
# set input
#
# assign array to scalar
with assertRaisesRegex(self, ValueError, msg):
inputs['C1.a'] = arr_val
# assign scalar to array
inputs['C2.x'] = num_val
assert_rel_error(self, inputs['C2.x'], arr_val, 1e-10)
# assign array to array
inputs['C2.x'] = arr_val
assert_rel_error(self, inputs['C2.x'], arr_val, 1e-10)
# assign bad array shape to array
with assertRaisesRegex(self, ValueError, msg):
inputs['C2.x'] = bad_val
# assign list to array
inputs['C2.x'] = arr_val.tolist()
assert_rel_error(self, inputs['C2.x'], arr_val, 1e-10)
# assign bad list shape to array
with assertRaisesRegex(self, ValueError, msg):
inputs['C2.x'] = bad_val.tolist()
#
# set output
#
# assign array to scalar
with assertRaisesRegex(self, ValueError, msg):
outputs['C1.b'] = arr_val
# assign scalar to array
outputs['C2.y'] = num_val
assert_rel_error(self, outputs['C2.y'], arr_val, 1e-10)
# assign array to array
outputs['C2.y'] = arr_val
assert_rel_error(self, outputs['C2.y'], arr_val, 1e-10)
# assign bad array shape to array
with assertRaisesRegex(self, ValueError, msg):
outputs['C2.y'] = bad_val
# assign list to array
outputs['C2.y'] = arr_val.tolist()
assert_rel_error(self, outputs['C2.y'], arr_val, 1e-10)
# assign bad list shape to array
with assertRaisesRegex(self, ValueError, msg):
outputs['C2.y'] = bad_val.tolist()
#
# set residual
#
# assign array to scalar
with assertRaisesRegex(self, ValueError, msg):
residuals['C1.b'] = arr_val
# assign scalar to array
residuals['C2.y'] = num_val
assert_rel_error(self, residuals['C2.y'], arr_val, 1e-10)
# assign array to array
residuals['C2.y'] = arr_val
assert_rel_error(self, residuals['C2.y'], arr_val, 1e-10)
# assign bad array shape to array
with assertRaisesRegex(self, ValueError, msg):
residuals['C2.y'] = bad_val
# assign list to array
residuals['C2.y'] = arr_val.tolist()
assert_rel_error(self, residuals['C2.y'], arr_val, 1e-10)
# assign bad list shape to array
with assertRaisesRegex(self, ValueError, msg):
residuals['C2.y'] = bad_val.tolist()
def test_deprecated_solver_names(self):
class DummySolver():
pass
model = Group()
# check nl_solver setter & getter
msg = "The 'nl_solver' attribute provides backwards compatibility " \
"with OpenMDAO 1.x ; use 'nonlinear_solver' instead."
with assert_warning(DeprecationWarning, msg):
model.nl_solver = DummySolver()
with assert_warning(DeprecationWarning, msg):
solver = model.nl_solver
self.assertTrue(isinstance(solver, DummySolver))
# check ln_solver setter & getter
msg = "The 'ln_solver' attribute provides backwards compatibility " \
"with OpenMDAO 1.x ; use 'linear_solver' instead."
with assert_warning(DeprecationWarning, msg):
model.ln_solver = DummySolver()
with assert_warning(DeprecationWarning, msg):
solver = model.ln_solver
self.assertTrue(isinstance(solver, DummySolver))
def test_deprecated_metadata(self):
prob = Problem()
prob.model.add_subsystem('inputs', IndepVarComp('x', shape=3))
prob.model.add_subsystem('double', VectorDoublingComp())
msg = "The 'metadata' attribute provides backwards compatibility " \
"with earlier version of OpenMDAO; use 'options' instead."
with assert_warning(DeprecationWarning, msg):
prob.model.double.metadata['size'] = 3
prob.model.connect('inputs.x', 'double.x')
prob.setup()
prob['inputs.x'] = [1., 2., 3.]
prob.run_model()
assert_rel_error(self, prob['double.y'], [2., 4., 6.])
def test_list_inputs_output_with_includes_excludes(self):
from openmdao.test_suite.scripts.circuit_analysis import Resistor, Diode, Node, Circuit
p = Problem()
model = p.model
model.add_subsystem('ground', IndepVarComp('V', 0., units='V'))
model.add_subsystem('source', IndepVarComp('I', 0.1, units='A'))
model.add_subsystem('circuit', Circuit())
model.connect('source.I', 'circuit.I_in')
model.connect('ground.V', 'circuit.Vg')
p.setup()
p.run_model()
# Inputs with no includes or excludes
inputs = model.list_inputs(out_stream=None)
        self.assertEqual(len(inputs), 11)
        # Inputs with includes
        inputs = model.list_inputs(includes=['*V_out*'], out_stream=None)
        self.assertEqual(len(inputs), 3)
        # Inputs with includes matching a promoted name
        inputs = model.list_inputs(includes=['*Vg*'], out_stream=None)
        self.assertEqual(len(inputs), 2)
        # Inputs with excludes
        inputs = model.list_inputs(excludes=['*V_out*'], out_stream=None)
        self.assertEqual(len(inputs), 8)
        # Inputs with excludes matching a promoted name
        inputs = model.list_inputs(excludes=['*Vg*'], out_stream=None)
        self.assertEqual(len(inputs), 9)
        # Inputs with includes and excludes
        inputs = model.list_inputs(includes=['*V_out*'], excludes=['*Vg*'], out_stream=None)
        self.assertEqual(len(inputs), 1)
        # Outputs with no includes or excludes. Explicit only
        outputs = model.list_outputs(implicit=False, out_stream=None)
        self.assertEqual(len(outputs), 5)
        # Outputs with includes. Explicit only
        outputs = model.list_outputs(includes=['*I'], implicit=False, out_stream=None)
        self.assertEqual(len(outputs), 4)
        # Outputs with excludes. Explicit only
        outputs = model.list_outputs(excludes=['circuit*'], implicit=False, out_stream=None)
        self.assertEqual(len(outputs), 2)
if __name__ == "__main__":
unittest.main()
 | [((10995, 11010), 'unittest.main', 'unittest.main', ([], {}), '()\n', (11008, 11010), False, 'import unittest\n'), ((467, 474), 'openmdao.api.Group', 'Group', ([], {}), '()\n', (472, 474), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((698, 705), 'openmdao.api.Group', 'Group', ([], {}), '()\n', (703, 705), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((844, 864), 'openmdao.api.Problem', 'Problem', ([], {'model': 'model'}), '(model=model)\n', (851, 864), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((3482, 3496), 'openmdao.api.IndepVarComp', 'IndepVarComp', ([], {}), '()\n', (3494, 3496), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((3585, 3592), 'openmdao.api.Group', 'Group', ([], {}), '()\n', (3590, 3592), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((4036, 4043), 'openmdao.api.Group', 'Group', ([], {}), '()\n', (4041, 4043), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((4378, 4398), 'openmdao.api.Problem', 'Problem', ([], {'model': 'model'}), '(model=model)\n', (4385, 4398), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((4952, 5006), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "inputs['C2.x']", 'arr_val', '(1e-10)'], {}), "(self, inputs['C2.x'], arr_val, 1e-10)\n", (4968, 5006), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((5081, 5135), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "inputs['C2.x']", 'arr_val', '(1e-10)'], {}), "(self, inputs['C2.x'], arr_val, 1e-10)\n", (5097, 5135), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((5353, 5407), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "inputs['C2.x']", 'arr_val', '(1e-10)'], {}), "(self, inputs['C2.x'], arr_val, 1e-10)\n", (5369, 5407), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((5796, 5851), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "outputs['C2.y']", 'arr_val', '(1e-10)'], {}), "(self, outputs['C2.y'], arr_val, 1e-10)\n", (5812, 5851), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((5927, 5982), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "outputs['C2.y']", 'arr_val', '(1e-10)'], {}), "(self, outputs['C2.y'], arr_val, 1e-10)\n", (5943, 5982), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((6202, 6257), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "outputs['C2.y']", 'arr_val', '(1e-10)'], {}), "(self, outputs['C2.y'], arr_val, 1e-10)\n", (6218, 6257), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((6653, 6710), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "residuals['C2.y']", 'arr_val', '(1e-10)'], {}), "(self, residuals['C2.y'], arr_val, 1e-10)\n", (6669, 6710), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((6788, 6845), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "residuals['C2.y']", 'arr_val', '(1e-10)'], {}), "(self, residuals['C2.y'], arr_val, 1e-10)\n", (6804, 6845), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((7069, 7126), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "residuals['C2.y']", 'arr_val', '(1e-10)'], {}), "(self, residuals['C2.y'], arr_val, 1e-10)\n", (7085, 7126), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((7381, 7388), 'openmdao.api.Group', 'Group', ([], {}), '()\n', (7386, 7388), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((8318, 8327), 'openmdao.api.Problem', 'Problem', ([], {}), '()\n', (8325, 8327), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((8869, 8926), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "prob['double.y']", '[2.0, 4.0, 6.0]'], {}), "(self, prob['double.y'], [2.0, 4.0, 6.0])\n", (8885, 8926), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((9096, 9105), 'openmdao.api.Problem', 'Problem', ([], {}), '()\n', (9103, 9105), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((509, 531), 'openmdao.api.IndepVarComp', 'IndepVarComp', (['"""a"""', '(5.0)'], {}), "('a', 5.0)\n", (521, 531), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((585, 592), 'openmdao.api.Group', 'Group', ([], {}), '()\n', (590, 592), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((641, 658), 'openmdao.api.ExecComp', 'ExecComp', (['"""b=2*a"""'], {}), "('b=2*a')\n", (649, 658), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((796, 813), 'openmdao.api.ExecComp', 'ExecComp', (['"""c=2*b"""'], {}), "('c=2*b')\n", (804, 813), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((3692, 3699), 'openmdao.api.Group', 'Group', ([], {}), '()\n', (3697, 3699), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((3748, 3765), 'openmdao.api.ExecComp', 'ExecComp', (['"""b=2*a"""'], {}), "('b=2*a')\n", (3756, 3765), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((4594, 4609), 'numpy.ones', 'np.ones', (['(5, 1)'], {}), '((5, 1))\n', (4601, 4609), True, 'import numpy as np\n'), ((4632, 4643), 'numpy.ones', 'np.ones', (['(10)'], {}), '(10)\n', (4639, 4643), True, 'import numpy as np\n'), ((4798, 4838), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (4815, 4838), False, 'from six import assertRaisesRegex\n'), ((5192, 5232), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (5209, 5232), False, 'from six import assertRaisesRegex\n'), ((5463, 5503), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (5480, 5503), False, 'from six import assertRaisesRegex\n'), ((5640, 5680), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (5657, 5680), False, 'from six import assertRaisesRegex\n'), ((6039, 6079), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (6056, 6079), False, 'from six import assertRaisesRegex\n'), ((6313, 6353), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (6330, 6353), False, 'from six import assertRaisesRegex\n'), ((6493, 6533), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (6510, 6533), False, 'from six import assertRaisesRegex\n'), ((6902, 6942), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (6919, 6942), False, 'from six import assertRaisesRegex\n'), ((7182, 7222), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (7199, 7222), False, 'from six import assertRaisesRegex\n'), ((7592, 7631), 'openmdao.utils.assert_utils.assert_warning', 'assert_warning', (['DeprecationWarning', 'msg'], {}), '(DeprecationWarning, msg)\n', (7606, 7631), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((7691, 7730), 'openmdao.utils.assert_utils.assert_warning', 'assert_warning', (['DeprecationWarning', 'msg'], {}), '(DeprecationWarning, msg)\n', (7705, 7730), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((8027, 8066), 'openmdao.utils.assert_utils.assert_warning', 'assert_warning', (['DeprecationWarning', 'msg'], {}), '(DeprecationWarning, msg)\n', (8041, 8066), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((8126, 8165), 'openmdao.utils.assert_utils.assert_warning', 'assert_warning', (['DeprecationWarning', 'msg'], {}), '(DeprecationWarning, msg)\n', (8140, 8165), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((8371, 8397), 'openmdao.api.IndepVarComp', 'IndepVarComp', (['"""x"""'], {'shape': '(3)'}), "('x', shape=3)\n", (8383, 8397), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((8442, 8462), 'openmdao.test_suite.components.options_feature_vector.VectorDoublingComp', 'VectorDoublingComp', ([], {}), '()\n', (8460, 8462), False, 'from openmdao.test_suite.components.options_feature_vector import VectorDoublingComp\n'), ((8629, 8668), 'openmdao.utils.assert_utils.assert_warning', 'assert_warning', (['DeprecationWarning', 'msg'], {}), '(DeprecationWarning, msg)\n', (8643, 8668), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((9169, 9202), 'openmdao.api.IndepVarComp', 'IndepVarComp', (['"""V"""', '(0.0)'], {'units': '"""V"""'}), "('V', 0.0, units='V')\n", (9181, 9202), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((9241, 9274), 'openmdao.api.IndepVarComp', 'IndepVarComp', (['"""I"""', '(0.1)'], {'units': '"""A"""'}), "('I', 0.1, units='A')\n", (9253, 9274), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((9315, 9324), 'openmdao.test_suite.scripts.circuit_analysis.Circuit', 'Circuit', ([], {}), '()\n', (9322, 9324), False, 'from openmdao.test_suite.scripts.circuit_analysis import Resistor, Diode, Node, Circuit\n'), ((3879, 3895), 'numpy.zeros', 'np.zeros', (['(5, 1)'], {}), '((5, 1))\n', (3887, 3895), True, 'import numpy as np\n'), ((3939, 3955), 'numpy.zeros', 'np.zeros', (['(5, 1)'], {}), '((5, 1))\n', (3947, 3955), True, 'import numpy as np\n'), ((4215, 4231), 'numpy.zeros', 'np.zeros', (['(5, 1)'], {}), '((5, 1))\n', (4223, 4231), True, 'import numpy as np\n'), ((4280, 4296), 'numpy.zeros', 'np.zeros', (['(5, 1)'], {}), '((5, 1))\n', (4288, 4296), True, 'import numpy as np\n')]
fabiangunzinger/sample_project | code/src/db/create_db.py | a5c87d0c3ff2f6ed39f3e3a18557c0ab439f6b42 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import os
import sqlite3
import sys
import pandas as pd
from src import config
def parse_args(argv):
parser = argparse.ArgumentParser()
parser.add_argument('sample')
parser.add_argument('replace')
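    # NOTE: 'replace' is parsed but not currently used by create_database below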
    return parser.parse_args(argv)
def db_tables(connection):
"""List tables in database."""
res = pd.read_sql("select name from sqlite_master", connection)
return res.name.values
def create_database(sample):
"""Create database with tables for targets, outcomes, and predictions."""
db_name = f'{sample}.db'
db_path = os.path.join(config.DATADIR, db_name)
conn = sqlite3.connect(db_path)
usr_name = f'users_{sample}.csv'
usr_path = os.path.join(config.DATADIR, usr_name)
users = pd.read_csv(usr_path)
db_tbls = db_tables(conn)
for tbl in ['decisions', 'outcomes', 'predictions']:
if tbl not in db_tbls:
users.to_sql(tbl, conn, index=False)
conn.execute(f"create index idx_{tbl}_user_id on {tbl}(user_id)")
def main(argv=None):
if argv is None:
        argv = sys.argv[1:]
args = parse_args(argv)
create_database(args.sample)
if __name__ == '__main__':
sys.exit(main())
| [((180, 205), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (203, 205), False, 'import argparse\n'), ((380, 437), 'pandas.read_sql', 'pd.read_sql', (['"""select name from sqlite_master"""', 'connection'], {}), "('select name from sqlite_master', connection)\n", (391, 437), True, 'import pandas as pd\n'), ((617, 654), 'os.path.join', 'os.path.join', (['config.DATADIR', 'db_name'], {}), '(config.DATADIR, db_name)\n', (629, 654), False, 'import os\n'), ((666, 690), 'sqlite3.connect', 'sqlite3.connect', (['db_path'], {}), '(db_path)\n', (681, 690), False, 'import sqlite3\n'), ((743, 781), 'os.path.join', 'os.path.join', (['config.DATADIR', 'usr_name'], {}), '(config.DATADIR, usr_name)\n', (755, 781), False, 'import os\n'), ((794, 815), 'pandas.read_csv', 'pd.read_csv', (['usr_path'], {}), '(usr_path)\n', (805, 815), True, 'import pandas as pd\n')] |
craigderington/responder-persons-api | tests/test_responder.py | d2270d2f761c5dd3dbe253113d410f3e37d4d217 | # coding: utf-8
import pytest
import app as service
import yaml
import responder
from starlette.responses import PlainTextResponse
@pytest.fixture
def api():
return service.api
def test_hello_world(api):
r = api.requests.get("/api/v1.0/index")
assert r.text == "Hello, World!"
def test_basic_route(api):
@api.route("/api/v1.0/index")
def index(req, resp):
resp.text = "Hello, World!"
def test_requests_session(api):
assert api.session()
assert api.requests
def test_json_media(api):
dump = {"life": 42}
@api.route("/")
def media(req, resp):
resp.media = dump
r = api.requests.get("http://;/")
assert "json" in r.headers["Content-Type"]
assert r.json() == dump
def test_yaml_media(api):
dump = {"life": 42}
@api.route("/")
def media(req, resp):
resp.media = dump
r = api.requests.get("http://;/", headers={"Accept": "yaml"})
assert "yaml" in r.headers["Content-Type"]
    assert yaml.load(r.content, Loader=yaml.SafeLoader) == dump
def test_background(api):
@api.route("/")
def route(req, resp):
@api.background.task
def task():
import time
time.sleep(3)
task()
        resp.text = "ok"
r = api.requests.get(api.url_for(route))
assert r.ok
def test_500_error(api):
def catcher(req, exc):
return PlainTextResponse("Suppressed error", 500)
api.app.add_exception_handler(ValueError, catcher)
@api.route("/api/v1.0/index")
def view(req, resp):
raise ValueError
r = api.requests.get(api.url_for(view))
assert not r.ok
assert r.content == b'Suppressed error'
def test_404_error(api):
r = api.requests.get("/api/v1.0/foo")
assert r.status_code == responder.API.status_codes.HTTP_404
| [((993, 1013), 'yaml.load', 'yaml.load', (['r.content'], {}), '(r.content)\n', (1002, 1013), False, 'import yaml\n'), ((1366, 1408), 'starlette.responses.PlainTextResponse', 'PlainTextResponse', (['"""Suppressed error"""', '(500)'], {}), "('Suppressed error', 500)\n", (1383, 1408), False, 'from starlette.responses import PlainTextResponse\n'), ((1181, 1194), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1191, 1194), False, 'import time\n')] |
axdahl/SC-MMGP | examples/solar/p25_nonsparse_cmmgp.py | c6cd9d9de66bb7074925a4b6485f10a74bdd9f68 | # -*- coding: utf-8 -*-
"""
Script to execute example covarying MMGP regression forecasting model
with full Krhh.
Inputs: Data training and test sets (dictionary pickle)
Data for example:
- normalised solar data for 25 sites for 15 minute forecast
- N_train = 4200, N_test = 2276, P = 25, D = 51
- Xtr[:, :50] 2 recent lagged observations for each site in order
- Xtr[:, 50] time index
- link inputs is a 25x2 array (link inputs repeated for every group)
with normalised lat,long for each site in order
Model Options:
- Sparse or full x-function covariance prior Krhh (set bool SPARSE_PRIOR)
- Diagonal or Kronecker-structured variational posterior covariance Sr (set bool DIAG_POST)
- Sparse or full posterior covariance (when Kronecker posterior; set bool SPARSE_POST)
Current Settings (non-sparse covarying mmgp model with sparse Kronecker posterior):
DIAG_POST = False
SPARSE_PRIOR = False # set True for equivalent sparse scmmgp model
SPARSE_POST = True
Note on specifying group structure for F:
Grouping occurs via block_struct, a nested list of grouping order
Where functions [i] are independent i.e. in own block, set link_kernel[i] = link_inputs[i] = 1.0
See model class preamble and example below for further details.
"""
import os
import numpy as np
import pickle
import pandas as pd
import traceback
import time
import sklearn.cluster
import csv
import sys
import mmgp
from mmgp import likelihoods
from mmgp import kernels
import tensorflow as tf
from mmgp import datasets
from mmgp import losses
from mmgp import util
dpath = '/experiments/datasets/'
dfile = 'p25_inputsdict.pickle'
dlinkfile = 'p25_linkinputsarray.pickle'
outdir = '/experiments/results/p25_nonsparse_cmmgp/'
try:
os.makedirs(outdir)
except FileExistsError:
pass
def get_inputs():
"""
inputsdict contains {'Yte': Yte, 'Ytr': Ytr, 'Xtr': Xtr, 'Xte': Xte} where values are np.arrays
np. arrays are truncated to evenly split into batches of size = batchsize
returns inputsdict, Xtr_link (ndarray, shape = [P, D_link_features])
"""
with open(os.path.join(dpath, dfile), 'rb') as f:
d_all = pickle.load(f)
with open(os.path.join(dpath, dlinkfile), 'rb') as f:
d_link = pickle.load(f)
return d_all, d_link
def init_z(train_inputs, num_inducing):
# Initialize inducing points using clustering.
mini_batch = sklearn.cluster.MiniBatchKMeans(num_inducing)
cluster_indices = mini_batch.fit_predict(train_inputs)
inducing_locations = mini_batch.cluster_centers_
return inducing_locations
FLAGS = util.util.get_flags()
BATCH_SIZE = FLAGS.batch_size
LEARNING_RATE = FLAGS.learning_rate
DISPLAY_STEP = FLAGS.display_step
EPOCHS = FLAGS.n_epochs
NUM_SAMPLES = FLAGS.mc_train
PRED_SAMPLES = FLAGS.mc_test
NUM_INDUCING = FLAGS.n_inducing
NUM_COMPONENTS = FLAGS.num_components
IS_ARD = FLAGS.is_ard
TOL = FLAGS.opt_tol
VAR_STEPS = FLAGS.var_steps
DIAG_POST = False
SPARSE_PRIOR = False
SPARSE_POST = True # option for non-diag post
MAXTIME = 1200
print("settings done")
# define GPRN P and Q
output_dim = 25 #P
node_dim = 25 #Q
lag_dim = 2
save_nlpds = False # If True saves samples of nlpds for n,p,s
# extract dataset
d, d_link = get_inputs()
Ytr, Yte, Xtr, Xte = d['Ytr'], d['Yte'], d['Xtr'], d['Xte']
data = datasets.DataSet(Xtr.astype(np.float32), Ytr.astype(np.float32), shuffle=False)
test = datasets.DataSet(Xte.astype(np.float32), Yte.astype(np.float32), shuffle=False)
print("dataset created")
# model config block rows (where P=Q): block all w.1, w.2 etc, leave f independent
# order of block_struct is rows, node functions
# lists required: block_struct, link_inputs, kern_link, kern
#block_struct nested list of grouping order
weight_struct = [[] for _ in range(output_dim)]
for i in range(output_dim):
row = list(range(i, i+output_dim*(node_dim-1)+1, output_dim))
row_0 = row.pop(i) # bring diag to pivot position
weight_struct[i] = [row_0] + row
nodes = [[x] for x in list(range(output_dim * node_dim, output_dim * node_dim + output_dim))]
block_struct = weight_struct + nodes
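# Illustrative result for a small case (computed from the loop above): with
# output_dim = node_dim = 2, block_struct == [[0, 2], [3, 1], [4], [5]] --
# each weight row is grouped with its diagonal entry pivoted to the front,
# and each node function sits in its own independent block.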
# create link inputs (link inputs used repeatedly but can have link input per group)
# permute to bring diagonal to first position
link_inputs = [[] for _ in range(output_dim)]
for i in range(output_dim):
idx = list(range(d_link.shape[0]))
link_inputs[i] = d_link[[idx.pop(i)] + idx, :]
link_inputs = link_inputs + [1.0 for i in range(output_dim)] # for full W row blocks, independent nodes
# create 'between' kernel list
klink_rows = [kernels.CompositeKernel('mul',[kernels.RadialBasis(2, std_dev=2.0, lengthscale=1.0, white=0.01, input_scaling = IS_ARD),
kernels.CompactSlice(2, active_dims=[0,1], lengthscale = 2.0, input_scaling = IS_ARD)] )
for i in range(output_dim) ]
klink_f = [1.0 for i in range(node_dim)]
kernlink = klink_rows + klink_f
# create 'within' kernel
# kern
lag_active_dims_s = [ [] for _ in range(output_dim)]
for i in range(output_dim):
lag_active_dims_s[i] = list(range(lag_dim*i, lag_dim*(i+1)))
k_rows = [kernels.CompositeKernel('mul',[kernels.RadialBasisSlice(lag_dim, active_dims=lag_active_dims_s[i],
std_dev = 1.0, white = 0.01, input_scaling = IS_ARD),
kernels.PeriodicSliceFixed(1, active_dims=[Xtr.shape[1]-1],
lengthscale=0.5, std_dev=1.0, period = 144) ])
for i in range(output_dim)]
k_f = [kernels.RadialBasisSlice(lag_dim, active_dims=lag_active_dims_s[i], std_dev = 1.0, white = 0.01, input_scaling = IS_ARD)
for i in range(output_dim)]
kern = k_rows + k_f
print('len link_inputs ',len(link_inputs))
print('len kernlink ',len(kernlink))
print('len kern ', len(kern))
print('no. groups = ', len(block_struct), 'no. latent functions =', len([i for b in block_struct for i in b]))
print('number latent functions', node_dim*(output_dim+1))
likelihood = likelihoods.CovaryingRegressionNetwork(output_dim, node_dim, std_dev = 0.2) # p, q, lik_noise
print("likelihood and kernels set")
Z = init_z(data.X, NUM_INDUCING)
print('inducing points set')
m = mmgp.ExplicitSCMMGP(output_dim, likelihood, kern, kernlink, block_struct, Z, link_inputs,
num_components=NUM_COMPONENTS, diag_post=DIAG_POST, sparse_prior=SPARSE_PRIOR,
sparse_post=SPARSE_POST, num_samples=NUM_SAMPLES, predict_samples=PRED_SAMPLES)
print("model set")
# initialise losses and logging
error_rate = losses.RootMeanSqError(data.Dout)
os.chdir(outdir)
with open("log_results.csv", 'w', newline='') as f:
csv.writer(f).writerow(['epoch', 'fit_runtime', 'nelbo', error_rate.get_name(),'generalised_nlpd'])
with open("log_params.csv", 'w', newline='') as f:
csv.writer(f).writerow(['epoch', 'raw_kernel_params', 'raw_kernlink_params', 'raw_likelihood_params', 'raw_weights'])
with open("log_comp_time.csv", 'w', newline='') as f:
csv.writer(f).writerow(['epoch', 'batch_time', 'nelbo_time', 'pred_time', 'gen_nlpd_time', error_rate.get_name()+'_time'])
# optimise
o = tf.train.AdamOptimizer(LEARNING_RATE, beta1=0.9,beta2=0.99)
print("start time = ", time.strftime('%X %x %Z'))
m.fit(data, o, var_steps = VAR_STEPS, epochs = EPOCHS, batch_size = BATCH_SIZE, display_step=DISPLAY_STEP,
test = test, loss = error_rate, tolerance = TOL, max_time=MAXTIME )
print("optimisation complete")
# export final predicted values and loss metrics
ypred = m.predict(test.X, batch_size = BATCH_SIZE) #same batchsize used for convenience
np.savetxt("predictions.csv", np.concatenate(ypred, axis=1), delimiter=",")
if save_nlpds == True:
nlpd_samples, nlpd_meanvar = m.nlpd_samples(test.X, test.Y, batch_size = BATCH_SIZE)
try:
np.savetxt("nlpd_meanvar.csv", nlpd_meanvar, delimiter=",") # N x 2P as for predictions
except:
print('nlpd_meanvar export fail')
try:
np.savetxt("nlpd_samples.csv", nlpd_samples, delimiter=",") # NP x S (NxS concat for P tasks)
except:
print('nlpd_samples export fail')
print("Final " + error_rate.get_name() + "=" + "%.4f" % error_rate.eval(test.Y, ypred[0]))
print("Final " + "generalised_nlpd" + "=" + "%.4f" % m.nlpd_general(test.X, test.Y, batch_size = BATCH_SIZE))
error_rate_end = [losses.MeanAbsError(data.Dout)] # any extra accuracy measures at end of routine
print("Final ", [e.get_name() for e in error_rate_end])
print([e.eval(test.Y, ypred[0]) for e in error_rate_end])
predvar = [np.mean(np.mean(ypred[1]))]
print("Final predvar ", predvar)
with open("final_losses.csv", 'w', newline='') as f:
csv.writer(f).writerows([[e.get_name() for e in error_rate_end] + ['pred_var'],
[e.eval(test.Y, ypred[0]) for e in error_rate_end] + predvar])
print("finish time = " + time.strftime('%X %x %Z'))
| [((2670, 2691), 'mmgp.util.util.get_flags', 'util.util.get_flags', ([], {}), '()\n', (2689, 2691), False, 'from mmgp import util\n'), ((6258, 6331), 'mmgp.likelihoods.CovaryingRegressionNetwork', 'likelihoods.CovaryingRegressionNetwork', (['output_dim', 'node_dim'], {'std_dev': '(0.2)'}), '(output_dim, node_dim, std_dev=0.2)\n', (6296, 6331), False, 'from mmgp import likelihoods\n'), ((6459, 6720), 'mmgp.ExplicitSCMMGP', 'mmgp.ExplicitSCMMGP', (['output_dim', 'likelihood', 'kern', 'kernlink', 'block_struct', 'Z', 'link_inputs'], {'num_components': 'NUM_COMPONENTS', 'diag_post': 'DIAG_POST', 'sparse_prior': 'SPARSE_PRIOR', 'sparse_post': 'SPARSE_POST', 'num_samples': 'NUM_SAMPLES', 'predict_samples': 'PRED_SAMPLES'}), '(output_dim, likelihood, kern, kernlink, block_struct, Z,\n link_inputs, num_components=NUM_COMPONENTS, diag_post=DIAG_POST,\n sparse_prior=SPARSE_PRIOR, sparse_post=SPARSE_POST, num_samples=\n NUM_SAMPLES, predict_samples=PRED_SAMPLES)\n', (6478, 6720), False, 'import mmgp\n'), ((6789, 6822), 'mmgp.losses.RootMeanSqError', 'losses.RootMeanSqError', (['data.Dout'], {}), '(data.Dout)\n', (6811, 6822), False, 'from mmgp import losses\n'), ((6826, 6842), 'os.chdir', 'os.chdir', (['outdir'], {}), '(outdir)\n', (6834, 6842), False, 'import os\n'), ((7378, 7438), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['LEARNING_RATE'], {'beta1': '(0.9)', 'beta2': '(0.99)'}), '(LEARNING_RATE, beta1=0.9, beta2=0.99)\n', (7400, 7438), True, 'import tensorflow as tf\n'), ((1792, 1811), 'os.makedirs', 'os.makedirs', (['outdir'], {}), '(outdir)\n', (1803, 1811), False, 'import os\n'), ((5777, 5896), 'mmgp.kernels.RadialBasisSlice', 'kernels.RadialBasisSlice', (['lag_dim'], {'active_dims': 'lag_active_dims_s[i]', 'std_dev': '(1.0)', 'white': '(0.01)', 'input_scaling': 'IS_ARD'}), '(lag_dim, active_dims=lag_active_dims_s[i], std_dev\n =1.0, white=0.01, input_scaling=IS_ARD)\n', (5801, 5896), False, 'from mmgp import kernels\n'), ((7462, 7487), 'time.strftime', 'time.strftime', (['"""%X %x %Z"""'], {}), "('%X %x %Z')\n", (7475, 7487), False, 'import time\n'), ((7883, 7912), 'numpy.concatenate', 'np.concatenate', (['ypred'], {'axis': '(1)'}), '(ypred, axis=1)\n', (7897, 7912), True, 'import numpy as np\n'), ((8603, 8633), 'mmgp.losses.MeanAbsError', 'losses.MeanAbsError', (['data.Dout'], {}), '(data.Dout)\n', (8622, 8633), False, 'from mmgp import losses\n'), ((2214, 2228), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2225, 2228), False, 'import pickle\n'), ((2308, 2322), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2319, 2322), False, 'import pickle\n'), ((8064, 8123), 'numpy.savetxt', 'np.savetxt', (['"""nlpd_meanvar.csv"""', 'nlpd_meanvar'], {'delimiter': '""","""'}), "('nlpd_meanvar.csv', nlpd_meanvar, delimiter=',')\n", (8074, 8123), True, 'import numpy as np\n'), ((8228, 8287), 'numpy.savetxt', 'np.savetxt', (['"""nlpd_samples.csv"""', 'nlpd_samples'], {'delimiter': '""","""'}), "('nlpd_samples.csv', nlpd_samples, delimiter=',')\n", (8238, 8287), True, 'import numpy as np\n'), ((8819, 8836), 'numpy.mean', 'np.mean', (['ypred[1]'], {}), '(ypred[1])\n', (8826, 8836), True, 'import numpy as np\n'), ((9134, 9159), 'time.strftime', 'time.strftime', (['"""%X %x %Z"""'], {}), "('%X %x %Z')\n", (9147, 9159), False, 'import time\n'), ((2157, 2183), 'os.path.join', 'os.path.join', (['dpath', 'dfile'], {}), '(dpath, dfile)\n', (2169, 2183), False, 'import os\n'), ((2246, 2276), 'os.path.join', 'os.path.join', (['dpath', 'dlinkfile'], {}), '(dpath, dlinkfile)\n', (2258, 
2276), False, 'import os\n'), ((4725, 4815), 'mmgp.kernels.RadialBasis', 'kernels.RadialBasis', (['(2)'], {'std_dev': '(2.0)', 'lengthscale': '(1.0)', 'white': '(0.01)', 'input_scaling': 'IS_ARD'}), '(2, std_dev=2.0, lengthscale=1.0, white=0.01,\n input_scaling=IS_ARD)\n', (4744, 4815), False, 'from mmgp import kernels\n'), ((4860, 4947), 'mmgp.kernels.CompactSlice', 'kernels.CompactSlice', (['(2)'], {'active_dims': '[0, 1]', 'lengthscale': '(2.0)', 'input_scaling': 'IS_ARD'}), '(2, active_dims=[0, 1], lengthscale=2.0, input_scaling=\n IS_ARD)\n', (4880, 4947), False, 'from mmgp import kernels\n'), ((5332, 5451), 'mmgp.kernels.RadialBasisSlice', 'kernels.RadialBasisSlice', (['lag_dim'], {'active_dims': 'lag_active_dims_s[i]', 'std_dev': '(1.0)', 'white': '(0.01)', 'input_scaling': 'IS_ARD'}), '(lag_dim, active_dims=lag_active_dims_s[i], std_dev\n =1.0, white=0.01, input_scaling=IS_ARD)\n', (5356, 5451), False, 'from mmgp import kernels\n'), ((5544, 5652), 'mmgp.kernels.PeriodicSliceFixed', 'kernels.PeriodicSliceFixed', (['(1)'], {'active_dims': '[Xtr.shape[1] - 1]', 'lengthscale': '(0.5)', 'std_dev': '(1.0)', 'period': '(144)'}), '(1, active_dims=[Xtr.shape[1] - 1], lengthscale=\n 0.5, std_dev=1.0, period=144)\n', (5570, 5652), False, 'from mmgp import kernels\n'), ((6901, 6914), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (6911, 6914), False, 'import csv\n'), ((7058, 7071), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (7068, 7071), False, 'import csv\n'), ((7236, 7249), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (7246, 7249), False, 'import csv\n'), ((8934, 8947), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (8944, 8947), False, 'import csv\n')] |
cfreebuf/kubeedge-examples | cruiser-lib/test/positioning/test_position_hl_commander.py | 9b2ab402c33546215a0a9e02e92f5b0aa88bcff9 | # -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2018 Bitcraze AB
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import math
import sys
import unittest
from cflib.crazyflie import Crazyflie
from cflib.crazyflie import HighLevelCommander
from cflib.crazyflie import Param
from cflib.positioning.position_hl_commander import PositionHlCommander
if sys.version_info < (3, 3):
from mock import MagicMock, patch, call
else:
from unittest.mock import MagicMock, patch, call
@patch('time.sleep')
class TestPositionHlCommander(unittest.TestCase):
def setUp(self):
self.commander_mock = MagicMock(spec=HighLevelCommander)
self.param_mock = MagicMock(spec=Param)
self.cf_mock = MagicMock(spec=Crazyflie)
self.cf_mock.high_level_commander = self.commander_mock
self.cf_mock.param = self.param_mock
self.cf_mock.is_connected.return_value = True
self.sut = PositionHlCommander(self.cf_mock)
def test_that_the_estimator_is_reset_on_take_off(
self, sleep_mock):
# Fixture
sut = PositionHlCommander(self.cf_mock, 1.0, 2.0, 3.0)
# Test
sut.take_off()
# Assert
self.param_mock.set_value.assert_has_calls([
call('kalman.initialX', '{:.2f}'.format(1.0)),
call('kalman.initialY', '{:.2f}'.format(2.0)),
call('kalman.initialZ', '{:.2f}'.format(3.0)),
call('kalman.resetEstimation', '1'),
call('kalman.resetEstimation', '0')
])
def test_that_the_hi_level_commander_is_activated_on_take_off(
self, sleep_mock):
# Fixture
# Test
self.sut.take_off()
# Assert
self.param_mock.set_value.assert_has_calls([
call('commander.enHighLevel', '1')
])
def test_that_controller_is_selected_on_take_off(
self, sleep_mock):
# Fixture
self.sut.set_controller(PositionHlCommander.CONTROLLER_MELLINGER)
# Test
self.sut.take_off()
# Assert
self.param_mock.set_value.assert_has_calls([
call('stabilizer.controller', '2')
])
def test_that_take_off_raises_exception_if_not_connected(
self, sleep_mock):
# Fixture
self.cf_mock.is_connected.return_value = False
# Test
# Assert
with self.assertRaises(Exception):
self.sut.take_off()
def test_that_take_off_raises_exception_when_already_flying(
self, sleep_mock):
# Fixture
self.sut.take_off()
# Test
# Assert
with self.assertRaises(Exception):
self.sut.take_off()
def test_that_it_goes_up_on_take_off(
self, sleep_mock):
# Fixture
# Test
self.sut.take_off(height=0.4, velocity=0.6)
# Assert
duration = 0.4 / 0.6
self.commander_mock.takeoff.assert_called_with(0.4, duration)
sleep_mock.assert_called_with(duration)
def test_that_it_goes_up_to_default_height(
self, sleep_mock):
# Fixture
sut = PositionHlCommander(self.cf_mock, default_height=0.4)
# Test
sut.take_off(velocity=0.6)
# Assert
duration = 0.4 / 0.6
self.commander_mock.takeoff.assert_called_with(0.4, duration)
sleep_mock.assert_called_with(duration)
def test_that_it_goes_down_on_landing(
self, sleep_mock):
# Fixture
self.sut.take_off(height=0.4)
# Test
self.sut.land(velocity=0.6)
# Assert
duration = 0.4 / 0.6
self.commander_mock.land.assert_called_with(0.0, duration)
sleep_mock.assert_called_with(duration)
def test_that_it_takes_off_and_lands_as_context_manager(
self, sleep_mock):
# Fixture
# Test
with self.sut:
pass
# Assert
duration1 = 0.5 / 0.5
duration2 = 0.5 / 0.5
self.commander_mock.takeoff.assert_called_with(0.5, duration1)
self.commander_mock.land.assert_called_with(0.0, duration2)
sleep_mock.assert_called_with(duration1)
sleep_mock.assert_called_with(duration2)
def test_that_it_returns_current_position(
self, sleep_mock):
# Fixture
self.sut.take_off(height=0.4, velocity=0.6)
# Test
actual = self.sut.get_position()
# Assert
self.assertEqual(actual, (0.0, 0.0, 0.4))
def test_that_it_goes_to_position(
self, sleep_mock):
# Fixture
self.sut.take_off()
inital_pos = self.sut.get_position()
# Test
self.sut.go_to(1.0, 2.0, 3.0, 4.0)
# Assert
distance = self._distance(inital_pos, (1.0, 2.0, 3.0))
duration = distance / 4.0
self.commander_mock.go_to.assert_called_with(
1.0, 2.0, 3.0, 0.0, duration)
sleep_mock.assert_called_with(duration)
def test_that_it_moves_distance(
self, sleep_mock):
# Fixture
self.sut.take_off()
inital_pos = self.sut.get_position()
# Test
self.sut.move_distance(1.0, 2.0, 3.0, 4.0)
# Assert
distance = self._distance((0.0, 0.0, 0.0), (1.0, 2.0, 3.0))
duration = distance / 4.0
final_pos = (
inital_pos[0] + 1.0,
inital_pos[1] + 2.0,
inital_pos[2] + 3.0)
self.commander_mock.go_to.assert_called_with(
final_pos[0], final_pos[1], final_pos[2], 0.0, duration)
sleep_mock.assert_called_with(duration)
def test_that_it_goes_forward(
self, sleep_mock):
# Fixture
self.sut.take_off()
inital_pos = self.sut.get_position()
# Test
self.sut.forward(1.0, 2.0)
# Assert
duration = 1.0 / 2.0
final_pos = (
inital_pos[0] + 1.0,
inital_pos[1],
inital_pos[2])
self.commander_mock.go_to.assert_called_with(
final_pos[0], final_pos[1], final_pos[2], 0.0, duration)
sleep_mock.assert_called_with(duration)
def test_that_it_goes_back(
self, sleep_mock):
# Fixture
self.sut.take_off()
inital_pos = self.sut.get_position()
# Test
self.sut.back(1.0, 2.0)
# Assert
duration = 1.0 / 2.0
final_pos = (
inital_pos[0] - 1.0,
inital_pos[1],
inital_pos[2])
self.commander_mock.go_to.assert_called_with(
final_pos[0], final_pos[1], final_pos[2], 0.0, duration)
sleep_mock.assert_called_with(duration)
def test_that_it_goes_left(
self, sleep_mock):
# Fixture
self.sut.take_off()
inital_pos = self.sut.get_position()
# Test
self.sut.left(1.0, 2.0)
# Assert
duration = 1.0 / 2.0
final_pos = (
inital_pos[0],
inital_pos[1] + 1.0,
inital_pos[2])
self.commander_mock.go_to.assert_called_with(
final_pos[0], final_pos[1], final_pos[2], 0.0, duration)
sleep_mock.assert_called_with(duration)
def test_that_it_goes_right(
self, sleep_mock):
# Fixture
self.sut.take_off()
inital_pos = self.sut.get_position()
# Test
self.sut.right(1.0, 2.0)
# Assert
duration = 1.0 / 2.0
final_pos = (
inital_pos[0],
inital_pos[1] - 1,
inital_pos[2])
self.commander_mock.go_to.assert_called_with(
final_pos[0], final_pos[1], final_pos[2], 0, duration)
sleep_mock.assert_called_with(duration)
def test_that_it_goes_up(
self, sleep_mock):
# Fixture
self.sut.take_off()
inital_pos = self.sut.get_position()
# Test
self.sut.up(1.0, 2.0)
# Assert
duration = 1.0 / 2.0
final_pos = (
inital_pos[0],
inital_pos[1],
inital_pos[2] + 1)
self.commander_mock.go_to.assert_called_with(
final_pos[0], final_pos[1], final_pos[2], 0, duration)
sleep_mock.assert_called_with(duration)
def test_that_it_goes_down(
self, sleep_mock):
# Fixture
self.sut.take_off()
inital_pos = self.sut.get_position()
# Test
self.sut.down(1.0, 2.0)
# Assert
duration = 1.0 / 2.0
final_pos = (
inital_pos[0],
inital_pos[1],
inital_pos[2] - 1)
self.commander_mock.go_to.assert_called_with(
final_pos[0], final_pos[1], final_pos[2], 0, duration)
sleep_mock.assert_called_with(duration)
def test_that_default_velocity_is_used(
self, sleep_mock):
# Fixture
self.sut.take_off()
inital_pos = self.sut.get_position()
self.sut.set_default_velocity(7)
# Test
self.sut.go_to(1.0, 2.0, 3.0)
# Assert
distance = self._distance(inital_pos, (1.0, 2.0, 3.0))
duration = distance / 7.0
self.commander_mock.go_to.assert_called_with(
1.0, 2.0, 3.0, 0.0, duration)
sleep_mock.assert_called_with(duration)
def test_that_default_height_is_used(
self, sleep_mock):
# Fixture
self.sut.take_off()
inital_pos = self.sut.get_position()
self.sut.set_default_velocity(7.0)
self.sut.set_default_height(5.0)
# Test
self.sut.go_to(1.0, 2.0)
# Assert
distance = self._distance(inital_pos, (1.0, 2.0, 5.0))
duration = distance / 7.0
self.commander_mock.go_to.assert_called_with(
1.0, 2.0, 5.0, 0.0, duration)
sleep_mock.assert_called_with(duration)
######################################################################
def _distance(self, p1, p2):
dx = p1[0] - p2[0]
dy = p1[1] - p2[1]
dz = p1[2] - p2[2]
return math.sqrt(dx * dx + dy * dy + dz * dz)
if __name__ == '__main__':
unittest.main()
| [((1401, 1420), 'unittest.mock.patch', 'patch', (['"""time.sleep"""'], {}), "('time.sleep')\n", (1406, 1420), False, 'from unittest.mock import MagicMock, patch, call\n'), ((11056, 11071), 'unittest.main', 'unittest.main', ([], {}), '()\n', (11069, 11071), False, 'import unittest\n'), ((1522, 1556), 'unittest.mock.MagicMock', 'MagicMock', ([], {'spec': 'HighLevelCommander'}), '(spec=HighLevelCommander)\n', (1531, 1556), False, 'from unittest.mock import MagicMock, patch, call\n'), ((1583, 1604), 'unittest.mock.MagicMock', 'MagicMock', ([], {'spec': 'Param'}), '(spec=Param)\n', (1592, 1604), False, 'from unittest.mock import MagicMock, patch, call\n'), ((1628, 1653), 'unittest.mock.MagicMock', 'MagicMock', ([], {'spec': 'Crazyflie'}), '(spec=Crazyflie)\n', (1637, 1653), False, 'from unittest.mock import MagicMock, patch, call\n'), ((1837, 1870), 'cflib.positioning.position_hl_commander.PositionHlCommander', 'PositionHlCommander', (['self.cf_mock'], {}), '(self.cf_mock)\n', (1856, 1870), False, 'from cflib.positioning.position_hl_commander import PositionHlCommander\n'), ((1989, 2037), 'cflib.positioning.position_hl_commander.PositionHlCommander', 'PositionHlCommander', (['self.cf_mock', '(1.0)', '(2.0)', '(3.0)'], {}), '(self.cf_mock, 1.0, 2.0, 3.0)\n', (2008, 2037), False, 'from cflib.positioning.position_hl_commander import PositionHlCommander\n'), ((4038, 4091), 'cflib.positioning.position_hl_commander.PositionHlCommander', 'PositionHlCommander', (['self.cf_mock'], {'default_height': '(0.4)'}), '(self.cf_mock, default_height=0.4)\n', (4057, 4091), False, 'from cflib.positioning.position_hl_commander import PositionHlCommander\n'), ((10984, 11022), 'math.sqrt', 'math.sqrt', (['(dx * dx + dy * dy + dz * dz)'], {}), '(dx * dx + dy * dy + dz * dz)\n', (10993, 11022), False, 'import math\n'), ((2338, 2373), 'unittest.mock.call', 'call', (['"""kalman.resetEstimation"""', '"""1"""'], {}), "('kalman.resetEstimation', '1')\n", (2342, 2373), False, 'from unittest.mock import MagicMock, patch, call\n'), ((2387, 2422), 'unittest.mock.call', 'call', (['"""kalman.resetEstimation"""', '"""0"""'], {}), "('kalman.resetEstimation', '0')\n", (2391, 2422), False, 'from unittest.mock import MagicMock, patch, call\n'), ((2678, 2712), 'unittest.mock.call', 'call', (['"""commander.enHighLevel"""', '"""1"""'], {}), "('commander.enHighLevel', '1')\n", (2682, 2712), False, 'from unittest.mock import MagicMock, patch, call\n'), ((3029, 3063), 'unittest.mock.call', 'call', (['"""stabilizer.controller"""', '"""2"""'], {}), "('stabilizer.controller', '2')\n", (3033, 3063), False, 'from unittest.mock import MagicMock, patch, call\n')] |
NaomiatLibrary/OpenNMT-kpg-release | onmt/keyphrase/pke/unsupervised/graph_based/expandrank.py | 1da3468d7dad22529a77f3526abf9b373bd3dc4c | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Florian Boudin
# Date: 10-02-2018
"""ExpandRank keyphrase extraction model.
Graph-based ranking approach to keyphrase extraction described in:
* Xiaojun Wan and Jianguo Xiao.
Single Document Keyphrase Extraction Using Neighborhood Knowledge.
*In proceedings of AAAI*, pages 855-860, 2008.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from __future__ import absolute_import
from onmt.keyphrase.pke.unsupervised import SingleRank
from onmt.keyphrase.pke.base import LoadFile
import networkx as nx
import logging
class ExpandRank(SingleRank):
"""ExpandRank keyphrase extraction model.
Parameterized example::
import pke
import string
from nltk.corpus import stopwords
# 1. create an ExpandRank extractor.
extractor = pke.unsupervised.ExpandRank()
# 2. load the content of the document.
extractor.load_document(input='path/to/input.xml')
# 3. select the the longest sequences of nouns and adjectives, that do
# not contain punctuation marks or stopwords as candidates.
pos = {'NOUN', 'PROPN', 'ADJ'}
stoplist = list(string.punctuation)
stoplist += ['-lrb-', '-rrb-', '-lcb-', '-rcb-', '-lsb-', '-rsb-']
stoplist += stopwords.words('english')
extractor.candidate_selection(pos=pos, stoplist=stoplist)
# 4. weight the candidates using the sum of their word's scores that are
# computed using random walk. In the graph, nodes are words (nouns
# and adjectives only) that are connected if they occur in a window
# of 10 words. A set of extra documents should be provided to expand
# the graph.
expanded_documents = [('path/to/input1.xml', similarity1),
('path/to/input2.xml', similarity2)]
extractor.candidate_weighting(window=10,
pos=pos,
expanded_documents=expanded_documents,
format='corenlp')
# 5. get the 10-highest scored candidates as keyphrases
keyphrases = extractor.get_n_best(n=10)
"""
def __init__(self):
""" Redefining initializer for ExpandRank. """
super(ExpandRank, self).__init__()
def expand_word_graph(self,
input_file,
similarity,
window=10,
pos=None):
"""Expands the word graph using the given document.
Args:
input_file (str): path to the input file.
similarity (float): similarity for weighting edges.
window (int): the window within the sentence for connecting two
words in the graph, defaults to 10.
pos (set): the set of valid pos for words to be considered as nodes
in the graph, defaults to ('NOUN', 'PROPN', 'ADJ').
"""
# define default pos tags set
if pos is None:
pos = {'NOUN', 'PROPN', 'ADJ'}
# initialize document loader
doc = LoadFile()
# read document
doc.load_document(input=input_file,
language=self.language,
normalization=self.normalization)
# flatten document and initialize nodes
sequence = []
for sentence in doc.sentences:
for j, node in enumerate(sentence.stems):
if node not in self.graph and sentence.pos[j] in pos:
self.graph.add_node(node)
sequence.append((node, sentence.pos[j]))
# loop through sequence to build the edges in the graph
for j, node_1 in enumerate(sequence):
for k in range(j + 1, min(j + window, len(sequence))):
node_2 = sequence[k]
if node_1[1] in pos and node_2[1] in pos \
and node_1[0] != node_2[0]:
if not self.graph.has_edge(node_1[0], node_2[0]):
self.graph.add_edge(node_1[0], node_2[0], weight=0)
self.graph[node_1[0]][node_2[0]]['weight'] += similarity
def candidate_weighting(self,
window=10,
pos=None,
expanded_documents=None,
normalized=False):
"""Candidate ranking using random walk.
Args:
window (int): the window within the sentence for connecting two
words in the graph, defaults to 10.
pos (set): the set of valid pos for words to be considered as nodes
in the graph, defaults to ('NOUN', 'PROPN', 'ADJ').
expanded_documents (list): the set of documents to expand the graph,
should be a list of tuples (input_path, similarity). Defaults to
empty list, i.e. no expansion.
normalized (False): normalize keyphrase score by their length,
defaults to False.
"""
# define default pos tags set
if pos is None:
pos = {'NOUN', 'PROPN', 'ADJ'}
if expanded_documents is None:
expanded_documents = []
logging.warning('No neighbor documents provided for ExpandRank.')
# build the word graph
self.build_word_graph(window=window, pos=pos)
# expand the word graph
for input_file, similarity in expanded_documents:
self.expand_word_graph(input_file=input_file,
similarity=similarity,
window=window,
pos=pos)
# compute the word scores using random walk
w = nx.pagerank_scipy(self.graph, alpha=0.85, weight='weight')
# loop through the candidates
for k in self.candidates.keys():
tokens = self.candidates[k].lexical_form
self.weights[k] = sum([w[t] for t in tokens])
if normalized:
self.weights[k] /= len(tokens)
| [((3236, 3246), 'onmt.keyphrase.pke.base.LoadFile', 'LoadFile', ([], {}), '()\n', (3244, 3246), False, 'from onmt.keyphrase.pke.base import LoadFile\n'), ((5908, 5966), 'networkx.pagerank_scipy', 'nx.pagerank_scipy', (['self.graph'], {'alpha': '(0.85)', 'weight': '"""weight"""'}), "(self.graph, alpha=0.85, weight='weight')\n", (5925, 5966), True, 'import networkx as nx\n'), ((5390, 5455), 'logging.warning', 'logging.warning', (['"""No neighbor documents provided for ExpandRank."""'], {}), "('No neighbor documents provided for ExpandRank.')\n", (5405, 5455), False, 'import logging\n')] |
mmeidlinger/cdk-microservices-labs | 5-serverless-xray-stack/app.py | a646c05d4bb0950f9915f9b15f810e82ba8d4e9a | #!/usr/bin/env python3
from aws_cdk import core
from fagate_serverless.fagate_serverless_stack import FagateServerlessStack
app = core.App()
FagateServerlessStack(app, "serverless-xray-stack")
app.synth()
| [((134, 144), 'aws_cdk.core.App', 'core.App', ([], {}), '()\n', (142, 144), False, 'from aws_cdk import core\n'), ((145, 196), 'fagate_serverless.fagate_serverless_stack.FagateServerlessStack', 'FagateServerlessStack', (['app', '"""serverless-xray-stack"""'], {}), "(app, 'serverless-xray-stack')\n", (166, 196), False, 'from fagate_serverless.fagate_serverless_stack import FagateServerlessStack\n')] |
nickmelnikov82/dash | dash/long_callback/managers/celery_manager.py | e774908da770bee83f3213e0307c27ed8a40500e | import json
import inspect
import hashlib
from _plotly_utils.utils import PlotlyJSONEncoder
from dash.long_callback.managers import BaseLongCallbackManager
class CeleryLongCallbackManager(BaseLongCallbackManager):
def __init__(self, celery_app, cache_by=None, expire=None):
"""
Long callback manager that runs callback logic on a celery task queue,
and stores results using a celery result backend.
:param celery_app:
A celery.Celery application instance that must be configured with a
result backend. See the celery documentation for information on
configuration options.
:param cache_by:
A list of zero-argument functions. When provided, caching is enabled and
the return values of these functions are combined with the callback
function's input arguments and source code to generate cache keys.
:param expire:
If provided, a cache entry will be removed when it has not been accessed
for ``expire`` seconds. If not provided, the lifetime of cache entries
is determined by the default behavior of the celery result backend.
"""
try:
import celery # pylint: disable=import-outside-toplevel,import-error
from celery.backends.base import ( # pylint: disable=import-outside-toplevel,import-error
DisabledBackend,
)
except ImportError as missing_imports:
raise ImportError(
"""\
CeleryLongCallbackManager requires extra dependencies which can be installed doing
$ pip install "dash[celery]"\n"""
) from missing_imports
if not isinstance(celery_app, celery.Celery):
raise ValueError("First argument must be a celery.Celery object")
if isinstance(celery_app.backend, DisabledBackend):
raise ValueError("Celery instance must be configured with a result backend")
super().__init__(cache_by)
self.handle = celery_app
self.expire = expire
def terminate_job(self, job):
if job is None:
return
self.handle.control.terminate(job)
def terminate_unhealthy_job(self, job):
task = self.get_task(job)
if task and task.status in ("FAILURE", "REVOKED"):
return self.terminate_job(job)
return False
def job_running(self, job):
future = self.get_task(job)
return future and future.status in (
"PENDING",
"RECEIVED",
"STARTED",
"RETRY",
"PROGRESS",
)
def make_job_fn(self, fn, progress, args_deps):
return _make_job_fn(fn, self.handle, progress, args_deps)
def get_task(self, job):
if job:
return self.handle.AsyncResult(job)
return None
def clear_cache_entry(self, key):
self.handle.backend.delete(key)
def call_job_fn(self, key, job_fn, args):
task = job_fn.delay(key, self._make_progress_key(key), args)
return task.task_id
def get_progress(self, key):
progress_key = self._make_progress_key(key)
progress_data = self.handle.backend.get(progress_key)
if progress_data:
return json.loads(progress_data)
return None
def result_ready(self, key):
return self.handle.backend.get(key) is not None
def get_result(self, key, job):
# Get result value
result = self.handle.backend.get(key)
if result is None:
return None
result = json.loads(result)
# Clear result if not caching
if self.cache_by is None:
self.clear_cache_entry(key)
else:
if self.expire:
# Set/update expiration time
self.handle.backend.expire(key, self.expire)
self.clear_cache_entry(self._make_progress_key(key))
self.terminate_job(job)
return result
def _make_job_fn(fn, celery_app, progress, args_deps):
cache = celery_app.backend
# Hash function source and module to create a unique (but stable) celery task name
fn_source = inspect.getsource(fn)
fn_str = fn_source
fn_hash = hashlib.sha1(fn_str.encode("utf-8")).hexdigest()
@celery_app.task(name=f"long_callback_{fn_hash}")
def job_fn(result_key, progress_key, user_callback_args, fn=fn):
def _set_progress(progress_value):
cache.set(progress_key, json.dumps(progress_value, cls=PlotlyJSONEncoder))
maybe_progress = [_set_progress] if progress else []
if isinstance(args_deps, dict):
user_callback_output = fn(*maybe_progress, **user_callback_args)
elif isinstance(args_deps, (list, tuple)):
user_callback_output = fn(*maybe_progress, *user_callback_args)
else:
user_callback_output = fn(*maybe_progress, user_callback_args)
cache.set(result_key, json.dumps(user_callback_output, cls=PlotlyJSONEncoder))
return job_fn
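

# Illustrative setup (hypothetical broker/backend URLs; any Celery result
# backend satisfies the constructor check above):
#   celery_app = celery.Celery(__name__, broker="redis://localhost:6379/0",
#                              backend="redis://localhost:6379/1")
#   manager = CeleryLongCallbackManager(celery_app, expire=60)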
| [((4202, 4223), 'inspect.getsource', 'inspect.getsource', (['fn'], {}), '(fn)\n', (4219, 4223), False, 'import inspect\n'), ((3614, 3632), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (3624, 3632), False, 'import json\n'), ((3298, 3323), 'json.loads', 'json.loads', (['progress_data'], {}), '(progress_data)\n', (3308, 3323), False, 'import json\n'), ((4990, 5045), 'json.dumps', 'json.dumps', (['user_callback_output'], {'cls': 'PlotlyJSONEncoder'}), '(user_callback_output, cls=PlotlyJSONEncoder)\n', (5000, 5045), False, 'import json\n'), ((4513, 4562), 'json.dumps', 'json.dumps', (['progress_value'], {'cls': 'PlotlyJSONEncoder'}), '(progress_value, cls=PlotlyJSONEncoder)\n', (4523, 4562), False, 'import json\n')] |
victor-kironde/botbuilder-python | libraries/botframework-connector/botframework/connector/token_api/_token_api_client.py | e893d9b036d7cf33cf9c9afd1405450c354cdbcd | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import SDKClient
from msrest import Serializer, Deserializer
from ._configuration import TokenApiClientConfiguration
from .operations import BotSignInOperations
from .operations import UserTokenOperations
from . import models
class TokenApiClient(SDKClient):
"""TokenApiClient
:ivar config: Configuration for client.
:vartype config: TokenApiClientConfiguration
:ivar bot_sign_in: BotSignIn operations
:vartype bot_sign_in: botframework.tokenapi.operations.BotSignInOperations
:ivar user_token: UserToken operations
:vartype user_token: botframework.tokenapi.operations.UserTokenOperations
:param credentials: Subscription credentials which uniquely identify
client subscription.
:type credentials: None
:param str base_url: Service URL
"""
def __init__(self, credentials, base_url=None):
self.config = TokenApiClientConfiguration(credentials, base_url)
super(TokenApiClient, self).__init__(self.config.credentials, self.config)
client_models = {
k: v for k, v in models.__dict__.items() if isinstance(v, type)
}
self.api_version = "token"
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self.bot_sign_in = BotSignInOperations(
self._client, self.config, self._serialize, self._deserialize
)
self.user_token = UserTokenOperations(
self._client, self.config, self._serialize, self._deserialize
)
| [((1681, 1706), 'msrest.Serializer', 'Serializer', (['client_models'], {}), '(client_models)\n', (1691, 1706), False, 'from msrest import Serializer, Deserializer\n'), ((1735, 1762), 'msrest.Deserializer', 'Deserializer', (['client_models'], {}), '(client_models)\n', (1747, 1762), False, 'from msrest import Serializer, Deserializer\n')] |
shikshan/soppi | soppi/sample.py | 007f654b0e9fe4bf7fc09e967615cb205a67dbaa | # content of test_sample.py
def inc(x: int) -> int:
return x + 1
| [] |
fairhopeweb/saleor | saleor/order/migrations/0081_auto_20200406_0456.py | 9ac6c22652d46ba65a5b894da5f1ba5bec48c019 | # Generated by Django 3.0.4 on 2020-04-06 09:56
from django.db import migrations
from saleor.order import OrderStatus
def match_orders_with_users(apps, *_args, **_kwargs):
Order = apps.get_model("order", "Order")
User = apps.get_model("account", "User")
orders_without_user = Order.objects.filter(
user_email__isnull=False, user=None
).exclude(status=OrderStatus.DRAFT)
for order in orders_without_user:
try:
new_user = User.objects.get(email=order.user_email)
except User.DoesNotExist:
continue
order.user = new_user
order.save(update_fields=["user"])
class Migration(migrations.Migration):
dependencies = [
("order", "0080_invoice"),
]
operations = [
migrations.RunPython(match_orders_with_users),
]
| [((774, 819), 'django.db.migrations.RunPython', 'migrations.RunPython', (['match_orders_with_users'], {}), '(match_orders_with_users)\n', (794, 819), False, 'from django.db import migrations\n')] |
jhpark428/studio | function/python/brightics/function/textanalytics/regex.py | 539457b3026dda827c1b17b4cb851946e34e3b85 | """
Copyright 2019 Samsung SDS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from brightics.common.utils import check_required_parameters
from brightics.common.exception import BrighticsFunctionException
from .data import regex_format_dict
import re
def regex(table, **params):
check_required_parameters(_regex, params, ['table'])
return _regex(table, **params)
def _regex(table, input_cols, transformation_mode='extract', find_mode='all', pattern='',
user_dict_pattern='', custom_pattern='', replacement_string='', user_dict=None):
out_table = table.copy()
pattern_dict = regex_format_dict.pattern_dict
user_pattern_dict = {}
if user_dict is not None:
user_patterns = user_dict.values
for user_pattern in user_patterns:
user_pattern_name = user_pattern[0]
user_pattern_content = user_pattern[1]
user_pattern_dict[user_pattern_name] = user_pattern_dict.get(user_pattern_name, []) + [user_pattern_content]
user_pattern_dict = {key: r'|'.join(value) for key, value in user_pattern_dict.items()}
if pattern == '':
raise BrighticsFunctionException.from_errors([{'0100': "Please choose a pattern."}])
if pattern == 'custom':
raw_pattern = custom_pattern
elif pattern == 'user_dictionary':
raw_pattern = user_pattern_dict.get(user_dict_pattern)
if raw_pattern is None:
raise BrighticsFunctionException.from_errors(
[{'0100': user_dict_pattern + " is not a valid pattern name in the user dictionary."}])
else:
raw_pattern = pattern_dict.get(pattern)
regex_pattern = re.compile(raw_pattern)
def transformation(text):
if transformation_mode == 'extract':
if find_mode == 'first':
result = regex_pattern.search(text)
if result is None:
return ""
else:
return result.group()
else: # find_mode == 'all'
return regex_pattern.findall(text)
elif transformation_mode == 'replace':
if find_mode == 'first':
return regex_pattern.sub(replacement_string, text, 1)
else: # find_mode == 'all'
return regex_pattern.sub(replacement_string, text)
elif transformation_mode == 'remove':
if find_mode == 'first':
return regex_pattern.sub("", text, 1)
else: # find_mode == 'all'
return regex_pattern.sub("", text)
else: # transformation_mode == 'split'
if find_mode == 'first':
return regex_pattern.split(text, 1)
else: # find_mode == 'all'
return regex_pattern.split(text)
for col in input_cols:
result_col = table[col].apply(transformation)
out_table['regex_' + col] = result_col
return {'out_table': out_table}
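

# Illustrative call (hypothetical DataFrame/column names):
#   result = regex(df, input_cols=['text'], transformation_mode='remove',
#                  find_mode='all', pattern='custom', custom_pattern=r'\d+')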
| [((820, 872), 'brightics.common.utils.check_required_parameters', 'check_required_parameters', (['_regex', 'params', "['table']"], {}), "(_regex, params, ['table'])\n", (845, 872), False, 'from brightics.common.utils import check_required_parameters\n'), ((2179, 2202), 're.compile', 're.compile', (['raw_pattern'], {}), '(raw_pattern)\n', (2189, 2202), False, 'import re\n'), ((1661, 1739), 'brightics.common.exception.BrighticsFunctionException.from_errors', 'BrighticsFunctionException.from_errors', (["[{'0100': 'Please choose a pattern.'}]"], {}), "([{'0100': 'Please choose a pattern.'}])\n", (1699, 1739), False, 'from brightics.common.exception import BrighticsFunctionException\n'), ((1957, 2087), 'brightics.common.exception.BrighticsFunctionException.from_errors', 'BrighticsFunctionException.from_errors', (["[{'0100': user_dict_pattern +\n ' is not a valid pattern name in the user dictionary.'}]"], {}), "([{'0100': user_dict_pattern +\n ' is not a valid pattern name in the user dictionary.'}])\n", (1995, 2087), False, 'from brightics.common.exception import BrighticsFunctionException\n')] |
travc/outbreak-reporter | bin/temperature_functions.py | 0f03ca66993827ae1866d09e3cf5d9f6d4acb633 | #!/usr/bin/env python3
import sys
import os
import logging
import numpy as np
import pandas as pd
import dateutil
def tempF2C(x): return (x-32.0)*5.0/9.0
def tempC2F(x): return (x*9.0/5.0)+32.0
def load_temperature_hdf5(temps_fn, local_time_offset, basedir=None, start_year=None, truncate_to_full_day=False):
## Load temperature
# temps_fn = "{}_AT_cleaned.h5".format(station_callsign)
logging.info("Using saved temperatures file '{}'".format(temps_fn))
if basedir is not None:
temps_fn = os.path.join(basedir, temps_fn)
tempdf = pd.read_hdf(temps_fn, 'table')
tmp = local_time_offset.split(':')
tmp = int(tmp[0])*3600+int(tmp[1])*60
sitetz = dateutil.tz.tzoffset(local_time_offset, tmp)
tempdf.index = tempdf.index.tz_convert(sitetz)
if truncate_to_full_day:
x = tempdf.index[-1]
if x.hour != 23:
x = x-pd.Timedelta(days=1)
tmp = '{:04d}-{:02d}-{:02d}'.format(x.year, x.month, x.day)
tempdf = tempdf.loc[:tmp]
if start_year is not None:
tempdf = tempdf.loc['{}-01-01'.format(start_year):]
logging.info("Temperature data date range used: {} through {}".format(tempdf.index[0], tempdf.index[-1]))
return tempdf
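

# Illustrative call (hypothetical station callsign in the filename, following
# the "{station_callsign}_AT_cleaned.h5" pattern noted above):
#   tempdf = load_temperature_hdf5('KNYC_AT_cleaned.h5', '-05:00',
#                                  start_year=2010, truncate_to_full_day=True)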
def load_temperature_csv(fn, local_time_offset=None):
t = pd.read_csv(fn, index_col=0)
if local_time_offset is not None:
tmp = local_time_offset.split(':')
tmp = int(tmp[0])*3600+int(tmp[1])*60
sitetz = dateutil.tz.tzoffset(local_time_offset, tmp)
#t.index = pd.to_datetime(t.index).tz_localize('UTC').tz_convert(sitetz) # @TCC this fails if csv contains datetimes with TZ
t.index = pd.to_datetime(t.index)
try:
t.index = t.index.tz_localize('UTC')
except TypeError:
pass
t.index = t.index.tz_convert(sitetz)
return t
# Function which computes BM (single sine method) degree day generation from temperature data
def compute_BMDD_Fs(tmin, tmax, base_temp, dd_gen):
# Used internally
def _compute_daily_BM_DD(mint, maxt, avet, base_temp):
"""Use standard Baskerville-Ermin (single sine) degree-day method
to compute the degree-day values for each a single day.
"""
if avet is None:
avet = (mint+maxt)/2.0 # simple midpoint (like in the refs)
dd = np.nan # value which we're computing
# Step 1: Adjust for observation time; not relevant
# Step 2: GDD = 0 if max < base (curve all below base)
if maxt < base_temp:
dd = 0
# Step 3: Calc mean temp for day; already done previously
# Step 4: min > base; then whole curve counts
elif mint >= base_temp:
dd = avet - base_temp
# Step 5: else use curve minus part below base
else:
W = (maxt-mint)/2.0
tmp = (base_temp-avet) / W
if tmp < -1:
print('WARNING: (base_temp-avet)/W = {} : should be [-1:1]'.format(tmp))
tmp = -1
if tmp > 1:
print('WARNING: (base_temp-avet)/W = {} : should be [-1:1]'.format(tmp))
tmp = 1
A = np.arcsin(tmp)
dd = ((W*np.cos(A))-((base_temp-avet)*((np.pi/2.0)-A)))/np.pi
return dd
# compute the degree-days for each day in the temperature input (from tmin and tmax vectors)
dd = pd.concat([tmin,tmax], axis=1)
dd.columns = ['tmin', 'tmax']
dd['DD'] = dd.apply(lambda x: _compute_daily_BM_DD(x[0], x[1], (x[0]+x[1])/2.0, base_temp), axis=1)
# compute the degree-days for each day in the temperature input (from a daily groupby)
# grp = t.groupby(pd.TimeGrouper('D'))
# dd = grp.agg(lambda x: _compute_daily_BM_DD(np.min(x), np.max(x), None, base_temp))
# dd.columns = ['DD']
# Find the point where cumulative sums of degree days cross the threshold
cDD = dd['DD'].cumsum(skipna=True)
for cumdd_threshold,label in [[1*dd_gen,'F1'], [2*dd_gen,'F2'], [3*dd_gen,'F3']]:
dtmp = np.zeros(len(dd['DD']))*np.nan
tmp = np.searchsorted(cDD, cDD+(cumdd_threshold)-dd['DD'], side='left').astype(float)
tmp[tmp>=len(tmp)] = np.nan
#dd[label+'_idx'] = tmp
# convert those indexes into end times
e = pd.Series(index=dd.index, dtype='float64')#, dtype='datetime64[ns]')
#e[~np.isnan(tmp)] = dd.index[tmp[~np.isnan(tmp)].astype(int)] # @TCC previous code
e.loc[~np.isnan(tmp)] = dd.index[tmp[~np.isnan(tmp)].astype(int)]
e.loc[np.isnan(tmp)] = np.nan
dd[label+'_end'] = e
# and duration...
#dd[label] = (e-dd.index+pd.Timedelta(days=1)).apply(lambda x: np.nan if pd.isnull(x) else x.days) # @TCC previous code
dd[label] = (pd.to_datetime(e)-dd.index+pd.Timedelta(days=1)).apply(lambda x: np.nan if pd.isnull(x) else x.days)
#dd.loc[np.isnan(tmp), label] = np.nan
print("DD dataframe min values\n", dd.min())
return dd
def compute_year_over_year_norm(in_dataframe,
start, end,
norm_start=None, norm_end=None,
freq='daily',
interp_method='linear',
norm_method='mean'):
"""
Parameters
----------
start: convertable to Datetime
start range of dates to output
end: convertable to Datetime
end range of dates to output
norm_start : convertable to Datetime or None
`None` will use in_dataframe.index[0]
norm_end : convertable to Datetime or None
if given (not None), output range does not include `norm_end` (it is half-open)
`None` will use in_dataframe.index[-1]
freq : {'daily', 'hourly'}
interp_method : str or None
`None` will skip resample and interpolation, so
`in_dataframe` must already be daily or hourly (depending on `freq`)!
norm_method : {'mean', 'median'}
"""
if freq == 'hourly':
hrs = 24
hrs_freq = '1h'
elif freq == 'daily':
hrs = 1
hrs_freq = '24h'
else:
raise ValueError("Invalid `freq` argument value: {}".format(freq))
if norm_start is None:
norm_start = in_dataframe.index[0]
if norm_end is None:
norm_end = in_dataframe.index[-1]
else:
norm_end = pd.to_datetime([norm_end])[0] - pd.Timedelta('1 second')
print('Computing using range:', norm_start, 'to', norm_end)
if interp_method is None: # skip resample+interpolation (assumes in_dataframe is daily!)
t = in_dataframe.loc[norm_start:norm_end]
else: # resample and interpolate to get hourly
t = in_dataframe.resample(hrs_freq).interpolate(method=interp_method).loc[norm_start:norm_end]
if norm_method == 'mean':
norm = t.groupby([t.index.month, t.index.day, t.index.hour]).mean().sort_index()
elif norm_method == 'median':
norm = t.groupby([t.index.month, t.index.day, t.index.hour]).median().sort_index()
else:
assert False, "Error: Unknown norm_method '{}'".format(norm_method)
# now replicate and trim to the desired output range
start = pd.to_datetime(start)
end = pd.to_datetime(end)
# need a non-leapyear and leapyear version
norm_ly = norm.copy()
if norm.shape[0] == 366*hrs:
norm = norm.drop((2,29,))
else: # norm doesn't include any leapyear data
assert norm.shape[0] == 365*hrs
# make Feb 29 the mean of Feb 28 and Mar 1
foo = (norm.loc[(2,28,)] + norm.loc[(3,1,)]) / 2.0
foo.index = pd.MultiIndex.from_product( ([2],[29],list(range(hrs))) )
norm_ly = pd.concat((norm_ly,foo)).sort_index()
norm_ly.sort_index(inplace=True) # probably not needed
# build up a 'long normal' (lnorm) dataframe year by year by appending the norm or norm_ly
lnorm = None
for yr in np.arange(start.year, end.year+1):
#print(yr)
idx = pd.date_range(start='{}-{:02d}-{:02d} {:02d}:00:00'.format(yr,*norm.index[0]),
end= '{}-{:02d}-{:02d} {:02d}:00:00'.format(yr,*norm.index[-1]),
freq=hrs_freq)
if idx.shape[0] == 366*hrs:
foo = norm_ly.copy()
else:
assert norm.shape[0] == 365*hrs
foo = norm.copy()
foo.index = idx
if lnorm is None:
lnorm = foo
else:
lnorm = lnorm.append(foo)
return lnorm.loc[start:end]
| [((562, 592), 'pandas.read_hdf', 'pd.read_hdf', (['temps_fn', '"""table"""'], {}), "(temps_fn, 'table')\n", (573, 592), True, 'import pandas as pd\n'), ((688, 732), 'dateutil.tz.tzoffset', 'dateutil.tz.tzoffset', (['local_time_offset', 'tmp'], {}), '(local_time_offset, tmp)\n', (708, 732), False, 'import dateutil\n'), ((1300, 1328), 'pandas.read_csv', 'pd.read_csv', (['fn'], {'index_col': '(0)'}), '(fn, index_col=0)\n', (1311, 1328), True, 'import pandas as pd\n'), ((3385, 3416), 'pandas.concat', 'pd.concat', (['[tmin, tmax]'], {'axis': '(1)'}), '([tmin, tmax], axis=1)\n', (3394, 3416), True, 'import pandas as pd\n'), ((7182, 7203), 'pandas.to_datetime', 'pd.to_datetime', (['start'], {}), '(start)\n', (7196, 7203), True, 'import pandas as pd\n'), ((7214, 7233), 'pandas.to_datetime', 'pd.to_datetime', (['end'], {}), '(end)\n', (7228, 7233), True, 'import pandas as pd\n'), ((7900, 7935), 'numpy.arange', 'np.arange', (['start.year', '(end.year + 1)'], {}), '(start.year, end.year + 1)\n', (7909, 7935), True, 'import numpy as np\n'), ((517, 548), 'os.path.join', 'os.path.join', (['basedir', 'temps_fn'], {}), '(basedir, temps_fn)\n', (529, 548), False, 'import os\n'), ((1473, 1517), 'dateutil.tz.tzoffset', 'dateutil.tz.tzoffset', (['local_time_offset', 'tmp'], {}), '(local_time_offset, tmp)\n', (1493, 1517), False, 'import dateutil\n'), ((1669, 1692), 'pandas.to_datetime', 'pd.to_datetime', (['t.index'], {}), '(t.index)\n', (1683, 1692), True, 'import pandas as pd\n'), ((4275, 4317), 'pandas.Series', 'pd.Series', ([], {'index': 'dd.index', 'dtype': '"""float64"""'}), "(index=dd.index, dtype='float64')\n", (4284, 4317), True, 'import pandas as pd\n'), ((6393, 6417), 'pandas.Timedelta', 'pd.Timedelta', (['"""1 second"""'], {}), "('1 second')\n", (6405, 6417), True, 'import pandas as pd\n'), ((886, 906), 'pandas.Timedelta', 'pd.Timedelta', ([], {'days': '(1)'}), '(days=1)\n', (898, 906), True, 'import pandas as pd\n'), ((3171, 3185), 'numpy.arcsin', 'np.arcsin', (['tmp'], {}), '(tmp)\n', (3180, 3185), True, 'import numpy as np\n'), ((4068, 4135), 'numpy.searchsorted', 'np.searchsorted', (['cDD', "(cDD + cumdd_threshold - dd['DD'])"], {'side': '"""left"""'}), "(cDD, cDD + cumdd_threshold - dd['DD'], side='left')\n", (4083, 4135), True, 'import numpy as np\n'), ((4525, 4538), 'numpy.isnan', 'np.isnan', (['tmp'], {}), '(tmp)\n', (4533, 4538), True, 'import numpy as np\n'), ((6361, 6387), 'pandas.to_datetime', 'pd.to_datetime', (['[norm_end]'], {}), '([norm_end])\n', (6375, 6387), True, 'import pandas as pd\n'), ((7672, 7697), 'pandas.concat', 'pd.concat', (['(norm_ly, foo)'], {}), '((norm_ly, foo))\n', (7681, 7697), True, 'import pandas as pd\n'), ((4452, 4465), 'numpy.isnan', 'np.isnan', (['tmp'], {}), '(tmp)\n', (4460, 4465), True, 'import numpy as np\n'), ((4780, 4800), 'pandas.Timedelta', 'pd.Timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4792, 4800), True, 'import pandas as pd\n'), ((4828, 4840), 'pandas.isnull', 'pd.isnull', (['x'], {}), '(x)\n', (4837, 4840), True, 'import pandas as pd\n'), ((4753, 4770), 'pandas.to_datetime', 'pd.to_datetime', (['e'], {}), '(e)\n', (4767, 4770), True, 'import pandas as pd\n'), ((3207, 3216), 'numpy.cos', 'np.cos', (['A'], {}), '(A)\n', (3213, 3216), True, 'import numpy as np\n'), ((4483, 4496), 'numpy.isnan', 'np.isnan', (['tmp'], {}), '(tmp)\n', (4491, 4496), True, 'import numpy as np\n')] |
lkusch/Kratos | applications/CSharpWrapperApplication/tests/test_CSharpWrapperApplication.py | e8072d8e24ab6f312765185b19d439f01ab7b27b | # import Kratos
import KratosMultiphysics
import KratosMultiphysics.StructuralMechanicsApplication as StructuralMechanicsApplication
import KratosMultiphysics.CSharpWrapperApplication as CSharpWrapperApplication
import run_cpp_unit_tests
# Import Kratos "wrapper" for unittests
import KratosMultiphysics.KratosUnittest as KratosUnittest
# Import subprocess
import subprocess
# Using kratos_utilities
import KratosMultiphysics.kratos_utilities as kratos_utilities
if kratos_utilities.CheckIfApplicationsAvailable("ExternalSolversApplication"):
has_external_solvers_application = True
else:
has_external_solvers_application = False
# Import the tests o test_classes to create the suits
## SMALL TESTS
## NIGTHLY TESTS
## VALIDATION TESTS
def AssembleTestSuites():
''' Populates the test suites to run.
Populates the test suites to run. At least, it should pupulate the suites:
"small", "nighlty" and "all"
Return
------
suites: A dictionary of suites
The set of suites with its test_cases added.
'''
suites = KratosUnittest.KratosSuites
# Create a test suit with the selected tests (Small tests):
smallSuite = suites['small']
# Create a test suit with the selected tests plus all small tests
nightlySuite = suites['nightly']
### BEGIN SMALL SUITE ###
### END SMALL SUITE ###
### BEGIN NIGHTLY SUITE ###
### END VALIDATION SUITE ###
### BEGIN VALIDATION SUITE ###
# For very long tests that should not be in nighly and you can use to validate
validationSuite = suites['validation']
validationSuite.addTests(nightlySuite)
### END VALIDATION ###
# Create a test suit that contains all the tests:
allSuite = suites['all']
allSuite.addTests(nightlySuite) # Already contains the smallSuite
validationSuite.addTests(allSuite) # Validation contains all
# Manual list for debugging
#allSuite.addTests(
#KratosUnittest.TestLoader().loadTestsFromTestCases([
#### STANDALONE
#### SMALL
#### NIGTHLY
#### VALIDATION
#])
#)
return suites
if __name__ == '__main__':
KratosMultiphysics.Logger.PrintInfo("Unittests", "\nRunning cpp unit tests ...")
run_cpp_unit_tests.run()
KratosMultiphysics.Logger.PrintInfo("Unittests", "Finished running cpp unit tests!")
KratosMultiphysics.Logger.PrintInfo("Unittests", "\nRunning python tests ...")
KratosUnittest.runTests(AssembleTestSuites())
KratosMultiphysics.Logger.PrintInfo("Unittests", "Finished python tests!")
| [((469, 544), 'KratosMultiphysics.kratos_utilities.CheckIfApplicationsAvailable', 'kratos_utilities.CheckIfApplicationsAvailable', (['"""ExternalSolversApplication"""'], {}), "('ExternalSolversApplication')\n", (514, 544), True, 'import KratosMultiphysics.kratos_utilities as kratos_utilities\n'), ((2172, 2259), 'KratosMultiphysics.Logger.PrintInfo', 'KratosMultiphysics.Logger.PrintInfo', (['"""Unittests"""', '"""\nRunning cpp unit tests ..."""'], {}), '(\'Unittests\',\n """\nRunning cpp unit tests ...""")\n', (2207, 2259), False, 'import KratosMultiphysics\n'), ((2257, 2281), 'run_cpp_unit_tests.run', 'run_cpp_unit_tests.run', ([], {}), '()\n', (2279, 2281), False, 'import run_cpp_unit_tests\n'), ((2286, 2374), 'KratosMultiphysics.Logger.PrintInfo', 'KratosMultiphysics.Logger.PrintInfo', (['"""Unittests"""', '"""Finished running cpp unit tests!"""'], {}), "('Unittests',\n 'Finished running cpp unit tests!')\n", (2321, 2374), False, 'import KratosMultiphysics\n'), ((2376, 2461), 'KratosMultiphysics.Logger.PrintInfo', 'KratosMultiphysics.Logger.PrintInfo', (['"""Unittests"""', '"""\nRunning python tests ..."""'], {}), '(\'Unittests\',\n """\nRunning python tests ...""")\n', (2411, 2461), False, 'import KratosMultiphysics\n'), ((2509, 2583), 'KratosMultiphysics.Logger.PrintInfo', 'KratosMultiphysics.Logger.PrintInfo', (['"""Unittests"""', '"""Finished python tests!"""'], {}), "('Unittests', 'Finished python tests!')\n", (2544, 2583), False, 'import KratosMultiphysics\n')] |
mezidia/mezidia-airlines-backend | backend/api/models.py | bc0b27b785f45ac83552f7fbb879cd977171c2fc | from sqlalchemy import Column, Integer, String, ForeignKey, Float
from sqlalchemy.orm import relationship
from .database import Base
class Post(Base):
__tablename__ = "posts"
id = Column(Integer, primary_key=True, nullable=False, index=True)
full_name = Column(String, nullable=False)
last_place = Column(String, nullable=False)
description = Column(String, nullable=False)
percentage = Column(Float, default=0.0)
image = Column(String)
owner_id = Column(Integer, ForeignKey("users.id"))
creator = relationship("User", back_populates="posts")
class User(Base):
__tablename__ = "users"
id = Column(Integer, primary_key=True, nullable=False, index=True)
name = Column(String, nullable=False)
email = Column(String, nullable=False, unique=True)
password = Column(String, nullable=False)
phone_number = Column(String, nullable=False)
posts = relationship("Post", back_populates="creator")
class Code(Base):
__tablename__ = "codes"
id = Column(Integer, primary_key=True, nullable=False, index=True)
code = Column(Integer, nullable=False)
| [((192, 253), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)', 'nullable': '(False)', 'index': '(True)'}), '(Integer, primary_key=True, nullable=False, index=True)\n', (198, 253), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, Float\n'), ((270, 300), 'sqlalchemy.Column', 'Column', (['String'], {'nullable': '(False)'}), '(String, nullable=False)\n', (276, 300), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, Float\n'), ((318, 348), 'sqlalchemy.Column', 'Column', (['String'], {'nullable': '(False)'}), '(String, nullable=False)\n', (324, 348), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, Float\n'), ((367, 397), 'sqlalchemy.Column', 'Column', (['String'], {'nullable': '(False)'}), '(String, nullable=False)\n', (373, 397), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, Float\n'), ((415, 441), 'sqlalchemy.Column', 'Column', (['Float'], {'default': '(0.0)'}), '(Float, default=0.0)\n', (421, 441), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, Float\n'), ((454, 468), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (460, 468), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, Float\n'), ((539, 583), 'sqlalchemy.orm.relationship', 'relationship', (['"""User"""'], {'back_populates': '"""posts"""'}), "('User', back_populates='posts')\n", (551, 583), False, 'from sqlalchemy.orm import relationship\n'), ((642, 703), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)', 'nullable': '(False)', 'index': '(True)'}), '(Integer, primary_key=True, nullable=False, index=True)\n', (648, 703), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, Float\n'), ((715, 745), 'sqlalchemy.Column', 'Column', (['String'], {'nullable': '(False)'}), '(String, nullable=False)\n', (721, 745), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, Float\n'), ((758, 801), 'sqlalchemy.Column', 'Column', (['String'], {'nullable': '(False)', 'unique': '(True)'}), '(String, nullable=False, unique=True)\n', (764, 801), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, Float\n'), ((817, 847), 'sqlalchemy.Column', 'Column', (['String'], {'nullable': '(False)'}), '(String, nullable=False)\n', (823, 847), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, Float\n'), ((867, 897), 'sqlalchemy.Column', 'Column', (['String'], {'nullable': '(False)'}), '(String, nullable=False)\n', (873, 897), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, Float\n'), ((911, 957), 'sqlalchemy.orm.relationship', 'relationship', (['"""Post"""'], {'back_populates': '"""creator"""'}), "('Post', back_populates='creator')\n", (923, 957), False, 'from sqlalchemy.orm import relationship\n'), ((1016, 1077), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)', 'nullable': '(False)', 'index': '(True)'}), '(Integer, primary_key=True, nullable=False, index=True)\n', (1022, 1077), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, Float\n'), ((1089, 1120), 'sqlalchemy.Column', 'Column', (['Integer'], {'nullable': '(False)'}), '(Integer, nullable=False)\n', (1095, 1120), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, Float\n'), ((500, 522), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""users.id"""'], {}), "('users.id')\n", (510, 522), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, Float\n')] |
Ganeshrockz/Flask-Python-Dev | run.py | 522b280484e8f4cf3877b378a1334c501ffbc41e | from flask import Flask, flash, render_template, redirect, url_for
from flask.ext.pymongo import PyMongo
from flask import request
app=Flask(__name__)
app.config['MONGO_DBNAME']='stud'
app.config['MONGO_URI']='mongodb://localhost:27017/stud'
mongo=PyMongo(app)
"""
@app.route('/add')
def add():
user=mongo.db.users
user.insert({"name":"Ganesh","age":19})
return "Added"
@app.route('/find')
def find():
user=mongo.db.users
data=user.find_one({"name":"Ganesh"})
return data["name"]
"""
@app.route('/',methods=['GET', 'POST'])
def dashboard():
if request.method == 'POST':
name=request.form['name']
passw=request.form['password']
if name=="admin123" and passw=="12345":
return redirect(url_for('display'))
else:
return render_template("dashboard.html",err="Login Failed")
else:
return render_template("dashboard.html")
@app.route('/form',methods=['GET', 'POST'])
def form():
if request.method == 'POST':
user=mongo.db.student
rollno=request.form['rollno']
name=request.form['name']
address=request.form['address']
year=request.form['year']
skills=request.form['skills']
phone=request.form['phone']
email=request.form['emailid']
user.insert({"Rollnumber":rollno,"StudentName":name,"Address":address,"Year":year,"Skills":skills,"PhoneNumber":phone,"EmailId":email})
return redirect(url_for('dashboard'))
else:
return render_template("form.html")
@app.route('/display',methods=['GET', 'POST'])
def display():
data=mongo.db.student
record=[]
for rec in data.find():
record.append({"Rollnumber":rec["Rollnumber"],"StudentName":rec["StudentName"],"Address":rec["Address"],"Year":rec["Year"],"Skills":rec["Skills"],"PhoneNumber":rec["PhoneNumber"],"EmailId":rec["EmailId"]})
app.logger.info(record)
return render_template("display.html", studentdata=record)
if __name__ == '__main__':
app.secret_key = 'ganeshrockz'
app.run(debug=True)
| [((135, 150), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (140, 150), False, 'from flask import Flask, flash, render_template, redirect, url_for\n'), ((248, 260), 'flask.ext.pymongo.PyMongo', 'PyMongo', (['app'], {}), '(app)\n', (255, 260), False, 'from flask.ext.pymongo import PyMongo\n'), ((2007, 2058), 'flask.render_template', 'render_template', (['"""display.html"""'], {'studentdata': 'record'}), "('display.html', studentdata=record)\n", (2022, 2058), False, 'from flask import Flask, flash, render_template, redirect, url_for\n'), ((883, 916), 'flask.render_template', 'render_template', (['"""dashboard.html"""'], {}), "('dashboard.html')\n", (898, 916), False, 'from flask import Flask, flash, render_template, redirect, url_for\n'), ((1590, 1618), 'flask.render_template', 'render_template', (['"""form.html"""'], {}), "('form.html')\n", (1605, 1618), False, 'from flask import Flask, flash, render_template, redirect, url_for\n'), ((805, 858), 'flask.render_template', 'render_template', (['"""dashboard.html"""'], {'err': '"""Login Failed"""'}), "('dashboard.html', err='Login Failed')\n", (820, 858), False, 'from flask import Flask, flash, render_template, redirect, url_for\n'), ((1543, 1563), 'flask.url_for', 'url_for', (['"""dashboard"""'], {}), "('dashboard')\n", (1550, 1563), False, 'from flask import Flask, flash, render_template, redirect, url_for\n'), ((752, 770), 'flask.url_for', 'url_for', (['"""display"""'], {}), "('display')\n", (759, 770), False, 'from flask import Flask, flash, render_template, redirect, url_for\n')] |
jussiarpalahti/respa | resources/tests/conftest.py | c308bcb96e56d9401e22df94d3073e248618e243 | # -*- coding: utf-8 -*-
import pytest
import datetime
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from rest_framework.test import APIClient, APIRequestFactory
from resources.enums import UnitAuthorizationLevel
from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period
from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup
from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility
from munigeo.models import Municipality
@pytest.fixture
def api_client():
return APIClient()
@pytest.fixture
def staff_api_client(staff_user):
api_client = APIClient()
api_client.force_authenticate(user=staff_user)
return api_client
@pytest.fixture
def user_api_client(user):
api_client = APIClient()
api_client.force_authenticate(user=user)
return api_client
@pytest.fixture(params=[None, 'user', 'staff_user'])
def all_user_types_api_client(request):
api_client = APIClient()
if request.param:
api_client.force_authenticate(request.getfixturevalue(request.param))
return api_client
@pytest.fixture
def api_rf():
return APIRequestFactory()
@pytest.mark.django_db
@pytest.fixture
def space_resource_type():
return ResourceType.objects.get_or_create(id="test_space", name="test_space", main_type="space")[0]
@pytest.mark.django_db
@pytest.fixture
def space_resource(space_resource_type):
return Resource.objects.create(type=space_resource_type, authentication="none", name="resource")
@pytest.mark.django_db
@pytest.fixture
def test_unit():
return Unit.objects.create(name="unit", time_zone='Europe/Helsinki')
@pytest.fixture
def test_unit2():
return Unit.objects.create(name="unit 2", time_zone='Europe/Helsinki')
@pytest.fixture
def test_unit3():
return Unit.objects.create(name="unit 3", time_zone='Europe/Helsinki')
@pytest.fixture
def terms_of_use():
return TermsOfUse.objects.create(
name_fi='testikäyttöehdot',
name_en='test terms of use',
text_fi='kaikki on kielletty',
text_en='everything is forbidden',
)
@pytest.mark.django_db
@pytest.fixture
def resource_in_unit(space_resource_type, test_unit, terms_of_use):
return Resource.objects.create(
type=space_resource_type,
authentication="none",
name="resource in unit",
unit=test_unit,
max_reservations_per_user=1,
max_period=datetime.timedelta(hours=2),
reservable=True,
generic_terms=terms_of_use,
specific_terms_fi='spesifiset käyttöehdot',
specific_terms_en='specific terms of use',
reservation_confirmed_notification_extra_en='this resource rocks'
)
@pytest.mark.django_db
@pytest.fixture
def resource_in_unit2(space_resource_type, test_unit2):
return Resource.objects.create(
type=space_resource_type,
authentication="none",
name="resource in unit 2",
unit=test_unit2,
max_reservations_per_user=2,
max_period=datetime.timedelta(hours=4),
reservable=True,
)
@pytest.mark.django_db
@pytest.fixture
def resource_in_unit3(space_resource_type, test_unit3):
return Resource.objects.create(
type=space_resource_type,
authentication="none",
name="resource in unit 3",
unit=test_unit3,
max_reservations_per_user=2,
max_period=datetime.timedelta(hours=4),
reservable=True,
)
@pytest.mark.django_db
@pytest.fixture
def resource_with_opening_hours(resource_in_unit):
p1 = Period.objects.create(start=datetime.date(2115, 1, 1),
end=datetime.date(2115, 12, 31),
resource=resource_in_unit, name='regular hours')
for weekday in range(0, 7):
Day.objects.create(period=p1, weekday=weekday,
opens=datetime.time(8, 0),
closes=datetime.time(18, 0))
resource_in_unit.update_opening_hours()
return resource_in_unit
@pytest.mark.django_db
@pytest.fixture
def exceptional_period(resource_with_opening_hours):
parent = resource_with_opening_hours.periods.first()
period = Period.objects.create(start='2115-01-10', end='2115-01-12',
resource=resource_with_opening_hours,
name='exceptional hours',
exceptional=True, parent=parent)
date = period.start
Day.objects.create(period=period, weekday=date.weekday(),
closed=True)
date = date + datetime.timedelta(days=1)
Day.objects.create(period=period, weekday=date.weekday(),
opens='12:00', closes='13:00')
date = date + datetime.timedelta(days=1)
Day.objects.create(period=period, weekday=date.weekday(),
closed=True)
return period
@pytest.mark.django_db
@pytest.fixture
def equipment_category():
return EquipmentCategory.objects.create(
name='test equipment category'
)
@pytest.mark.django_db
@pytest.fixture
def equipment(equipment_category):
equipment = Equipment.objects.create(name='test equipment', category=equipment_category)
return equipment
@pytest.mark.django_db
@pytest.fixture
def equipment_alias(equipment):
equipment_alias = EquipmentAlias.objects.create(name='test equipment alias', language='fi', equipment=equipment)
return equipment_alias
@pytest.mark.django_db
@pytest.fixture
def resource_equipment(resource_in_unit, equipment):
data = {'test_key': 'test_value'}
resource_equipment = ResourceEquipment.objects.create(
equipment=equipment,
resource=resource_in_unit,
data=data,
description='test resource equipment',
)
return resource_equipment
@pytest.mark.django_db
@pytest.fixture
def user():
return get_user_model().objects.create(
username='test_user',
first_name='Cem',
last_name='Kaner',
email='[email protected]',
preferred_language='en'
)
@pytest.mark.django_db
@pytest.fixture
def user2():
return get_user_model().objects.create(
username='test_user2',
first_name='Brendan',
last_name='Neutra',
email='[email protected]'
)
@pytest.mark.django_db
@pytest.fixture
def staff_user():
return get_user_model().objects.create(
username='test_staff_user',
first_name='John',
last_name='Staff',
email='[email protected]',
is_staff=True,
preferred_language='en'
)
@pytest.mark.django_db
@pytest.fixture
def unit_manager_user(resource_in_unit):
user = get_user_model().objects.create(
username='test_manager_user',
first_name='Inspector',
last_name='Lestrade',
email='[email protected]',
is_staff=True,
preferred_language='en'
)
user.unit_authorizations.create(subject=resource_in_unit.unit, level=UnitAuthorizationLevel.manager)
return user
@pytest.mark.django_db
@pytest.fixture
def general_admin():
return get_user_model().objects.create(
username='test_general_admin',
first_name='Genie',
last_name='Manager',
email='[email protected]',
is_staff=True,
is_general_admin=True,
preferred_language='en'
)
@pytest.mark.django_db
@pytest.fixture
def group():
return Group.objects.create(name='test group')
@pytest.mark.django_db
@pytest.fixture
def purpose():
return Purpose.objects.create(name='test purpose', id='test-purpose')
@pytest.fixture
def resource_group(resource_in_unit):
group = ResourceGroup.objects.create(
identifier='test_group',
name='Test resource group'
)
group.resources.set([resource_in_unit])
return group
@pytest.fixture
def resource_group2(resource_in_unit2):
group = ResourceGroup.objects.create(
identifier='test_group_2',
name='Test resource group 2'
)
group.resources.set([resource_in_unit2])
return group
@pytest.fixture
def test_municipality():
municipality = Municipality.objects.create(
id='foo',
name='Foo'
)
return municipality
@pytest.fixture
def accessibility_viewpoint_wheelchair():
vp = {"id": "10", "name_en": "I am a wheelchair user", "order_text": 10}
return AccessibilityViewpoint.objects.create(**vp)
@pytest.fixture
def accessibility_viewpoint_hearing():
vp = {"id": "20", "name_en": "I am hearing impaired", "order_text": 20}
return AccessibilityViewpoint.objects.create(**vp)
@pytest.fixture
def accessibility_value_green():
return AccessibilityValue.objects.create(value='green', order=10)
@pytest.fixture
def accessibility_value_red():
return AccessibilityValue.objects.create(value='red', order=-10)
@pytest.fixture
def resource_with_accessibility_data(resource_in_unit, accessibility_viewpoint_wheelchair,
accessibility_viewpoint_hearing, accessibility_value_green,
accessibility_value_red):
""" Resource is wheelchair accessible, not hearing accessible, unit is accessible to both """
ResourceAccessibility.objects.create(
resource=resource_in_unit,
viewpoint=accessibility_viewpoint_wheelchair,
value=accessibility_value_green
)
ResourceAccessibility.objects.create(
resource=resource_in_unit,
viewpoint=accessibility_viewpoint_hearing,
value=accessibility_value_red
)
UnitAccessibility.objects.create(
unit=resource_in_unit.unit,
viewpoint=accessibility_viewpoint_wheelchair,
value=accessibility_value_green
)
UnitAccessibility.objects.create(
unit=resource_in_unit.unit,
viewpoint=accessibility_viewpoint_hearing,
value=accessibility_value_green
)
return resource_in_unit
@pytest.fixture
def resource_with_accessibility_data2(resource_in_unit2, accessibility_viewpoint_wheelchair,
accessibility_viewpoint_hearing, accessibility_value_green,
accessibility_value_red):
""" Resource is hearing accessible, not wheelchair accessible, unit is accessible to both """
ResourceAccessibility.objects.create(
resource=resource_in_unit2,
viewpoint=accessibility_viewpoint_wheelchair,
value=accessibility_value_red
)
ResourceAccessibility.objects.create(
resource=resource_in_unit2,
viewpoint=accessibility_viewpoint_hearing,
value=accessibility_value_green
)
UnitAccessibility.objects.create(
unit=resource_in_unit2.unit,
viewpoint=accessibility_viewpoint_wheelchair,
value=accessibility_value_green
)
UnitAccessibility.objects.create(
unit=resource_in_unit2.unit,
viewpoint=accessibility_viewpoint_hearing,
value=accessibility_value_green
)
return resource_in_unit2
@pytest.fixture
def resource_with_accessibility_data3(resource_in_unit3, accessibility_viewpoint_wheelchair,
accessibility_viewpoint_hearing, accessibility_value_green,
accessibility_value_red):
""" Resource is accessible, unit is not """
ResourceAccessibility.objects.create(
resource=resource_in_unit3,
viewpoint=accessibility_viewpoint_wheelchair,
value=accessibility_value_green
)
ResourceAccessibility.objects.create(
resource=resource_in_unit3,
viewpoint=accessibility_viewpoint_hearing,
value=accessibility_value_green
)
UnitAccessibility.objects.create(
unit=resource_in_unit3.unit,
viewpoint=accessibility_viewpoint_wheelchair,
value=accessibility_value_red
)
UnitAccessibility.objects.create(
unit=resource_in_unit3.unit,
viewpoint=accessibility_viewpoint_hearing,
value=accessibility_value_red
)
return resource_in_unit3
| [((970, 1021), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[None, 'user', 'staff_user']"}), "(params=[None, 'user', 'staff_user'])\n", (984, 1021), False, 'import pytest\n'), ((660, 671), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (669, 671), False, 'from rest_framework.test import APIClient, APIRequestFactory\n'), ((741, 752), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (750, 752), False, 'from rest_framework.test import APIClient, APIRequestFactory\n'), ((888, 899), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (897, 899), False, 'from rest_framework.test import APIClient, APIRequestFactory\n'), ((1079, 1090), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (1088, 1090), False, 'from rest_framework.test import APIClient, APIRequestFactory\n'), ((1256, 1275), 'rest_framework.test.APIRequestFactory', 'APIRequestFactory', ([], {}), '()\n', (1273, 1275), False, 'from rest_framework.test import APIClient, APIRequestFactory\n'), ((1541, 1634), 'resources.models.Resource.objects.create', 'Resource.objects.create', ([], {'type': 'space_resource_type', 'authentication': '"""none"""', 'name': '"""resource"""'}), "(type=space_resource_type, authentication='none',\n name='resource')\n", (1564, 1634), False, 'from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period\n'), ((1700, 1761), 'resources.models.Unit.objects.create', 'Unit.objects.create', ([], {'name': '"""unit"""', 'time_zone': '"""Europe/Helsinki"""'}), "(name='unit', time_zone='Europe/Helsinki')\n", (1719, 1761), False, 'from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period\n'), ((1809, 1872), 'resources.models.Unit.objects.create', 'Unit.objects.create', ([], {'name': '"""unit 2"""', 'time_zone': '"""Europe/Helsinki"""'}), "(name='unit 2', time_zone='Europe/Helsinki')\n", (1828, 1872), False, 'from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period\n'), ((1920, 1983), 'resources.models.Unit.objects.create', 'Unit.objects.create', ([], {'name': '"""unit 3"""', 'time_zone': '"""Europe/Helsinki"""'}), "(name='unit 3', time_zone='Europe/Helsinki')\n", (1939, 1983), False, 'from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period\n'), ((2033, 2191), 'resources.models.TermsOfUse.objects.create', 'TermsOfUse.objects.create', ([], {'name_fi': '"""testikäyttöehdot"""', 'name_en': '"""test terms of use"""', 'text_fi': '"""kaikki on kielletty"""', 'text_en': '"""everything is forbidden"""'}), "(name_fi='testikäyttöehdot', name_en=\n 'test terms of use', text_fi='kaikki on kielletty', text_en=\n 'everything is forbidden')\n", (2058, 2191), False, 'from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup\n'), ((4298, 4463), 'resources.models.Period.objects.create', 'Period.objects.create', ([], {'start': '"""2115-01-10"""', 'end': '"""2115-01-12"""', 'resource': 'resource_with_opening_hours', 'name': '"""exceptional hours"""', 'exceptional': '(True)', 'parent': 'parent'}), "(start='2115-01-10', end='2115-01-12', resource=\n resource_with_opening_hours, name='exceptional hours', exceptional=True,\n parent=parent)\n", (4319, 4463), False, 'from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period\n'), ((5084, 5148), 'resources.models.EquipmentCategory.objects.create', 'EquipmentCategory.objects.create', ([], {'name': '"""test equipment category"""'}), "(name='test equipment category')\n", 
(5116, 5148), False, 'from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup\n'), ((5255, 5331), 'resources.models.Equipment.objects.create', 'Equipment.objects.create', ([], {'name': '"""test equipment"""', 'category': 'equipment_category'}), "(name='test equipment', category=equipment_category)\n", (5279, 5331), False, 'from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup\n'), ((5448, 5546), 'resources.models.EquipmentAlias.objects.create', 'EquipmentAlias.objects.create', ([], {'name': '"""test equipment alias"""', 'language': '"""fi"""', 'equipment': 'equipment'}), "(name='test equipment alias', language='fi',\n equipment=equipment)\n", (5477, 5546), False, 'from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup\n'), ((5727, 5862), 'resources.models.ResourceEquipment.objects.create', 'ResourceEquipment.objects.create', ([], {'equipment': 'equipment', 'resource': 'resource_in_unit', 'data': 'data', 'description': '"""test resource equipment"""'}), "(equipment=equipment, resource=\n resource_in_unit, data=data, description='test resource equipment')\n", (5759, 5862), False, 'from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup\n'), ((7545, 7584), 'django.contrib.auth.models.Group.objects.create', 'Group.objects.create', ([], {'name': '"""test group"""'}), "(name='test group')\n", (7565, 7584), False, 'from django.contrib.auth.models import Group\n'), ((7652, 7714), 'resources.models.Purpose.objects.create', 'Purpose.objects.create', ([], {'name': '"""test purpose"""', 'id': '"""test-purpose"""'}), "(name='test purpose', id='test-purpose')\n", (7674, 7714), False, 'from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period\n'), ((7783, 7869), 'resources.models.ResourceGroup.objects.create', 'ResourceGroup.objects.create', ([], {'identifier': '"""test_group"""', 'name': '"""Test resource group"""'}), "(identifier='test_group', name=\n 'Test resource group')\n", (7811, 7869), False, 'from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup\n'), ((8018, 8108), 'resources.models.ResourceGroup.objects.create', 'ResourceGroup.objects.create', ([], {'identifier': '"""test_group_2"""', 'name': '"""Test resource group 2"""'}), "(identifier='test_group_2', name=\n 'Test resource group 2')\n", (8046, 8108), False, 'from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup\n'), ((8249, 8298), 'munigeo.models.Municipality.objects.create', 'Municipality.objects.create', ([], {'id': '"""foo"""', 'name': '"""Foo"""'}), "(id='foo', name='Foo')\n", (8276, 8298), False, 'from munigeo.models import Municipality\n'), ((8493, 8536), 'resources.models.AccessibilityViewpoint.objects.create', 'AccessibilityViewpoint.objects.create', ([], {}), '(**vp)\n', (8530, 8536), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((8681, 8724), 'resources.models.AccessibilityViewpoint.objects.create', 'AccessibilityViewpoint.objects.create', ([], {}), '(**vp)\n', (8718, 8724), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((8787, 8845), 
'resources.models.AccessibilityValue.objects.create', 'AccessibilityValue.objects.create', ([], {'value': '"""green"""', 'order': '(10)'}), "(value='green', order=10)\n", (8820, 8845), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((8906, 8963), 'resources.models.AccessibilityValue.objects.create', 'AccessibilityValue.objects.create', ([], {'value': '"""red"""', 'order': '(-10)'}), "(value='red', order=-10)\n", (8939, 8963), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((9335, 9482), 'resources.models.ResourceAccessibility.objects.create', 'ResourceAccessibility.objects.create', ([], {'resource': 'resource_in_unit', 'viewpoint': 'accessibility_viewpoint_wheelchair', 'value': 'accessibility_value_green'}), '(resource=resource_in_unit, viewpoint=\n accessibility_viewpoint_wheelchair, value=accessibility_value_green)\n', (9371, 9482), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((9512, 9654), 'resources.models.ResourceAccessibility.objects.create', 'ResourceAccessibility.objects.create', ([], {'resource': 'resource_in_unit', 'viewpoint': 'accessibility_viewpoint_hearing', 'value': 'accessibility_value_red'}), '(resource=resource_in_unit, viewpoint=\n accessibility_viewpoint_hearing, value=accessibility_value_red)\n', (9548, 9654), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((9684, 9828), 'resources.models.UnitAccessibility.objects.create', 'UnitAccessibility.objects.create', ([], {'unit': 'resource_in_unit.unit', 'viewpoint': 'accessibility_viewpoint_wheelchair', 'value': 'accessibility_value_green'}), '(unit=resource_in_unit.unit, viewpoint=\n accessibility_viewpoint_wheelchair, value=accessibility_value_green)\n', (9716, 9828), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((9858, 9999), 'resources.models.UnitAccessibility.objects.create', 'UnitAccessibility.objects.create', ([], {'unit': 'resource_in_unit.unit', 'viewpoint': 'accessibility_viewpoint_hearing', 'value': 'accessibility_value_green'}), '(unit=resource_in_unit.unit, viewpoint=\n accessibility_viewpoint_hearing, value=accessibility_value_green)\n', (9890, 9999), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((10428, 10574), 'resources.models.ResourceAccessibility.objects.create', 'ResourceAccessibility.objects.create', ([], {'resource': 'resource_in_unit2', 'viewpoint': 'accessibility_viewpoint_wheelchair', 'value': 'accessibility_value_red'}), '(resource=resource_in_unit2, viewpoint=\n accessibility_viewpoint_wheelchair, value=accessibility_value_red)\n', (10464, 10574), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((10604, 10749), 'resources.models.ResourceAccessibility.objects.create', 'ResourceAccessibility.objects.create', ([], {'resource': 'resource_in_unit2', 'viewpoint': 'accessibility_viewpoint_hearing', 'value': 'accessibility_value_green'}), '(resource=resource_in_unit2, viewpoint=\n accessibility_viewpoint_hearing, value=accessibility_value_green)\n', (10640, 10749), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, 
ResourceAccessibility, UnitAccessibility\n'), ((10779, 10924), 'resources.models.UnitAccessibility.objects.create', 'UnitAccessibility.objects.create', ([], {'unit': 'resource_in_unit2.unit', 'viewpoint': 'accessibility_viewpoint_wheelchair', 'value': 'accessibility_value_green'}), '(unit=resource_in_unit2.unit, viewpoint=\n accessibility_viewpoint_wheelchair, value=accessibility_value_green)\n', (10811, 10924), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((10954, 11096), 'resources.models.UnitAccessibility.objects.create', 'UnitAccessibility.objects.create', ([], {'unit': 'resource_in_unit2.unit', 'viewpoint': 'accessibility_viewpoint_hearing', 'value': 'accessibility_value_green'}), '(unit=resource_in_unit2.unit, viewpoint=\n accessibility_viewpoint_hearing, value=accessibility_value_green)\n', (10986, 11096), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((11476, 11624), 'resources.models.ResourceAccessibility.objects.create', 'ResourceAccessibility.objects.create', ([], {'resource': 'resource_in_unit3', 'viewpoint': 'accessibility_viewpoint_wheelchair', 'value': 'accessibility_value_green'}), '(resource=resource_in_unit3, viewpoint=\n accessibility_viewpoint_wheelchair, value=accessibility_value_green)\n', (11512, 11624), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((11654, 11799), 'resources.models.ResourceAccessibility.objects.create', 'ResourceAccessibility.objects.create', ([], {'resource': 'resource_in_unit3', 'viewpoint': 'accessibility_viewpoint_hearing', 'value': 'accessibility_value_green'}), '(resource=resource_in_unit3, viewpoint=\n accessibility_viewpoint_hearing, value=accessibility_value_green)\n', (11690, 11799), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((11829, 11972), 'resources.models.UnitAccessibility.objects.create', 'UnitAccessibility.objects.create', ([], {'unit': 'resource_in_unit3.unit', 'viewpoint': 'accessibility_viewpoint_wheelchair', 'value': 'accessibility_value_red'}), '(unit=resource_in_unit3.unit, viewpoint=\n accessibility_viewpoint_wheelchair, value=accessibility_value_red)\n', (11861, 11972), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((12002, 12142), 'resources.models.UnitAccessibility.objects.create', 'UnitAccessibility.objects.create', ([], {'unit': 'resource_in_unit3.unit', 'viewpoint': 'accessibility_viewpoint_hearing', 'value': 'accessibility_value_red'}), '(unit=resource_in_unit3.unit, viewpoint=\n accessibility_viewpoint_hearing, value=accessibility_value_red)\n', (12034, 12142), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((1355, 1448), 'resources.models.ResourceType.objects.get_or_create', 'ResourceType.objects.get_or_create', ([], {'id': '"""test_space"""', 'name': '"""test_space"""', 'main_type': '"""space"""'}), "(id='test_space', name='test_space',\n main_type='space')\n", (1389, 1448), False, 'from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period\n'), ((4701, 4727), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4719, 4727), False, 'import datetime\n'), ((4862, 4888), 
'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4880, 4888), False, 'import datetime\n'), ((2544, 2571), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(2)'}), '(hours=2)\n', (2562, 2571), False, 'import datetime\n'), ((3131, 3158), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(4)'}), '(hours=4)\n', (3149, 3158), False, 'import datetime\n'), ((3505, 3532), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(4)'}), '(hours=4)\n', (3523, 3532), False, 'import datetime\n'), ((3694, 3719), 'datetime.date', 'datetime.date', (['(2115)', '(1)', '(1)'], {}), '(2115, 1, 1)\n', (3707, 3719), False, 'import datetime\n'), ((3756, 3783), 'datetime.date', 'datetime.date', (['(2115)', '(12)', '(31)'], {}), '(2115, 12, 31)\n', (3769, 3783), False, 'import datetime\n'), ((3985, 4004), 'datetime.time', 'datetime.time', (['(8)', '(0)'], {}), '(8, 0)\n', (3998, 4004), False, 'import datetime\n'), ((4040, 4060), 'datetime.time', 'datetime.time', (['(18)', '(0)'], {}), '(18, 0)\n', (4053, 4060), False, 'import datetime\n'), ((5991, 6007), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (6005, 6007), False, 'from django.contrib.auth import get_user_model\n'), ((6241, 6257), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (6255, 6257), False, 'from django.contrib.auth import get_user_model\n'), ((6474, 6490), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (6488, 6490), False, 'from django.contrib.auth import get_user_model\n'), ((6783, 6799), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (6797, 6799), False, 'from django.contrib.auth import get_user_model\n'), ((7216, 7232), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (7230, 7232), False, 'from django.contrib.auth import get_user_model\n')] |
x2dev/device_leeco_x2 | qcmetadataprinter/struct.py | 9bf4549b5f64390ca4da291745b2a66a8e3f006e | #!/bin/python3
with open('../camera/QCamera2/stack/common/cam_intf.h', 'r') as f:
data = f.read()
f.closed
start = data.find(' INCLUDE(CAM_INTF_META_HISTOGRAM')
end = data.find('} metadata_data_t;')
data = data[start:end]
metadata = data.split("\n")
metalist = list()
for line in metadata:
if (line.startswith(' INCLUDE')):
foo = line.split(',')
foo[0] = foo[0].replace('INCLUDE', 'PRINT')
metalist.append(foo[0] + ", pMetadata);")
with open('list.txt', 'w') as f:
for item in metalist:
f.write("%s\n" % item)
f.closed
| [] |
c-yan/atcoder | abc/abc121/abc121d-2.py | 940e49d576e6a2d734288fadaf368e486480a948 | def g(A, n):
if A == -1:
return 0
return A // (2 * n) * n + max(A % (2 * n) - (n - 1), 0)
def f(A, B):
result = 0
for i in range(48):
t = 1 << i
if (g(B, t) - g(A - 1, t)) % 2 == 1:
result += t
return result
A, B = map(int, input().split())
print(f(A, B))
| [] |
kizunai/Weather-Scrapy | log_mysql.py | d2104d28dc303f6710b043f9821dcb84c665665d | import logging
from logging.handlers import TimedRotatingFileHandler
class MyLog():
def __init__(self, name, filename):
self.logger = logging.getLogger(name)
if not self.logger.handlers:
self.logger.setLevel(logging.INFO)
ch = TimedRotatingFileHandler(filename=filename, when='midnight', encoding="utf-8")
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
self.logger.addHandler(ch)
'''
logger = MyLog("test","log\\text.txt")
logger.logger.debug('debug message')
logger.logger.info('info message')
logger.logger.warning('warn message')
logger.logger.error('error message')
logger.logger.critical('critical message')
'''
| [((147, 170), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (164, 170), False, 'import logging\n'), ((272, 350), 'logging.handlers.TimedRotatingFileHandler', 'TimedRotatingFileHandler', ([], {'filename': 'filename', 'when': '"""midnight"""', 'encoding': '"""utf-8"""'}), "(filename=filename, when='midnight', encoding='utf-8')\n", (296, 350), False, 'from logging.handlers import TimedRotatingFileHandler\n'), ((414, 487), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (431, 487), False, 'import logging\n')] |
lerooze/django-fiesta | src/fiesta/urls.py | d521f50bcdd3d40e91f0474ec2fa7e256758e0a5 | # urls.py
from django.urls import path, register_converter
from fiesta import converters
from fiesta.views import views
from rest_framework.urlpatterns import format_suffix_patterns
# "http://django-sdmx.org/wsrest/"
# "http://django-sdmx.org/ws/"
register_converter(converters.ResourceConverter, 'res')
register_converter(converters.AgencyConverter, 'age')
register_converter(converters.ContextConverter, 'con')
urlpatterns = [
path('wsreg/SubmitStructure/', views.SubmitStructureRequestView.as_view()),
path('wsrest/schema/<con:context>/<age:agencyID>/<str:resourceID>', views.SDMXRESTfulSchemaView.as_view()),
path('wsrest/schema/<con:context>/<age:agencyID>/<str:resourceID>/<str:version>', views.SDMXRESTfulSchemaView.as_view()),
path('wsrest/<res:resource>/', views.SDMXRESTfulStructureView.as_view()),
path('wsrest/<res:resource>/<age:agencyID>/',
views.SDMXRESTfulStructureView.as_view()),
path('wsrest/<res:resource>/<age:agencyID>/<str:resourceID>/',
views.SDMXRESTfulStructureView.as_view()),
path('wsrest/<res:resource>/<age:agencyID>/<str:resourceID>/'
'<str:version>/',
views.SDMXRESTfulStructureView.as_view()),
]
urlpatterns = format_suffix_patterns(urlpatterns)
| [((252, 307), 'django.urls.register_converter', 'register_converter', (['converters.ResourceConverter', '"""res"""'], {}), "(converters.ResourceConverter, 'res')\n", (270, 307), False, 'from django.urls import path, register_converter\n'), ((308, 361), 'django.urls.register_converter', 'register_converter', (['converters.AgencyConverter', '"""age"""'], {}), "(converters.AgencyConverter, 'age')\n", (326, 361), False, 'from django.urls import path, register_converter\n'), ((362, 416), 'django.urls.register_converter', 'register_converter', (['converters.ContextConverter', '"""con"""'], {}), "(converters.ContextConverter, 'con')\n", (380, 416), False, 'from django.urls import path, register_converter\n'), ((1215, 1250), 'rest_framework.urlpatterns.format_suffix_patterns', 'format_suffix_patterns', (['urlpatterns'], {}), '(urlpatterns)\n', (1237, 1250), False, 'from rest_framework.urlpatterns import format_suffix_patterns\n'), ((469, 511), 'fiesta.views.views.SubmitStructureRequestView.as_view', 'views.SubmitStructureRequestView.as_view', ([], {}), '()\n', (509, 511), False, 'from fiesta.views import views\n'), ((586, 623), 'fiesta.views.views.SDMXRESTfulSchemaView.as_view', 'views.SDMXRESTfulSchemaView.as_view', ([], {}), '()\n', (621, 623), False, 'from fiesta.views import views\n'), ((712, 749), 'fiesta.views.views.SDMXRESTfulSchemaView.as_view', 'views.SDMXRESTfulSchemaView.as_view', ([], {}), '()\n', (747, 749), False, 'from fiesta.views import views\n'), ((787, 827), 'fiesta.views.views.SDMXRESTfulStructureView.as_view', 'views.SDMXRESTfulStructureView.as_view', ([], {}), '()\n', (825, 827), False, 'from fiesta.views import views\n'), ((889, 929), 'fiesta.views.views.SDMXRESTfulStructureView.as_view', 'views.SDMXRESTfulStructureView.as_view', ([], {}), '()\n', (927, 929), False, 'from fiesta.views import views\n'), ((1009, 1049), 'fiesta.views.views.SDMXRESTfulStructureView.as_view', 'views.SDMXRESTfulStructureView.as_view', ([], {}), '()\n', (1047, 1049), False, 'from fiesta.views import views\n'), ((1155, 1195), 'fiesta.views.views.SDMXRESTfulStructureView.as_view', 'views.SDMXRESTfulStructureView.as_view', ([], {}), '()\n', (1193, 1195), False, 'from fiesta.views import views\n')] |
code-knayam/DataStructureAlgorithms | code-wars/010.moving-zeros-to-the-end.py | 8425911633d4d343c58798a123175289ed0df1fe | # Write an algorithm that takes an array and moves all of the zeros to the end, preserving the order of the other elements.
def move_zeros(array):
#your code here
new_array = []
new_index = 0
while len(array) > 0:
item = array.pop(0)
if item == 0 and not type(item) == bool :
new_array.append(item)
else:
new_array.insert(new_index, item)
new_index = new_index + 1
return new_array | [] |
jason-gm/python_sepa | sepa_generator/definitions.py | 542c48326c07ab68d341a07d5ee12502f7248690 |
def construct_tag_data(tag_name, attrs=None, value=None, sorting=None):
data = {
'_name': tag_name,
'_attrs': attrs or [],
'_value': value,
}
if sorting:
data['_sorting'] = sorting
return data
def add_simple_child(data, child_friendly_name, child_tag_name, child_attrs=None, child_value=None):
data[child_friendly_name] = construct_tag_data(child_tag_name, child_attrs, child_value)
return data
def construct_header(ctransfer):
header = construct_tag_data('GrpHdr')
header['_sorting'] = ['MsgId', 'CreDtTm', 'NbOfTxs', 'CtrlSum', 'InitgPty']
header['message_id'] = construct_tag_data('MsgId', value=ctransfer.uuid)
header['creation_date_time'] = construct_tag_data('CreDtTm', value=ctransfer.timestamp)
header['num_transactions'] = construct_tag_data('NbOfTxs', value=ctransfer.get_num_of_transactions())
header['control_sum'] = construct_tag_data('CtrlSum', value=ctransfer.get_control_sum())
header['initiating_party'] = add_simple_child(construct_tag_data('InitgPty'), 'name', 'Nm', [],
ctransfer.debtor.name)
return header
def construct_iban(account, tag_name):
iban_data = construct_tag_data(tag_name)
iban_data['id'] = add_simple_child(construct_tag_data('Id'), 'iban', 'IBAN', [], account.iban)
return iban_data
def construct_bic(account, tag_name):
bic_data = construct_tag_data(tag_name)
bic_data['financial_instrument_id'] = add_simple_child(construct_tag_data('FinInstnId'), 'bic', 'BIC', [],
account.bic)
return bic_data
def construct_address_data(account, tag_name):
addr_data = construct_tag_data(tag_name)
addr_data['name'] = construct_tag_data('Nm', value=account.name)
if account.has_address():
address = construct_tag_data('PstlAdr')
if account.country:
address['country'] = construct_tag_data('Ctry', value=account.country)
if account.street:
address['addr_line_1'] = construct_tag_data('AdrLine', value=account.street)
if account.postcode and account.city:
address['addr_line_2'] = construct_tag_data('AdrLine', value="%s %s" % (account.postcode, account.city))
addr_data['address'] = address
return addr_data
def construct_transaction_data(ctransfer, transaction):
transaction_information = construct_tag_data('CdtTrfTxInf')
transaction_information['_sorting'] = ['PmtId', 'Amt', 'ChrgBr', 'UltmtDbtr', 'CdtrAgt', 'Cdtr', 'CdtrAcct',
'UltmtCdtr', 'Purp', 'RmtInf']
transaction_information['payment_id'] = add_simple_child(
data=add_simple_child(data=construct_tag_data('PmtId', sorting=['InstrId', 'EndToEndId']),
child_friendly_name='instruction',
child_tag_name='InstrId',
child_value=transaction.uuid),
child_friendly_name='eref',
child_tag_name='EndToEndId',
child_value=transaction.eref)
transaction_information['amount'] = add_simple_child(data=construct_tag_data('Amt'),
child_friendly_name='amount',
child_tag_name='InstdAmt',
child_attrs=[('Ccy', ctransfer.currency)],
child_value=transaction.get_amount())
transaction_information['charge_bearer'] = construct_tag_data('ChrgBr', value='SLEV')
if ctransfer.debtor.use_ultimate:
transaction_information['ultimate_debtor'] = add_simple_child(data=construct_tag_data('UltmtDbtr'),
child_friendly_name='name',
child_tag_name='Nm',
child_value=ctransfer.debtor.name)
transaction_information['creditor_agent'] = construct_bic(transaction.creditor, 'CdtrAgt')
transaction_information['creditor_data'] = construct_address_data(transaction.creditor, 'Cdtr')
transaction_information['creditor_account'] = construct_iban(transaction.creditor, 'CdtrAcct')
if transaction.creditor.use_ultimate:
transaction_information['ultimate_creditor'] = add_simple_child(data=construct_tag_data('UltmtCdtr'),
child_friendly_name='name',
child_tag_name='Nm',
child_value=transaction.creditor.name)
transaction_information['purpose'] = add_simple_child(data=construct_tag_data('Purp'),
child_friendly_name='code',
child_tag_name='Cd',
child_value=transaction.ext_purpose)
if not transaction.use_structured:
transaction_information['remote_inf'] = add_simple_child(data=construct_tag_data('RmtInf'),
child_friendly_name='unstructured',
child_tag_name='Ustrd',
child_value=transaction.purpose)
else:
rmt_inf = construct_tag_data('RmtInf')
rmt_inf_strd = add_simple_child(data=construct_tag_data('Strd'),
child_friendly_name='additional_info',
child_tag_name='AddtlRmtInf',
child_value=transaction.purpose)
rmt_tp = construct_tag_data('Tp')
rmt_tp['code_or_property'] = add_simple_child(data=construct_tag_data('CdOrPrtry'),
child_friendly_name='code',
child_tag_name='Cd',
child_value='SCOR')
rmt_creditor_ref_inf = add_simple_child(data=construct_tag_data('CdtrRefInf'),
child_friendly_name='reference',
child_tag_name='Ref',
child_value=transaction.cref)
rmt_creditor_ref_inf['tp'] = rmt_tp
rmt_inf_strd['creditor_ref_information'] = rmt_creditor_ref_inf
rmt_inf['structured'] = rmt_inf_strd
transaction_information['remote_inf'] = rmt_inf
return transaction_information
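# Build the <PmtInf> payment block: batch metadata, debtor details and one
# <CdtTrfTxInf> child per transaction.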
def construct_payment_information(ctransfer):
payment_inf = construct_tag_data('PmtInf')
payment_inf['_sorting'] = ['PmtInfId', 'PmtMtd', 'BtchBookg', 'NbOfTxs', 'CtrlSum', 'PmtTpInf', 'ReqdExctnDt',
'Dbtr', 'DbtrAcct', 'DbtrAgt', 'ChrgBr', 'CdtTrfTxInf']
payment_inf['payment_id'] = construct_tag_data('PmtInfId', value=ctransfer.payment_id)
payment_inf['payment_method'] = construct_tag_data('PmtMtd', value='TRF')
payment_inf['batch'] = construct_tag_data('BtchBookg', value=str(ctransfer.batch).lower())
payment_inf['num_transactions'] = construct_tag_data('NbOfTxs', value=ctransfer.get_num_of_transactions())
payment_inf['control_sum'] = construct_tag_data('CtrlSum', value=ctransfer.get_control_sum())
payment_instruction = construct_tag_data('PmtTpInf')
payment_instruction['_sorting'] = ['InstrPrty', 'SvcLvl']
payment_instruction['priority'] = construct_tag_data('InstrPrty', value='NORM')
payment_instruction['service_level'] = add_simple_child(construct_tag_data('SvcLvl'), 'code', 'Cd', [], 'SEPA')
payment_inf['instruction'] = payment_instruction
payment_inf['requested_execution_time'] = construct_tag_data('ReqdExctnDt', value=ctransfer.execution_time)
payment_inf['debtor'] = construct_address_data(ctransfer.debtor, 'Dbtr')
payment_inf['debtor_account'] = construct_iban(ctransfer.debtor, 'DbtrAcct')
payment_inf['debtor_agent'] = construct_bic(ctransfer.debtor, 'DbtrAgt')
payment_inf['charge_bearer'] = construct_tag_data('ChrgBr', value='SLEV')
for i, payment in enumerate(ctransfer.transactions):
transfer_information = construct_transaction_data(ctransfer, payment)
payment_inf['transfer_no_%s' % i] = transfer_information
return payment_inf
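# Top-level entry point: wrap the group header and payment information in the
# pain.001.001.03 <Document> root.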
def construct_document(ctransfer):
root = construct_tag_data('Document', [('xmlns', 'urn:iso:std:iso:20022:tech:xsd:pain.001.001.03')])
message = construct_tag_data('CstmrCdtTrfInitn')
message['_sorting'] = ['GrpHdr', 'PmtInf']
message['header'] = construct_header(ctransfer)
message['payment_information'] = construct_payment_information(ctransfer)
root['message'] = message
return root
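# Minimal usage sketch (hypothetical 'ctransfer' object supplying uuid,
# timestamp, currency, execution_time, debtor and transactions):
#     tree = construct_document(ctransfer)
# The resulting dict tree is presumably serialized to XML by a companion module.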
| [] |
miezebieze/scott-launcher | __main__.py | a03597d0883af075128d1ea4ea53e7b5132807b1 | from enum import Enum
from window import Window
D = Enum ('Directions','N NE E SE S SW W NW')
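# Grid coordinates of each direction's selector cell, consumed by Window below.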
selector_map = {
D.NW: [0.5,0.5], D.N: [1.5,0], D.NE: [2.5,0.5],
D.W: [0,1.5], D.E: [3,1.5],
D.SW: [0.5,2.5], D.S: [1.5,3], D.SE: [2.5,2.5],
}
selector_size = 100
window_size = selector_size*4
window = Window (window_size,window_size,selector_map,selector_size,selector_size)
# set actions here
from functools import partial
def say (something):
print (''.join (('Me: "',something,'"')))
window.actions[D.NW] = partial (say,'northwast')
window.actions[D.N] = partial (say,'north')
window.actions[D.NE] = partial (say,'neorthest')
window.actions[D.W] = partial (say,'western')
window.actions[D.E] = partial (say,'easy')
window.actions[D.SW] = partial (say,'suess whest')
window.actions[D.S] = partial (say,'sissy')
window.actions[D.SE] = partial (say,'seoul')
window.go ()
| [((53, 94), 'enum.Enum', 'Enum', (['"""Directions"""', '"""N NE E SE S SW W NW"""'], {}), "('Directions', 'N NE E SE S SW W NW')\n", (57, 94), False, 'from enum import Enum\n'), ((351, 427), 'window.Window', 'Window', (['window_size', 'window_size', 'selector_map', 'selector_size', 'selector_size'], {}), '(window_size, window_size, selector_map, selector_size, selector_size)\n', (357, 427), False, 'from window import Window\n'), ((566, 591), 'functools.partial', 'partial', (['say', '"""northwast"""'], {}), "(say, 'northwast')\n", (573, 591), False, 'from functools import partial\n'), ((614, 635), 'functools.partial', 'partial', (['say', '"""north"""'], {}), "(say, 'north')\n", (621, 635), False, 'from functools import partial\n'), ((659, 684), 'functools.partial', 'partial', (['say', '"""neorthest"""'], {}), "(say, 'neorthest')\n", (666, 684), False, 'from functools import partial\n'), ((707, 730), 'functools.partial', 'partial', (['say', '"""western"""'], {}), "(say, 'western')\n", (714, 730), False, 'from functools import partial\n'), ((753, 773), 'functools.partial', 'partial', (['say', '"""easy"""'], {}), "(say, 'easy')\n", (760, 773), False, 'from functools import partial\n'), ((797, 824), 'functools.partial', 'partial', (['say', '"""suess whest"""'], {}), "(say, 'suess whest')\n", (804, 824), False, 'from functools import partial\n'), ((847, 868), 'functools.partial', 'partial', (['say', '"""sissy"""'], {}), "(say, 'sissy')\n", (854, 868), False, 'from functools import partial\n'), ((892, 913), 'functools.partial', 'partial', (['say', '"""seoul"""'], {}), "(say, 'seoul')\n", (899, 913), False, 'from functools import partial\n')] |
monteals/C-Ride | cride/circles/serializers.py | 6e9368011f49ff619d1edaeaf1e8232685cc2095 | from rest_framework import serializers
from rest_framework.validators import UniqueValidator
from cride.circles.models import Circle
class CircleSerializer(serializers.Serializer):
name = serializers.CharField()
slug_name = serializers.SlugField()
rides_taken = serializers.IntegerField()
rides_offered = serializers.IntegerField()
members_limit = serializers.IntegerField()
class CreateCircleSerializer(serializers.Serializer):
name = serializers.CharField(max_length=140)
slug_name = serializers.CharField(max_length=40, validators=[UniqueValidator(queryset=Circle.objects.all())])
about = serializers.CharField(max_length=255, required=False)
def create(self, data):
return Circle.objects.create(**data)
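# Usage sketch (standard DRF flow; the calling view is assumed, not shown here):
#     serializer = CreateCircleSerializer(data=request.data)
#     serializer.is_valid(raise_exception=True)
#     circle = serializer.save()  # dispatches to create() above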
| [((193, 216), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (214, 216), False, 'from rest_framework import serializers\n'), ((233, 256), 'rest_framework.serializers.SlugField', 'serializers.SlugField', ([], {}), '()\n', (254, 256), False, 'from rest_framework import serializers\n'), ((275, 301), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {}), '()\n', (299, 301), False, 'from rest_framework import serializers\n'), ((322, 348), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {}), '()\n', (346, 348), False, 'from rest_framework import serializers\n'), ((369, 395), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {}), '()\n', (393, 395), False, 'from rest_framework import serializers\n'), ((466, 503), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': '(140)'}), '(max_length=140)\n', (487, 503), False, 'from rest_framework import serializers\n'), ((630, 683), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': '(255)', 'required': '(False)'}), '(max_length=255, required=False)\n', (651, 683), False, 'from rest_framework import serializers\n'), ((728, 757), 'cride.circles.models.Circle.objects.create', 'Circle.objects.create', ([], {}), '(**data)\n', (749, 757), False, 'from cride.circles.models import Circle\n'), ((594, 614), 'cride.circles.models.Circle.objects.all', 'Circle.objects.all', ([], {}), '()\n', (612, 614), False, 'from cride.circles.models import Circle\n')] |
Dimstella/blockchain-contact-tracing-app-hospitals | contact/views.py | e0b2bf2b3b8c06e58032faed99900d1c7b7d300d | from django.shortcuts import render
from django.template import loader
from django.http import HttpResponse
from django.shortcuts import render, redirect
from .models import Patient
from django.contrib import messages
import pandas as pd
from django.contrib.auth.decorators import login_required
from web3 import Web3
import datetime
import hashlib
import json
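# SHA-256 helper used to pseudonymize patient names before they are written
# to the blockchain.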
def encrypt_string(hash_string):
sha_signature = \
hashlib.sha256(hash_string.encode()).hexdigest()
return sha_signature
@login_required()
def index(request):
context = {}
template = loader.get_template('index.html')
return HttpResponse(template.render(context, request))
@login_required()
def patient_form(request):
dt = pd.read_csv('contact/static/info/countries.txt', sep='\n')
countries = []
df = dt.to_dict()
for k, country in df.items():
for k,v in country.items():
countries.append(v)
return render(request, 'patient_form.html', {'countries': countries})
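# Persist a new patient locally and mirror a pseudonymized record on the
# Ganache test chain via the Contact_tracing contract.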
@login_required()
def add_patients(request):
ganache_url = 'http://127.0.0.1:7545'
web3 = Web3(Web3.HTTPProvider(ganache_url))
abi = json.loads('[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]')
address = "0xa84580e93474b942b48B16CAEeaA1920962CBd90"
contract = web3.eth.contract(address = address, abi = abi)
print(contract)
if request.method == 'GET':
precord = Patient()
precord.name = request.GET.get("name")
precord.surname = request.GET.get("surname")
precord.address = request.GET.get("address")
precord.email = request.GET.get("email")
precord.city = request.GET.get("city")
precord.region = request.GET.get("region")
precord.postal = request.GET.get("postal")
precord.country = request.GET.get("country")
precord.phone = request.GET.get("phone")
precord.status = request.GET.get("status")
precord.notes = request.GET.get("notes", None)
precord.created_at = request.GET.get("bdate")
precord.user = request.user
name_surname = precord.name+"_"+precord.surname
precord.hashing = encrypt_string(name_surname)
country = precord.country
postal = precord.postal
status = int(precord.status)
result = contract.functions.addPatient(status, postal, str(precord.user), encrypt_string(name_surname), country).transact({'from':'0xF49D6960fb886B6ACD1Aca017f6306134a795457'})
num = contract.functions.getPatientsCount().call()
print(num)
precord.save()
messages.success(request, 'Record Saved')
return render(request, 'index.html')
else:
return render(request, 'index.html')
@login_required()
def patients_list(request):
patients = Patient.objects.all()
return render(request, 'patient_list.html', {'patients':patients})
@login_required
def delete_patient(request, uid):
ganache_url = 'http://127.0.0.1:7545'
web3 = Web3(Web3.HTTPProvider(ganache_url))
abi = json.loads('[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]')
address = "0xa84580e93474b942b48B16CAEeaA1920962CBd90"
contract = web3.eth.contract(address = address, abi = abi)
patient = Patient.objects.get(uid=uid)
patient.status = request.GET.get("status")
patient.notes = request.GET.get("notes", None)
patient.created_at = request.GET.get("bdate")
hashing = patient.hashing
country = patient.country
postal = patient.postal
status = int(patient.status)
result = contract.functions.addPatient(status, postal, 'deleted', str(hashing), country).transact({'from':'0xF49D6960fb886B6ACD1Aca017f6306134a795457'})
patient.delete()
return redirect("http://127.0.0.1:8000/patients-list")
@login_required
def update_patient_status(request, uid):
ganache_url = 'http://127.0.0.1:7545'
web3 = Web3(Web3.HTTPProvider(ganache_url))
abi = json.loads('[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]')
address = "0xa84580e93474b942b48B16CAEeaA1920962CBd90"
contract = web3.eth.contract(address = address, abi = abi)
if request.method == 'GET':
patient = Patient.objects.get(uid=uid)
patient.status = request.GET.get("status")
patient.notes = request.GET.get("notes", None)
patient.created_at = request.GET.get("bdate")
hashing = patient.hashing
print(patient)
country = patient.country
postal = patient.postal
status = int(patient.status)
result = contract.functions.addPatient(status, postal, str(patient.user), str(patient.hashing), country).transact({'from':'0xF49D6960fb886B6ACD1Aca017f6306134a795457'})
patient.save()
return redirect("http://127.0.0.1:8000/patients-list")
return redirect("http://127.0.0.1:8000/patients-list")
@login_required
def edit(request, uid):
patient = Patient.objects.get(uid=uid)
return render(request,'edit.html', {'patient':patient})
def users(request):
dt = pd.read_csv('countries.txt', sep='\n')
countries = []
df = dt.to_dict()
for k, country in df.items():
for k,v in country.items():
countries.append(v)
return render(request,'users.html', {'countries':countries})
def search_results(request):
dt = pd.read_csv('countries.txt', sep='\n')
countries = []
df = dt.to_dict()
for k, country in df.items():
for k,v in country.items():
countries.append(v)
ganache_url = 'http://127.0.0.1:7545'
web3 = Web3(Web3.HTTPProvider(ganache_url))
abi = json.loads('[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]')
address = "0xa84580e93474b942b48B16CAEeaA1920962CBd90"
contract = web3.eth.contract(address = address, abi = abi)
no_patients = contract.functions.getPatientsCount().call()
lpatient = []
lhash = []
message = 'No infected people'
if request.method == 'GET':
region = request.GET.get("city")
postal = request.GET.get("postal")
country = request.GET.get("country")
counter = 0
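        # Status 0 appears to encode "infected" here; each unique hash is
        # counted once, and records whose hospital field was set to 'deleted'
        # are skipped.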
for i in range(0, no_patients):
patients = contract.functions.gettPatient(i).call()
lpatient = list(patients)
print(lpatient)
            if lpatient[0] == 0 and lpatient[1] == postal and lpatient[4] == country:
                if lpatient[3] not in lhash and lpatient[2] != 'deleted':
                    lhash.append(lpatient[3])
                    counter = counter + 1
                    message = 'Infected people in your area are'
return render(request,'infected.html', {'countries':countries, 'infected_people': counter, 'message': message})
| [((519, 535), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (533, 535), False, 'from django.contrib.auth.decorators import login_required\n'), ((693, 709), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (707, 709), False, 'from django.contrib.auth.decorators import login_required\n'), ((1039, 1055), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (1053, 1055), False, 'from django.contrib.auth.decorators import login_required\n'), ((4553, 4569), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (4567, 4569), False, 'from django.contrib.auth.decorators import login_required\n'), ((591, 624), 'django.template.loader.get_template', 'loader.get_template', (['"""index.html"""'], {}), "('index.html')\n", (610, 624), False, 'from django.template import loader\n'), ((750, 808), 'pandas.read_csv', 'pd.read_csv', (['"""contact/static/info/countries.txt"""'], {'sep': '"""\n"""'}), "('contact/static/info/countries.txt', sep='\\n')\n", (761, 808), True, 'import pandas as pd\n'), ((970, 1032), 'django.shortcuts.render', 'render', (['request', '"""patient_form.html"""', "{'countries': countries}"], {}), "(request, 'patient_form.html', {'countries': countries})\n", (976, 1032), False, 'from django.shortcuts import render, redirect\n'), ((1195, 2926), 'json.loads', 'json.loads', (['"""[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]"""'], {}), '(\n \'[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum 
Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]\'\n )\n', (1205, 2926), False, 'import json\n'), ((4661, 4721), 'django.shortcuts.render', 'render', (['request', '"""patient_list.html"""', "{'patients': patients}"], {}), "(request, 'patient_list.html', {'patients': patients})\n", (4667, 4721), False, 'from django.shortcuts import render, redirect\n'), ((4896, 6627), 'json.loads', 'json.loads', (['"""[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum 
Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]"""'], {}), '(\n \'[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]\'\n )\n', (4906, 6627), False, 'import json\n'), ((7290, 7337), 'django.shortcuts.redirect', 'redirect', (['"""http://127.0.0.1:8000/patients-list"""'], {}), "('http://127.0.0.1:8000/patients-list')\n", (7298, 7337), False, 'from django.shortcuts import render, redirect\n'), ((7518, 9249), 'json.loads', 'json.loads', (['"""[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct 
Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]"""'], {}), '(\n \'[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]\'\n )\n', (7528, 9249), False, 'import json\n'), ((10090, 10137), 'django.shortcuts.redirect', 'redirect', (['"""http://127.0.0.1:8000/patients-list"""'], {}), "('http://127.0.0.1:8000/patients-list')\n", (10098, 10137), False, 'from django.shortcuts import render, redirect\n'), ((10247, 10297), 'django.shortcuts.render', 'render', (['request', '"""edit.html"""', "{'patient': patient}"], {}), "(request, 'edit.html', {'patient': patient})\n", (10253, 10297), False, 'from django.shortcuts import render, redirect\n'), ((10337, 10375), 'pandas.read_csv', 'pd.read_csv', (['"""countries.txt"""'], {'sep': '"""\n"""'}), "('countries.txt', sep='\\n')\n", (10348, 10375), True, 'import pandas as pd\n'), ((10543, 10598), 'django.shortcuts.render', 'render', (['request', '"""users.html"""', "{'countries': countries}"], {}), "(request, 'users.html', {'countries': countries})\n", (10549, 10598), False, 'from django.shortcuts import render, redirect\n'), ((10645, 10683), 'pandas.read_csv', 'pd.read_csv', (['"""countries.txt"""'], {'sep': '"""\n"""'}), "('countries.txt', sep='\\n')\n", (10656, 10683), True, 'import pandas as pd\n'), ((10944, 12675), 'json.loads', 'json.loads', 
(['"""[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]"""'], {}), '(\n \'[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]\'\n )\n', (10954, 12675), False, 'import json\n'), ((1150, 1180), 'web3.Web3.HTTPProvider', 'Web3.HTTPProvider', 
(['ganache_url'], {}), '(ganache_url)\n', (1167, 1180), False, 'from web3 import Web3\n'), ((4402, 4443), 'django.contrib.messages.success', 'messages.success', (['request', '"""Record Saved"""'], {}), "(request, 'Record Saved')\n", (4418, 4443), False, 'from django.contrib import messages\n'), ((4462, 4491), 'django.shortcuts.render', 'render', (['request', '"""index.html"""'], {}), "(request, 'index.html')\n", (4468, 4491), False, 'from django.shortcuts import render, redirect\n'), ((4519, 4548), 'django.shortcuts.render', 'render', (['request', '"""index.html"""'], {}), "(request, 'index.html')\n", (4525, 4548), False, 'from django.shortcuts import render, redirect\n'), ((4851, 4881), 'web3.Web3.HTTPProvider', 'Web3.HTTPProvider', (['ganache_url'], {}), '(ganache_url)\n', (4868, 4881), False, 'from web3 import Web3\n'), ((7473, 7503), 'web3.Web3.HTTPProvider', 'Web3.HTTPProvider', (['ganache_url'], {}), '(ganache_url)\n', (7490, 7503), False, 'from web3 import Web3\n'), ((10022, 10069), 'django.shortcuts.redirect', 'redirect', (['"""http://127.0.0.1:8000/patients-list"""'], {}), "('http://127.0.0.1:8000/patients-list')\n", (10030, 10069), False, 'from django.shortcuts import render, redirect\n'), ((10899, 10929), 'web3.Web3.HTTPProvider', 'Web3.HTTPProvider', (['ganache_url'], {}), '(ganache_url)\n', (10916, 10929), False, 'from web3 import Web3\n'), ((13609, 13719), 'django.shortcuts.render', 'render', (['request', '"""infected.html"""', "{'countries': countries, 'infected_people': counter, 'message': message}"], {}), "(request, 'infected.html', {'countries': countries, 'infected_people':\n counter, 'message': message})\n", (13615, 13719), False, 'from django.shortcuts import render, redirect\n')] |
brandon-rhodes/pycon2010-mighty-dictionary | figures/collide1a.py | 1f75fdd42cd243c9f86a87f7b48f6b3498d032e8 | import _dictdraw, sys
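# Render the internal hash-table layout of the (empty) dict d with _dictdraw
# and write it to the PNG path given as the first command-line argument; the
# exact meaning of the [4] argument is specific to _dictdraw's API.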
d = {}
surface = _dictdraw.draw_dictionary(d, [4])
surface.write_to_png(sys.argv[1])
| [((40, 73), '_dictdraw.draw_dictionary', '_dictdraw.draw_dictionary', (['d', '[4]'], {}), '(d, [4])\n', (65, 73), False, 'import _dictdraw, sys\n')] |
SeveNNoff/InstagramReportBot | ReportBot.py | 0a613b5f2733d988a952d64d8141cb7390527b9e | #!/usr/bin/env python3
# coding=utf-8
from libs.check_modules import check_modules
from sys import exit
from os import _exit
check_modules()
from os import path
from libs.logo import print_logo
from libs.utils import print_success
from libs.utils import print_error
from libs.utils import ask_question
from libs.utils import print_status
from libs.utils import parse_proxy_file
from libs.proxy_harvester import find_proxies
from libs.attack import report_profile_attack
from libs.attack import report_video_attack
from multiprocessing import Process
from colorama import Fore, Back, Style
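# Spawns worker processes that submit Instagram reports against a profile or
# a video, optionally routing batches of workers through harvested proxies.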
def chunks(lst, n):
"""Yield successive n-sized chunks from lst."""
for i in range(0, len(lst), n):
yield lst[i:i + n]
def profile_attack_process(username, proxy_list):
if (len(proxy_list) == 0):
for _ in range(10):
report_profile_attack(username, None)
return
for proxy in proxy_list:
report_profile_attack(username, proxy)
def video_attack_process(video_url, proxy_list):
if (len(proxy_list) == 0):
for _ in range(10):
report_video_attack(video_url, None)
return
for proxy in proxy_list:
report_video_attack(video_url, proxy)
def video_attack(proxies):
video_url = ask_question("Enter the link of the video you want to report")
print(Style.RESET_ALL)
if (len(proxies) == 0):
for k in range(5):
p = Process(target=video_attack_process, args=(video_url, [],))
p.start()
print_status(str(k + 1) + ". Transaction Opened!")
            if (k == 4): print()  # blank line after the last of the five workers
return
chunk = list(chunks(proxies, 10))
print("")
print_status("Video complaint attack is on!\n")
i = 1
for proxy_list in chunk:
p = Process(target=video_attack_process, args=(video_url, proxy_list,))
p.start()
print_status(str(i) + ". Transaction Opened!")
        if (i == 5): print()
i = i + 1
def profile_attack(proxies):
username = ask_question("Enter the username of the person you want to report")
print(Style.RESET_ALL)
if (len(proxies) == 0):
for k in range(5):
p = Process(target=profile_attack_process, args=(username, [],))
p.start()
print_status(str(k + 1) + ". Transaction Opened!")
return
chunk = list(chunks(proxies, 10))
print("")
print_status("Profile complaint attack is starting!\n")
i = 1
for proxy_list in chunk:
p = Process(target=profile_attack_process, args=(username, proxy_list,))
p.start()
print_status(str(i) + ". Transaction Opened!")
        if (i == 5): print()
i = i + 1
def main():
print_success("Modules loaded!\n")
ret = ask_question("Would you like to use a proxy? [Y / N]")
proxies = []
if (ret == "Y" or ret == "y"):
ret = ask_question("Would you like to collect your proxies from the internet? [Y / N]")
if (ret == "Y" or ret == "y"):
print_status("Gathering proxy from the Internet! This may take a while.\n")
proxies = find_proxies()
elif (ret == "N" or ret == "n"):
print_status("Please have a maximum of 50 proxies in a file!")
file_path = ask_question("Enter the path to your proxy list")
proxies = parse_proxy_file(file_path)
else:
print_error("Answer not understood, exiting!")
exit()
        print_success(str(len(proxies)) + " proxies found!\n")
elif (ret == "N" or ret == "n"):
pass
else:
print_error("Answer not understood, exiting!")
exit()
print("")
print_status("1 - Report Profile.")
print_status("2 - Report a video.")
report_choice = ask_question("Please select the complaint method")
print("")
if (report_choice.isdigit() == False):
print_error("The answer is not understood.")
exit(0)
if (int(report_choice) > 2 or int(report_choice) == 0):
print_error("The answer is not understood.")
exit(0)
if (int(report_choice) == 1):
profile_attack(proxies)
elif (int(report_choice) == 2):
video_attack(proxies)
if __name__ == "__main__":
print_logo()
try:
main()
print(Style.RESET_ALL)
except KeyboardInterrupt:
print("\n\n" + Fore.RED + "[*] Program is closing!")
print(Style.RESET_ALL)
_exit(0) | [((134, 149), 'libs.check_modules.check_modules', 'check_modules', ([], {}), '()\n', (147, 149), False, 'from libs.check_modules import check_modules\n'), ((1323, 1385), 'libs.utils.ask_question', 'ask_question', (['"""Enter the link of the video you want to report"""'], {}), "('Enter the link of the video you want to report')\n", (1335, 1385), False, 'from libs.utils import ask_question\n'), ((1748, 1795), 'libs.utils.print_status', 'print_status', (['"""Video complaint attack is on!\n"""'], {}), "('Video complaint attack is on!\\n')\n", (1760, 1795), False, 'from libs.utils import print_status\n'), ((2092, 2159), 'libs.utils.ask_question', 'ask_question', (['"""Enter the username of the person you want to report"""'], {}), "('Enter the username of the person you want to report')\n", (2104, 2159), False, 'from libs.utils import ask_question\n'), ((2489, 2544), 'libs.utils.print_status', 'print_status', (['"""Profile complaint attack is starting!\n"""'], {}), "('Profile complaint attack is starting!\\n')\n", (2501, 2544), False, 'from libs.utils import print_status\n'), ((2814, 2848), 'libs.utils.print_success', 'print_success', (['"""Modules loaded!\n"""'], {}), "('Modules loaded!\\n')\n", (2827, 2848), False, 'from libs.utils import print_success\n'), ((2862, 2916), 'libs.utils.ask_question', 'ask_question', (['"""Would you like to use a proxy? [Y / N]"""'], {}), "('Would you like to use a proxy? [Y / N]')\n", (2874, 2916), False, 'from libs.utils import ask_question\n'), ((3819, 3854), 'libs.utils.print_status', 'print_status', (['"""1 - Report Profile."""'], {}), "('1 - Report Profile.')\n", (3831, 3854), False, 'from libs.utils import print_status\n'), ((3860, 3895), 'libs.utils.print_status', 'print_status', (['"""2 - Report a video."""'], {}), "('2 - Report a video.')\n", (3872, 3895), False, 'from libs.utils import print_status\n'), ((3917, 3967), 'libs.utils.ask_question', 'ask_question', (['"""Please select the complaint method"""'], {}), "('Please select the complaint method')\n", (3929, 3967), False, 'from libs.utils import ask_question\n'), ((4411, 4423), 'libs.logo.print_logo', 'print_logo', ([], {}), '()\n', (4421, 4423), False, 'from libs.logo import print_logo\n'), ((979, 1017), 'libs.attack.report_profile_attack', 'report_profile_attack', (['username', 'proxy'], {}), '(username, proxy)\n', (1000, 1017), False, 'from libs.attack import report_profile_attack\n'), ((1238, 1275), 'libs.attack.report_video_attack', 'report_video_attack', (['video_url', 'proxy'], {}), '(video_url, proxy)\n', (1257, 1275), False, 'from libs.attack import report_video_attack\n'), ((1852, 1918), 'multiprocessing.Process', 'Process', ([], {'target': 'video_attack_process', 'args': '(video_url, proxy_list)'}), '(target=video_attack_process, args=(video_url, proxy_list))\n', (1859, 1918), False, 'from multiprocessing import Process\n'), ((2601, 2668), 'multiprocessing.Process', 'Process', ([], {'target': 'profile_attack_process', 'args': '(username, proxy_list)'}), '(target=profile_attack_process, args=(username, proxy_list))\n', (2608, 2668), False, 'from multiprocessing import Process\n'), ((2990, 3076), 'libs.utils.ask_question', 'ask_question', (['"""Would you like to collect your proxies from the internet? [Y / N]"""'], {}), "(\n 'Would you like to collect your proxies from the internet? 
[Y / N]')\n", (3002, 3076), False, 'from libs.utils import ask_question\n'), ((4038, 4082), 'libs.utils.print_error', 'print_error', (['"""The answer is not understood."""'], {}), "('The answer is not understood.')\n", (4049, 4082), False, 'from libs.utils import print_error\n'), ((4092, 4099), 'sys.exit', 'exit', (['(0)'], {}), '(0)\n', (4096, 4099), False, 'from sys import exit\n'), ((4176, 4220), 'libs.utils.print_error', 'print_error', (['"""The answer is not understood."""'], {}), "('The answer is not understood.')\n", (4187, 4220), False, 'from libs.utils import print_error\n'), ((4230, 4237), 'sys.exit', 'exit', (['(0)'], {}), '(0)\n', (4234, 4237), False, 'from sys import exit\n'), ((884, 921), 'libs.attack.report_profile_attack', 'report_profile_attack', (['username', 'None'], {}), '(username, None)\n', (905, 921), False, 'from libs.attack import report_profile_attack\n'), ((1144, 1180), 'libs.attack.report_video_attack', 'report_video_attack', (['video_url', 'None'], {}), '(video_url, None)\n', (1163, 1180), False, 'from libs.attack import report_video_attack\n'), ((1488, 1546), 'multiprocessing.Process', 'Process', ([], {'target': 'video_attack_process', 'args': '(video_url, [])'}), '(target=video_attack_process, args=(video_url, []))\n', (1495, 1546), False, 'from multiprocessing import Process\n'), ((2262, 2321), 'multiprocessing.Process', 'Process', ([], {'target': 'profile_attack_process', 'args': '(username, [])'}), '(target=profile_attack_process, args=(username, []))\n', (2269, 2321), False, 'from multiprocessing import Process\n'), ((3127, 3202), 'libs.utils.print_status', 'print_status', (['"""Gathering proxy from the Internet! This may take a while.\n"""'], {}), "('Gathering proxy from the Internet! This may take a while.\\n')\n", (3139, 3202), False, 'from libs.utils import print_status\n'), ((3226, 3240), 'libs.proxy_harvester.find_proxies', 'find_proxies', ([], {}), '()\n', (3238, 3240), False, 'from libs.proxy_harvester import find_proxies\n'), ((3726, 3772), 'libs.utils.print_error', 'print_error', (['"""Answer not understood, exiting!"""'], {}), "('Answer not understood, exiting!')\n", (3737, 3772), False, 'from libs.utils import print_error\n'), ((3782, 3788), 'sys.exit', 'exit', ([], {}), '()\n', (3786, 3788), False, 'from sys import exit\n'), ((4616, 4624), 'os._exit', '_exit', (['(0)'], {}), '(0)\n', (4621, 4624), False, 'from os import _exit\n'), ((3296, 3358), 'libs.utils.print_status', 'print_status', (['"""Please have a maximum of 50 proxies in a file!"""'], {}), "('Please have a maximum of 50 proxies in a file!')\n", (3308, 3358), False, 'from libs.utils import print_status\n'), ((3384, 3433), 'libs.utils.ask_question', 'ask_question', (['"""Enter the path to your proxy list"""'], {}), "('Enter the path to your proxy list')\n", (3396, 3433), False, 'from libs.utils import ask_question\n'), ((3457, 3484), 'libs.utils.parse_proxy_file', 'parse_proxy_file', (['file_path'], {}), '(file_path)\n', (3473, 3484), False, 'from libs.utils import parse_proxy_file\n'), ((3513, 3559), 'libs.utils.print_error', 'print_error', (['"""Answer not understood, exiting!"""'], {}), "('Answer not understood, exiting!')\n", (3524, 3559), False, 'from libs.utils import print_error\n'), ((3573, 3579), 'sys.exit', 'exit', ([], {}), '()\n', (3577, 3579), False, 'from sys import exit\n')] |
viathor/OpenFermion-Cirq | openfermioncirq/variational/ansatzes/default_initial_params_test.py | b4b7f8d82c40f0a6282873b5d2867e9d8778cea6 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy
import pytest
import cirq
import openfermion
from openfermioncirq import (
HamiltonianObjective,
LowRankTrotterAnsatz,
SplitOperatorTrotterAnsatz,
SwapNetworkTrotterAnsatz,
SwapNetworkTrotterHubbardAnsatz,
VariationalStudy,
prepare_gaussian_state,
simulate_trotter)
from openfermioncirq.trotter import (
LINEAR_SWAP_NETWORK, LOW_RANK, LowRankTrotterAlgorithm, SPLIT_OPERATOR)
# 4-qubit random DiagonalCoulombHamiltonian
diag_coul_hamiltonian = openfermion.random_diagonal_coulomb_hamiltonian(
4, real=True, seed=47141)
# 4-qubit H2 2-2 with bond length 0.7414
bond_length = 0.7414
geometry = [('H', (0., 0., 0.)), ('H', (0., 0., bond_length))]
h2_hamiltonian = openfermion.load_molecular_hamiltonian(
geometry, 'sto-3g', 1, format(bond_length), 2, 2)
# 4-qubit LiH 2-2 with bond length 1.45
bond_length = 1.45
geometry = [('Li', (0., 0., 0.)), ('H', (0., 0., bond_length))]
lih_hamiltonian = openfermion.load_molecular_hamiltonian(
geometry, 'sto-3g', 1, format(bond_length), 2, 2)
@pytest.mark.parametrize(
'ansatz, trotter_algorithm, order, hamiltonian, atol', [
(SwapNetworkTrotterAnsatz(diag_coul_hamiltonian, iterations=1),
LINEAR_SWAP_NETWORK, 1, diag_coul_hamiltonian, 5e-5),
(SplitOperatorTrotterAnsatz(diag_coul_hamiltonian, iterations=1),
SPLIT_OPERATOR, 1, diag_coul_hamiltonian, 5e-5),
(LowRankTrotterAnsatz(h2_hamiltonian, iterations=1),
LOW_RANK, 0, h2_hamiltonian, 5e-5),
(LowRankTrotterAnsatz(lih_hamiltonian, iterations=1, final_rank=3),
LowRankTrotterAlgorithm(final_rank=3), 0, lih_hamiltonian, 5e-5),
(SwapNetworkTrotterHubbardAnsatz(2, 2, 1.0, 4.0, iterations=1),
LINEAR_SWAP_NETWORK, 1,
openfermion.get_diagonal_coulomb_hamiltonian(
openfermion.reorder(
openfermion.fermi_hubbard(2, 2, 1.0, 4.0),
openfermion.up_then_down)
),
5e-5)
])
def test_trotter_ansatzes_default_initial_params_iterations_1(
ansatz, trotter_algorithm, order, hamiltonian, atol):
"""Check that a Trotter ansatz with one iteration and default parameters
is consistent with time evolution with one Trotter step."""
objective = HamiltonianObjective(hamiltonian)
qubits = ansatz.qubits
if isinstance(hamiltonian, openfermion.DiagonalCoulombHamiltonian):
one_body = hamiltonian.one_body
elif isinstance(hamiltonian, openfermion.InteractionOperator):
one_body = hamiltonian.one_body_tensor
if isinstance(ansatz, SwapNetworkTrotterHubbardAnsatz):
occupied_orbitals = (range(len(qubits)//4), range(len(qubits)//4))
else:
occupied_orbitals = range(len(qubits)//2)
preparation_circuit = cirq.Circuit(
prepare_gaussian_state(
qubits,
openfermion.QuadraticHamiltonian(one_body),
occupied_orbitals=occupied_orbitals
)
)
# Compute value using ansatz circuit and objective
circuit = cirq.resolve_parameters(
preparation_circuit + ansatz.circuit,
ansatz.param_resolver(ansatz.default_initial_params()))
result = circuit.final_wavefunction(
qubit_order=ansatz.qubit_permutation(qubits))
obj_val = objective.value(result)
# Compute value using study
study = VariationalStudy(
'study',
ansatz,
objective,
preparation_circuit=preparation_circuit)
study_val = study.value_of(ansatz.default_initial_params())
# Compute value by simulating time evolution
if isinstance(hamiltonian, openfermion.DiagonalCoulombHamiltonian):
half_way_hamiltonian = openfermion.DiagonalCoulombHamiltonian(
one_body=hamiltonian.one_body,
two_body=0.5 * hamiltonian.two_body)
elif isinstance(hamiltonian, openfermion.InteractionOperator):
half_way_hamiltonian = openfermion.InteractionOperator(
constant=hamiltonian.constant,
one_body_tensor=hamiltonian.one_body_tensor,
two_body_tensor=0.5 * hamiltonian.two_body_tensor)
simulation_circuit = cirq.Circuit(
simulate_trotter(
qubits,
half_way_hamiltonian,
time=ansatz.adiabatic_evolution_time,
n_steps=1,
order=order,
algorithm=trotter_algorithm)
)
final_state = (
preparation_circuit + simulation_circuit).final_wavefunction()
correct_val = openfermion.expectation(
objective._hamiltonian_linear_op, final_state).real
numpy.testing.assert_allclose(obj_val, study_val, atol=atol)
numpy.testing.assert_allclose(obj_val, correct_val, atol=atol)
@pytest.mark.parametrize(
'ansatz, trotter_algorithm, order, hamiltonian, atol', [
(SwapNetworkTrotterAnsatz(diag_coul_hamiltonian, iterations=2),
LINEAR_SWAP_NETWORK, 1, diag_coul_hamiltonian, 5e-5),
(SplitOperatorTrotterAnsatz(diag_coul_hamiltonian, iterations=2),
SPLIT_OPERATOR, 1, diag_coul_hamiltonian, 5e-5),
(LowRankTrotterAnsatz(h2_hamiltonian, iterations=2),
LOW_RANK, 0, h2_hamiltonian, 5e-5),
(LowRankTrotterAnsatz(lih_hamiltonian, iterations=2, final_rank=3),
LowRankTrotterAlgorithm(final_rank=3), 0, lih_hamiltonian, 1e-3),
(SwapNetworkTrotterHubbardAnsatz(2, 2, 1.0, 4.0, iterations=2),
LINEAR_SWAP_NETWORK, 1,
openfermion.get_diagonal_coulomb_hamiltonian(
openfermion.reorder(
openfermion.fermi_hubbard(2, 2, 1.0, 4.0),
openfermion.up_then_down)
),
5e-5)
])
def test_trotter_ansatzes_default_initial_params_iterations_2(
ansatz, trotter_algorithm, order, hamiltonian, atol):
"""Check that a Trotter ansatz with two iterations and default parameters
is consistent with time evolution with two Trotter steps."""
objective = HamiltonianObjective(hamiltonian)
qubits = ansatz.qubits
if isinstance(hamiltonian, openfermion.DiagonalCoulombHamiltonian):
one_body = hamiltonian.one_body
elif isinstance(hamiltonian, openfermion.InteractionOperator):
one_body = hamiltonian.one_body_tensor
if isinstance(ansatz, SwapNetworkTrotterHubbardAnsatz):
occupied_orbitals = (range(len(qubits)//4), range(len(qubits)//4))
else:
occupied_orbitals = range(len(qubits)//2)
preparation_circuit = cirq.Circuit(
prepare_gaussian_state(
qubits,
openfermion.QuadraticHamiltonian(one_body),
occupied_orbitals=occupied_orbitals
)
)
# Compute value using ansatz circuit and objective
circuit = cirq.resolve_parameters(
preparation_circuit + ansatz.circuit,
ansatz.param_resolver(ansatz.default_initial_params()))
result = circuit.final_wavefunction(
qubit_order=ansatz.qubit_permutation(qubits))
obj_val = objective.value(result)
# Compute value using study
study = VariationalStudy(
'study',
ansatz,
objective,
preparation_circuit=preparation_circuit)
study_val = study.value_of(ansatz.default_initial_params())
# Compute value by simulating time evolution
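    # With two Trotter steps the reference uses the Hamiltonians at the
    # quarter- and three-quarter-way points (two-body scaled by 0.25/0.75).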
if isinstance(hamiltonian, openfermion.DiagonalCoulombHamiltonian):
quarter_way_hamiltonian = openfermion.DiagonalCoulombHamiltonian(
one_body=hamiltonian.one_body,
two_body=0.25 * hamiltonian.two_body)
three_quarters_way_hamiltonian = openfermion.DiagonalCoulombHamiltonian(
one_body=hamiltonian.one_body,
two_body=0.75 * hamiltonian.two_body)
elif isinstance(hamiltonian, openfermion.InteractionOperator):
quarter_way_hamiltonian = openfermion.InteractionOperator(
constant=hamiltonian.constant,
one_body_tensor=hamiltonian.one_body_tensor,
two_body_tensor=0.25 * hamiltonian.two_body_tensor)
three_quarters_way_hamiltonian = openfermion.InteractionOperator(
constant=hamiltonian.constant,
one_body_tensor=hamiltonian.one_body_tensor,
two_body_tensor=0.75 * hamiltonian.two_body_tensor)
simulation_circuit = cirq.Circuit(
simulate_trotter(
qubits,
quarter_way_hamiltonian,
time=0.5 * ansatz.adiabatic_evolution_time,
n_steps=1,
order=order,
algorithm=trotter_algorithm),
simulate_trotter(
qubits,
three_quarters_way_hamiltonian,
time=0.5 * ansatz.adiabatic_evolution_time,
n_steps=1,
order=order,
algorithm=trotter_algorithm)
)
final_state = (
preparation_circuit + simulation_circuit).final_wavefunction()
correct_val = openfermion.expectation(
objective._hamiltonian_linear_op, final_state).real
numpy.testing.assert_allclose(obj_val, study_val, atol=atol)
numpy.testing.assert_allclose(obj_val, correct_val, atol=atol)
| [((1096, 1169), 'openfermion.random_diagonal_coulomb_hamiltonian', 'openfermion.random_diagonal_coulomb_hamiltonian', (['(4)'], {'real': '(True)', 'seed': '(47141)'}), '(4, real=True, seed=47141)\n', (1143, 1169), False, 'import openfermion\n'), ((2856, 2889), 'openfermioncirq.HamiltonianObjective', 'HamiltonianObjective', (['hamiltonian'], {}), '(hamiltonian)\n', (2876, 2889), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((3969, 4059), 'openfermioncirq.VariationalStudy', 'VariationalStudy', (['"""study"""', 'ansatz', 'objective'], {'preparation_circuit': 'preparation_circuit'}), "('study', ansatz, objective, preparation_circuit=\n preparation_circuit)\n", (3985, 4059), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((5267, 5327), 'numpy.testing.assert_allclose', 'numpy.testing.assert_allclose', (['obj_val', 'study_val'], {'atol': 'atol'}), '(obj_val, study_val, atol=atol)\n', (5296, 5327), False, 'import numpy\n'), ((5332, 5394), 'numpy.testing.assert_allclose', 'numpy.testing.assert_allclose', (['obj_val', 'correct_val'], {'atol': 'atol'}), '(obj_val, correct_val, atol=atol)\n', (5361, 5394), False, 'import numpy\n'), ((6593, 6626), 'openfermioncirq.HamiltonianObjective', 'HamiltonianObjective', (['hamiltonian'], {}), '(hamiltonian)\n', (6613, 6626), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((7706, 7796), 'openfermioncirq.VariationalStudy', 'VariationalStudy', (['"""study"""', 'ansatz', 'objective'], {'preparation_circuit': 'preparation_circuit'}), "('study', ansatz, objective, preparation_circuit=\n preparation_circuit)\n", (7722, 7796), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((9717, 9777), 'numpy.testing.assert_allclose', 'numpy.testing.assert_allclose', (['obj_val', 'study_val'], {'atol': 'atol'}), '(obj_val, study_val, atol=atol)\n', (9746, 9777), False, 'import numpy\n'), ((9782, 9844), 'numpy.testing.assert_allclose', 'numpy.testing.assert_allclose', (['obj_val', 'correct_val'], {'atol': 'atol'}), '(obj_val, correct_val, atol=atol)\n', (9811, 9844), False, 'import numpy\n'), ((4321, 4431), 'openfermion.DiagonalCoulombHamiltonian', 'openfermion.DiagonalCoulombHamiltonian', ([], {'one_body': 'hamiltonian.one_body', 'two_body': '(0.5 * hamiltonian.two_body)'}), '(one_body=hamiltonian.one_body,\n two_body=0.5 * hamiltonian.two_body)\n', (4359, 4431), False, 'import openfermion\n'), ((4819, 4966), 'openfermioncirq.simulate_trotter', 'simulate_trotter', (['qubits', 'half_way_hamiltonian'], {'time': 'ansatz.adiabatic_evolution_time', 'n_steps': '(1)', 'order': 'order', 'algorithm': 'trotter_algorithm'}), '(qubits, half_way_hamiltonian, time=ansatz.\n adiabatic_evolution_time, n_steps=1, order=order, algorithm=\n trotter_algorithm)\n', (4835, 4966), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, 
SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((5173, 5243), 'openfermion.expectation', 'openfermion.expectation', (['objective._hamiltonian_linear_op', 'final_state'], {}), '(objective._hamiltonian_linear_op, final_state)\n', (5196, 5243), False, 'import openfermion\n'), ((8061, 8172), 'openfermion.DiagonalCoulombHamiltonian', 'openfermion.DiagonalCoulombHamiltonian', ([], {'one_body': 'hamiltonian.one_body', 'two_body': '(0.25 * hamiltonian.two_body)'}), '(one_body=hamiltonian.one_body,\n two_body=0.25 * hamiltonian.two_body)\n', (8099, 8172), False, 'import openfermion\n'), ((8243, 8354), 'openfermion.DiagonalCoulombHamiltonian', 'openfermion.DiagonalCoulombHamiltonian', ([], {'one_body': 'hamiltonian.one_body', 'two_body': '(0.75 * hamiltonian.two_body)'}), '(one_body=hamiltonian.one_body,\n two_body=0.75 * hamiltonian.two_body)\n', (8281, 8354), False, 'import openfermion\n'), ((8996, 9152), 'openfermioncirq.simulate_trotter', 'simulate_trotter', (['qubits', 'quarter_way_hamiltonian'], {'time': '(0.5 * ansatz.adiabatic_evolution_time)', 'n_steps': '(1)', 'order': 'order', 'algorithm': 'trotter_algorithm'}), '(qubits, quarter_way_hamiltonian, time=0.5 * ansatz.\n adiabatic_evolution_time, n_steps=1, order=order, algorithm=\n trotter_algorithm)\n', (9012, 9152), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((9253, 9416), 'openfermioncirq.simulate_trotter', 'simulate_trotter', (['qubits', 'three_quarters_way_hamiltonian'], {'time': '(0.5 * ansatz.adiabatic_evolution_time)', 'n_steps': '(1)', 'order': 'order', 'algorithm': 'trotter_algorithm'}), '(qubits, three_quarters_way_hamiltonian, time=0.5 * ansatz.\n adiabatic_evolution_time, n_steps=1, order=order, algorithm=\n trotter_algorithm)\n', (9269, 9416), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((9623, 9693), 'openfermion.expectation', 'openfermion.expectation', (['objective._hamiltonian_linear_op', 'final_state'], {}), '(objective._hamiltonian_linear_op, final_state)\n', (9646, 9693), False, 'import openfermion\n'), ((3458, 3500), 'openfermion.QuadraticHamiltonian', 'openfermion.QuadraticHamiltonian', (['one_body'], {}), '(one_body)\n', (3490, 3500), False, 'import openfermion\n'), ((4559, 4725), 'openfermion.InteractionOperator', 'openfermion.InteractionOperator', ([], {'constant': 'hamiltonian.constant', 'one_body_tensor': 'hamiltonian.one_body_tensor', 'two_body_tensor': '(0.5 * hamiltonian.two_body_tensor)'}), '(constant=hamiltonian.constant,\n one_body_tensor=hamiltonian.one_body_tensor, two_body_tensor=0.5 *\n hamiltonian.two_body_tensor)\n', (4590, 4725), False, 'import openfermion\n'), ((1758, 1819), 'openfermioncirq.SwapNetworkTrotterAnsatz', 'SwapNetworkTrotterAnsatz', (['diag_coul_hamiltonian'], {'iterations': '(1)'}), '(diag_coul_hamiltonian, iterations=1)\n', (1782, 1819), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((1888, 1951), 
'openfermioncirq.SplitOperatorTrotterAnsatz', 'SplitOperatorTrotterAnsatz', (['diag_coul_hamiltonian'], {'iterations': '(1)'}), '(diag_coul_hamiltonian, iterations=1)\n', (1914, 1951), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((2015, 2065), 'openfermioncirq.LowRankTrotterAnsatz', 'LowRankTrotterAnsatz', (['h2_hamiltonian'], {'iterations': '(1)'}), '(h2_hamiltonian, iterations=1)\n', (2035, 2065), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((2116, 2181), 'openfermioncirq.LowRankTrotterAnsatz', 'LowRankTrotterAnsatz', (['lih_hamiltonian'], {'iterations': '(1)', 'final_rank': '(3)'}), '(lih_hamiltonian, iterations=1, final_rank=3)\n', (2136, 2181), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((2191, 2228), 'openfermioncirq.trotter.LowRankTrotterAlgorithm', 'LowRankTrotterAlgorithm', ([], {'final_rank': '(3)'}), '(final_rank=3)\n', (2214, 2228), False, 'from openfermioncirq.trotter import LINEAR_SWAP_NETWORK, LOW_RANK, LowRankTrotterAlgorithm, SPLIT_OPERATOR\n'), ((2262, 2323), 'openfermioncirq.SwapNetworkTrotterHubbardAnsatz', 'SwapNetworkTrotterHubbardAnsatz', (['(2)', '(2)', '(1.0)', '(4.0)'], {'iterations': '(1)'}), '(2, 2, 1.0, 4.0, iterations=1)\n', (2293, 2323), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((7195, 7237), 'openfermion.QuadraticHamiltonian', 'openfermion.QuadraticHamiltonian', (['one_body'], {}), '(one_body)\n', (7227, 7237), False, 'import openfermion\n'), ((8485, 8652), 'openfermion.InteractionOperator', 'openfermion.InteractionOperator', ([], {'constant': 'hamiltonian.constant', 'one_body_tensor': 'hamiltonian.one_body_tensor', 'two_body_tensor': '(0.25 * hamiltonian.two_body_tensor)'}), '(constant=hamiltonian.constant,\n one_body_tensor=hamiltonian.one_body_tensor, two_body_tensor=0.25 *\n hamiltonian.two_body_tensor)\n', (8516, 8652), False, 'import openfermion\n'), ((8735, 8902), 'openfermion.InteractionOperator', 'openfermion.InteractionOperator', ([], {'constant': 'hamiltonian.constant', 'one_body_tensor': 'hamiltonian.one_body_tensor', 'two_body_tensor': '(0.75 * hamiltonian.two_body_tensor)'}), '(constant=hamiltonian.constant,\n one_body_tensor=hamiltonian.one_body_tensor, two_body_tensor=0.75 *\n hamiltonian.two_body_tensor)\n', (8766, 8902), False, 'import openfermion\n'), ((5493, 5554), 'openfermioncirq.SwapNetworkTrotterAnsatz', 'SwapNetworkTrotterAnsatz', (['diag_coul_hamiltonian'], {'iterations': '(2)'}), '(diag_coul_hamiltonian, iterations=2)\n', (5517, 5554), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((5623, 5686), 'openfermioncirq.SplitOperatorTrotterAnsatz', 'SplitOperatorTrotterAnsatz', (['diag_coul_hamiltonian'], {'iterations': 
'(2)'}), '(diag_coul_hamiltonian, iterations=2)\n', (5649, 5686), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((5750, 5800), 'openfermioncirq.LowRankTrotterAnsatz', 'LowRankTrotterAnsatz', (['h2_hamiltonian'], {'iterations': '(2)'}), '(h2_hamiltonian, iterations=2)\n', (5770, 5800), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((5851, 5916), 'openfermioncirq.LowRankTrotterAnsatz', 'LowRankTrotterAnsatz', (['lih_hamiltonian'], {'iterations': '(2)', 'final_rank': '(3)'}), '(lih_hamiltonian, iterations=2, final_rank=3)\n', (5871, 5916), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((5926, 5963), 'openfermioncirq.trotter.LowRankTrotterAlgorithm', 'LowRankTrotterAlgorithm', ([], {'final_rank': '(3)'}), '(final_rank=3)\n', (5949, 5963), False, 'from openfermioncirq.trotter import LINEAR_SWAP_NETWORK, LOW_RANK, LowRankTrotterAlgorithm, SPLIT_OPERATOR\n'), ((5997, 6058), 'openfermioncirq.SwapNetworkTrotterHubbardAnsatz', 'SwapNetworkTrotterHubbardAnsatz', (['(2)', '(2)', '(1.0)', '(4.0)'], {'iterations': '(2)'}), '(2, 2, 1.0, 4.0, iterations=2)\n', (6028, 6058), False, 'from openfermioncirq import HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter\n'), ((2460, 2501), 'openfermion.fermi_hubbard', 'openfermion.fermi_hubbard', (['(2)', '(2)', '(1.0)', '(4.0)'], {}), '(2, 2, 1.0, 4.0)\n', (2485, 2501), False, 'import openfermion\n'), ((6195, 6236), 'openfermion.fermi_hubbard', 'openfermion.fermi_hubbard', (['(2)', '(2)', '(1.0)', '(4.0)'], {}), '(2, 2, 1.0, 4.0)\n', (6220, 6236), False, 'import openfermion\n')] |
seryj/spyder | spyder/plugins/variableexplorer/widgets/arrayeditor.py | acea4f501c1a04d57b02e5e817708a69b503f430 | # -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""
NumPy Array Editor Dialog based on Qt
"""
# pylint: disable=C0103
# pylint: disable=R0903
# pylint: disable=R0911
# pylint: disable=R0201
# Standard library imports
from __future__ import print_function
# Third party imports
from qtpy.compat import from_qvariant, to_qvariant
from qtpy.QtCore import (QAbstractTableModel, QItemSelection, QLocale,
QItemSelectionRange, QModelIndex, Qt, Slot)
from qtpy.QtGui import QColor, QCursor, QDoubleValidator, QKeySequence
from qtpy.QtWidgets import (QAbstractItemDelegate, QApplication, QCheckBox,
QComboBox, QDialog, QDialogButtonBox, QGridLayout,
QHBoxLayout, QInputDialog, QItemDelegate, QLabel,
QLineEdit, QMenu, QMessageBox, QPushButton,
QSpinBox, QStackedWidget, QTableView, QVBoxLayout,
QWidget)
import numpy as np
# Local imports
from spyder.config.base import _
from spyder.config.fonts import DEFAULT_SMALL_DELTA
from spyder.config.gui import get_font, config_shortcut
from spyder.py3compat import (io, is_binary_string, is_string,
is_text_string, PY3, to_binary_string,
to_text_string)
from spyder.utils import icon_manager as ima
from spyder.utils.qthelpers import add_actions, create_action, keybinding
# Note: string and unicode data types will be formatted with '%s' (see below)
SUPPORTED_FORMATS = {
'single': '%.6g',
'double': '%.6g',
'float_': '%.6g',
'longfloat': '%.6g',
'float16': '%.6g',
'float32': '%.6g',
'float64': '%.6g',
'float96': '%.6g',
'float128': '%.6g',
'csingle': '%r',
'complex_': '%r',
'clongfloat': '%r',
'complex64': '%r',
'complex128': '%r',
'complex192': '%r',
'complex256': '%r',
'byte': '%d',
'bytes8': '%s',
'short': '%d',
'intc': '%d',
'int_': '%d',
'longlong': '%d',
'intp': '%d',
'int8': '%d',
'int16': '%d',
'int32': '%d',
'int64': '%d',
'ubyte': '%d',
'ushort': '%d',
'uintc': '%d',
'uint': '%d',
'ulonglong': '%d',
'uintp': '%d',
'uint8': '%d',
'uint16': '%d',
'uint32': '%d',
'uint64': '%d',
'bool_': '%r',
'bool8': '%r',
'bool': '%r',
}
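# Above these sizes the model pages the array in chunks (see
# ArrayModel.ROWS_TO_LOAD / COLS_TO_LOAD) instead of loading it all at once.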
LARGE_SIZE = 5e5
LARGE_NROWS = 1e5
LARGE_COLS = 60
#==============================================================================
# Utility functions
#==============================================================================
def is_float(dtype):
"""Return True if datatype dtype is a float kind"""
return ('float' in dtype.name) or dtype.name in ['single', 'double']
def is_number(dtype):
"""Return True is datatype dtype is a number kind"""
return is_float(dtype) or ('int' in dtype.name) or ('long' in dtype.name) \
or ('short' in dtype.name)
def get_idx_rect(index_list):
"""Extract the boundaries from a list of indexes"""
rows, cols = list(zip(*[(i.row(), i.column()) for i in index_list]))
    return (min(rows), max(rows), min(cols), max(cols))
#==============================================================================
# Main classes
#==============================================================================
class ArrayModel(QAbstractTableModel):
"""Array Editor Table Model"""
ROWS_TO_LOAD = 500
COLS_TO_LOAD = 40
def __init__(self, data, format="%.6g", xlabels=None, ylabels=None,
readonly=False, parent=None):
QAbstractTableModel.__init__(self)
self.dialog = parent
self.changes = {}
self.xlabels = xlabels
self.ylabels = ylabels
self.readonly = readonly
self.test_array = np.array([0], dtype=data.dtype)
# for complex numbers, shading will be based on absolute value
# but for all other types it will be the real part
if data.dtype in (np.complex64, np.complex128):
self.color_func = np.abs
else:
self.color_func = np.real
        # Background color settings
huerange = [.66, .99] # Hue
self.sat = .7 # Saturation
self.val = 1. # Value
self.alp = .6 # Alpha-channel
self._data = data
self._format = format
self.total_rows = self._data.shape[0]
self.total_cols = self._data.shape[1]
size = self.total_rows * self.total_cols
try:
self.vmin = np.nanmin(self.color_func(data))
self.vmax = np.nanmax(self.color_func(data))
if self.vmax == self.vmin:
self.vmin -= 1
self.hue0 = huerange[0]
self.dhue = huerange[1]-huerange[0]
self.bgcolor_enabled = True
except (TypeError, ValueError):
self.vmin = None
self.vmax = None
self.hue0 = None
self.dhue = None
self.bgcolor_enabled = False
# Use paging when the total size, number of rows or number of
# columns is too large
if size > LARGE_SIZE:
self.rows_loaded = self.ROWS_TO_LOAD
self.cols_loaded = self.COLS_TO_LOAD
else:
if self.total_rows > LARGE_NROWS:
self.rows_loaded = self.ROWS_TO_LOAD
else:
self.rows_loaded = self.total_rows
if self.total_cols > LARGE_COLS:
self.cols_loaded = self.COLS_TO_LOAD
else:
self.cols_loaded = self.total_cols
def get_format(self):
"""Return current format"""
# Avoid accessing the private attribute _format from outside
return self._format
def get_data(self):
"""Return data"""
return self._data
def set_format(self, format):
"""Change display format"""
self._format = format
self.reset()
def columnCount(self, qindex=QModelIndex()):
"""Array column number"""
if self.total_cols <= self.cols_loaded:
return self.total_cols
else:
return self.cols_loaded
def rowCount(self, qindex=QModelIndex()):
"""Array row number"""
if self.total_rows <= self.rows_loaded:
return self.total_rows
else:
return self.rows_loaded
def can_fetch_more(self, rows=False, columns=False):
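        """Return True if there are still unloaded rows/columns to fetch."""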
if rows:
if self.total_rows > self.rows_loaded:
return True
else:
return False
if columns:
if self.total_cols > self.cols_loaded:
return True
else:
return False
def fetch_more(self, rows=False, columns=False):
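        """Load the next chunk of rows/columns (triggered by scrolling)."""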
if self.can_fetch_more(rows=rows):
            remainder = self.total_rows - self.rows_loaded
            items_to_fetch = min(remainder, self.ROWS_TO_LOAD)
self.beginInsertRows(QModelIndex(), self.rows_loaded,
self.rows_loaded + items_to_fetch - 1)
self.rows_loaded += items_to_fetch
self.endInsertRows()
if self.can_fetch_more(columns=columns):
            remainder = self.total_cols - self.cols_loaded
            items_to_fetch = min(remainder, self.COLS_TO_LOAD)
self.beginInsertColumns(QModelIndex(), self.cols_loaded,
self.cols_loaded + items_to_fetch - 1)
self.cols_loaded += items_to_fetch
self.endInsertColumns()
def bgcolor(self, state):
"""Toggle backgroundcolor"""
self.bgcolor_enabled = state > 0
self.reset()
def get_value(self, index):
i = index.row()
j = index.column()
if len(self._data.shape) == 1:
value = self._data[j]
else:
value = self._data[i, j]
return self.changes.get((i, j), value)
def data(self, index, role=Qt.DisplayRole):
"""Cell content"""
if not index.isValid():
return to_qvariant()
value = self.get_value(index)
if is_binary_string(value):
try:
value = to_text_string(value, 'utf8')
except:
pass
if role == Qt.DisplayRole:
if value is np.ma.masked:
return ''
else:
try:
return to_qvariant(self._format % value)
except TypeError:
self.readonly = True
return repr(value)
elif role == Qt.TextAlignmentRole:
return to_qvariant(int(Qt.AlignCenter|Qt.AlignVCenter))
elif role == Qt.BackgroundColorRole and self.bgcolor_enabled \
and value is not np.ma.masked:
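            # Shade the cell background: map the value linearly onto the hue
            # range [hue0, hue0 + dhue] (larger values map closer to hue0).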
try:
hue = (self.hue0 +
self.dhue * (float(self.vmax) - self.color_func(value))
/ (float(self.vmax) - self.vmin))
hue = float(np.abs(hue))
color = QColor.fromHsvF(hue, self.sat, self.val, self.alp)
return to_qvariant(color)
except TypeError:
return to_qvariant()
elif role == Qt.FontRole:
return to_qvariant(get_font(font_size_delta=DEFAULT_SMALL_DELTA))
return to_qvariant()
def setData(self, index, value, role=Qt.EditRole):
"""Cell content change"""
if not index.isValid() or self.readonly:
return False
i = index.row()
j = index.column()
value = from_qvariant(value, str)
dtype = self._data.dtype.name
if dtype == "bool":
try:
val = bool(float(value))
except ValueError:
val = value.lower() == "true"
elif dtype.startswith("string") or dtype.startswith("bytes"):
val = to_binary_string(value, 'utf8')
elif dtype.startswith("unicode") or dtype.startswith("str"):
val = to_text_string(value)
else:
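            # Reject bare/incomplete scientific notation such as 'e10' or '1e'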
if value.lower().startswith('e') or value.lower().endswith('e'):
return False
try:
val = complex(value)
if not val.imag:
val = val.real
except ValueError as e:
QMessageBox.critical(self.dialog, "Error",
"Value error: %s" % str(e))
return False
try:
self.test_array[0] = val # will raise an Exception eventually
except OverflowError as e:
print("OverflowError: " + str(e)) # spyder: test-skip
QMessageBox.critical(self.dialog, "Error",
"Overflow error: %s" % str(e))
return False
# Add change to self.changes
self.changes[(i, j)] = val
self.dataChanged.emit(index, index)
if not is_string(val):
if val > self.vmax:
self.vmax = val
if val < self.vmin:
self.vmin = val
return True
def flags(self, index):
"""Set editable flag"""
if not index.isValid():
return Qt.ItemIsEnabled
return Qt.ItemFlags(QAbstractTableModel.flags(self, index)|
Qt.ItemIsEditable)
def headerData(self, section, orientation, role=Qt.DisplayRole):
"""Set header data"""
if role != Qt.DisplayRole:
return to_qvariant()
labels = self.xlabels if orientation == Qt.Horizontal else self.ylabels
if labels is None:
return to_qvariant(int(section))
else:
return to_qvariant(labels[section])
def reset(self):
self.beginResetModel()
self.endResetModel()
class ArrayDelegate(QItemDelegate):
"""Array Editor Item Delegate"""
def __init__(self, dtype, parent=None):
QItemDelegate.__init__(self, parent)
self.dtype = dtype
def createEditor(self, parent, option, index):
"""Create editor widget"""
model = index.model()
value = model.get_value(index)
if model._data.dtype.name == "bool":
value = not value
model.setData(index, to_qvariant(value))
return
elif value is not np.ma.masked:
editor = QLineEdit(parent)
editor.setFont(get_font(font_size_delta=DEFAULT_SMALL_DELTA))
editor.setAlignment(Qt.AlignCenter)
if is_number(self.dtype):
validator = QDoubleValidator(editor)
validator.setLocale(QLocale('C'))
editor.setValidator(validator)
editor.returnPressed.connect(self.commitAndCloseEditor)
return editor
def commitAndCloseEditor(self):
"""Commit and close editor"""
editor = self.sender()
# Avoid a segfault with PyQt5. Variable value won't be changed
# but at least Spyder won't crash. It seems generated by a bug in sip.
try:
self.commitData.emit(editor)
except AttributeError:
pass
self.closeEditor.emit(editor, QAbstractItemDelegate.NoHint)
def setEditorData(self, editor, index):
"""Set editor widget's data"""
text = from_qvariant(index.model().data(index, Qt.DisplayRole), str)
editor.setText(text)
#TODO: Implement "Paste" (from clipboard) feature
class ArrayView(QTableView):
"""Array view class"""
def __init__(self, parent, model, dtype, shape):
QTableView.__init__(self, parent)
self.setModel(model)
self.setItemDelegate(ArrayDelegate(dtype, self))
total_width = 0
for k in range(shape[1]):
total_width += self.columnWidth(k)
self.viewport().resize(min(total_width, 1024), self.height())
self.shape = shape
self.menu = self.setup_menu()
config_shortcut(self.copy, context='variable_explorer', name='copy',
parent=self)
self.horizontalScrollBar().valueChanged.connect(
lambda val: self.load_more_data(val, columns=True))
self.verticalScrollBar().valueChanged.connect(
lambda val: self.load_more_data(val, rows=True))
def load_more_data(self, value, rows=False, columns=False):
try:
old_selection = self.selectionModel().selection()
old_rows_loaded = old_cols_loaded = None
if rows and value == self.verticalScrollBar().maximum():
old_rows_loaded = self.model().rows_loaded
self.model().fetch_more(rows=rows)
if columns and value == self.horizontalScrollBar().maximum():
old_cols_loaded = self.model().cols_loaded
self.model().fetch_more(columns=columns)
if old_rows_loaded is not None or old_cols_loaded is not None:
# if we've changed anything, update selection
new_selection = QItemSelection()
for part in old_selection:
top = part.top()
bottom = part.bottom()
if (old_rows_loaded is not None and
top == 0 and bottom == (old_rows_loaded-1)):
# complete column selected (so expand it to match
# updated range)
bottom = self.model().rows_loaded-1
left = part.left()
right = part.right()
if (old_cols_loaded is not None
and left == 0 and right == (old_cols_loaded-1)):
# compete row selected (so expand it to match updated
# range)
right = self.model().cols_loaded-1
top_left = self.model().index(top, left)
bottom_right = self.model().index(bottom, right)
part = QItemSelectionRange(top_left, bottom_right)
new_selection.append(part)
                self.selectionModel().select(
                    new_selection, self.selectionModel().ClearAndSelect)
except NameError:
# Needed to handle a NameError while fetching data when closing
            # See issue 7880
pass
def resize_to_contents(self):
"""Resize cells to contents"""
QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
self.resizeColumnsToContents()
self.model().fetch_more(columns=True)
self.resizeColumnsToContents()
QApplication.restoreOverrideCursor()
def setup_menu(self):
"""Setup context menu"""
self.copy_action = create_action(self, _('Copy'),
shortcut=keybinding('Copy'),
icon=ima.icon('editcopy'),
triggered=self.copy,
context=Qt.WidgetShortcut)
menu = QMenu(self)
        add_actions(menu, [self.copy_action])
return menu
def contextMenuEvent(self, event):
"""Reimplement Qt method"""
self.menu.popup(event.globalPos())
event.accept()
def keyPressEvent(self, event):
"""Reimplement Qt method"""
if event == QKeySequence.Copy:
self.copy()
else:
QTableView.keyPressEvent(self, event)
def _sel_to_text(self, cell_range):
"""Copy an array portion to a unicode string"""
if not cell_range:
return
row_min, row_max, col_min, col_max = get_idx_rect(cell_range)
if col_min == 0 and col_max == (self.model().cols_loaded-1):
# we've selected a whole column. It isn't possible to
# select only the first part of a column without loading more,
# so we can treat it as intentional and copy the whole thing
col_max = self.model().total_cols-1
if row_min == 0 and row_max == (self.model().rows_loaded-1):
row_max = self.model().total_rows-1
_data = self.model().get_data()
if PY3:
output = io.BytesIO()
else:
output = io.StringIO()
try:
np.savetxt(output, _data[row_min:row_max+1, col_min:col_max+1],
delimiter='\t', fmt=self.model().get_format())
except:
QMessageBox.warning(self, _("Warning"),
_("It was not possible to copy values for "
"this array"))
return
contents = output.getvalue().decode('utf-8')
output.close()
return contents
@Slot()
def copy(self):
"""Copy text to clipboard"""
cliptxt = self._sel_to_text( self.selectedIndexes() )
clipboard = QApplication.clipboard()
clipboard.setText(cliptxt)
class ArrayEditorWidget(QWidget):
def __init__(self, parent, data, readonly=False,
xlabels=None, ylabels=None):
QWidget.__init__(self, parent)
self.data = data
self.old_data_shape = None
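        # 1D and 0D arrays are displayed as a single column; keep the
        # original shape so accept/reject can restore it.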
if len(self.data.shape) == 1:
self.old_data_shape = self.data.shape
self.data.shape = (self.data.shape[0], 1)
elif len(self.data.shape) == 0:
self.old_data_shape = self.data.shape
self.data.shape = (1, 1)
format = SUPPORTED_FORMATS.get(data.dtype.name, '%s')
self.model = ArrayModel(self.data, format=format, xlabels=xlabels,
ylabels=ylabels, readonly=readonly, parent=self)
self.view = ArrayView(self, self.model, data.dtype, data.shape)
btn_layout = QHBoxLayout()
btn_layout.setAlignment(Qt.AlignLeft)
btn = QPushButton(_( "Format"))
# disable format button for int type
btn.setEnabled(is_float(data.dtype))
btn_layout.addWidget(btn)
btn.clicked.connect(self.change_format)
btn = QPushButton(_( "Resize"))
btn_layout.addWidget(btn)
btn.clicked.connect(self.view.resize_to_contents)
bgcolor = QCheckBox(_( 'Background color'))
bgcolor.setChecked(self.model.bgcolor_enabled)
bgcolor.setEnabled(self.model.bgcolor_enabled)
bgcolor.stateChanged.connect(self.model.bgcolor)
btn_layout.addWidget(bgcolor)
layout = QVBoxLayout()
layout.addWidget(self.view)
layout.addLayout(btn_layout)
self.setLayout(layout)
def accept_changes(self):
"""Accept changes"""
for (i, j), value in list(self.model.changes.items()):
self.data[i, j] = value
if self.old_data_shape is not None:
self.data.shape = self.old_data_shape
def reject_changes(self):
"""Reject changes"""
if self.old_data_shape is not None:
self.data.shape = self.old_data_shape
def change_format(self):
"""Change display format"""
format, valid = QInputDialog.getText(self, _( 'Format'),
_( "Float formatting"),
QLineEdit.Normal, self.model.get_format())
if valid:
format = str(format)
try:
format % 1.1
except:
QMessageBox.critical(self, _("Error"),
_("Format (%s) is incorrect") % format)
return
self.model.set_format(format)
class ArrayEditor(QDialog):
"""Array Editor Dialog"""
def __init__(self, parent=None):
QDialog.__init__(self, parent)
# Destroying the C++ object right after closing the dialog box,
# otherwise it may be garbage-collected in another QThread
# (e.g. the editor's analysis thread in Spyder), thus leading to
# a segmentation fault on UNIX or an application crash on Windows
self.setAttribute(Qt.WA_DeleteOnClose)
self.data = None
self.arraywidget = None
self.stack = None
self.layout = None
self.btn_save_and_close = None
self.btn_close = None
# Values for 3d array editor
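        # One dict per axis, mapping a slice index to the position of its
        # lazily-created widget in the stack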
self.dim_indexes = [{}, {}, {}]
self.last_dim = 0 # Adjust this for changing the startup dimension
def setup_and_check(self, data, title='', readonly=False,
xlabels=None, ylabels=None):
"""
Setup ArrayEditor:
return False if data is not supported, True otherwise
"""
self.data = data
readonly = readonly or not self.data.flags.writeable
is_record_array = data.dtype.names is not None
is_masked_array = isinstance(data, np.ma.MaskedArray)
if data.ndim > 3:
self.error(_("Arrays with more than 3 dimensions are not "
"supported"))
return False
if xlabels is not None and len(xlabels) != self.data.shape[1]:
self.error(_("The 'xlabels' argument length do no match array "
"column number"))
return False
if ylabels is not None and len(ylabels) != self.data.shape[0]:
self.error(_("The 'ylabels' argument length do no match array row "
"number"))
return False
if not is_record_array:
dtn = data.dtype.name
if dtn not in SUPPORTED_FORMATS and not dtn.startswith('str') \
and not dtn.startswith('unicode'):
arr = _("%s arrays") % data.dtype.name
self.error(_("%s are currently not supported") % arr)
return False
self.layout = QGridLayout()
self.setLayout(self.layout)
self.setWindowIcon(ima.icon('arredit'))
if title:
title = to_text_string(title) + " - " + _("NumPy array")
else:
title = _("Array editor")
if readonly:
title += ' (' + _('read only') + ')'
self.setWindowTitle(title)
self.resize(600, 500)
# Stack widget
self.stack = QStackedWidget(self)
if is_record_array:
for name in data.dtype.names:
self.stack.addWidget(ArrayEditorWidget(self, data[name],
readonly, xlabels, ylabels))
elif is_masked_array:
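            # Masked arrays get three stacked views: the masked data, the
            # underlying raw data, and the mask itself (see the combo labels)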
self.stack.addWidget(ArrayEditorWidget(self, data, readonly,
xlabels, ylabels))
self.stack.addWidget(ArrayEditorWidget(self, data.data, readonly,
xlabels, ylabels))
self.stack.addWidget(ArrayEditorWidget(self, data.mask, readonly,
xlabels, ylabels))
elif data.ndim == 3:
pass
else:
self.stack.addWidget(ArrayEditorWidget(self, data, readonly,
xlabels, ylabels))
self.arraywidget = self.stack.currentWidget()
if self.arraywidget:
self.arraywidget.model.dataChanged.connect(
self.save_and_close_enable)
self.stack.currentChanged.connect(self.current_widget_changed)
self.layout.addWidget(self.stack, 1, 0)
# Buttons configuration
btn_layout = QHBoxLayout()
if is_record_array or is_masked_array or data.ndim == 3:
if is_record_array:
btn_layout.addWidget(QLabel(_("Record array fields:")))
names = []
for name in data.dtype.names:
field = data.dtype.fields[name]
text = name
if len(field) >= 3:
title = field[2]
if not is_text_string(title):
title = repr(title)
text += ' - '+title
names.append(text)
else:
names = [_('Masked data'), _('Data'), _('Mask')]
if data.ndim == 3:
# QSpinBox
self.index_spin = QSpinBox(self, keyboardTracking=False)
self.index_spin.valueChanged.connect(self.change_active_widget)
# QComboBox
names = [str(i) for i in range(3)]
ra_combo = QComboBox(self)
ra_combo.addItems(names)
ra_combo.currentIndexChanged.connect(self.current_dim_changed)
# Adding the widgets to layout
label = QLabel(_("Axis:"))
btn_layout.addWidget(label)
btn_layout.addWidget(ra_combo)
self.shape_label = QLabel()
btn_layout.addWidget(self.shape_label)
label = QLabel(_("Index:"))
btn_layout.addWidget(label)
btn_layout.addWidget(self.index_spin)
self.slicing_label = QLabel()
btn_layout.addWidget(self.slicing_label)
# set the widget to display when launched
self.current_dim_changed(self.last_dim)
else:
ra_combo = QComboBox(self)
ra_combo.currentIndexChanged.connect(self.stack.setCurrentIndex)
ra_combo.addItems(names)
btn_layout.addWidget(ra_combo)
if is_masked_array:
label = QLabel(_("<u>Warning</u>: changes are applied separately"))
label.setToolTip(_("For performance reasons, changes applied "\
"to masked array won't be reflected in "\
"array's data (and vice-versa)."))
btn_layout.addWidget(label)
btn_layout.addStretch()
if not readonly:
self.btn_save_and_close = QPushButton(_('Save and Close'))
self.btn_save_and_close.setDisabled(True)
self.btn_save_and_close.clicked.connect(self.accept)
btn_layout.addWidget(self.btn_save_and_close)
self.btn_close = QPushButton(_('Close'))
self.btn_close.setAutoDefault(True)
self.btn_close.setDefault(True)
self.btn_close.clicked.connect(self.reject)
btn_layout.addWidget(self.btn_close)
self.layout.addLayout(btn_layout, 2, 0)
self.setMinimumSize(400, 300)
# Make the dialog act as a window
self.setWindowFlags(Qt.Window)
return True
@Slot(QModelIndex, QModelIndex)
def save_and_close_enable(self, left_top, bottom_right):
"""Handle the data change event to enable the save and close button."""
if self.btn_save_and_close:
self.btn_save_and_close.setEnabled(True)
self.btn_save_and_close.setAutoDefault(True)
self.btn_save_and_close.setDefault(True)
def current_widget_changed(self, index):
self.arraywidget = self.stack.widget(index)
self.arraywidget.model.dataChanged.connect(self.save_and_close_enable)
def change_active_widget(self, index):
"""
        This is implemented to handle negative index values for 3d arrays,
        giving the same behavior as slicing
"""
string_index = [':']*3
string_index[self.last_dim] = '<font color=red>%i</font>'
self.slicing_label.setText((r"Slicing: [" + ", ".join(string_index) +
"]") % index)
if index < 0:
data_index = self.data.shape[self.last_dim] + index
else:
data_index = index
slice_index = [slice(None)]*3
slice_index[self.last_dim] = data_index
stack_index = self.dim_indexes[self.last_dim].get(data_index)
if stack_index is None:
stack_index = self.stack.count()
try:
self.stack.addWidget(ArrayEditorWidget(
self, self.data[tuple(slice_index)]))
except IndexError: # Handle arrays of size 0 in one axis
self.stack.addWidget(ArrayEditorWidget(self, self.data))
self.dim_indexes[self.last_dim][data_index] = stack_index
self.stack.update()
self.stack.setCurrentIndex(stack_index)
def current_dim_changed(self, index):
"""
        This changes the active axis the array editor is plotting over
        in 3D
"""
self.last_dim = index
string_size = ['%i']*3
string_size[index] = '<font color=red>%i</font>'
self.shape_label.setText(('Shape: (' + ', '.join(string_size) +
') ') % self.data.shape)
if self.index_spin.value() != 0:
self.index_spin.setValue(0)
else:
# this is done since if the value is currently 0 it does not emit
# currentIndexChanged(int)
self.change_active_widget(0)
self.index_spin.setRange(-self.data.shape[index],
self.data.shape[index]-1)
@Slot()
def accept(self):
"""Reimplement Qt method"""
for index in range(self.stack.count()):
self.stack.widget(index).accept_changes()
QDialog.accept(self)
def get_value(self):
"""Return modified array -- this is *not* a copy"""
        # It is important to avoid accessing the Qt C++ object as it has probably
# already been destroyed, due to the Qt.WA_DeleteOnClose attribute
return self.data
def error(self, message):
"""An error occured, closing the dialog box"""
QMessageBox.critical(self, _("Array editor"), message)
self.setAttribute(Qt.WA_DeleteOnClose)
self.reject()
@Slot()
def reject(self):
"""Reimplement Qt method"""
if self.arraywidget is not None:
for index in range(self.stack.count()):
self.stack.widget(index).reject_changes()
QDialog.reject(self)
| [((20131, 20137), 'qtpy.QtCore.Slot', 'Slot', ([], {}), '()\n', (20135, 20137), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((30224, 30254), 'qtpy.QtCore.Slot', 'Slot', (['QModelIndex', 'QModelIndex'], {}), '(QModelIndex, QModelIndex)\n', (30228, 30254), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((32813, 32819), 'qtpy.QtCore.Slot', 'Slot', ([], {}), '()\n', (32817, 32819), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((33513, 33519), 'qtpy.QtCore.Slot', 'Slot', ([], {}), '()\n', (33517, 33519), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((4530, 4564), 'qtpy.QtCore.QAbstractTableModel.__init__', 'QAbstractTableModel.__init__', (['self'], {}), '(self)\n', (4558, 4564), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((4749, 4780), 'numpy.array', 'np.array', (['[0]'], {'dtype': 'data.dtype'}), '([0], dtype=data.dtype)\n', (4757, 4780), True, 'import numpy as np\n'), ((6988, 7001), 'qtpy.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (6999, 7001), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((7209, 7222), 'qtpy.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (7220, 7222), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((9196, 9219), 'spyder.py3compat.is_binary_string', 'is_binary_string', (['value'], {}), '(value)\n', (9212, 9219), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((10438, 10451), 'qtpy.compat.to_qvariant', 'to_qvariant', ([], {}), '()\n', (10449, 10451), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((10691, 10716), 'qtpy.compat.from_qvariant', 'from_qvariant', (['value', 'str'], {}), '(value, str)\n', (10704, 10716), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((13108, 13144), 'qtpy.QtWidgets.QItemDelegate.__init__', 'QItemDelegate.__init__', (['self', 'parent'], {}), '(self, parent)\n', (13130, 13144), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((14786, 14819), 'qtpy.QtWidgets.QTableView.__init__', 'QTableView.__init__', (['self', 'parent'], {}), '(self, parent)\n', (14805, 14819), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((15165, 15251), 'spyder.config.gui.config_shortcut', 'config_shortcut', (['self.copy'], {'context': '"""variable_explorer"""', 'name': '"""copy"""', 'parent': 'self'}), "(self.copy, context='variable_explorer', name='copy', parent\n =self)\n", (15180, 15251), False, 'from spyder.config.gui import get_font, config_shortcut\n'), ((17938, 17974), 
'qtpy.QtWidgets.QApplication.restoreOverrideCursor', 'QApplication.restoreOverrideCursor', ([], {}), '()\n', (17972, 17974), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((18385, 18396), 'qtpy.QtWidgets.QMenu', 'QMenu', (['self'], {}), '(self)\n', (18390, 18396), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((18406, 18443), 'spyder.utils.qthelpers.add_actions', 'add_actions', (['menu', '[self.copy_action]'], {}), '(menu, [self.copy_action])\n', (18417, 18443), False, 'from spyder.utils.qthelpers import add_actions, create_action, keybinding\n'), ((20281, 20305), 'qtpy.QtWidgets.QApplication.clipboard', 'QApplication.clipboard', ([], {}), '()\n', (20303, 20305), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((20493, 20523), 'qtpy.QtWidgets.QWidget.__init__', 'QWidget.__init__', (['self', 'parent'], {}), '(self, parent)\n', (20509, 20523), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((21181, 21194), 'qtpy.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (21192, 21194), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((21876, 21889), 'qtpy.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (21887, 21889), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((23120, 23150), 'qtpy.QtWidgets.QDialog.__init__', 'QDialog.__init__', (['self', 'parent'], {}), '(self, parent)\n', (23136, 23150), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((25257, 25270), 'qtpy.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (25268, 25270), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((25687, 25707), 'qtpy.QtWidgets.QStackedWidget', 'QStackedWidget', (['self'], 
{}), '(self)\n', (25701, 25707), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((27020, 27033), 'qtpy.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (27031, 27033), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((32993, 33013), 'qtpy.QtWidgets.QDialog.accept', 'QDialog.accept', (['self'], {}), '(self)\n', (33007, 33013), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((33743, 33763), 'qtpy.QtWidgets.QDialog.reject', 'QDialog.reject', (['self'], {}), '(self)\n', (33757, 33763), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((9131, 9144), 'qtpy.compat.to_qvariant', 'to_qvariant', ([], {}), '()\n', (9142, 9144), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((12077, 12091), 'spyder.py3compat.is_string', 'is_string', (['val'], {}), '(val)\n', (12086, 12091), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((12656, 12669), 'qtpy.compat.to_qvariant', 'to_qvariant', ([], {}), '()\n', (12667, 12669), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((12860, 12888), 'qtpy.compat.to_qvariant', 'to_qvariant', (['labels[section]'], {}), '(labels[section])\n', (12871, 12888), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((17778, 17800), 'qtpy.QtGui.QCursor', 'QCursor', (['Qt.WaitCursor'], {}), '(Qt.WaitCursor)\n', (17785, 17800), False, 'from qtpy.QtGui import QColor, QCursor, QDoubleValidator, QKeySequence\n'), ((18086, 18095), 'spyder.config.base._', '_', (['"""Copy"""'], {}), "('Copy')\n", (18087, 18095), False, 'from spyder.config.base import _\n'), ((18783, 18820), 'qtpy.QtWidgets.QTableView.keyPressEvent', 'QTableView.keyPressEvent', (['self', 'event'], {}), '(self, event)\n', (18807, 18820), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((19577, 19589), 'spyder.py3compat.io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (19587, 19589), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((19627, 19640), 'spyder.py3compat.io.StringIO', 'io.StringIO', ([], {}), '()\n', (19638, 19640), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((21269, 21280), 'spyder.config.base._', '_', (['"""Format"""'], {}), 
"('Format')\n", (21270, 21280), False, 'from spyder.config.base import _\n'), ((21486, 21497), 'spyder.config.base._', '_', (['"""Resize"""'], {}), "('Resize')\n", (21487, 21497), False, 'from spyder.config.base import _\n'), ((21623, 21644), 'spyder.config.base._', '_', (['"""Background color"""'], {}), "('Background color')\n", (21624, 21644), False, 'from spyder.config.base import _\n'), ((22537, 22548), 'spyder.config.base._', '_', (['"""Format"""'], {}), "('Format')\n", (22538, 22548), False, 'from spyder.config.base import _\n'), ((22585, 22606), 'spyder.config.base._', '_', (['"""Float formatting"""'], {}), "('Float formatting')\n", (22586, 22606), False, 'from spyder.config.base import _\n'), ((25336, 25355), 'spyder.utils.icon_manager.icon', 'ima.icon', (['"""arredit"""'], {}), "('arredit')\n", (25344, 25355), True, 'from spyder.utils import icon_manager as ima\n'), ((25482, 25499), 'spyder.config.base._', '_', (['"""Array editor"""'], {}), "('Array editor')\n", (25483, 25499), False, 'from spyder.config.base import _\n'), ((29821, 29831), 'spyder.config.base._', '_', (['"""Close"""'], {}), "('Close')\n", (29822, 29831), False, 'from spyder.config.base import _\n'), ((33406, 33423), 'spyder.config.base._', '_', (['"""Array editor"""'], {}), "('Array editor')\n", (33407, 33423), False, 'from spyder.config.base import _\n'), ((8009, 8022), 'qtpy.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (8020, 8022), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((8406, 8419), 'qtpy.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (8417, 8419), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((9264, 9293), 'spyder.py3compat.to_text_string', 'to_text_string', (['value', '"""utf8"""'], {}), "(value, 'utf8')\n", (9278, 9293), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((11014, 11045), 'spyder.py3compat.to_binary_string', 'to_binary_string', (['value', '"""utf8"""'], {}), "(value, 'utf8')\n", (11030, 11045), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((12409, 12447), 'qtpy.QtCore.QAbstractTableModel.flags', 'QAbstractTableModel.flags', (['self', 'index'], {}), '(self, index)\n', (12434, 12447), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((13445, 13463), 'qtpy.compat.to_qvariant', 'to_qvariant', (['value'], {}), '(value)\n', (13456, 13463), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((13548, 13565), 'qtpy.QtWidgets.QLineEdit', 'QLineEdit', (['parent'], {}), '(parent)\n', (13557, 13565), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((16301, 16317), 'qtpy.QtCore.QItemSelection', 'QItemSelection', ([], {}), '()\n', (16315, 16317), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((18148, 18166), 'spyder.utils.qthelpers.keybinding', 'keybinding', (['"""Copy"""'], {}), "('Copy')\n", (18158, 18166), False, 'from spyder.utils.qthelpers import 
add_actions, create_action, keybinding\n'), ((18215, 18235), 'spyder.utils.icon_manager.icon', 'ima.icon', (['"""editcopy"""'], {}), "('editcopy')\n", (18223, 18235), True, 'from spyder.utils import icon_manager as ima\n'), ((24330, 24387), 'spyder.config.base._', '_', (['"""Arrays with more than 3 dimensions are not supported"""'], {}), "('Arrays with more than 3 dimensions are not supported')\n", (24331, 24387), False, 'from spyder.config.base import _\n'), ((24540, 24606), 'spyder.config.base._', '_', (['"""The \'xlabels\' argument length do no match array column number"""'], {}), '("The \'xlabels\' argument length do no match array column number")\n', (24541, 24606), False, 'from spyder.config.base import _\n'), ((24759, 24822), 'spyder.config.base._', '_', (['"""The \'ylabels\' argument length do no match array row number"""'], {}), '("The \'ylabels\' argument length do no match array row number")\n', (24760, 24822), False, 'from spyder.config.base import _\n'), ((25429, 25445), 'spyder.config.base._', '_', (['"""NumPy array"""'], {}), "('NumPy array')\n", (25430, 25445), False, 'from spyder.config.base import _\n'), ((27819, 27857), 'qtpy.QtWidgets.QSpinBox', 'QSpinBox', (['self'], {'keyboardTracking': '(False)'}), '(self, keyboardTracking=False)\n', (27827, 27857), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((28048, 28063), 'qtpy.QtWidgets.QComboBox', 'QComboBox', (['self'], {}), '(self)\n', (28057, 28063), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((28407, 28415), 'qtpy.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (28413, 28415), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((28655, 28663), 'qtpy.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (28661, 28663), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((28885, 28900), 'qtpy.QtWidgets.QComboBox', 'QComboBox', (['self'], {}), '(self)\n', (28894, 28900), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((29580, 29599), 'spyder.config.base._', '_', (['"""Save and Close"""'], {}), "('Save and Close')\n", (29581, 29599), False, 'from spyder.config.base import _\n'), ((9508, 9541), 'qtpy.compat.to_qvariant', 'to_qvariant', (['(self._format % value)'], {}), '(self._format % value)\n', (9519, 9541), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((11135, 11156), 'spyder.py3compat.to_text_string', 
'to_text_string', (['value'], {}), '(value)\n', (11149, 11156), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((13594, 13639), 'spyder.config.gui.get_font', 'get_font', ([], {'font_size_delta': 'DEFAULT_SMALL_DELTA'}), '(font_size_delta=DEFAULT_SMALL_DELTA)\n', (13602, 13639), False, 'from spyder.config.gui import get_font, config_shortcut\n'), ((13758, 13782), 'qtpy.QtGui.QDoubleValidator', 'QDoubleValidator', (['editor'], {}), '(editor)\n', (13774, 13782), False, 'from qtpy.QtGui import QColor, QCursor, QDoubleValidator, QKeySequence\n'), ((17299, 17342), 'qtpy.QtCore.QItemSelectionRange', 'QItemSelectionRange', (['top_left', 'bottom_right'], {}), '(top_left, bottom_right)\n', (17318, 17342), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((19859, 19871), 'spyder.config.base._', '_', (['"""Warning"""'], {}), "('Warning')\n", (19860, 19871), False, 'from spyder.config.base import _\n'), ((19906, 19960), 'spyder.config.base._', '_', (['"""It was not possible to copy values for this array"""'], {}), "('It was not possible to copy values for this array')\n", (19907, 19960), False, 'from spyder.config.base import _\n'), ((25098, 25112), 'spyder.config.base._', '_', (['"""%s arrays"""'], {}), "('%s arrays')\n", (25099, 25112), False, 'from spyder.config.base import _\n'), ((25397, 25418), 'spyder.py3compat.to_text_string', 'to_text_string', (['title'], {}), '(title)\n', (25411, 25418), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((25551, 25565), 'spyder.config.base._', '_', (['"""read only"""'], {}), "('read only')\n", (25552, 25565), False, 'from spyder.config.base import _\n'), ((27684, 27700), 'spyder.config.base._', '_', (['"""Masked data"""'], {}), "('Masked data')\n", (27685, 27700), False, 'from spyder.config.base import _\n'), ((27702, 27711), 'spyder.config.base._', '_', (['"""Data"""'], {}), "('Data')\n", (27703, 27711), False, 'from spyder.config.base import _\n'), ((27713, 27722), 'spyder.config.base._', '_', (['"""Mask"""'], {}), "('Mask')\n", (27714, 27722), False, 'from spyder.config.base import _\n'), ((28266, 28276), 'spyder.config.base._', '_', (['"""Axis:"""'], {}), "('Axis:')\n", (28267, 28276), False, 'from spyder.config.base import _\n'), ((28504, 28515), 'spyder.config.base._', '_', (['"""Index:"""'], {}), "('Index:')\n", (28505, 28515), False, 'from spyder.config.base import _\n'), ((29138, 29189), 'spyder.config.base._', '_', (['"""<u>Warning</u>: changes are applied separately"""'], {}), "('<u>Warning</u>: changes are applied separately')\n", (29139, 29189), False, 'from spyder.config.base import _\n'), ((29225, 29344), 'spyder.config.base._', '_', (['"""For performance reasons, changes applied to masked array won\'t be reflected in array\'s data (and vice-versa)."""'], {}), '("For performance reasons, changes applied to masked array won\'t be reflected in array\'s data (and vice-versa)."\n )\n', (29226, 29344), False, 'from spyder.config.base import _\n'), ((10145, 10195), 'qtpy.QtGui.QColor.fromHsvF', 'QColor.fromHsvF', (['hue', 'self.sat', 'self.val', 'self.alp'], {}), '(hue, self.sat, self.val, self.alp)\n', (10160, 10195), False, 'from qtpy.QtGui import QColor, QCursor, QDoubleValidator, QKeySequence\n'), ((10220, 10238), 'qtpy.compat.to_qvariant', 'to_qvariant', (['color'], {}), '(color)\n', (10231, 10238), False, 'from 
qtpy.compat import from_qvariant, to_qvariant\n'), ((13820, 13832), 'qtpy.QtCore.QLocale', 'QLocale', (['"""C"""'], {}), "('C')\n", (13827, 13832), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((22852, 22862), 'spyder.config.base._', '_', (['"""Error"""'], {}), "('Error')\n", (22853, 22862), False, 'from spyder.config.base import _\n'), ((25159, 25194), 'spyder.config.base._', '_', (['"""%s are currently not supported"""'], {}), "('%s are currently not supported')\n", (25160, 25194), False, 'from spyder.config.base import _\n'), ((27178, 27203), 'spyder.config.base._', '_', (['"""Record array fields:"""'], {}), "('Record array fields:')\n", (27179, 27203), False, 'from spyder.config.base import _\n'), ((10107, 10118), 'numpy.abs', 'np.abs', (['hue'], {}), '(hue)\n', (10113, 10118), True, 'import numpy as np\n'), ((10294, 10307), 'qtpy.compat.to_qvariant', 'to_qvariant', ([], {}), '()\n', (10305, 10307), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((10375, 10420), 'spyder.config.gui.get_font', 'get_font', ([], {'font_size_delta': 'DEFAULT_SMALL_DELTA'}), '(font_size_delta=DEFAULT_SMALL_DELTA)\n', (10383, 10420), False, 'from spyder.config.gui import get_font, config_shortcut\n'), ((22902, 22931), 'spyder.config.base._', '_', (['"""Format (%s) is incorrect"""'], {}), "('Format (%s) is incorrect')\n", (22903, 22931), False, 'from spyder.config.base import _\n'), ((27482, 27503), 'spyder.py3compat.is_text_string', 'is_text_string', (['title'], {}), '(title)\n', (27496, 27503), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n')] |
dddpaul/beats | libbeat/tests/system/idxmgmt.py | 0d4a830fea46210ee264c52a977834d39493c750 | import datetime
import unittest
import pytest
from elasticsearch import NotFoundError
class IdxMgmt(unittest.TestCase):
def __init__(self, client, index):
self._client = client
self._index = index if index != '' and index != '*' else 'mockbeat'
def needs_init(self, s):
return s == '' or s == '*'
def delete(self, indices=[], policies=[]):
        indices = [x for x in indices if x != '']
        if not indices:
            # Fall back to the default index when no explicit indices are given.
            indices = [self._index]
for i in indices:
self.delete_index_and_alias(i)
self.delete_template(template=i)
for i in [x for x in policies if x != '']:
self.delete_policy(i)
def delete_index_and_alias(self, index=""):
if self.needs_init(index):
index = self._index
try:
self._client.transport.perform_request('DELETE', "/" + index + "*")
except NotFoundError:
pass
def delete_template(self, template=""):
if self.needs_init(template):
template = self._index
try:
self._client.transport.perform_request('DELETE', "/_template/" + template + "*")
except NotFoundError:
pass
def delete_policy(self, policy):
# Delete any existing policy starting with given policy
policies = self._client.transport.perform_request('GET', "/_ilm/policy")
for p, _ in policies.items():
if not p.startswith(policy):
continue
try:
self._client.transport.perform_request('DELETE', "/_ilm/policy/" + p)
except NotFoundError:
pass
def assert_index_template_not_loaded(self, template):
with pytest.raises(NotFoundError):
self._client.transport.perform_request('GET', '/_template/' + template)
def assert_index_template_loaded(self, template):
resp = self._client.transport.perform_request('GET', '/_template/' + template)
assert template in resp
assert "lifecycle" not in resp[template]["settings"]["index"]
def assert_ilm_template_loaded(self, template, policy, alias):
resp = self._client.transport.perform_request('GET', '/_template/' + template)
assert resp[template]["settings"]["index"]["lifecycle"]["name"] == policy
assert resp[template]["settings"]["index"]["lifecycle"]["rollover_alias"] == alias
def assert_index_template_index_pattern(self, template, index_pattern):
resp = self._client.transport.perform_request('GET', '/_template/' + template)
assert template in resp
assert resp[template]["index_patterns"] == index_pattern
def assert_alias_not_created(self, alias):
resp = self._client.transport.perform_request('GET', '/_alias')
for name, entry in resp.items():
if alias not in name:
continue
assert entry["aliases"] == {}, entry["aliases"]
def assert_alias_created(self, alias, pattern=None):
if pattern is None:
pattern = self.default_pattern()
name = alias + "-" + pattern
resp = self._client.transport.perform_request('GET', '/_alias/' + alias)
assert name in resp
assert resp[name]["aliases"][alias]["is_write_index"] == True
def assert_policy_not_created(self, policy):
with pytest.raises(NotFoundError):
self._client.transport.perform_request('GET', '/_ilm/policy/' + policy)
def assert_policy_created(self, policy):
resp = self._client.transport.perform_request('GET', '/_ilm/policy/' + policy)
assert policy in resp
assert resp[policy]["policy"]["phases"]["hot"]["actions"]["rollover"]["max_size"] == "50gb"
assert resp[policy]["policy"]["phases"]["hot"]["actions"]["rollover"]["max_age"] == "30d"
def assert_docs_written_to_alias(self, alias, pattern=None):
# Refresh the indices to guarantee all documents are available
# through the _search API.
self._client.transport.perform_request('POST', '/_refresh')
if pattern is None:
pattern = self.default_pattern()
name = alias + "-" + pattern
data = self._client.transport.perform_request('GET', '/' + name + '/_search')
self.assertGreater(data["hits"]["total"]["value"], 0)
def default_pattern(self):
d = datetime.datetime.now().strftime("%Y.%m.%d")
return d + "-000001"
def index_for(self, alias, pattern=None):
if pattern is None:
pattern = self.default_pattern()
return "{}-{}".format(alias, pattern)
| [((1743, 1771), 'pytest.raises', 'pytest.raises', (['NotFoundError'], {}), '(NotFoundError)\n', (1756, 1771), False, 'import pytest\n'), ((3380, 3408), 'pytest.raises', 'pytest.raises', (['NotFoundError'], {}), '(NotFoundError)\n', (3393, 3408), False, 'import pytest\n'), ((4398, 4421), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4419, 4421), False, 'import datetime\n')] |
atsgen/tf-test | scripts/policy/sdn_single_vm_multiple_policy_topology.py | 2748fcd81491450c75dadc71849d2a1c11061029 | '''*******AUTO-GENERATED TOPOLOGY*********'''
from __future__ import print_function
from builtins import range
from builtins import object
from tcutils.util import get_random_name,get_random_cidr
class sdn_single_vm_multiple_policy_config(object):
def __init__(self, domain='default-domain', project='admin', username=None, password=None):
#
# Domain and project defaults: Do not change until support for
# non-default is tested!
self.domain = domain
self.project = project
self.username = username
self.password = password
#
# Define VN's in the project:
self.vnet_list = [get_random_name('vnet0')]
#
# Define network info for each VN:
if self.project == 'vCenter':
# For vcenter, only one subnet per VN is supported
self.vn_nets = {self.vnet_list[0]: [get_random_cidr(af='v4')]}
else:
self.vn_nets = {self.vnet_list[0]: ['10.1.1.0/24', '11.1.1.0/24']}
#
# Define network policies
self.policy_list = list()
for i in range(10):
self.policy_list.append(get_random_name('policy%d'%i))
self.vn_policy = {self.vnet_list[0]: self.policy_list}
#
# Define VM's
# VM distribution on available compute nodes is handled by nova
# scheduler or contrail vm naming scheme
self.vn_of_vm = {get_random_name('vmc0'): self.vnet_list[0]}
#
# Define network policy rules
self.rules = {}
self.rules[self.policy_list[0]] = [{'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[1]] = [{'direction': '>', 'protocol': 'icmp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'icmp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'icmp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'icmp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[2]] = [{'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[3]] = [{'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[4]] = [{'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[5]] = [{'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[6]] = [{'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[7]] = [{'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[8]] = [{'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[9]] = [{'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
# end __init__
if __name__ == '__main__':
print("Currently topology limited to one domain/project..")
print("Based on need, can be extended to cover config for multiple domain/projects")
print()
my_topo = sdn_single_vm_multiple_policy_config(
domain='default-domain', project='admin')
x = my_topo.__dict__
# print "keys only:"
# for key, value in x.iteritems(): print key
# print
# print "keys & values:"
# for key, value in x.iteritems(): print key, "-->", value
import topo_helper
topo_h = topo_helper.topology_helper(my_topo)
#vmc_list= topo_h.get_vmc_list()
policy_vn = topo_h.get_policy_vn()
#
| [((9620, 9656), 'topo_helper.topology_helper', 'topo_helper.topology_helper', (['my_topo'], {}), '(my_topo)\n', (9647, 9656), False, 'import topo_helper\n'), ((1104, 1113), 'builtins.range', 'range', (['(10)'], {}), '(10)\n', (1109, 1113), False, 'from builtins import range\n'), ((661, 685), 'tcutils.util.get_random_name', 'get_random_name', (['"""vnet0"""'], {}), "('vnet0')\n", (676, 685), False, 'from tcutils.util import get_random_name, get_random_cidr\n'), ((1423, 1446), 'tcutils.util.get_random_name', 'get_random_name', (['"""vmc0"""'], {}), "('vmc0')\n", (1438, 1446), False, 'from tcutils.util import get_random_name, get_random_cidr\n'), ((1151, 1182), 'tcutils.util.get_random_name', 'get_random_name', (["('policy%d' % i)"], {}), "('policy%d' % i)\n", (1166, 1182), False, 'from tcutils.util import get_random_name, get_random_cidr\n'), ((889, 913), 'tcutils.util.get_random_cidr', 'get_random_cidr', ([], {'af': '"""v4"""'}), "(af='v4')\n", (904, 913), False, 'from tcutils.util import get_random_name, get_random_cidr\n')] |
burnpanck/chaco | chaco/polygon_plot.py | 6457cdd28625991ba69fbbee105051cab237aa51 | """ Defines the PolygonPlot class.
"""
from __future__ import with_statement
# Major library imports
import numpy as np
# Enthought library imports.
from enable.api import LineStyle, black_color_trait, \
transparent_color_trait
from kiva.agg import points_in_polygon
from traits.api import Enum, Float, Tuple, Property, cached_property, \
on_trait_change
# Local imports.
from base_xy_plot import BaseXYPlot
class PolygonPlot(BaseXYPlot):
""" Plots a polygon in dataspace.
Assuming that the index and value mappers are linear mappers, and that
"index" corresponds to X-coordinates and "value" corresponds to
Y-coordinates, the points are arranged in a counter-clockwise fashion.
The polygon is closed automatically, so there is no need to reproduce
the first point as the last point.
Nonlinear mappers are possible, but the results may be unexpected. Only the
data-space points are mapped in a nonlinear fashion. Straight lines
connecting them in a linear screen-space become curved in a nonlinear
screen-space; however, the drawing still contains straight lines in
screen-space.
If you don't want the edge of the polygon to be drawn, set **edge_color**
to transparent; don't try to do this by setting **edge_width** to 0. In
some drawing systems, such as PostScript, a line width of 0 means to make
the line as small as possible while still putting ink on the page.
"""
# The color of the line on the edge of the polygon.
edge_color = black_color_trait
# The thickness of the edge of the polygon.
edge_width = Float(1.0)
# The line dash style for the edge of the polygon.
edge_style = LineStyle
# The color of the face of the polygon.
face_color = transparent_color_trait
# Override the hittest_type trait inherited from BaseXYPlot
hittest_type = Enum("poly", "point", "line")
# The RGBA tuple for rendering edges. It is always a tuple of length 4.
# It has the same RGB values as edge_color_, and its alpha value is the
# alpha value of self.edge_color multiplied by self.alpha.
effective_edge_color = Property(Tuple, depends_on=['edge_color', 'alpha'])
# The RGBA tuple for rendering the face. It is always a tuple of length 4.
# It has the same RGB values as face_color_, and its alpha value is the
# alpha value of self.face_color multiplied by self.alpha.
effective_face_color = Property(Tuple, depends_on=['face_color', 'alpha'])
#----------------------------------------------------------------------
# Private 'BaseXYPlot' interface
#----------------------------------------------------------------------
def _gather_points(self):
""" Collects the data points that are within the bounds of the plot and
caches them.
"""
if self._cache_valid:
return
index = self.index.get_data()
value = self.value.get_data()
if not self.index or not self.value:
return
if len(index) == 0 or len(value) == 0 or len(index) != len(value):
self._cached_data_pts = []
self._cache_valid = True
return
points = np.transpose(np.array((index,value)))
self._cached_data_pts = points
self._cache_valid = True
def _render(self, gc, points):
""" Renders an Nx2 array of screen-space points as a polygon.
"""
with gc:
gc.clip_to_rect(self.x, self.y, self.width, self.height)
gc.set_stroke_color(self.effective_edge_color)
gc.set_line_width(self.edge_width)
gc.set_line_dash(self.edge_style_)
gc.set_fill_color(self.effective_face_color)
gc.lines(points)
gc.close_path()
gc.draw_path()
def _render_icon(self, gc, x, y, width, height):
""" Renders a representation of this plot as an icon into the box
defined by the parameters.
Used by the legend.
"""
with gc:
gc.set_stroke_color(self.effective_edge_color)
gc.set_line_width(self.edge_width)
gc.set_fill_color(self.effective_face_color)
if hasattr(self, 'line_style_'):
gc.set_line_dash(self.line_style_)
gc.draw_rect((x,y,width,height))
return
def hittest(self, screen_pt, threshold=7.0, return_distance=False):
""" Performs point-in-polygon testing or point/line proximity testing.
If self.hittest_type is "line" or "point", then behaves like the
parent class BaseXYPlot.hittest().
If self.hittest_type is "poly", then returns True if the given
point is inside the polygon, and False otherwise.
"""
if self.hittest_type in ("line", "point"):
return BaseXYPlot.hittest(self, screen_pt, threshold, return_distance)
data_pt = self.map_data(screen_pt, all_values=True)
index = self.index.get_data()
value = self.value.get_data()
poly = np.vstack((index,value)).T
        return bool(points_in_polygon([data_pt], poly)[0] == 1)
#------------------------------------------------------------------------
# Event handlers
#------------------------------------------------------------------------
@on_trait_change('edge_color, edge_width, edge_style, face_color, alpha')
def _attributes_changed(self):
self.invalidate_draw()
self.request_redraw()
#------------------------------------------------------------------------
# Property getters
#------------------------------------------------------------------------
@cached_property
def _get_effective_edge_color(self):
if len(self.edge_color_) == 4:
edge_alpha = self.edge_color_[-1]
else:
edge_alpha = 1.0
c = self.edge_color_[:3] + (edge_alpha * self.alpha,)
return c
@cached_property
def _get_effective_face_color(self):
if len(self.face_color_) == 4:
face_alpha = self.face_color_[-1]
else:
face_alpha = 1.0
c = self.face_color_[:3] + (face_alpha * self.alpha,)
return c
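
# --- Hedged usage sketch (not part of the original module) ---
# A minimal construction of a PolygonPlot from raw arrays. The ArrayDataSource,
# DataRange1D and LinearMapper imports below are assumptions about the Chaco
# API of this vintage; adjust them to match your installation.
if __name__ == "__main__":
    from chaco.api import ArrayDataSource, DataRange1D, LinearMapper

    index = ArrayDataSource(np.array([0.0, 1.0, 1.0, 0.0]))
    value = ArrayDataSource(np.array([0.0, 0.0, 1.0, 1.0]))
    polygon = PolygonPlot(
        index=index,
        value=value,
        index_mapper=LinearMapper(range=DataRange1D(index)),
        value_mapper=LinearMapper(range=DataRange1D(value)),
        face_color="lightblue",
        edge_color="black",
    )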
| [((1659, 1669), 'traits.api.Float', 'Float', (['(1.0)'], {}), '(1.0)\n', (1664, 1669), False, 'from traits.api import Enum, Float, Tuple, Property, cached_property, on_trait_change\n'), ((1923, 1952), 'traits.api.Enum', 'Enum', (['"""poly"""', '"""point"""', '"""line"""'], {}), "('poly', 'point', 'line')\n", (1927, 1952), False, 'from traits.api import Enum, Float, Tuple, Property, cached_property, on_trait_change\n'), ((2202, 2253), 'traits.api.Property', 'Property', (['Tuple'], {'depends_on': "['edge_color', 'alpha']"}), "(Tuple, depends_on=['edge_color', 'alpha'])\n", (2210, 2253), False, 'from traits.api import Enum, Float, Tuple, Property, cached_property, on_trait_change\n'), ((2508, 2559), 'traits.api.Property', 'Property', (['Tuple'], {'depends_on': "['face_color', 'alpha']"}), "(Tuple, depends_on=['face_color', 'alpha'])\n", (2516, 2559), False, 'from traits.api import Enum, Float, Tuple, Property, cached_property, on_trait_change\n'), ((5451, 5523), 'traits.api.on_trait_change', 'on_trait_change', (['"""edge_color, edge_width, edge_style, face_color, alpha"""'], {}), "('edge_color, edge_width, edge_style, face_color, alpha')\n", (5466, 5523), False, 'from traits.api import Enum, Float, Tuple, Property, cached_property, on_trait_change\n'), ((3287, 3311), 'numpy.array', 'np.array', (['(index, value)'], {}), '((index, value))\n', (3295, 3311), True, 'import numpy as np\n'), ((4906, 4969), 'base_xy_plot.BaseXYPlot.hittest', 'BaseXYPlot.hittest', (['self', 'screen_pt', 'threshold', 'return_distance'], {}), '(self, screen_pt, threshold, return_distance)\n', (4924, 4969), False, 'from base_xy_plot import BaseXYPlot\n'), ((5122, 5147), 'numpy.vstack', 'np.vstack', (['(index, value)'], {}), '((index, value))\n', (5131, 5147), True, 'import numpy as np\n'), ((5160, 5194), 'kiva.agg.points_in_polygon', 'points_in_polygon', (['[data_pt]', 'poly'], {}), '([data_pt], poly)\n', (5177, 5194), False, 'from kiva.agg import points_in_polygon\n')] |
evgenyss/investing | webapp/template_config.py | b72da8587a4783bfdd389f1781dcd108d1a5e53f | import os
from datetime import timedelta
basedir = os.path.abspath(os.path.dirname(__file__))
API_DATA_URL = "https://invest-public-api.tinkoff.ru/rest/tinkoff.public.invest.api.contract.v1.InstrumentsService/"
API_LASTPRICES_URL = "https://invest-public-api.tinkoff.ru/rest/\
tinkoff.public.invest.api.contract.v1.MarketDataService/GetLastPrices"
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, '..', 'webapp.db')
REMEMBER_COOKIE_DURATION = timedelta(days=1)
SQLALCHEMY_TRACK_MODIFICATIONS = False
SECRET_KEY = ""
API_TOKEN = ""
| [((461, 478), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (470, 478), False, 'from datetime import timedelta\n'), ((68, 93), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (83, 93), False, 'import os\n'), ((392, 432), 'os.path.join', 'os.path.join', (['basedir', '""".."""', '"""webapp.db"""'], {}), "(basedir, '..', 'webapp.db')\n", (404, 432), False, 'import os\n')] |
dytk2134/humann2 | humann2/quantify/families.py | 9b8f212bdd910ee7187f06f1550f0c86bce0473b | """
HUMAnN2: quantify_families module
Compute alignments by gene family
Copyright (c) 2014 Harvard School of Public Health
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import os
import logging
import math
from .. import config
from .. import utilities
from .. import store
# name global logging instance
logger=logging.getLogger(__name__)
def gene_families(alignments,gene_scores,unaligned_reads_count):
"""
Compute the gene families from the alignments
"""
logger.debug("Compute gene families")
# Compute scores for each gene family for each bug set
alignments.convert_alignments_to_gene_scores(gene_scores)
# Process the gene id to names mappings
gene_names=store.Names(config.gene_family_name_mapping_file)
delimiter=config.output_file_column_delimiter
category_delimiter=config.output_file_category_delimiter
# Write the scores ordered with the top first
column_name=config.file_basename+"_Abundance-RPKs"
if config.remove_column_description_output:
column_name=config.file_basename
tsv_output=["# Gene Family"+delimiter+column_name]
# Add the unaligned reads count
tsv_output.append(config.unmapped_gene_name+delimiter+utilities.format_float_to_string(unaligned_reads_count))
# Print out the gene families with those with the highest scores first
for gene in gene_scores.gene_list_sorted_by_score("all"):
all_score=gene_scores.get_score("all",gene)
if all_score>0:
gene_name=gene_names.get_name(gene)
# Print the computation of all bugs for gene family
tsv_output.append(gene_name+delimiter+utilities.format_float_to_string(all_score))
# Process and print per bug if selected
if not config.remove_stratified_output:
# Print scores per bug for family ordered with those with the highest values first
scores_by_bug=gene_scores.get_scores_for_gene_by_bug(gene)
for bug in utilities.double_sort(scores_by_bug):
if scores_by_bug[bug]>0:
tsv_output.append(gene_name+category_delimiter+bug+delimiter
+utilities.format_float_to_string(scores_by_bug[bug]))
if config.output_format=="biom":
# Open a temp file if a conversion to biom is selected
tmpfile=utilities.unnamed_temp_file()
file_handle=open(tmpfile,'w')
file_handle.write("\n".join(tsv_output))
file_handle.close()
utilities.tsv_to_biom(tmpfile,config.genefamilies_file,"Gene")
else:
# Write output as tsv format
file_handle = open(config.genefamilies_file, "w")
file_handle.write("\n".join(tsv_output))
file_handle.close()
return config.genefamilies_file
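
# Illustrative output of the TSV writer above (tab-separated; the numeric
# values here are made up):
#   # Gene Family<TAB><basename>_Abundance-RPKs
#   UNMAPPED<TAB>187.0
#   UniRef90_A0A015<TAB>42.5
#   UniRef90_A0A015|g__Bacteroides.s__B_dorei<TAB>40.1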
| [((1298, 1325), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1315, 1325), False, 'import logging\n')] |
RazvanBalau/parallel-2020 | Buta Nicolae/threads.py | bd9c0dea6cc70e167320f64632d7a235522dfdb3 | import threading
from multiprocessing import Queue
results = []
results2 = []
def take_numbers(q):
print('Enter the numbers:')
for i in range(0,3):
num1 = int(input('Enter first number: '))
num2 = int(input('Enter second number: '))
q.put(num1)
q.put(num2)
def add_num(q):
for i in range(0,3):
num1 = q.get()
num2 = q.get()
results.append(num1+num2)
results2.append(num1-num2)
q = Queue()
t2 = threading.Thread(target=add_num, args=(q, ))
t1 = threading.Thread(target=take_numbers, args=(q, ))
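# Queue.get() blocks until an item is available, so it is safe to start the
# consumer thread (t2) before the producer thread (t1).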
t2.start()
t1.start()
t2.join()
t1.join()
q.close()
for result in results:
print ("adunare =", result)
for result in results2:
print ("scadere =", result) | [((464, 471), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (469, 471), False, 'from multiprocessing import Queue\n'), ((477, 520), 'threading.Thread', 'threading.Thread', ([], {'target': 'add_num', 'args': '(q,)'}), '(target=add_num, args=(q,))\n', (493, 520), False, 'import threading\n'), ((527, 575), 'threading.Thread', 'threading.Thread', ([], {'target': 'take_numbers', 'args': '(q,)'}), '(target=take_numbers, args=(q,))\n', (543, 575), False, 'import threading\n')] |
dylanlee101/leetcode | code_week11_76_712/unique_paths.py | b059afdadb83d504e62afd1227107de0b59557af | '''
A robot is located at the top-left corner of an m x n grid (the starting point is marked "Start" in the diagram below).
The robot can only move either down or right at any point in time. It is trying to reach the bottom-right corner of the grid (marked "Finish" in the diagram below).
How many distinct paths are there in total?

For example, the diagram above shows a 7 x 3 grid. How many possible paths are there?

Example 1:

Input: m = 3, n = 2
Output: 3
Explanation:
Starting from the top-left corner, there are 3 paths in total that reach the bottom-right corner.
1. Right -> Right -> Down
2. Right -> Down -> Right
3. Down -> Right -> Right

Example 2:

Input: m = 7, n = 3
Output: 28

Source: LeetCode (力扣)
Link: https://leetcode-cn.com/problems/unique-paths
'''
class Solution:
def uniquePaths(self, m: int, n: int) -> int:
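        # Rolling 1-D DP: dp[j] accumulates the number of paths to cell (i, j).
        # The leading 1 seeds the first column; the trailing 0 (dp[-1]) is never
        # written, so the dp[j-1] read at j == 0 always sees 0. The answer ends
        # up in dp[n-1], i.e. dp[-2].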
dp = [1] + [0] * n
for i in range(m):
for j in range(n):
dp[j] = dp[j] + dp[j-1]
return dp[-2] | [] |
JonaBecher/spektral | spektral/datasets/qm9.py | ff59e16d959e0ec698428997363be20462625699 | import os
import os.path as osp
import numpy as np
from joblib import Parallel, delayed
from tensorflow.keras.utils import get_file
from tqdm import tqdm
from spektral.data import Dataset, Graph
from spektral.utils import label_to_one_hot, sparse
from spektral.utils.io import load_csv, load_sdf
ATOM_TYPES = [1, 6, 7, 8, 9]
BOND_TYPES = [1, 2, 3, 4]
class QM9(Dataset):
"""
The QM9 chemical data set of small molecules.
In this dataset, nodes represent atoms and edges represent chemical bonds.
There are 5 possible atom types (H, C, N, O, F) and 4 bond types (single,
double, triple, aromatic).
Node features represent the chemical properties of each atom and include:
- The atomic number, one-hot encoded;
- The atom's position in the X, Y, and Z dimensions;
- The atomic charge;
- The mass difference from the monoisotope;
The edge features represent the type of chemical bond between two atoms,
one-hot encoded.
    Each graph has a 19-dimensional label for regression.
**Arguments**
- `amount`: int, load this many molecules instead of the full dataset
(useful for debugging).
- `n_jobs`: number of CPU cores to use for reading the data (-1, to use all
available cores).
"""
url = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/gdb9.tar.gz"
def __init__(self, amount=None, n_jobs=1, **kwargs):
self.amount = amount
self.n_jobs = n_jobs
super().__init__(**kwargs)
def download(self):
get_file(
"qm9.tar.gz",
self.url,
extract=True,
cache_dir=self.path,
cache_subdir=self.path,
)
os.remove(osp.join(self.path, "qm9.tar.gz"))
def read(self):
print("Loading QM9 dataset.")
sdf_file = osp.join(self.path, "gdb9.sdf")
data = load_sdf(sdf_file, amount=self.amount) # Internal SDF format
def read_mol(mol):
x = np.array([atom_to_feature(atom) for atom in mol["atoms"]])
a, e = mol_to_adj(mol)
return x, a, e
data = Parallel(n_jobs=self.n_jobs)(
delayed(read_mol)(mol) for mol in tqdm(data, ncols=80)
)
x_list, a_list, e_list = list(zip(*data))
# Load labels
labels_file = osp.join(self.path, "gdb9.sdf.csv")
labels = load_csv(labels_file)
labels = labels.set_index("mol_id").values
if self.amount is not None:
labels = labels[: self.amount]
return [
Graph(x=x, a=a, e=e, y=y)
for x, a, e, y in zip(x_list, a_list, e_list, labels)
]
def atom_to_feature(atom):
atomic_num = label_to_one_hot(atom["atomic_num"], ATOM_TYPES)
coords = atom["coords"]
charge = atom["charge"]
iso = atom["iso"]
return np.concatenate((atomic_num, coords, [charge, iso]), -1)
def mol_to_adj(mol):
row, col, edge_features = [], [], []
for bond in mol["bonds"]:
start, end = bond["start_atom"], bond["end_atom"]
row += [start, end]
col += [end, start]
edge_features += [bond["type"]] * 2
a, e = sparse.edge_index_to_matrix(
edge_index=np.array((row, col)).T,
edge_weight=np.ones_like(row),
edge_features=label_to_one_hot(edge_features, BOND_TYPES),
)
return a, e
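
# --- Hedged usage sketch (not part of the original module) ---
# Loading a small slice of QM9 for a quick sanity check; `amount` limits the
# number of molecules read, as documented in the class above.
if __name__ == "__main__":
    dataset = QM9(amount=100, n_jobs=1)
    graph = dataset[0]
    print(dataset)
    print(graph.x.shape, graph.a.shape, graph.e.shape, graph.y.shape)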
| [((2701, 2749), 'spektral.utils.label_to_one_hot', 'label_to_one_hot', (["atom['atomic_num']", 'ATOM_TYPES'], {}), "(atom['atomic_num'], ATOM_TYPES)\n", (2717, 2749), False, 'from spektral.utils import label_to_one_hot, sparse\n'), ((2840, 2895), 'numpy.concatenate', 'np.concatenate', (['(atomic_num, coords, [charge, iso])', '(-1)'], {}), '((atomic_num, coords, [charge, iso]), -1)\n', (2854, 2895), True, 'import numpy as np\n'), ((1532, 1627), 'tensorflow.keras.utils.get_file', 'get_file', (['"""qm9.tar.gz"""', 'self.url'], {'extract': '(True)', 'cache_dir': 'self.path', 'cache_subdir': 'self.path'}), "('qm9.tar.gz', self.url, extract=True, cache_dir=self.path,\n cache_subdir=self.path)\n", (1540, 1627), False, 'from tensorflow.keras.utils import get_file\n'), ((1826, 1857), 'os.path.join', 'osp.join', (['self.path', '"""gdb9.sdf"""'], {}), "(self.path, 'gdb9.sdf')\n", (1834, 1857), True, 'import os.path as osp\n'), ((1873, 1911), 'spektral.utils.io.load_sdf', 'load_sdf', (['sdf_file'], {'amount': 'self.amount'}), '(sdf_file, amount=self.amount)\n', (1881, 1911), False, 'from spektral.utils.io import load_csv, load_sdf\n'), ((2318, 2353), 'os.path.join', 'osp.join', (['self.path', '"""gdb9.sdf.csv"""'], {}), "(self.path, 'gdb9.sdf.csv')\n", (2326, 2353), True, 'import os.path as osp\n'), ((2371, 2392), 'spektral.utils.io.load_csv', 'load_csv', (['labels_file'], {}), '(labels_file)\n', (2379, 2392), False, 'from spektral.utils.io import load_csv, load_sdf\n'), ((1713, 1746), 'os.path.join', 'osp.join', (['self.path', '"""qm9.tar.gz"""'], {}), "(self.path, 'qm9.tar.gz')\n", (1721, 1746), True, 'import os.path as osp\n'), ((2116, 2144), 'joblib.Parallel', 'Parallel', ([], {'n_jobs': 'self.n_jobs'}), '(n_jobs=self.n_jobs)\n', (2124, 2144), False, 'from joblib import Parallel, delayed\n'), ((2553, 2578), 'spektral.data.Graph', 'Graph', ([], {'x': 'x', 'a': 'a', 'e': 'e', 'y': 'y'}), '(x=x, a=a, e=e, y=y)\n', (2558, 2578), False, 'from spektral.data import Dataset, Graph\n'), ((3252, 3269), 'numpy.ones_like', 'np.ones_like', (['row'], {}), '(row)\n', (3264, 3269), True, 'import numpy as np\n'), ((3293, 3336), 'spektral.utils.label_to_one_hot', 'label_to_one_hot', (['edge_features', 'BOND_TYPES'], {}), '(edge_features, BOND_TYPES)\n', (3309, 3336), False, 'from spektral.utils import label_to_one_hot, sparse\n'), ((3208, 3228), 'numpy.array', 'np.array', (['(row, col)'], {}), '((row, col))\n', (3216, 3228), True, 'import numpy as np\n'), ((2158, 2175), 'joblib.delayed', 'delayed', (['read_mol'], {}), '(read_mol)\n', (2165, 2175), False, 'from joblib import Parallel, delayed\n'), ((2192, 2212), 'tqdm.tqdm', 'tqdm', (['data'], {'ncols': '(80)'}), '(data, ncols=80)\n', (2196, 2212), False, 'from tqdm import tqdm\n')] |
tscofield/cpx-training | code/Level 1 - Intro to CPX/5-acceleration/main.py | 682a2cef6bb164bc7c374744de94c21581258392 | from adafruit_circuitplayground.express import cpx
# Main loop gets x, y and z axis acceleration, prints the values, and turns on
# red, green and blue, at levels related to the x, y and z values.
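# Note: cpx.acceleration reports values in m/s^2 (on the order of +/-10 under
# normal handling), so each color channel below stays well under the 0-255
# range NeoPixels accept -- the LEDs will glow fairly dimly.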
while True:
if cpx.switch:
print("Slide switch off!")
cpx.pixels.fill((0, 0, 0))
continue
else:
R = 0
G = 0
B = 0
x, y, z = cpx.acceleration
print((x, y, z))
if x:
R = R + abs(int(x))
if y:
G = G + abs(int(y))
if z:
B = B + abs(int(z))
cpx.pixels.fill((R, G, B))
| [((272, 298), 'adafruit_circuitplayground.express.cpx.pixels.fill', 'cpx.pixels.fill', (['(0, 0, 0)'], {}), '((0, 0, 0))\n', (287, 298), False, 'from adafruit_circuitplayground.express import cpx\n'), ((574, 600), 'adafruit_circuitplayground.express.cpx.pixels.fill', 'cpx.pixels.fill', (['(R, G, B)'], {}), '((R, G, B))\n', (589, 600), False, 'from adafruit_circuitplayground.express import cpx\n')] |
QinganZhao/ML-based-driving-motion-prediction | src/data_preprocess.py | 5a7772cf199d30e4e33bbe943775c2e19aac5d5b | import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
import matplotlib.patches as patches
def load_data(file_name, car_flag):
if car_flag == 1:
data = np.loadtxt('./car1/'+str(file_name))
elif car_flag == 2:
data = np.loadtxt('./car2/'+str(file_name))
return data
def get_low_freq_data(data):
"""
    Return a data matrix sampled at 0.1 s per time step
    (downsampled from the original 0.01 s data).
"""
matrix = np.zeros((1, data.shape[1]))
for i in range(data.shape[0]):
if i % 10 == 0:
matrix = np.concatenate((matrix, data[i,:].reshape(1,data.shape[1])),axis=0)
return matrix[1:,:]
def data_process():
"""
This function serves to concatenate the information of two cars into one array.
Note: car1 -- mainlane car;
car2 -- merging car;
OutFormat:
0 case_ID
1 frame_ID
2 car1_long_pos
3 car1_long_vel
4 car1_lateral_pos
5 car1_lateral_displacement
6 car2_long_pos
7 car2_long_vel
8 car2_lateral_pos
9 car2_lateral_displacement
10 relative_long_vel (merge - mainlane)
11 relative_lateral_distance (merge - mainlane)
12 relative_long_distance (merge - mainlane)
13 car1_yaw
14 car2_yaw
15 situation label: (0: car1 yields car2; 1: car2 yields car1)
"""
data_matrix = np.zeros((1,16))
for i in range(128):
file_name_1 = 'data_'+str(i)+'_1.txt'
file_name_2 = 'data_'+str(i)+'_2.txt'
car1 = get_low_freq_data(load_data(file_name_1, 1))
car2 = get_low_freq_data(load_data(file_name_2, 2))
T = int(car1.shape[0])
#print(T)
current_data_matrix = np.zeros((T,16))
for j in range(1, T):
current_data_matrix[j,0] = i
current_data_matrix[j,1] = j
current_data_matrix[j,2] = car1[j,1]
current_data_matrix[j,3] = 10 * (car1[j,1] - car1[j-1,1])
current_data_matrix[j,4] = car1[j,2]
current_data_matrix[j,5] = car1[j,2] - car1[j-1,2]
current_data_matrix[j,6] = car2[j,1]
current_data_matrix[j,7] = 10 * (car2[j,1] - car2[j-1,1])
current_data_matrix[j,8] = car2[j,2]
current_data_matrix[j,9] = car2[j,2] - car2[j-1,2]
current_data_matrix[j,10] = current_data_matrix[j,7] - current_data_matrix[j,3]
current_data_matrix[j,11] = current_data_matrix[j,8] - current_data_matrix[j,4]
current_data_matrix[j,12] = current_data_matrix[j,6] - current_data_matrix[j,2]
current_data_matrix[j,13] = car1[j,3]
current_data_matrix[j,14] = car2[j,3]
if car1[-1,1] > car2[-1,1]:
current_data_matrix[j,15] = 1
else:
current_data_matrix[j,15] = 0
current_data_matrix = current_data_matrix[1:, :]
data_matrix = np.concatenate((data_matrix, current_data_matrix),axis=0)
np.savetxt('./data_matrix.txt', data_matrix[1:,:],'%.4f')
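
# Example (illustrative): recover the relative longitudinal distance
# (column 12 in the OutFormat above) for case 0 from the saved matrix:
#   m = np.loadtxt('./data_matrix.txt')
#   rel_long_dist = m[m[:, 0] == 0][:, 12]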
##################################################################
def divide_data(data_matrix, segment_length):
"""
This function serves to separate two situation cases.
"""
situation0_data = data_matrix[np.where(data_matrix[:,-1] == 0)]
situation1_data = data_matrix[np.where(data_matrix[:,-1] == 1)]
np.savetxt('./all_trajs_1.txt', situation0_data, '%.4f')
np.savetxt('./all_trajs_2.txt', situation1_data, '%.4f')
# count seq lengths
# separate sequence segments
# all_trajs_seg_1 = np.zeros((1, data_matrix.shape[1]))
# all_trajs_seg_2 = np.zeros((1, data_matrix.shape[1]))
all_trajs_1 = np.zeros((1, data_matrix.shape[1]))
all_trajs_2 = np.zeros((1, data_matrix.shape[1]))
count0, count1 = [], []
# for i in range(128):
# print('i = '+str(i))
# temp_data = data_matrix[np.where(data_matrix[:,0] == i)]
# if temp_data[0,-1] == 0:
# for j in range(temp_data.shape[0]-segment_length+1):
# temp_seg_data = temp_data[j:j+segment_length, :]
# count0.append(temp_seg_data.shape[0])
# all_trajs_seg_1 = np.concatenate((all_trajs_seg_1, temp_seg_data),axis=0)
# else:
# for j in range(temp_data.shape[0]-segment_length+1):
# temp_seg_data = temp_data[j:j+segment_length, :]
# count1.append(temp_seg_data.shape[0])
# all_trajs_seg_2 = np.concatenate((all_trajs_seg_2, temp_seg_data),axis=0)
for i in range(128):
print('i = '+str(i))
temp_data = data_matrix[np.where(data_matrix[:,0] == i)]
if temp_data[0,-1] == 0:
count0.append(temp_data.shape[0])
all_trajs_1 = np.concatenate((all_trajs_1, temp_data),axis=0)
elif temp_data[0,-1] == 1:
count1.append(temp_data.shape[0])
all_trajs_2 = np.concatenate((all_trajs_2, temp_data),axis=0)
print(all_trajs_1.shape)
print(all_trajs_2.shape)
print(sum(count0))
print(sum(count1))
# np.savetxt('./all_trajs_seg_1.txt', all_trajs_seg_1[1:,:], '%.4f')
# np.savetxt('./all_trajs_seg_2.txt', all_trajs_seg_2[1:,:], '%.4f')
np.savetxt('./all_trajs_seq_length_1.txt', np.array(count0), '%d')
np.savetxt('./all_trajs_seq_length_2.txt', np.array(count1), '%d')
#data_process()
#data_matrix = np.loadtxt('./data_matrix.txt')
#divide_data(data_matrix=data_matrix, segment_length=30)
###############################################
def check_data():
data = np.loadtxt('../simulation_data/data_matrix.txt')
temp_data = data[np.where(data[:,0]==69)]
T = temp_data.shape[0]
car1_long_vel = temp_data[:,3]
car2_long_vel = temp_data[:,7]
car1_acc = 10*(temp_data[1:,3]-temp_data[:-1,3])
car2_acc = 10*(temp_data[1:,7]-temp_data[:-1,7])
# plt.figure(1)
# plt.plot(range(T-1), car1_acc, c='b', label='main lane car acceleration')
# plt.plot(range(T-1), car2_acc, c='r', label='merging car acceleration')
# plt.legend()
plt.figure(2,figsize=(14,4))
plt.plot(range(T), car1_long_vel, c='b', label='main lane car velocity')
plt.plot(range(T), car2_long_vel, c='r', label='merging car velocity')
plt.legend()
plt.savefig('./long_vel_69.eps', bbox_inches='tight')
#plt.show()
#check_data()
###############################################
def plot_vehicles(case_id, data_matrix):
"""
This function is to plot vehicle trajectories with bounding boxes.
"""
current_case_data = data_matrix[np.where(data_matrix[:,0]==case_id)]
T = current_case_data.shape[0]
fig = plt.figure(figsize=(20,2))
for i in range(T):
if i<10:
name='00'+str(i)
elif i>=10 and i<100:
name = '0'+str(i)
elif i>=100:
name = str(i)
ax = fig.add_subplot(111, aspect='equal')
ax.add_patch(
patches.Rectangle(
(current_case_data[i,2]-2.0, current_case_data[i,4]-0.9), # (x,y)
4.0, # width
1.8, # height
alpha = 0.3 + 0.7*(T-i) / float(T),
facecolor='blue',
edgecolor='black',
linewidth=0.5
)
)
ax.add_patch(
patches.Rectangle(
(current_case_data[i,6]-2.0, current_case_data[i,8]-0.9), # (x,y)
4.0, # width
1.8, # height
alpha = 0.3 + 0.7*(T-i) / float(T),
facecolor='red',
edgecolor='black',
linewidth=0.5
)
)
ax.plot(range(-805,-360),-605*np.ones(445), color='k',linewidth=1)
ax.plot(range(-805,-584),-610*np.ones(221), color='k',linewidth=1)
ax.plot(range(-445,-360),-610*np.ones(85), color='k',linewidth=1)
x = [[-584,-805],[-445,-805]]
y = [[-610,-618],[-610,-622]]
for l in range(len(x)):
ax.plot(x[l], y[l], color='k',linewidth=1)
ax.set_xlim(-680, -400)
ax.set_ylim(-620, -600)
ax.set_xticks([])
ax.set_yticks([])
fig.savefig('./vehicles_plot/'+str(case_id)+'_'+str(name)+'.png', bbox_inches='tight')
data_matrix = np.loadtxt('./data_matrix.txt')
plot_vehicles(case_id=8, data_matrix=data_matrix)
| [((7584, 7615), 'numpy.loadtxt', 'np.loadtxt', (['"""./data_matrix.txt"""'], {}), "('./data_matrix.txt')\n", (7594, 7615), True, 'import numpy as np\n'), ((436, 464), 'numpy.zeros', 'np.zeros', (['(1, data.shape[1])'], {}), '((1, data.shape[1]))\n', (444, 464), True, 'import numpy as np\n'), ((1272, 1289), 'numpy.zeros', 'np.zeros', (['(1, 16)'], {}), '((1, 16))\n', (1280, 1289), True, 'import numpy as np\n'), ((2665, 2724), 'numpy.savetxt', 'np.savetxt', (['"""./data_matrix.txt"""', 'data_matrix[1:, :]', '"""%.4f"""'], {}), "('./data_matrix.txt', data_matrix[1:, :], '%.4f')\n", (2675, 2724), True, 'import numpy as np\n'), ((3058, 3114), 'numpy.savetxt', 'np.savetxt', (['"""./all_trajs_1.txt"""', 'situation0_data', '"""%.4f"""'], {}), "('./all_trajs_1.txt', situation0_data, '%.4f')\n", (3068, 3114), True, 'import numpy as np\n'), ((3117, 3173), 'numpy.savetxt', 'np.savetxt', (['"""./all_trajs_2.txt"""', 'situation1_data', '"""%.4f"""'], {}), "('./all_trajs_2.txt', situation1_data, '%.4f')\n", (3127, 3173), True, 'import numpy as np\n'), ((3364, 3399), 'numpy.zeros', 'np.zeros', (['(1, data_matrix.shape[1])'], {}), '((1, data_matrix.shape[1]))\n', (3372, 3399), True, 'import numpy as np\n'), ((3416, 3451), 'numpy.zeros', 'np.zeros', (['(1, data_matrix.shape[1])'], {}), '((1, data_matrix.shape[1]))\n', (3424, 3451), True, 'import numpy as np\n'), ((5085, 5133), 'numpy.loadtxt', 'np.loadtxt', (['"""../simulation_data/data_matrix.txt"""'], {}), "('../simulation_data/data_matrix.txt')\n", (5095, 5133), True, 'import numpy as np\n'), ((5567, 5597), 'matplotlib.pyplot.figure', 'plt.figure', (['(2)'], {'figsize': '(14, 4)'}), '(2, figsize=(14, 4))\n', (5577, 5597), True, 'import matplotlib.pyplot as plt\n'), ((5746, 5758), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5756, 5758), True, 'import matplotlib.pyplot as plt\n'), ((5763, 5816), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""./long_vel_69.eps"""'], {'bbox_inches': '"""tight"""'}), "('./long_vel_69.eps', bbox_inches='tight')\n", (5774, 5816), True, 'import matplotlib.pyplot as plt\n'), ((6144, 6171), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(20, 2)'}), '(figsize=(20, 2))\n', (6154, 6171), True, 'import matplotlib.pyplot as plt\n'), ((1570, 1587), 'numpy.zeros', 'np.zeros', (['(T, 16)'], {}), '((T, 16))\n', (1578, 1587), True, 'import numpy as np\n'), ((2603, 2661), 'numpy.concatenate', 'np.concatenate', (['(data_matrix, current_data_matrix)'], {'axis': '(0)'}), '((data_matrix, current_data_matrix), axis=0)\n', (2617, 2661), True, 'import numpy as np\n'), ((4782, 4798), 'numpy.array', 'np.array', (['count0'], {}), '(count0)\n', (4790, 4798), True, 'import numpy as np\n'), ((4851, 4867), 'numpy.array', 'np.array', (['count1'], {}), '(count1)\n', (4859, 4867), True, 'import numpy as np\n'), ((2954, 2989), 'numpy.where', 'np.where', (['(data_matrix[:, (-1)] == 0)'], {}), '(data_matrix[:, (-1)] == 0)\n', (2962, 2989), True, 'import numpy as np\n'), ((3020, 3055), 'numpy.where', 'np.where', (['(data_matrix[:, (-1)] == 1)'], {}), '(data_matrix[:, (-1)] == 1)\n', (3028, 3055), True, 'import numpy as np\n'), ((4309, 4357), 'numpy.concatenate', 'np.concatenate', (['(all_trajs_1, temp_data)'], {'axis': '(0)'}), '((all_trajs_1, temp_data), axis=0)\n', (4323, 4357), True, 'import numpy as np\n'), ((5153, 5181), 'numpy.where', 'np.where', (['(data[:, (0)] == 69)'], {}), '(data[:, (0)] == 69)\n', (5161, 5181), True, 'import numpy as np\n'), ((6064, 6104), 'numpy.where', 'np.where', (['(data_matrix[:, (0)] == 
case_id)'], {}), '(data_matrix[:, (0)] == case_id)\n', (6072, 6104), True, 'import numpy as np\n'), ((4192, 4226), 'numpy.where', 'np.where', (['(data_matrix[:, (0)] == i)'], {}), '(data_matrix[:, (0)] == i)\n', (4200, 4226), True, 'import numpy as np\n'), ((4443, 4491), 'numpy.concatenate', 'np.concatenate', (['(all_trajs_2, temp_data)'], {'axis': '(0)'}), '((all_trajs_2, temp_data), axis=0)\n', (4457, 4491), True, 'import numpy as np\n'), ((7059, 7071), 'numpy.ones', 'np.ones', (['(445)'], {}), '(445)\n', (7066, 7071), True, 'import numpy as np\n'), ((7129, 7141), 'numpy.ones', 'np.ones', (['(221)'], {}), '(221)\n', (7136, 7141), True, 'import numpy as np\n'), ((7199, 7210), 'numpy.ones', 'np.ones', (['(85)'], {}), '(85)\n', (7206, 7210), True, 'import numpy as np\n')] |
tylertjburns/ledgerkeeper | balancesheet/equityManager.py | cd69e9f48f35a973d08e450dfffdfea46bdc3802 | import balancesheet.mongoData.equities_data_service as dsvce
from userInteraction.financeCliInteraction import FinanceCliInteraction
import ledgerkeeper.mongoData.account_data_service as dsvca
from balancesheet.enums import EquityClass, AssetType, LiabiltyType, EquityTimeHorizon, EquityStatus, EquityContingency
import plotter as plot
class EquityManager():
def __init__(self, user_notification_system: FinanceCliInteraction):
self.uns = user_notification_system
def add_equity(self):
name = self.uns.request_string("Name: ")
description = self.uns.request_string("Description: ")
accountName = self.uns.request_from_dict(dsvca.accounts_as_dict())
equityClass = self.uns.request_enum(EquityClass)
if equityClass == EquityClass.ASSET:
equityType = self.uns.request_enum(AssetType)
elif equityClass == EquityClass.LIABILITY:
equityType = self.uns.request_enum(LiabiltyType)
else:
raise Exception(f"Unknown equity class: {equityClass.name}")
interestRate = self.uns.request_float("Interest Rate: ")
equityTimeHorizon = self.uns.request_enum(EquityTimeHorizon)
equityStatus = self.uns.request_enum(EquityStatus)
equityContingency = self.uns.request_enum(EquityContingency)
equity = dsvce.enter_if_not_exists(name=name,
description=description,
accountId=str(dsvca.account_by_name(accountName).id),
equityClass=equityClass,
equityType=equityType,
equityTimeHorizon=equityTimeHorizon,
equityStatus=equityStatus,
equityContingency=equityContingency,
interestRate=interestRate)
if equity is not None:
self.uns.notify_user("Equity entered successfully!")
def delete_equity(self):
accountName = self.uns.request_from_dict(dsvca.accounts_as_dict())
equityName = self.uns.request_from_dict(dsvce.equities_as_dict())
dsvce.delete_equity(dsvca.account_by_name(accountName).id, equityName)
def record_value(self):
accountName = self.uns.request_from_dict(dsvca.accounts_as_dict())
equityName = self.uns.request_from_dict(dsvce.equities_as_dict())
year = self.uns.request_int("Year: ")
month = self.uns.request_int("Month: ")
value = self.uns.request_float("Value: ")
account = dsvca.account_by_name(accountName)
equity = dsvce.equity_by_account_and_name(str(account.id), equityName)
if equity is None:
raise Exception(f"Equity: {accountName} [{account.id}], {equityName} not found.")
value = dsvce.record_value_on_equity(equity, year, month, value)
if value is not None:
self.uns.notify_user("Value Recorded successfully!")
def print_value_snapshots(self, accountName=None):
if accountName is None:
accountName = self.uns.request_from_dict(dsvca.accounts_as_dict())
account = dsvca.account_by_name(accountName)
equities = dsvce.equities_by_account(account.id)
if equities is None or len(equities) == 0:
self.uns.notify_user(f"No Equities in account [{accountName}]")
return
self.uns.pretty_print_items(sorted(equities, key=lambda x: x.equityType),
title="Equities Snapshots")
def print_equities(self):
self.uns.pretty_print_items(dsvce.query_equities("").to_json(), title="Equities")
def print_balance_sheet(self):
accountName = self.uns.request_from_dict(dsvca.accounts_as_dict())
relevant_mos = self.uns.request_int("Number of past months: ")
account = dsvca.account_by_name(accountName)
data = dsvce.balance_sheet_over_time(relevant_months=relevant_mos, accountIds=[str(account.id)])
self.uns.notify_user(f"\n---------Balance Sheet---------")
self.uns.pretty_print_items(data)
def plot_balance_over_time(self):
relevant_mos = self.uns.request_int("Number of past months: ")
accountName = self.uns.request_from_dict(dsvca.accounts_as_dict())
account = dsvca.account_by_name(accountName)
ax = plot.plot_assets_liabilities_worth_over_time(relevant_mos, accountIds=[str(account.id)])
if ax is None:
self.uns.notify_user("No Data to show...")
| [((2670, 2704), 'ledgerkeeper.mongoData.account_data_service.account_by_name', 'dsvca.account_by_name', (['accountName'], {}), '(accountName)\n', (2691, 2704), True, 'import ledgerkeeper.mongoData.account_data_service as dsvca\n'), ((2921, 2977), 'balancesheet.mongoData.equities_data_service.record_value_on_equity', 'dsvce.record_value_on_equity', (['equity', 'year', 'month', 'value'], {}), '(equity, year, month, value)\n', (2949, 2977), True, 'import balancesheet.mongoData.equities_data_service as dsvce\n'), ((3268, 3302), 'ledgerkeeper.mongoData.account_data_service.account_by_name', 'dsvca.account_by_name', (['accountName'], {}), '(accountName)\n', (3289, 3302), True, 'import ledgerkeeper.mongoData.account_data_service as dsvca\n'), ((3323, 3360), 'balancesheet.mongoData.equities_data_service.equities_by_account', 'dsvce.equities_by_account', (['account.id'], {}), '(account.id)\n', (3348, 3360), True, 'import balancesheet.mongoData.equities_data_service as dsvce\n'), ((3976, 4010), 'ledgerkeeper.mongoData.account_data_service.account_by_name', 'dsvca.account_by_name', (['accountName'], {}), '(accountName)\n', (3997, 4010), True, 'import ledgerkeeper.mongoData.account_data_service as dsvca\n'), ((4438, 4472), 'ledgerkeeper.mongoData.account_data_service.account_by_name', 'dsvca.account_by_name', (['accountName'], {}), '(accountName)\n', (4459, 4472), True, 'import ledgerkeeper.mongoData.account_data_service as dsvca\n'), ((666, 690), 'ledgerkeeper.mongoData.account_data_service.accounts_as_dict', 'dsvca.accounts_as_dict', ([], {}), '()\n', (688, 690), True, 'import ledgerkeeper.mongoData.account_data_service as dsvca\n'), ((2148, 2172), 'ledgerkeeper.mongoData.account_data_service.accounts_as_dict', 'dsvca.accounts_as_dict', ([], {}), '()\n', (2170, 2172), True, 'import ledgerkeeper.mongoData.account_data_service as dsvca\n'), ((2222, 2246), 'balancesheet.mongoData.equities_data_service.equities_as_dict', 'dsvce.equities_as_dict', ([], {}), '()\n', (2244, 2246), True, 'import balancesheet.mongoData.equities_data_service as dsvce\n'), ((2406, 2430), 'ledgerkeeper.mongoData.account_data_service.accounts_as_dict', 'dsvca.accounts_as_dict', ([], {}), '()\n', (2428, 2430), True, 'import ledgerkeeper.mongoData.account_data_service as dsvca\n'), ((2480, 2504), 'balancesheet.mongoData.equities_data_service.equities_as_dict', 'dsvce.equities_as_dict', ([], {}), '()\n', (2502, 2504), True, 'import balancesheet.mongoData.equities_data_service as dsvce\n'), ((3861, 3885), 'ledgerkeeper.mongoData.account_data_service.accounts_as_dict', 'dsvca.accounts_as_dict', ([], {}), '()\n', (3883, 3885), True, 'import ledgerkeeper.mongoData.account_data_service as dsvca\n'), ((4394, 4418), 'ledgerkeeper.mongoData.account_data_service.accounts_as_dict', 'dsvca.accounts_as_dict', ([], {}), '()\n', (4416, 4418), True, 'import ledgerkeeper.mongoData.account_data_service as dsvca\n'), ((2277, 2311), 'ledgerkeeper.mongoData.account_data_service.account_by_name', 'dsvca.account_by_name', (['accountName'], {}), '(accountName)\n', (2298, 2311), True, 'import ledgerkeeper.mongoData.account_data_service as dsvca\n'), ((3223, 3247), 'ledgerkeeper.mongoData.account_data_service.accounts_as_dict', 'dsvca.accounts_as_dict', ([], {}), '()\n', (3245, 3247), True, 'import ledgerkeeper.mongoData.account_data_service as dsvca\n'), ((3722, 3746), 'balancesheet.mongoData.equities_data_service.query_equities', 'dsvce.query_equities', (['""""""'], {}), "('')\n", (3742, 3746), True, 'import balancesheet.mongoData.equities_data_service as dsvce\n'), ((1494, 1528), 'ledgerkeeper.mongoData.account_data_service.account_by_name', 'dsvca.account_by_name', (['accountName'], {}), '(accountName)\n', (1515, 1528), True, 'import ledgerkeeper.mongoData.account_data_service as dsvca\n')] |
agustinhenze/mibs.snmplabs.com | pysnmp-with-texts/CISCO-DOT11-QOS-MIB.py | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | #
# PySNMP MIB module CISCO-DOT11-QOS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-DOT11-QOS-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:55:50 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint", "SingleValueConstraint")
CDot11IfVlanIdOrZero, = mibBuilder.importSymbols("CISCO-DOT11-IF-MIB", "CDot11IfVlanIdOrZero")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
ObjectGroup, ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "ModuleCompliance", "NotificationGroup")
Bits, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, Counter64, Counter32, ModuleIdentity, NotificationType, Unsigned32, IpAddress, MibIdentifier, iso, TimeTicks, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "Counter64", "Counter32", "ModuleIdentity", "NotificationType", "Unsigned32", "IpAddress", "MibIdentifier", "iso", "TimeTicks", "Integer32")
TextualConvention, DisplayString, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "TruthValue")
ciscoDot11QosMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 416))
ciscoDot11QosMIB.setRevisions(('2006-05-09 00:00', '2003-11-24 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: ciscoDot11QosMIB.setRevisionsDescriptions(('The DEFVAL clauses have been removed from the definition of the objects cdot11QosCWmin, cdot11QosCWmax, cdot11QosMaxRetry and cdot11QosBackoffOffset, as the default values for these objects depend on the different traffic classes and that there are no common default values across the different traffic classes. ', 'Initial version of this MIB module.',))
if mibBuilder.loadTexts: ciscoDot11QosMIB.setLastUpdated('200605090000Z')
if mibBuilder.loadTexts: ciscoDot11QosMIB.setOrganization('Cisco Systems Inc.')
if mibBuilder.loadTexts: ciscoDot11QosMIB.setContactInfo(' Cisco Systems Customer Service Postal: 170 West Tasman Drive, San Jose CA 95134-1706. USA Tel: +1 800 553-NETS E-mail: [email protected]')
if mibBuilder.loadTexts: ciscoDot11QosMIB.setDescription('This MIB module provides network management support for QoS on wireless LAN devices. The objects defined in this MIB provide equivalent support as the objects in the IEEE 802.11E Standard draft. The original names of the objects in the standard are included in the REFERENCE clauses. GLOSSARY and ACRONYMS Access point (AP) Transmitter/receiver (transceiver) device that commonly connects and transports data between a wireless network and a wired network. AIFS Arbitration Interframe Space. It is one of the five different IFSs defined to provide priority levels for access to the wireless media. It shall be used by QSTAs to transmit data type frames (MPDUs) and management type frames (MMPDUs). BSS IEEE 802.11 Basic Service Set (Radio Cell). The BSS of an AP comprises of the stations directly associating with the AP. CW Contention Window. It is the time period between radio signal collisions caused by simultaneous broadcast from multiple wireless stations. The contention window is used to compute the random backoff of the radio broadcast. The IEEE 802.11b does not specify the unit for the time period. CWP Factor Contention Window Persistence Factor. It indicates the factor used in computing new CW values on every 15 unsuccessful attempt to transmit an MPDU or an MMPDU of a traffic class. It is a scaling factor in units of 1/16ths. IFS Inter-Frame Space is the time interval between frames. A STA shall determine that the medium is idle through the use of the carrier sense function for the interval specified. In other words, the size of the IFS determines the length of the backoff time interval of a device to the medium. In this case, the medium is the radio wave spectrum. The IEEE 802.11b standard does not specify any unit for the time interval. BSS IEEE 802.11 Basic Service Set (Radio Cell). The MAC Medium Access Control. Layer 2 in the network model. MPDU MAC protocol data unit. The unit of data exchanged between two peer MAC entities using the services of the physical layer (PHY). MMPDU Management type MAC protocol data unit. MSDU MAC service data unit. Information that is delivered as a unit between MAC service access points. QBSS Quality of service basic service set. QSTA QoS station. STA (WSTA) A non-AP IEEE 802.11 wireless station.')
ciscoDot11QosMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 416, 0))
ciscoDot11QosMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 416, 1))
ciscoDot11QosMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 416, 2))
ciscoDot11QosConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1))
ciscoDot11QosQueue = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 2))
ciscoDot11QosStatistics = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 3))
ciscoDot11QosNotifControl = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 4))
class Cdot11QosTrafficClass(TextualConvention, Integer32):
reference = 'IEEE 802.1D-1998, Annex H.2.10 and IEEE 802.11E-2001, section 7.5.1.'
description = 'This textual convention defines the 802.11E traffic classes: background(0) - background traffic, lowest priority bestEffort(1) - best effort delivery, default priority class for all traffic video(2) - video traffic, 2nd highest priority voice(3) - voice traffic, highest priority.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))
namedValues = NamedValues(("background", 0), ("bestEffort", 1), ("video", 2), ("voice", 3))
cdot11QosConfigTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 1), )
if mibBuilder.loadTexts: cdot11QosConfigTable.setStatus('current')
if mibBuilder.loadTexts: cdot11QosConfigTable.setDescription('This table contains the basic set of attributes to configure QoS queues for radio interfaces of a wireless LAN device. This table has an expansion dependent relationship with the ifTable. Each IEEE 802.11 wireless interface has different outbound queues for different network traffic classes. For each entry in this table, there exists an entry in the ifTable of ifType ieee80211(71).')
cdot11QosConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-DOT11-QOS-MIB", "cdot11TrafficQueue"))
if mibBuilder.loadTexts: cdot11QosConfigEntry.setStatus('current')
if mibBuilder.loadTexts: cdot11QosConfigEntry.setDescription('Each entry contains parameters to configure traffic contention window, AIFS, priority and MSDU lifetime for each traffic queue on an IEEE 802.11 interface.')
cdot11TrafficQueue = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 1, 1, 1), Unsigned32())
if mibBuilder.loadTexts: cdot11TrafficQueue.setStatus('current')
if mibBuilder.loadTexts: cdot11TrafficQueue.setDescription('This is the index to the outbound traffic queue on the radio interface.')
cdot11TrafficClass = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 1, 1, 2), Cdot11QosTrafficClass()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdot11TrafficClass.setStatus('current')
if mibBuilder.loadTexts: cdot11TrafficClass.setDescription('This object specifies the traffic class and priority for the traffic on this queue.')
cdot11QosCWmin = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 10))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdot11QosCWmin.setReference('dot11CWmin, IEEE 802.11E-2001/D1.')
if mibBuilder.loadTexts: cdot11QosCWmin.setStatus('current')
if mibBuilder.loadTexts: cdot11QosCWmin.setDescription('This object defines the minimum contention window value for a traffic class. The minimum contention window is 2 to the power of cdot11QosCWmin minus 1, and that is from 0 to 1023. The cdot11QosCWmin value must be less than or equal to cdot11QosCWmax.')
cdot11QosCWmax = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 1, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 10))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdot11QosCWmax.setReference('dot11CWmax, IEEE 802.11E-2001/D1.')
if mibBuilder.loadTexts: cdot11QosCWmax.setStatus('current')
if mibBuilder.loadTexts: cdot11QosCWmax.setDescription('This object defines the maximum contention window value for a traffic class. The maximum contention window is 2 to the power of cdot11QosCWmax minus 1, and that is from 0 to 1023. The cdot11QosCWmax value must be greater than or equal to cdot11QosCWmin.')
cdot11QosBackoffOffset = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 1, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 20))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdot11QosBackoffOffset.setStatus('current')
if mibBuilder.loadTexts: cdot11QosBackoffOffset.setDescription('This specifies the offset of the radio backoff from the transmission media for this traffic class. The backoff interval of a radio is calculated from a pseudo random integer drawn from a uniform distribution over the interval determined by the maximum and minimum of the contention window.')
cdot11QosMaxRetry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 1, 1, 6), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdot11QosMaxRetry.setStatus('current')
if mibBuilder.loadTexts: cdot11QosMaxRetry.setDescription('This specifies the number of times the radio retries for a particular transmission if there is a collision for the media.')
cdot11QosSupportTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 2), )
if mibBuilder.loadTexts: cdot11QosSupportTable.setStatus('current')
if mibBuilder.loadTexts: cdot11QosSupportTable.setDescription('This table contains the attributes indicating QoS support information on the IEEE 802.11 interfaces of this device. This table has a sparse dependent relationship with the ifTable. For each entry in this table, there exists an entry in the ifTable of ifType ieee80211(71).')
cdot11QosSupportEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: cdot11QosSupportEntry.setStatus('current')
if mibBuilder.loadTexts: cdot11QosSupportEntry.setDescription('Each entry contains attributes to indicate if QoS and priority queue are supported for an IEEE 802.11 interface.')
cdot11QosOptionImplemented = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 2, 1, 1), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdot11QosOptionImplemented.setReference('dot11QosOptionImplemented, IEEE 802.11E-2001/D1.')
if mibBuilder.loadTexts: cdot11QosOptionImplemented.setStatus('current')
if mibBuilder.loadTexts: cdot11QosOptionImplemented.setDescription('This object indicates if QoS is implemented on this IEEE 802.11 network interface.')
cdot11QosOptionEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 2, 1, 2), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdot11QosOptionEnabled.setStatus('current')
if mibBuilder.loadTexts: cdot11QosOptionEnabled.setDescription("This object indicates if QoS is enabled on this IEEE 802.11 network interface. If it is 'true', QoS queuing is ON and traffic are prioritized according to their traffic class. If it is 'false', there is no QoS queuing and traffic are not prioritized.")
cdot11QosQueuesAvailable = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 2, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(4, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdot11QosQueuesAvailable.setReference('dot11QueuesAvailable, IEEE 802.11E-2001/D1.')
if mibBuilder.loadTexts: cdot11QosQueuesAvailable.setStatus('current')
if mibBuilder.loadTexts: cdot11QosQueuesAvailable.setDescription('This object shows the number of QoS priority queues are available on this IEEE 802.11 network interface. That is the number of queue per interface in the cdot11QosConfigTable.')
cdot11QosQueueTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 2, 1), )
if mibBuilder.loadTexts: cdot11QosQueueTable.setStatus('current')
if mibBuilder.loadTexts: cdot11QosQueueTable.setDescription('This table contains the queue weight and size information and statistics for each traffic queue on each IEEE 802.11 interface. This table has a sparse dependent relationship with the ifTable. For each entry in this table, there exists an entry in the ifTable of ifType ieee80211(71).')
cdot11QosQueueEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 2, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-DOT11-QOS-MIB", "cdot11TrafficQueue"))
if mibBuilder.loadTexts: cdot11QosQueueEntry.setStatus('current')
if mibBuilder.loadTexts: cdot11QosQueueEntry.setDescription('Each entry contains the current queue weight, size, and peak size information for each traffic queue on an IEEE 802.11 interface.')
cdot11QosQueueQuota = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 2, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdot11QosQueueQuota.setStatus('current')
if mibBuilder.loadTexts: cdot11QosQueueQuota.setDescription('This is the current QoS priority queue packet quota for this queue on the overall bandwidth. The total available quota is platform dependent and is shared among all the transmitting queues. The queue with the largest quota value has the largest share of the overall bandwidth of the radio. The quota is allocated by the radio driver dynamically.')
cdot11QosQueueSize = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 2, 1, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdot11QosQueueSize.setReference('dot11QueueSizeTC, IEEE 802.11E-2001/D1.')
if mibBuilder.loadTexts: cdot11QosQueueSize.setStatus('current')
if mibBuilder.loadTexts: cdot11QosQueueSize.setDescription('This is the current QoS priority queue size for this queue.')
cdot11QosQueuePeakSize = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 2, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdot11QosQueuePeakSize.setReference('dot11QueuePeakSizeTC, IEEE 802.11E-2001/D1.')
if mibBuilder.loadTexts: cdot11QosQueuePeakSize.setStatus('current')
if mibBuilder.loadTexts: cdot11QosQueuePeakSize.setDescription('This is the peak QoS priority queue size for this queue.')
cdot11QosStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 3, 1), )
if mibBuilder.loadTexts: cdot11QosStatisticsTable.setStatus('current')
if mibBuilder.loadTexts: cdot11QosStatisticsTable.setDescription('This table contains the QoS statistics by traffic queue on each IEEE 802.11 network interface. This table has an expansion dependent relationship with the ifTable. For each entry in this table, there exists an entry in the ifTable of ifType ieee80211(71).')
cdot11QosStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 3, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-DOT11-QOS-MIB", "cdot11TrafficQueue"))
if mibBuilder.loadTexts: cdot11QosStatisticsEntry.setStatus('current')
if mibBuilder.loadTexts: cdot11QosStatisticsEntry.setDescription('Each entry contains QoS statistics for data transmission and reception for each traffic queue on an IEEE 802.11 interface.')
cdot11QosDiscardedFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 3, 1, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdot11QosDiscardedFrames.setReference('dot11QosDiscardedFrameCountTC, IEEE 802.11E-2001/D1.')
if mibBuilder.loadTexts: cdot11QosDiscardedFrames.setStatus('current')
if mibBuilder.loadTexts: cdot11QosDiscardedFrames.setDescription('This is the counter for QoS discarded frames transmitting from this IEEE 802.11 interface for the traffic queue.')
cdot11QosFails = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 3, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdot11QosFails.setReference('dot11QosFailedCountTC, IEEE 802.11E-2001/D1.')
if mibBuilder.loadTexts: cdot11QosFails.setStatus('current')
if mibBuilder.loadTexts: cdot11QosFails.setDescription('This is the counter for QoS failures on this IEEE 802.11 interface for the traffic queue.')
cdot11QosRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 3, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdot11QosRetries.setReference('dot11QosRetryCountTC, IEEE 802.11E-2001/D1.')
if mibBuilder.loadTexts: cdot11QosRetries.setStatus('current')
if mibBuilder.loadTexts: cdot11QosRetries.setDescription('This is the counter for QoS retries performed on this IEEE 802.11 interface for the traffic queue.')
cdot11QosMutipleRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 3, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdot11QosMutipleRetries.setReference('dot11QosMutipleRetryCountTC, IEEE 802.11E-2001/D1.')
if mibBuilder.loadTexts: cdot11QosMutipleRetries.setStatus('current')
if mibBuilder.loadTexts: cdot11QosMutipleRetries.setDescription('This is the counter for QoS multiple retries performed on this IEEE 802.11 interface for the traffic queue.')
cdot11QosTransmittedFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 3, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdot11QosTransmittedFrames.setReference('dot11QosTransmittedFrameCountTC, IEEE 802.11E-2001/D1.')
if mibBuilder.loadTexts: cdot11QosTransmittedFrames.setStatus('current')
if mibBuilder.loadTexts: cdot11QosTransmittedFrames.setDescription('This is the counter for QoS frames transmitted from this IEEE 802.11 interface for the traffic queue.')
cdot11QosIfStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 3, 2), )
if mibBuilder.loadTexts: cdot11QosIfStatisticsTable.setStatus('current')
if mibBuilder.loadTexts: cdot11QosIfStatisticsTable.setDescription('This table contains the attributes indicating QoS statistics on the IEEE 802.11 interfaces of the device. This table has a sparse dependent relationship with the ifTable. For each entry in this table, there exists an entry in the ifTable of ifType ieee80211(71).')
cdot11QosIfStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 3, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: cdot11QosIfStatisticsEntry.setStatus('current')
if mibBuilder.loadTexts: cdot11QosIfStatisticsEntry.setDescription('Each entry contains attributes to support QoS statistics on an IEEE 802.11 interface.')
cdot11QosIfDiscardedFragments = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 3, 2, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdot11QosIfDiscardedFragments.setReference('dot11QosDiscardedFragments, IEEE 802.11E-2001/D1.')
if mibBuilder.loadTexts: cdot11QosIfDiscardedFragments.setStatus('current')
if mibBuilder.loadTexts: cdot11QosIfDiscardedFragments.setDescription('This object counts the number of QoS discarded transmitting fragments on this radio interface.')
cdot11QosIfVlanTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 3), )
if mibBuilder.loadTexts: cdot11QosIfVlanTable.setStatus('current')
if mibBuilder.loadTexts: cdot11QosIfVlanTable.setDescription('This table maps VLANs to different traffic classes and defines their QoS properties. This table has an expansion dependent relationship with the ifTable. For each entry in this table, there exists an entry in the ifTable of ifType ieee80211(71).')
cdot11QosIfVlanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-DOT11-QOS-MIB", "cdot11QosIfVlanId"))
if mibBuilder.loadTexts: cdot11QosIfVlanEntry.setStatus('current')
if mibBuilder.loadTexts: cdot11QosIfVlanEntry.setDescription('Each entry defines parameters determining the traffic class and QoS configuration of a VLAN.')
cdot11QosIfVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 3, 1, 1), CDot11IfVlanIdOrZero().subtype(subtypeSpec=ValueRangeConstraint(1, 4095)))
if mibBuilder.loadTexts: cdot11QosIfVlanId.setStatus('current')
if mibBuilder.loadTexts: cdot11QosIfVlanId.setDescription('This object identifies the VLAN (1 to 4095) on this radio interface.')
cdot11QosIfVlanTrafficClass = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 1, 3, 1, 2), Cdot11QosTrafficClass()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdot11QosIfVlanTrafficClass.setStatus('current')
if mibBuilder.loadTexts: cdot11QosIfVlanTrafficClass.setDescription('This is the QoS traffic class for the traffic transmitting on this VLAN. The traffic class determines the priority for the VLAN.')
cdot11QosNotifEnabled = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 416, 1, 4, 1), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdot11QosNotifEnabled.setStatus('current')
if mibBuilder.loadTexts: cdot11QosNotifEnabled.setDescription('Indicates whether cdot11QosChangeNotif notification will or will not be sent by the agent when the QoS configuration in the cdot11QosConfigTable is changed.')
cdot11QosChangeNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 416, 0, 1)).setObjects(("CISCO-DOT11-QOS-MIB", "cdot11TrafficClass"))
if mibBuilder.loadTexts: cdot11QosChangeNotif.setStatus('current')
if mibBuilder.loadTexts: cdot11QosChangeNotif.setDescription('This notification will be sent when the QoS configuration in the cdot11QosConfigTable is changed. The object cdot11TrafficClass specifies the traffic class of which a queue is configured. The sending of these notifications can be enabled or disabled via cdot11QosNotifEnabled.')
ciscoDot11QosMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 416, 2, 1))
ciscoDot11QosMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 416, 2, 2))
ciscoDot11QosMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 416, 2, 1, 1)).setObjects(("CISCO-DOT11-QOS-MIB", "ciscoDot11QosConfigGroup"), ("CISCO-DOT11-QOS-MIB", "ciscoDot11QosStatsGroup"), ("CISCO-DOT11-QOS-MIB", "ciscoDot11QosNotifControlGroup"), ("CISCO-DOT11-QOS-MIB", "ciscoDot11QosNotificationGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDot11QosMIBCompliance = ciscoDot11QosMIBCompliance.setStatus('current')
if mibBuilder.loadTexts: ciscoDot11QosMIBCompliance.setDescription('The compliance statement for the configuration and status groups.')
ciscoDot11QosConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 416, 2, 2, 1)).setObjects(("CISCO-DOT11-QOS-MIB", "cdot11TrafficClass"), ("CISCO-DOT11-QOS-MIB", "cdot11QosCWmin"), ("CISCO-DOT11-QOS-MIB", "cdot11QosCWmax"), ("CISCO-DOT11-QOS-MIB", "cdot11QosBackoffOffset"), ("CISCO-DOT11-QOS-MIB", "cdot11QosMaxRetry"), ("CISCO-DOT11-QOS-MIB", "cdot11QosOptionImplemented"), ("CISCO-DOT11-QOS-MIB", "cdot11QosOptionEnabled"), ("CISCO-DOT11-QOS-MIB", "cdot11QosQueuesAvailable"), ("CISCO-DOT11-QOS-MIB", "cdot11QosQueueQuota"), ("CISCO-DOT11-QOS-MIB", "cdot11QosQueueSize"), ("CISCO-DOT11-QOS-MIB", "cdot11QosQueuePeakSize"), ("CISCO-DOT11-QOS-MIB", "cdot11QosIfVlanTrafficClass"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDot11QosConfigGroup = ciscoDot11QosConfigGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDot11QosConfigGroup.setDescription('Configurations for IEEE 802.11 QoS.')
ciscoDot11QosStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 416, 2, 2, 2)).setObjects(("CISCO-DOT11-QOS-MIB", "cdot11QosIfDiscardedFragments"), ("CISCO-DOT11-QOS-MIB", "cdot11QosDiscardedFrames"), ("CISCO-DOT11-QOS-MIB", "cdot11QosFails"), ("CISCO-DOT11-QOS-MIB", "cdot11QosRetries"), ("CISCO-DOT11-QOS-MIB", "cdot11QosMutipleRetries"), ("CISCO-DOT11-QOS-MIB", "cdot11QosTransmittedFrames"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDot11QosStatsGroup = ciscoDot11QosStatsGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDot11QosStatsGroup.setDescription('Status and statistics for IEEE 802.11 QoS.')
ciscoDot11QosNotifControlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 416, 2, 2, 3)).setObjects(("CISCO-DOT11-QOS-MIB", "cdot11QosNotifEnabled"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDot11QosNotifControlGroup = ciscoDot11QosNotifControlGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDot11QosNotifControlGroup.setDescription('Notification control configuration for QoS.')
ciscoDot11QosNotificationGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 9, 416, 2, 2, 4)).setObjects(("CISCO-DOT11-QOS-MIB", "cdot11QosChangeNotif"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDot11QosNotificationGroup = ciscoDot11QosNotificationGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDot11QosNotificationGroup.setDescription('Notifications for QoS configuration.')
mibBuilder.exportSymbols("CISCO-DOT11-QOS-MIB", PYSNMP_MODULE_ID=ciscoDot11QosMIB, cdot11QosQueueTable=cdot11QosQueueTable, cdot11QosCWmin=cdot11QosCWmin, ciscoDot11QosMIBObjects=ciscoDot11QosMIBObjects, cdot11QosIfVlanTable=cdot11QosIfVlanTable, cdot11QosIfVlanId=cdot11QosIfVlanId, cdot11QosStatisticsTable=cdot11QosStatisticsTable, ciscoDot11QosQueue=ciscoDot11QosQueue, ciscoDot11QosStatistics=ciscoDot11QosStatistics, cdot11QosRetries=cdot11QosRetries, cdot11QosQueuesAvailable=cdot11QosQueuesAvailable, cdot11QosFails=cdot11QosFails, cdot11QosOptionEnabled=cdot11QosOptionEnabled, cdot11QosStatisticsEntry=cdot11QosStatisticsEntry, cdot11TrafficQueue=cdot11TrafficQueue, ciscoDot11QosMIBCompliance=ciscoDot11QosMIBCompliance, ciscoDot11QosMIBCompliances=ciscoDot11QosMIBCompliances, cdot11QosIfStatisticsTable=cdot11QosIfStatisticsTable, cdot11QosIfDiscardedFragments=cdot11QosIfDiscardedFragments, cdot11QosMaxRetry=cdot11QosMaxRetry, cdot11QosMutipleRetries=cdot11QosMutipleRetries, ciscoDot11QosMIB=ciscoDot11QosMIB, cdot11QosQueueQuota=cdot11QosQueueQuota, ciscoDot11QosMIBConformance=ciscoDot11QosMIBConformance, cdot11QosConfigTable=cdot11QosConfigTable, cdot11QosCWmax=cdot11QosCWmax, cdot11QosConfigEntry=cdot11QosConfigEntry, cdot11QosQueueSize=cdot11QosQueueSize, cdot11QosIfVlanEntry=cdot11QosIfVlanEntry, cdot11TrafficClass=cdot11TrafficClass, ciscoDot11QosStatsGroup=ciscoDot11QosStatsGroup, ciscoDot11QosConfig=ciscoDot11QosConfig, ciscoDot11QosNotifControl=ciscoDot11QosNotifControl, cdot11QosSupportEntry=cdot11QosSupportEntry, cdot11QosSupportTable=cdot11QosSupportTable, ciscoDot11QosMIBGroups=ciscoDot11QosMIBGroups, cdot11QosBackoffOffset=cdot11QosBackoffOffset, ciscoDot11QosConfigGroup=ciscoDot11QosConfigGroup, cdot11QosTransmittedFrames=cdot11QosTransmittedFrames, cdot11QosQueueEntry=cdot11QosQueueEntry, ciscoDot11QosNotifControlGroup=ciscoDot11QosNotifControlGroup, ciscoDot11QosNotificationGroup=ciscoDot11QosNotificationGroup, ciscoDot11QosMIBNotifs=ciscoDot11QosMIBNotifs, cdot11QosIfStatisticsEntry=cdot11QosIfStatisticsEntry, cdot11QosNotifEnabled=cdot11QosNotifEnabled, cdot11QosChangeNotif=cdot11QosChangeNotif, cdot11QosOptionImplemented=cdot11QosOptionImplemented, cdot11QosIfVlanTrafficClass=cdot11QosIfVlanTrafficClass, Cdot11QosTrafficClass=Cdot11QosTrafficClass, cdot11QosQueuePeakSize=cdot11QosQueuePeakSize, cdot11QosDiscardedFrames=cdot11QosDiscardedFrames)
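# Illustrative usage (not part of the generated module): a compiled MIB like
# this one is normally loaded by name through pysnmp's MIB builder, e.g.:
#
#     from pysnmp.smi import builder
#     mibBuilder = builder.MibBuilder()
#     mibBuilder.loadModules('CISCO-DOT11-QOS-MIB')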
| [] |
liuyu81/SnapSearch-Client-Python | examples/django/hello_world/wsgi.py | 41857806c2b26f0537de2dcc23a145107a4ecd04 | import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hello_world.settings")
# django WSGI application
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# load SnapSearch API credentials
api_email = "<email>"
api_key = "<key>"
# initialize the interceptor
from SnapSearch import Client, Detector, Interceptor
interceptor = Interceptor(Client(api_email, api_key), Detector())
# deploy the interceptor
from SnapSearch.wsgi import InterceptorMiddleware
application = InterceptorMiddleware(application, interceptor)
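# Illustrative deployment (assumes a WSGI server such as gunicorn is
# installed; not part of the example itself): `gunicorn hello_world.wsgi:application`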
| [((11, 82), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""hello_world.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'hello_world.settings')\n", (32, 82), False, 'import os\n'), ((174, 196), 'django.core.wsgi.get_wsgi_application', 'get_wsgi_application', ([], {}), '()\n', (194, 196), False, 'from django.core.wsgi import get_wsgi_application\n'), ((511, 558), 'SnapSearch.wsgi.InterceptorMiddleware', 'InterceptorMiddleware', (['application', 'interceptor'], {}), '(application, interceptor)\n', (532, 558), False, 'from SnapSearch.wsgi import InterceptorMiddleware\n'), ((381, 407), 'SnapSearch.Client', 'Client', (['api_email', 'api_key'], {}), '(api_email, api_key)\n', (387, 407), False, 'from SnapSearch import Client, Detector, Interceptor\n'), ((409, 419), 'SnapSearch.Detector', 'Detector', ([], {}), '()\n', (417, 419), False, 'from SnapSearch import Client, Detector, Interceptor\n')] |
xcat2/confluent | confluent_server/confluent/syncfiles.py | 47a83f4628df48638c2aebbfbcddc1531aac20d0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2021 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import os
import shutil
import tempfile
import confluent.sshutil as sshutil
import confluent.util as util
import confluent.noderange as noderange
import eventlet
import pwd
import grp
def mkdirp(path):
try:
os.makedirs(path)
except OSError as e:
if e.errno != 17:
raise
def get_entries(filename):
    # Parse a synclist file, yielding section markers ('REPLACE:', 'MERGE:',
    # 'APPENDONCE:') and the entries that follow them; '<file>' include
    # directives are expanded recursively.
    secname = 'REPLACE:'
filename = filename.strip()
if filename[-1] == '>':
filename = filename[:-1]
with open(filename, 'r') as slfile:
slist = slfile.read()
entries = slist.split('\n')
for ent in entries:
ent = ent.split('#', 1)[0].strip()
if not ent:
continue
if ent in ('APPENDONCE:', 'MERGE:', 'REPLACE:'):
secname = ent
if ent[0] == '<':
subfilename = ent[1:]
if subfilename[-1] == '>':
subfilename = subfilename[:-1]
if subfilename[0] != '/':
subfilename = os.path.join(os.path.dirname(filename), subfilename)
for subent in get_entries(subfilename):
yield subent
yield secname
else:
yield ent
class SyncList(object):
def __init__(self, filename, nodename, cfg):
slist = None
self.replacemap = {}
self.appendmap = {}
self.appendoncemap = {}
self.mergemap = {}
self.optmap = {}
entries = get_entries(filename)
currmap = self.replacemap
for ent in entries:
try:
cmtidx = ent.index('#')
ent = ent[:cmtidx]
except ValueError:
pass
for special in '$%^&|{}':
if special in ent:
raise Exception(
'Special character "{}" reserved for future use'.format(special))
ent = ent.strip()
if not ent:
continue
if ent[-1] == ':':
if ent == 'MERGE:':
currmap = self.mergemap
elif ent == 'APPENDONCE:':
currmap = self.appendoncemap
elif ent == 'REPLACE:':
currmap = self.replacemap
else:
raise Exception(
'Section "{}" is not currently supported in syncfiles'.format(ent[:-1]))
continue
if '->' in ent:
k, v = ent.split('->')
k = k.strip()
v = v.strip()
if ':' in v:
nr, v = v.split(':', 1)
for candidate in noderange.NodeRange(nr, cfg).nodes:
if candidate == nodename:
break
else:
continue
optparts = v.split()
v = optparts[0]
optparts = optparts[1:]
else:
kparts = []
optparts = []
currparts = kparts
for part in ent.split():
if part[0] == '(':
currparts = optparts
currparts.append(part)
k = ' '.join(kparts)
v = None
entopts = {}
if optparts:
if optparts[0][0] != '(' or optparts[-1][-1] != ')':
raise Exception("Unsupported syntax in syncfile: " + ent)
opts = ','.join(optparts)
opts = opts[1:-1]
for opt in opts.split(','):
optname, optval = opt.split('=')
if optname == 'owner':
try:
uid = pwd.getpwnam(optval).pw_uid
except KeyError:
uid = None
optval = {'name': optval, 'id': uid}
elif optname == 'group':
try:
gid = grp.getgrnam(optval).gr_gid
except KeyError:
gid = None
optval = {'name': optval, 'id': gid}
entopts[optname] = optval
currmap[k] = v
targ = v if v else k
for f in targ.split():
self.optmap[f] = entopts
def sync_list_to_node(sl, node, suffixes):
targdir = tempfile.mkdtemp('.syncto{}'.format(node))
output = ''
try:
for ent in sl.replacemap:
stage_ent(sl.replacemap, ent, targdir)
if 'append' in suffixes:
while suffixes['append'] and suffixes['append'][0] == '/':
suffixes['append'] = suffixes['append'][1:]
for ent in sl.appendmap:
stage_ent(sl.appendmap, ent,
os.path.join(targdir, suffixes['append']))
if 'merge' in suffixes:
while suffixes['merge'] and suffixes['merge'][0] == '/':
suffixes['merge'] = suffixes['merge'][1:]
for ent in sl.mergemap:
stage_ent(sl.mergemap, ent,
os.path.join(targdir, suffixes['merge']), True)
if 'appendonce' in suffixes:
while suffixes['appendonce'] and suffixes['appendonce'][0] == '/':
suffixes['appendonce'] = suffixes['appendonce'][1:]
for ent in sl.appendoncemap:
stage_ent(sl.appendoncemap, ent,
os.path.join(targdir, suffixes['appendonce']), True)
sshutil.prep_ssh_key('/etc/confluent/ssh/automation')
output = util.run(
['rsync', '-rvLD', targdir + '/', 'root@{}:/'.format(node)])[0]
except Exception as e:
if 'CalledProcessError' not in repr(e):
# https://github.com/eventlet/eventlet/issues/413
# for some reason, can't catch the calledprocesserror normally
# for this exception, implement a hack workaround
raise
unreadablefiles = []
for root, dirnames, filenames in os.walk(targdir):
for filename in filenames:
filename = os.path.join(root, filename)
try:
with open(filename, 'r') as _:
pass
except OSError as e:
unreadablefiles.append(filename.replace(targdir, ''))
if unreadablefiles:
raise Exception("Syncing failed due to unreadable files: " + ','.join(unreadablefiles))
else:
raise
finally:
shutil.rmtree(targdir)
if not isinstance(output, str):
output = output.decode('utf8')
retval = {
'options': sl.optmap,
'output': output,
}
return retval # need dictionary with output and options
def stage_ent(currmap, ent, targdir, appendexist=False):
dst = currmap[ent]
everyfent = []
allfents = ent.split()
for tmpent in allfents:
fents = glob.glob(tmpent)
everyfent.extend(fents)
if not everyfent:
raise Exception('No matching files for "{}"'.format(ent))
if dst is None: # this is to indicate source and destination as one
dst = os.path.dirname(everyfent[0]) + '/'
while dst and dst[0] == '/':
dst = dst[1:]
if len(everyfent) > 1 and dst[-1] != '/':
raise Exception(
'Multiple files match {}, {} needs a trailing slash to indicate a directory'.format(ent, dst))
fulltarg = os.path.join(targdir, dst)
for targ in everyfent:
mkpathorlink(targ, fulltarg, appendexist)
def mkpathorlink(source, destination, appendexist=False):
if os.path.isdir(source):
mkdirp(destination)
for ent in os.listdir(source):
currsrc = os.path.join(source, ent)
currdst = os.path.join(destination, ent)
mkpathorlink(currsrc, currdst)
else:
if destination[-1] == '/':
mkdirp(destination)
destination = os.path.join(destination, os.path.basename(source))
else:
mkdirp(os.path.dirname(destination))
if appendexist and os.path.exists(destination):
tmpnam = tempfile.mktemp()
shutil.copy(destination, tmpnam)
os.remove(destination)
with open(destination, 'w') as realdest:
with open(tmpnam) as olddest:
realdest.write(olddest.read())
with open(source) as sourcedata:
realdest.write(sourcedata.read())
os.remove(tmpnam)
else:
os.symlink(source, destination)
syncrunners = {}
def start_syncfiles(nodename, cfg, suffixes):
deployinfo = cfg.get_node_attributes(
nodename, ('deployment.*',))
deployinfo = deployinfo.get(nodename, {})
profile = deployinfo.get(
'deployment.pendingprofile', {}).get('value', '')
if not profile:
profile = deployinfo.get(
'deployment.stagedprofile', {}).get('value', '')
if not profile:
profile = deployinfo.get(
'deployment.profile', {}).get('value', '')
if not profile:
raise Exception('Cannot perform syncfiles without profile assigned')
synclist = '/var/lib/confluent/public/os/{}/syncfiles'.format(profile)
if not os.path.exists(synclist):
return '200 OK' # not running
sl = SyncList(synclist, nodename, cfg)
if not (sl.appendmap or sl.mergemap or sl.replacemap or sl.appendoncemap):
return '200 OK' # the synclist has no actual entries
syncrunners[nodename] = eventlet.spawn(
sync_list_to_node, sl, nodename, suffixes)
return '202 Queued' # backgrounded
def get_syncresult(nodename):
if nodename not in syncrunners:
return ('204 Not Running', '')
if not syncrunners[nodename].dead:
return ('200 OK', '')
result = syncrunners[nodename].wait()
del syncrunners[nodename]
return ('200 OK', result)
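# Illustrative flow (a sketch only; `cfg` is a confluent configuration object
# and 'n1' a managed node, both placeholders, so this is not runnable standalone):
#
#     status = start_syncfiles('n1', cfg, {'append': 'appendonly'})
#     if status == '202 Queued':
#         code, result = get_syncresult('n1')  # ('200 OK', '') while still running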
| [((8126, 8152), 'os.path.join', 'os.path.join', (['targdir', 'dst'], {}), '(targdir, dst)\n', (8138, 8152), False, 'import os\n'), ((8296, 8317), 'os.path.isdir', 'os.path.isdir', (['source'], {}), '(source)\n', (8309, 8317), False, 'import os\n'), ((10220, 10277), 'eventlet.spawn', 'eventlet.spawn', (['sync_list_to_node', 'sl', 'nodename', 'suffixes'], {}), '(sync_list_to_node, sl, nodename, suffixes)\n', (10234, 10277), False, 'import eventlet\n'), ((848, 865), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (859, 865), False, 'import os\n'), ((6186, 6239), 'confluent.sshutil.prep_ssh_key', 'sshutil.prep_ssh_key', (['"""/etc/confluent/ssh/automation"""'], {}), "('/etc/confluent/ssh/automation')\n", (6206, 6239), True, 'import confluent.sshutil as sshutil\n'), ((7211, 7233), 'shutil.rmtree', 'shutil.rmtree', (['targdir'], {}), '(targdir)\n', (7224, 7233), False, 'import shutil\n'), ((7617, 7634), 'glob.glob', 'glob.glob', (['tmpent'], {}), '(tmpent)\n', (7626, 7634), False, 'import glob\n'), ((8366, 8384), 'os.listdir', 'os.listdir', (['source'], {}), '(source)\n', (8376, 8384), False, 'import os\n'), ((9943, 9967), 'os.path.exists', 'os.path.exists', (['synclist'], {}), '(synclist)\n', (9957, 9967), False, 'import os\n'), ((6705, 6721), 'os.walk', 'os.walk', (['targdir'], {}), '(targdir)\n', (6712, 6721), False, 'import os\n'), ((7842, 7871), 'os.path.dirname', 'os.path.dirname', (['everyfent[0]'], {}), '(everyfent[0])\n', (7857, 7871), False, 'import os\n'), ((8408, 8433), 'os.path.join', 'os.path.join', (['source', 'ent'], {}), '(source, ent)\n', (8420, 8433), False, 'import os\n'), ((8456, 8486), 'os.path.join', 'os.path.join', (['destination', 'ent'], {}), '(destination, ent)\n', (8468, 8486), False, 'import os\n'), ((8775, 8802), 'os.path.exists', 'os.path.exists', (['destination'], {}), '(destination)\n', (8789, 8802), False, 'import os\n'), ((8825, 8842), 'tempfile.mktemp', 'tempfile.mktemp', ([], {}), '()\n', (8840, 8842), False, 'import tempfile\n'), ((8855, 8887), 'shutil.copy', 'shutil.copy', (['destination', 'tmpnam'], {}), '(destination, tmpnam)\n', (8866, 8887), False, 'import shutil\n'), ((8900, 8922), 'os.remove', 'os.remove', (['destination'], {}), '(destination)\n', (8909, 8922), False, 'import os\n'), ((9188, 9205), 'os.remove', 'os.remove', (['tmpnam'], {}), '(tmpnam)\n', (9197, 9205), False, 'import os\n'), ((9232, 9263), 'os.symlink', 'os.symlink', (['source', 'destination'], {}), '(source, destination)\n', (9242, 9263), False, 'import os\n'), ((8659, 8683), 'os.path.basename', 'os.path.basename', (['source'], {}), '(source)\n', (8675, 8683), False, 'import os\n'), ((8718, 8746), 'os.path.dirname', 'os.path.dirname', (['destination'], {}), '(destination)\n', (8733, 8746), False, 'import os\n'), ((1601, 1626), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (1616, 1626), False, 'import os\n'), ((5469, 5510), 'os.path.join', 'os.path.join', (['targdir', "suffixes['append']"], {}), "(targdir, suffixes['append'])\n", (5481, 5510), False, 'import os\n'), ((5777, 5817), 'os.path.join', 'os.path.join', (['targdir', "suffixes['merge']"], {}), "(targdir, suffixes['merge'])\n", (5789, 5817), False, 'import os\n'), ((6125, 6170), 'os.path.join', 'os.path.join', (['targdir', "suffixes['appendonce']"], {}), "(targdir, suffixes['appendonce'])\n", (6137, 6170), False, 'import os\n'), ((6789, 6817), 'os.path.join', 'os.path.join', (['root', 'filename'], {}), '(root, filename)\n', (6801, 6817), False, 'import os\n'), ((3252, 3280), 'confluent.noderange.NodeRange', 'noderange.NodeRange', (['nr', 'cfg'], {}), '(nr, cfg)\n', (3271, 3280), True, 'import confluent.noderange as noderange\n'), ((4357, 4377), 'pwd.getpwnam', 'pwd.getpwnam', (['optval'], {}), '(optval)\n', (4369, 4377), False, 'import pwd\n'), ((4634, 4654), 'grp.getgrnam', 'grp.getgrnam', (['optval'], {}), '(optval)\n', (4646, 4654), False, 'import grp\n')] |
JungYT/fym | fym/models/missile.py | d519c50086e3c7793b960e0326c92ed407836790 | import numpy as np
from fym.core import BaseSystem
class MissilePlanar(BaseSystem):
R = 288
g = 9.80665
S = 1
t1 = 1.5
t2 = 8.5
name = 'missile'
def __init__(self, initial_state):
super().__init__(initial_state)
def external(self, states, controls):
return 0
# return {"wind" : [(0, 0), (0, 0)]} # no external effects
def deriv(self, state, t, control, external):
        # unpack the state vector and the scalar control input
        x, y, V, gamma = state.ravel()
a = control
# temperature
if y <= 11000:
Tmp = 288.16 - 0.0065*y
else:
Tmp = 216.66
# Mach number
M = V/(1.4*self.R*Tmp)**0.5
# Mass and thrust (Note: guidance loop is closed after t=t1)
if t < self.t1:
m = 135 - 14.53*t
T = 33000
elif t < self.t2:
m = 113.205 - 3.331*t
T = 7500
else:
m = 90.035
T = 0
# density and dynamic pressure
rho = (1.15579 - 1.058*1e-4*y + 3.725*1e-9*y**2
- 6.0*1e-14*y**3) # y in [0, 20000]
Q = 0.5*rho*V**2
# Drag model
if M < 0.93:
Cd0 = 0.02
elif M < 1.03:
Cd0 = 0.02 + 0.2*(M - 0.93)
elif M < 1.10:
Cd0 = 0.04 + 0.06*(M - 1.03)
else:
Cd0 = 0.0442 - 0.007*(M - 1.10)
if M < 1.15:
K = 0.2
else:
K = 0.2 + 0.246*(M - 1.15)
D0 = Cd0*Q*self.S
Di = K*m**2*a**2/(Q*self.S)
D = D0 + Di
dxdt = V*np.cos(gamma)
dydt = V*np.sin(gamma)
dVdt = (T - D)/m - self.g*np.sin(gamma)
dgammadt = (a - self.g*np.cos(gamma))/V
return np.vstack([dxdt, dydt, dVdt, dgammadt])
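# Illustrative usage (a sketch, not part of the original module; assumes fym
# is installed and that BaseSystem accepts this column-vector initial state;
# the numeric values below are arbitrary):
#
#     state0 = np.vstack([0.0, 100.0, 250.0, np.deg2rad(30.0)])
#     missile = MissilePlanar(state0)
#     dstate = missile.deriv(state0, t=0.0, control=0.0,
#                            external=missile.external(state0, 0.0))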
| [((1764, 1803), 'numpy.vstack', 'np.vstack', (['[dxdt, dydt, dVdt, dgammadt]'], {}), '([dxdt, dydt, dVdt, dgammadt])\n', (1773, 1803), True, 'import numpy as np\n'), ((1607, 1620), 'numpy.cos', 'np.cos', (['gamma'], {}), '(gamma)\n', (1613, 1620), True, 'import numpy as np\n'), ((1638, 1651), 'numpy.sin', 'np.sin', (['gamma'], {}), '(gamma)\n', (1644, 1651), True, 'import numpy as np\n'), ((1686, 1699), 'numpy.sin', 'np.sin', (['gamma'], {}), '(gamma)\n', (1692, 1699), True, 'import numpy as np\n'), ((1731, 1744), 'numpy.cos', 'np.cos', (['gamma'], {}), '(gamma)\n', (1737, 1744), True, 'import numpy as np\n')] |
chengemily/EGG | egg/zoo/addition/data.py | 40e84228e9d6e9ae785c0e4a846bb7e12e2b9291 | # Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Iterable, Optional, Tuple
import torch
from torch.utils.data import DataLoader
class ScaledDataset:
def __init__(self, examples, scaling_factor=1):
self.examples = examples
self.scaling_factor = scaling_factor
def __len__(self):
return len(self.examples) * self.scaling_factor
def __getitem__(self, k):
k = k % len(self.examples)
return self.examples[k]
def get_dataloaders(
    opts,
) -> Tuple[
    Iterable[Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]],
    Iterable[Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]],
    Iterable[Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]],
]:
    "Return train/validation/holdout iterators of (sender_input, labels, receiver_input) tuples."
full_data = enumerate_dataset(opts.input_size)
len_train = int(opts.training_density * len(full_data))
    train_set, holdout_set = torch.utils.data.random_split(
        full_data, [len_train, len(full_data) - len_train]
    )
validation_set = train_set
train_set = ScaledDataset(train_set, opts.data_scaler)
    train_loader = DataLoader(train_set, batch_size=opts.batch_size, shuffle=True)
    validation_loader = DataLoader(validation_set, batch_size=len(validation_set))
    holdout_loader = DataLoader(holdout_set, batch_size=opts.batch_size)
return train_loader, validation_loader, holdout_loader
def enumerate_dataset(input_size):
data = []
labels = []
for i in range(input_size):
for j in range(input_size):
inp = torch.zeros(2 * input_size)
inp[i] = 1.0
inp[input_size + j] = 1.0
label = torch.zeros(2 * input_size - 1)
label[i + j] = 1.0
data.append(inp)
labels.append(label)
data_tuples = [(data[i], labels[i]) for i in range(len(data))]
return data_tuples
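if __name__ == "__main__":
    # Quick sanity check (added for illustration, not part of the original
    # module): for input_size=2 each (i, j) pair is encoded as two
    # concatenated one-hot vectors and labelled with a one-hot encoding of i + j.
    pairs = enumerate_dataset(2)
    inp, label = pairs[0]
    assert inp.shape == (4,) and label.shape == (3,)
    assert int(label.argmax()) == 0  # first pair is (0, 0), so the sum is 0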
| [((1358, 1421), 'torch.utils.data.DataLoader', 'DataLoader', (['train_set'], {'batch_size': 'opts.batch_size', 'shuffle': '(True)'}), '(train_set, batch_size=opts.batch_size, shuffle=True)\n', (1368, 1421), False, 'from torch.utils.data import DataLoader\n'), ((1577, 1628), 'torch.utils.data.DataLoader', 'DataLoader', (['holdout_set'], {'batch_size': 'opts.batch_size'}), '(holdout_set, batch_size=opts.batch_size)\n', (1587, 1628), False, 'from torch.utils.data import DataLoader\n'), ((1843, 1870), 'torch.zeros', 'torch.zeros', (['(2 * input_size)'], {}), '(2 * input_size)\n', (1854, 1870), False, 'import torch\n'), ((1955, 1986), 'torch.zeros', 'torch.zeros', (['(2 * input_size - 1)'], {}), '(2 * input_size - 1)\n', (1966, 1986), False, 'import torch\n')] |
lsbardel/mathfun | mathfun/lexographic.py | 98e7c210409c2b5777e91059c3651cef4f3045dd | """
Next lexicographical permutation algorithm
https://www.nayuki.io/page/next-lexicographical-permutation-algorithm
"""
def next_lexo(S):
    b = S[-1]
    for i, a in enumerate(reversed(S[:-1]), 2):
        if a < b:
            # a is the pivot: the rightmost character smaller than its right
            # neighbour. Find the rightmost character greater than the pivot.
            for j, b in enumerate(reversed(S), 1):
                if b > a:
                    F = list(S)
                    # swap the pivot with that character, then sort the suffix
                    # so the result is the smallest permutation greater than S
                    F[-i], F[-j] = F[-j], F[-i]
                    F = F[: -i + 1] + sorted(F[-i + 1 :])
                    return "".join(F)
        else:
            b = a
    return "no answer"
| [] |
miker83z/cloud-chain | simulation-web3py/utility.py | 0f5c43159544da547173ee0425e78bede261513b | import json
import os
from argparse import ArgumentTypeError
from eth_typing import Address
from web3.contract import Contract
from settings import MIN_VAL, MAX_VAL, DEPLOYED_CONTRACTS, CONFIG_DIR
async def init_simulation(contracts: list, factor: float, fn: str, status_init: bool) -> bool:
statuses = [True]
try:
if status_init:
for c in contracts:
# Use different cloud_addresses for each contract instance
cloud_address, cloud_status_ok = await c.cloud_sla_creation_activation()
c.set_cloud_sla_address(cloud_address)
statuses.append(cloud_status_ok)
if fn == 'read' or fn == 'read_deny_lost_file_check' or fn == 'file_check_undeleted_file':
statuses.append(await c.upload())
if fn == 'file_check_undeleted_file':
statuses.append(await c.read())
if fn == 'corrupted_file_check':
statuses.append(await c.another_file_upload_read())
if fn == 'delete':
for _ in range(round(factor / DEPLOYED_CONTRACTS) + 1):
statuses.append(await c.upload())
else:
for c in contracts:
if fn == 'delete':
if c.tx_upload_count < round(factor / DEPLOYED_CONTRACTS) + 1:
for _ in range(abs(c.tx_upload_count - (round(factor / DEPLOYED_CONTRACTS) + 1))):
statuses.append(await c.upload())
except ValueError as v:
print(f'{type(v)} [init_sim]: {v}')
else:
return check_statuses(statuses)
def get_credentials(blockchain: str) -> tuple:
if blockchain == 'polygon':
from settings import (
polygon_private_keys
)
return polygon_private_keys
from settings import (
quorum_private_keys
)
return quorum_private_keys
def get_contract(w3, address: Address, compiled_contract_path: str) -> Contract:
def get_abi(path: str) -> list:
with open(path) as file:
contract_json = json.load(file)
contract_abi = contract_json['abi']
return contract_abi
abi = get_abi(compiled_contract_path)
contract = w3.eth.contract(address=address, abi=abi)
return contract
def check_statuses(statuses: list) -> bool:
    for status in statuses:
        if status == 0:
            return False
    return True
def exists_mkdir(paths: list):
for path in paths:
if not os.path.exists(path):
os.mkdir(path)
def get_contracts_config(blockchain: str, msg: bool = True):
if msg:
print('Retrieve config file...')
filename = f'{blockchain}.json'
filepath = os.path.join(os.getcwd(), CONFIG_DIR, filename)
with open(filepath) as file:
contracts_summary = json.loads(file.read())
if msg:
print(f'Config file retrieved at {filepath}.')
return contracts_summary
def range_limited_val(arg: str) -> int:
"""
Type function for argparse - int within some predefined bounds.
"""
try:
s = int(arg)
except ValueError:
raise ArgumentTypeError("must be a int number")
if s < MIN_VAL or s > MAX_VAL:
raise ArgumentTypeError(f"argument must be > {str(MIN_VAL)} and < {str(MAX_VAL)}")
return s
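# Illustrative use of range_limited_val as an argparse type (a sketch; the
# '--factor' flag name is hypothetical):
#
#     from argparse import ArgumentParser
#     parser = ArgumentParser()
#     parser.add_argument('--factor', type=range_limited_val, default=MIN_VAL)
#     factor = parser.parse_args().factor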
| [((2789, 2800), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2798, 2800), False, 'import os\n'), ((2125, 2140), 'json.load', 'json.load', (['file'], {}), '(file)\n', (2134, 2140), False, 'import json\n'), ((2560, 2580), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (2574, 2580), False, 'import os\n'), ((2594, 2608), 'os.mkdir', 'os.mkdir', (['path'], {}), '(path)\n', (2602, 2608), False, 'import os\n'), ((3198, 3239), 'argparse.ArgumentTypeError', 'ArgumentTypeError', (['"""must be a int number"""'], {}), "('must be a int number')\n", (3215, 3239), False, 'from argparse import ArgumentTypeError\n')] |
OpenASL/HowSignBot | migrations/versions/816ea3631582_add_topics.py | bd9c5bc0edfd6fb50bdce7c7c1d84462e1e704c2 | """add topics
Revision ID: 816ea3631582
Revises: 37a124b0099b
Create Date: 2021-03-13 14:20:10.044131
"""
from alembic import op
import sqlalchemy as sa
import bot
# revision identifiers, used by Alembic.
revision = "816ea3631582"
down_revision = "37a124b0099b"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"topics",
sa.Column("content", sa.Text(), nullable=False),
sa.Column("last_synced_at", bot.database.TIMESTAMP(timezone=True), nullable=True),
sa.PrimaryKeyConstraint("content"),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("topics")
# ### end Alembic commands ###
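# Typical invocation (illustrative): `alembic upgrade 816ea3631582` applies
# this migration and `alembic downgrade 37a124b0099b` reverts it.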
| [((749, 772), 'alembic.op.drop_table', 'op.drop_table', (['"""topics"""'], {}), "('topics')\n", (762, 772), False, 'from alembic import op\n'), ((583, 617), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""content"""'], {}), "('content')\n", (606, 617), True, 'import sqlalchemy as sa\n'), ((456, 465), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (463, 465), True, 'import sqlalchemy as sa\n'), ((520, 557), 'bot.database.TIMESTAMP', 'bot.database.TIMESTAMP', ([], {'timezone': '(True)'}), '(timezone=True)\n', (542, 557), False, 'import bot\n')] |
NUS-ALSET/ace-react-redux-brython | src/Lib/importlib/__init__.py | d009490263c5716a145d9691cd59bfcd5aff837a | """A pure Python implementation of import."""
__all__ = ['__import__', 'import_module', 'invalidate_caches']
# Bootstrap help #####################################################
# Until bootstrapping is complete, DO NOT import any modules that attempt
# to import importlib._bootstrap (directly or indirectly). Since this
# partially initialised package would be present in sys.modules, those
# modules would get an uninitialised copy of the source version, instead
# of a fully initialised version (either the frozen one or the one
# initialised below if the frozen one is not available).
import _imp # Just the builtin component, NOT the full Python module
import sys
from . import machinery
from . import _bootstrap
_bootstrap._setup(sys, _imp)
# To simplify imports in test code
_w_long = _bootstrap._w_long
_r_long = _bootstrap._r_long
# Fully bootstrapped at this point, import whatever you like, circular
# dependencies and startup overhead minimisation permitting :)
# Public API #########################################################
from ._bootstrap import __import__
def invalidate_caches():
"""Call the invalidate_caches() method on all meta path finders stored in
sys.meta_path (where implemented)."""
for finder in sys.meta_path:
if hasattr(finder, 'invalidate_caches'):
finder.invalidate_caches()
def find_loader(name, path=None):
"""Find the loader for the specified module.
First, sys.modules is checked to see if the module was already imported. If
so, then sys.modules[name].__loader__ is returned. If that happens to be
set to None, then ValueError is raised. If the module is not in
sys.modules, then sys.meta_path is searched for a suitable loader with the
value of 'path' given to the finders. None is returned if no loader could
be found.
Dotted names do not have their parent packages implicitly imported. You will
most likely need to explicitly import all parent packages in the proper
order for a submodule to get the correct loader.
"""
try:
loader = sys.modules[name].__loader__
if loader is None:
raise ValueError('{}.__loader__ is None'.format(name))
else:
return loader
except KeyError:
pass
return _bootstrap._find_module(name, path)
def import_module(name, package=None):
"""Import a module.
The 'package' argument is required when performing a relative import. It
specifies the package to use as the anchor point from which to resolve the
relative import to an absolute import.
"""
level = 0
if name.startswith('.'):
if not package:
raise TypeError("relative imports require the 'package' argument")
for character in name:
if character != '.':
break
level += 1
return _bootstrap._gcd_import(name[level:], package, level)
# Need at least one import hook for the importlib machinery to work.
from . import basehook
sys.meta_path.append(basehook.BaseHook())
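# Illustrative relative import (a sketch): resolve '.machinery' against the
# 'importlib' package anchor.
#
#     mod = import_module('.machinery', package='importlib')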
| [] |
swyddfa/stylo | lib/arlunio/arlunio/image.py | 4d6b348ce5812dc5c2554bfd21a1550375aa05e1 | from __future__ import annotations
import base64
import io
import logging
import pathlib
from typing import Optional
# TODO: Remove these, as they should be contained in the numpy backend.
import numpy as np
import PIL.Image as PImage
import arlunio.ast as ast
import arlunio.color as color
import arlunio.mask as mask
import arlunio.math as math
logger = logging.getLogger(__name__)
class Image:
"""Our representation of an image, implemented as a wrapper around a standard
Pillow image."""
def __init__(self, img: PImage.Image):
self.img = img
"""The wrapped pillow image object."""
def __eq__(self, other):
if not isinstance(other, Image):
return False
a = np.asarray(self.img)
b = np.asarray(other.img)
return (a == b).all()
def __add__(self, other):
if isinstance(other, Image):
other = other.img
if not isinstance(other, PImage.Image):
raise TypeError("Addition is only supported between images.")
img = self.copy()
img.alpha_composite(other)
return img
@property
def __array_interface__(self):
# Ensure that our version of an image also plays nice with numpy.
return self.img.__array_interface__
def _repr_png_(self):
# Give nice previews in jupyter notebooks
return self.img._repr_png_()
@property
def size(self):
return self.img.size
def alpha_composite(self, im, *args, **kwargs):
"""Composites an image onto this image.
See :meth:`pillow:PIL.Image.Image.alpha_composite`
"""
if isinstance(im, Image):
im = im.img
self.img.alpha_composite(im, *args, **kwargs)
def copy(self):
"""Return a copy of the image.
See :meth:`pillow:PIL.Image.Image.copy`
"""
return Image(self.img.copy())
def paste(self, *args, **kwargs):
"""Paste another image into this image.
See :meth:`pillow:PIL.Image.Image.paste`
"""
self.img.paste(*args, **kwargs)
def save(self, *args, **kwargs):
"""Save the image with the given filename.
See :meth:`pillow:PIL.Image.Image.save`
"""
self.img.save(*args, **kwargs)
def thumbnail(self, *args, **kwargs):
"""Convert this image into a thumbail.
See :meth:`pillow:PIL.Image.Image.thumbnail`
"""
self.img.thumbnail(*args, **kwargs)
def new(color) -> Image:
"""Creates a new image with the given background color."""
return ast.Node.builtin(name="image", color=color)
def fromarray(*args, **kwargs):
"""Create an image from an array
See :func:`pillow:PIL.Image.fromarray`
"""
return Image(PImage.fromarray(*args, **kwargs))
def load(*args, **kwargs) -> Image:
"""Load an image from the given file.
See :func:`pillow:PIL.Image.open`
"""
return Image(PImage.open(*args, **kwargs))
def save(image: Image, filename: str, mkdirs: bool = False) -> None:
"""Save an image in PNG format.
:param filename: The filepath to save the image to.
:param mkdirs: If true, make any parent directories
"""
path = pathlib.Path(filename)
if not path.parent.exists() and mkdirs:
path.parent.mkdir(parents=True)
with open(filename, "wb") as f:
image.save(f)
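# A short usage sketch for ``save`` (paths are illustrative):
#
#     img = fromarray(pixels)                          # some rendered RGBA array
#     save(img, "out/render/result.png", mkdirs=True)  # creates out/render/ first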
def encode(image: Image) -> bytes:
"""Return the image encoded as a base64 string.
Parameters
----------
image:
The image to encode.
Example
-------
::
>>> import arlunio.image as image
>>> img = image.new((8, 8), color='red')
>>> image.encode(img)
b'iVBORw0KGgoAAAANSUhEUgAAAAgAAAAICAYAAADED76LAAAAFklEQVR4nGP8z8DwnwEPYMInOXwUAAASWwIOH0pJXQAAAABJRU5ErkJggg=='
"""
with io.BytesIO() as byte_stream:
image.save(byte_stream, "PNG")
image_bytes = byte_stream.getvalue()
return base64.b64encode(image_bytes)
def decode(bytestring: bytes) -> Image:
"""Decode the image represented by the given bytestring into an image object.
Parameters
----------
bytestring:
The bytestring to decode.
Example
-------
.. arlunio-image:: Decode Example
:include-code:
::
import arlunio.image as image
bytestring = b'iVBORw0KGgoAAAANSUhEUgAAAAgAAAAICAYAAADED76LAAAAFklEQVR4nGP8z8DwnwEPYMInOXwUAAASWwIOH0pJXQAAAABJRU5ErkJggg==' # noqa: E501
img = image.decode(bytestring)
"""
data = base64.b64decode(bytestring)
bytes_ = io.BytesIO(data)
    return load(bytes_)  # ``load`` already wraps the result in our Image type
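# encode/decode round-trip sketch: decoding the bytes produced by ``encode``
# gives back an equal image (per ``Image.__eq__`` above):
#
#     import numpy as np
#     img = fromarray(np.zeros((4, 4, 4), dtype=np.uint8))  # blank RGBA block
#     assert decode(encode(img)) == img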
def colorramp(values, start: Optional[str] = None, stop: Optional[str] = None) -> Image:
"""Given a 2d array of values, produce an image gradient based on them.
.. arlunio-image:: Colorramp Demo
:align: right
::
import arlunio.image as image
import arlunio.math as math
import numpy as np
cartesian = math.Cartesian()
p = cartesian(width=256, height=256)
x, y = p[:, :, 0], p[:, :, 1]
values = np.sin(2 * x * np.pi) * np.sin(2 * y * np.pi)
img = image.colorramp(values)
First this function will scale the input array so that all values fall in the range
:math:`[0, 1]`. It will then produce an image with the same dimensions as the
original array. The color of each pixel will be chosen based on the corresponding
value of the scaled array.
- If the value is :math:`0` the color will be given by the :code:`start` parameter
- If the value is :math:`1` the color will be given by the :code:`stop` parameter
- Otherwise the color will be some mix between the two.
Parameters
----------
values:
The array of values used to decide on the color.
start:
The color to use for values near :math:`0` (default, :code:`black`)
stop:
The color to use for values near :math:`1` (default, :code:`white`)
Examples
--------
.. arlunio-image:: Colorramp Demo 2
:include-code:
::
import arlunio.image as image
import arlunio.math as math
import numpy as np
cartesian = math.Cartesian()
p = cartesian(width=256, height=256)
x = image.colorramp(p[:, :, 0], start="#0000", stop="#f007")
y = image.colorramp(p[:, :, 1], start="#0000", stop="#00f7")
img = x + y
"""
# Scale all the values so that they fall into the range [0, 1]
minx = np.min(values)
vs = np.array(values) - minx
vs = vs / np.max(vs)
if start is None:
start = "black"
if stop is None:
stop = "white"
start = color.getcolor(start, "RGBA")
stop = color.getcolor(stop, "RGBA")
funcs = [math.lerp(a, b) for a, b in zip(start, stop)]
channels = [np.floor(func(vs)) for func in funcs]
pixels = np.array(np.dstack(channels), dtype=np.uint8)
return fromarray(pixels)
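# A quick endpoint check for the ramp: after scaling, values of 0 and 1 map
# exactly to ``start`` and ``stop``:
#
#     vs = np.array([[0.0, 1.0]])
#     img = colorramp(vs, start="black", stop="white")
#     # pixel (0, 0) comes out black, pixel (0, 1) white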
def fill(
region,
foreground: Optional[str] = None,
background: Optional[str] = None,
image: Optional[Image] = None,
) -> Image:
"""Apply color to an image, as specified by a mask.
Parameters
----------
mask:
The mask that selects the region to be coloured
foreground:
A string representation of the color to use, this can be in any format that is
supported by the :mod:`pillow:PIL.ImageColor` module. If omitted this will
default to black.
background:
In the case where an existing image is not provided this parameter can be used
to set the background color of the generated image. This can be any string that
is accepted by the :mod:`pillow:PIL.ImageColor` module. If omitted this will
default to transparent
image:
The image to color in, if omitted a blank image will be used.
Example
--------
.. arlunio-image:: Fill Demo
:include-code:
::
import arlunio.image as image
import arlunio.shape as shape
circle = shape.Circle(x0=-0.5, y0=0.25, r=0.6)
img = image.fill(circle(width=512, height=256), foreground='red')
circle.x0, circle.y0 = 0, 0
img = image.fill(circle(width=512, height=256), foreground='#0f0', image=img)
circle.x0, circle.y0 = 0.5, -0.25
img = image.fill(circle(width=512, height=256), foreground='blue', image=img)
"""
foreground = "#000" if foreground is None else foreground
fill_color = color.getcolor(foreground, "RGBA")
if image is None:
background = "#0000" if background is None else background
image = new(color=background)
if not isinstance(region, ast.Node):
region = region()
return ast.Node.fill(image, region, fill_color)
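# Design note: ``new`` and ``fill`` return ``ast.Node`` values instead of eagerly
# rendered pixels, so layered compositions like the example in ``fill``'s
# docstring stay cheap until the expression tree is actually evaluated.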
| [((360, 387), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (377, 387), False, 'import logging\n'), ((2591, 2634), 'arlunio.ast.Node.builtin', 'ast.Node.builtin', ([], {'name': '"""image"""', 'color': 'color'}), "(name='image', color=color)\n", (2607, 2634), True, 'import arlunio.ast as ast\n'), ((3223, 3245), 'pathlib.Path', 'pathlib.Path', (['filename'], {}), '(filename)\n', (3235, 3245), False, 'import pathlib\n'), ((4550, 4578), 'base64.b64decode', 'base64.b64decode', (['bytestring'], {}), '(bytestring)\n', (4566, 4578), False, 'import base64\n'), ((4592, 4608), 'io.BytesIO', 'io.BytesIO', (['data'], {}), '(data)\n', (4602, 4608), False, 'import io\n'), ((6556, 6570), 'numpy.min', 'np.min', (['values'], {}), '(values)\n', (6562, 6570), True, 'import numpy as np\n'), ((6734, 6763), 'arlunio.color.getcolor', 'color.getcolor', (['start', '"""RGBA"""'], {}), "(start, 'RGBA')\n", (6748, 6763), True, 'import arlunio.color as color\n'), ((6775, 6803), 'arlunio.color.getcolor', 'color.getcolor', (['stop', '"""RGBA"""'], {}), "(stop, 'RGBA')\n", (6789, 6803), True, 'import arlunio.color as color\n'), ((8564, 8598), 'arlunio.color.getcolor', 'color.getcolor', (['foreground', '"""RGBA"""'], {}), "(foreground, 'RGBA')\n", (8578, 8598), True, 'import arlunio.color as color\n'), ((8807, 8847), 'arlunio.ast.Node.fill', 'ast.Node.fill', (['image', 'region', 'fill_color'], {}), '(image, region, fill_color)\n', (8820, 8847), True, 'import arlunio.ast as ast\n'), ((730, 750), 'numpy.asarray', 'np.asarray', (['self.img'], {}), '(self.img)\n', (740, 750), True, 'import numpy as np\n'), ((763, 784), 'numpy.asarray', 'np.asarray', (['other.img'], {}), '(other.img)\n', (773, 784), True, 'import numpy as np\n'), ((2775, 2808), 'PIL.Image.fromarray', 'PImage.fromarray', (['*args'], {}), '(*args, **kwargs)\n', (2791, 2808), True, 'import PIL.Image as PImage\n'), ((2954, 2982), 'PIL.Image.open', 'PImage.open', (['*args'], {}), '(*args, **kwargs)\n', (2965, 2982), True, 'import PIL.Image as PImage\n'), ((3838, 3850), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (3848, 3850), False, 'import io\n'), ((3967, 3996), 'base64.b64encode', 'base64.b64encode', (['image_bytes'], {}), '(image_bytes)\n', (3983, 3996), False, 'import base64\n'), ((6580, 6596), 'numpy.array', 'np.array', (['values'], {}), '(values)\n', (6588, 6596), True, 'import numpy as np\n'), ((6618, 6628), 'numpy.max', 'np.max', (['vs'], {}), '(vs)\n', (6624, 6628), True, 'import numpy as np\n'), ((6818, 6833), 'arlunio.math.lerp', 'math.lerp', (['a', 'b'], {}), '(a, b)\n', (6827, 6833), True, 'import arlunio.math as math\n'), ((6941, 6960), 'numpy.dstack', 'np.dstack', (['channels'], {}), '(channels)\n', (6950, 6960), True, 'import numpy as np\n')] |
smarie/python-yamlable | yamlable/tests/test_yamlable.py | c726f5c56eea037968560ce83f9753bde1514991 | from copy import copy
try:
# Python 2 only:
from StringIO import StringIO
# create a variant that can serve as a context manager
class StringIO(StringIO):
def __enter__(self):
return self
def __exit__(self, exception_type, exception_value, traceback):
self.close()
except ImportError:
from io import StringIO
try: # python 3.5+
from typing import Dict, Any
from yamlable import Y
except ImportError:
pass
import pytest
from yaml import dump, load
from yamlable import YamlAble, yaml_info
def test_yamlable_incomplete_description():
""" Tests that if __yaml_tag_suffix__ is not provided a YamlAble subclass cannot be declared """
with pytest.raises(NotImplementedError) as err_info:
class Foo(YamlAble):
# __yaml_tag_suffix__ = 'foo'
def __to_yaml_dict__(self):
# type: (...) -> Dict[str, Any]
return copy(vars(self))
@classmethod
def __from_yaml_dict__(cls, # type: Type[Y]
dct, # type: Dict[str, Any]
yaml_tag # type: str
):
# type: (...) -> Y
return Foo(**dct)
# instantiate
f = Foo()
# dump
f.dumps_yaml()
assert "does not seem to have a non-None '__yaml_tag_suffix__' field" in str(err_info.value)
def test_yamlable():
""" Tests that YamlAble works correctly """
@yaml_info(yaml_tag_ns='yaml.tests')
class Foo(YamlAble):
# __yaml_tag_suffix__ = 'foo' not needed: we used @yaml_info
def __init__(self, a, b):
self.a = a
self.b = b
def __eq__(self, other):
return vars(self) == vars(other)
def __to_yaml_dict__(self):
# type: (...) -> Dict[str, Any]
return copy(vars(self))
@classmethod
def __from_yaml_dict__(cls, # type: Type[Y]
dct, # type: Dict[str, Any]
yaml_tag # type: str
):
# type: (...) -> Y
return Foo(**dct)
# instantiate
f = Foo(1, 'hello')
# dump
y = f.dumps_yaml(default_flow_style=False)
assert y == """!yamlable/yaml.tests.Foo
a: 1
b: hello
"""
# dump io
class MemorizingStringIO(StringIO):
""" A StringIO object that memorizes its buffer when it is closed (as opposed to the standard StringIO) """
def close(self):
self.value = self.getvalue()
# super(StringIO, self).close() # this does not work with python 2 old-style classes (StringIO is one)
StringIO.close(self)
s = MemorizingStringIO()
f.dump_yaml(s, default_flow_style=False)
assert s.value == y
# dump pyyaml
assert dump(f, default_flow_style=False) == y
# load
assert f == Foo.loads_yaml(y)
# load io
assert f == Foo.load_yaml(StringIO(y))
# load pyyaml
assert f == load(y)
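    # The namespace passed to @yaml_info is embedded in the document's tag, so a
    # hand-written document carrying the same tag decodes to the class as well
    # (a sketch grounded in the dump format asserted above):
    manual = "!yamlable/yaml.tests.Foo\na: 2\nb: world\n"
    assert Foo.loads_yaml(manual) == Foo(2, 'world')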
def test_yamlable_legacy_method_names():
""" Tests that YamlAbleMixIn works correctly """
global enc
global dec
enc, dec = False, False
@yaml_info(yaml_tag_ns='yaml.tests')
class FooLegacy(YamlAble):
# __yaml_tag_suffix__ = 'foo' not needed: we used @yaml_info
def __init__(self, a, b):
self.a = a
self.b = b
def __eq__(self, other):
return vars(self) == vars(other)
def to_yaml_dict(self):
# type: (...) -> Dict[str, Any]
global enc
enc = True
return copy(vars(self))
@classmethod
def from_yaml_dict(cls, # type: Type[Y]
dct, # type: Dict[str, Any]
yaml_tag # type: str
):
# type: (...) -> Y
global dec
dec = True
return FooLegacy(**dct)
# instantiate
f = FooLegacy(1, 'hello')
# dump
y = f.dumps_yaml(default_flow_style=False)
assert y == """!yamlable/yaml.tests.FooLegacy
a: 1
b: hello
"""
# dump io
class MemorizingStringIO(StringIO):
""" A StringIO object that memorizes its buffer when it is closed (as opposed to the standard StringIO) """
def close(self):
self.value = self.getvalue()
# super(StringIO, self).close() # this does not work with python 2 old-style classes (StringIO is one)
StringIO.close(self)
s = MemorizingStringIO()
f.dump_yaml(s, default_flow_style=False)
assert s.value == y
# dump pyyaml
assert dump(f, default_flow_style=False) == y
# load
assert f == FooLegacy.loads_yaml(y)
# load io
assert f == FooLegacy.load_yaml(StringIO(y))
# load pyyaml
assert f == load(y)
assert enc
assert dec
# TODO override so that tag is not supported, to check error message
def test_yamlable_not_supported():
@yaml_info(yaml_tag_ns='yaml.tests')
class Foo_Err(YamlAble):
# __yaml_tag_suffix__ = 'foo' not needed: we used @yaml_info
def __init__(self, a, b):
self.a = a
self.b = b
def __eq__(self, other):
return vars(self) == vars(other)
def __to_yaml_dict__(self):
# type: (...) -> Dict[str, Any]
return copy(vars(self))
@classmethod
def __from_yaml_dict__(cls, # type: Type[Y]
dct, # type: Dict[str, Any]
yaml_tag # type: str
):
# type: (...) -> Y
return Foo_Err(**dct)
@classmethod
def is_yaml_tag_supported(cls,
yaml_tag # type: str
):
# type: (...) -> bool
# ALWAYS return false
return False
with pytest.raises(TypeError) as err_info:
Foo_Err.loads_yaml("!yamlable/yaml.tests.Foo_Err {a: 1, b: hello}\n")
assert "No YamlAble subclass found able to decode object" in str(err_info.value)
def test_yamlable_default_impl():
""" tests that the default implementation works """
@yaml_info(yaml_tag_ns='yaml.tests')
class Foo_Default(YamlAble):
def __init__(self, a, b):
self.a = a
self.b = b
f = Foo_Default(1, 'hello')
s = """!yamlable/yaml.tests.Foo_Default
a: 1
b: hello
"""
assert dump(f, default_flow_style=False) == s
assert dump(load(dump(load(s))), default_flow_style=False) == s
def test_help_yaml_info():
@yaml_info("com.example.MyFoo")
class Foo(YamlAble):
pass
assert Foo.__yaml_tag_suffix__ == "com.example.MyFoo"
@yaml_info(yaml_tag_ns="com.example")
class Foo(YamlAble):
pass
assert Foo.__yaml_tag_suffix__ == "com.example.Foo"
assert Foo().dumps_yaml() == """!yamlable/com.example.Foo {}
"""
def test_abstract_parent_error():
"""This tests that we can define an abstract parent class with the YamlAble behaviour and inherit it"""
class AbstractFooE(YamlAble):
pass
class FooError(AbstractFooE):
"""
This class inherits from the parent without redefining a yaml tag
"""
def __init__(self, a, b):
self.a = a
self.b = b
def __eq__(self, other):
return vars(self) == vars(other)
# instantiate
e = FooError(1, 'hello')
# dump
with pytest.raises(NotImplementedError):
e.dumps_yaml()
def test_abstract_parent():
"""This tests that we can define an abstract parent class with the YamlAble behaviour and inherit it"""
class AbstractFooV(YamlAble):
pass
@yaml_info(yaml_tag_ns='yaml.tests')
class FooValid(AbstractFooV):
def __init__(self, a, b):
self.a = a
self.b = b
def __eq__(self, other):
return vars(self) == vars(other)
# instantiate
f = FooValid(1, 'hello')
# dump
y = f.dumps_yaml(default_flow_style=False)
assert y == """!yamlable/yaml.tests.FooValid
a: 1
b: hello
"""
# dump io
class MemorizingStringIO(StringIO):
""" A StringIO object that memorizes its buffer when it is closed (as opposed to the standard StringIO) """
def close(self):
self.value = self.getvalue()
# super(StringIO, self).close() # this does not work with python 2 old-style classes (StringIO is one)
StringIO.close(self)
s = MemorizingStringIO()
f.dump_yaml(s, default_flow_style=False)
assert s.value == y
# dump pyyaml
assert dump(f, default_flow_style=False) == y
# load
assert f == FooValid.loads_yaml(y)
# load io
assert f == FooValid.load_yaml(StringIO(y))
# load pyyaml
assert f == load(y)
| [((1534, 1569), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (1543, 1569), False, 'from yamlable import YamlAble, yaml_info\n'), ((3261, 3296), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (3270, 3296), False, 'from yamlable import YamlAble, yaml_info\n'), ((5081, 5116), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (5090, 5116), False, 'from yamlable import YamlAble, yaml_info\n'), ((6338, 6373), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (6347, 6373), False, 'from yamlable import YamlAble, yaml_info\n'), ((6736, 6766), 'yamlable.yaml_info', 'yaml_info', (['"""com.example.MyFoo"""'], {}), "('com.example.MyFoo')\n", (6745, 6766), False, 'from yamlable import YamlAble, yaml_info\n'), ((6870, 6906), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""com.example"""'}), "(yaml_tag_ns='com.example')\n", (6879, 6906), False, 'from yamlable import YamlAble, yaml_info\n'), ((7876, 7911), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (7885, 7911), False, 'from yamlable import YamlAble, yaml_info\n'), ((721, 755), 'pytest.raises', 'pytest.raises', (['NotImplementedError'], {}), '(NotImplementedError)\n', (734, 755), False, 'import pytest\n'), ((2914, 2947), 'yaml.dump', 'dump', (['f'], {'default_flow_style': '(False)'}), '(f, default_flow_style=False)\n', (2918, 2947), False, 'from yaml import dump, load\n'), ((3092, 3099), 'yaml.load', 'load', (['y'], {}), '(y)\n', (3096, 3099), False, 'from yaml import dump, load\n'), ((4740, 4773), 'yaml.dump', 'dump', (['f'], {'default_flow_style': '(False)'}), '(f, default_flow_style=False)\n', (4744, 4773), False, 'from yaml import dump, load\n'), ((4930, 4937), 'yaml.load', 'load', (['y'], {}), '(y)\n', (4934, 4937), False, 'from yaml import dump, load\n'), ((6038, 6062), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (6051, 6062), False, 'import pytest\n'), ((6593, 6626), 'yaml.dump', 'dump', (['f'], {'default_flow_style': '(False)'}), '(f, default_flow_style=False)\n', (6597, 6626), False, 'from yaml import dump, load\n'), ((7625, 7659), 'pytest.raises', 'pytest.raises', (['NotImplementedError'], {}), '(NotImplementedError)\n', (7638, 7659), False, 'import pytest\n'), ((8805, 8838), 'yaml.dump', 'dump', (['f'], {'default_flow_style': '(False)'}), '(f, default_flow_style=False)\n', (8809, 8838), False, 'from yaml import dump, load\n'), ((8993, 9000), 'yaml.load', 'load', (['y'], {}), '(y)\n', (8997, 9000), False, 'from yaml import dump, load\n'), ((2764, 2784), 'io.StringIO.close', 'StringIO.close', (['self'], {}), '(self)\n', (2778, 2784), False, 'from io import StringIO\n'), ((3044, 3055), 'io.StringIO', 'StringIO', (['y'], {}), '(y)\n', (3052, 3055), False, 'from io import StringIO\n'), ((4590, 4610), 'io.StringIO.close', 'StringIO.close', (['self'], {}), '(self)\n', (4604, 4610), False, 'from io import StringIO\n'), ((4882, 4893), 'io.StringIO', 'StringIO', (['y'], {}), '(y)\n', (4890, 4893), False, 'from io import StringIO\n'), ((8655, 8675), 'io.StringIO.close', 'StringIO.close', (['self'], {}), '(self)\n', (8669, 8675), False, 'from io import StringIO\n'), ((8945, 8956), 'io.StringIO', 'StringIO', (['y'], {}), '(y)\n', (8953, 8956), False, 'from io import StringIO\n'), ((6659, 6666), 'yaml.load', 
'load', (['s'], {}), '(s)\n', (6663, 6666), False, 'from yaml import dump, load\n')] |
vmario/twisted | src/twisted/web/server.py | 34f3d8f8c6f51772eaed92a89257ea011e9a818d | # -*- test-case-name: twisted.web.test.test_web -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
This is a web server which integrates with the twisted.internet infrastructure.
@var NOT_DONE_YET: A token value which L{twisted.web.resource.IResource.render}
implementations can return to indicate that the application will later call
C{.write} and C{.finish} to complete the request, and that the HTTP
connection should be left open.
@type NOT_DONE_YET: Opaque; do not depend on any particular type for this
value.
"""
import copy
import os
import re
from html import escape
from typing import List, Optional
from urllib.parse import quote as _quote
import zlib
from binascii import hexlify
from zope.interface import implementer
from twisted.python.compat import networkString, nativeString
from twisted.spread.pb import Copyable, ViewPoint
from twisted.internet import address, interfaces
from twisted.internet.error import AlreadyCalled, AlreadyCancelled
from twisted.web import iweb, http, util
from twisted.web.http import unquote
from twisted.python import reflect, failure, components
from twisted import copyright
from twisted.web import resource
from twisted.web.error import UnsupportedMethod
from incremental import Version
from twisted.python.deprecate import deprecatedModuleAttribute
from twisted.logger import Logger
NOT_DONE_YET = 1
__all__ = [
"supportedMethods",
"Request",
"Session",
"Site",
"version",
"NOT_DONE_YET",
"GzipEncoderFactory",
]
# backwards compatibility
deprecatedModuleAttribute(
Version("Twisted", 12, 1, 0),
"Please use twisted.web.http.datetimeToString instead",
"twisted.web.server",
"date_time_string",
)
deprecatedModuleAttribute(
Version("Twisted", 12, 1, 0),
"Please use twisted.web.http.stringToDatetime instead",
"twisted.web.server",
"string_date_time",
)
date_time_string = http.datetimeToString
string_date_time = http.stringToDatetime
# Support for other methods may be implemented on a per-resource basis.
supportedMethods = (b"GET", b"HEAD", b"POST")
def quote(string, *args, **kwargs):
return _quote(string.decode("charmap"), *args, **kwargs).encode("charmap")
def _addressToTuple(addr):
if isinstance(addr, address.IPv4Address):
return ("INET", addr.host, addr.port)
elif isinstance(addr, address.UNIXAddress):
return ("UNIX", addr.name)
else:
return tuple(addr)
@implementer(iweb.IRequest)
class Request(Copyable, http.Request, components.Componentized):
"""
An HTTP request.
@ivar defaultContentType: A L{bytes} giving the default I{Content-Type}
value to send in responses if no other value is set. L{None} disables
the default.
@ivar _insecureSession: The L{Session} object representing state that will
be transmitted over plain-text HTTP.
@ivar _secureSession: The L{Session} object representing the state that
will be transmitted only over HTTPS.
"""
defaultContentType = b"text/html"
site = None
appRootURL = None
prepath: Optional[List[bytes]] = None
postpath: Optional[List[bytes]] = None
__pychecker__ = "unusednames=issuer"
_inFakeHead = False
_encoder = None
_log = Logger()
def __init__(self, *args, **kw):
http.Request.__init__(self, *args, **kw)
components.Componentized.__init__(self)
def getStateToCopyFor(self, issuer):
x = self.__dict__.copy()
del x["transport"]
# XXX refactor this attribute out; it's from protocol
# del x['server']
del x["channel"]
del x["content"]
del x["site"]
self.content.seek(0, 0)
x["content_data"] = self.content.read()
x["remote"] = ViewPoint(issuer, self)
# Address objects aren't jellyable
x["host"] = _addressToTuple(x["host"])
x["client"] = _addressToTuple(x["client"])
# Header objects also aren't jellyable.
x["requestHeaders"] = list(x["requestHeaders"].getAllRawHeaders())
return x
# HTML generation helpers
def sibLink(self, name):
"""
Return the text that links to a sibling of the requested resource.
@param name: The sibling resource
@type name: C{bytes}
@return: A relative URL.
@rtype: C{bytes}
"""
if self.postpath:
return (len(self.postpath) * b"../") + name
else:
return name
def childLink(self, name):
"""
Return the text that links to a child of the requested resource.
@param name: The child resource
@type name: C{bytes}
@return: A relative URL.
@rtype: C{bytes}
"""
lpp = len(self.postpath)
if lpp > 1:
return ((lpp - 1) * b"../") + name
elif lpp == 1:
return name
else: # lpp == 0
if len(self.prepath) and self.prepath[-1]:
return self.prepath[-1] + b"/" + name
else:
return name
def gotLength(self, length):
"""
Called when HTTP channel got length of content in this request.
This method is not intended for users.
@param length: The length of the request body, as indicated by the
request headers. L{None} if the request headers do not indicate a
length.
"""
try:
getContentFile = self.channel.site.getContentFile
except AttributeError:
http.Request.gotLength(self, length)
else:
self.content = getContentFile(length)
def process(self):
"""
Process a request.
Find the addressed resource in this request's L{Site},
and call L{self.render()<Request.render()>} with it.
@see: L{Site.getResourceFor()}
"""
# get site from channel
self.site = self.channel.site
# set various default headers
self.setHeader(b"server", version)
self.setHeader(b"date", http.datetimeToString())
# Resource Identification
self.prepath = []
self.postpath = list(map(unquote, self.path[1:].split(b"/")))
# Short-circuit for requests whose path is '*'.
if self.path == b"*":
self._handleStar()
return
try:
resrc = self.site.getResourceFor(self)
if resource._IEncodingResource.providedBy(resrc):
encoder = resrc.getEncoder(self)
if encoder is not None:
self._encoder = encoder
self.render(resrc)
except BaseException:
self.processingFailed(failure.Failure())
def write(self, data):
"""
Write data to the transport (if not responding to a HEAD request).
@param data: A string to write to the response.
@type data: L{bytes}
"""
if not self.startedWriting:
# Before doing the first write, check to see if a default
# Content-Type header should be supplied. We omit it on
# NOT_MODIFIED and NO_CONTENT responses. We also omit it if there
# is a Content-Length header set to 0, as empty bodies don't need
# a content-type.
needsCT = self.code not in (http.NOT_MODIFIED, http.NO_CONTENT)
contentType = self.responseHeaders.getRawHeaders(b"content-type")
contentLength = self.responseHeaders.getRawHeaders(b"content-length")
contentLengthZero = contentLength and (contentLength[0] == b"0")
if (
needsCT
and contentType is None
and self.defaultContentType is not None
and not contentLengthZero
):
self.responseHeaders.setRawHeaders(
b"content-type", [self.defaultContentType]
)
# Only let the write happen if we're not generating a HEAD response by
# faking out the request method. Note, if we are doing that,
# startedWriting will never be true, and the above logic may run
# multiple times. It will only actually change the responseHeaders
# once though, so it's still okay.
if not self._inFakeHead:
if self._encoder:
data = self._encoder.encode(data)
http.Request.write(self, data)
def finish(self):
"""
Override C{http.Request.finish} for possible encoding.
"""
if self._encoder:
data = self._encoder.finish()
if data:
http.Request.write(self, data)
return http.Request.finish(self)
def render(self, resrc):
"""
Ask a resource to render itself.
If the resource does not support the requested method,
generate a C{NOT IMPLEMENTED} or C{NOT ALLOWED} response.
@param resrc: The resource to render.
@type resrc: L{twisted.web.resource.IResource}
@see: L{IResource.render()<twisted.web.resource.IResource.render()>}
"""
try:
body = resrc.render(self)
except UnsupportedMethod as e:
allowedMethods = e.allowedMethods
if (self.method == b"HEAD") and (b"GET" in allowedMethods):
# We must support HEAD (RFC 2616, 5.1.1). If the
# resource doesn't, fake it by giving the resource
# a 'GET' request and then return only the headers,
# not the body.
self._log.info(
"Using GET to fake a HEAD request for {resrc}", resrc=resrc
)
self.method = b"GET"
self._inFakeHead = True
body = resrc.render(self)
if body is NOT_DONE_YET:
self._log.info(
"Tried to fake a HEAD request for {resrc}, but "
"it got away from me.",
resrc=resrc,
)
# Oh well, I guess we won't include the content length.
else:
self.setHeader(b"content-length", b"%d" % (len(body),))
self._inFakeHead = False
self.method = b"HEAD"
self.write(b"")
self.finish()
return
if self.method in (supportedMethods):
# We MUST include an Allow header
# (RFC 2616, 10.4.6 and 14.7)
self.setHeader(b"Allow", b", ".join(allowedMethods))
s = (
"""Your browser approached me (at %(URI)s) with"""
""" the method "%(method)s". I only allow"""
""" the method%(plural)s %(allowed)s here."""
% {
"URI": escape(nativeString(self.uri)),
"method": nativeString(self.method),
"plural": ((len(allowedMethods) > 1) and "s") or "",
"allowed": ", ".join([nativeString(x) for x in allowedMethods]),
}
)
epage = resource.ErrorPage(http.NOT_ALLOWED, "Method Not Allowed", s)
body = epage.render(self)
else:
epage = resource.ErrorPage(
http.NOT_IMPLEMENTED,
"Huh?",
"I don't know how to treat a %s request."
% (escape(self.method.decode("charmap")),),
)
body = epage.render(self)
# end except UnsupportedMethod
if body is NOT_DONE_YET:
return
if not isinstance(body, bytes):
body = resource.ErrorPage(
http.INTERNAL_SERVER_ERROR,
"Request did not return bytes",
"Request: "
+ util._PRE(reflect.safe_repr(self))
+ "<br />"
+ "Resource: "
+ util._PRE(reflect.safe_repr(resrc))
+ "<br />"
+ "Value: "
+ util._PRE(reflect.safe_repr(body)),
).render(self)
if self.method == b"HEAD":
if len(body) > 0:
# This is a Bad Thing (RFC 2616, 9.4)
self._log.info(
"Warning: HEAD request {slf} for resource {resrc} is"
" returning a message body. I think I'll eat it.",
slf=self,
resrc=resrc,
)
self.setHeader(b"content-length", b"%d" % (len(body),))
self.write(b"")
else:
self.setHeader(b"content-length", b"%d" % (len(body),))
self.write(body)
self.finish()
def processingFailed(self, reason):
"""
Finish this request with an indication that processing failed and
possibly display a traceback.
@param reason: Reason this request has failed.
@type reason: L{twisted.python.failure.Failure}
@return: The reason passed to this method.
@rtype: L{twisted.python.failure.Failure}
"""
self._log.failure("", failure=reason)
if self.site.displayTracebacks:
body = (
b"<html><head><title>web.Server Traceback"
b" (most recent call last)</title></head>"
b"<body><b>web.Server Traceback"
b" (most recent call last):</b>\n\n"
+ util.formatFailure(reason)
+ b"\n\n</body></html>\n"
)
else:
body = (
b"<html><head><title>Processing Failed"
b"</title></head><body>"
b"<b>Processing Failed</b></body></html>"
)
self.setResponseCode(http.INTERNAL_SERVER_ERROR)
self.setHeader(b"content-type", b"text/html")
self.setHeader(b"content-length", b"%d" % (len(body),))
self.write(body)
self.finish()
return reason
def view_write(self, issuer, data):
"""Remote version of write; same interface."""
self.write(data)
def view_finish(self, issuer):
"""Remote version of finish; same interface."""
self.finish()
def view_addCookie(self, issuer, k, v, **kwargs):
"""Remote version of addCookie; same interface."""
self.addCookie(k, v, **kwargs)
def view_setHeader(self, issuer, k, v):
"""Remote version of setHeader; same interface."""
self.setHeader(k, v)
def view_setLastModified(self, issuer, when):
"""Remote version of setLastModified; same interface."""
self.setLastModified(when)
def view_setETag(self, issuer, tag):
"""Remote version of setETag; same interface."""
self.setETag(tag)
def view_setResponseCode(self, issuer, code, message=None):
"""
Remote version of setResponseCode; same interface.
"""
self.setResponseCode(code, message)
def view_registerProducer(self, issuer, producer, streaming):
"""Remote version of registerProducer; same interface.
(requires a remote producer.)
"""
self.registerProducer(_RemoteProducerWrapper(producer), streaming)
def view_unregisterProducer(self, issuer):
self.unregisterProducer()
### these calls remain local
_secureSession = None
_insecureSession = None
@property
def session(self):
"""
If a session has already been created or looked up with
L{Request.getSession}, this will return that object. (This will always
be the session that matches the security of the request; so if
C{forceNotSecure} is used on a secure request, this will not return
that session.)
@return: the session attribute
@rtype: L{Session} or L{None}
"""
if self.isSecure():
return self._secureSession
else:
return self._insecureSession
def getSession(self, sessionInterface=None, forceNotSecure=False):
"""
Check if there is a session cookie, and if not, create it.
By default, the cookie with be secure for HTTPS requests and not secure
for HTTP requests. If for some reason you need access to the insecure
cookie from a secure request you can set C{forceNotSecure = True}.
@param forceNotSecure: Should we retrieve a session that will be
transmitted over HTTP, even if this L{Request} was delivered over
HTTPS?
@type forceNotSecure: L{bool}
"""
# Make sure we aren't creating a secure session on a non-secure page
secure = self.isSecure() and not forceNotSecure
if not secure:
cookieString = b"TWISTED_SESSION"
sessionAttribute = "_insecureSession"
else:
cookieString = b"TWISTED_SECURE_SESSION"
sessionAttribute = "_secureSession"
session = getattr(self, sessionAttribute)
if session is not None:
# We have a previously created session.
try:
# Refresh the session, to keep it alive.
session.touch()
except (AlreadyCalled, AlreadyCancelled):
# Session has already expired.
session = None
if session is None:
# No session was created yet for this request.
cookiename = b"_".join([cookieString] + self.sitepath)
sessionCookie = self.getCookie(cookiename)
if sessionCookie:
try:
session = self.site.getSession(sessionCookie)
except KeyError:
pass
# if it still hasn't been set, fix it up.
if not session:
session = self.site.makeSession()
self.addCookie(cookiename, session.uid, path=b"/", secure=secure)
setattr(self, sessionAttribute, session)
if sessionInterface:
return session.getComponent(sessionInterface)
return session
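    # A minimal usage sketch (hypothetical resource code): inside a render
    # method, ``request.getSession()`` transparently creates or resumes the
    # session that matches the request's transport security:
    #
    #     def render_GET(self, request):
    #         session = request.getSession()
    #         session.visits = getattr(session, "visits", 0) + 1
    #         return networkString("visit #%d" % (session.visits,))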
def _prePathURL(self, prepath):
port = self.getHost().port
if self.isSecure():
default = 443
else:
default = 80
if port == default:
hostport = ""
else:
hostport = ":%d" % port
prefix = networkString(
"http%s://%s%s/"
% (
self.isSecure() and "s" or "",
nativeString(self.getRequestHostname()),
hostport,
)
)
path = b"/".join([quote(segment, safe=b"") for segment in prepath])
return prefix + path
def prePathURL(self):
return self._prePathURL(self.prepath)
def URLPath(self):
from twisted.python import urlpath
return urlpath.URLPath.fromRequest(self)
def rememberRootURL(self):
"""
Remember the currently-processed part of the URL for later
recalling.
"""
url = self._prePathURL(self.prepath[:-1])
self.appRootURL = url
def getRootURL(self):
"""
Get a previously-remembered URL.
@return: An absolute URL.
@rtype: L{bytes}
"""
return self.appRootURL
def _handleStar(self):
"""
Handle receiving a request whose path is '*'.
RFC 7231 defines an OPTIONS * request as being something that a client
can send as a low-effort way to probe server capabilities or readiness.
Rather than bother the user with this, we simply fast-path it back to
an empty 200 OK. Any non-OPTIONS verb gets a 405 Method Not Allowed
telling the client they can only use OPTIONS.
"""
if self.method == b"OPTIONS":
self.setResponseCode(http.OK)
else:
self.setResponseCode(http.NOT_ALLOWED)
self.setHeader(b"Allow", b"OPTIONS")
# RFC 7231 says we MUST set content-length 0 when responding to this
# with no body.
self.setHeader(b"Content-Length", b"0")
self.finish()
@implementer(iweb._IRequestEncoderFactory)
class GzipEncoderFactory:
"""
@cvar compressLevel: The compression level used by the compressor, default
to 9 (highest).
@since: 12.3
"""
_gzipCheckRegex = re.compile(br"(:?^|[\s,])gzip(:?$|[\s,])")
compressLevel = 9
def encoderForRequest(self, request):
"""
Check the headers if the client accepts gzip encoding, and encodes the
request if so.
"""
acceptHeaders = b",".join(
request.requestHeaders.getRawHeaders(b"accept-encoding", [])
)
if self._gzipCheckRegex.search(acceptHeaders):
encoding = request.responseHeaders.getRawHeaders(b"content-encoding")
if encoding:
encoding = b",".join(encoding + [b"gzip"])
else:
encoding = b"gzip"
request.responseHeaders.setRawHeaders(b"content-encoding", [encoding])
return _GzipEncoder(self.compressLevel, request)
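    # Typical wiring (a sketch; see twisted.web.resource.EncodingResourceWrapper):
    #
    #     from twisted.web.resource import EncodingResourceWrapper
    #     wrapped = EncodingResourceWrapper(MyResource(), [GzipEncoderFactory()])
    #
    # Responses from ``wrapped`` are then gzip-encoded whenever the client's
    # Accept-Encoding header allows it.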
@implementer(iweb._IRequestEncoder)
class _GzipEncoder:
"""
An encoder which supports gzip.
@ivar _zlibCompressor: The zlib compressor instance used to compress the
stream.
@ivar _request: A reference to the originating request.
@since: 12.3
"""
_zlibCompressor = None
def __init__(self, compressLevel, request):
self._zlibCompressor = zlib.compressobj(
compressLevel, zlib.DEFLATED, 16 + zlib.MAX_WBITS
)
self._request = request
def encode(self, data):
"""
Write to the request, automatically compressing data on the fly.
"""
if not self._request.startedWriting:
# Remove the content-length header, we can't honor it
# because we compress on the fly.
self._request.responseHeaders.removeHeader(b"content-length")
return self._zlibCompressor.compress(data)
def finish(self):
"""
Finish handling the request request, flushing any data from the zlib
buffer.
"""
remain = self._zlibCompressor.flush()
self._zlibCompressor = None
return remain
class _RemoteProducerWrapper:
def __init__(self, remote):
self.resumeProducing = remote.remoteMethod("resumeProducing")
self.pauseProducing = remote.remoteMethod("pauseProducing")
self.stopProducing = remote.remoteMethod("stopProducing")
class Session(components.Componentized):
"""
A user's session with a system.
This utility class contains no functionality, but is used to
represent a session.
@ivar site: The L{Site} that generated the session.
@type site: L{Site}
@ivar uid: A unique identifier for the session.
@type uid: L{bytes}
@ivar _reactor: An object providing L{IReactorTime} to use for scheduling
expiration.
@ivar sessionTimeout: Time after last modification the session will expire,
in seconds.
@type sessionTimeout: L{float}
@ivar lastModified: Time the C{touch()} method was last called (or time the
session was created). A UNIX timestamp as returned by
L{IReactorTime.seconds()}.
@type lastModified: L{float}
"""
sessionTimeout = 900
_expireCall = None
def __init__(self, site, uid, reactor=None):
"""
Initialize a session with a unique ID for that session.
@param reactor: L{IReactorTime} used to schedule expiration of the
session. If C{None}, the reactor associated with I{site} is used.
"""
super().__init__()
if reactor is None:
reactor = site.reactor
self._reactor = reactor
self.site = site
self.uid = uid
self.expireCallbacks = []
self.touch()
self.sessionNamespaces = {}
def startCheckingExpiration(self):
"""
Start expiration tracking.
@return: L{None}
"""
self._expireCall = self._reactor.callLater(self.sessionTimeout, self.expire)
def notifyOnExpire(self, callback):
"""
Call this callback when the session expires or logs out.
"""
self.expireCallbacks.append(callback)
def expire(self):
"""
Expire/logout of the session.
"""
del self.site.sessions[self.uid]
for c in self.expireCallbacks:
c()
self.expireCallbacks = []
if self._expireCall and self._expireCall.active():
self._expireCall.cancel()
# Break reference cycle.
self._expireCall = None
def touch(self):
"""
Mark the session as modified, which resets expiration timer.
"""
self.lastModified = self._reactor.seconds()
if self._expireCall is not None:
self._expireCall.reset(self.sessionTimeout)
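# A small sketch of the expiration hooks (names here are illustrative):
#
#     session = site.makeSession()
#     session.notifyOnExpire(lambda: print("session %r expired" % (session.uid,)))
#     session.touch()  # resets the sessionTimeout countdown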
version = networkString(f"TwistedWeb/{copyright.version}")
@implementer(interfaces.IProtocolNegotiationFactory)
class Site(http.HTTPFactory):
"""
A web site: manage log, sessions, and resources.
@ivar requestFactory: A factory which is called with (channel)
and creates L{Request} instances. Default to L{Request}.
@ivar displayTracebacks: If set, unhandled exceptions raised during
rendering are returned to the client as HTML. Default to C{False}.
@ivar sessionFactory: factory for sessions objects. Default to L{Session}.
@ivar sessions: Mapping of session IDs to objects returned by
C{sessionFactory}.
@type sessions: L{dict} mapping L{bytes} to L{Session} given the default
C{sessionFactory}
@ivar counter: The number of sessions that have been generated.
@type counter: L{int}
@ivar sessionCheckTime: Deprecated and unused. See
L{Session.sessionTimeout} instead.
"""
counter = 0
requestFactory = Request
displayTracebacks = False
sessionFactory = Session
sessionCheckTime = 1800
_entropy = os.urandom
def __init__(self, resource, requestFactory=None, *args, **kwargs):
"""
@param resource: The root of the resource hierarchy. All request
traversal for requests received by this factory will begin at this
resource.
@type resource: L{IResource} provider
@param requestFactory: Overwrite for default requestFactory.
@type requestFactory: C{callable} or C{class}.
@see: L{twisted.web.http.HTTPFactory.__init__}
"""
super().__init__(*args, **kwargs)
self.sessions = {}
self.resource = resource
if requestFactory is not None:
self.requestFactory = requestFactory
def _openLogFile(self, path):
from twisted.python import logfile
return logfile.LogFile(os.path.basename(path), os.path.dirname(path))
def __getstate__(self):
d = self.__dict__.copy()
d["sessions"] = {}
return d
def _mkuid(self):
"""
(internal) Generate an opaque, unique ID for a user's session.
"""
self.counter = self.counter + 1
return hexlify(self._entropy(32))
def makeSession(self):
"""
Generate a new Session instance, and store it for future reference.
"""
uid = self._mkuid()
session = self.sessions[uid] = self.sessionFactory(self, uid)
session.startCheckingExpiration()
return session
def getSession(self, uid):
"""
Get a previously generated session.
@param uid: Unique ID of the session.
@type uid: L{bytes}.
@raise KeyError: If the session is not found.
"""
return self.sessions[uid]
def buildProtocol(self, addr):
"""
Generate a channel attached to this site.
"""
channel = super().buildProtocol(addr)
channel.requestFactory = self.requestFactory
channel.site = self
return channel
isLeaf = 0
def render(self, request):
"""
Redirect because a Site is always a directory.
"""
request.redirect(request.prePathURL() + b"/")
request.finish()
def getChildWithDefault(self, pathEl, request):
"""
Emulate a resource's getChild method.
"""
request.site = self
return self.resource.getChildWithDefault(pathEl, request)
def getResourceFor(self, request):
"""
Get a resource for a request.
This iterates through the resource hierarchy, calling
getChildWithDefault on each resource it finds for a path element,
stopping when it hits an element where isLeaf is true.
"""
request.site = self
# Sitepath is used to determine cookie names between distributed
# servers and disconnected sites.
request.sitepath = copy.copy(request.prepath)
return resource.getChildForRequest(self.resource, request)
# IProtocolNegotiationFactory
def acceptableProtocols(self):
"""
Protocols this server can speak.
"""
baseProtocols = [b"http/1.1"]
if http.H2_ENABLED:
baseProtocols.insert(0, b"h2")
return baseProtocols
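# Serving a Site is a one-liner once a root resource exists (a sketch using
# real Twisted APIs; the port and path are illustrative):
#
#     from twisted.internet import reactor
#     from twisted.web.static import File
#
#     reactor.listenTCP(8080, Site(File("/srv/www")))
#     reactor.run()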
| [((2482, 2508), 'zope.interface.implementer', 'implementer', (['iweb.IRequest'], {}), '(iweb.IRequest)\n', (2493, 2508), False, 'from zope.interface import implementer\n'), ((20318, 20359), 'zope.interface.implementer', 'implementer', (['iweb._IRequestEncoderFactory'], {}), '(iweb._IRequestEncoderFactory)\n', (20329, 20359), False, 'from zope.interface import implementer\n'), ((21320, 21354), 'zope.interface.implementer', 'implementer', (['iweb._IRequestEncoder'], {}), '(iweb._IRequestEncoder)\n', (21331, 21354), False, 'from zope.interface import implementer\n'), ((25192, 25240), 'twisted.python.compat.networkString', 'networkString', (['f"""TwistedWeb/{copyright.version}"""'], {}), "(f'TwistedWeb/{copyright.version}')\n", (25205, 25240), False, 'from twisted.python.compat import networkString, nativeString\n'), ((25244, 25295), 'zope.interface.implementer', 'implementer', (['interfaces.IProtocolNegotiationFactory'], {}), '(interfaces.IProtocolNegotiationFactory)\n', (25255, 25295), False, 'from zope.interface import implementer\n'), ((1605, 1633), 'incremental.Version', 'Version', (['"""Twisted"""', '(12)', '(1)', '(0)'], {}), "('Twisted', 12, 1, 0)\n", (1612, 1633), False, 'from incremental import Version\n'), ((1778, 1806), 'incremental.Version', 'Version', (['"""Twisted"""', '(12)', '(1)', '(0)'], {}), "('Twisted', 12, 1, 0)\n", (1785, 1806), False, 'from incremental import Version\n'), ((3294, 3302), 'twisted.logger.Logger', 'Logger', ([], {}), '()\n', (3300, 3302), False, 'from twisted.logger import Logger\n'), ((20546, 20589), 're.compile', 're.compile', (["b'(:?^|[\\\\s,])gzip(:?$|[\\\\s,])'"], {}), "(b'(:?^|[\\\\s,])gzip(:?$|[\\\\s,])')\n", (20556, 20589), False, 'import re\n'), ((3349, 3389), 'twisted.web.http.Request.__init__', 'http.Request.__init__', (['self', '*args'], {}), '(self, *args, **kw)\n', (3370, 3389), False, 'from twisted.web import iweb, http, util\n'), ((3398, 3437), 'twisted.python.components.Componentized.__init__', 'components.Componentized.__init__', (['self'], {}), '(self)\n', (3431, 3437), False, 'from twisted.python import reflect, failure, components\n'), ((3802, 3825), 'twisted.spread.pb.ViewPoint', 'ViewPoint', (['issuer', 'self'], {}), '(issuer, self)\n', (3811, 3825), False, 'from twisted.spread.pb import Copyable, ViewPoint\n'), ((8744, 8769), 'twisted.web.http.Request.finish', 'http.Request.finish', (['self'], {}), '(self)\n', (8763, 8769), False, 'from twisted.web import iweb, http, util\n'), ((19036, 19069), 'twisted.python.urlpath.URLPath.fromRequest', 'urlpath.URLPath.fromRequest', (['self'], {}), '(self)\n', (19063, 19069), False, 'from twisted.python import urlpath\n'), ((21708, 21775), 'zlib.compressobj', 'zlib.compressobj', (['compressLevel', 'zlib.DEFLATED', '(16 + zlib.MAX_WBITS)'], {}), '(compressLevel, zlib.DEFLATED, 16 + zlib.MAX_WBITS)\n', (21724, 21775), False, 'import zlib\n'), ((29168, 29194), 'copy.copy', 'copy.copy', (['request.prepath'], {}), '(request.prepath)\n', (29177, 29194), False, 'import copy\n'), ((29210, 29261), 'twisted.web.resource.getChildForRequest', 'resource.getChildForRequest', (['self.resource', 'request'], {}), '(self.resource, request)\n', (29237, 29261), False, 'from twisted.web import resource\n'), ((6104, 6127), 'twisted.web.http.datetimeToString', 'http.datetimeToString', ([], {}), '()\n', (6125, 6127), False, 'from twisted.web import iweb, http, util\n'), ((6477, 6522), 'twisted.web.resource._IEncodingResource.providedBy', 'resource._IEncodingResource.providedBy', (['resrc'], {}), '(resrc)\n', (6515, 
6522), False, 'from twisted.web import resource\n'), ((8452, 8482), 'twisted.web.http.Request.write', 'http.Request.write', (['self', 'data'], {}), '(self, data)\n', (8470, 8482), False, 'from twisted.web import iweb, http, util\n'), ((27104, 27126), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (27120, 27126), False, 'import os\n'), ((27128, 27149), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (27143, 27149), False, 'import os\n'), ((5578, 5614), 'twisted.web.http.Request.gotLength', 'http.Request.gotLength', (['self', 'length'], {}), '(self, length)\n', (5600, 5614), False, 'from twisted.web import iweb, http, util\n'), ((8698, 8728), 'twisted.web.http.Request.write', 'http.Request.write', (['self', 'data'], {}), '(self, data)\n', (8716, 8728), False, 'from twisted.web import iweb, http, util\n'), ((6752, 6769), 'twisted.python.failure.Failure', 'failure.Failure', ([], {}), '()\n', (6767, 6769), False, 'from twisted.python import reflect, failure, components\n'), ((11281, 11342), 'twisted.web.resource.ErrorPage', 'resource.ErrorPage', (['http.NOT_ALLOWED', '"""Method Not Allowed"""', 's'], {}), "(http.NOT_ALLOWED, 'Method Not Allowed', s)\n", (11299, 11342), False, 'from twisted.web import resource\n'), ((13642, 13668), 'twisted.web.util.formatFailure', 'util.formatFailure', (['reason'], {}), '(reason)\n', (13660, 13668), False, 'from twisted.web import iweb, http, util\n'), ((11024, 11049), 'twisted.python.compat.nativeString', 'nativeString', (['self.method'], {}), '(self.method)\n', (11036, 11049), False, 'from twisted.python.compat import networkString, nativeString\n'), ((10965, 10987), 'twisted.python.compat.nativeString', 'nativeString', (['self.uri'], {}), '(self.uri)\n', (10977, 10987), False, 'from twisted.python.compat import networkString, nativeString\n'), ((12242, 12265), 'twisted.python.reflect.safe_repr', 'reflect.safe_repr', (['body'], {}), '(body)\n', (12259, 12265), False, 'from twisted.python import reflect, failure, components\n'), ((11174, 11189), 'twisted.python.compat.nativeString', 'nativeString', (['x'], {}), '(x)\n', (11186, 11189), False, 'from twisted.python.compat import networkString, nativeString\n'), ((12133, 12157), 'twisted.python.reflect.safe_repr', 'reflect.safe_repr', (['resrc'], {}), '(resrc)\n', (12150, 12157), False, 'from twisted.python import reflect, failure, components\n'), ((12022, 12045), 'twisted.python.reflect.safe_repr', 'reflect.safe_repr', (['self'], {}), '(self)\n', (12039, 12045), False, 'from twisted.python import reflect, failure, components\n')] |
jaschadub/pycoin | pycoin/symbols/doge.py | 1e8d0d9fe20ce0347b97847bb529cd1bd84c7442 | from pycoin.networks.bitcoinish import create_bitcoinish_network
network = create_bitcoinish_network(
symbol="DOGE", network_name="Dogecoin", subnet_name="mainnet",
wif_prefix_hex="9e", address_prefix_hex="1e", pay_to_script_prefix_hex="16",
bip32_prv_prefix_hex="02fd3955", bip32_pub_prefix_hex="02fd3929")
| [((76, 319), 'pycoin.networks.bitcoinish.create_bitcoinish_network', 'create_bitcoinish_network', ([], {'symbol': '"""DOGE"""', 'network_name': '"""Dogecoin"""', 'subnet_name': '"""mainnet"""', 'wif_prefix_hex': '"""9e"""', 'address_prefix_hex': '"""1e"""', 'pay_to_script_prefix_hex': '"""16"""', 'bip32_prv_prefix_hex': '"""02fd3955"""', 'bip32_pub_prefix_hex': '"""02fd3929"""'}), "(symbol='DOGE', network_name='Dogecoin',\n subnet_name='mainnet', wif_prefix_hex='9e', address_prefix_hex='1e',\n pay_to_script_prefix_hex='16', bip32_prv_prefix_hex='02fd3955',\n bip32_pub_prefix_hex='02fd3929')\n", (101, 319), False, 'from pycoin.networks.bitcoinish import create_bitcoinish_network\n')] |
MarkHershey/python-learning | Pset/hamming_numbers.py | 8d6c87941af6db5878b59483526ed402f4b319b3 | def hamming(n):
"""Returns the nth hamming number"""
hamming = {1}
x = 1
while len(hamming) <= n * 3.5:
new_hamming = {1}
for i in hamming:
new_hamming.add(i * 2)
new_hamming.add(i * 3)
new_hamming.add(i * 5)
# merge new number into hamming set
hamming = hamming.union(new_hamming)
hamming = sorted(list(hamming))
return hamming[n - 1]
print(hamming(970))
# hamming(968) should be 41943040
# hamming(969) should be 41990400
# hamming(970) should be 42187500
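# An alternative, exact construction (the classic three-pointer merge) for
# cross-checking the heuristic above; a sketch, not part of the original
# exercise:
def hamming_merge(n):
    """Return the nth hamming number by merging the 2-, 3- and 5-multiples."""
    nums = [1]
    i2 = i3 = i5 = 0
    while len(nums) < n:
        nxt = min(nums[i2] * 2, nums[i3] * 3, nums[i5] * 5)
        nums.append(nxt)
        # advance every pointer that produced nxt, to avoid duplicates
        if nxt == nums[i2] * 2: i2 += 1
        if nxt == nums[i3] * 3: i3 += 1
        if nxt == nums[i5] * 5: i5 += 1
    return nums[n - 1]

assert hamming_merge(970) == 42187500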
| [] |
needlehaystack/needlestack | examples/run_merger.py | e00529a2a7c2d85059936a85f54dfb55e515b6ef | import logging
from grpc_health.v1 import health_pb2, health_pb2_grpc
from grpc_health.v1.health import HealthServicer
from needlestack.apis import servicers_pb2_grpc
from needlestack.servicers import factory
from needlestack.servicers.merger import MergerServicer
from examples import configs
logging.getLogger("kazoo").setLevel("WARN")
def main():
config = configs.LocalDockerConfig()
server = factory.create_server(config)
manager = factory.create_zookeeper_cluster_manager(config)
manager.startup()
servicers_pb2_grpc.add_MergerServicer_to_server(MergerServicer(config, manager), server)
health = HealthServicer()
health_pb2_grpc.add_HealthServicer_to_server(health, server)
health.set("Merger", health_pb2.HealthCheckResponse.SERVING)
factory.serve(server)
if __name__ == "__main__":
main()
| [((369, 396), 'examples.configs.LocalDockerConfig', 'configs.LocalDockerConfig', ([], {}), '()\n', (394, 396), False, 'from examples import configs\n'), ((411, 440), 'needlestack.servicers.factory.create_server', 'factory.create_server', (['config'], {}), '(config)\n', (432, 440), False, 'from needlestack.servicers import factory\n'), ((455, 503), 'needlestack.servicers.factory.create_zookeeper_cluster_manager', 'factory.create_zookeeper_cluster_manager', (['config'], {}), '(config)\n', (495, 503), False, 'from needlestack.servicers import factory\n'), ((634, 650), 'grpc_health.v1.health.HealthServicer', 'HealthServicer', ([], {}), '()\n', (648, 650), False, 'from grpc_health.v1.health import HealthServicer\n'), ((655, 715), 'grpc_health.v1.health_pb2_grpc.add_HealthServicer_to_server', 'health_pb2_grpc.add_HealthServicer_to_server', (['health', 'server'], {}), '(health, server)\n', (699, 715), False, 'from grpc_health.v1 import health_pb2, health_pb2_grpc\n'), ((786, 807), 'needlestack.servicers.factory.serve', 'factory.serve', (['server'], {}), '(server)\n', (799, 807), False, 'from needlestack.servicers import factory\n'), ((298, 324), 'logging.getLogger', 'logging.getLogger', (['"""kazoo"""'], {}), "('kazoo')\n", (315, 324), False, 'import logging\n'), ((579, 610), 'needlestack.servicers.merger.MergerServicer', 'MergerServicer', (['config', 'manager'], {}), '(config, manager)\n', (593, 610), False, 'from needlestack.servicers.merger import MergerServicer\n')] |
lidevelopers/Lishogi-Bot-1 | engine_wrapper.py | 5e669870930fe497e323324f36ccdbf5b04d26d3 | import os
import shogi
import backoff
import subprocess
from util import *
import logging
logger = logging.getLogger(__name__)
import engine_ctrl
@backoff.on_exception(backoff.expo, BaseException, max_time=120)
def create_engine(config, board):
cfg = config["engine"]
engine_path = os.path.realpath(os.path.join(cfg["dir"], cfg["name"]))
engine_type = cfg.get("protocol")
engine_options = cfg.get("engine_options")
commands = [engine_path]
if engine_options:
for k, v in engine_options.items():
commands.append("--{}={}".format(k, v))
silence_stderr = cfg.get("silence_stderr", False)
return USIEngine(board, commands, cfg.get("usi_options", {}), cfg.get("go_commands", {}), silence_stderr)
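# A sketch of the config shape ``create_engine`` expects, based on the keys
# read above (all values illustrative):
#
#     config = {"engine": {
#         "dir": "./engines", "name": "fairy-stockfish",
#         "protocol": "usi",
#         "engine_options": {"threads": 2},
#         "usi_options": {"USI_Hash": 256},
#         "go_commands": {"movetime": 1000},
#         "silence_stderr": False,
#     }}
#     engine = create_engine(config, board)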
class EngineWrapper:
def __init__(self, board, commands, options=None, silence_stderr=False):
pass
def search_for(self, board, movetime):
pass
def first_search(self, board, movetime):
pass
def search(self, game, board, btime, wtime, binc, winc):
pass
def print_stats(self):
pass
def get_opponent_info(self, game):
pass
def name(self):
return self.engine.name
def report_game_result(self, game, board):
pass
def quit(self):
self.engine.kill_process()
def print_handler_stats(self):
pass
def get_handler_stats(self):
pass
class USIEngine(EngineWrapper):
def __init__(self, board, commands, options, go_commands={}, silence_stderr=False):
commands = commands[0] if len(commands) == 1 else commands
self.go_commands = go_commands
self.engine = engine_ctrl.Engine(commands)
self.engine.usi()
if options:
for name, value in options.items():
self.engine.setoption(name, value)
self.engine.isready()
def first_search(self, board, movetime):
best_move, _ = self.engine.go(board.sfen(), "", movetime=movetime)
return best_move
def search_with_ponder(self, game, board, btime, wtime, binc, winc, byo, ponder=False):
moves = [m.usi() for m in list(board.move_stack)]
cmds = self.go_commands
if len(cmds) > 0:
best_move, ponder_move = self.engine.go(
game.initial_fen,
moves,
nodes=cmds.get("nodes"),
depth=cmds.get("depth"),
movetime=cmds.get("movetime"),
ponder=ponder
)
else:
best_move, ponder_move = self.engine.go(
game.initial_fen,
moves,
btime=btime,
wtime=wtime,
binc=binc,
winc=winc,
byo=byo,
ponder=ponder
)
return (best_move, ponder_move)
def search(self, game, board, btime, wtime, binc, winc):
cmds = self.go_commands
moves = [m.usi() for m in list(board.move_stack)]
best_move, _ = self.engine.go(
game.initial_fen,
moves,
btime=btime,
wtime=wtime,
binc=binc,
winc=winc,
depth=cmds.get("depth"),
nodes=cmds.get("nodes"),
movetime=cmds.get("movetime")
)
return best_move
def stop(self):
self.engine.kill_process()
def print_stats(self, stats=None):
if stats is None:
stats = ['score', 'depth', 'nodes', 'nps']
info = self.engine.info
for stat in stats:
if stat in info:
logger.info("{}: {}".format(stat, info[stat]))
def get_stats(self, stats=None):
if stats is None:
stats = ['score', 'depth', 'nodes', 'nps']
info = self.engine.info
stats_str = []
for stat in stats:
if stat in info:
stats_str.append("{}: {}".format(stat, info[stat]))
return stats_str
def get_opponent_info(self, game):
name = game.opponent.name
if name:
rating = game.opponent.rating if game.opponent.rating is not None else "none"
title = game.opponent.title if game.opponent.title else "none"
player_type = "computer" if title == "BOT" else "human"
def report_game_result(self, game, board):
self.engine.protocol._position(board)
| [((100, 127), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (117, 127), False, 'import logging\n'), ((151, 214), 'backoff.on_exception', 'backoff.on_exception', (['backoff.expo', 'BaseException'], {'max_time': '(120)'}), '(backoff.expo, BaseException, max_time=120)\n', (171, 214), False, 'import backoff\n'), ((311, 348), 'os.path.join', 'os.path.join', (["cfg['dir']", "cfg['name']"], {}), "(cfg['dir'], cfg['name'])\n", (323, 348), False, 'import os\n'), ((1672, 1700), 'engine_ctrl.Engine', 'engine_ctrl.Engine', (['commands'], {}), '(commands)\n', (1690, 1700), False, 'import engine_ctrl\n')] |
sloriot/cgal-swig-bindings | examples/python/test_as2.py | c9c5afdf64fa0c52f9c3785173159167ab2b3163 | from CGAL.CGAL_Kernel import Point_2
from CGAL.CGAL_Kernel import Weighted_point_2
from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2
from CGAL.CGAL_Alpha_shape_2 import Weighted_alpha_shape_2
from CGAL.CGAL_Alpha_shape_2 import Weighted_alpha_shape_2_Face_handle
from CGAL.CGAL_Alpha_shape_2 import GENERAL, EXTERIOR, SINGULAR, REGULAR, INTERIOR
from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2_Vertex_handle
from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2_Face_handle
from CGAL.CGAL_Alpha_shape_2 import Face_Interval_3
lst = []
lst.append(Point_2(0, 0))
lst.append(Point_2(0, 4))
lst.append(Point_2(44, 0))
lst.append(Point_2(44, 5))
lst.append(Point_2(444, 51))
lst.append(Point_2(14, 1))
t = Alpha_shape_2(lst, 0, GENERAL)
t2 = Alpha_shape_2(lst, 0)
t.clear()
t.make_alpha_shape(lst)
for d in t.alpha():
print(d)
for v in t.finite_vertices():
    vertex_type = t.classify(v)  # avoid shadowing the builtin `type`
    print(v.get_range()[0])
    if vertex_type == INTERIOR:
        print("INTERIOR")
    elif vertex_type == SINGULAR:
        print("SINGULAR")
    elif vertex_type == REGULAR:
        print("REGULAR")
    elif vertex_type == EXTERIOR:
        print("EXTERIOR")
for f in t.finite_faces():
i = f.get_ranges(0)
print(i.first)
print(i.second)
print(i.third)
was = Weighted_alpha_shape_2()
lst_wp = []
lst_wp.append(Weighted_point_2(Point_2(0, 0), 1))
lst_wp.append(Weighted_point_2(Point_2(0, 4), 1))
lst_wp.append(Weighted_point_2(Point_2(44, 0), 1))
lst_wp.append(Weighted_point_2(Point_2(44, 5), 1))
lst_wp.append(Weighted_point_2(Point_2(444, 51), 1))
lst_wp.append(Weighted_point_2(Point_2(14, 1), 1))
was.make_alpha_shape(lst_wp)
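# Sketch (assumption: Weighted_alpha_shape_2 mirrors the Alpha_shape_2
# interface exercised above):
#   for v in was.finite_vertices():
#       print(was.classify(v))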
| [((702, 732), 'CGAL.CGAL_Alpha_shape_2.Alpha_shape_2', 'Alpha_shape_2', (['lst', '(0)', 'GENERAL'], {}), '(lst, 0, GENERAL)\n', (715, 732), False, 'from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2\n'), ((738, 759), 'CGAL.CGAL_Alpha_shape_2.Alpha_shape_2', 'Alpha_shape_2', (['lst', '(0)'], {}), '(lst, 0)\n', (751, 759), False, 'from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2\n'), ((1242, 1266), 'CGAL.CGAL_Alpha_shape_2.Weighted_alpha_shape_2', 'Weighted_alpha_shape_2', ([], {}), '()\n', (1264, 1266), False, 'from CGAL.CGAL_Alpha_shape_2 import Weighted_alpha_shape_2\n'), ((546, 559), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(0)'], {}), '(0, 0)\n', (553, 559), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((572, 585), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(4)'], {}), '(0, 4)\n', (579, 585), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((598, 612), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(0)'], {}), '(44, 0)\n', (605, 612), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((625, 639), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(5)'], {}), '(44, 5)\n', (632, 639), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((652, 668), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(444)', '(51)'], {}), '(444, 51)\n', (659, 668), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((681, 695), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(14)', '(1)'], {}), '(14, 1)\n', (688, 695), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1310, 1323), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(0)'], {}), '(0, 0)\n', (1317, 1323), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1360, 1373), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(4)'], {}), '(0, 4)\n', (1367, 1373), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1410, 1424), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(0)'], {}), '(44, 0)\n', (1417, 1424), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1461, 1475), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(5)'], {}), '(44, 5)\n', (1468, 1475), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1512, 1528), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(444)', '(51)'], {}), '(444, 51)\n', (1519, 1528), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1565, 1579), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(14)', '(1)'], {}), '(14, 1)\n', (1572, 1579), False, 'from CGAL.CGAL_Kernel import Point_2\n')] |
pavithra-mahamani/TAF | connections/mode.py | ff854adcc6ca3e50d9dc64e7756ca690251128d3 | '''
Created on Jan 18, 2018
@author: riteshagarwal
'''
java = False
rest = False
cli = False | [] |
encela95dus/ios_pythonista_examples | scene_action2.py | e136cdcb05126f0f9b9f6fb6365870876b419619 | import scene
class MyScene(scene.Scene):
def setup(self):
self.label_node = scene.LabelNode('A',
position=(100,400), parent=self)
self.start_flag = False
def update(self):
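        # slide the label 2 points to the right each frame until x reaches 340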
if self.start_flag:
x,y = self.label_node.position
if x < 340:
self.label_node.position = (x+2, y)
else:
self.start_flag = False
def touch_ended(self, touch):
self.start_flag = True
scene.run(MyScene())
| [((91, 145), 'scene.LabelNode', 'scene.LabelNode', (['"""A"""'], {'position': '(100, 400)', 'parent': 'self'}), "('A', position=(100, 400), parent=self)\n", (106, 145), False, 'import scene\n')] |
aq1/vkPostman | bot/commands/disconnect.py | db6b8d387d484ff53d12dcaf77ba3dcaa6da3822 | from bot.commands import BaseCommand
import mongo
class DisconnectCommand(BaseCommand):
_COMMAND = 'disconnect'
_DESCRIPTION = 'Close currently active chat.'
_SUCCESS_MESSAGE = 'Disconnected from chat'
def _callback(self, user, _bot, update, **kwargs):
return self._call(user, _bot, update, **kwargs)
def _call(self, user, _bot, update, **kwargs):
chat = mongo.chats.get_active_chat_by_telegram_id(user.id)
if chat:
mongo.chats.disable_chat(chat['_id'])
return True
_bot.send_message(
user.id,
'You are not connected to any vk user',
)
return False
| [((397, 448), 'mongo.chats.get_active_chat_by_telegram_id', 'mongo.chats.get_active_chat_by_telegram_id', (['user.id'], {}), '(user.id)\n', (439, 448), False, 'import mongo\n'), ((478, 515), 'mongo.chats.disable_chat', 'mongo.chats.disable_chat', (["chat['_id']"], {}), "(chat['_id'])\n", (502, 515), False, 'import mongo\n')] |
JordanKoeller/Pysch | pysh/bash_vm/shell_command.py | 6775db00e6d551328ce49a50a5987223a9e9a9c3 | from __future__ import annotations
import subprocess
import os
from typing import List, Iterator
class ShellCommand:
    def __init__(self, cmd: str):
        # run the command string through bash so shell syntax (quoting,
        # variables, pipes) keeps working
        self.run_args: List[str] = ["bash", "-c", cmd]
def exec(self, **extra_environ: str) -> ShellCommandOutput:
        result = subprocess.run(
            self.run_args,
            stdout=subprocess.PIPE,
            # extra keyword arguments are layered on top of the parent environment
            env={**os.environ, **extra_environ},
        )
print("Finished shell command")
return ShellCommandOutput(str(result.stdout, 'utf-8'), result.returncode)
class ShellCommandOutput:
def __init__(self, output_body: str, code: int):
self._code = code
self._value = output_body
@property
def succeeded(self) -> bool:
return self._code == 0
@property
def code(self) -> int:
return self._code
@property
def value(self) -> str:
return self._value
def lines(self) -> List[ShellCommandOutput]:
return [
ShellCommandOutput(substr, self.code)
for substr in self.value.splitlines()
if substr
]
def __iter__(self) -> Iterator[str]:
return iter(self._split_tokens())
def __str__(self) -> str:
return f'<STDOUT value={self.value} code={self.code} >'
def _split_tokens(self) -> List[str]:
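        # Split self.value on whitespace while keeping quoted substrings
        # ('single' or "double" quotes) together as single tokens.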
ret = []
in_quotes = None
accumulator: List[str] = []
for char in self.value:
if _whitespace(char) and not in_quotes and accumulator:
ret.append(''.join(accumulator))
accumulator = []
elif in_quotes == None and _quotes(char):
in_quotes = char
elif in_quotes and in_quotes == char:
in_quotes = None
if accumulator:
ret.append(''.join(accumulator))
accumulator = []
elif in_quotes and _quotes(char):
raise ValueError(
f"Found unmatched quote characters in string {self.value}")
else:
accumulator.append(char)
        if in_quotes:
            raise ValueError(
                f"Found unmatched quote characters in string {self.value}")
        if accumulator:
            # flush the final token (previously trailing tokens were dropped)
            ret.append(''.join(accumulator))
        return ret
def _quotes(c: str) -> bool:
return c in ['"', "'"]
def _whitespace(c: str) -> bool:
return str.isspace(c)
| [((390, 510), 'subprocess.run', 'subprocess.run', (['self.run_args'], {'stdout': 'subprocess.PIPE', 'env': '{**os.environ, **extra_environ if extra_environ else {}}'}), '(self.run_args, stdout=subprocess.PIPE, env={**os.environ, **\n extra_environ if extra_environ else {}})\n', (404, 510), False, 'import subprocess\n')] |
jgrigera/indico | indico/web/forms/fields/protection.py | b5538f2755bc38a02313d079bac831ee3dfb44ab | # This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import absolute_import, unicode_literals
from flask import render_template
from markupsafe import Markup
from indico.core.db import db
from indico.core.db.sqlalchemy.protection import ProtectionMode
from indico.util.i18n import _
from indico.web.forms.fields import IndicoEnumRadioField
from indico.web.forms.widgets import JinjaWidget
class IndicoProtectionField(IndicoEnumRadioField):
widget = JinjaWidget('forms/protection_widget.html', single_kwargs=True)
radio_widget = JinjaWidget('forms/radio_buttons_widget.html', orientation='horizontal', single_kwargs=True)
def __init__(self, *args, **kwargs):
self.protected_object = kwargs.pop('protected_object')(kwargs['_form'])
get_acl_message_url = kwargs.pop('acl_message_url', None)
self.acl_message_url = get_acl_message_url(kwargs['_form']) if get_acl_message_url else None
self.can_inherit_protection = self.protected_object.protection_parent is not None
if not self.can_inherit_protection:
kwargs['skip'] = {ProtectionMode.inheriting}
super(IndicoProtectionField, self).__init__(*args, enum=ProtectionMode, **kwargs)
def render_protection_message(self):
protected_object = self.get_form().protected_object
if hasattr(protected_object, 'get_non_inheriting_objects'):
non_inheriting_objects = protected_object.get_non_inheriting_objects()
else:
non_inheriting_objects = []
if isinstance(protected_object.protection_parent, db.m.Event):
parent_type = _('Event')
elif isinstance(protected_object.protection_parent, db.m.Category):
parent_type = _('Category')
else:
parent_type = _('Session')
rv = render_template('_protection_info.html', field=self, protected_object=protected_object,
parent_type=parent_type, non_inheriting_objects=non_inheriting_objects)
return Markup(rv)
| [((634, 697), 'indico.web.forms.widgets.JinjaWidget', 'JinjaWidget', (['"""forms/protection_widget.html"""'], {'single_kwargs': '(True)'}), "('forms/protection_widget.html', single_kwargs=True)\n", (645, 697), False, 'from indico.web.forms.widgets import JinjaWidget\n'), ((717, 813), 'indico.web.forms.widgets.JinjaWidget', 'JinjaWidget', (['"""forms/radio_buttons_widget.html"""'], {'orientation': '"""horizontal"""', 'single_kwargs': '(True)'}), "('forms/radio_buttons_widget.html', orientation='horizontal',\n single_kwargs=True)\n", (728, 813), False, 'from indico.web.forms.widgets import JinjaWidget\n'), ((1977, 2146), 'flask.render_template', 'render_template', (['"""_protection_info.html"""'], {'field': 'self', 'protected_object': 'protected_object', 'parent_type': 'parent_type', 'non_inheriting_objects': 'non_inheriting_objects'}), "('_protection_info.html', field=self, protected_object=\n protected_object, parent_type=parent_type, non_inheriting_objects=\n non_inheriting_objects)\n", (1992, 2146), False, 'from flask import render_template\n'), ((2181, 2191), 'markupsafe.Markup', 'Markup', (['rv'], {}), '(rv)\n', (2187, 2191), False, 'from markupsafe import Markup\n'), ((1784, 1794), 'indico.util.i18n._', '_', (['"""Event"""'], {}), "('Event')\n", (1785, 1794), False, 'from indico.util.i18n import _\n'), ((1897, 1910), 'indico.util.i18n._', '_', (['"""Category"""'], {}), "('Category')\n", (1898, 1910), False, 'from indico.util.i18n import _\n'), ((1951, 1963), 'indico.util.i18n._', '_', (['"""Session"""'], {}), "('Session')\n", (1952, 1963), False, 'from indico.util.i18n import _\n')] |
masterapps-au/pysaml2 | src/saml2/saml.py | 97ad6c066c93cb31a3c3b9d504877c02e93ca9a9 | #!/usr/bin/env python
#
# Generated Mon May 2 14:23:33 2011 by parse_xsd.py version 0.4.
#
# A summary of available specifications can be found at:
# https://wiki.oasis-open.org/security/FrontPage
#
# saml core specifications to be found at:
# if any questions arise, please consult the following pdf.
# http://docs.oasis-open.org/security/saml/v2.0/saml-core-2.0-os.pdf
# The specification was later updated with errata, and the new version is here:
# https://www.oasis-open.org/committees/download.php/56776/sstc-saml-core-errata-2.0-wd-07.pdf
#
try:
from base64 import encodebytes as b64encode
except ImportError:
from base64 import b64encode
from saml2.validate import valid_ipv4, MustValueError
from saml2.validate import valid_ipv6
from saml2.validate import ShouldValueError
from saml2.validate import valid_domain_name
import saml2
from saml2 import SamlBase
import six
from saml2 import xmldsig as ds
from saml2 import xmlenc as xenc
# authentication information fields
NAMESPACE = 'urn:oasis:names:tc:SAML:2.0:assertion'
# xmlschema definition
XSD = "xs"
# xmlschema templates and extensions
XS_NAMESPACE = 'http://www.w3.org/2001/XMLSchema'
# xmlschema-instance, which contains several builtin attributes
XSI_NAMESPACE = 'http://www.w3.org/2001/XMLSchema-instance'
# xml soap namespace
NS_SOAP_ENC = "http://schemas.xmlsoap.org/soap/encoding/"
# type definitions for xmlschemas
XSI_TYPE = '{%s}type' % XSI_NAMESPACE
# nil type definition for xmlschemas
XSI_NIL = '{%s}nil' % XSI_NAMESPACE
# IdP and SP usually communicate about a subject (NameID);
# the format determines the category the subject is in
# custom subject
NAMEID_FORMAT_UNSPECIFIED = (
"urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified")
# subject as email address
NAMEID_FORMAT_EMAILADDRESS = (
"urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress")
# subject as x509 key
NAMEID_FORMAT_X509SUBJECTNAME = (
"urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName")
# subject as windows domain name
NAMEID_FORMAT_WINDOWSDOMAINQUALIFIEDNAME = (
"urn:oasis:names:tc:SAML:1.1:nameid-format:WindowsDomainQualifiedName")
# subject from a kerberos instance
NAMEID_FORMAT_KERBEROS = (
"urn:oasis:names:tc:SAML:2.0:nameid-format:kerberos")
# subject as name
NAMEID_FORMAT_ENTITY = (
"urn:oasis:names:tc:SAML:2.0:nameid-format:entity")
# linked subject
NAMEID_FORMAT_PERSISTENT = (
"urn:oasis:names:tc:SAML:2.0:nameid-format:persistent")
# anonymous subject
NAMEID_FORMAT_TRANSIENT = (
"urn:oasis:names:tc:SAML:2.0:nameid-format:transient")
# subject available in encrypted format
NAMEID_FORMAT_ENCRYPTED = (
"urn:oasis:names:tc:SAML:2.0:nameid-format:encrypted")
# dict of available formats
NAMEID_FORMATS_SAML2 = (
('NAMEID_FORMAT_EMAILADDRESS', NAMEID_FORMAT_EMAILADDRESS),
('NAMEID_FORMAT_ENCRYPTED', NAMEID_FORMAT_ENCRYPTED),
('NAMEID_FORMAT_ENTITY', NAMEID_FORMAT_ENTITY),
('NAMEID_FORMAT_PERSISTENT', NAMEID_FORMAT_PERSISTENT),
('NAMEID_FORMAT_TRANSIENT', NAMEID_FORMAT_TRANSIENT),
('NAMEID_FORMAT_UNSPECIFIED', NAMEID_FORMAT_UNSPECIFIED),
)
# a profile outlines a set of rules describing how to embed SAML assertions.
# https://docs.oasis-open.org/security/saml/v2.0/saml-profiles-2.0-os.pdf
# The specification was later updated with errata, and the new version is here:
# https://www.oasis-open.org/committees/download.php/56782/sstc-saml-profiles-errata-2.0-wd-07.pdf
# XML based values for SAML attributes
PROFILE_ATTRIBUTE_BASIC = (
"urn:oasis:names:tc:SAML:2.0:profiles:attribute:basic")
# an AuthnRequest is made to initiate authentication
# authenticate the request with login credentials
AUTHN_PASSWORD = "urn:oasis:names:tc:SAML:2.0:ac:classes:Password"
# authenticate the request with login credentials, over tls/https
AUTHN_PASSWORD_PROTECTED = \
"urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport"
# attribute statements are key:value metadata shared with your app
# custom format
NAME_FORMAT_UNSPECIFIED = (
"urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified")
# uri format
NAME_FORMAT_URI = "urn:oasis:names:tc:SAML:2.0:attrname-format:uri"
# XML-based format
NAME_FORMAT_BASIC = "urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
# dict of available formats
NAME_FORMATS_SAML2 = (
('NAME_FORMAT_BASIC', NAME_FORMAT_BASIC),
('NAME_FORMAT_URI', NAME_FORMAT_URI),
('NAME_FORMAT_UNSPECIFIED', NAME_FORMAT_UNSPECIFIED),
)
# the SAML authority's decision can be predetermined by arbitrary context
# the specified action is permitted
DECISION_TYPE_PERMIT = "Permit"
# the specified action is denied
DECISION_TYPE_DENY = "Deny"
# the SAML authority cannot determine if the action is permitted or denied
DECISION_TYPE_INDETERMINATE = "Indeterminate"
# consent attributes determine whether consent has been given and under
# what conditions
# no claim to consent is made
CONSENT_UNSPECIFIED = "urn:oasis:names:tc:SAML:2.0:consent:unspecified"
# consent has been obtained
CONSENT_OBTAINED = "urn:oasis:names:tc:SAML:2.0:consent:obtained"
# consent has been obtained before the message has been initiated
CONSENT_PRIOR = "urn:oasis:names:tc:SAML:2.0:consent:prior"
# consent has been obtained implicitly
CONSENT_IMPLICIT = "urn:oasis:names:tc:SAML:2.0:consent:current-implicit"
# consent has been obtained explicitly
CONSENT_EXPLICIT = "urn:oasis:names:tc:SAML:2.0:consent:current-explicit"
# no consent has been obtained
CONSENT_UNAVAILABLE = "urn:oasis:names:tc:SAML:2.0:consent:unavailable"
# no consent is needed.
CONSENT_INAPPLICABLE = "urn:oasis:names:tc:SAML:2.0:consent:inapplicable"
# Subject confirmation methods (SCM): confirmation can be provided, besides
# by the subject itself, by third parties.
# http://docs.oasis-open.org/wss/oasis-wss-saml-token-profile-1.0.pdf
# holder-of-key: the subject is confirmed by proving possession of the given private/public key
SCM_HOLDER_OF_KEY = "urn:oasis:names:tc:SAML:2.0:cm:holder-of-key"
# sender-vouches: the 3rd party vouches for the subject and must include a
# security header signing its content.
SCM_SENDER_VOUCHES = "urn:oasis:names:tc:SAML:2.0:cm:sender-vouches"
# a bearer token is issued instead.
SCM_BEARER = "urn:oasis:names:tc:SAML:2.0:cm:bearer"
class AttributeValueBase(SamlBase):
def __init__(self,
text=None,
extension_elements=None,
extension_attributes=None):
self._extatt = {}
SamlBase.__init__(self,
text=None,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
if self._extatt:
self.extension_attributes = self._extatt
if text:
self.set_text(text)
elif not extension_elements:
self.extension_attributes = {XSI_NIL: 'true'}
elif XSI_TYPE in self.extension_attributes:
del self.extension_attributes[XSI_TYPE]
def __setattr__(self, key, value):
if key == "text":
self.set_text(value)
else:
SamlBase.__setattr__(self, key, value)
def verify(self):
if not self.text and not self.extension_elements:
if not self.extension_attributes:
raise Exception(
"Attribute value base should not have extension attributes"
)
if self.extension_attributes[XSI_NIL] != "true":
raise Exception(
"Attribute value base should not have extension attributes"
)
return True
else:
SamlBase.verify(self)
def set_type(self, typ):
try:
del self.extension_attributes[XSI_NIL]
except (AttributeError, KeyError):
pass
try:
self.extension_attributes[XSI_TYPE] = typ
except AttributeError:
self._extatt[XSI_TYPE] = typ
if typ.startswith('xs:'):
try:
self.extension_attributes['xmlns:xs'] = XS_NAMESPACE
except AttributeError:
self._extatt['xmlns:xs'] = XS_NAMESPACE
if typ.startswith('xsd:'):
try:
self.extension_attributes['xmlns:xsd'] = XS_NAMESPACE
except AttributeError:
self._extatt['xmlns:xsd'] = XS_NAMESPACE
def get_type(self):
try:
return self.extension_attributes[XSI_TYPE]
except (KeyError, AttributeError):
try:
return self._extatt[XSI_TYPE]
except KeyError:
return ""
def clear_type(self):
try:
del self.extension_attributes[XSI_TYPE]
except KeyError:
pass
try:
del self._extatt[XSI_TYPE]
except KeyError:
pass
def set_text(self, value, base64encode=False):
def _wrong_type_value(xsd, value):
msg = 'Type and value do not match: {xsd}:{type}:{value}'
msg = msg.format(xsd=xsd, type=type(value), value=value)
raise ValueError(msg)
# only work with six.string_types
_str = unicode if six.PY2 else str
if isinstance(value, six.binary_type):
value = value.decode('utf-8')
type_to_xsd = {
_str: 'string',
int: 'integer',
float: 'float',
bool: 'boolean',
type(None): '',
}
# entries of xsd-types each declaring:
# - a corresponding python type
# - a function to turn a string into that type
# - a function to turn that type into a text-value
xsd_types_props = {
'string': {
'type': _str,
'to_type': _str,
'to_text': _str,
},
'integer': {
'type': int,
'to_type': int,
'to_text': _str,
},
'short': {
'type': int,
'to_type': int,
'to_text': _str,
},
'int': {
'type': int,
'to_type': int,
'to_text': _str,
},
'long': {
'type': int,
'to_type': int,
'to_text': _str,
},
'float': {
'type': float,
'to_type': float,
'to_text': _str,
},
'double': {
'type': float,
'to_type': float,
'to_text': _str,
},
'boolean': {
'type': bool,
'to_type': lambda x: {
'true': True,
'false': False,
}[_str(x).lower()],
'to_text': lambda x: _str(x).lower(),
},
'base64Binary': {
'type': _str,
'to_type': _str,
'to_text': (
lambda x: b64encode(x.encode()) if base64encode else x
),
},
'anyType': {
'type': type(value),
'to_type': lambda x: x,
'to_text': lambda x: x,
},
'': {
'type': type(None),
'to_type': lambda x: None,
'to_text': lambda x: '',
},
}
xsd_string = (
'base64Binary' if base64encode
else self.get_type()
or type_to_xsd.get(type(value)))
xsd_ns, xsd_type = (
['', type(None)] if xsd_string is None
else ['', ''] if xsd_string == ''
else [
XSD if xsd_string in xsd_types_props else '',
xsd_string
] if ':' not in xsd_string
else xsd_string.split(':', 1))
xsd_type_props = xsd_types_props.get(xsd_type, {})
valid_type = xsd_type_props.get('type', type(None))
to_type = xsd_type_props.get('to_type', str)
to_text = xsd_type_props.get('to_text', str)
# cast to correct type before type-checking
if type(value) is _str and valid_type is not _str:
try:
value = to_type(value)
except (TypeError, ValueError, KeyError):
# the cast failed
_wrong_type_value(xsd=xsd_type, value=value)
if type(value) is not valid_type:
_wrong_type_value(xsd=xsd_type, value=value)
text = to_text(value)
self.set_type(
'{ns}:{type}'.format(ns=xsd_ns, type=xsd_type) if xsd_ns
else xsd_type if xsd_type
else '')
SamlBase.__setattr__(self, 'text', text)
return self
def harvest_element_tree(self, tree):
# Fill in the instance members from the contents of the XML tree.
for child in tree:
self._convert_element_tree_to_member(child)
for attribute, value in iter(tree.attrib.items()):
self._convert_element_attribute_to_member(attribute, value)
# if we have added children to this node
# we consider whitespace insignificant
# and remove/trim/strip whitespace
# and expect to not have actual text content
text = (
tree.text.strip()
if tree.text and self.extension_elements
else tree.text
)
        if text:
            self.set_text(text)
# if we have added a text node
# or other children to this node
# remove the nil marker
if text or self.extension_elements:
if XSI_NIL in self.extension_attributes:
del self.extension_attributes[XSI_NIL]
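# Illustrative sketch (not part of the generated module): subclasses such as
# AttributeValue (defined below) rely on set_text() to record a matching
# xsi:type, e.g.:
#   av = AttributeValue()
#   av.set_text(42)        # -> text '42' with xsi:type 'xs:integer'
#   av2 = AttributeValue()
#   av2.set_text('hello')  # -> text 'hello' with xsi:type 'xs:string'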
class BaseIDAbstractType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:BaseIDAbstractType element """
c_tag = 'BaseIDAbstractType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_attributes['NameQualifier'] = ('name_qualifier', 'string', False)
c_attributes['SPNameQualifier'] = ('sp_name_qualifier', 'string', False)
def __init__(self,
name_qualifier=None,
sp_name_qualifier=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.name_qualifier = name_qualifier
self.sp_name_qualifier = sp_name_qualifier
class NameIDType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:NameIDType element """
c_tag = 'NameIDType'
c_namespace = NAMESPACE
c_value_type = {'base': 'string'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_attributes['NameQualifier'] = ('name_qualifier', 'string', False)
c_attributes['SPNameQualifier'] = ('sp_name_qualifier', 'string', False)
c_attributes['Format'] = ('format', 'anyURI', False)
c_attributes['SPProvidedID'] = ('sp_provided_id', 'string', False)
def __init__(self,
name_qualifier=None,
sp_name_qualifier=None,
format=None,
sp_provided_id=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.name_qualifier = name_qualifier
self.sp_name_qualifier = sp_name_qualifier
self.format = format
self.sp_provided_id = sp_provided_id
def name_id_type__from_string(xml_string):
return saml2.create_class_from_xml_string(NameIDType_, xml_string)
class EncryptedElementType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:EncryptedElementType element
"""
c_tag = 'EncryptedElementType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2001/04/xmlenc#}EncryptedData'] = (
'encrypted_data',
xenc.EncryptedData)
c_children['{http://www.w3.org/2001/04/xmlenc#}EncryptedKey'] = (
'encrypted_key',
[xenc.EncryptedKey])
c_cardinality['encrypted_key'] = {"min": 0}
c_child_order.extend(['encrypted_data', 'encrypted_key'])
def __init__(self,
encrypted_data=None,
encrypted_key=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.encrypted_data = encrypted_data
self.encrypted_key = encrypted_key or []
def encrypted_element_type__from_string(xml_string):
return saml2.create_class_from_xml_string(EncryptedElementType_, xml_string)
class EncryptedID(EncryptedElementType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:EncryptedID element """
c_tag = 'EncryptedID'
c_namespace = NAMESPACE
c_children = EncryptedElementType_.c_children.copy()
c_attributes = EncryptedElementType_.c_attributes.copy()
c_child_order = EncryptedElementType_.c_child_order[:]
c_cardinality = EncryptedElementType_.c_cardinality.copy()
def encrypted_id_from_string(xml_string):
return saml2.create_class_from_xml_string(EncryptedID, xml_string)
class Issuer(NameIDType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:Issuer element """
c_tag = 'Issuer'
c_namespace = NAMESPACE
c_children = NameIDType_.c_children.copy()
c_attributes = NameIDType_.c_attributes.copy()
c_child_order = NameIDType_.c_child_order[:]
c_cardinality = NameIDType_.c_cardinality.copy()
def issuer_from_string(xml_string):
return saml2.create_class_from_xml_string(Issuer, xml_string)
class AssertionIDRef(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AssertionIDRef element """
c_tag = 'AssertionIDRef'
c_namespace = NAMESPACE
c_value_type = {'base': 'NCName'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def assertion_id_ref_from_string(xml_string):
return saml2.create_class_from_xml_string(AssertionIDRef, xml_string)
class AssertionURIRef(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AssertionURIRef element """
c_tag = 'AssertionURIRef'
c_namespace = NAMESPACE
c_value_type = {'base': 'anyURI'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def assertion_uri_ref_from_string(xml_string):
return saml2.create_class_from_xml_string(AssertionURIRef, xml_string)
class SubjectConfirmationDataType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:SubjectConfirmationDataType
element """
c_tag = 'SubjectConfirmationDataType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_attributes['NotBefore'] = ('not_before', 'dateTime', False)
c_attributes['NotOnOrAfter'] = ('not_on_or_after', 'dateTime', False)
c_attributes['Recipient'] = ('recipient', 'anyURI', False)
c_attributes['InResponseTo'] = ('in_response_to', 'NCName', False)
c_attributes['Address'] = ('address', 'string', False)
c_any = {"namespace": "##any", "processContents": "lax", "minOccurs": "0",
"maxOccurs": "unbounded"}
c_any_attribute = {"namespace": "##other", "processContents": "lax"}
def __init__(self,
not_before=None,
not_on_or_after=None,
recipient=None,
in_response_to=None,
address=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.not_before = not_before
self.not_on_or_after = not_on_or_after
self.recipient = recipient
self.in_response_to = in_response_to
self.address = address
def subject_confirmation_data_type__from_string(xml_string):
return saml2.create_class_from_xml_string(SubjectConfirmationDataType_,
xml_string)
class KeyInfoConfirmationDataType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:KeyInfoConfirmationDataType
element """
c_tag = 'KeyInfoConfirmationDataType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}KeyInfo'] = ('key_info',
[ds.KeyInfo])
c_cardinality['key_info'] = {"min": 1}
c_child_order.extend(['key_info'])
def __init__(self,
key_info=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.key_info = key_info or []
def key_info_confirmation_data_type__from_string(xml_string):
return saml2.create_class_from_xml_string(KeyInfoConfirmationDataType_,
xml_string)
class ConditionAbstractType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:ConditionAbstractType
element """
c_tag = 'ConditionAbstractType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
class Audience(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:Audience element """
c_tag = 'Audience'
c_namespace = NAMESPACE
c_value_type = {'base': 'anyURI'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def audience_from_string(xml_string):
return saml2.create_class_from_xml_string(Audience, xml_string)
class OneTimeUseType_(ConditionAbstractType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:OneTimeUseType element """
c_tag = 'OneTimeUseType'
c_namespace = NAMESPACE
c_children = ConditionAbstractType_.c_children.copy()
c_attributes = ConditionAbstractType_.c_attributes.copy()
c_child_order = ConditionAbstractType_.c_child_order[:]
c_cardinality = ConditionAbstractType_.c_cardinality.copy()
def one_time_use_type__from_string(xml_string):
return saml2.create_class_from_xml_string(OneTimeUseType_, xml_string)
class ProxyRestrictionType_(ConditionAbstractType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:ProxyRestrictionType element
"""
c_tag = 'ProxyRestrictionType'
c_namespace = NAMESPACE
c_children = ConditionAbstractType_.c_children.copy()
c_attributes = ConditionAbstractType_.c_attributes.copy()
c_child_order = ConditionAbstractType_.c_child_order[:]
c_cardinality = ConditionAbstractType_.c_cardinality.copy()
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Audience'] = ('audience',
[Audience])
c_cardinality['audience'] = {"min": 0}
c_attributes['Count'] = ('count', 'nonNegativeInteger', False)
c_child_order.extend(['audience'])
def __init__(self,
audience=None,
count=None,
text=None,
extension_elements=None,
extension_attributes=None):
ConditionAbstractType_.__init__(
self, text=text, extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.audience = audience or []
self.count = count
def proxy_restriction_type__from_string(xml_string):
return saml2.create_class_from_xml_string(ProxyRestrictionType_, xml_string)
class EncryptedAssertion(EncryptedElementType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:EncryptedAssertion element """
c_tag = 'EncryptedAssertion'
c_namespace = NAMESPACE
c_children = EncryptedElementType_.c_children.copy()
c_attributes = EncryptedElementType_.c_attributes.copy()
c_child_order = EncryptedElementType_.c_child_order[:]
c_cardinality = EncryptedElementType_.c_cardinality.copy()
def encrypted_assertion_from_string(xml_string):
return saml2.create_class_from_xml_string(EncryptedAssertion, xml_string)
class StatementAbstractType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:StatementAbstractType element
"""
c_tag = 'StatementAbstractType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
class SubjectLocalityType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:SubjectLocalityType element """
c_tag = 'SubjectLocalityType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_attributes['Address'] = ('address', 'string', False)
c_attributes['DNSName'] = ('dns_name', 'string', False)
def __init__(self,
address=None,
dns_name=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.address = address
self.dns_name = dns_name
def subject_locality_type__from_string(xml_string):
return saml2.create_class_from_xml_string(SubjectLocalityType_, xml_string)
class AuthnContextClassRef(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContextClassRef element
"""
c_tag = 'AuthnContextClassRef'
c_namespace = NAMESPACE
c_value_type = {'base': 'anyURI'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def authn_context_class_ref_from_string(xml_string):
return saml2.create_class_from_xml_string(AuthnContextClassRef, xml_string)
class AuthnContextDeclRef(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContextDeclRef element """
c_tag = 'AuthnContextDeclRef'
c_namespace = NAMESPACE
c_value_type = {'base': 'anyURI'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def authn_context_decl_ref_from_string(xml_string):
return saml2.create_class_from_xml_string(AuthnContextDeclRef, xml_string)
class AuthnContextDecl(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContextDecl element """
c_tag = 'AuthnContextDecl'
c_namespace = NAMESPACE
c_value_type = {'base': 'anyType'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def authn_context_decl_from_string(xml_string):
return saml2.create_class_from_xml_string(AuthnContextDecl, xml_string)
class AuthenticatingAuthority(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AuthenticatingAuthority
element """
c_tag = 'AuthenticatingAuthority'
c_namespace = NAMESPACE
c_value_type = {'base': 'anyURI'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def authenticating_authority_from_string(xml_string):
return saml2.create_class_from_xml_string(AuthenticatingAuthority,
xml_string)
class DecisionType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:DecisionType element """
c_tag = 'DecisionType'
c_namespace = NAMESPACE
c_value_type = {'base': 'string', 'enumeration': ['Permit', 'Deny',
'Indeterminate']}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def decision_type__from_string(xml_string):
return saml2.create_class_from_xml_string(DecisionType_, xml_string)
class ActionType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:ActionType element """
c_tag = 'ActionType'
c_namespace = NAMESPACE
c_value_type = {'base': 'string'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_attributes['Namespace'] = ('namespace', 'anyURI', True)
def __init__(self,
namespace=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.namespace = namespace
def action_type__from_string(xml_string):
return saml2.create_class_from_xml_string(ActionType_, xml_string)
class AttributeValue(AttributeValueBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AttributeValue element """
c_tag = 'AttributeValue'
c_namespace = NAMESPACE
c_value_type = {'base': 'anyType'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def attribute_value_from_string(xml_string):
return saml2.create_class_from_xml_string(AttributeValue, xml_string)
class EncryptedAttribute(EncryptedElementType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:EncryptedAttribute element """
c_tag = 'EncryptedAttribute'
c_namespace = NAMESPACE
c_children = EncryptedElementType_.c_children.copy()
c_attributes = EncryptedElementType_.c_attributes.copy()
c_child_order = EncryptedElementType_.c_child_order[:]
c_cardinality = EncryptedElementType_.c_cardinality.copy()
def encrypted_attribute_from_string(xml_string):
return saml2.create_class_from_xml_string(EncryptedAttribute, xml_string)
class BaseID(BaseIDAbstractType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:BaseID element """
c_tag = 'BaseID'
c_namespace = NAMESPACE
c_children = BaseIDAbstractType_.c_children.copy()
c_attributes = BaseIDAbstractType_.c_attributes.copy()
c_child_order = BaseIDAbstractType_.c_child_order[:]
c_cardinality = BaseIDAbstractType_.c_cardinality.copy()
def base_id_from_string(xml_string):
return saml2.create_class_from_xml_string(BaseID, xml_string)
class NameID(NameIDType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:NameID element
From the Oasis SAML2 Technical Overview:
"The <NameID> element within a <Subject> offers the ability to provide name
identifiers in a number of different formats. SAML's predefined formats
include: Email address, X.509 subject name, Windows domain qualified name,
Kerberos principal name, Entity identifier, Persistent identifier,
Transient identifier."
"""
c_tag = 'NameID'
c_namespace = NAMESPACE
c_children = NameIDType_.c_children.copy()
c_attributes = NameIDType_.c_attributes.copy()
c_child_order = NameIDType_.c_child_order[:]
c_cardinality = NameIDType_.c_cardinality.copy()
def name_id_from_string(xml_string):
return saml2.create_class_from_xml_string(NameID, xml_string)
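# Illustrative sketch (hypothetical identifier value): building a transient
# NameID and serializing it with the SamlBase machinery.
#   nid = NameID(format=NAMEID_FORMAT_TRANSIENT, text='_abc123')
#   xml = nid.to_string()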
class SubjectConfirmationData(SubjectConfirmationDataType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:SubjectConfirmationData
element """
c_tag = 'SubjectConfirmationData'
c_namespace = NAMESPACE
c_children = SubjectConfirmationDataType_.c_children.copy()
c_attributes = SubjectConfirmationDataType_.c_attributes.copy()
c_child_order = SubjectConfirmationDataType_.c_child_order[:]
c_cardinality = SubjectConfirmationDataType_.c_cardinality.copy()
def subject_confirmation_data_from_string(xml_string):
return saml2.create_class_from_xml_string(SubjectConfirmationData,
xml_string)
class Condition(ConditionAbstractType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:Condition element """
c_tag = 'Condition'
c_namespace = NAMESPACE
c_children = ConditionAbstractType_.c_children.copy()
c_attributes = ConditionAbstractType_.c_attributes.copy()
c_child_order = ConditionAbstractType_.c_child_order[:]
c_cardinality = ConditionAbstractType_.c_cardinality.copy()
def condition_from_string(xml_string):
return saml2.create_class_from_xml_string(Condition, xml_string)
class AudienceRestrictionType_(ConditionAbstractType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AudienceRestrictionType
element """
c_tag = 'AudienceRestrictionType'
c_namespace = NAMESPACE
c_children = ConditionAbstractType_.c_children.copy()
c_attributes = ConditionAbstractType_.c_attributes.copy()
c_child_order = ConditionAbstractType_.c_child_order[:]
c_cardinality = ConditionAbstractType_.c_cardinality.copy()
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Audience'] = ('audience',
[Audience])
c_cardinality['audience'] = {"min": 1}
c_child_order.extend(['audience'])
def __init__(self,
audience=None,
text=None,
extension_elements=None,
extension_attributes=None):
ConditionAbstractType_.__init__(
self, text=text, extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.audience = audience or []
def audience_restriction_type__from_string(xml_string):
return saml2.create_class_from_xml_string(AudienceRestrictionType_,
xml_string)
class OneTimeUse(OneTimeUseType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:OneTimeUse element """
c_tag = 'OneTimeUse'
c_namespace = NAMESPACE
c_children = OneTimeUseType_.c_children.copy()
c_attributes = OneTimeUseType_.c_attributes.copy()
c_child_order = OneTimeUseType_.c_child_order[:]
c_cardinality = OneTimeUseType_.c_cardinality.copy()
def one_time_use_from_string(xml_string):
return saml2.create_class_from_xml_string(OneTimeUse, xml_string)
class ProxyRestriction(ProxyRestrictionType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:ProxyRestriction element """
c_tag = 'ProxyRestriction'
c_namespace = NAMESPACE
c_children = ProxyRestrictionType_.c_children.copy()
c_attributes = ProxyRestrictionType_.c_attributes.copy()
c_child_order = ProxyRestrictionType_.c_child_order[:]
c_cardinality = ProxyRestrictionType_.c_cardinality.copy()
def proxy_restriction_from_string(xml_string):
return saml2.create_class_from_xml_string(ProxyRestriction, xml_string)
class Statement(StatementAbstractType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:Statement element """
c_tag = 'Statement'
c_namespace = NAMESPACE
c_children = StatementAbstractType_.c_children.copy()
c_attributes = StatementAbstractType_.c_attributes.copy()
c_child_order = StatementAbstractType_.c_child_order[:]
c_cardinality = StatementAbstractType_.c_cardinality.copy()
def statement_from_string(xml_string):
return saml2.create_class_from_xml_string(Statement, xml_string)
class SubjectLocality(SubjectLocalityType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:SubjectLocality element """
c_tag = 'SubjectLocality'
c_namespace = NAMESPACE
c_children = SubjectLocalityType_.c_children.copy()
c_attributes = SubjectLocalityType_.c_attributes.copy()
c_child_order = SubjectLocalityType_.c_child_order[:]
c_cardinality = SubjectLocalityType_.c_cardinality.copy()
def verify(self):
if self.address:
# dotted-decimal IPv4 or RFC3513 IPv6 address
            if not (valid_ipv4(self.address) or valid_ipv6(self.address)):
                raise ShouldValueError("Not an IPv4 or IPv6 address")
elif self.dns_name:
valid_domain_name(self.dns_name)
return SubjectLocalityType_.verify(self)
def subject_locality_from_string(xml_string):
return saml2.create_class_from_xml_string(SubjectLocality, xml_string)
class AuthnContextType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContextType element """
c_tag = 'AuthnContextType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children[
'{urn:oasis:names:tc:SAML:2.0:assertion}AuthnContextClassRef'] = (
'authn_context_class_ref', AuthnContextClassRef)
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AuthnContextDecl'] = (
'authn_context_decl',
AuthnContextDecl)
c_cardinality['authn_context_decl'] = {"min": 0, "max": 1}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AuthnContextDeclRef'] = (
'authn_context_decl_ref',
AuthnContextDeclRef)
c_cardinality['authn_context_decl_ref'] = {"min": 0, "max": 1}
c_children[
'{urn:oasis:names:tc:SAML:2.0:assertion}AuthenticatingAuthority'] = (
'authenticating_authority', [AuthenticatingAuthority])
c_cardinality['authenticating_authority'] = {"min": 0}
c_child_order.extend(['authn_context_class_ref', 'authn_context_decl',
'authn_context_decl_ref', 'authenticating_authority'])
def __init__(self,
authn_context_class_ref=None,
authn_context_decl=None,
authn_context_decl_ref=None,
authenticating_authority=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.authn_context_class_ref = authn_context_class_ref
self.authn_context_decl = authn_context_decl
self.authn_context_decl_ref = authn_context_decl_ref
self.authenticating_authority = authenticating_authority or []
def verify(self):
if self.authn_context_decl and self.authn_context_decl_ref:
raise Exception(
"Invalid Response: "
"Cannot have both <AuthnContextDecl> and <AuthnContextDeclRef>"
)
return SamlBase.verify(self)
def authn_context_type__from_string(xml_string):
return saml2.create_class_from_xml_string(AuthnContextType_, xml_string)
class Action(ActionType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:Action element """
c_tag = 'Action'
c_namespace = NAMESPACE
c_children = ActionType_.c_children.copy()
c_attributes = ActionType_.c_attributes.copy()
c_child_order = ActionType_.c_child_order[:]
c_cardinality = ActionType_.c_cardinality.copy()
def action_from_string(xml_string):
return saml2.create_class_from_xml_string(Action, xml_string)
class AttributeType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AttributeType element """
c_tag = 'AttributeType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AttributeValue'] = (
'attribute_value',
[AttributeValue])
c_cardinality['attribute_value'] = {"min": 0}
c_attributes['Name'] = ('name', 'string', True)
c_attributes['NameFormat'] = ('name_format', 'anyURI', False)
c_attributes['FriendlyName'] = ('friendly_name', 'string', False)
c_child_order.extend(['attribute_value'])
c_any_attribute = {"namespace": "##other", "processContents": "lax"}
def __init__(self,
attribute_value=None,
name=None,
name_format=NAME_FORMAT_URI,
friendly_name=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.attribute_value = attribute_value or []
self.name = name
self.name_format = name_format
self.friendly_name = friendly_name
# when consuming such elements, default to NAME_FORMAT_UNSPECIFIED as NameFormat
def harvest_element_tree(self, tree):
tree.attrib.setdefault('NameFormat', NAME_FORMAT_UNSPECIFIED)
SamlBase.harvest_element_tree(self, tree)
def attribute_type__from_string(xml_string):
return saml2.create_class_from_xml_string(AttributeType_, xml_string)
class SubjectConfirmationType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:SubjectConfirmationType
element """
c_tag = 'SubjectConfirmationType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}BaseID'] = ('base_id',
BaseID)
c_cardinality['base_id'] = {"min": 0, "max": 1}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}NameID'] = ('name_id',
NameID)
c_cardinality['name_id'] = {"min": 0, "max": 1}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedID'] = (
'encrypted_id',
EncryptedID)
c_cardinality['encrypted_id'] = {"min": 0, "max": 1}
c_children[
'{urn:oasis:names:tc:SAML:2.0:assertion}SubjectConfirmationData'] = (
'subject_confirmation_data', SubjectConfirmationData)
c_cardinality['subject_confirmation_data'] = {"min": 0, "max": 1}
c_attributes['Method'] = ('method', 'anyURI', True)
c_child_order.extend(['base_id', 'name_id', 'encrypted_id',
'subject_confirmation_data'])
def __init__(self,
base_id=None,
name_id=None,
encrypted_id=None,
subject_confirmation_data=None,
method=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.base_id = base_id
self.name_id = name_id
self.encrypted_id = encrypted_id
self.subject_confirmation_data = subject_confirmation_data
self.method = method
def subject_confirmation_type__from_string(xml_string):
return saml2.create_class_from_xml_string(SubjectConfirmationType_,
xml_string)
class AudienceRestriction(AudienceRestrictionType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AudienceRestriction element """
c_tag = 'AudienceRestriction'
c_namespace = NAMESPACE
c_children = AudienceRestrictionType_.c_children.copy()
c_attributes = AudienceRestrictionType_.c_attributes.copy()
c_child_order = AudienceRestrictionType_.c_child_order[:]
c_cardinality = AudienceRestrictionType_.c_cardinality.copy()
def audience_restriction_from_string(xml_string):
return saml2.create_class_from_xml_string(AudienceRestriction, xml_string)
class AuthnContext(AuthnContextType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContext element """
c_tag = 'AuthnContext'
c_namespace = NAMESPACE
c_children = AuthnContextType_.c_children.copy()
c_attributes = AuthnContextType_.c_attributes.copy()
c_child_order = AuthnContextType_.c_child_order[:]
c_cardinality = AuthnContextType_.c_cardinality.copy()
def authn_context_from_string(xml_string):
return saml2.create_class_from_xml_string(AuthnContext, xml_string)
class Attribute(AttributeType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:Attribute element """
c_tag = 'Attribute'
c_namespace = NAMESPACE
c_children = AttributeType_.c_children.copy()
c_attributes = AttributeType_.c_attributes.copy()
c_child_order = AttributeType_.c_child_order[:]
c_cardinality = AttributeType_.c_cardinality.copy()
def attribute_from_string(xml_string):
return saml2.create_class_from_xml_string(Attribute, xml_string)
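# Illustrative sketch (hypothetical attribute data): an Attribute carrying a
# single AttributeValue.
#   attr = Attribute(
#       name='mail', friendly_name='mail', name_format=NAME_FORMAT_URI,
#       attribute_value=[AttributeValue(text='[email protected]')])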
class SubjectConfirmation(SubjectConfirmationType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:SubjectConfirmation element """
c_tag = 'SubjectConfirmation'
c_namespace = NAMESPACE
c_children = SubjectConfirmationType_.c_children.copy()
c_attributes = SubjectConfirmationType_.c_attributes.copy()
c_child_order = SubjectConfirmationType_.c_child_order[:]
c_cardinality = SubjectConfirmationType_.c_cardinality.copy()
def subject_confirmation_from_string(xml_string):
return saml2.create_class_from_xml_string(SubjectConfirmation, xml_string)
class ConditionsType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:ConditionsType element """
c_tag = 'ConditionsType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Condition'] = (
'condition',
[Condition])
c_cardinality['condition'] = {"min": 0}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AudienceRestriction'] = (
'audience_restriction',
[AudienceRestriction])
c_cardinality['audience_restriction'] = {"min": 0}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}OneTimeUse'] = (
'one_time_use',
[OneTimeUse])
c_cardinality['one_time_use'] = {"min": 0}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}ProxyRestriction'] = (
'proxy_restriction',
[ProxyRestriction])
c_cardinality['proxy_restriction'] = {"min": 0}
c_attributes['NotBefore'] = ('not_before', 'dateTime', False)
c_attributes['NotOnOrAfter'] = ('not_on_or_after', 'dateTime', False)
c_child_order.extend(['condition', 'audience_restriction', 'one_time_use',
'proxy_restriction'])
def __init__(self,
condition=None,
audience_restriction=None,
one_time_use=None,
proxy_restriction=None,
not_before=None,
not_on_or_after=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.condition = condition or []
self.audience_restriction = audience_restriction or []
self.one_time_use = one_time_use or []
self.proxy_restriction = proxy_restriction or []
self.not_before = not_before
self.not_on_or_after = not_on_or_after
def verify(self):
if self.one_time_use:
if len(self.one_time_use) != 1:
raise Exception("Cannot be used more than once")
if self.proxy_restriction:
if len(self.proxy_restriction) != 1:
raise Exception("Cannot be used more than once")
return SamlBase.verify(self)
def conditions_type__from_string(xml_string):
return saml2.create_class_from_xml_string(ConditionsType_, xml_string)
class AuthnStatementType_(StatementAbstractType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AuthnStatementType element """
c_tag = 'AuthnStatementType'
c_namespace = NAMESPACE
c_children = StatementAbstractType_.c_children.copy()
c_attributes = StatementAbstractType_.c_attributes.copy()
c_child_order = StatementAbstractType_.c_child_order[:]
c_cardinality = StatementAbstractType_.c_cardinality.copy()
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}SubjectLocality'] = (
'subject_locality', SubjectLocality)
c_cardinality['subject_locality'] = {"min": 0, "max": 1}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AuthnContext'] = (
'authn_context', AuthnContext)
c_attributes['AuthnInstant'] = ('authn_instant', 'dateTime', True)
c_attributes['SessionIndex'] = ('session_index', 'string', False)
c_attributes['SessionNotOnOrAfter'] = ('session_not_on_or_after',
'dateTime', False)
c_child_order.extend(['subject_locality', 'authn_context'])
def __init__(self,
subject_locality=None,
authn_context=None,
authn_instant=None,
session_index=None,
session_not_on_or_after=None,
text=None,
extension_elements=None,
extension_attributes=None):
StatementAbstractType_.__init__(
self, text=text, extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.subject_locality = subject_locality
self.authn_context = authn_context
self.authn_instant = authn_instant
self.session_index = session_index
self.session_not_on_or_after = session_not_on_or_after
def authn_statement_type__from_string(xml_string):
return saml2.create_class_from_xml_string(AuthnStatementType_, xml_string)
class AttributeStatementType_(StatementAbstractType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AttributeStatementType
element """
c_tag = 'AttributeStatementType'
c_namespace = NAMESPACE
c_children = StatementAbstractType_.c_children.copy()
c_attributes = StatementAbstractType_.c_attributes.copy()
c_child_order = StatementAbstractType_.c_child_order[:]
c_cardinality = StatementAbstractType_.c_cardinality.copy()
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Attribute'] = (
'attribute',
[Attribute])
c_cardinality['attribute'] = {"min": 0}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedAttribute'] = (
'encrypted_attribute',
[EncryptedAttribute])
c_cardinality['encrypted_attribute'] = {"min": 0}
c_child_order.extend(['attribute', 'encrypted_attribute'])
def __init__(self,
attribute=None,
encrypted_attribute=None,
text=None,
extension_elements=None,
extension_attributes=None):
StatementAbstractType_.__init__(
self, text=text, extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.attribute = attribute or []
self.encrypted_attribute = encrypted_attribute or []
def attribute_statement_type__from_string(xml_string):
return saml2.create_class_from_xml_string(AttributeStatementType_,
xml_string)
class SubjectType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:SubjectType element """
c_tag = 'SubjectType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}BaseID'] = ('base_id',
BaseID)
c_cardinality['base_id'] = {"min": 0, "max": 1}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}NameID'] = ('name_id',
NameID)
c_cardinality['name_id'] = {"min": 0, "max": 1}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedID'] = (
'encrypted_id', EncryptedID)
c_cardinality['encrypted_id'] = {"min": 0, "max": 1}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}SubjectConfirmation'] = (
'subject_confirmation', [SubjectConfirmation])
c_cardinality['subject_confirmation'] = {"min": 0}
c_child_order.extend(['base_id', 'name_id', 'encrypted_id',
'subject_confirmation'])
def __init__(self,
base_id=None,
name_id=None,
encrypted_id=None,
subject_confirmation=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.base_id = base_id
self.name_id = name_id
self.encrypted_id = encrypted_id
self.subject_confirmation = subject_confirmation or []
def subject_type__from_string(xml_string):
return saml2.create_class_from_xml_string(SubjectType_, xml_string)
class Conditions(ConditionsType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:Conditions element """
c_tag = 'Conditions'
c_namespace = NAMESPACE
c_children = ConditionsType_.c_children.copy()
c_attributes = ConditionsType_.c_attributes.copy()
c_child_order = ConditionsType_.c_child_order[:]
c_cardinality = ConditionsType_.c_cardinality.copy()
def conditions_from_string(xml_string):
return saml2.create_class_from_xml_string(Conditions, xml_string)
class AuthnStatement(AuthnStatementType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AuthnStatement element """
c_tag = 'AuthnStatement'
c_namespace = NAMESPACE
c_children = AuthnStatementType_.c_children.copy()
c_attributes = AuthnStatementType_.c_attributes.copy()
c_child_order = AuthnStatementType_.c_child_order[:]
c_cardinality = AuthnStatementType_.c_cardinality.copy()
def authn_statement_from_string(xml_string):
return saml2.create_class_from_xml_string(AuthnStatement, xml_string)
class AttributeStatement(AttributeStatementType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AttributeStatement element """
c_tag = 'AttributeStatement'
c_namespace = NAMESPACE
c_children = AttributeStatementType_.c_children.copy()
c_attributes = AttributeStatementType_.c_attributes.copy()
c_child_order = AttributeStatementType_.c_child_order[:]
c_cardinality = AttributeStatementType_.c_cardinality.copy()
def attribute_statement_from_string(xml_string):
return saml2.create_class_from_xml_string(AttributeStatement, xml_string)
class Subject(SubjectType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:Subject element """
c_tag = 'Subject'
c_namespace = NAMESPACE
c_children = SubjectType_.c_children.copy()
c_attributes = SubjectType_.c_attributes.copy()
c_child_order = SubjectType_.c_child_order[:]
c_cardinality = SubjectType_.c_cardinality.copy()
def subject_from_string(xml_string):
return saml2.create_class_from_xml_string(Subject, xml_string)
#..................
# ['AuthzDecisionStatement', 'EvidenceType', 'AdviceType', 'Evidence',
# 'Assertion', 'AssertionType', 'AuthzDecisionStatementType', 'Advice']
class EvidenceType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:EvidenceType element """
c_tag = 'EvidenceType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AssertionIDRef'] = (
'assertion_id_ref', [AssertionIDRef])
c_cardinality['assertion_id_ref'] = {"min": 0}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AssertionURIRef'] = (
'assertion_uri_ref', [AssertionURIRef])
c_cardinality['assertion_uri_ref'] = {"min": 0}
c_cardinality['assertion'] = {"min": 0}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedAssertion'] = (
'encrypted_assertion', [EncryptedAssertion])
c_cardinality['encrypted_assertion'] = {"min": 0}
c_child_order.extend(['assertion_id_ref', 'assertion_uri_ref', 'assertion',
'encrypted_assertion'])
def __init__(self,
assertion_id_ref=None,
assertion_uri_ref=None,
assertion=None,
encrypted_assertion=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.assertion_id_ref = assertion_id_ref or []
self.assertion_uri_ref = assertion_uri_ref or []
self.assertion = assertion or []
self.encrypted_assertion = encrypted_assertion or []
def evidence_type__from_string(xml_string):
return saml2.create_class_from_xml_string(EvidenceType_, xml_string)
class Evidence(EvidenceType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:Evidence element """
c_tag = 'Evidence'
c_namespace = NAMESPACE
c_children = EvidenceType_.c_children.copy()
c_attributes = EvidenceType_.c_attributes.copy()
c_child_order = EvidenceType_.c_child_order[:]
c_cardinality = EvidenceType_.c_cardinality.copy()
def evidence_from_string(xml_string):
return saml2.create_class_from_xml_string(Evidence, xml_string)
class AuthzDecisionStatementType_(StatementAbstractType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AuthzDecisionStatementType
element """
c_tag = 'AuthzDecisionStatementType'
c_namespace = NAMESPACE
c_children = StatementAbstractType_.c_children.copy()
c_attributes = StatementAbstractType_.c_attributes.copy()
c_child_order = StatementAbstractType_.c_child_order[:]
c_cardinality = StatementAbstractType_.c_cardinality.copy()
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Action'] = (
'action', [Action])
c_cardinality['action'] = {"min": 1}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Evidence'] = (
'evidence', Evidence)
c_cardinality['evidence'] = {"min": 0, "max": 1}
c_attributes['Resource'] = ('resource', 'anyURI', True)
c_attributes['Decision'] = ('decision', DecisionType_, True)
c_child_order.extend(['action', 'evidence'])
def __init__(self,
action=None,
evidence=None,
resource=None,
decision=None,
text=None,
extension_elements=None,
extension_attributes=None):
StatementAbstractType_.__init__(
self, text=text, extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.action = action or []
self.evidence = evidence
self.resource = resource
self.decision = decision
def authz_decision_statement_type__from_string(xml_string):
return saml2.create_class_from_xml_string(AuthzDecisionStatementType_,
xml_string)
class AuthzDecisionStatement(AuthzDecisionStatementType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AuthzDecisionStatement
element """
c_tag = 'AuthzDecisionStatement'
c_namespace = NAMESPACE
c_children = AuthzDecisionStatementType_.c_children.copy()
c_attributes = AuthzDecisionStatementType_.c_attributes.copy()
c_child_order = AuthzDecisionStatementType_.c_child_order[:]
c_cardinality = AuthzDecisionStatementType_.c_cardinality.copy()
def authz_decision_statement_from_string(xml_string):
return saml2.create_class_from_xml_string(AuthzDecisionStatement,
xml_string)
#..................
# ['Assertion', 'AssertionType', 'AdviceType', 'Advice']
class AssertionType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AssertionType element """
c_tag = 'AssertionType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Issuer'] = ('issuer',
Issuer)
c_children['{http://www.w3.org/2000/09/xmldsig#}Signature'] = ('signature',
ds.Signature)
c_cardinality['signature'] = {"min": 0, "max": 1}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Subject'] = ('subject',
Subject)
c_cardinality['subject'] = {"min": 0, "max": 1}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Conditions'] = (
'conditions', Conditions)
c_cardinality['conditions'] = {"min": 0, "max": 1}
c_cardinality['advice'] = {"min": 0, "max": 1}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Statement'] = (
'statement', [Statement])
c_cardinality['statement'] = {"min": 0}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AuthnStatement'] = (
'authn_statement', [AuthnStatement])
c_cardinality['authn_statement'] = {"min": 0}
c_children[
'{urn:oasis:names:tc:SAML:2.0:assertion}AuthzDecisionStatement'] = (
'authz_decision_statement', [AuthzDecisionStatement])
c_cardinality['authz_decision_statement'] = {"min": 0}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AttributeStatement'] = (
'attribute_statement', [AttributeStatement])
c_cardinality['attribute_statement'] = {"min": 0}
c_attributes['Version'] = ('version', 'string', True)
c_attributes['ID'] = ('id', 'ID', True)
c_attributes['IssueInstant'] = ('issue_instant', 'dateTime', True)
c_child_order.extend(['issuer', 'signature', 'subject', 'conditions',
'advice', 'statement', 'authn_statement',
'authz_decision_statement', 'attribute_statement'])
def __init__(self,
issuer=None,
signature=None,
subject=None,
conditions=None,
advice=None,
statement=None,
authn_statement=None,
authz_decision_statement=None,
attribute_statement=None,
version=None,
id=None,
issue_instant=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.issuer = issuer
self.signature = signature
self.subject = subject
self.conditions = conditions
self.advice = advice
self.statement = statement or []
self.authn_statement = authn_statement or []
self.authz_decision_statement = authz_decision_statement or []
self.attribute_statement = attribute_statement or []
self.version = version
self.id = id
self.issue_instant = issue_instant
def verify(self):
        # An assertion with no statements MUST contain a subject element
if self.attribute_statement or self.statement or \
self.authn_statement or self.authz_decision_statement:
pass
elif not self.subject:
raise MustValueError(
"If no statement MUST contain a subject element")
if self.authn_statement and not self.subject:
raise MustValueError(
"An assertion with an AuthnStatement must contain a Subject")
return SamlBase.verify(self)
def assertion_type__from_string(xml_string):
return saml2.create_class_from_xml_string(AssertionType_, xml_string)
class Assertion(AssertionType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:Assertion element """
c_tag = 'Assertion'
c_namespace = NAMESPACE
c_children = AssertionType_.c_children.copy()
c_attributes = AssertionType_.c_attributes.copy()
c_child_order = AssertionType_.c_child_order[:]
c_cardinality = AssertionType_.c_cardinality.copy()
def assertion_from_string(xml_string):
return saml2.create_class_from_xml_string(Assertion, xml_string)
class AdviceType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AdviceType element """
c_tag = 'AdviceType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AssertionIDRef'] = (
'assertion_id_ref', [AssertionIDRef])
c_cardinality['assertion_id_ref'] = {"min": 0}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AssertionURIRef'] = (
'assertion_uri_ref', [AssertionURIRef])
c_cardinality['assertion_uri_ref'] = {"min": 0}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Assertion'] = (
'assertion', [Assertion])
c_cardinality['assertion'] = {"min": 0}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedAssertion'] = (
'encrypted_assertion', [EncryptedAssertion])
c_cardinality['encrypted_assertion'] = {"min": 0}
c_child_order.extend(['assertion_id_ref', 'assertion_uri_ref', 'assertion',
'encrypted_assertion'])
c_any = {"namespace": "##other", "processContents": "lax"}
def __init__(self,
assertion_id_ref=None,
assertion_uri_ref=None,
assertion=None,
encrypted_assertion=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.assertion_id_ref = assertion_id_ref or []
self.assertion_uri_ref = assertion_uri_ref or []
self.assertion = assertion or []
self.encrypted_assertion = encrypted_assertion or []
def advice_type__from_string(xml_string):
return saml2.create_class_from_xml_string(AdviceType_, xml_string)
class Advice(AdviceType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:Advice element """
c_tag = 'Advice'
c_namespace = NAMESPACE
c_children = AdviceType_.c_children.copy()
c_attributes = AdviceType_.c_attributes.copy()
c_child_order = AdviceType_.c_child_order[:]
c_cardinality = AdviceType_.c_cardinality.copy()
def advice_from_string(xml_string):
return saml2.create_class_from_xml_string(Advice, xml_string)
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
EvidenceType_.c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Assertion'] = (
'assertion', [Assertion])
Evidence.c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Assertion'] = (
'assertion', [Assertion])
AssertionType_.c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Advice'] = (
'advice', Advice)
Assertion.c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Advice'] = (
'advice', Advice)
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
AG_IDNameQualifiers = [
('NameQualifier', 'string', False),
('SPNameQualifier', 'string', False),
]
ELEMENT_FROM_STRING = {
BaseID.c_tag: base_id_from_string,
NameID.c_tag: name_id_from_string,
NameIDType_.c_tag: name_id_type__from_string,
EncryptedElementType_.c_tag: encrypted_element_type__from_string,
EncryptedID.c_tag: encrypted_id_from_string,
Issuer.c_tag: issuer_from_string,
AssertionIDRef.c_tag: assertion_id_ref_from_string,
AssertionURIRef.c_tag: assertion_uri_ref_from_string,
Assertion.c_tag: assertion_from_string,
AssertionType_.c_tag: assertion_type__from_string,
Subject.c_tag: subject_from_string,
SubjectType_.c_tag: subject_type__from_string,
SubjectConfirmation.c_tag: subject_confirmation_from_string,
SubjectConfirmationType_.c_tag: subject_confirmation_type__from_string,
SubjectConfirmationData.c_tag: subject_confirmation_data_from_string,
SubjectConfirmationDataType_.c_tag:
subject_confirmation_data_type__from_string,
KeyInfoConfirmationDataType_.c_tag:
key_info_confirmation_data_type__from_string,
Conditions.c_tag: conditions_from_string,
ConditionsType_.c_tag: conditions_type__from_string,
Condition.c_tag: condition_from_string,
AudienceRestriction.c_tag: audience_restriction_from_string,
AudienceRestrictionType_.c_tag: audience_restriction_type__from_string,
Audience.c_tag: audience_from_string,
OneTimeUse.c_tag: one_time_use_from_string,
OneTimeUseType_.c_tag: one_time_use_type__from_string,
ProxyRestriction.c_tag: proxy_restriction_from_string,
ProxyRestrictionType_.c_tag: proxy_restriction_type__from_string,
Advice.c_tag: advice_from_string,
AdviceType_.c_tag: advice_type__from_string,
EncryptedAssertion.c_tag: encrypted_assertion_from_string,
Statement.c_tag: statement_from_string,
AuthnStatement.c_tag: authn_statement_from_string,
AuthnStatementType_.c_tag: authn_statement_type__from_string,
SubjectLocality.c_tag: subject_locality_from_string,
SubjectLocalityType_.c_tag: subject_locality_type__from_string,
AuthnContext.c_tag: authn_context_from_string,
AuthnContextType_.c_tag: authn_context_type__from_string,
AuthnContextClassRef.c_tag: authn_context_class_ref_from_string,
AuthnContextDeclRef.c_tag: authn_context_decl_ref_from_string,
AuthnContextDecl.c_tag: authn_context_decl_from_string,
AuthenticatingAuthority.c_tag: authenticating_authority_from_string,
AuthzDecisionStatement.c_tag: authz_decision_statement_from_string,
AuthzDecisionStatementType_.c_tag:
authz_decision_statement_type__from_string,
DecisionType_.c_tag: decision_type__from_string,
Action.c_tag: action_from_string,
ActionType_.c_tag: action_type__from_string,
Evidence.c_tag: evidence_from_string,
EvidenceType_.c_tag: evidence_type__from_string,
AttributeStatement.c_tag: attribute_statement_from_string,
AttributeStatementType_.c_tag: attribute_statement_type__from_string,
Attribute.c_tag: attribute_from_string,
AttributeType_.c_tag: attribute_type__from_string,
AttributeValue.c_tag: attribute_value_from_string,
EncryptedAttribute.c_tag: encrypted_attribute_from_string,
}
ELEMENT_BY_TAG = {
'BaseID': BaseID,
'NameID': NameID,
'NameIDType': NameIDType_,
'EncryptedElementType': EncryptedElementType_,
'EncryptedID': EncryptedID,
'Issuer': Issuer,
'AssertionIDRef': AssertionIDRef,
'AssertionURIRef': AssertionURIRef,
'Assertion': Assertion,
'AssertionType': AssertionType_,
'Subject': Subject,
'SubjectType': SubjectType_,
'SubjectConfirmation': SubjectConfirmation,
'SubjectConfirmationType': SubjectConfirmationType_,
'SubjectConfirmationData': SubjectConfirmationData,
'SubjectConfirmationDataType': SubjectConfirmationDataType_,
'KeyInfoConfirmationDataType': KeyInfoConfirmationDataType_,
'Conditions': Conditions,
'ConditionsType': ConditionsType_,
'Condition': Condition,
'AudienceRestriction': AudienceRestriction,
'AudienceRestrictionType': AudienceRestrictionType_,
'Audience': Audience,
'OneTimeUse': OneTimeUse,
'OneTimeUseType': OneTimeUseType_,
'ProxyRestriction': ProxyRestriction,
'ProxyRestrictionType': ProxyRestrictionType_,
'Advice': Advice,
'AdviceType': AdviceType_,
'EncryptedAssertion': EncryptedAssertion,
'Statement': Statement,
'AuthnStatement': AuthnStatement,
'AuthnStatementType': AuthnStatementType_,
'SubjectLocality': SubjectLocality,
'SubjectLocalityType': SubjectLocalityType_,
'AuthnContext': AuthnContext,
'AuthnContextType': AuthnContextType_,
'AuthnContextClassRef': AuthnContextClassRef,
'AuthnContextDeclRef': AuthnContextDeclRef,
'AuthnContextDecl': AuthnContextDecl,
'AuthenticatingAuthority': AuthenticatingAuthority,
'AuthzDecisionStatement': AuthzDecisionStatement,
'AuthzDecisionStatementType': AuthzDecisionStatementType_,
'DecisionType': DecisionType_,
'Action': Action,
'ActionType': ActionType_,
'Evidence': Evidence,
'EvidenceType': EvidenceType_,
'AttributeStatement': AttributeStatement,
'AttributeStatementType': AttributeStatementType_,
'Attribute': Attribute,
'AttributeType': AttributeType_,
'AttributeValue': AttributeValue,
'EncryptedAttribute': EncryptedAttribute,
'BaseIDAbstractType': BaseIDAbstractType_,
'ConditionAbstractType': ConditionAbstractType_,
'StatementAbstractType': StatementAbstractType_,
}
def factory(tag, **kwargs):
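    """Instantiate and return the SAML element class registered for `tag`."""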
return ELEMENT_BY_TAG[tag](**kwargs)
| [((14076, 14102), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (14100, 14102), False, 'from saml2 import SamlBase\n'), ((14122, 14150), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (14148, 14150), False, 'from saml2 import SamlBase\n'), ((14217, 14246), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (14244, 14246), False, 'from saml2 import SamlBase\n'), ((15125, 15151), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (15149, 15151), False, 'from saml2 import SamlBase\n'), ((15171, 15199), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (15197, 15199), False, 'from saml2 import SamlBase\n'), ((15266, 15295), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (15293, 15295), False, 'from saml2 import SamlBase\n'), ((16288, 16347), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['NameIDType_', 'xml_string'], {}), '(NameIDType_, xml_string)\n', (16322, 16347), False, 'import saml2\n'), ((16556, 16582), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (16580, 16582), False, 'from saml2 import SamlBase\n'), ((16602, 16630), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (16628, 16630), False, 'from saml2 import SamlBase\n'), ((16697, 16726), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (16724, 16726), False, 'from saml2 import SamlBase\n'), ((17663, 17732), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['EncryptedElementType_', 'xml_string'], {}), '(EncryptedElementType_, xml_string)\n', (17697, 17732), False, 'import saml2\n'), ((18200, 18259), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['EncryptedID', 'xml_string'], {}), '(EncryptedID, xml_string)\n', (18234, 18259), False, 'import saml2\n'), ((18656, 18710), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['Issuer', 'xml_string'], {}), '(Issuer, xml_string)\n', (18690, 18710), False, 'import saml2\n'), ((18934, 18960), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (18958, 18960), False, 'from saml2 import SamlBase\n'), ((18980, 19008), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (19006, 19008), False, 'from saml2 import SamlBase\n'), ((19075, 19104), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (19102, 19104), False, 'from saml2 import SamlBase\n'), ((19164, 19226), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AssertionIDRef', 'xml_string'], {}), '(AssertionIDRef, xml_string)\n', (19198, 19226), False, 'import saml2\n'), ((19453, 19479), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (19477, 19479), False, 'from saml2 import SamlBase\n'), ((19499, 19527), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (19525, 19527), False, 'from saml2 import SamlBase\n'), ((19594, 19623), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (19621, 19623), False, 'from saml2 import SamlBase\n'), ((19684, 19747), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AssertionURIRef', 'xml_string'], {}), '(AssertionURIRef, xml_string)\n', 
(19718, 19747), False, 'import saml2\n'), ((19977, 20003), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (20001, 20003), False, 'from saml2 import SamlBase\n'), ((20023, 20051), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (20049, 20051), False, 'from saml2 import SamlBase\n'), ((20118, 20147), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (20145, 20147), False, 'from saml2 import SamlBase\n'), ((21458, 21534), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['SubjectConfirmationDataType_', 'xml_string'], {}), '(SubjectConfirmationDataType_, xml_string)\n', (21492, 21534), False, 'import saml2\n'), ((21810, 21836), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (21834, 21836), False, 'from saml2 import SamlBase\n'), ((21856, 21884), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (21882, 21884), False, 'from saml2 import SamlBase\n'), ((21951, 21980), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (21978, 21980), False, 'from saml2 import SamlBase\n'), ((22707, 22783), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['KeyInfoConfirmationDataType_', 'xml_string'], {}), '(KeyInfoConfirmationDataType_, xml_string)\n', (22741, 22783), False, 'import saml2\n'), ((23041, 23067), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (23065, 23067), False, 'from saml2 import SamlBase\n'), ((23087, 23115), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (23113, 23115), False, 'from saml2 import SamlBase\n'), ((23182, 23211), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (23209, 23211), False, 'from saml2 import SamlBase\n'), ((23417, 23443), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (23441, 23443), False, 'from saml2 import SamlBase\n'), ((23463, 23491), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (23489, 23491), False, 'from saml2 import SamlBase\n'), ((23558, 23587), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (23585, 23587), False, 'from saml2 import SamlBase\n'), ((23639, 23695), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['Audience', 'xml_string'], {}), '(Audience, xml_string)\n', (23673, 23695), False, 'import saml2\n'), ((24184, 24247), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['OneTimeUseType_', 'xml_string'], {}), '(OneTimeUseType_, xml_string)\n', (24218, 24247), False, 'import saml2\n'), ((25504, 25573), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['ProxyRestrictionType_', 'xml_string'], {}), '(ProxyRestrictionType_, xml_string)\n', (25538, 25573), False, 'import saml2\n'), ((26069, 26135), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['EncryptedAssertion', 'xml_string'], {}), '(EncryptedAssertion, xml_string)\n', (26103, 26135), False, 'import saml2\n'), ((26347, 26373), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (26371, 26373), False, 'from saml2 import SamlBase\n'), ((26393, 26421), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (26419, 26421), False, 'from saml2 import SamlBase\n'), 
((26488, 26517), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (26515, 26517), False, 'from saml2 import SamlBase\n'), ((26719, 26745), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (26743, 26745), False, 'from saml2 import SamlBase\n'), ((26765, 26793), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (26791, 26793), False, 'from saml2 import SamlBase\n'), ((26860, 26889), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (26887, 26889), False, 'from saml2 import SamlBase\n'), ((27543, 27611), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['SubjectLocalityType_', 'xml_string'], {}), '(SubjectLocalityType_, xml_string)\n', (27577, 27611), False, 'import saml2\n'), ((27857, 27883), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (27881, 27883), False, 'from saml2 import SamlBase\n'), ((27903, 27931), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (27929, 27931), False, 'from saml2 import SamlBase\n'), ((27998, 28027), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (28025, 28027), False, 'from saml2 import SamlBase\n'), ((28094, 28162), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AuthnContextClassRef', 'xml_string'], {}), '(AuthnContextClassRef, xml_string)\n', (28128, 28162), False, 'import saml2\n'), ((28401, 28427), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (28425, 28427), False, 'from saml2 import SamlBase\n'), ((28447, 28475), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (28473, 28475), False, 'from saml2 import SamlBase\n'), ((28542, 28571), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (28569, 28571), False, 'from saml2 import SamlBase\n'), ((28637, 28704), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AuthnContextDeclRef', 'xml_string'], {}), '(AuthnContextDeclRef, xml_string)\n', (28671, 28704), False, 'import saml2\n'), ((28935, 28961), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (28959, 28961), False, 'from saml2 import SamlBase\n'), ((28981, 29009), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (29007, 29009), False, 'from saml2 import SamlBase\n'), ((29076, 29105), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (29103, 29105), False, 'from saml2 import SamlBase\n'), ((29167, 29231), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AuthnContextDecl', 'xml_string'], {}), '(AuthnContextDecl, xml_string)\n', (29201, 29231), False, 'import saml2\n'), ((29486, 29512), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (29510, 29512), False, 'from saml2 import SamlBase\n'), ((29532, 29560), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (29558, 29560), False, 'from saml2 import SamlBase\n'), ((29627, 29656), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (29654, 29656), False, 'from saml2 import SamlBase\n'), ((29724, 29795), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AuthenticatingAuthority', 'xml_string'], {}), '(AuthenticatingAuthority, 
xml_string)\n', (29758, 29795), False, 'import saml2\n'), ((30166, 30192), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (30190, 30192), False, 'from saml2 import SamlBase\n'), ((30212, 30240), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (30238, 30240), False, 'from saml2 import SamlBase\n'), ((30307, 30336), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (30334, 30336), False, 'from saml2 import SamlBase\n'), ((30394, 30455), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['DecisionType_', 'xml_string'], {}), '(DecisionType_, xml_string)\n', (30428, 30455), False, 'import saml2\n'), ((30668, 30694), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (30692, 30694), False, 'from saml2 import SamlBase\n'), ((30714, 30742), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (30740, 30742), False, 'from saml2 import SamlBase\n'), ((30809, 30838), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (30836, 30838), False, 'from saml2 import SamlBase\n'), ((31366, 31425), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['ActionType_', 'xml_string'], {}), '(ActionType_, xml_string)\n', (31400, 31425), False, 'import saml2\n'), ((31660, 31686), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (31684, 31686), False, 'from saml2 import SamlBase\n'), ((31706, 31734), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (31732, 31734), False, 'from saml2 import SamlBase\n'), ((31801, 31830), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (31828, 31830), False, 'from saml2 import SamlBase\n'), ((31889, 31951), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AttributeValue', 'xml_string'], {}), '(AttributeValue, xml_string)\n', (31923, 31951), False, 'import saml2\n'), ((32447, 32513), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['EncryptedAttribute', 'xml_string'], {}), '(EncryptedAttribute, xml_string)\n', (32481, 32513), False, 'import saml2\n'), ((32951, 33005), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['BaseID', 'xml_string'], {}), '(BaseID, xml_string)\n', (32985, 33005), False, 'import saml2\n'), ((33792, 33846), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['NameID', 'xml_string'], {}), '(NameID, xml_string)\n', (33826, 33846), False, 'import saml2\n'), ((34402, 34473), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['SubjectConfirmationData', 'xml_string'], {}), '(SubjectConfirmationData, xml_string)\n', (34436, 34473), False, 'import saml2\n'), ((34983, 35040), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['Condition', 'xml_string'], {}), '(Condition, xml_string)\n', (35017, 35040), False, 'import saml2\n'), ((36186, 36258), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AudienceRestrictionType_', 'xml_string'], {}), '(AudienceRestrictionType_, xml_string)\n', (36220, 36258), False, 'import saml2\n'), ((36739, 36797), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['OneTimeUse', 'xml_string'], {}), '(OneTimeUse, xml_string)\n', (36773, 36797), False, 'import saml2\n'), ((37285, 
37349), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['ProxyRestriction', 'xml_string'], {}), '(ProxyRestriction, xml_string)\n', (37319, 37349), False, 'import saml2\n'), ((37813, 37870), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['Statement', 'xml_string'], {}), '(Statement, xml_string)\n', (37847, 37870), False, 'import saml2\n'), ((38756, 38819), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['SubjectLocality', 'xml_string'], {}), '(SubjectLocality, xml_string)\n', (38790, 38819), False, 'import saml2\n'), ((39012, 39038), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (39036, 39038), False, 'from saml2 import SamlBase\n'), ((39058, 39086), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (39084, 39086), False, 'from saml2 import SamlBase\n'), ((39153, 39182), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (39180, 39182), False, 'from saml2 import SamlBase\n'), ((41242, 41307), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AuthnContextType_', 'xml_string'], {}), '(AuthnContextType_, xml_string)\n', (41276, 41307), False, 'import saml2\n'), ((41704, 41758), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['Action', 'xml_string'], {}), '(Action, xml_string)\n', (41738, 41758), False, 'import saml2\n'), ((41942, 41968), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (41966, 41968), False, 'from saml2 import SamlBase\n'), ((41988, 42016), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (42014, 42016), False, 'from saml2 import SamlBase\n'), ((42083, 42112), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (42110, 42112), False, 'from saml2 import SamlBase\n'), ((43557, 43619), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AttributeType_', 'xml_string'], {}), '(AttributeType_, xml_string)\n', (43591, 43619), False, 'import saml2\n'), ((43837, 43863), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (43861, 43863), False, 'from saml2 import SamlBase\n'), ((43883, 43911), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (43909, 43911), False, 'from saml2 import SamlBase\n'), ((43978, 44007), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (44005, 44007), False, 'from saml2 import SamlBase\n'), ((45786, 45858), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['SubjectConfirmationType_', 'xml_string'], {}), '(SubjectConfirmationType_, xml_string)\n', (45820, 45858), False, 'import saml2\n'), ((46419, 46486), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AudienceRestriction', 'xml_string'], {}), '(AudienceRestriction, xml_string)\n', (46453, 46486), False, 'import saml2\n'), ((46938, 46998), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AuthnContext', 'xml_string'], {}), '(AuthnContext, xml_string)\n', (46972, 46998), False, 'import saml2\n'), ((47422, 47479), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['Attribute', 'xml_string'], {}), '(Attribute, xml_string)\n', (47456, 47479), False, 'import saml2\n'), ((47994, 48061), 'saml2.create_class_from_xml_string', 
'saml2.create_class_from_xml_string', (['SubjectConfirmation', 'xml_string'], {}), '(SubjectConfirmation, xml_string)\n', (48028, 48061), False, 'import saml2\n'), ((48248, 48274), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (48272, 48274), False, 'from saml2 import SamlBase\n'), ((48294, 48322), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (48320, 48322), False, 'from saml2 import SamlBase\n'), ((48389, 48418), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (48416, 48418), False, 'from saml2 import SamlBase\n'), ((50663, 50726), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['ConditionsType_', 'xml_string'], {}), '(ConditionsType_, xml_string)\n', (50697, 50726), False, 'import saml2\n'), ((52605, 52672), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AuthnStatementType_', 'xml_string'], {}), '(AuthnStatementType_, xml_string)\n', (52639, 52672), False, 'import saml2\n'), ((54092, 54163), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AttributeStatementType_', 'xml_string'], {}), '(AttributeStatementType_, xml_string)\n', (54126, 54163), False, 'import saml2\n'), ((54387, 54413), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (54411, 54413), False, 'from saml2 import SamlBase\n'), ((54433, 54461), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (54459, 54461), False, 'from saml2 import SamlBase\n'), ((54528, 54557), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (54555, 54557), False, 'from saml2 import SamlBase\n'), ((56147, 56207), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['SubjectType_', 'xml_string'], {}), '(SubjectType_, xml_string)\n', (56181, 56207), False, 'import saml2\n'), ((56640, 56698), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['Conditions', 'xml_string'], {}), '(Conditions, xml_string)\n', (56674, 56698), False, 'import saml2\n'), ((57168, 57230), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AuthnStatement', 'xml_string'], {}), '(AuthnStatement, xml_string)\n', (57202, 57230), False, 'import saml2\n'), ((57736, 57802), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AttributeStatement', 'xml_string'], {}), '(AttributeStatement, xml_string)\n', (57770, 57802), False, 'import saml2\n'), ((58208, 58263), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['Subject', 'xml_string'], {}), '(Subject, xml_string)\n', (58242, 58263), False, 'import saml2\n'), ((58607, 58633), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (58631, 58633), False, 'from saml2 import SamlBase\n'), ((58653, 58681), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (58679, 58681), False, 'from saml2 import SamlBase\n'), ((58748, 58777), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (58775, 58777), False, 'from saml2 import SamlBase\n'), ((60259, 60320), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['EvidenceType_', 'xml_string'], {}), '(EvidenceType_, xml_string)\n', (60293, 60320), False, 'import saml2\n'), ((60735, 60791), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', 
(['Evidence', 'xml_string'], {}), '(Evidence, xml_string)\n', (60769, 60791), False, 'import saml2\n'), ((62359, 62434), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AuthzDecisionStatementType_', 'xml_string'], {}), '(AuthzDecisionStatementType_, xml_string)\n', (62393, 62434), False, 'import saml2\n'), ((63027, 63097), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AuthzDecisionStatement', 'xml_string'], {}), '(AuthzDecisionStatement, xml_string)\n', (63061, 63097), False, 'import saml2\n'), ((63404, 63430), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (63428, 63430), False, 'from saml2 import SamlBase\n'), ((63450, 63478), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (63476, 63478), False, 'from saml2 import SamlBase\n'), ((63545, 63574), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (63572, 63574), False, 'from saml2 import SamlBase\n'), ((67340, 67402), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AssertionType_', 'xml_string'], {}), '(AssertionType_, xml_string)\n', (67374, 67402), False, 'import saml2\n'), ((67826, 67883), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['Assertion', 'xml_string'], {}), '(Assertion, xml_string)\n', (67860, 67883), False, 'import saml2\n'), ((68058, 68084), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (68082, 68084), False, 'from saml2 import SamlBase\n'), ((68104, 68132), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (68130, 68132), False, 'from saml2 import SamlBase\n'), ((68199, 68228), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (68226, 68228), False, 'from saml2 import SamlBase\n'), ((69876, 69935), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AdviceType_', 'xml_string'], {}), '(AdviceType_, xml_string)\n', (69910, 69935), False, 'import saml2\n'), ((70332, 70386), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['Advice', 'xml_string'], {}), '(Advice, xml_string)\n', (70366, 70386), False, 'import saml2\n'), ((6408, 6528), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'None', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=None, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (6425, 6528), False, 'from saml2 import SamlBase\n'), ((12743, 12783), 'saml2.SamlBase.__setattr__', 'SamlBase.__setattr__', (['self', '"""text"""', 'text'], {}), "(self, 'text', text)\n", (12763, 12783), False, 'from saml2 import SamlBase\n'), ((14622, 14742), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (14639, 14742), False, 'from saml2 import SamlBase\n'), ((15867, 15987), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (15884, 15987), False, 'from saml2 import SamlBase\n'), ((17308, 
17428), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (17325, 17428), False, 'from saml2 import SamlBase\n'), ((20994, 21114), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (21011, 21114), False, 'from saml2 import SamlBase\n'), ((22398, 22518), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (22415, 22518), False, 'from saml2 import SamlBase\n'), ((27219, 27339), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (27236, 27339), False, 'from saml2 import SamlBase\n'), ((31081, 31201), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (31098, 31201), False, 'from saml2 import SamlBase\n'), ((40449, 40569), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (40466, 40569), False, 'from saml2 import SamlBase\n'), ((41158, 41179), 'saml2.SamlBase.verify', 'SamlBase.verify', (['self'], {}), '(self)\n', (41173, 41179), False, 'from saml2 import SamlBase\n'), ((42896, 43016), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (42913, 43016), False, 'from saml2 import SamlBase\n'), ((43457, 43498), 'saml2.SamlBase.harvest_element_tree', 'SamlBase.harvest_element_tree', (['self', 'tree'], {}), '(self, tree)\n', (43486, 43498), False, 'from saml2 import SamlBase\n'), ((45323, 45443), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (45340, 45443), False, 'from saml2 import SamlBase\n'), ((49768, 49888), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (49785, 49888), False, 'from saml2 import SamlBase\n'), ((50582, 50603), 'saml2.SamlBase.verify', 'SamlBase.verify', (['self'], {}), 
'(self)\n', (50597, 50603), False, 'from saml2 import SamlBase\n'), ((55730, 55850), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (55747, 55850), False, 'from saml2 import SamlBase\n'), ((59793, 59913), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (59810, 59913), False, 'from saml2 import SamlBase\n'), ((66042, 66162), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (66059, 66162), False, 'from saml2 import SamlBase\n'), ((67260, 67281), 'saml2.SamlBase.verify', 'SamlBase.verify', (['self'], {}), '(self)\n', (67275, 67281), False, 'from saml2 import SamlBase\n'), ((69412, 69532), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (69429, 69532), False, 'from saml2 import SamlBase\n'), ((7055, 7093), 'saml2.SamlBase.__setattr__', 'SamlBase.__setattr__', (['self', 'key', 'value'], {}), '(self, key, value)\n', (7075, 7093), False, 'from saml2 import SamlBase\n'), ((7594, 7615), 'saml2.SamlBase.verify', 'SamlBase.verify', (['self'], {}), '(self)\n', (7609, 7615), False, 'from saml2 import SamlBase\n'), ((67150, 67226), 'saml2.validate.MustValueError', 'MustValueError', (['"""An assertion with an AuthnStatement must contain a Subject"""'], {}), "('An assertion with an AuthnStatement must contain a Subject')\n", (67164, 67226), False, 'from saml2.validate import valid_ipv4, MustValueError\n'), ((38411, 38435), 'saml2.validate.valid_ipv4', 'valid_ipv4', (['self.address'], {}), '(self.address)\n', (38421, 38435), False, 'from saml2.validate import valid_ipv4, MustValueError\n'), ((38439, 38463), 'saml2.validate.valid_ipv6', 'valid_ipv6', (['self.address'], {}), '(self.address)\n', (38449, 38463), False, 'from saml2.validate import valid_ipv6\n'), ((38526, 38573), 'saml2.validate.ShouldValueError', 'ShouldValueError', (['"""Not an IPv4 or IPv6 address"""'], {}), "('Not an IPv4 or IPv6 address')\n", (38542, 38573), False, 'from saml2.validate import ShouldValueError\n'), ((38614, 38646), 'saml2.validate.valid_domain_name', 'valid_domain_name', (['self.dns_name'], {}), '(self.dns_name)\n', (38631, 38646), False, 'from saml2.validate import valid_domain_name\n'), ((66995, 67059), 'saml2.validate.MustValueError', 'MustValueError', (['"""If no statement MUST contain a subject element"""'], {}), "('If no statement MUST contain a subject element')\n", (67009, 67059), False, 'from saml2.validate import valid_ipv4, MustValueError\n')] |
PierreExeter/custom_gym_envs | ROS_packages/custom_ROS_envs/turtlebot2_maze_env/src/turtlebot2_maze_random.py | 2b6a1c16a4198c8d9fa64f10fe09a041826ac81a | #!/usr/bin/env python
import gym
import rospy
from openai_ros.openai_ros_common import StartOpenAI_ROS_Environment
# initialise environment
rospy.init_node('turtlebot2_maze_random', anonymous=True, log_level=rospy.WARN)
task_and_robot_environment_name = rospy.get_param('/turtlebot2/task_and_robot_environment_name')
env = StartOpenAI_ROS_Environment(task_and_robot_environment_name)
print("Environment: ", env)
print("Action space: ", env.action_space)
# print(env.action_space.high)
# print(env.action_space.low)
print("Observation space: ", env.observation_space)
print(env.observation_space.high)
print(env.observation_space.low)
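# roll out 20 episodes of uniformly random actions to exercise the environment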
for episode in range(20):
env.reset()
for t in range(100):
action = env.action_space.sample()
obs, reward, done, info = env.step(action)
print("episode: ", episode)
print("timestep: ", t)
print("obs: ", obs)
print("action:", action)
print("reward: ", reward)
print("done: ", done)
print("info: ", info)
if done:
print("Episode {} finished after {} timesteps".format(episode, t+1))
break
env.close() | [((142, 221), 'rospy.init_node', 'rospy.init_node', (['"""turtlebot2_maze_random"""'], {'anonymous': '(True)', 'log_level': 'rospy.WARN'}), "('turtlebot2_maze_random', anonymous=True, log_level=rospy.WARN)\n", (157, 221), False, 'import rospy\n'), ((256, 318), 'rospy.get_param', 'rospy.get_param', (['"""/turtlebot2/task_and_robot_environment_name"""'], {}), "('/turtlebot2/task_and_robot_environment_name')\n", (271, 318), False, 'import rospy\n'), ((325, 385), 'openai_ros.openai_ros_common.StartOpenAI_ROS_Environment', 'StartOpenAI_ROS_Environment', (['task_and_robot_environment_name'], {}), '(task_and_robot_environment_name)\n', (352, 385), False, 'from openai_ros.openai_ros_common import StartOpenAI_ROS_Environment\n')] |
jacobchh/Sudoku-Solver | solver.py | 946a954e8eda234760872c55fcd2354dc0a8a4f9 | import numpy as np
board = np.zeros(shape=(9, 9))
count = 0
def solve():
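    """Fill the board in place by depth-first backtracking; return True on success, False if the current partial board is unsolvable."""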
global count
count += 1
if count % 1000 == 0:
print('\rCurrent number of computations made:', count, end='')
freePos = find()
if freePos is None:
return True
i = freePos[0]
j = freePos[1]
for w in range(1, 10):
if possible(w, freePos):
board[i][j] = w
if solve():
return True
board[i][j] = 0
return False
def find():
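    """Return [row, col] of the first empty cell in row-major order, or None if the board is full."""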
for i in range(9):
for j in range(9):
if board[i][j] == 0:
return [i, j]
return None
def possible(value, position):
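    """Return True if `value` can occupy `position` without repeating in its row, column, or 3x3 box."""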
    # position = [i, j] list of row and column indices
i = position[0]
j = position[1]
# checks row and column for repeat value
if (value in board[:, j]) or (value in board[i]):
return False
# reset to i,j - top left square
i = (i // 3) * 3
j = (j // 3) * 3
# check all squares in square
for n in range(i, i + 3):
for m in range(j, j + 3):
if board[n][m] == value:
return False
return True
def change(position):
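    """Place the first digit absent from the row and column of `position` (the 3x3 box is not checked); return True if a digit was placed."""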
    # position = [i, j] list of row and column indices
i = position[0]
j = position[1]
for w in range(1, 10):
if w not in board[:, j] and w not in board[i]:
board[i][j] = w
return True
return False
def initialize():
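    """Read the 81 starting digits (row-major, 0 for blank) and fill the board."""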
print("Please enter the values on the board starting from left to right, top to bottom, 0 for blank")
integerChunk = input("Numbers: ")
pos = 0
for i in range(9):
for j in range(9):
board[i][j] = int(integerChunk[pos])
pos += 1
def displayBoard():
    # Print the 9x9 board, marking the 3x3 boxes with | and - separators.
    for i in range(9):
        for j in range(9):
            if board[i][j] == 0:
                print("  ", end="")
            else:
                print("%d " % board[i][j], end="")
            if (j == 2) or (j == 5):
                print("| ", end="")
            if j == 8:
                print("")
        if (i == 2) or (i == 5):
            print("- - - - - - - - - - -")
def main():
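    """Read a puzzle, confirm it with the user, solve it, and display the result."""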
initialize()
print("Is this the correct board? Press enter to continue or 'q' to exit program.")
displayBoard()
response = input()
if response == "q":
exit()
print("---------------SOLVING---------------\n")
solve()
print("\r\rSOLUTION")
displayBoard()
print("\nTotal number of computations:", count)
if __name__ == "__main__":
main()
| [((28, 50), 'numpy.zeros', 'np.zeros', ([], {'shape': '(9, 9)'}), '(shape=(9, 9))\n', (36, 50), True, 'import numpy as np\n')] |
johny-c/theano_exercises | 01_basics/01_building_expressions/02_vector_mat_soln.py | 7fd43315bf7c475a6f218091316c0bd34e0688c4 | import numpy as np
from theano import function
import theano.tensor as T
def make_vector():
"""
Returns a new Theano vector.
"""
return T.vector()
def make_matrix():
"""
Returns a new Theano matrix.
"""
return T.matrix()
def elemwise_mul(a, b):
"""
a: A theano matrix
b: A theano matrix
Returns the elementwise product of a and b
"""
return a * b
def matrix_vector_mul(a, b):
"""
a: A theano matrix
b: A theano vector
Returns the matrix-vector product of a and b
"""
return T.dot(a, b)
if __name__ == "__main__":
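    # Build the graph f(a, b, d) = matrix-vector product d . (a * b) and check it against NumPy.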
a = make_vector()
b = make_vector()
c = elemwise_mul(a, b)
d = make_matrix()
e = matrix_vector_mul(d, c)
f = function([a, b, d], e)
rng = np.random.RandomState([1, 2, 3])
a_value = rng.randn(5).astype(a.dtype)
b_value = rng.rand(5).astype(b.dtype)
c_value = a_value * b_value
d_value = rng.randn(5, 5).astype(d.dtype)
expected = np.dot(d_value, c_value)
actual = f(a_value, b_value, d_value)
assert np.allclose(actual, expected)
print "SUCCESS!"
| [] |
bopopescu/nova-8 | nova/api/openstack/compute/used_limits.py | 768d7cc0a632e1a880f00c5840c1ec8051e161be | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import api_version_request
from nova.api.openstack.api_version_request \
import MIN_WITHOUT_PROXY_API_SUPPORT_VERSION
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.policies import used_limits as ul_policies
from nova import quota
QUOTAS = quota.QUOTAS
class UsedLimitsController(wsgi.Controller):
@staticmethod
def _reserved(req):
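        """Return the integer value of the 'reserved' query parameter, or
        False when it is missing or not an integer."""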
try:
return int(req.GET['reserved'])
except (ValueError, KeyError):
return False
@wsgi.extends
@extensions.expected_errors(())
def index(self, req, resp_obj):
context = req.environ['nova.context']
project_id = self._project_id(context, req)
quotas = QUOTAS.get_project_quotas(context, project_id, usages=True)
if api_version_request.is_supported(
req, min_version=MIN_WITHOUT_PROXY_API_SUPPORT_VERSION):
quota_map = {
'totalRAMUsed': 'ram',
'totalCoresUsed': 'cores',
'totalInstancesUsed': 'instances',
'totalServerGroupsUsed': 'server_groups',
}
else:
quota_map = {
'totalRAMUsed': 'ram',
'totalCoresUsed': 'cores',
'totalInstancesUsed': 'instances',
'totalFloatingIpsUsed': 'floating_ips',
'totalSecurityGroupsUsed': 'security_groups',
'totalServerGroupsUsed': 'server_groups',
}
used_limits = {}
for display_name, key in quota_map.items():
if key in quotas:
reserved = (quotas[key]['reserved']
if self._reserved(req) else 0)
used_limits[display_name] = quotas[key]['in_use'] + reserved
resp_obj.obj['limits']['absolute'].update(used_limits)
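        # For example, the absolute limits might gain entries such as
        # {'totalRAMUsed': 2048, 'totalCoresUsed': 4} (illustrative values).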
def _project_id(self, context, req):
if 'tenant_id' in req.GET:
tenant_id = req.GET.get('tenant_id')
target = {
'project_id': tenant_id,
'user_id': context.user_id
}
context.can(ul_policies.BASE_POLICY_NAME, target)
return tenant_id
return context.project_id
| [((1173, 1203), 'nova.api.openstack.extensions.expected_errors', 'extensions.expected_errors', (['()'], {}), '(())\n', (1199, 1203), False, 'from nova.api.openstack import extensions\n'), ((1426, 1519), 'nova.api.openstack.api_version_request.is_supported', 'api_version_request.is_supported', (['req'], {'min_version': 'MIN_WITHOUT_PROXY_API_SUPPORT_VERSION'}), '(req, min_version=\n MIN_WITHOUT_PROXY_API_SUPPORT_VERSION)\n', (1458, 1519), False, 'from nova.api.openstack import api_version_request\n')] |
howards11/agents | tf_agents/bandits/agents/examples/v2/trainer.py | 8d5627d9b9c3680468a63564c25a4d82fa1befb0 | # coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Generic TF-Agents training function for bandits."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import logging
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tf_agents.drivers import dynamic_step_driver
from tf_agents.eval import metric_utils
from tf_agents.metrics import tf_metrics
from tf_agents.policies import policy_saver
from tf_agents.replay_buffers import tf_uniform_replay_buffer
tf = tf.compat.v2
AGENT_CHECKPOINT_NAME = 'agent'
STEP_CHECKPOINT_NAME = 'step'
CHECKPOINT_FILE_PREFIX = 'ckpt'
def get_replay_buffer(data_spec,
batch_size,
steps_per_loop):
"""Return a `TFUniformReplayBuffer` for the given `agent`."""
buf = tf_uniform_replay_buffer.TFUniformReplayBuffer(
data_spec=data_spec,
batch_size=batch_size,
max_length=steps_per_loop)
return buf
def set_expected_shape(experience, num_steps):
def set_time_dim(input_tensor, steps):
tensor_shape = input_tensor.shape.as_list()
tensor_shape[1] = steps
input_tensor.set_shape(tensor_shape)
tf.nest.map_structure(lambda t: set_time_dim(t, num_steps), experience)
def get_training_loop_fn(driver, replay_buffer, agent, steps):
"""Returns a `tf.function` that runs the driver and training loops.
Args:
driver: an instance of `Driver`.
replay_buffer: an instance of `ReplayBuffer`.
agent: an instance of `TFAgent`.
steps: an integer indicating how many driver steps should be
executed and presented to the trainer during each training loop.
"""
def training_loop():
"""Returns a `tf.function` that runs the training loop."""
driver.run()
batch_size = driver.env.batch_size
dataset = replay_buffer.as_dataset(
sample_batch_size=batch_size,
num_steps=steps,
single_deterministic_pass=True)
experience, unused_info = tf.data.experimental.get_single_element(dataset)
set_expected_shape(experience, steps)
loss_info = agent.train(experience)
replay_buffer.clear()
return loss_info
return training_loop
def restore_and_get_checkpoint_manager(root_dir, agent, metrics, step_metric):
"""Restores from `root_dir` and returns a function that writes checkpoints."""
trackable_objects = {metric.name: metric for metric in metrics}
trackable_objects[AGENT_CHECKPOINT_NAME] = agent
trackable_objects[STEP_CHECKPOINT_NAME] = step_metric
checkpoint = tf.train.Checkpoint(**trackable_objects)
checkpoint_manager = tf.train.CheckpointManager(checkpoint=checkpoint,
directory=root_dir,
max_to_keep=5)
latest = checkpoint_manager.latest_checkpoint
if latest is not None:
logging.info('Restoring checkpoint from %s.', latest)
checkpoint.restore(latest)
logging.info('Successfully restored to step %s.', step_metric.result())
else:
logging.info('Did not find a pre-existing checkpoint. '
'Starting from scratch.')
return checkpoint_manager
def train(root_dir,
agent,
environment,
training_loops,
steps_per_loop,
additional_metrics=(),
training_data_spec_transformation_fn=None):
"""Perform `training_loops` iterations of training.
Checkpoint results.
If one or more baseline_reward_fns are provided, the regret is computed
against each one of them. Here is example baseline_reward_fn:
def baseline_reward_fn(observation, per_action_reward_fns):
rewards = ... # compute reward for each arm
optimal_action_reward = ... # take the maximum reward
return optimal_action_reward
Args:
root_dir: path to the directory where checkpoints and metrics will be
written.
agent: an instance of `TFAgent`.
environment: an instance of `TFEnvironment`.
training_loops: an integer indicating how many training loops should be run.
steps_per_loop: an integer indicating how many driver steps should be
executed and presented to the trainer during each training loop.
additional_metrics: Tuple of metric objects to log, in addition to default
metrics `NumberOfEpisodes`, `AverageReturnMetric`, and
`AverageEpisodeLengthMetric`.
training_data_spec_transformation_fn: Optional function that transforms the
data items before they get to the replay buffer.
"""
# TODO(b/127641485): create evaluation loop with configurable metrics.
if training_data_spec_transformation_fn is None:
data_spec = agent.policy.trajectory_spec
else:
data_spec = training_data_spec_transformation_fn(
agent.policy.trajectory_spec)
replay_buffer = get_replay_buffer(data_spec, environment.batch_size,
steps_per_loop)
# `step_metric` records the number of individual rounds of bandit interaction;
# that is, (number of trajectories) * batch_size.
step_metric = tf_metrics.EnvironmentSteps()
metrics = [
tf_metrics.NumberOfEpisodes(),
tf_metrics.AverageEpisodeLengthMetric(batch_size=environment.batch_size)
] + list(additional_metrics)
if isinstance(environment.reward_spec(), dict):
metrics += [tf_metrics.AverageReturnMultiMetric(
reward_spec=environment.reward_spec(),
batch_size=environment.batch_size)]
else:
metrics += [
tf_metrics.AverageReturnMetric(batch_size=environment.batch_size)]
if training_data_spec_transformation_fn is not None:
add_batch_fn = lambda data: replay_buffer.add_batch( # pylint: disable=g-long-lambda
training_data_spec_transformation_fn(data))
else:
add_batch_fn = replay_buffer.add_batch
observers = [add_batch_fn, step_metric] + metrics
driver = dynamic_step_driver.DynamicStepDriver(
env=environment,
policy=agent.collect_policy,
num_steps=steps_per_loop * environment.batch_size,
observers=observers)
training_loop = get_training_loop_fn(
driver, replay_buffer, agent, steps_per_loop)
checkpoint_manager = restore_and_get_checkpoint_manager(
root_dir, agent, metrics, step_metric)
train_step_counter = tf.compat.v1.train.get_or_create_global_step()
saver = policy_saver.PolicySaver(agent.policy, train_step=train_step_counter)
summary_writer = tf.summary.create_file_writer(root_dir)
summary_writer.set_as_default()
for i in range(training_loops):
training_loop()
metric_utils.log_metrics(metrics)
for metric in metrics:
metric.tf_summaries(train_step=step_metric.result())
checkpoint_manager.save()
if i % 100 == 0:
saver.save(os.path.join(root_dir, 'policy_%d' % step_metric.result()))
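# Hypothetical usage sketch (not part of the original module); `my_agent` and
# `my_env` stand in for a TF-Agents bandit agent and TFEnvironment built
# elsewhere:
#   train(root_dir='/tmp/bandit_run', agent=my_agent, environment=my_env,
#         training_loops=100, steps_per_loop=2)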
| [((1417, 1538), 'tf_agents.replay_buffers.tf_uniform_replay_buffer.TFUniformReplayBuffer', 'tf_uniform_replay_buffer.TFUniformReplayBuffer', ([], {'data_spec': 'data_spec', 'batch_size': 'batch_size', 'max_length': 'steps_per_loop'}), '(data_spec=data_spec,\n batch_size=batch_size, max_length=steps_per_loop)\n', (1463, 1538), False, 'from tf_agents.replay_buffers import tf_uniform_replay_buffer\n'), ((3124, 3164), 'tensorflow.train.Checkpoint', 'tf.train.Checkpoint', ([], {}), '(**trackable_objects)\n', (3143, 3164), True, 'import tensorflow as tf\n'), ((3188, 3276), 'tensorflow.train.CheckpointManager', 'tf.train.CheckpointManager', ([], {'checkpoint': 'checkpoint', 'directory': 'root_dir', 'max_to_keep': '(5)'}), '(checkpoint=checkpoint, directory=root_dir,\n max_to_keep=5)\n', (3214, 3276), True, 'import tensorflow as tf\n'), ((5634, 5663), 'tf_agents.metrics.tf_metrics.EnvironmentSteps', 'tf_metrics.EnvironmentSteps', ([], {}), '()\n', (5661, 5663), False, 'from tf_agents.metrics import tf_metrics\n'), ((6434, 6598), 'tf_agents.drivers.dynamic_step_driver.DynamicStepDriver', 'dynamic_step_driver.DynamicStepDriver', ([], {'env': 'environment', 'policy': 'agent.collect_policy', 'num_steps': '(steps_per_loop * environment.batch_size)', 'observers': 'observers'}), '(env=environment, policy=agent.\n collect_policy, num_steps=steps_per_loop * environment.batch_size,\n observers=observers)\n', (6471, 6598), False, 'from tf_agents.drivers import dynamic_step_driver\n'), ((6835, 6881), 'tensorflow.compat.v1.train.get_or_create_global_step', 'tf.compat.v1.train.get_or_create_global_step', ([], {}), '()\n', (6879, 6881), True, 'import tensorflow as tf\n'), ((6892, 6961), 'tf_agents.policies.policy_saver.PolicySaver', 'policy_saver.PolicySaver', (['agent.policy'], {'train_step': 'train_step_counter'}), '(agent.policy, train_step=train_step_counter)\n', (6916, 6961), False, 'from tf_agents.policies import policy_saver\n'), ((6982, 7021), 'tensorflow.summary.create_file_writer', 'tf.summary.create_file_writer', (['root_dir'], {}), '(root_dir)\n', (7011, 7021), True, 'import tensorflow as tf\n'), ((2573, 2621), 'tensorflow.data.experimental.get_single_element', 'tf.data.experimental.get_single_element', (['dataset'], {}), '(dataset)\n', (2612, 2621), True, 'import tensorflow as tf\n'), ((3450, 3503), 'absl.logging.info', 'logging.info', (['"""Restoring checkpoint from %s."""', 'latest'], {}), "('Restoring checkpoint from %s.', latest)\n", (3462, 3503), False, 'from absl import logging\n'), ((3623, 3701), 'absl.logging.info', 'logging.info', (['"""Did not find a pre-existing checkpoint. Starting from scratch."""'], {}), "('Did not find a pre-existing checkpoint. Starting from scratch.')\n", (3635, 3701), False, 'from absl import logging\n'), ((7114, 7147), 'tf_agents.eval.metric_utils.log_metrics', 'metric_utils.log_metrics', (['metrics'], {}), '(metrics)\n', (7138, 7147), False, 'from tf_agents.eval import metric_utils\n'), ((5684, 5713), 'tf_agents.metrics.tf_metrics.NumberOfEpisodes', 'tf_metrics.NumberOfEpisodes', ([], {}), '()\n', (5711, 5713), False, 'from tf_agents.metrics import tf_metrics\n'), ((5721, 5793), 'tf_agents.metrics.tf_metrics.AverageEpisodeLengthMetric', 'tf_metrics.AverageEpisodeLengthMetric', ([], {'batch_size': 'environment.batch_size'}), '(batch_size=environment.batch_size)\n', (5758, 5793), False, 'from tf_agents.metrics import tf_metrics\n'), ((6053, 6118), 'tf_agents.metrics.tf_metrics.AverageReturnMetric', 'tf_metrics.AverageReturnMetric', ([], {'batch_size': 'environment.batch_size'}), '(batch_size=environment.batch_size)\n', (6083, 6118), False, 'from tf_agents.metrics import tf_metrics\n')] |
RSE-Cambridge/rally-openstack | rally_openstack/cfg/manila.py | 32bbc091bbce1db625a2fc22da28b32718befa13 | # Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally.common import cfg
OPTS = {"openstack": [
cfg.FloatOpt(
"manila_share_create_prepoll_delay",
default=2.0,
deprecated_group="benchmark",
help="Delay between creating Manila share and polling for its "
"status."),
cfg.FloatOpt(
"manila_share_create_timeout",
default=300.0,
deprecated_group="benchmark",
help="Timeout for Manila share creation."),
cfg.FloatOpt(
"manila_share_create_poll_interval",
default=3.0,
deprecated_group="benchmark",
help="Interval between checks when waiting for Manila share "
"creation."),
cfg.FloatOpt(
"manila_share_delete_timeout",
default=180.0,
deprecated_group="benchmark",
help="Timeout for Manila share deletion."),
cfg.FloatOpt(
"manila_share_delete_poll_interval",
default=2.0,
deprecated_group="benchmark",
help="Interval between checks when waiting for Manila share "
"deletion."),
cfg.FloatOpt(
"manila_access_create_timeout",
default=300.0,
deprecated_group="benchmark",
help="Timeout for Manila access creation."),
cfg.FloatOpt(
"manila_access_create_poll_interval",
default=3.0,
deprecated_group="benchmark",
help="Interval between checks when waiting for Manila access "
"creation."),
cfg.FloatOpt(
"manila_access_delete_timeout",
default=180.0,
deprecated_group="benchmark",
help="Timeout for Manila access deletion."),
cfg.FloatOpt(
"manila_access_delete_poll_interval",
default=2.0,
deprecated_group="benchmark",
help="Interval between checks when waiting for Manila access "
"deletion."),
]}
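# Hedged note (not part of this module): rally.common.cfg wraps oslo.config,
# so these options are typically registered with something along the lines of
# CONF.register_opts(OPTS['openstack'], group='openstack') and then read as
# CONF.openstack.manila_share_create_timeout.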
| [((687, 860), 'rally.common.cfg.FloatOpt', 'cfg.FloatOpt', (['"""manila_share_create_prepoll_delay"""'], {'default': '(2.0)', 'deprecated_group': '"""benchmark"""', 'help': '"""Delay between creating Manila share and polling for its status."""'}), "('manila_share_create_prepoll_delay', default=2.0,\n deprecated_group='benchmark', help=\n 'Delay between creating Manila share and polling for its status.')\n", (699, 860), False, 'from rally.common import cfg\n'), ((906, 1042), 'rally.common.cfg.FloatOpt', 'cfg.FloatOpt', (['"""manila_share_create_timeout"""'], {'default': '(300.0)', 'deprecated_group': '"""benchmark"""', 'help': '"""Timeout for Manila share creation."""'}), "('manila_share_create_timeout', default=300.0, deprecated_group\n ='benchmark', help='Timeout for Manila share creation.')\n", (918, 1042), False, 'from rally.common import cfg\n'), ((1076, 1249), 'rally.common.cfg.FloatOpt', 'cfg.FloatOpt', (['"""manila_share_create_poll_interval"""'], {'default': '(3.0)', 'deprecated_group': '"""benchmark"""', 'help': '"""Interval between checks when waiting for Manila share creation."""'}), "('manila_share_create_poll_interval', default=3.0,\n deprecated_group='benchmark', help=\n 'Interval between checks when waiting for Manila share creation.')\n", (1088, 1249), False, 'from rally.common import cfg\n'), ((1295, 1431), 'rally.common.cfg.FloatOpt', 'cfg.FloatOpt', (['"""manila_share_delete_timeout"""'], {'default': '(180.0)', 'deprecated_group': '"""benchmark"""', 'help': '"""Timeout for Manila share deletion."""'}), "('manila_share_delete_timeout', default=180.0, deprecated_group\n ='benchmark', help='Timeout for Manila share deletion.')\n", (1307, 1431), False, 'from rally.common import cfg\n'), ((1465, 1638), 'rally.common.cfg.FloatOpt', 'cfg.FloatOpt', (['"""manila_share_delete_poll_interval"""'], {'default': '(2.0)', 'deprecated_group': '"""benchmark"""', 'help': '"""Interval between checks when waiting for Manila share deletion."""'}), "('manila_share_delete_poll_interval', default=2.0,\n deprecated_group='benchmark', help=\n 'Interval between checks when waiting for Manila share deletion.')\n", (1477, 1638), False, 'from rally.common import cfg\n'), ((1684, 1821), 'rally.common.cfg.FloatOpt', 'cfg.FloatOpt', (['"""manila_access_create_timeout"""'], {'default': '(300.0)', 'deprecated_group': '"""benchmark"""', 'help': '"""Timeout for Manila access creation."""'}), "('manila_access_create_timeout', default=300.0,\n deprecated_group='benchmark', help='Timeout for Manila access creation.')\n", (1696, 1821), False, 'from rally.common import cfg\n'), ((1856, 2031), 'rally.common.cfg.FloatOpt', 'cfg.FloatOpt', (['"""manila_access_create_poll_interval"""'], {'default': '(3.0)', 'deprecated_group': '"""benchmark"""', 'help': '"""Interval between checks when waiting for Manila access creation."""'}), "('manila_access_create_poll_interval', default=3.0,\n deprecated_group='benchmark', help=\n 'Interval between checks when waiting for Manila access creation.')\n", (1868, 2031), False, 'from rally.common import cfg\n'), ((2077, 2214), 'rally.common.cfg.FloatOpt', 'cfg.FloatOpt', (['"""manila_access_delete_timeout"""'], {'default': '(180.0)', 'deprecated_group': '"""benchmark"""', 'help': '"""Timeout for Manila access deletion."""'}), "('manila_access_delete_timeout', default=180.0,\n deprecated_group='benchmark', help='Timeout for Manila access deletion.')\n", (2089, 2214), False, 'from rally.common import cfg\n'), ((2249, 2424), 'rally.common.cfg.FloatOpt', 'cfg.FloatOpt', (['"""manila_access_delete_poll_interval"""'], {'default': '(2.0)', 'deprecated_group': '"""benchmark"""', 'help': '"""Interval between checks when waiting for Manila access deletion."""'}), "('manila_access_delete_poll_interval', default=2.0,\n deprecated_group='benchmark', help=\n 'Interval between checks when waiting for Manila access deletion.')\n", (2261, 2424), False, 'from rally.common import cfg\n')] |
matayoos/invoice-scrapper | app/backend/app/crud/crud_register_invoice.py | d36c944c10714e61d304693d0fce28769d2a746a | from sqlalchemy.orm.session import Session
from app import crud
from .utils import insert, get_content
def register_invoice(db: Session, url: str):
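    # Flow: scrape the invoice page, upsert the grocery store, insert the
    # invoice header and its line items, then return the stored invoice.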
content = get_content.get_invoice_info(url)
grocery_store_id = insert.insert_grocery_store_info(
db, obj_in=content["grocery_store"]
)
invoice_id = insert.insert_invoice_info(
db, obj_in=content["invoice"], grocery_store_id=grocery_store_id
)
insert.insert_invoice_items(db, content["items"], grocery_store_id, invoice_id)
return crud.get_invoice_by_id(db, id=invoice_id)
| [((529, 570), 'app.crud.get_invoice_by_id', 'crud.get_invoice_by_id', (['db'], {'id': 'invoice_id'}), '(db, id=invoice_id)\n', (551, 570), False, 'from app import crud\n')] |
nicoddemus/aioworkers | examples/web/handlers.py | 4ab85064844dc28141833d1348989d8c891f3d7d |
async def handler(context):
return await context.data
| [] |
garronej/linphone | tools/genapixml.py | f61a337f5363b991d6e866a6aa7d303658c04073 | #!/usr/bin/python
# Copyright (C) 2014 Belledonne Communications SARL
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import argparse
import os
import six
import string
import sys
import xml.etree.ElementTree as ET
import xml.dom.minidom as minidom
import metadoc
class CObject:
def __init__(self, name):
self.name = name.strip()
self.briefDescription = ''
self.detailedDescription = None
self.deprecated = False
self.briefDoc = None
class CEnumValue(CObject):
def __init__(self, name):
CObject.__init__(self, name)
self.value = None
class CEnum(CObject):
def __init__(self, name):
CObject.__init__(self, name)
self.values = []
self.associatedTypedef = None
def addValue(self, value):
self.values.append(value)
class CStructMember(CObject):
def __init__(self, name, t):
CObject.__init__(self, name)
self.ctype = t.strip()
class CStruct(CObject):
def __init__(self, name):
CObject.__init__(self, name)
self.members = []
self.associatedTypedef = None
def addMember(self, member):
self.members.append(member)
class CTypedef(CObject):
def __init__(self, name, definition):
CObject.__init__(self, name)
self.definition = definition.strip()
class CArgument(CObject):
def __init__(self, t, name = '', enums = [], structs = []):
CObject.__init__(self, name)
self.description = None
self.containedType = None
keywords = [ 'const', 'struct', 'enum', 'signed', 'unsigned', 'short', 'long', '*' ]
fullySplittedType = []
splittedType = t.strip().split(' ')
for s in splittedType:
if s.startswith('*'):
fullySplittedType.append('*')
if len(s) > 1:
fullySplittedType.append(s[1:])
elif s.endswith('*'):
fullySplittedType.append(s[:-1])
fullySplittedType.append('*')
else:
fullySplittedType.append(s)
if 'MS2_DEPRECATED' in fullySplittedType:
fullySplittedType.remove('MS2_DEPRECATED')
elif 'LINPHONE_DEPRECATED' in fullySplittedType:
fullySplittedType.remove('LINPHONE_DEPRECATED')
isStruct = False
isEnum = False
self.ctype = 'int' # Default to int so that the result is correct eg. for 'unsigned short'
for s in fullySplittedType:
if not s in keywords:
self.ctype = s
if s == 'struct':
isStruct = True
if s == 'enum':
isEnum = True
if isStruct:
for st in structs:
if st.associatedTypedef is not None:
self.ctype = st.associatedTypedef.name
elif isEnum:
for e in enums:
if e.associatedTypedef is not None:
self.ctype = e.associatedTypedef.name
if self.ctype == 'int' and 'int' not in fullySplittedType:
if fullySplittedType[-1] == '*':
fullySplittedType.insert(-1, 'int')
else:
fullySplittedType.append('int')
self.completeType = ' '.join(fullySplittedType)
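		# Illustrative example: CArgument('const char *') yields ctype == 'char'
		# and completeType == 'const char *'.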
def __str__(self):
return self.completeType + " " + self.name
class CArgumentsList:
def __init__(self):
self.arguments = []
def addArgument(self, arg):
self.arguments.append(arg)
def __len__(self):
return len(self.arguments)
def __getitem__(self, key):
return self.arguments[key]
def __str__(self):
argstr = []
for arg in self.arguments:
argstr.append(str(arg))
return ', '.join(argstr)
class CFunction(CObject):
def __init__(self, name, returnarg, argslist):
CObject.__init__(self, name)
self.returnArgument = returnarg
self.arguments = argslist
self.location = None
class CEvent(CFunction):
pass
class CProperty:
def __init__(self, name):
self.name = name
self.getter = None
self.setter = None
class CClass(CObject):
def __init__(self, st):
CObject.__init__(self, st.associatedTypedef.name)
if st.deprecated or st.associatedTypedef.deprecated:
self.deprecated = True
if len(st.associatedTypedef.briefDescription) > 0:
self.briefDescription = st.associatedTypedef.briefDescription
elif len(st.briefDescription) > 0:
self.briefDescription = st.briefDescription
if st.associatedTypedef.detailedDescription is not None:
self.detailedDescription = st.associatedTypedef.detailedDescription
elif st.detailedDescription is not None:
self.detailedDescription = st.detailedDescription
self.__struct = st
self.events = {}
self.classMethods = {}
self.instanceMethods = {}
self.properties = {}
self.__computeCFunctionPrefix()
def __computeCFunctionPrefix(self):
self.cFunctionPrefix = ''
first = True
for l in self.name:
if l.isupper() and not first:
self.cFunctionPrefix += '_'
self.cFunctionPrefix += l.lower()
first = False
self.cFunctionPrefix += '_'
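		# e.g. a class named 'LinphoneCoreManager' yields the C function prefix
		# 'linphone_core_manager_' (illustrative name).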
def __addPropertyGetter(self, name, f):
if not name in self.properties:
prop = CProperty(name)
self.properties[name] = prop
self.properties[name].getter = f
def __addPropertySetter(self, name, f):
if not name in self.properties:
prop = CProperty(name)
self.properties[name] = prop
self.properties[name].setter = f
def __addClassMethod(self, f):
if not f.name in self.classMethods:
self.classMethods[f.name] = f
def __addInstanceMethod(self, f):
name = f.name[len(self.cFunctionPrefix):]
if name.startswith('get_') and len(f.arguments) == 1:
self.__addPropertyGetter(name[4:], f)
elif name.startswith('is_') and len(f.arguments) == 1 and f.returnArgument.ctype == 'bool_t':
self.__addPropertyGetter(name, f)
elif name.endswith('_enabled') and len(f.arguments) == 1 and f.returnArgument.ctype == 'bool_t':
self.__addPropertyGetter(name, f)
elif name.startswith('set_') and len(f.arguments) == 2:
self.__addPropertySetter(name[4:], f)
elif name.startswith('enable_') and len(f.arguments) == 2 and f.arguments[1].ctype == 'bool_t':
self.__addPropertySetter(name[7:] + '_enabled', f)
else:
if not f.name in self.instanceMethods:
self.instanceMethods[f.name] = f
def addEvent(self, ev):
if not ev.name in self.events:
self.events[ev.name] = ev
def addMethod(self, f):
if len(f.arguments) > 0 and f.arguments[0].ctype == self.name:
self.__addInstanceMethod(f)
else:
self.__addClassMethod(f)
class Project:
def __init__(self):
self.verbose = False
self.prettyPrint = False
self.enums = []
self.__structs = []
self.__typedefs = []
self.__events = []
self.__functions = []
self.classes = []
self.docparser = metadoc.Parser()
def add(self, elem):
if isinstance(elem, CClass):
if self.verbose:
print("Adding class " + elem.name)
self.classes.append(elem)
elif isinstance(elem, CEnum):
if self.verbose:
print("Adding enum " + elem.name)
for ev in elem.values:
print("\t" + ev.name)
self.enums.append(elem)
elif isinstance(elem, CStruct):
if self.verbose:
print("Adding struct " + elem.name)
for sm in elem.members:
print("\t" + sm.ctype + " " + sm.name)
self.__structs.append(elem)
elif isinstance(elem, CTypedef):
if self.verbose:
print("Adding typedef " + elem.name)
print("\t" + elem.definition)
self.__typedefs.append(elem)
elif isinstance(elem, CEvent):
if self.verbose:
print("Adding event " + elem.name)
print("\tReturns: " + elem.returnArgument.ctype)
print("\tArguments: " + str(elem.arguments))
self.__events.append(elem)
elif isinstance(elem, CFunction):
if self.verbose:
print("Adding function " + elem.name)
print("\tReturns: " + elem.returnArgument.ctype)
print("\tArguments: " + str(elem.arguments))
self.__functions.append(elem)
def __cleanDescription(self, descriptionNode):
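		# Normalizes a doxygen description node: strips parameter/return blocks,
		# rewrites 'see'/'note' sections and drops xref/bctbx_list helper nodes.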
for para in descriptionNode.findall('./para'):
for n in para.findall('./parameterlist'):
para.remove(n)
for n in para.findall("./simplesect[@kind='return']"):
para.remove(n)
for n in para.findall("./simplesect[@kind='see']"):
t = ''.join(n.itertext())
n.clear()
n.tag = 'see'
n.text = t
for n in para.findall("./simplesect[@kind='note']"):
n.tag = 'note'
n.attrib = {}
for n in para.findall(".//xrefsect"):
para.remove(n)
for n in para.findall('.//ref'):
n.attrib = {}
for n in para.findall(".//bctbx_list"):
para.remove(n)
if descriptionNode.tag == 'parameterdescription':
descriptionNode.tag = 'description'
if descriptionNode.tag == 'simplesect':
descriptionNode.tag = 'description'
descriptionNode.attrib = {}
return descriptionNode
def __canBeWrapped(self, node):
return node.find('./detaileddescription//donotwrap') is None
def __discoverClasses(self):
for td in self.__typedefs:
if td.definition.startswith('enum '):
for e in self.enums:
if (e.associatedTypedef is None) and td.definition[5:] == e.name:
e.associatedTypedef = td
break
elif td.definition.startswith('struct '):
structFound = False
for st in self.__structs:
if (st.associatedTypedef is None) and td.definition[7:] == st.name:
st.associatedTypedef = td
structFound = True
break
if not structFound:
name = td.definition[7:]
print("Structure with no associated typedef: " + name)
st = CStruct(name)
st.associatedTypedef = td
self.add(st)
for td in self.__typedefs:
if td.definition.startswith('struct '):
for st in self.__structs:
if st.associatedTypedef == td:
cclass = CClass(st)
cclass.briefDoc = td.briefDoc
self.add(cclass)
break
elif ('Linphone' + td.definition) == td.name:
st = CStruct(td.name)
st.associatedTypedef = td
cclass = CClass(st)
cclass.briefDoc = td.briefDoc
self.add(st)
self.add(cclass)
# Sort classes by length of name (longest first), so that methods are put in the right class
self.classes.sort(key = lambda c: len(c.name), reverse = True)
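		# e.g. a function named 'linphone_call_log_get_dir' must be matched against
		# 'LinphoneCallLog' before the shorter 'LinphoneCall' prefix (illustrative).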
for e in self.__events:
eventAdded = False
for c in self.classes:
if c.name.endswith('Cbs') and e.name.startswith(c.name):
c.addEvent(e)
eventAdded = True
break
if not eventAdded:
for c in self.classes:
if e.name.startswith(c.name):
c.addEvent(e)
eventAdded = True
break
for f in self.__functions:
for c in self.classes:
if c.cFunctionPrefix == f.name[0 : len(c.cFunctionPrefix)]:
c.addMethod(f)
break
def __parseCEnumValueInitializer(self, initializer):
initializer = initializer.strip()
if not initializer.startswith('='):
return None
initializer = initializer[1:]
initializer.strip()
return initializer
def __parseCEnumValue(self, node):
ev = CEnumValue(node.find('./name').text)
initializerNode = node.find('./initializer')
if initializerNode is not None:
ev.value = self.__parseCEnumValueInitializer(initializerNode.text)
deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']")
if deprecatedNode is not None:
ev.deprecated = True
ev.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()
ev.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))
ev.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))
return ev
def __parseCEnumMemberdef(self, node):
if not Project.__canBeWrapped(self, node):
return None
e = CEnum(node.find('./name').text)
deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']")
if deprecatedNode is not None:
e.deprecated = True
e.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()
e.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))
e.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))
enumvalues = node.findall("enumvalue[@prot='public']")
for enumvalue in enumvalues:
ev = self.__parseCEnumValue(enumvalue)
e.addValue(ev)
return e
def __findCEnum(self, tree):
memberdefs = tree.findall("./compounddef[@kind='group']/sectiondef[@kind='enum']/memberdef[@kind='enum'][@prot='public']")
for m in memberdefs:
e = self.__parseCEnumMemberdef(m)
self.add(e)
def __parseCStructMember(self, node, structname):
name = node.find('./name').text
definition = node.find('./definition').text
t = definition[0:definition.find(structname + "::" + name)]
sm = CStructMember(name, t)
deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']")
if deprecatedNode is not None:
sm.deprecated = True
sm.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()
sm.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))
sm.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))
return sm
def __parseCStructCompounddef(self, node):
s = CStruct(node.find('./compoundname').text)
deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']")
if deprecatedNode is not None:
s.deprecated = True
s.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()
s.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))
s.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))
structmembers = node.findall("sectiondef/memberdef[@kind='variable'][@prot='public']")
for structmember in structmembers:
sm = self.__parseCStructMember(structmember, s.name)
s.addMember(sm)
return s
def __findCStruct(self, tree):
compounddefs = tree.findall("./compounddef[@kind='struct'][@prot='public']")
for c in compounddefs:
s = self.__parseCStructCompounddef(c)
self.add(s)
def __parseCTypedefMemberdef(self, node):
if not Project.__canBeWrapped(self, node):
return None
		missingDocWarning = ''
		name = node.find('./name').text
definition = node.find('./definition').text
if definition.startswith('typedef '):
definition = definition[8 :]
if name.endswith('Cb'):
pos = definition.find("(*")
if pos == -1:
return None
returntype = definition[0:pos].strip()
returnarg = CArgument(returntype, enums = self.enums, structs = self.__structs)
returndesc = node.find("./detaileddescription/para/simplesect[@kind='return']")
if returndesc is not None:
if returnarg.ctype == 'MSList' or returnarg.ctype == 'bctbx_list_t':
n = returndesc.find('.//bctbxlist')
if n is not None:
returnarg.containedType = n.text
returnarg.description = self.__cleanDescription(returndesc)
elif returnarg.completeType != 'void':
missingDocWarning += "\tReturn value is not documented\n"
definition = definition[pos + 2 :]
pos = definition.find("(")
definition = definition[pos + 1 : -1]
argslist = CArgumentsList()
for argdef in definition.split(', '):
argType = ''
starPos = argdef.rfind('*')
spacePos = argdef.rfind(' ')
if starPos != -1:
argType = argdef[0 : starPos + 1]
argName = argdef[starPos + 1 :]
elif spacePos != -1:
argType = argdef[0 : spacePos]
argName = argdef[spacePos + 1 :]
argslist.addArgument(CArgument(argType, argName, self.enums, self.__structs))
if len(argslist) > 0:
paramdescs = node.findall("detaileddescription/para/parameterlist[@kind='param']/parameteritem")
if paramdescs:
for arg in argslist.arguments:
for paramdesc in paramdescs:
if arg.name == paramdesc.find('./parameternamelist').find('./parametername').text:
arg.description = self.__cleanDescription(paramdesc.find('./parameterdescription'))
for arg in argslist.arguments:
if arg.description == None:
missingDocWarning += "\t'" + arg.name + "' parameter not documented\n";
if missingDocWarning != '':
print(name + ":\n" + missingDocWarning)
f = CEvent(name, returnarg, argslist)
deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']")
if deprecatedNode is not None:
f.deprecated = True
f.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()
f.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))
f.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))
return f
else:
pos = definition.rfind(" " + name)
if pos != -1:
definition = definition[0 : pos]
td = CTypedef(name, definition)
deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']")
if deprecatedNode is not None:
td.deprecated = True
td.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()
td.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))
td.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))
return td
return None
def __findCTypedef(self, tree):
memberdefs = tree.findall("./compounddef[@kind='group']/sectiondef[@kind='typedef']/memberdef[@kind='typedef'][@prot='public']")
for m in memberdefs:
td = self.__parseCTypedefMemberdef(m)
self.add(td)
def __parseCFunctionMemberdef(self, node):
if not Project.__canBeWrapped(self, node):
return None
internal = node.find("./detaileddescription/internal")
if internal is not None:
return None
missingDocWarning = ''
name = node.find('./name').text
t = ''.join(node.find('./type').itertext())
returnarg = CArgument(t, enums = self.enums, structs = self.__structs)
returndesc = node.find("./detaileddescription/para/simplesect[@kind='return']")
if returndesc is not None:
if returnarg.ctype == 'MSList' or returnarg.ctype == 'bctbx_list_t':
n = returndesc.find('.//bctbxlist')
if n is not None:
returnarg.containedType = n.text
returnarg.description = self.__cleanDescription(returndesc)
elif returnarg.completeType != 'void':
missingDocWarning += "\tReturn value is not documented\n"
argslist = CArgumentsList()
argslistNode = node.findall('./param')
for argNode in argslistNode:
argType = ''.join(argNode.find('./type').itertext())
argName = ''
argNameNode = argNode.find('./declname')
if argNameNode is not None:
argName = ''.join(argNameNode.itertext())
if argType != 'void':
argslist.addArgument(CArgument(argType, argName, self.enums, self.__structs))
if len(argslist) > 0:
paramdescs = node.findall("./detaileddescription/para/parameterlist[@kind='param']/parameteritem")
if paramdescs:
for arg in argslist.arguments:
for paramdesc in paramdescs:
if arg.name == paramdesc.find('./parameternamelist').find('./parametername').text:
if arg.ctype == 'MSList' or arg.ctype == 'bctbx_list_t':
n = paramdesc.find('.//bctbxlist')
if n is not None:
arg.containedType = n.text
arg.description = self.__cleanDescription(paramdesc.find('./parameterdescription'))
for arg in argslist.arguments:
if arg.description == None:
missingDocWarning += "\t'" + arg.name + "' parameter not documented\n";
f = CFunction(name, returnarg, argslist)
deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']")
if deprecatedNode is not None:
f.deprecated = True
f.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()
f.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))
f.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))
if f.briefDescription == '' and ''.join(f.detailedDescription.itertext()).strip() == '':
return None
locationNode = node.find('./location')
if locationNode is not None:
f.location = locationNode.get('file')
if not f.location.endswith('.h'):
missingDocWarning += "\tNot documented in a header file ('" + f.location + "')\n";
if missingDocWarning != '':
print(name + ":\n" + missingDocWarning)
return f
def __findCFunction(self, tree):
memberdefs = tree.findall("./compounddef[@kind='group']/sectiondef[@kind='func']/memberdef[@kind='function'][@prot='public'][@static='no']")
for m in memberdefs:
f = self.__parseCFunctionMemberdef(m)
if f is not None:
self.add(f)
def initFromFiles(self, xmlfiles):
trees = []
for f in xmlfiles:
tree = None
try:
if self.verbose:
print("Parsing XML file: " + f.name)
tree = ET.parse(f)
except ET.ParseError as e:
print(e)
if tree is not None:
trees.append(tree)
for tree in trees:
self.__findCEnum(tree)
for tree in trees:
self.__findCStruct(tree)
for tree in trees:
self.__findCTypedef(tree)
for tree in trees:
self.__findCFunction(tree)
self.__discoverClasses()
def initFromDir(self, xmldir):
files = [ os.path.join(xmldir, f) for f in os.listdir(xmldir) if (os.path.isfile(os.path.join(xmldir, f)) and f.endswith('.xml')) ]
self.initFromFiles(files)
def check(self):
for c in self.classes:
for name, p in six.iteritems(c.properties):
if p.getter is None and p.setter is not None:
print("Property '" + name + "' of class '" + c.name + "' has a setter but no getter")
class Generator:
def __init__(self, outputfile):
self.__outputfile = outputfile
def __generateEnum(self, cenum, enumsNode):
enumNodeAttributes = { 'name' : cenum.name, 'deprecated' : str(cenum.deprecated).lower() }
if cenum.associatedTypedef is not None:
enumNodeAttributes['name'] = cenum.associatedTypedef.name
enumNode = ET.SubElement(enumsNode, 'enum', enumNodeAttributes)
if cenum.briefDescription != '':
enumBriefDescriptionNode = ET.SubElement(enumNode, 'briefdescription')
enumBriefDescriptionNode.text = cenum.briefDescription
enumNode.append(cenum.detailedDescription)
if len(cenum.values) > 0:
enumValuesNode = ET.SubElement(enumNode, 'values')
for value in cenum.values:
enumValuesNodeAttributes = { 'name' : value.name, 'deprecated' : str(value.deprecated).lower() }
valueNode = ET.SubElement(enumValuesNode, 'value', enumValuesNodeAttributes)
if value.briefDescription != '':
valueBriefDescriptionNode = ET.SubElement(valueNode, 'briefdescription')
valueBriefDescriptionNode.text = value.briefDescription
valueNode.append(value.detailedDescription)
def __generateFunction(self, parentNode, nodeName, f):
functionAttributes = { 'name' : f.name, 'deprecated' : str(f.deprecated).lower() }
if f.location is not None:
functionAttributes['location'] = f.location
functionNode = ET.SubElement(parentNode, nodeName, functionAttributes)
returnValueAttributes = { 'type' : f.returnArgument.ctype, 'completetype' : f.returnArgument.completeType }
if f.returnArgument.containedType is not None:
returnValueAttributes['containedtype'] = f.returnArgument.containedType
returnValueNode = ET.SubElement(functionNode, 'return', returnValueAttributes)
if f.returnArgument.description is not None:
returnValueNode.append(f.returnArgument.description)
argumentsNode = ET.SubElement(functionNode, 'arguments')
for arg in f.arguments:
argumentNodeAttributes = { 'name' : arg.name, 'type' : arg.ctype, 'completetype' : arg.completeType }
if arg.containedType is not None:
argumentNodeAttributes['containedtype'] = arg.containedType
argumentNode = ET.SubElement(argumentsNode, 'argument', argumentNodeAttributes)
if arg.description is not None:
argumentNode.append(arg.description)
if f.briefDescription != '':
functionBriefDescriptionNode = ET.SubElement(functionNode, 'briefdescription')
functionBriefDescriptionNode.text = f.briefDescription
functionNode.append(f.detailedDescription)
def __generateClass(self, cclass, classesNode):
# Do not include classes that contain nothing
if len(cclass.events) == 0 and len(cclass.classMethods) == 0 and \
len(cclass.instanceMethods) == 0 and len(cclass.properties) == 0:
return
# Check the capabilities of the class
has_ref_method = False
has_unref_method = False
has_destroy_method = False
for methodname in cclass.instanceMethods:
methodname_without_prefix = methodname.replace(cclass.cFunctionPrefix, '')
if methodname_without_prefix == 'ref':
has_ref_method = True
elif methodname_without_prefix == 'unref':
has_unref_method = True
elif methodname_without_prefix == 'destroy':
has_destroy_method = True
refcountable = False
destroyable = False
if has_ref_method and has_unref_method:
refcountable = True
if has_destroy_method:
destroyable = True
classNodeAttributes = {
'name' : cclass.name,
'cfunctionprefix' : cclass.cFunctionPrefix,
'deprecated' : str(cclass.deprecated).lower(),
'refcountable' : str(refcountable).lower(),
'destroyable' : str(destroyable).lower()
}
# Generate the XML node for the class
classNode = ET.SubElement(classesNode, 'class', classNodeAttributes)
if len(cclass.events) > 0:
eventsNode = ET.SubElement(classNode, 'events')
eventnames = []
for eventname in cclass.events:
eventnames.append(eventname)
eventnames.sort()
for eventname in eventnames:
self.__generateFunction(eventsNode, 'event', cclass.events[eventname])
if len(cclass.classMethods) > 0:
classMethodsNode = ET.SubElement(classNode, 'classmethods')
methodnames = []
for methodname in cclass.classMethods:
methodnames.append(methodname)
methodnames.sort()
for methodname in methodnames:
self.__generateFunction(classMethodsNode, 'classmethod', cclass.classMethods[methodname])
if len(cclass.instanceMethods) > 0:
instanceMethodsNode = ET.SubElement(classNode, 'instancemethods')
methodnames = []
for methodname in cclass.instanceMethods:
methodnames.append(methodname)
methodnames.sort()
for methodname in methodnames:
self.__generateFunction(instanceMethodsNode, 'instancemethod', cclass.instanceMethods[methodname])
if len(cclass.properties) > 0:
propertiesNode = ET.SubElement(classNode, 'properties')
propnames = []
for propname in cclass.properties:
propnames.append(propname)
propnames.sort()
for propname in propnames:
propertyNodeAttributes = { 'name' : propname }
propertyNode = ET.SubElement(propertiesNode, 'property', propertyNodeAttributes)
if cclass.properties[propname].getter is not None:
self.__generateFunction(propertyNode, 'getter', cclass.properties[propname].getter)
if cclass.properties[propname].setter is not None:
self.__generateFunction(propertyNode, 'setter', cclass.properties[propname].setter)
if cclass.briefDescription != '':
classBriefDescriptionNode = ET.SubElement(classNode, 'briefdescription')
classBriefDescriptionNode.text = cclass.briefDescription
classNode.append(cclass.detailedDescription)
def generate(self, project):
print("Generating XML document of Linphone API to '" + self.__outputfile.name + "'")
apiNode = ET.Element('api')
project.enums.sort(key = lambda e: e.name)
if len(project.enums) > 0:
enumsNode = ET.SubElement(apiNode, 'enums')
for cenum in project.enums:
self.__generateEnum(cenum, enumsNode)
if len(project.classes) > 0:
classesNode = ET.SubElement(apiNode, 'classes')
project.classes.sort(key = lambda c: c.name)
for cclass in project.classes:
self.__generateClass(cclass, classesNode)
s = '<?xml version="1.0" encoding="UTF-8" ?>\n'.encode('utf-8')
s += ET.tostring(apiNode, 'utf-8')
if project.prettyPrint:
s = minidom.parseString(s).toprettyxml(indent='\t')
self.__outputfile.write(s)
def main(argv = None):
if argv is None:
argv = sys.argv
argparser = argparse.ArgumentParser(description="Generate XML version of the Linphone API.")
argparser.add_argument('-o', '--outputfile', metavar='outputfile', type=argparse.FileType('w'), help="Output XML file describing the Linphone API.")
argparser.add_argument('--verbose', help="Increase output verbosity", action='store_true')
argparser.add_argument('--pretty', help="XML pretty print", action='store_true')
argparser.add_argument('xmldir', help="XML directory generated by doxygen.")
args = argparser.parse_args()
if args.outputfile == None:
args.outputfile = open('api.xml', 'w')
project = Project()
if args.verbose:
project.verbose = True
if args.pretty:
project.prettyPrint = True
project.initFromDir(args.xmldir)
project.check()
gen = Generator(args.outputfile)
gen.generate(project)
if __name__ == "__main__":
sys.exit(main())
| [((27754, 27839), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Generate XML version of the Linphone API."""'}), "(description='Generate XML version of the Linphone API.'\n )\n", (27777, 27839), False, 'import argparse\n'), ((6873, 6889), 'metadoc.Parser', 'metadoc.Parser', ([], {}), '()\n', (6887, 6889), False, 'import metadoc\n'), ((21658, 21710), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumsNode', '"""enum"""', 'enumNodeAttributes'], {}), "(enumsNode, 'enum', enumNodeAttributes)\n", (21671, 21710), True, 'import xml.etree.ElementTree as ET\n'), ((22676, 22731), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['parentNode', 'nodeName', 'functionAttributes'], {}), '(parentNode, nodeName, functionAttributes)\n', (22689, 22731), True, 'import xml.etree.ElementTree as ET\n'), ((22986, 23046), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['functionNode', '"""return"""', 'returnValueAttributes'], {}), "(functionNode, 'return', returnValueAttributes)\n", (22999, 23046), True, 'import xml.etree.ElementTree as ET\n'), ((23168, 23208), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['functionNode', '"""arguments"""'], {}), "(functionNode, 'arguments')\n", (23181, 23208), True, 'import xml.etree.ElementTree as ET\n'), ((24979, 25035), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classesNode', '"""class"""', 'classNodeAttributes'], {}), "(classesNode, 'class', classNodeAttributes)\n", (24992, 25035), True, 'import xml.etree.ElementTree as ET\n'), ((27044, 27061), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""api"""'], {}), "('api')\n", (27054, 27061), True, 'import xml.etree.ElementTree as ET\n'), ((27539, 27568), 'xml.etree.ElementTree.tostring', 'ET.tostring', (['apiNode', '"""utf-8"""'], {}), "(apiNode, 'utf-8')\n", (27550, 27568), True, 'import xml.etree.ElementTree as ET\n'), ((20936, 20959), 'os.path.join', 'os.path.join', (['xmldir', 'f'], {}), '(xmldir, f)\n', (20948, 20959), False, 'import os\n'), ((21148, 21175), 'six.iteritems', 'six.iteritems', (['c.properties'], {}), '(c.properties)\n', (21161, 21175), False, 'import six\n'), ((21776, 21819), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumNode', '"""briefdescription"""'], {}), "(enumNode, 'briefdescription')\n", (21789, 21819), True, 'import xml.etree.ElementTree as ET\n'), ((21971, 22004), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumNode', '"""values"""'], {}), "(enumNode, 'values')\n", (21984, 22004), True, 'import xml.etree.ElementTree as ET\n'), ((23459, 23523), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['argumentsNode', '"""argument"""', 'argumentNodeAttributes'], {}), "(argumentsNode, 'argument', argumentNodeAttributes)\n", (23472, 23523), True, 'import xml.etree.ElementTree as ET\n'), ((23665, 23712), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['functionNode', '"""briefdescription"""'], {}), "(functionNode, 'briefdescription')\n", (23678, 23712), True, 'import xml.etree.ElementTree as ET\n'), ((25081, 25115), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '"""events"""'], {}), "(classNode, 'events')\n", (25094, 25115), True, 'import xml.etree.ElementTree as ET\n'), ((25388, 25428), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '"""classmethods"""'], {}), "(classNode, 'classmethods')\n", (25401, 25428), True, 'import xml.etree.ElementTree as ET\n'), ((25739, 25782), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '"""instancemethods"""'], {}), "(classNode, 'instancemethods')\n", (25752, 25782), True, 'import xml.etree.ElementTree as ET\n'), ((26095, 26133), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '"""properties"""'], {}), "(classNode, 'properties')\n", (26108, 26133), True, 'import xml.etree.ElementTree as ET\n'), ((26762, 26806), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '"""briefdescription"""'], {}), "(classNode, 'briefdescription')\n", (26775, 26806), True, 'import xml.etree.ElementTree as ET\n'), ((27151, 27182), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['apiNode', '"""enums"""'], {}), "(apiNode, 'enums')\n", (27164, 27182), True, 'import xml.etree.ElementTree as ET\n'), ((27304, 27337), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['apiNode', '"""classes"""'], {}), "(apiNode, 'classes')\n", (27317, 27337), True, 'import xml.etree.ElementTree as ET\n'), ((27908, 27930), 'argparse.FileType', 'argparse.FileType', (['"""w"""'], {}), "('w')\n", (27925, 27930), False, 'import argparse\n'), ((20565, 20576), 'xml.etree.ElementTree.parse', 'ET.parse', (['f'], {}), '(f)\n', (20573, 20576), True, 'import xml.etree.ElementTree as ET\n'), ((20969, 20987), 'os.listdir', 'os.listdir', (['xmldir'], {}), '(xmldir)\n', (20979, 20987), False, 'import os\n'), ((22152, 22216), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumValuesNode', '"""value"""', 'enumValuesNodeAttributes'], {}), "(enumValuesNode, 'value', enumValuesNodeAttributes)\n", (22165, 22216), True, 'import xml.etree.ElementTree as ET\n'), ((26341, 26406), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['propertiesNode', '"""property"""', 'propertyNodeAttributes'], {}), "(propertiesNode, 'property', propertyNodeAttributes)\n", (26354, 26406), True, 'import xml.etree.ElementTree as ET\n'), ((22287, 22331), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['valueNode', '"""briefdescription"""'], {}), "(valueNode, 'briefdescription')\n", (22300, 22331), True, 'import xml.etree.ElementTree as ET\n'), ((27602, 27624), 'xml.dom.minidom.parseString', 'minidom.parseString', (['s'], {}), '(s)\n', (27621, 27624), True, 'import xml.dom.minidom as minidom\n'), ((21007, 21030), 'os.path.join', 'os.path.join', (['xmldir', 'f'], {}), '(xmldir, f)\n', (21019, 21030), False, 'import os\n')] |
aaronstjohn/incubator-heron | examples/src/python/join_streamlet_topology.py | bdc35f8d23296472983956a477ea38da54d16b2b | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
'''join_streamlet_topology.py: this module demonstrates how to use the join operator'''
import sys
from heronpy.streamlet.builder import Builder
from heronpy.streamlet.runner import Runner
from heronpy.streamlet.config import Config
from heronpy.streamlet.windowconfig import WindowConfig
from heronpy.connectors.mock.arraylooper import ArrayLooper
# pylint: disable=superfluous-parens
if __name__ == '__main__':
if len(sys.argv) != 2:
print("Topology's name is not specified")
sys.exit(1)
builder = Builder()
source_1 = builder.new_source(ArrayLooper([["key1", "a"], ["key1", "b"]], sleep=1))
source_2 = builder.new_source(ArrayLooper([["key1", "c"], ["key1", "d"]], sleep=1))
source_1.join(source_2, WindowConfig.create_sliding_window(2, 1), lambda x, y: x + y).log()
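  # With a sliding window of size 2 advancing by 1, tuples sharing "key1" from
  # the two sources are joined and their values concatenated (e.g. "ac", "ad").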
runner = Runner()
config = Config()
runner.run(sys.argv[1], config, builder)
| [((1364, 1373), 'heronpy.streamlet.builder.Builder', 'Builder', ([], {}), '()\n', (1371, 1373), False, 'from heronpy.streamlet.builder import Builder\n'), ((1655, 1663), 'heronpy.streamlet.runner.Runner', 'Runner', ([], {}), '()\n', (1661, 1663), False, 'from heronpy.streamlet.runner import Runner\n'), ((1675, 1683), 'heronpy.streamlet.config.Config', 'Config', ([], {}), '()\n', (1681, 1683), False, 'from heronpy.streamlet.config import Config\n'), ((1339, 1350), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1347, 1350), False, 'import sys\n'), ((1407, 1459), 'heronpy.connectors.mock.arraylooper.ArrayLooper', 'ArrayLooper', (["[['key1', 'a'], ['key1', 'b']]"], {'sleep': '(1)'}), "([['key1', 'a'], ['key1', 'b']], sleep=1)\n", (1418, 1459), False, 'from heronpy.connectors.mock.arraylooper import ArrayLooper\n'), ((1494, 1546), 'heronpy.connectors.mock.arraylooper.ArrayLooper', 'ArrayLooper', (["[['key1', 'c'], ['key1', 'd']]"], {'sleep': '(1)'}), "([['key1', 'c'], ['key1', 'd']], sleep=1)\n", (1505, 1546), False, 'from heronpy.connectors.mock.arraylooper import ArrayLooper\n'), ((1575, 1615), 'heronpy.streamlet.windowconfig.WindowConfig.create_sliding_window', 'WindowConfig.create_sliding_window', (['(2)', '(1)'], {}), '(2, 1)\n', (1609, 1615), False, 'from heronpy.streamlet.windowconfig import WindowConfig\n')] |
yolkdata/yolk-python | yolk/test/utils.py | 978d98cbe637c1309a1be766a40bb874e996c61d | from datetime import date, datetime, timedelta
from decimal import Decimal
import unittest
from dateutil.tz import tzutc
import six
from yolk import utils
class TestUtils(unittest.TestCase):
def test_timezone_utils(self):
now = datetime.now()
utcnow = datetime.now(tz=tzutc())
self.assertTrue(utils.is_naive(now))
self.assertFalse(utils.is_naive(utcnow))
fixed = utils.guess_timezone(now)
self.assertFalse(utils.is_naive(fixed))
shouldnt_be_edited = utils.guess_timezone(utcnow)
self.assertEqual(utcnow, shouldnt_be_edited)
def test_clean(self):
simple = {
'decimal': Decimal('0.142857'),
'unicode': six.u('woo'),
'date': datetime.now(),
'long': 200000000,
'integer': 1,
'float': 2.0,
'bool': True,
'str': 'woo',
'none': None
}
complicated = {
'exception': Exception('This should show up'),
'timedelta': timedelta(microseconds=20),
'list': [1, 2, 3]
}
combined = dict(simple.items())
combined.update(complicated.items())
        pre_clean_keys = list(combined.keys())
        utils.clean(combined)
        self.assertEqual(list(combined.keys()), pre_clean_keys)
def test_clean_with_dates(self):
dict_with_dates = {
'birthdate': date(1980, 1, 1),
'registration': datetime.utcnow(),
}
self.assertEqual(dict_with_dates, utils.clean(dict_with_dates))
@classmethod
def test_bytes(cls):
if six.PY3:
item = bytes(10)
else:
item = bytearray(10)
utils.clean(item)
def test_clean_fn(self):
cleaned = utils.clean({'fn': lambda x: x, 'number': 4})
self.assertEqual(cleaned['number'], 4)
if 'fn' in cleaned:
self.assertEqual(cleaned['fn'], None)
def test_remove_slash(self):
self.assertEqual('http://segment.io',
utils.remove_trailing_slash('http://segment.io/'))
self.assertEqual('http://segment.io',
utils.remove_trailing_slash('http://segment.io'))
| [((245, 259), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (257, 259), False, 'from datetime import date, datetime, timedelta\n'), ((413, 438), 'yolk.utils.guess_timezone', 'utils.guess_timezone', (['now'], {}), '(now)\n', (433, 438), False, 'from yolk import utils\n'), ((517, 545), 'yolk.utils.guess_timezone', 'utils.guess_timezone', (['utcnow'], {}), '(utcnow)\n', (537, 545), False, 'from yolk import utils\n'), ((1246, 1267), 'yolk.utils.clean', 'utils.clean', (['combined'], {}), '(combined)\n', (1257, 1267), False, 'from yolk import utils\n'), ((1712, 1729), 'yolk.utils.clean', 'utils.clean', (['item'], {}), '(item)\n', (1723, 1729), False, 'from yolk import utils\n'), ((1778, 1823), 'yolk.utils.clean', 'utils.clean', (["{'fn': lambda x: x, 'number': 4}"], {}), "({'fn': lambda x: x, 'number': 4})\n", (1789, 1823), False, 'from yolk import utils\n'), ((326, 345), 'yolk.utils.is_naive', 'utils.is_naive', (['now'], {}), '(now)\n', (340, 345), False, 'from yolk import utils\n'), ((372, 394), 'yolk.utils.is_naive', 'utils.is_naive', (['utcnow'], {}), '(utcnow)\n', (386, 394), False, 'from yolk import utils\n'), ((464, 485), 'yolk.utils.is_naive', 'utils.is_naive', (['fixed'], {}), '(fixed)\n', (478, 485), False, 'from yolk import utils\n'), ((668, 687), 'decimal.Decimal', 'Decimal', (['"""0.142857"""'], {}), "('0.142857')\n", (675, 687), False, 'from decimal import Decimal\n'), ((712, 724), 'six.u', 'six.u', (['"""woo"""'], {}), "('woo')\n", (717, 724), False, 'import six\n'), ((746, 760), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (758, 760), False, 'from datetime import date, datetime, timedelta\n'), ((1041, 1067), 'datetime.timedelta', 'timedelta', ([], {'microseconds': '(20)'}), '(microseconds=20)\n', (1050, 1067), False, 'from datetime import date, datetime, timedelta\n'), ((1417, 1433), 'datetime.date', 'date', (['(1980)', '(1)', '(1)'], {}), '(1980, 1, 1)\n', (1421, 1433), False, 'from datetime import date, datetime, timedelta\n'), ((1463, 1480), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1478, 1480), False, 'from datetime import date, datetime, timedelta\n'), ((1534, 1562), 'yolk.utils.clean', 'utils.clean', (['dict_with_dates'], {}), '(dict_with_dates)\n', (1545, 1562), False, 'from yolk import utils\n'), ((2054, 2103), 'yolk.utils.remove_trailing_slash', 'utils.remove_trailing_slash', (['"""http://segment.io/"""'], {}), "('http://segment.io/')\n", (2081, 2103), False, 'from yolk import utils\n'), ((2176, 2224), 'yolk.utils.remove_trailing_slash', 'utils.remove_trailing_slash', (['"""http://segment.io"""'], {}), "('http://segment.io')\n", (2203, 2224), False, 'from yolk import utils\n'), ((293, 300), 'dateutil.tz.tzutc', 'tzutc', ([], {}), '()\n', (298, 300), False, 'from dateutil.tz import tzutc\n')] |
kw1122/MKS66 | 09Scan/matrix.py | 25986e79077692afbc085920af1fef276c22d967 | """
A matrix will be an N sized list of 4 element lists.
Each individual list will represent an [x, y, z, 1] point.
For multiplication purposes, consider the lists like so:
x0  x1  ...  xn
y0  y1  ...  yn
z0  z1  ...  zn
1   1   ...  1
"""
import math
def make_bezier():
return [
[-1, 3, -3, 1],
[3, -6, 3, 0],
[-3, 3, 0, 0],
[1, 0, 0, 0]
]
def make_hermite():
return [
[2, -3, 0, 1],
[-2, 3, 0, 0],
[1, -2, 1, 0],
[1, -1, 0, 0]
]
def generate_curve_coefs(p0, p1, p2, p3, t):
coefs = [[p0, p1, p2, p3]]
if t == 'hermite':
curve = make_hermite()
else:
curve = make_bezier()
matrix_mult(curve, coefs)
return coefs
def make_translate(x, y, z):
t = new_matrix()
ident(t)
t[3][0] = x
t[3][1] = y
t[3][2] = z
return t
def make_scale(x, y, z):
t = new_matrix()
ident(t)
t[0][0] = x
t[1][1] = y
t[2][2] = z
return t
def make_rotX(theta):
t = new_matrix()
ident(t)
t[1][1] = math.cos(theta)
t[2][1] = -math.sin(theta)
t[1][2] = math.sin(theta)
t[2][2] = math.cos(theta)
return t
def make_rotY(theta):
t = new_matrix()
ident(t)
t[0][0] = math.cos(theta)
t[0][2] = -math.sin(theta)
t[2][0] = math.sin(theta)
t[2][2] = math.cos(theta)
return t
def make_rotZ(theta):
t = new_matrix()
ident(t)
t[0][0] = math.cos(theta)
t[1][0] = -math.sin(theta)
t[0][1] = math.sin(theta)
t[1][1] = math.cos(theta)
return t
#print the matrix such that it looks like
#the template in the top comment
def print_matrix(matrix):
s = ''
for r in range(len(matrix[0])):
for c in range(len(matrix)):
s+= str(matrix[c][r]) + ' '
s += '\n'
print (s)
#turn the parameter matrix into an identity matrix
#you may assume matrix is square
def ident(matrix):
for r in range(len(matrix[0])):
for c in range(len(matrix)):
if r == c:
matrix[c][r] = 1
else:
matrix[c][r] = 0
#multiply m1 by m2, modifying m2 to be the product
#m1 * m2 -> m2
def matrix_mult(m1, m2):
point = 0
for row in m2:
#get a copy of the next point
tmp = row[:]
for r in range(4):
m2[point][r] = (m1[0][r] * tmp[0] +
m1[1][r] * tmp[1] +
m1[2][r] * tmp[2] +
m1[3][r] * tmp[3])
point += 1
def new_matrix(rows = 4, cols = 4):
m = []
for c in range(cols):
m.append([])
for r in range(rows):
m[c].append(0)
return m
| [((1101, 1116), 'math.cos', 'math.cos', (['theta'], {}), '(theta)\n', (1109, 1116), False, 'import math\n'), ((1164, 1179), 'math.sin', 'math.sin', (['theta'], {}), '(theta)\n', (1172, 1179), False, 'import math\n'), ((1195, 1210), 'math.cos', 'math.cos', (['theta'], {}), '(theta)\n', (1203, 1210), False, 'import math\n'), ((1301, 1316), 'math.cos', 'math.cos', (['theta'], {}), '(theta)\n', (1309, 1316), False, 'import math\n'), ((1364, 1379), 'math.sin', 'math.sin', (['theta'], {}), '(theta)\n', (1372, 1379), False, 'import math\n'), ((1395, 1410), 'math.cos', 'math.cos', (['theta'], {}), '(theta)\n', (1403, 1410), False, 'import math\n'), ((1501, 1516), 'math.cos', 'math.cos', (['theta'], {}), '(theta)\n', (1509, 1516), False, 'import math\n'), ((1564, 1579), 'math.sin', 'math.sin', (['theta'], {}), '(theta)\n', (1572, 1579), False, 'import math\n'), ((1595, 1610), 'math.cos', 'math.cos', (['theta'], {}), '(theta)\n', (1603, 1610), False, 'import math\n'), ((1133, 1148), 'math.sin', 'math.sin', (['theta'], {}), '(theta)\n', (1141, 1148), False, 'import math\n'), ((1333, 1348), 'math.sin', 'math.sin', (['theta'], {}), '(theta)\n', (1341, 1348), False, 'import math\n'), ((1533, 1548), 'math.sin', 'math.sin', (['theta'], {}), '(theta)\n', (1541, 1548), False, 'import math\n')] |
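# Illustrative sketch (assumed usage, not from the original file): applies the
# point-list convention from the docstring above -- each entry is one
# [x, y, z, 1] point, and matrix_mult(m1, m2) overwrites m2 with m1 * m2.
points = [[0, 0, 0, 1], [1, 2, 3, 1]]
t = make_translate(2, 5, 0)
matrix_mult(t, points)
print(points)  # [[2, 5, 0, 1], [3, 7, 3, 1]]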
Nekmo/spice | tests/test.py | 717a2cc24ad969e1caec2aabeffc30a796c6ec91 | from bs4 import BeautifulSoup
import requests
import sys, os
from time import sleep
sys.path.insert(0, '/home/may/Dropbox/Programming/spice/')
import spice_api as spice
def main():
creds = spice.load_auth_from_file('auth')
print(creds)
results = spice.search('Re:Zero Kara Hajimeru Isekai Seikatsu', spice.get_medium('anime'), creds)
print(results[0].title)
souma = spice.search_id(1, spice.get_medium('manga'), creds)
print(souma.raw_data)
print(souma.title)
print(souma.chapters)
print(souma.volumes)
re_zero_data = spice.get_blank(spice.get_medium('anime'))
re_zero_data.episodes = 0
re_zero_data.status = spice.get_status('reading')
re_zero_data.score = 8
re_zero_data.tags = ['this the first time a show that made me cringe']
shokugeki_data = spice.get_blank(spice.get_medium('manga'))
shokugeki_data.chapters = 13
shokugeki_data.volumes = 1
shokugeki_data.status = 1
shokugeki_data.score = 8
spice.update(shokugeki_data, 45757, spice.get_medium('manga'), creds)
anime_list = spice.get_list(spice.get_medium('ANIME'), 'Utagai-', creds)
print(anime_list.avg_score())
print(anime_list.median_score())
print(anime_list.mode_score())
print(anime_list.extremes())
print(anime_list.p_stddev())
print(anime_list.p_var())
print(anime_list.get_num_status(1))
print(anime_list.get_total())
print(anime_list.get_days())
print(anime_list.exists(11734))
print(len(anime_list.get_ids()))
print(len(anime_list.get_titles()))
print(anime_list.get_status(1))
print(anime_list.get_score(10))
print(anime_list.exists_as_status(11734, 1))
print(anime_list.score_diff())
anime_list2 = spice.get_list(spice.get_medium('ANIME'), 'Pickleplatter', creds)
print("Similarity coefficient: {}".format(anime_list.compatibility(anime_list2)))
if __name__ == '__main__':
main()
| [((86, 144), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""/home/may/Dropbox/Programming/spice/"""'], {}), "(0, '/home/may/Dropbox/Programming/spice/')\n", (101, 144), False, 'import sys, os\n'), ((197, 230), 'spice_api.load_auth_from_file', 'spice.load_auth_from_file', (['"""auth"""'], {}), "('auth')\n", (222, 230), True, 'import spice_api as spice\n'), ((661, 688), 'spice_api.get_status', 'spice.get_status', (['"""reading"""'], {}), "('reading')\n", (677, 688), True, 'import spice_api as spice\n'), ((316, 341), 'spice_api.get_medium', 'spice.get_medium', (['"""anime"""'], {}), "('anime')\n", (332, 341), True, 'import spice_api as spice\n'), ((409, 434), 'spice_api.get_medium', 'spice.get_medium', (['"""manga"""'], {}), "('manga')\n", (425, 434), True, 'import spice_api as spice\n'), ((578, 603), 'spice_api.get_medium', 'spice.get_medium', (['"""anime"""'], {}), "('anime')\n", (594, 603), True, 'import spice_api as spice\n'), ((829, 854), 'spice_api.get_medium', 'spice.get_medium', (['"""manga"""'], {}), "('manga')\n", (845, 854), True, 'import spice_api as spice\n'), ((1019, 1044), 'spice_api.get_medium', 'spice.get_medium', (['"""manga"""'], {}), "('manga')\n", (1035, 1044), True, 'import spice_api as spice\n'), ((1086, 1111), 'spice_api.get_medium', 'spice.get_medium', (['"""ANIME"""'], {}), "('ANIME')\n", (1102, 1111), True, 'import spice_api as spice\n'), ((1742, 1767), 'spice_api.get_medium', 'spice.get_medium', (['"""ANIME"""'], {}), "('ANIME')\n", (1758, 1767), True, 'import spice_api as spice\n')] |
prog-serhii/MyMoney_v2 | backend/project/settings.py | 8d2aa3ec0497c7afd1a25bb9266bfc405e9c9397 | import os
from pathlib import Path
from datetime import timedelta
from celery.schedules import crontab
from django.utils.translation import gettext_lazy as _
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = os.environ.get(
'SECRET_KEY',
default='m8/o)3$n^03w)mxgvnrxb46__@6qnte9l0dkb7$6%lpbcox+v!'
)
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = int(os.environ.get('DEBUG', default=1))
# 'DJANGO_ALLOWED_HOSTS' should be a single string of hosts with a space between each.
# For example: 'DJANGO_ALLOWED_HOSTS=localhost 127.0.0.1 [::1]'
ALLOWED_HOSTS = os.environ.get("DJANGO_ALLOWED_HOSTS", default='*').split(" ")
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'django_filters',
'rosetta',
'djoser',
'djmoney',
'djmoney.contrib.exchange',
'corsheaders',
'apps.account.apps.AccountConfig',
'apps.transaction.apps.TransactionConfig',
'apps.common.apps.CommonConfig',
'apps.authentication.apps.AuthenticationConfig'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware'
]
ROOT_URLCONF = 'project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages'
],
},
},
]
WSGI_APPLICATION = 'project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': os.environ.get('SQL_ENGINE', 'django.db.backends.sqlite3'),
'NAME': os.environ.get('SQL_DATABASE', os.path.join(BASE_DIR, 'db.sqlite3')),
'USER': os.environ.get('SQL_USER'),
'PASSWORD': os.environ.get('SQL_PASSWORD'),
'HOST': os.environ.get('SQL_HOST'),
'PORT': os.environ.get('SQL_PORT'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
AUTH_USER_MODEL = 'authentication.User'
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'uk'
LANGUAGES = [
('en', _('English')),
('uk', _('Ukrainian'))
]
LOCALE_PATHS = (
BASE_DIR / 'locale',
)
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = "/static/"
STATIC_ROOT = os.path.join(BASE_DIR, "static")
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# ------------------------------------------ #
# django-money #
# ------------------------------------------ #
DEFAULT_CURRENCY = 'EUR'
EXCHANGE_BACKEND = 'djmoney.contrib.exchange.backends.FixerBackend'
FIXER_ACCESS_KEY = 'f5a898dbf45d15d8aa6eca7af3f372e1'
# ------------------------------------------ #
# Celery #
# ------------------------------------------ #
CELERY_BROKER_URL = os.environ.get('REDIS_LOCATION', 'redis://127.0.0.1:6379')
CELERY_RESULT_BACKEND = os.environ.get('REDIS_LOCATION', 'redis://127.0.0.1:6379')
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TASK_SERIALIZER = 'json'
CELERY_BEAT_SCHEDULE = {
'update_rates': {
'task': 'apps.user.tasks.update_rates',
'schedule': crontab(hour="*/1"),
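        # Note: celery's crontab defaults minute to '*', so hour="*/1" alone
        # fires every minute of every hour; a truly hourly schedule would
        # typically pass crontab(minute=0).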
}
}
# ------------------------------------------ #
# Django REST Framework #
# ------------------------------------------ #
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.BasicAuthentication',
'rest_framework_simplejwt.authentication.JWTAuthentication',
),
'EXCEPTION_HANDLER': 'apps.common.errors.custom_exception_handler',
'TEST_REQUEST_DEFAULT_FORMAT': 'json',
}
# ------------------------------------------ #
# djoser #
# ------------------------------------------ #
DJOSER = {
# Name of a field in User model to be used as login field
'LOGIN_FIELD': 'email',
# If True user will be required to click activation
# link sent in email after:
# * creating an account
# * updating their email
'SEND_ACTIVATION_EMAIL': True,
'ACTIVATION_URL': '/activate/{uid}/{token}',
'PASSWORD_RESET_CONFIRM_URL': 'password/reset/confirm/{uid}/{token}',
'USERNAME_RESET_CONFIRM_URL': 'email/reset/confirm/{uid}/{token}',
# If True, you need to pass re_password to /users/
# endpoint, to validate password equality.
'USER_CREATE_PASSWORD_RETYPE': True,
'PASSWORD_RESET_CONFIRM_RETYPE': True,
# If True, register or activation endpoint
# will send confirmation email to user.
'SEND_CONFIRMATION_EMAIL': True,
'SERIALIZERS': {
'user_create': 'apps.user.serializers.UserCreateSerializer'
}
}
# ------------------------------------------ #
# Simple JWT #
# ------------------------------------------ #
SIMPLE_JWT = {
'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5),
'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
'ROTATE_REFRESH_TOKENS': True,
'AUTH_HEADER_TYPES': ('JWT',),
'USER_ID_FIELD': 'id',
'USER_ID_CLAIM': 'id',
}
CORS_ALLOW_ALL_ORIGINS = True
| [((292, 387), 'os.environ.get', 'os.environ.get', (['"""SECRET_KEY"""'], {'default': '"""m8/o)3$n^03w)mxgvnrxb46__@6qnte9l0dkb7$6%lpbcox+v!"""'}), "('SECRET_KEY', default=\n 'm8/o)3$n^03w)mxgvnrxb46__@6qnte9l0dkb7$6%lpbcox+v!')\n", (306, 387), False, 'import os\n'), ((3704, 3736), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static"""'], {}), "(BASE_DIR, 'static')\n", (3716, 3736), False, 'import os\n'), ((4255, 4313), 'os.environ.get', 'os.environ.get', (['"""REDIS_LOCATION"""', '"""redis://127.0.0.1:6379"""'], {}), "('REDIS_LOCATION', 'redis://127.0.0.1:6379')\n", (4269, 4313), False, 'import os\n'), ((4338, 4396), 'os.environ.get', 'os.environ.get', (['"""REDIS_LOCATION"""', '"""redis://127.0.0.1:6379"""'], {}), "('REDIS_LOCATION', 'redis://127.0.0.1:6379')\n", (4352, 4396), False, 'import os\n'), ((472, 506), 'os.environ.get', 'os.environ.get', (['"""DEBUG"""'], {'default': '(1)'}), "('DEBUG', default=1)\n", (486, 506), False, 'import os\n'), ((6420, 6440), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(5)'}), '(minutes=5)\n', (6429, 6440), False, 'from datetime import timedelta\n'), ((6472, 6489), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (6481, 6489), False, 'from datetime import timedelta\n'), ((676, 727), 'os.environ.get', 'os.environ.get', (['"""DJANGO_ALLOWED_HOSTS"""'], {'default': '"""*"""'}), "('DJANGO_ALLOWED_HOSTS', default='*')\n", (690, 727), False, 'import os\n'), ((2375, 2433), 'os.environ.get', 'os.environ.get', (['"""SQL_ENGINE"""', '"""django.db.backends.sqlite3"""'], {}), "('SQL_ENGINE', 'django.db.backends.sqlite3')\n", (2389, 2433), False, 'import os\n'), ((2537, 2563), 'os.environ.get', 'os.environ.get', (['"""SQL_USER"""'], {}), "('SQL_USER')\n", (2551, 2563), False, 'import os\n'), ((2585, 2615), 'os.environ.get', 'os.environ.get', (['"""SQL_PASSWORD"""'], {}), "('SQL_PASSWORD')\n", (2599, 2615), False, 'import os\n'), ((2633, 2659), 'os.environ.get', 'os.environ.get', (['"""SQL_HOST"""'], {}), "('SQL_HOST')\n", (2647, 2659), False, 'import os\n'), ((2677, 2703), 'os.environ.get', 'os.environ.get', (['"""SQL_PORT"""'], {}), "('SQL_PORT')\n", (2691, 2703), False, 'import os\n'), ((3405, 3417), 'django.utils.translation.gettext_lazy', '_', (['"""English"""'], {}), "('English')\n", (3406, 3417), True, 'from django.utils.translation import gettext_lazy as _\n'), ((3431, 3445), 'django.utils.translation.gettext_lazy', '_', (['"""Ukrainian"""'], {}), "('Ukrainian')\n", (3432, 3445), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4625, 4644), 'celery.schedules.crontab', 'crontab', ([], {'hour': '"""*/1"""'}), "(hour='*/1')\n", (4632, 4644), False, 'from celery.schedules import crontab\n'), ((2482, 2518), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""db.sqlite3"""'], {}), "(BASE_DIR, 'db.sqlite3')\n", (2494, 2518), False, 'import os\n'), ((238, 252), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (242, 252), False, 'from pathlib import Path\n')] |
jeanmarc2019/PTHacks2019-Planning | app/flaskApp/config.py | bc0c71588187fde8498494b3e74728c09de56f18 | import configparser
import os
dir_path = os.path.dirname(os.path.realpath(__file__))
dir_path += '/cfg.ini'
class Configuration(object):
def __init__(self,debug=False):
section = "Flask-debug" if debug else "Flask"
cfg = configparser.ConfigParser()
cfg.read(dir_path if debug else "/var/www/html/flaskApp/cfg.ini")
self.debug = cfg.getboolean(section, "DEBUG")
self.csrf_enabled = cfg.getboolean(section,"CSRF_ENABLED")
self.threads_per_page = cfg.getint(section,"THREADS_PER_PAGE")
self.port = cfg.getint(section,"PORT")
self.host = cfg.get(section,"HOST")
| [((62, 88), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (78, 88), False, 'import os\n'), ((256, 283), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (281, 283), False, 'import configparser\n')] |
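# Hypothetical usage sketch (assumed, not from the original repo); requires a
# cfg.ini that defines the [Flask] / [Flask-debug] sections read above.
conf = Configuration(debug=True)
print(conf.host, conf.port, conf.debug, conf.threads_per_page)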
cleo4zheng/neutron | neutron/db/models/l3ha.py | 6d65318308edfd984bdd0ff1ac7fef9486a040f7 | # Copyright (C) 2014 eNovance SAS <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from neutron_lib.db import model_base
import sqlalchemy as sa
from sqlalchemy import orm
from neutron.common import constants as n_const
from neutron.db.models import agent as agent_model
from neutron.db import models_v2
class L3HARouterAgentPortBinding(model_base.BASEV2):
"""Represent agent binding state of a HA router port.
A HA Router has one HA port per agent on which it is spawned.
This binding table stores which port is used for a HA router by a
L3 agent.
"""
__tablename__ = 'ha_router_agent_port_bindings'
__table_args__ = (
sa.UniqueConstraint(
'router_id', 'l3_agent_id',
name='uniq_ha_router_agent_port_bindings0port_id0l3_agent_id'),
model_base.BASEV2.__table_args__
)
port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id',
ondelete='CASCADE'),
nullable=False, primary_key=True)
port = orm.relationship(models_v2.Port)
router_id = sa.Column(sa.String(36), sa.ForeignKey('routers.id',
ondelete='CASCADE'),
nullable=False)
l3_agent_id = sa.Column(sa.String(36),
sa.ForeignKey("agents.id",
ondelete='CASCADE'))
agent = orm.relationship(agent_model.Agent)
state = sa.Column(sa.Enum(n_const.HA_ROUTER_STATE_ACTIVE,
n_const.HA_ROUTER_STATE_STANDBY,
name='l3_ha_states'),
default=n_const.HA_ROUTER_STATE_STANDBY,
server_default=n_const.HA_ROUTER_STATE_STANDBY)
class L3HARouterNetwork(model_base.BASEV2, model_base.HasProjectPrimaryKey):
"""Host HA network for a tenant.
One HA Network is used per tenant, all HA router ports are created
on this network.
"""
__tablename__ = 'ha_router_networks'
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
nullable=False, primary_key=True)
network = orm.relationship(models_v2.Network)
class L3HARouterVRIdAllocation(model_base.BASEV2):
"""VRID allocation per HA network.
Keep a track of the VRID allocations per HA network.
"""
__tablename__ = 'ha_router_vrid_allocations'
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
nullable=False, primary_key=True)
vr_id = sa.Column(sa.Integer(), nullable=False, primary_key=True)
| [((1579, 1611), 'sqlalchemy.orm.relationship', 'orm.relationship', (['models_v2.Port'], {}), '(models_v2.Port)\n', (1595, 1611), False, 'from sqlalchemy import orm\n'), ((1974, 2009), 'sqlalchemy.orm.relationship', 'orm.relationship', (['agent_model.Agent'], {}), '(agent_model.Agent)\n', (1990, 2009), False, 'from sqlalchemy import orm\n'), ((2775, 2810), 'sqlalchemy.orm.relationship', 'orm.relationship', (['models_v2.Network'], {}), '(models_v2.Network)\n', (2791, 2810), False, 'from sqlalchemy import orm\n'), ((1187, 1302), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""router_id"""', '"""l3_agent_id"""'], {'name': '"""uniq_ha_router_agent_port_bindings0port_id0l3_agent_id"""'}), "('router_id', 'l3_agent_id', name=\n 'uniq_ha_router_agent_port_bindings0port_id0l3_agent_id')\n", (1206, 1302), True, 'import sqlalchemy as sa\n'), ((1395, 1408), 'sqlalchemy.String', 'sa.String', (['(36)'], {}), '(36)\n', (1404, 1408), True, 'import sqlalchemy as sa\n'), ((1410, 1455), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""ports.id"""'], {'ondelete': '"""CASCADE"""'}), "('ports.id', ondelete='CASCADE')\n", (1423, 1455), True, 'import sqlalchemy as sa\n'), ((1639, 1652), 'sqlalchemy.String', 'sa.String', (['(36)'], {}), '(36)\n', (1648, 1652), True, 'import sqlalchemy as sa\n'), ((1654, 1701), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""routers.id"""'], {'ondelete': '"""CASCADE"""'}), "('routers.id', ondelete='CASCADE')\n", (1667, 1701), True, 'import sqlalchemy as sa\n'), ((1829, 1842), 'sqlalchemy.String', 'sa.String', (['(36)'], {}), '(36)\n', (1838, 1842), True, 'import sqlalchemy as sa\n'), ((1872, 1918), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""agents.id"""'], {'ondelete': '"""CASCADE"""'}), "('agents.id', ondelete='CASCADE')\n", (1885, 1918), True, 'import sqlalchemy as sa\n'), ((2033, 2130), 'sqlalchemy.Enum', 'sa.Enum', (['n_const.HA_ROUTER_STATE_ACTIVE', 'n_const.HA_ROUTER_STATE_STANDBY'], {'name': '"""l3_ha_states"""'}), "(n_const.HA_ROUTER_STATE_ACTIVE, n_const.HA_ROUTER_STATE_STANDBY,\n name='l3_ha_states')\n", (2040, 2130), True, 'import sqlalchemy as sa\n'), ((2608, 2621), 'sqlalchemy.String', 'sa.String', (['(36)'], {}), '(36)\n', (2617, 2621), True, 'import sqlalchemy as sa\n'), ((2650, 2698), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""networks.id"""'], {'ondelete': '"""CASCADE"""'}), "('networks.id', ondelete='CASCADE')\n", (2663, 2698), True, 'import sqlalchemy as sa\n'), ((3047, 3060), 'sqlalchemy.String', 'sa.String', (['(36)'], {}), '(36)\n', (3056, 3060), True, 'import sqlalchemy as sa\n'), ((3089, 3137), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""networks.id"""'], {'ondelete': '"""CASCADE"""'}), "('networks.id', ondelete='CASCADE')\n", (3102, 3137), True, 'import sqlalchemy as sa\n'), ((3222, 3234), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (3232, 3234), True, 'import sqlalchemy as sa\n')] |
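# Hypothetical query sketch (assumed, not from the original file): shows how
# the binding model above is typically consulted; the SQLAlchemy session is
# the caller's responsibility.
def active_ha_bindings(session, router_id):
    # One row exists per (router, L3 agent); keep only the active binding.
    return session.query(L3HARouterAgentPortBinding).filter_by(
        router_id=router_id,
        state=n_const.HA_ROUTER_STATE_ACTIVE).all()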
anae09/electionWebService | authentication/migrate.py | 756968e5cd6db1422ae5fe8445a9e92a25953073 | from flask import Flask;
from configuration import Configuration;
from flask_migrate import Migrate, init, migrate, upgrade;
from models import database, Role, UserRole, User;
from sqlalchemy_utils import database_exists, create_database;
application = Flask(__name__);
application.config.from_object(Configuration);
migrateObject = Migrate(application, database);
done = False;
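# Retry loop: wait for the database to become reachable before migrating.
# Caveat: flask_migrate's init() typically raises once the migrations
# directory exists, so the broad except below swallows that error (and any
# other) and the loop keeps retrying until a full pass succeeds.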
while not done:
try:
if not database_exists(application.config["SQLALCHEMY_DATABASE_URI"]):
create_database(application.config["SQLALCHEMY_DATABASE_URI"]);
database.init_app(application);
with application.app_context() as context:
init();
migrate(message="Production migration");
upgrade();
adminRole = Role(name="administrator");
userRole = Role(name="user");
database.session.add(adminRole);
database.session.add(userRole);
database.session.commit();
admin = User(
jmbg="0000000000000",
forename="admin",
surname="admin",
email="[email protected]",
password="1"
);
database.session.add(admin);
database.session.commit();
userRole = UserRole(
userId=admin.id,
roleId=adminRole.id
);
database.session.add(userRole);
database.session.commit();
done = True;
except Exception as err:
print(err);
| [((254, 269), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (259, 269), False, 'from flask import Flask\n'), ((335, 365), 'flask_migrate.Migrate', 'Migrate', (['application', 'database'], {}), '(application, database)\n', (342, 365), False, 'from flask_migrate import Migrate, init, migrate, upgrade\n'), ((572, 602), 'models.database.init_app', 'database.init_app', (['application'], {}), '(application)\n', (589, 602), False, 'from models import database, Role, UserRole, User\n'), ((423, 485), 'sqlalchemy_utils.database_exists', 'database_exists', (["application.config['SQLALCHEMY_DATABASE_URI']"], {}), "(application.config['SQLALCHEMY_DATABASE_URI'])\n", (438, 485), False, 'from sqlalchemy_utils import database_exists, create_database\n'), ((499, 561), 'sqlalchemy_utils.create_database', 'create_database', (["application.config['SQLALCHEMY_DATABASE_URI']"], {}), "(application.config['SQLALCHEMY_DATABASE_URI'])\n", (514, 561), False, 'from sqlalchemy_utils import database_exists, create_database\n'), ((668, 674), 'flask_migrate.init', 'init', ([], {}), '()\n', (672, 674), False, 'from flask_migrate import Migrate, init, migrate, upgrade\n'), ((688, 727), 'flask_migrate.migrate', 'migrate', ([], {'message': '"""Production migration"""'}), "(message='Production migration')\n", (695, 727), False, 'from flask_migrate import Migrate, init, migrate, upgrade\n'), ((741, 750), 'flask_migrate.upgrade', 'upgrade', ([], {}), '()\n', (748, 750), False, 'from flask_migrate import Migrate, init, migrate, upgrade\n'), ((777, 803), 'models.Role', 'Role', ([], {'name': '"""administrator"""'}), "(name='administrator')\n", (781, 803), False, 'from models import database, Role, UserRole, User\n'), ((828, 845), 'models.Role', 'Role', ([], {'name': '"""user"""'}), "(name='user')\n", (832, 845), False, 'from models import database, Role, UserRole, User\n'), ((860, 891), 'models.database.session.add', 'database.session.add', (['adminRole'], {}), '(adminRole)\n', (880, 891), False, 'from models import database, Role, UserRole, User\n'), ((905, 935), 'models.database.session.add', 'database.session.add', (['userRole'], {}), '(userRole)\n', (925, 935), False, 'from models import database, Role, UserRole, User\n'), ((949, 974), 'models.database.session.commit', 'database.session.commit', ([], {}), '()\n', (972, 974), False, 'from models import database, Role, UserRole, User\n'), ((997, 1102), 'models.User', 'User', ([], {'jmbg': '"""0000000000000"""', 'forename': '"""admin"""', 'surname': '"""admin"""', 'email': '"""[email protected]"""', 'password': '"""1"""'}), "(jmbg='0000000000000', forename='admin', surname='admin', email=\n '[email protected]', password='1')\n", (1001, 1102), False, 'from models import database, Role, UserRole, User\n'), ((1206, 1233), 'models.database.session.add', 'database.session.add', (['admin'], {}), '(admin)\n', (1226, 1233), False, 'from models import database, Role, UserRole, User\n'), ((1247, 1272), 'models.database.session.commit', 'database.session.commit', ([], {}), '()\n', (1270, 1272), False, 'from models import database, Role, UserRole, User\n'), ((1298, 1344), 'models.UserRole', 'UserRole', ([], {'userId': 'admin.id', 'roleId': 'adminRole.id'}), '(userId=admin.id, roleId=adminRole.id)\n', (1306, 1344), False, 'from models import database, Role, UserRole, User\n'), ((1405, 1435), 'models.database.session.add', 'database.session.add', (['userRole'], {}), '(userRole)\n', (1425, 1435), False, 'from models import database, Role, UserRole, User\n'), ((1449, 1474), 'models.database.session.commit', 'database.session.commit', ([], {}), '()\n', (1472, 1474), False, 'from models import database, Role, UserRole, User\n')]
gitter-lab/pria-ams-enamine | output/ensemble_analysis.py | b37bc7edf3c21af6653267ecd4bb9fd232eeb575 | from __future__ import print_function
import os
import json
import numpy as np
def extract(file_path):
if not os.path.isfile(file_path):
return -1, -1, -1
with open(file_path, 'r') as f:
lines = f.readlines()
test_roc, test_precision, test_NEF = -1, -1, -1
for line in lines:
if 'test precision' in line:
line = line.strip().split(':')
test_precision = float(line[1])
if 'test roc' in line:
line = line.strip().split(':')
test_roc = float(line[1])
if 'ratio: 0.01, NEF:' in line:
line = line.strip().replace('NEF:', '').split(',')
test_NEF = float(line[1])
return test_roc, test_precision, test_NEF
if __name__ == '__main__':
model_list = [
'random_forest_classification',
'xgboost_classification', 'xgboost_regression',
'single_deep_classification', 'single_deep_regression'
]
model_process_num_list = {
'random_forest_classification': [139, 69, 111, 212, 210, 148, 28, 61, 124, 130, 131, 141, 14, 38, 165, 65, 123, 94, 3, 88, 72],
'xgboost_classification': [140, 967, 960, 807, 263, 694, 440, 47, 116, 792, 663, 32, 564, 950, 735, 84, 364, 605, 431, 55, 388],
'xgboost_regression': [187, 6, 514, 507, 880, 440, 605, 718, 754, 409, 586, 214, 753, 65, 294, 911, 721, 81, 321, 545, 280],
'single_deep_classification': [356, 404, 215, 93, 254, 88, 423, 47, 363, 132, 5, 385, 370, 29, 415, 54, 124, 183, 180, 416],
'single_deep_regression': [199, 323, 114, 123, 47, 175, 17, 178, 106, 265, 67, 157, 369, 115, 191, 20, 27, 108, 270, 45],
'ensemble': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
}
for model in model_list:
print('Model: {}'.format(model))
number = len(model_process_num_list[model])
hyper_parameter_result_roc = []
hyper_parameter_result_precision = []
hyper_parameter_result_NEF = []
for running_process in model_process_num_list[model]:
test_roc_list, test_precision_list, test_NEF_list = [], [], []
for idx in range(4):
file_path = '{}/{}_{}_{}.out'.format(model, model, running_process, idx)
test_roc, test_precision, test_NEF = extract(file_path)
if test_roc == -1 and test_precision == -1:
print('missing index: {}'.format(running_process))
if test_roc != -1:
test_roc_list.append(test_roc)
if test_precision != -1:
test_precision_list.append(test_precision)
if test_NEF != -1:
test_NEF_list.append(test_NEF)
hyper_parameter_result_roc.append(np.mean(test_roc_list))
hyper_parameter_result_precision.append(np.mean(test_precision_list))
hyper_parameter_result_NEF.append(np.mean(test_NEF_list))
for running_process, roc, pr, NEF in zip(model_process_num_list[model], hyper_parameter_result_roc, hyper_parameter_result_precision, hyper_parameter_result_NEF):
print('{}\t{}\t{}\t{}'.format(running_process, roc, pr, NEF))
print()
print('On The Last Folder')
model_list = [
'random_forest_classification',
'xgboost_classification', 'xgboost_regression',
'single_deep_classification', 'single_deep_regression',
'ensemble'
]
for model in model_list:
print('Model: {}'.format(model))
number = len(model_process_num_list[model])
for running_process in model_process_num_list[model]:
if model == 'ensemble':
file_path = '{}/{}.out'.format(model, running_process)
else:
file_path = '{}/{}_{}_4.out'.format(model, model, running_process)
test_roc, test_precision, test_NEF = extract(file_path)
print('{}\t{}'.format(running_process, test_NEF))
print()
| [((127, 152), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (141, 152), False, 'import os\n'), ((2821, 2843), 'numpy.mean', 'np.mean', (['test_roc_list'], {}), '(test_roc_list)\n', (2828, 2843), True, 'import numpy as np\n'), ((2898, 2926), 'numpy.mean', 'np.mean', (['test_precision_list'], {}), '(test_precision_list)\n', (2905, 2926), True, 'import numpy as np\n'), ((2975, 2997), 'numpy.mean', 'np.mean', (['test_NEF_list'], {}), '(test_NEF_list)\n', (2982, 2997), True, 'import numpy as np\n')]
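# Hypothetical sanity check (assumed; the file name and metric values are
# made up): illustrates the .out log format that extract() parses.
with open('demo.out', 'w') as f:
    f.write('test roc:0.91\ntest precision:0.33\nratio: 0.01, NEF:0.45\n')
print(extract('demo.out'))  # -> (0.91, 0.33, 0.45)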