code | apis | extract_api
---|---|---
import glob
import os
import time
import cv2
import numpy as np
from Pre_Processing import frameManipulator
commands = ['bin', 'lay', 'place', 'set']
prepositions = ['at', 'by', 'in', 'with']
colors = ['blue', 'green', 'red', 'white']
adverbs = ['again', 'now', 'please', 'soon']
alphabet = [chr(x) for x in range(ord('a'), ord('z') + 1)]
numbers = ['one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine']
categories = ['Adverb', 'Alphabet', 'Commands', 'Colors', 'Numbers', 'Prepositions']
commonCNNDataPath = 'D:/CNN-Test-Images/'
def getVideoFrames(videoPath):
"""Function to return a video's frames in a list"""
vidcap = cv2.VideoCapture(videoPath)
success, image = vidcap.read()
allFrames = []
while success:
allFrames.append(image)
success, image = vidcap.read()
return allFrames
def stackFramesToImage(listOfFrames):
"""Function to concat frames into a single picture"""
if len(listOfFrames) < frameManipulator.FPS:
return None
newList = [np.hstack(listOfFrames[:5]), np.hstack(listOfFrames[5:10]), np.hstack(listOfFrames[10:15]),
np.hstack(listOfFrames[15:20]), np.hstack(listOfFrames[20:25]), np.hstack(listOfFrames[25:30])]
return np.vstack(newList)
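# Shape check (assuming frameManipulator.FPS == 30 and 150x100 lip crops):
# 30 frames are tiled as 6 rows of 5, so the stacked image is
# (6*100) x (5*150) = 600 x 750 pixels.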
def saveImage(image, imagePath):
"""Function to save an image in grayscale to a specific path"""
if len(image.shape) == 3:
image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
index = len(os.listdir(imagePath))
imagePath = imagePath + '/{}.jpg'.format(index)
cv2.imwrite(imagePath, image)
def createCNNDataDirectories():
"""Function to create label directories for each category for training the CNN"""
for command in commands:
dirName = commonCNNDataPath + '/Commands/{}/'.format(command)
if not os.path.exists(dirName):
os.makedirs(dirName)
for preposition in prepositions:
dirName = commonCNNDataPath + '/Prepositions/{}/'.format(preposition)
if not os.path.exists(dirName):
os.makedirs(dirName)
for color in colors:
dirName = commonCNNDataPath + '/Colors/{}/'.format(color)
if not os.path.exists(dirName):
os.makedirs(dirName)
for adverb in adverbs:
dirName = commonCNNDataPath + '/Adverb/{}/'.format(adverb)
if not os.path.exists(dirName):
os.makedirs(dirName)
for letter in alphabet:
dirName = commonCNNDataPath + '/Alphabet/{}/'.format(letter)
if not os.path.exists(dirName):
os.makedirs(dirName)
for number in numbers:
dirName = commonCNNDataPath + '/Numbers/{}/'.format(number)
if not os.path.exists(dirName):
os.makedirs(dirName)
def extractLipsHaarCascade(haarDetector, frame):
"""Function to extract lips from a frame"""
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = haarDetector.detectMultiScale(gray, 1.3, 5)
if len(faces) == 0:
roi_gray = cv2.resize(gray, (150, 100))
return roi_gray
for (x, y, w, h) in faces:
roi_gray = gray[y + (2 * h // 3):y + h, x:x + w]
roi_gray = cv2.resize(roi_gray, (150, 100))
return roi_gray
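# Note: the slice y + (2*h//3) .. y + h above keeps only the lower third of
# each detected face (roughly the mouth region) before resizing to 150x100.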
def prepareSingleVideoForCNN(path, haarDetector):
"""Function to prepare a single video to be redy for CNN training"""
vidData = frameManipulator.getVideoDataFromPath(path)
videoFrames = getVideoFrames(path)
videoFrames = [extractLipsHaarCascade(haarDetector, x) for x in videoFrames]
    stackedImage = stackFramesToImage(videoFrames) if videoFrames else None
    if stackedImage is not None:  # guards against videos shorter than FPS frames
        videoLabel = vidData.identifier.split('_')[0]
        imageSavePath = commonCNNDataPath + vidData.category + '/{}'.format(videoLabel)
        saveImage(stackedImage, imageSavePath)
    else:
        print("Error in processing video with path: {}".format(path))
def prepareDataSetForCNN(firstSpeaker, secondSpeaker):
"""Function that traverses the whole dataset and creates new directory for the CNN"""
detector = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')
for i in range(firstSpeaker, secondSpeaker):
for category in categories:
sTime = time.time()
videoPath = "../New-DataSet-Videos/S{}/{}/".format(i, category) + "*.mp4"
vidList = glob.glob(videoPath)
            vidList = [x.replace("\\", '/') for x in vidList]
for j in vidList:
prepareSingleVideoForCNN(j, detector)
print("Finished category : {}, for speaker: {}".format(category, i))
print("In:{} Seconds".format(time.time() - sTime))
print("Finished Speaker {}".format(i))
def main():
startTime = time.time()
firstSpeaker = 23
secondSpeaker = 24
createCNNDataDirectories()
prepareDataSetForCNN(firstSpeaker, secondSpeaker)
print("Finished preparing the videos in {} seconds".format(time.time() - startTime))
if __name__ == "__main__":
main()
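# Assumed layout, inferred from the paths above: input videos live under
# ../New-DataSet-Videos/S<speaker>/<Category>/*.mp4 and the stacked grayscale
# images are written to D:/CNN-Test-Images/<Category>/<label>/<index>.jpg.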
|
[
"cv2.resize",
"os.makedirs",
"cv2.cvtColor",
"cv2.imwrite",
"Pre_Processing.frameManipulator.getVideoDataFromPath",
"os.path.exists",
"time.time",
"cv2.VideoCapture",
"numpy.hstack",
"cv2.CascadeClassifier",
"glob.glob",
"os.listdir",
"numpy.vstack"
] |
[((654, 681), 'cv2.VideoCapture', 'cv2.VideoCapture', (['videoPath'], {}), '(videoPath)\n', (670, 681), False, 'import cv2\n'), ((1243, 1261), 'numpy.vstack', 'np.vstack', (['newList'], {}), '(newList)\n', (1252, 1261), True, 'import numpy as np\n'), ((1546, 1575), 'cv2.imwrite', 'cv2.imwrite', (['imagePath', 'image'], {}), '(imagePath, image)\n', (1557, 1575), False, 'import cv2\n'), ((2840, 2879), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2GRAY'], {}), '(frame, cv2.COLOR_BGR2GRAY)\n', (2852, 2879), False, 'import cv2\n'), ((3153, 3185), 'cv2.resize', 'cv2.resize', (['roi_gray', '(150, 100)'], {}), '(roi_gray, (150, 100))\n', (3163, 3185), False, 'import cv2\n'), ((3345, 3388), 'Pre_Processing.frameManipulator.getVideoDataFromPath', 'frameManipulator.getVideoDataFromPath', (['path'], {}), '(path)\n', (3382, 3388), False, 'from Pre_Processing import frameManipulator\n'), ((4023, 4111), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (["(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')"], {}), "(cv2.data.haarcascades +\n 'haarcascade_frontalface_default.xml')\n", (4044, 4111), False, 'import cv2\n'), ((4775, 4786), 'time.time', 'time.time', ([], {}), '()\n', (4784, 4786), False, 'import time\n'), ((1029, 1056), 'numpy.hstack', 'np.hstack', (['listOfFrames[:5]'], {}), '(listOfFrames[:5])\n', (1038, 1056), True, 'import numpy as np\n'), ((1058, 1087), 'numpy.hstack', 'np.hstack', (['listOfFrames[5:10]'], {}), '(listOfFrames[5:10])\n', (1067, 1087), True, 'import numpy as np\n'), ((1089, 1119), 'numpy.hstack', 'np.hstack', (['listOfFrames[10:15]'], {}), '(listOfFrames[10:15])\n', (1098, 1119), True, 'import numpy as np\n'), ((1136, 1166), 'numpy.hstack', 'np.hstack', (['listOfFrames[15:20]'], {}), '(listOfFrames[15:20])\n', (1145, 1166), True, 'import numpy as np\n'), ((1168, 1198), 'numpy.hstack', 'np.hstack', (['listOfFrames[20:25]'], {}), '(listOfFrames[20:25])\n', (1177, 1198), True, 'import numpy as np\n'), ((1200, 1230), 'numpy.hstack', 'np.hstack', (['listOfFrames[25:30]'], {}), '(listOfFrames[25:30])\n', (1209, 1230), True, 'import numpy as np\n'), ((1411, 1450), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2GRAY'], {}), '(image, cv2.COLOR_BGR2GRAY)\n', (1423, 1450), False, 'import cv2\n'), ((1467, 1488), 'os.listdir', 'os.listdir', (['imagePath'], {}), '(imagePath)\n', (1477, 1488), False, 'import os\n'), ((2996, 3024), 'cv2.resize', 'cv2.resize', (['gray', '(150, 100)'], {}), '(gray, (150, 100))\n', (3006, 3024), False, 'import cv2\n'), ((1810, 1833), 'os.path.exists', 'os.path.exists', (['dirName'], {}), '(dirName)\n', (1824, 1833), False, 'import os\n'), ((1847, 1867), 'os.makedirs', 'os.makedirs', (['dirName'], {}), '(dirName)\n', (1858, 1867), False, 'import os\n'), ((1999, 2022), 'os.path.exists', 'os.path.exists', (['dirName'], {}), '(dirName)\n', (2013, 2022), False, 'import os\n'), ((2036, 2056), 'os.makedirs', 'os.makedirs', (['dirName'], {}), '(dirName)\n', (2047, 2056), False, 'import os\n'), ((2164, 2187), 'os.path.exists', 'os.path.exists', (['dirName'], {}), '(dirName)\n', (2178, 2187), False, 'import os\n'), ((2201, 2221), 'os.makedirs', 'os.makedirs', (['dirName'], {}), '(dirName)\n', (2212, 2221), False, 'import os\n'), ((2332, 2355), 'os.path.exists', 'os.path.exists', (['dirName'], {}), '(dirName)\n', (2346, 2355), False, 'import os\n'), ((2369, 2389), 'os.makedirs', 'os.makedirs', (['dirName'], {}), '(dirName)\n', (2380, 2389), False, 'import os\n'), ((2503, 2526), 'os.path.exists', 'os.path.exists', (['dirName'], {}), '(dirName)\n', (2517, 2526), False, 'import os\n'), ((2540, 2560), 'os.makedirs', 'os.makedirs', (['dirName'], {}), '(dirName)\n', (2551, 2560), False, 'import os\n'), ((2672, 2695), 'os.path.exists', 'os.path.exists', (['dirName'], {}), '(dirName)\n', (2686, 2695), False, 'import os\n'), ((2709, 2729), 'os.makedirs', 'os.makedirs', (['dirName'], {}), '(dirName)\n', (2720, 2729), False, 'import os\n'), ((4213, 4224), 'time.time', 'time.time', ([], {}), '()\n', (4222, 4224), False, 'import time\n'), ((4333, 4353), 'glob.glob', 'glob.glob', (['videoPath'], {}), '(videoPath)\n', (4342, 4353), False, 'import glob\n'), ((4981, 4992), 'time.time', 'time.time', ([], {}), '()\n', (4990, 4992), False, 'import time\n'), ((4675, 4686), 'time.time', 'time.time', ([], {}), '()\n', (4684, 4686), False, 'import time\n')]
|
import numpy as np
import math
from pressiotools import linalg as la
def read_binary_array(fileName, nCols):
# read a numpy array from a binary file "fileName"
if nCols==1:
return np.fromfile(fileName)
else:
array = np.fromfile(fileName)
nRows = int(len(array) / float(nCols))
return array.reshape((nCols,nRows)).T
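# Worked example (hypothetical file): 6 float64 values read by np.fromfile
# with nCols=2 give nRows=3; reshape((2, 3)).T returns a (3, 2) array, i.e.
# the file is interpreted column by column.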
def read_ascii_array(fileName, nCols):
# read a numpy array from an ascii file "fileName"
return np.asfortranarray(np.loadtxt(fileName))
def read_array(fileName, nCols, isBinary=True):
if isBinary:
return read_binary_array(fileName,nCols)
else:
return read_ascii_array(fileName,nCols)
def read_array_distributed(comm, rootFileName, nCols, isBinary=True):
# Read an array from binary or ascii files with the name specified
# by the string rootFileName
# Each local array segment will be read from a file rootFileName.XX.YY,
# where XX is the number of ranks and YY is the local rank
rank = comm.Get_rank()
size = comm.Get_size()
nDigit = int(math.log10(size)) + 1
myFileName = "{}.{}.{:0{width}d}".format(rootFileName,size,rank,width=nDigit)
myArr = read_array(myFileName,nCols,isBinary)
if nCols==1:
return la.Vector(myArr)
else:
return la.MultiVector(myArr)
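# Filename example (hypothetical): with size=16 ranks, nDigit = int(log10(16)) + 1 = 2,
# so rank 3 reads "<rootFileName>.16.03".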
|
[
"numpy.fromfile",
"pressiotools.linalg.MultiVector",
"pressiotools.linalg.Vector",
"math.log10",
"numpy.loadtxt"
] |
[((190, 211), 'numpy.fromfile', 'np.fromfile', (['fileName'], {}), '(fileName)\n', (201, 211), True, 'import numpy as np\n'), ((232, 253), 'numpy.fromfile', 'np.fromfile', (['fileName'], {}), '(fileName)\n', (243, 253), True, 'import numpy as np\n'), ((459, 479), 'numpy.loadtxt', 'np.loadtxt', (['fileName'], {}), '(fileName)\n', (469, 479), True, 'import numpy as np\n'), ((1191, 1207), 'pressiotools.linalg.Vector', 'la.Vector', (['myArr'], {}), '(myArr)\n', (1200, 1207), True, 'from pressiotools import linalg as la\n'), ((1227, 1248), 'pressiotools.linalg.MultiVector', 'la.MultiVector', (['myArr'], {}), '(myArr)\n', (1241, 1248), True, 'from pressiotools import linalg as la\n'), ((1014, 1030), 'math.log10', 'math.log10', (['size'], {}), '(size)\n', (1024, 1030), False, 'import math\n')]
|
from flask import Flask, request, jsonify
from flask_cors import CORS
app = Flask(__name__)
CORS(app)
@app.route('/api/data', methods=['GET'])
def data():
query = ''
with open('stats.json', 'r') as db:
query = db.read()
print(query)
return query
@app.route('/api/sendData', methods=['POST'])
def receiveData():
data = request.get_json()
print(data)
with open('stats.json', 'w') as db:
db.write(f'''{{
"servers": {data['servers']},
"users": {data['users']}
}}
''')
return 'Thank you'
app.run("0.0.0.0", 8080, False)
|
[
"flask_cors.CORS",
"flask.Flask",
"flask.request.get_json"
] |
[((77, 92), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (82, 92), False, 'from flask import Flask, request, jsonify\n'), ((94, 103), 'flask_cors.CORS', 'CORS', (['app'], {}), '(app)\n', (98, 103), False, 'from flask_cors import CORS\n'), ((358, 376), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (374, 376), False, 'from flask import Flask, request, jsonify\n')]
|
from setuptools import setup
from os import path
from codecs import open
here = path.abspath( path.dirname( __file__ ) )
with open( path.join( here, 'README.md' ), encoding='utf-8' ) as file :
long_description = file.read()
for line in open( path.join( 'meteostat', '__init__.py' ) ) :
if line.startswith( '__version__' ) :
exec( line )
break
setup(
name = 'meteostat2',
version = __version__,
description = 'Meteostat alternative API for python',
long_description = long_description,
url = 'https://github.com/SNR20db/meteostat2',
license = 'MIT',
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10'
],
keywords = 'meteo meteostat meteostat2 weather weatherAPI meteorology',
install_requires = [ 'requests' ],
entry_points = {
'console_scripts' : [
'meteostat2 = meteostat.__main__:main',
'meteo2 = meteostat.__main__:main'
]
},
py_modules = [ 'meteostat.meteostat2' , 'meteostat.__main__' ],
tests_require = [
'pandas'
]
)
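# Note: the console_scripts entry points above install two equivalent
# commands, `meteostat2` and `meteo2`, both dispatching to
# meteostat.__main__:main.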
|
[
"os.path.dirname",
"os.path.join",
"setuptools.setup"
] |
[((372, 1272), 'setuptools.setup', 'setup', ([], {'name': '"""meteostat2"""', 'versio': '__version__', 'description': '"""Meteostat alternative API for python"""', 'long_description': 'long_description', 'url': '"""https://github.com/SNR20db/meteostat2"""', 'license': '"""MIT"""', 'Classifiers': "['Development Status :: 3 - Alpha', 'Intended Audience :: Developer',\n 'Intended Audience :: Education',\n 'Intended Audience :: End Users/Desktop',\n 'Intended Audience :: Science/Research', 'License :: MIT License',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'Programming Language :: Python :: 3.10']", 'keywords': '"""meteo meteostat meteostat2 weather weatherAPI meteorology"""', 'install_requires': "['requests']", 'entry_points': "{'console_scripts': ['meteostat2 = meteostat.__main__:main',\n 'meteo2 = meteostat.__main__:main']}", 'py_modules': "['meteostat.meteostat2', 'meteostat.__main__']", 'test_require': "['pandas']"}), "(name='meteostat2', versio=__version__, description=\n 'Meteostat alternative API for python', long_description=\n long_description, url='https://github.com/SNR20db/meteostat2', license=\n 'MIT', Classifiers=['Development Status :: 3 - Alpha',\n 'Intended Audience :: Developer', 'Intended Audience :: Education',\n 'Intended Audience :: End Users/Desktop',\n 'Intended Audience :: Science/Research', 'License :: MIT License',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'Programming Language :: Python :: 3.10'], keywords=\n 'meteo meteostat meteostat2 weather weatherAPI meteorology',\n install_requires=['requests'], entry_points={'console_scripts': [\n 'meteostat2 = meteostat.__main__:main',\n 'meteo2 = meteostat.__main__:main']}, py_modules=[\n 'meteostat.meteostat2', 'meteostat.__main__'], test_require=['pandas'])\n", (377, 1272), False, 'from setuptools import setup\n'), ((96, 118), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (108, 118), False, 'from os import path\n'), ((250, 287), 'os.path.join', 'path.join', (['"""meteostat"""', '"""__init__.py"""'], {}), "('meteostat', '__init__.py')\n", (259, 287), False, 'from os import path\n'), ((135, 163), 'os.path.join', 'path.join', (['here', '"""README.md"""'], {}), "(here, 'README.md')\n", (144, 163), False, 'from os import path\n')]
|
import pytest
from numpy.random import RandomState
from skvalid.parameters import TypeOf
from skvalid.parameters import Enum
from skvalid.parameters import Union
from skvalid.parameters import Interval
from skvalid.parameters import Const
import typing
@pytest.mark.parametrize('type_of,value',
[(TypeOf(bool), True), (TypeOf(bool), False),
(TypeOf(typing.Callable), TypeOf.validate),
(TypeOf(float), 10.1), (TypeOf(int), 10),
(TypeOf(dict), {})])
def test_typeof_valid_values(type_of, value):
type_of.validate(value, "tol")
@pytest.mark.parametrize('type_of,value', [(TypeOf(bool), 'Hello world'),
(TypeOf(typing.Callable), True),
(TypeOf(str), 120),
(TypeOf(int), 10.1),
(TypeOf(dict), True)])
def test_typeof_invalid_values(type_of, value):
cur_type = type_of.types[0]
name = getattr(cur_type, "__name__", str(cur_type))
msg = 'tol: {} is not a {}'.format(value, name)
with pytest.raises(TypeError, match=msg):
type_of.validate(value, "tol")
def test_typeof_invalid_values_multiple():
msg = 'tol: 4.0 is not a RandomState or int'
with pytest.raises(TypeError, match=msg):
TypeOf(RandomState, int).validate(4.0, "tol")
@pytest.mark.parametrize('constant,value', [(Const(4), 4),
(Const('hehe'), 'hehe'),
(Const(3.1), 3.1),
(Const(True), True),
(Const(None), None)])
def test_constant_valid_values(constant, value):
# does not raise
constant.validate(value, "tol")
@pytest.mark.parametrize('constant,value', [(Const(4), 3),
(Const('hehe'), 'heh'),
(Const(3.1), 4.1),
(Const(True), False),
(Const(None), 4),
(Const(4), None)])
def test_constant_invalid_values(constant, value):
msg = 'tol: {} != {}'.format(value, constant.value)
with pytest.raises(ValueError, match=msg):
constant.validate(value, "tol")
@pytest.mark.parametrize('members, msg',
[([], 'members must have at least one item'),
((), 'members must have at least one item')])
def test_enum_invalid_members_init(members, msg):
with pytest.raises(ValueError, match=msg):
Enum(*members)
@pytest.mark.parametrize('enum, value',
[(Enum('a', 'b'), 'a'), (Enum('a', 'b'), 'b'),
(Enum('a', 'c', 'b'), 'c'),
(Enum('a', 1, None, 1.0, True), 'a'),
(Enum('a', 1, None, 1.0, True), 1),
(Enum('a', 1, None, 1.0, True), None),
(Enum('a', 1, None, 1.0, True), 1.0),
(Enum('a', 1, None, 1.0, True), True)])
def test_enum_values(enum, value):
# does not raise
enum.validate(value, "tol")
@pytest.mark.parametrize(
'enum, value, msg',
[(Enum('a', '5'), '3', r'3 is not in \[a, 5\]'),
(Enum('a', '3', '9'), '5', r'5 is not in \[a, 3, 9\]'),
(Enum('a', 1, None, 1.0,
True), 'bad', r'bad is not in \[a, 1, None, 1.0, True\]')])
def test_enum_invalid_values(enum, value, msg):
with pytest.raises(ValueError, match=msg):
enum.validate(value, "tol")
def test_enum_invalid_type_error():
enum, value, msg = Enum('hello', 'f'), 1, r'1 is not in \[hello, f\]'
with pytest.raises(ValueError, match=msg):
enum.validate(value, 'tol')
@pytest.mark.parametrize(
'params, msg',
[((), 'parameters must have at least one item'),
(('hello', 'world'), 'all parameters must be of type Parameter'),
((TypeOf(int), 3), 'all parameters must be of type Parameter'),
((None, Enum('hello')), 'all parameters must be of type Parameter')])
def test_union_invalid_params_init(params, msg):
with pytest.raises(ValueError, match=msg):
Union(*params)
@pytest.mark.parametrize('union, value, msg', [
(Union(TypeOf(int), Enum('hello', 'world')), None,
r'tol: None is not a int and is not in \[hello, world\]'),
(Union(TypeOf(int), Enum('hello', 'world')), 0.4, 'tol: 0.4 is not a int'),
])
def test_union_invalid_values(union, value, msg):
with pytest.raises(ValueError, match=msg):
union.validate(value, "tol")
@pytest.mark.parametrize('union, value', [
(Union(TypeOf(int), Enum('hello', 'world')), 'hello'),
(Union(TypeOf(int), Enum('hello', 'world')), 'world'),
(Union(TypeOf(int), Enum('hello', 'world')), 10),
(Union(TypeOf(int), Enum('hello', 'world'), Const(None)), None),
(Union(TypeOf(float), TypeOf(int)), 10),
(Union(TypeOf(float), TypeOf(int)), 10.3),
])
def test_union_valid_values(union, value):
# does not raise
union.validate(value, "tol")
def test_union_removes_tags():
union = Union(TypeOf(int, tags=['control']),
Enum('a', 'b', tags=['not good']),
tags=['deprecated'])
for params in union.params:
assert not params.tags
@pytest.mark.parametrize('lower, upper, msg',
[(None, None, 'lower or upper must be defined'),
(10, 1, 'lower must be strictly less than upper'),
(10, 10, 'lower must be strictly less than upper')])
def test_interval_error_init(lower, upper, msg):
with pytest.raises(ValueError, match=msg):
Interval(int, lower=lower, upper=upper)
@pytest.mark.parametrize('interval, value', [
(Interval(int, lower=None, upper=2), 1),
(Interval(int, lower=None, upper=2), 2),
(Interval(int, lower=-3, upper=None), 3),
(Interval(int, lower=-3, upper=None), -3),
(Interval(int, lower=-3, upper=2), 0),
(Interval(int, lower=-3, upper=2), -3),
(Interval(int, lower=-3, upper=2), 2),
(Interval(float, lower=None, upper=2), 1.0),
(Interval(float, lower=None, upper=2), 2.0),
(Interval(float, lower=-3, upper=None), 3.0),
(Interval(float, lower=-3, upper=None), -3.0),
(Interval(float, lower=-3, upper=2), 0.0),
(Interval(float, lower=-3, upper=2), -3.0),
(Interval(float, lower=-3, upper=2), 2.0),
])
def test_interval_valid_values(interval, value):
interval.validate(value, "tol")
@pytest.mark.parametrize('interval, value, msg', [
(Interval(int, lower=None, upper=2), 1.0, 'tol: 1.0 is not a int'),
(Interval(float, lower=None, upper=2), 1, 'tol: 1 is not a float'),
])
def test_interval_invalid_type(interval, value, msg):
with pytest.raises(TypeError, match=msg):
interval.validate(value, "tol")
@pytest.mark.parametrize('interval, value, msg', [
(Interval(int, lower=None, upper=2), 3, r'3 not in \(-inf, 2\]'),
(Interval(int, lower=None, upper=2,
upper_inclusive=False), 2, r'2 not in \(-inf, 2\)'),
(Interval(int, lower=-3, upper=None), -4, r'-4 not in \[-3, inf\)'),
(Interval(int, lower=-3, upper=None,
lower_inclusive=False), -3, r'-3 not in \(-3, inf\)'),
(Interval(int, lower=-3, upper=2), 3, r'3 not in \[-3, 2\]'),
(Interval(int, lower=-3, upper=2), -4, r'-4 not in \[-3, 2\]'),
(Interval(int, lower=-3, upper=2,
lower_inclusive=False), -3, r'-3 not in \(-3, 2\]'),
(Interval(int, lower=-3, upper=2,
upper_inclusive=False), 2, r'2 not in \[-3, 2\)'),
])
def test_interval_invalid_values(interval, value, msg):
with pytest.raises(ValueError, match=msg):
interval.validate(value, 'tol')
|
[
"skvalid.parameters.TypeOf",
"skvalid.parameters.Union",
"skvalid.parameters.Const",
"pytest.raises",
"skvalid.parameters.Enum",
"pytest.mark.parametrize",
"skvalid.parameters.Interval"
] |
[((2457, 2596), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""members, msg"""', "[([], 'members must have at least one item'), ((),\n 'members must have at least one item')]"], {}), "('members, msg', [([],\n 'members must have at least one item'), ((),\n 'members must have at least one item')])\n", (2480, 2596), False, 'import pytest\n'), ((5468, 5677), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""lower, upper, msg"""', "[(None, None, 'lower or upper must be defined'), (10, 1,\n 'lower must be strictly less than upper'), (10, 10,\n 'lower must be strictly less than upper')]"], {}), "('lower, upper, msg', [(None, None,\n 'lower or upper must be defined'), (10, 1,\n 'lower must be strictly less than upper'), (10, 10,\n 'lower must be strictly less than upper')])\n", (5491, 5677), False, 'import pytest\n'), ((1177, 1212), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': 'msg'}), '(TypeError, match=msg)\n', (1190, 1212), False, 'import pytest\n'), ((1356, 1391), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': 'msg'}), '(TypeError, match=msg)\n', (1369, 1391), False, 'import pytest\n'), ((2376, 2412), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': 'msg'}), '(ValueError, match=msg)\n', (2389, 2412), False, 'import pytest\n'), ((2699, 2735), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': 'msg'}), '(ValueError, match=msg)\n', (2712, 2735), False, 'import pytest\n'), ((2745, 2759), 'skvalid.parameters.Enum', 'Enum', (['*members'], {}), '(*members)\n', (2749, 2759), False, 'from skvalid.parameters import Enum\n'), ((3661, 3697), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': 'msg'}), '(ValueError, match=msg)\n', (3674, 3697), False, 'import pytest\n'), ((3796, 3814), 'skvalid.parameters.Enum', 'Enum', (['"""hello"""', '"""f"""'], {}), "('hello', 'f')\n", (3800, 3814), False, 'from skvalid.parameters import Enum\n'), ((3856, 3892), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': 'msg'}), '(ValueError, match=msg)\n', (3869, 3892), False, 'import pytest\n'), ((4303, 4339), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': 'msg'}), '(ValueError, match=msg)\n', (4316, 4339), False, 'import pytest\n'), ((4349, 4363), 'skvalid.parameters.Union', 'Union', (['*params'], {}), '(*params)\n', (4354, 4363), False, 'from skvalid.parameters import Union\n'), ((4675, 4711), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': 'msg'}), '(ValueError, match=msg)\n', (4688, 4711), False, 'import pytest\n'), ((5279, 5308), 'skvalid.parameters.TypeOf', 'TypeOf', (['int'], {'tags': "['control']"}), "(int, tags=['control'])\n", (5285, 5308), False, 'from skvalid.parameters import TypeOf\n'), ((5328, 5361), 'skvalid.parameters.Enum', 'Enum', (['"""a"""', '"""b"""'], {'tags': "['not good']"}), "('a', 'b', tags=['not good'])\n", (5332, 5361), False, 'from skvalid.parameters import Enum\n'), ((5801, 5837), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': 'msg'}), '(ValueError, match=msg)\n', (5814, 5837), False, 'import pytest\n'), ((5847, 5886), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': 'lower', 'upper': 'upper'}), '(int, lower=lower, upper=upper)\n', (5855, 5886), False, 'from skvalid.parameters import Interval\n'), ((6940, 6975), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': 'msg'}), '(TypeError, match=msg)\n', (6953, 6975), False, 'import pytest\n'), ((7840, 7876), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': 'msg'}), '(ValueError, match=msg)\n', (7853, 7876), False, 'import pytest\n'), ((325, 337), 'skvalid.parameters.TypeOf', 'TypeOf', (['bool'], {}), '(bool)\n', (331, 337), False, 'from skvalid.parameters import TypeOf\n'), ((347, 359), 'skvalid.parameters.TypeOf', 'TypeOf', (['bool'], {}), '(bool)\n', (353, 359), False, 'from skvalid.parameters import TypeOf\n'), ((396, 419), 'skvalid.parameters.TypeOf', 'TypeOf', (['typing.Callable'], {}), '(typing.Callable)\n', (402, 419), False, 'from skvalid.parameters import TypeOf\n'), ((466, 479), 'skvalid.parameters.TypeOf', 'TypeOf', (['float'], {}), '(float)\n', (472, 479), False, 'from skvalid.parameters import TypeOf\n'), ((489, 500), 'skvalid.parameters.TypeOf', 'TypeOf', (['int'], {}), '(int)\n', (495, 500), False, 'from skvalid.parameters import TypeOf\n'), ((534, 546), 'skvalid.parameters.TypeOf', 'TypeOf', (['dict'], {}), '(dict)\n', (540, 546), False, 'from skvalid.parameters import TypeOf\n'), ((681, 693), 'skvalid.parameters.TypeOf', 'TypeOf', (['bool'], {}), '(bool)\n', (687, 693), False, 'from skvalid.parameters import TypeOf\n'), ((755, 778), 'skvalid.parameters.TypeOf', 'TypeOf', (['typing.Callable'], {}), '(typing.Callable)\n', (761, 778), False, 'from skvalid.parameters import TypeOf\n'), ((831, 842), 'skvalid.parameters.TypeOf', 'TypeOf', (['str'], {}), '(str)\n', (837, 842), False, 'from skvalid.parameters import TypeOf\n'), ((894, 905), 'skvalid.parameters.TypeOf', 'TypeOf', (['int'], {}), '(int)\n', (900, 905), False, 'from skvalid.parameters import TypeOf\n'), ((958, 970), 'skvalid.parameters.TypeOf', 'TypeOf', (['dict'], {}), '(dict)\n', (964, 970), False, 'from skvalid.parameters import TypeOf\n'), ((1494, 1502), 'skvalid.parameters.Const', 'Const', (['(4)'], {}), '(4)\n', (1499, 1502), False, 'from skvalid.parameters import Const\n'), ((1553, 1566), 'skvalid.parameters.Const', 'Const', (['"""hehe"""'], {}), "('hehe')\n", (1558, 1566), False, 'from skvalid.parameters import Const\n'), ((1622, 1632), 'skvalid.parameters.Const', 'Const', (['(3.1)'], {}), '(3.1)\n', (1627, 1632), False, 'from skvalid.parameters import Const\n'), ((1685, 1696), 'skvalid.parameters.Const', 'Const', (['(True)'], {}), '(True)\n', (1690, 1696), False, 'from skvalid.parameters import Const\n'), ((1750, 1761), 'skvalid.parameters.Const', 'Const', (['None'], {}), '(None)\n', (1755, 1761), False, 'from skvalid.parameters import Const\n'), ((1924, 1932), 'skvalid.parameters.Const', 'Const', (['(4)'], {}), '(4)\n', (1929, 1932), False, 'from skvalid.parameters import Const\n'), ((1983, 1996), 'skvalid.parameters.Const', 'Const', (['"""hehe"""'], {}), "('hehe')\n", (1988, 1996), False, 'from skvalid.parameters import Const\n'), ((2051, 2061), 'skvalid.parameters.Const', 'Const', (['(3.1)'], {}), '(3.1)\n', (2056, 2061), False, 'from skvalid.parameters import Const\n'), ((2114, 2125), 'skvalid.parameters.Const', 'Const', (['(True)'], {}), '(True)\n', (2119, 2125), False, 'from skvalid.parameters import Const\n'), ((2180, 2191), 'skvalid.parameters.Const', 'Const', (['None'], {}), '(None)\n', (2185, 2191), False, 'from skvalid.parameters import Const\n'), ((2242, 2250), 'skvalid.parameters.Const', 'Const', (['(4)'], {}), '(4)\n', (2247, 2250), False, 'from skvalid.parameters import Const\n'), ((2829, 2843), 'skvalid.parameters.Enum', 'Enum', (['"""a"""', '"""b"""'], {}), "('a', 'b')\n", (2833, 2843), False, 'from skvalid.parameters import Enum\n'), ((2852, 2866), 'skvalid.parameters.Enum', 'Enum', (['"""a"""', '"""b"""'], {}), "('a', 'b')\n", (2856, 2866), False, 'from skvalid.parameters import Enum\n'), ((2901, 2920), 'skvalid.parameters.Enum', 'Enum', (['"""a"""', '"""c"""', '"""b"""'], {}), "('a', 'c', 'b')\n", (2905, 2920), False, 'from skvalid.parameters import Enum\n'), ((2955, 2984), 'skvalid.parameters.Enum', 'Enum', (['"""a"""', '(1)', 'None', '(1.0)', '(True)'], {}), "('a', 1, None, 1.0, True)\n", (2959, 2984), False, 'from skvalid.parameters import Enum\n'), ((3019, 3048), 'skvalid.parameters.Enum', 'Enum', (['"""a"""', '(1)', 'None', '(1.0)', '(True)'], {}), "('a', 1, None, 1.0, True)\n", (3023, 3048), False, 'from skvalid.parameters import Enum\n'), ((3081, 3110), 'skvalid.parameters.Enum', 'Enum', (['"""a"""', '(1)', 'None', '(1.0)', '(True)'], {}), "('a', 1, None, 1.0, True)\n", (3085, 3110), False, 'from skvalid.parameters import Enum\n'), ((3146, 3175), 'skvalid.parameters.Enum', 'Enum', (['"""a"""', '(1)', 'None', '(1.0)', '(True)'], {}), "('a', 1, None, 1.0, True)\n", (3150, 3175), False, 'from skvalid.parameters import Enum\n'), ((3210, 3239), 'skvalid.parameters.Enum', 'Enum', (['"""a"""', '(1)', 'None', '(1.0)', '(True)'], {}), "('a', 1, None, 1.0, True)\n", (3214, 3239), False, 'from skvalid.parameters import Enum\n'), ((3395, 3409), 'skvalid.parameters.Enum', 'Enum', (['"""a"""', '"""5"""'], {}), "('a', '5')\n", (3399, 3409), False, 'from skvalid.parameters import Enum\n'), ((3448, 3467), 'skvalid.parameters.Enum', 'Enum', (['"""a"""', '"""3"""', '"""9"""'], {}), "('a', '3', '9')\n", (3452, 3467), False, 'from skvalid.parameters import Enum\n'), ((3509, 3538), 'skvalid.parameters.Enum', 'Enum', (['"""a"""', '(1)', 'None', '(1.0)', '(True)'], {}), "('a', 1, None, 1.0, True)\n", (3513, 3538), False, 'from skvalid.parameters import Enum\n'), ((5940, 5974), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': 'None', 'upper': '(2)'}), '(int, lower=None, upper=2)\n', (5948, 5974), False, 'from skvalid.parameters import Interval\n'), ((5985, 6019), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': 'None', 'upper': '(2)'}), '(int, lower=None, upper=2)\n', (5993, 6019), False, 'from skvalid.parameters import Interval\n'), ((6030, 6065), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': '(-3)', 'upper': 'None'}), '(int, lower=-3, upper=None)\n', (6038, 6065), False, 'from skvalid.parameters import Interval\n'), ((6076, 6111), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': '(-3)', 'upper': 'None'}), '(int, lower=-3, upper=None)\n', (6084, 6111), False, 'from skvalid.parameters import Interval\n'), ((6123, 6155), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': '(-3)', 'upper': '(2)'}), '(int, lower=-3, upper=2)\n', (6131, 6155), False, 'from skvalid.parameters import Interval\n'), ((6166, 6198), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': '(-3)', 'upper': '(2)'}), '(int, lower=-3, upper=2)\n', (6174, 6198), False, 'from skvalid.parameters import Interval\n'), ((6210, 6242), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': '(-3)', 'upper': '(2)'}), '(int, lower=-3, upper=2)\n', (6218, 6242), False, 'from skvalid.parameters import Interval\n'), ((6253, 6289), 'skvalid.parameters.Interval', 'Interval', (['float'], {'lower': 'None', 'upper': '(2)'}), '(float, lower=None, upper=2)\n', (6261, 6289), False, 'from skvalid.parameters import Interval\n'), ((6302, 6338), 'skvalid.parameters.Interval', 'Interval', (['float'], {'lower': 'None', 'upper': '(2)'}), '(float, lower=None, upper=2)\n', (6310, 6338), False, 'from skvalid.parameters import Interval\n'), ((6351, 6388), 'skvalid.parameters.Interval', 'Interval', (['float'], {'lower': '(-3)', 'upper': 'None'}), '(float, lower=-3, upper=None)\n', (6359, 6388), False, 'from skvalid.parameters import Interval\n'), ((6401, 6438), 'skvalid.parameters.Interval', 'Interval', (['float'], {'lower': '(-3)', 'upper': 'None'}), '(float, lower=-3, upper=None)\n', (6409, 6438), False, 'from skvalid.parameters import Interval\n'), ((6452, 6486), 'skvalid.parameters.Interval', 'Interval', (['float'], {'lower': '(-3)', 'upper': '(2)'}), '(float, lower=-3, upper=2)\n', (6460, 6486), False, 'from skvalid.parameters import Interval\n'), ((6499, 6533), 'skvalid.parameters.Interval', 'Interval', (['float'], {'lower': '(-3)', 'upper': '(2)'}), '(float, lower=-3, upper=2)\n', (6507, 6533), False, 'from skvalid.parameters import Interval\n'), ((6547, 6581), 'skvalid.parameters.Interval', 'Interval', (['float'], {'lower': '(-3)', 'upper': '(2)'}), '(float, lower=-3, upper=2)\n', (6555, 6581), False, 'from skvalid.parameters import Interval\n'), ((6735, 6769), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': 'None', 'upper': '(2)'}), '(int, lower=None, upper=2)\n', (6743, 6769), False, 'from skvalid.parameters import Interval\n'), ((6807, 6843), 'skvalid.parameters.Interval', 'Interval', (['float'], {'lower': 'None', 'upper': '(2)'}), '(float, lower=None, upper=2)\n', (6815, 6843), False, 'from skvalid.parameters import Interval\n'), ((7075, 7109), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': 'None', 'upper': '(2)'}), '(int, lower=None, upper=2)\n', (7083, 7109), False, 'from skvalid.parameters import Interval\n'), ((7145, 7202), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': 'None', 'upper': '(2)', 'upper_inclusive': '(False)'}), '(int, lower=None, upper=2, upper_inclusive=False)\n', (7153, 7202), False, 'from skvalid.parameters import Interval\n'), ((7252, 7287), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': '(-3)', 'upper': 'None'}), '(int, lower=-3, upper=None)\n', (7260, 7287), False, 'from skvalid.parameters import Interval\n'), ((7325, 7383), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': '(-3)', 'upper': 'None', 'lower_inclusive': '(False)'}), '(int, lower=-3, upper=None, lower_inclusive=False)\n', (7333, 7383), False, 'from skvalid.parameters import Interval\n'), ((7435, 7467), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': '(-3)', 'upper': '(2)'}), '(int, lower=-3, upper=2)\n', (7443, 7467), False, 'from skvalid.parameters import Interval\n'), ((7501, 7533), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': '(-3)', 'upper': '(2)'}), '(int, lower=-3, upper=2)\n', (7509, 7533), False, 'from skvalid.parameters import Interval\n'), ((7569, 7624), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': '(-3)', 'upper': '(2)', 'lower_inclusive': '(False)'}), '(int, lower=-3, upper=2, lower_inclusive=False)\n', (7577, 7624), False, 'from skvalid.parameters import Interval\n'), ((7674, 7729), 'skvalid.parameters.Interval', 'Interval', (['int'], {'lower': '(-3)', 'upper': '(2)', 'upper_inclusive': '(False)'}), '(int, lower=-3, upper=2, upper_inclusive=False)\n', (7682, 7729), False, 'from skvalid.parameters import Interval\n'), ((1401, 1425), 'skvalid.parameters.TypeOf', 'TypeOf', (['RandomState', 'int'], {}), '(RandomState, int)\n', (1407, 1425), False, 'from skvalid.parameters import TypeOf\n'), ((4108, 4119), 'skvalid.parameters.TypeOf', 'TypeOf', (['int'], {}), '(int)\n', (4114, 4119), False, 'from skvalid.parameters import TypeOf\n'), ((4183, 4196), 'skvalid.parameters.Enum', 'Enum', (['"""hello"""'], {}), "('hello')\n", (4187, 4196), False, 'from skvalid.parameters import Enum\n'), ((4425, 4436), 'skvalid.parameters.TypeOf', 'TypeOf', (['int'], {}), '(int)\n', (4431, 4436), False, 'from skvalid.parameters import TypeOf\n'), ((4438, 4460), 'skvalid.parameters.Enum', 'Enum', (['"""hello"""', '"""world"""'], {}), "('hello', 'world')\n", (4442, 4460), False, 'from skvalid.parameters import Enum\n'), ((4544, 4555), 'skvalid.parameters.TypeOf', 'TypeOf', (['int'], {}), '(int)\n', (4550, 4555), False, 'from skvalid.parameters import TypeOf\n'), ((4557, 4579), 'skvalid.parameters.Enum', 'Enum', (['"""hello"""', '"""world"""'], {}), "('hello', 'world')\n", (4561, 4579), False, 'from skvalid.parameters import Enum\n'), ((4806, 4817), 'skvalid.parameters.TypeOf', 'TypeOf', (['int'], {}), '(int)\n', (4812, 4817), False, 'from skvalid.parameters import TypeOf\n'), ((4819, 4841), 'skvalid.parameters.Enum', 'Enum', (['"""hello"""', '"""world"""'], {}), "('hello', 'world')\n", (4823, 4841), False, 'from skvalid.parameters import Enum\n'), ((4865, 4876), 'skvalid.parameters.TypeOf', 'TypeOf', (['int'], {}), '(int)\n', (4871, 4876), False, 'from skvalid.parameters import TypeOf\n'), ((4878, 4900), 'skvalid.parameters.Enum', 'Enum', (['"""hello"""', '"""world"""'], {}), "('hello', 'world')\n", (4882, 4900), False, 'from skvalid.parameters import Enum\n'), ((4924, 4935), 'skvalid.parameters.TypeOf', 'TypeOf', (['int'], {}), '(int)\n', (4930, 4935), False, 'from skvalid.parameters import TypeOf\n'), ((4937, 4959), 'skvalid.parameters.Enum', 'Enum', (['"""hello"""', '"""world"""'], {}), "('hello', 'world')\n", (4941, 4959), False, 'from skvalid.parameters import Enum\n'), ((4978, 4989), 'skvalid.parameters.TypeOf', 'TypeOf', (['int'], {}), '(int)\n', (4984, 4989), False, 'from skvalid.parameters import TypeOf\n'), ((4991, 5013), 'skvalid.parameters.Enum', 'Enum', (['"""hello"""', '"""world"""'], {}), "('hello', 'world')\n", (4995, 5013), False, 'from skvalid.parameters import Enum\n'), ((5015, 5026), 'skvalid.parameters.Const', 'Const', (['None'], {}), '(None)\n', (5020, 5026), False, 'from skvalid.parameters import Const\n'), ((5047, 5060), 'skvalid.parameters.TypeOf', 'TypeOf', (['float'], {}), '(float)\n', (5053, 5060), False, 'from skvalid.parameters import TypeOf\n'), ((5062, 5073), 'skvalid.parameters.TypeOf', 'TypeOf', (['int'], {}), '(int)\n', (5068, 5073), False, 'from skvalid.parameters import TypeOf\n'), ((5092, 5105), 'skvalid.parameters.TypeOf', 'TypeOf', (['float'], {}), '(float)\n', (5098, 5105), False, 'from skvalid.parameters import TypeOf\n'), ((5107, 5118), 'skvalid.parameters.TypeOf', 'TypeOf', (['int'], {}), '(int)\n', (5113, 5118), False, 'from skvalid.parameters import TypeOf\n')]
|
import os
os.system("chmod 777 /content/xorta/Miners/ethminer/v0.11.0_Nvidia_Optimized/Linux/ethminer")
|
[
"os.system"
] |
[((10, 113), 'os.system', 'os.system', (['"""chmod 777 /content/xorta/Miners/ethminer/v0.11.0_Nvidia_Optimized/Linux/ethminer"""'], {}), "(\n 'chmod 777 /content/xorta/Miners/ethminer/v0.11.0_Nvidia_Optimized/Linux/ethminer'\n )\n", (19, 113), False, 'import os\n')]
|
# RTC clock with DS1307
#
# 2017-0225 various tests, since MicroPython 1.8.7 changed the RTC interface
# Sources:
# ESP8266 - connection to RTC, I2C-connection with NodeMCU
# MicroPython class RTC: https://micropython.org/resources/docs/en/latest/wipy/library/machine.RTC.html
# class RTC is only for WiPy board, but also works in ESP8266
# interface/methods are changed, is more like Adafruit uRTC
# class uRTC of Adafruit is NOT used, but documentation is used!
# Adafruit uRTC: http://micropython-urtc.readthedocs.io/en/latest/urtc.html#ds1307
#
import machine
# create an RTC object
# 2017-0225 apparently no I2C address has to be given; it works, however.
print("create RTC object...")
rtc = machine.RTC()
# initialise the RTC to a certain datetime
#
# Analyses:
# * according to Adafruit documentation datetime().
#
# * rtc.datetime(datetime): get or set the current time.
# The datetime is an 8-tuple of the format describing the time
# to be set:
# (year, month, day, weekday, hour, minute, second, millisecond)
#
# * If not specified, the method returns a tuple in the same format.
# * day of the week: an integer, where Monday is 0 and Sunday is 6.
# set RTC: (y, m, d, wd, h, m, s, ms)
# rtc.datetime((2017, 2, 25, 6, 22, 48, 15, 0))
# get datetime
print("current datetime: ", rtc.datetime())
# get help about rtc
print("\nprint help about RTC object...")
help(rtc)
# object <RTC> is of type RTC
# datetime -- <function>
# memory -- <function>
# alarm -- <function>
# alarm_left -- <function>
# irq -- <function>
# ALARM0 -- 0
print("\ncreate an alarm which triggers off in 10 seconds...")
import time
# set alarm in 10 seconds
rtc.alarm(0, 10000)
print("print time-left of alarm...")
# get time left to alarm
time.sleep(7.0)
print(rtc.alarm_left(0))
time.sleep(1.0)
print(rtc.alarm_left(0))
time.sleep(1.0)
print(rtc.alarm_left(0))
time.sleep(1.0)
print(rtc.alarm_left(0))
print("Alarm is on, no visible cues. How todo?")
print("\n===== end-of-RTC-test ===")
|
[
"machine.RTC",
"time.sleep"
] |
[((704, 717), 'machine.RTC', 'machine.RTC', ([], {}), '()\n', (715, 717), False, 'import machine\n'), ((1763, 1778), 'time.sleep', 'time.sleep', (['(7.0)'], {}), '(7.0)\n', (1773, 1778), False, 'import time\n'), ((1804, 1819), 'time.sleep', 'time.sleep', (['(1.0)'], {}), '(1.0)\n', (1814, 1819), False, 'import time\n'), ((1845, 1860), 'time.sleep', 'time.sleep', (['(1.0)'], {}), '(1.0)\n', (1855, 1860), False, 'import time\n'), ((1886, 1901), 'time.sleep', 'time.sleep', (['(1.0)'], {}), '(1.0)\n', (1896, 1901), False, 'import time\n')]
|
# -*- coding: utf-8 -*-
import boto3
from boto3.dynamodb.conditions import Key
TABLE_PREFIX = 'KanshinCom-'
USER_TABLE = TABLE_PREFIX + 'user'
KEYWORD_TABLE = TABLE_PREFIX + 'keyword'
CONNECTION_TABLE = TABLE_PREFIX + 'connection'
DIARY_TABLE = TABLE_PREFIX + 'diary'
dynamodb = boto3.resource('dynamodb', region_name='us-west-2')
user_table = dynamodb.Table(USER_TABLE)
keyword_table = dynamodb.Table(KEYWORD_TABLE)
connection_table = dynamodb.Table(CONNECTION_TABLE)
diary_table = dynamodb.Table(DIARY_TABLE)
s3 = boto3.resource('s3', region_name='ap-northeast-1')
storage_bucket = s3.Bucket('s.kanshin.link')
def fetch_user_diaries(user_id):
query_args = dict(
IndexName='byUser',
KeyConditionExpression=Key('user_id').eq(user_id),
ProjectionExpression='id'
)
for item in query(diary_table, **query_args):
yield(get_item(diary_table, item['id']))
def fetch_user_keywords(user_id):
query_args = dict(
IndexName='byUser',
KeyConditionExpression=Key('user_id').eq(user_id),
ProjectionExpression='id'
)
for item in query(keyword_table, **query_args):
yield(get_item(keyword_table, item['id']))
def fetch_user(user_id):
return get_item(user_table, user_id)
def fetch_connections(keyword_id):
    query_args = dict(
        # connections are keyed by (id, other_id) -- see save_connection below
        KeyConditionExpression=Key('id').eq(keyword_id),
        ProjectionExpression='other_id, out_reason, in_reason'
    )
return (
dict(
id=item['other_id'],
out_reason=item['out_reason'],
in_reason=item['in_reason']
)
for item in query(connection_table, **query_args)
)
def save_user(item):
item['id'] = int(item['id'])
save_item(user_table, item)
def save_keyword(item):
item['id'] = int(item['id'])
save_item(keyword_table, item)
def save_connection(id1, id2, out_reason=None, in_reason=None):
save_item(connection_table, dict(id=int(id1), other_id=int(id2), out_reason=out_reason, in_reason=in_reason), ['id', 'other_id'])
def save_diary(item):
item['id'] = int(item['id'])
save_item(diary_table, item)
def has_image(path):
obj = storage_bucket.Object(path)
try:
obj.metadata # test if obj exists
    except Exception:
return False
return True
def save_image(path, content_type, content):
obj = storage_bucket.Object(path)
obj.put(Body=content, ContentType=content_type, ACL='public-read')
# ----------------------
def get_item(table, id=None, **kwargs):
if id is not None:
kwargs['KeyConditionExpression'] = Key('id').eq(id)
result = table.query(**kwargs)
if result['Items'] and len(result['Items']) > 0:
return result['Items'][0]
else:
return None
def query(table, **kwargs):
startKey = None
while True:
if startKey:
kwargs['ExclusiveStartKey'] = startKey
elif 'ExclusiveStartKey' in kwargs:
del kwargs['ExclusiveStartKey']
result = table.query(**kwargs)
for item in result['Items']:
yield item
startKey = result.get('LastEvaluatedKey')
if not startKey:
break
def key_for(item, pk_keys):
return dict([(key, item[key]) for key in item if key in pk_keys])
def updates_for(item, pk_keys):
updates = {}
for key in item:
if key not in pk_keys:
value = item[key]
if value is None or value == '':
value = {'Action': 'DELETE'}
else:
value = {'Action': 'PUT', 'Value': value}
updates[key] = value
return updates
def save_item(table, item, pk_keys=['id']):
table.update_item(
Key=key_for(item, pk_keys),
AttributeUpdates=updates_for(item, pk_keys)
)
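# Usage sketch (hypothetical records, not from the original module):
# save_user({'id': 42, 'name': 'alice'})       # single-key table ('id')
# save_connection(1, 2, out_reason='related')  # composite key ('id', 'other_id')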
|
[
"boto3.dynamodb.conditions.Key",
"boto3.resource"
] |
[((282, 333), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {'region_name': '"""us-west-2"""'}), "('dynamodb', region_name='us-west-2')\n", (296, 333), False, 'import boto3\n'), ((520, 570), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {'region_name': '"""ap-northeast-1"""'}), "('s3', region_name='ap-northeast-1')\n", (534, 570), False, 'import boto3\n'), ((2586, 2595), 'boto3.dynamodb.conditions.Key', 'Key', (['"""id"""'], {}), "('id')\n", (2589, 2595), False, 'from boto3.dynamodb.conditions import Key\n'), ((733, 747), 'boto3.dynamodb.conditions.Key', 'Key', (['"""user_id"""'], {}), "('user_id')\n", (736, 747), False, 'from boto3.dynamodb.conditions import Key\n'), ((1019, 1033), 'boto3.dynamodb.conditions.Key', 'Key', (['"""user_id"""'], {}), "('user_id')\n", (1022, 1033), False, 'from boto3.dynamodb.conditions import Key\n'), ((1378, 1392), 'boto3.dynamodb.conditions.Key', 'Key', (['"""user_id"""'], {}), "('user_id')\n", (1381, 1392), False, 'from boto3.dynamodb.conditions import Key\n')]
|
import numpy as np
import sys
# convert any index to a 4 tuple
def unpackIndex(i, default):
a = b = c = d = default
if type(i) == int:
d = i
elif len(i) == 1:
d = i[0]
elif len(i) == 2:
c = i[0]
d = i[1]
elif len(i) == 3:
b = i[0]
c = i[1]
d = i[2]
else:
a = i[0]
b = i[1]
c = i[2]
d = i[3]
return (a, b, c, d)
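# Example: unpackIndex((3, 4), 1) returns (1, 1, 3, 4) -- missing leading
# dimensions are padded with the default so every array is treated as 4-D.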
def convert(path):
# load the file
arr = np.load(path + ".npy")
# open the output file
with open(("../cifar10/" + path + ".c").lower(), "w") as f:
# get dimensions
(a, b, c, d) = unpackIndex(arr.shape, 1)
arr = arr.reshape((a, b, c, d))
# write head
f.write('#include "../include/deep_cyber.h"\n')
f.write('\n')
f.write('const uint8_t ' + path.upper() + '_DATA[' + str(arr.view(np.uint8).flatten().shape[0]) + '] = {\n')
# write data
for ai in range(a):
for bi in range(b):
for ci in range(c):
for di in range(d):
elem_arr = np.zeros((1), dtype=np.float32)
elem_arr[0] = arr[ai, bi, ci, di]
elem = elem_arr.view(np.uint8).flatten()
e = elem.shape[0]
for ei in range(e):
if ai == a - 1 and bi == b - 1 and ci == c - 1 and di == d - 1 and ei == e - 1:
break
f.write('\t' + hex(elem[ei]) + ',\n')
# write tail
elem_arr = np.zeros((1), dtype=np.float32)
elem_arr[0] = arr.flatten()[-1]
elem = elem_arr.view(np.uint8).flatten()
e = elem.shape[0]
f.write('\t' + hex(elem[-1]) + '};\n')
f.write('\n')
f.write('Tensor ' + path.upper() + ' = {' + str(a) + ', ' + str(b) + ', ' + str(c) + ', ' + str(d) + ', (float*)' + path.upper() + '_DATA};\n')
convert("c1b")
convert("c1w")
convert("c2b")
convert("c2w")
convert("c3b")
convert("c3w")
convert("c4b")
convert("c4w")
convert("d1b")
convert("d1w")
convert("d2b")
convert("d2w")
|
[
"numpy.load",
"numpy.zeros"
] |
[((479, 501), 'numpy.load', 'np.load', (["(path + '.npy')"], {}), "(path + '.npy')\n", (486, 501), True, 'import numpy as np\n'), ((1660, 1689), 'numpy.zeros', 'np.zeros', (['(1)'], {'dtype': 'np.float32'}), '(1, dtype=np.float32)\n', (1668, 1689), True, 'import numpy as np\n'), ((1138, 1167), 'numpy.zeros', 'np.zeros', (['(1)'], {'dtype': 'np.float32'}), '(1, dtype=np.float32)\n', (1146, 1167), True, 'import numpy as np\n')]
|
x1=[]
x2=[]
x3=[]
import sys
import numpy as np
f1 = open("light_gbm.txt")
for line in f1:
x1.append(float((line.strip().split('\t')[1])))
#print x1
f2 = open("simese_cnn.txt")
for line in f2:
x2.append(0.5 + 0.5*float((line.strip().split('\t')[1])))
#print x2
f3 = open("matchpyramid.txt")
for line in f3:
x3.append(float((line.strip().split('\t')[1])))
#print x3
x1=np.asarray(x1)
x2=np.asarray(x2)
x3=np.asarray(x3)
f=np.vstack((x1,x2))
f=np.vstack((f,x3))
y_pred=f[0]/3+f[1]/3+f[2]/3
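# Unweighted mean of the three model scores, equivalent to f.mean(axis=0).
# The 0.5 + 0.5*score rescaling above presumably maps the siamese scores
# from [-1, 1] onto [0, 1] so all three models share a common range.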
#print pred.shape
#print pred
for i in range(len(y_pred)):
if y_pred[i]>0.31:
y_pred[i]=1
else:
y_pred[i]=0
output_file=sys.argv[1]
with open(output_file, 'w') as fo:
print("\nemsembling...\n")
lineno = 1
for pred in y_pred:
fo.write('{}\t{}\n'.format(lineno, int(pred)))
lineno += 1
|
[
"numpy.asarray",
"numpy.vstack"
] |
[((385, 399), 'numpy.asarray', 'np.asarray', (['x1'], {}), '(x1)\n', (395, 399), True, 'import numpy as np\n'), ((403, 417), 'numpy.asarray', 'np.asarray', (['x2'], {}), '(x2)\n', (413, 417), True, 'import numpy as np\n'), ((421, 435), 'numpy.asarray', 'np.asarray', (['x3'], {}), '(x3)\n', (431, 435), True, 'import numpy as np\n'), ((438, 457), 'numpy.vstack', 'np.vstack', (['(x1, x2)'], {}), '((x1, x2))\n', (447, 457), True, 'import numpy as np\n'), ((459, 477), 'numpy.vstack', 'np.vstack', (['(f, x3)'], {}), '((f, x3))\n', (468, 477), True, 'import numpy as np\n')]
|
from mujoco_base import MuJoCoBase
def main():
xml_path = "./xml/ball.xml"
mjb = MuJoCoBase(xml_path)
mjb.simulate()
if __name__ == "__main__":
main()
|
[
"mujoco_base.MuJoCoBase"
] |
[((91, 111), 'mujoco_base.MuJoCoBase', 'MuJoCoBase', (['xml_path'], {}), '(xml_path)\n', (101, 111), False, 'from mujoco_base import MuJoCoBase\n')]
|
from django.conf import settings
settings.configure(
SESSION_ENGINE='rdb_session.main'
)
|
[
"django.conf.settings.configure"
] |
[((35, 88), 'django.conf.settings.configure', 'settings.configure', ([], {'SESSION_ENGINE': '"""rdb_session.main"""'}), "(SESSION_ENGINE='rdb_session.main')\n", (53, 88), False, 'from django.conf import settings\n')]
|
"""
Module with a function for plotting spectra.
"""
import os
import math
import warnings
import itertools
from typing import Optional, Union, Tuple, List
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from typeguard import typechecked
from matplotlib.ticker import AutoMinorLocator, MultipleLocator
from species.core import box, constants
from species.read import read_filter
from species.util import plot_util
@typechecked
def plot_spectrum(boxes: list,
filters: Optional[List[str]] = None,
residuals: Optional[box.ResidualsBox] = None,
plot_kwargs: Optional[List[Optional[dict]]] = None,
xlim: Optional[Tuple[float, float]] = None,
ylim: Optional[Tuple[float, float]] = None,
ylim_res: Optional[Tuple[float, float]] = None,
scale: Optional[Tuple[str, str]] = None,
title: Optional[str] = None,
offset: Optional[Tuple[float, float]] = None,
legend: Optional[Union[str, dict, Tuple[float, float],
List[Optional[Union[dict, str, Tuple[float, float]]]]]] = None,
figsize: Optional[Tuple[float, float]] = (10., 5.),
object_type: str = 'planet',
quantity: str = 'flux density',
output: str = 'spectrum.pdf'):
"""
Parameters
----------
boxes : list(species.core.box, )
Boxes with data.
filters : list(str, ), None
Filter IDs for which the transmission profile is plotted. Not plotted if set to None.
residuals : species.core.box.ResidualsBox, None
Box with residuals of a fit. Not plotted if set to None.
plot_kwargs : list(dict, ), None
List with dictionaries of keyword arguments for each box. For example, if the ``boxes``
are a ``ModelBox`` and ``ObjectBox``:
.. code-block:: python
plot_kwargs=[{'ls': '-', 'lw': 1., 'color': 'black'},
{'spectrum_1': {'marker': 'o', 'ms': 3., 'color': 'tab:brown', 'ls': 'none'},
'spectrum_2': {'marker': 'o', 'ms': 3., 'color': 'tab:blue', 'ls': 'none'},
'Paranal/SPHERE.IRDIS_D_H23_3': {'marker': 's', 'ms': 4., 'color': 'tab:cyan', 'ls': 'none'},
'Paranal/SPHERE.IRDIS_D_K12_1': [{'marker': 's', 'ms': 4., 'color': 'tab:orange', 'ls': 'none'},
{'marker': 's', 'ms': 4., 'color': 'tab:red', 'ls': 'none'}],
'Paranal/NACO.Lp': {'marker': 's', 'ms': 4., 'color': 'tab:green', 'ls': 'none'},
'Paranal/NACO.Mp': {'marker': 's', 'ms': 4., 'color': 'tab:green', 'ls': 'none'}}]
For an ``ObjectBox``, the dictionary contains items for the different spectrum and filter
names stored with :func:`~species.data.database.Database.add_object`. In case both
an ``ObjectBox`` and a ``SynphotBox`` are provided, then the latter can be set to ``None``
in order to use the same (but open) symbols as the data from the ``ObjectBox``. Note that
if a filter name is duplicated in an ``ObjectBox`` (Paranal/SPHERE.IRDIS_D_K12_1 in the
example) then a list with two dictionaries should be provided. Colors are automatically
chosen if ``plot_kwargs`` is set to ``None``.
xlim : tuple(float, float)
Limits of the wavelength axis.
ylim : tuple(float, float)
Limits of the flux axis.
ylim_res : tuple(float, float), None
Limits of the residuals axis. Automatically chosen (based on the minimum and maximum
residual value) if set to None.
scale : tuple(str, str), None
Scale of the x and y axes ('linear' or 'log'). The scale is set to ``('linear', 'linear')``
if set to ``None``.
title : str
Title.
offset : tuple(float, float)
Offset for the label of the x- and y-axis.
legend : str, tuple, dict, list(dict, dict), None
Location of the legend (str or tuple(float, float)) or a dictionary with the ``**kwargs``
of ``matplotlib.pyplot.legend``, for example ``{'loc': 'upper left', 'fontsize': 12.}``.
Alternatively, a list with two values can be provided to separate the model and data
handles in two legends. Each of these two elements can be set to ``None``. For example,
``[None, {'loc': 'upper left', 'fontsize': 12.}]``, if only the data points should be
included in a legend.
figsize : tuple(float, float)
Figure size.
object_type : str
Object type ('planet' or 'star'). With 'planet', the radius and mass are expressed in
Jupiter units. With 'star', the radius and mass are expressed in solar units.
quantity : str
The quantity of the y-axis ('flux density', 'flux', or 'magnitude').
output : str
Output filename.
Returns
-------
NoneType
None
"""
mpl.rcParams['font.serif'] = ['Bitstream Vera Serif']
mpl.rcParams['font.family'] = 'serif'
plt.rc('axes', edgecolor='black', linewidth=2.2)
plt.rcParams['axes.axisbelow'] = False
if plot_kwargs is None:
plot_kwargs = []
elif plot_kwargs is not None and len(boxes) != len(plot_kwargs):
raise ValueError(f'The number of \'boxes\' ({len(boxes)}) should be equal to the '
f'number of items in \'plot_kwargs\' ({len(plot_kwargs)}).')
if residuals is not None and filters is not None:
plt.figure(1, figsize=figsize)
gridsp = mpl.gridspec.GridSpec(3, 1, height_ratios=[1, 3, 1])
gridsp.update(wspace=0, hspace=0, left=0, right=1, bottom=0, top=1)
ax1 = plt.subplot(gridsp[1, 0])
ax2 = plt.subplot(gridsp[0, 0])
ax3 = plt.subplot(gridsp[2, 0])
elif residuals is not None:
plt.figure(1, figsize=figsize)
gridsp = mpl.gridspec.GridSpec(2, 1, height_ratios=[4, 1])
gridsp.update(wspace=0, hspace=0, left=0, right=1, bottom=0, top=1)
ax1 = plt.subplot(gridsp[0, 0])
ax2 = None
ax3 = plt.subplot(gridsp[1, 0])
elif filters is not None:
plt.figure(1, figsize=figsize)
gridsp = mpl.gridspec.GridSpec(2, 1, height_ratios=[1, 4])
gridsp.update(wspace=0, hspace=0, left=0, right=1, bottom=0, top=1)
ax1 = plt.subplot(gridsp[1, 0])
ax2 = plt.subplot(gridsp[0, 0])
ax3 = None
else:
plt.figure(1, figsize=figsize)
gridsp = mpl.gridspec.GridSpec(1, 1)
gridsp.update(wspace=0, hspace=0, left=0, right=1, bottom=0, top=1)
ax1 = plt.subplot(gridsp[0, 0])
ax2 = None
ax3 = None
if residuals is not None:
labelbottom = False
else:
labelbottom = True
if scale is None:
scale = ('linear', 'linear')
ax1.set_xscale(scale[0])
ax1.set_yscale(scale[1])
if filters is not None:
ax2.set_xscale(scale[0])
if residuals is not None:
ax3.set_xscale(scale[0])
ax1.tick_params(axis='both', which='major', colors='black', labelcolor='black',
direction='in', width=1, length=5, labelsize=12, top=True,
bottom=True, left=True, right=True, labelbottom=labelbottom)
ax1.tick_params(axis='both', which='minor', colors='black', labelcolor='black',
direction='in', width=1, length=3, labelsize=12, top=True,
bottom=True, left=True, right=True, labelbottom=labelbottom)
if filters is not None:
ax2.tick_params(axis='both', which='major', colors='black', labelcolor='black',
direction='in', width=1, length=5, labelsize=12, top=True,
bottom=True, left=True, right=True, labelbottom=False)
ax2.tick_params(axis='both', which='minor', colors='black', labelcolor='black',
direction='in', width=1, length=3, labelsize=12, top=True,
bottom=True, left=True, right=True, labelbottom=False)
if residuals is not None:
ax3.tick_params(axis='both', which='major', colors='black', labelcolor='black',
direction='in', width=1, length=5, labelsize=12, top=True,
bottom=True, left=True, right=True)
ax3.tick_params(axis='both', which='minor', colors='black', labelcolor='black',
direction='in', width=1, length=3, labelsize=12, top=True,
bottom=True, left=True, right=True)
if scale[0] == 'linear':
ax1.xaxis.set_minor_locator(AutoMinorLocator(5))
if scale[1] == 'linear':
ax1.yaxis.set_minor_locator(AutoMinorLocator(5))
# ax1.set_yticks([1e-5, 1e-4, 1e-3, 1e-2, 1e-1, 1e0])
# ax3.set_yticks([-2., 0., 2.])
if filters is not None and scale[0] == 'linear':
ax2.xaxis.set_minor_locator(AutoMinorLocator(5))
if residuals is not None and scale[0] == 'linear':
ax3.xaxis.set_minor_locator(AutoMinorLocator(5))
if residuals is not None and filters is not None:
ax1.set_xlabel('')
ax2.set_xlabel('')
ax3.set_xlabel('Wavelength (µm)', fontsize=13)
elif residuals is not None:
ax1.set_xlabel('')
        ax3.set_xlabel('Wavelength (µm)', fontsize=13)
elif filters is not None:
ax1.set_xlabel('Wavelength (µm)', fontsize=13)
ax2.set_xlabel('')
else:
ax1.set_xlabel('Wavelength (µm)', fontsize=13)
if filters is not None:
ax2.set_ylabel('Transmission', fontsize=13)
if residuals is not None:
        if quantity in ('flux density', 'flux'):
            ax3.set_ylabel(r'$\Delta$$\mathregular{F}_\lambda$ ($\sigma$)', fontsize=11)
if xlim is None:
ax1.set_xlim(0.6, 6.)
else:
ax1.set_xlim(xlim[0], xlim[1])
if quantity == 'magnitude':
scaling = 1.
ax1.set_ylabel('Flux contrast (mag)', fontsize=13)
if ylim:
ax1.set_ylim(ylim[0], ylim[1])
else:
if ylim:
ax1.set_ylim(ylim[0], ylim[1])
ylim = ax1.get_ylim()
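        # pick a power-of-ten scaling from the upper y-limit so that the
        # flux tick labels stay of order unity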
exponent = math.floor(math.log10(ylim[1]))
scaling = 10.**exponent
if quantity == 'flux density':
ylabel = r'$\mathregular{F}_\lambda$ (10$^{'+str(exponent)+r'}$ W m$^{-2}$ µm$^{-1}$)'
elif quantity == 'flux':
ylabel = r'$\lambda$$\mathregular{F}_\lambda$ (10$^{'+str(exponent)+r'}$ W m$^{-2}$)'
ax1.set_ylabel(ylabel, fontsize=11)
ax1.set_ylim(ylim[0]/scaling, ylim[1]/scaling)
if ylim[0] < 0.:
ax1.axhline(0.0, ls='--', lw=0.7, color='gray', dashes=(2, 4), zorder=0.5)
else:
if quantity == 'flux density':
ax1.set_ylabel(r'$\mathregular{F}_\lambda$ (W m$^{-2}$ µm$^{-1}$)', fontsize=11)
elif quantity == 'flux':
ax1.set_ylabel(r'$\lambda$$\mathregular{F}_\lambda$ (W m$^{-2}$)', fontsize=11)
scaling = 1.
xlim = ax1.get_xlim()
if filters is not None:
ax2.set_xlim(xlim[0], xlim[1])
ax2.set_ylim(0., 1.)
if residuals is not None:
ax3.set_xlim(xlim[0], xlim[1])
if offset is not None and residuals is not None and filters is not None:
ax3.get_xaxis().set_label_coords(0.5, offset[0])
ax1.get_yaxis().set_label_coords(offset[1], 0.5)
ax2.get_yaxis().set_label_coords(offset[1], 0.5)
ax3.get_yaxis().set_label_coords(offset[1], 0.5)
elif offset is not None and filters is not None:
ax1.get_xaxis().set_label_coords(0.5, offset[0])
ax1.get_yaxis().set_label_coords(offset[1], 0.5)
ax2.get_yaxis().set_label_coords(offset[1], 0.5)
elif offset is not None and residuals is not None:
ax3.get_xaxis().set_label_coords(0.5, offset[0])
ax1.get_yaxis().set_label_coords(offset[1], 0.5)
ax3.get_yaxis().set_label_coords(offset[1], 0.5)
elif offset is not None:
ax1.get_xaxis().set_label_coords(0.5, offset[0])
ax1.get_yaxis().set_label_coords(offset[1], 0.5)
else:
ax1.get_xaxis().set_label_coords(0.5, -0.12)
ax1.get_yaxis().set_label_coords(-0.1, 0.5)
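    # plot every box in turn; model spectra, observed spectra, photometry,
    # object data, and synthetic photometry each have a dedicated branch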
for j, boxitem in enumerate(boxes):
flux_scaling = 1.
        if len(plot_kwargs) <= j:
            plot_kwargs.append(None)
if isinstance(boxitem, (box.SpectrumBox, box.ModelBox)):
wavelength = boxitem.wavelength
flux = boxitem.flux
if isinstance(wavelength[0], (np.float32, np.float64)):
data = np.array(flux, dtype=np.float64)
masked = np.ma.array(data, mask=np.isnan(data))
if isinstance(boxitem, box.ModelBox):
param = boxitem.parameters
par_key, par_unit, par_label = plot_util.quantity_unit(
param=list(param.keys()), object_type=object_type)
label = ''
newline = False
for i, item in enumerate(par_key):
if item[:4] == 'teff':
value = f'{param[item]:.0f}'
elif item in ['logg', 'feh', 'fsed', 'lognorm_ext',
'powerlaw_ext', 'ism_ext']:
value = f'{param[item]:.1f}'
elif item in ['co']:
value = f'{param[item]:.2f}'
elif item[:6] == 'radius':
if object_type == 'planet':
value = f'{param[item]:.1f}'
# if item == 'radius_1':
# value = f'{param[item]:.0f}'
# else:
# value = f'{param[item]:.1f}'
elif object_type == 'star':
value = f'{param[item]*constants.R_JUP/constants.R_SUN:.1f}'
elif item == 'mass':
if object_type == 'planet':
value = f'{param[item]:.0f}'
elif object_type == 'star':
value = f'{param[item]*constants.M_JUP/constants.M_SUN:.1f}'
elif item == 'luminosity':
value = f'{np.log10(param[item]):.2f}'
else:
continue
# if len(label) > 80 and newline == False:
# label += '\n'
# newline = True
if par_unit[i] is None:
label += f'{par_label[i]} = {value}'
else:
label += f'{par_label[i]} = {value} {par_unit[i]}'
if i < len(par_key)-1:
label += ', '
else:
label = None
if plot_kwargs[j]:
kwargs_copy = plot_kwargs[j].copy()
if 'label' in kwargs_copy:
if kwargs_copy['label'] is None:
label = None
else:
label = kwargs_copy['label']
del kwargs_copy['label']
if quantity == 'flux':
flux_scaling = wavelength
ax1.plot(wavelength, flux_scaling*masked/scaling, zorder=2, label=label, **kwargs_copy)
else:
if quantity == 'flux':
flux_scaling = wavelength
ax1.plot(wavelength, flux_scaling*masked/scaling, lw=0.5, label=label, zorder=2)
            elif isinstance(wavelength[0], np.ndarray):
for i, item in enumerate(wavelength):
data = np.array(flux[i], dtype=np.float64)
masked = np.ma.array(data, mask=np.isnan(data))
if isinstance(boxitem.name[i], bytes):
label = boxitem.name[i].decode('utf-8')
else:
label = boxitem.name[i]
if quantity == 'flux':
flux_scaling = item
ax1.plot(item, flux_scaling*masked/scaling, lw=0.5, label=label)
elif isinstance(boxitem, list):
for i, item in enumerate(boxitem):
wavelength = item.wavelength
flux = item.flux
data = np.array(flux, dtype=np.float64)
masked = np.ma.array(data, mask=np.isnan(data))
if quantity == 'flux':
flux_scaling = wavelength
if plot_kwargs[j]:
ax1.plot(wavelength, flux_scaling*masked/scaling, zorder=1, **plot_kwargs[j])
else:
ax1.plot(wavelength, flux_scaling*masked/scaling, color='gray', lw=0.2, alpha=0.5, zorder=1)
elif isinstance(boxitem, box.PhotometryBox):
label_check = []
for i, item in enumerate(boxitem.wavelength):
transmission = read_filter.ReadFilter(boxitem.filter_name[i])
fwhm = transmission.filter_fwhm()
if quantity == 'flux':
flux_scaling = item
if plot_kwargs[j]:
if 'label' in plot_kwargs[j] and plot_kwargs[j]['label'] not in label_check:
label_check.append(plot_kwargs[j]['label'])
elif 'label' in plot_kwargs[j] and plot_kwargs[j]['label'] in label_check:
del plot_kwargs[j]['label']
if boxitem.flux[i][1] is None:
ax1.errorbar(item, flux_scaling*boxitem.flux[i][0]/scaling, xerr=fwhm/2.,
yerr=None, zorder=3, **plot_kwargs[j])
else:
ax1.errorbar(item, flux_scaling*boxitem.flux[i][0]/scaling, xerr=fwhm/2.,
yerr=flux_scaling*boxitem.flux[i][1]/scaling, zorder=3, **plot_kwargs[j])
else:
if boxitem.flux[i][1] is None:
ax1.errorbar(item, flux_scaling*boxitem.flux[i][0]/scaling, xerr=fwhm/2.,
yerr=None, marker='s', ms=6, color='black', zorder=3)
else:
ax1.errorbar(item, flux_scaling*boxitem.flux[i][0]/scaling, xerr=fwhm/2.,
yerr=flux_scaling*boxitem.flux[i][1]/scaling, marker='s', ms=6, color='black',
zorder=3)
elif isinstance(boxitem, box.ObjectBox):
if boxitem.spectrum is not None:
spec_list = []
wavel_list = []
for item in boxitem.spectrum:
spec_list.append(item)
wavel_list.append(boxitem.spectrum[item][0][0, 0])
sort_index = np.argsort(wavel_list)
spec_sort = []
for i in range(sort_index.size):
spec_sort.append(spec_list[sort_index[i]])
for key in spec_sort:
masked = np.ma.array(boxitem.spectrum[key][0],
mask=np.isnan(boxitem.spectrum[key][0]))
if quantity == 'flux':
flux_scaling = masked[:, 0]
if not plot_kwargs[j] or key not in plot_kwargs[j]:
plot_obj = ax1.errorbar(masked[:, 0], flux_scaling*masked[:, 1]/scaling,
yerr=flux_scaling*masked[:, 2]/scaling, ms=2, marker='s',
zorder=2.5, ls='none')
if plot_kwargs[j] is None:
plot_kwargs[j] = {}
plot_kwargs[j][key] = {'marker': 's', 'ms': 2., 'ls': 'none',
'color': plot_obj[0].get_color()}
else:
ax1.errorbar(masked[:, 0], flux_scaling*masked[:, 1]/scaling, yerr=flux_scaling*masked[:, 2]/scaling,
zorder=2.5, **plot_kwargs[j][key])
if boxitem.flux is not None:
filter_list = []
wavel_list = []
for item in boxitem.flux:
read_filt = read_filter.ReadFilter(item)
filter_list.append(item)
wavel_list.append(read_filt.mean_wavelength())
sort_index = np.argsort(wavel_list)
filter_sort = []
for i in range(sort_index.size):
filter_sort.append(filter_list[sort_index[i]])
for item in filter_sort:
transmission = read_filter.ReadFilter(item)
wavelength = transmission.mean_wavelength()
fwhm = transmission.filter_fwhm()
if not plot_kwargs[j] or item not in plot_kwargs[j]:
if not plot_kwargs[j]:
plot_kwargs[j] = {}
if quantity == 'flux':
flux_scaling = wavelength
if isinstance(boxitem.flux[item][0], np.ndarray):
for i in range(boxitem.flux[item].shape[1]):
plot_obj = ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item][0, i]/scaling, xerr=fwhm/2.,
yerr=flux_scaling*boxitem.flux[item][1, i]/scaling, marker='s', ms=5, zorder=3)
else:
plot_obj = ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item][0]/scaling, xerr=fwhm/2.,
yerr=flux_scaling*boxitem.flux[item][1]/scaling, marker='s', ms=5, zorder=3)
plot_kwargs[j][item] = {'marker': 's', 'ms': 5., 'color': plot_obj[0].get_color()}
else:
if quantity == 'flux':
flux_scaling = wavelength
if isinstance(boxitem.flux[item][0], np.ndarray):
if not isinstance(plot_kwargs[j][item], list):
                                raise ValueError(f'A list with {boxitem.flux[item].shape[1]} '
                                                 f'dictionaries is required because the filter '
                                                 f'{item} has {boxitem.flux[item].shape[1]} '
                                                 f'values.')
for i in range(boxitem.flux[item].shape[1]):
ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item][0, i]/scaling, xerr=fwhm/2.,
yerr=flux_scaling*boxitem.flux[item][1, i]/scaling, zorder=3, **plot_kwargs[j][item][i])
else:
if boxitem.flux[item][1] == 0.:
ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item][0]/scaling,
xerr=fwhm/2., yerr=0.5*flux_scaling*boxitem.flux[item][0]/scaling,
uplims=True, capsize=2., capthick=0., zorder=3, **plot_kwargs[j][item])
else:
ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item][0]/scaling,
xerr=fwhm/2., yerr=flux_scaling*boxitem.flux[item][1]/scaling,
zorder=3, **plot_kwargs[j][item])
elif isinstance(boxitem, box.SynphotBox):
for i, find_item in enumerate(boxes):
if isinstance(find_item, box.ObjectBox):
obj_index = i
break
for item in boxitem.flux:
transmission = read_filter.ReadFilter(item)
wavelength = transmission.mean_wavelength()
fwhm = transmission.filter_fwhm()
if quantity == 'flux':
flux_scaling = wavelength
if not plot_kwargs[obj_index] or item not in plot_kwargs[obj_index]:
ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item]/scaling, xerr=fwhm/2., yerr=None,
alpha=0.7, marker='s', ms=5, zorder=4, mfc='white')
else:
if isinstance(plot_kwargs[obj_index][item], list):
# In case of multiple photometry values for the same filter, use the
# plot_kwargs of the first data point
kwargs_copy = plot_kwargs[obj_index][item][0].copy()
if 'label' in kwargs_copy:
del kwargs_copy['label']
ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item]/scaling, xerr=fwhm/2., yerr=None,
zorder=4, mfc='white', **kwargs_copy)
else:
kwargs_copy = plot_kwargs[obj_index][item].copy()
if 'label' in kwargs_copy:
del kwargs_copy['label']
if 'mfc' in kwargs_copy:
del kwargs_copy['mfc']
ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item]/scaling, xerr=fwhm/2., yerr=None,
zorder=4, mfc='white', **kwargs_copy)
if filters is not None:
for i, item in enumerate(filters):
transmission = read_filter.ReadFilter(item)
data = transmission.get_filter()
ax2.plot(data[:, 0], data[:, 1], '-', lw=0.7, color='black', zorder=1)
if residuals is not None:
for i, find_item in enumerate(boxes):
if isinstance(find_item, box.ObjectBox):
obj_index = i
break
res_max = 0.
if residuals.photometry is not None:
for item in residuals.photometry:
if not plot_kwargs[obj_index] or item not in plot_kwargs[obj_index]:
ax3.plot(residuals.photometry[item][0], residuals.photometry[item][1], marker='s',
ms=5, linestyle='none', zorder=2)
else:
if residuals.photometry[item].ndim == 1:
ax3.errorbar(residuals.photometry[item][0], residuals.photometry[item][1],
zorder=2, **plot_kwargs[obj_index][item])
elif residuals.photometry[item].ndim == 2:
for i in range(residuals.photometry[item].shape[1]):
if isinstance(plot_kwargs[obj_index][item], list):
ax3.errorbar(residuals.photometry[item][0, i],
residuals.photometry[item][1, i], zorder=2,
**plot_kwargs[obj_index][item][i])
else:
ax3.errorbar(residuals.photometry[item][0, i],
residuals.photometry[item][1, i], zorder=2,
**plot_kwargs[obj_index][item])
                max_tmp = np.nanmax(np.abs(residuals.photometry[item][1]))
                if max_tmp > res_max:
                    res_max = max_tmp
if residuals.spectrum is not None:
for key, value in residuals.spectrum.items():
if not plot_kwargs[obj_index] or key not in plot_kwargs[obj_index]:
ax3.errorbar(value[:, 0], value[:, 1], marker='o', ms=2, ls='none', zorder=1)
else:
ax3.errorbar(value[:, 0], value[:, 1], zorder=1, **plot_kwargs[obj_index][key])
max_tmp = np.nanmax(np.abs(value[:, 1]))
if max_tmp > res_max:
res_max = max_tmp
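        # symmetric limits for the residual panel; fall back to +/-5 sigma
        # when outliers would stretch it beyond +/-10 sigma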
res_lim = math.ceil(1.1*res_max)
if res_lim > 10.:
res_lim = 5.
ax3.axhline(0., ls='--', lw=0.7, color='gray', dashes=(2, 4), zorder=0.5)
# ax3.axhline(-2.5, ls=':', lw=0.7, color='gray', dashes=(1, 4), zorder=0.5)
# ax3.axhline(2.5, ls=':', lw=0.7, color='gray', dashes=(1, 4), zorder=0.5)
if ylim_res is None:
ax3.set_ylim(-res_lim, res_lim)
else:
ax3.set_ylim(ylim_res[0], ylim_res[1])
if filters is not None:
ax2.set_ylim(0., 1.1)
print(f'Plotting spectrum: {output}...', end='', flush=True)
if title is not None:
if filters:
ax2.set_title(title, y=1.02, fontsize=13)
else:
ax1.set_title(title, y=1.02, fontsize=13)
handles, labels = ax1.get_legend_handles_labels()
if handles and legend is not None:
if isinstance(legend, list):
model_handles = []
data_handles = []
model_labels = []
data_labels = []
for i, item in enumerate(handles):
if isinstance(item, mpl.lines.Line2D):
model_handles.append(item)
model_labels.append(labels[i])
elif isinstance(item, mpl.container.ErrorbarContainer):
data_handles.append(item)
data_labels.append(labels[i])
else:
warnings.warn(f'The object type {item} is not implemented for the legend.')
if legend[0] is not None:
if isinstance(legend[0], (str, tuple)):
leg_1 = ax1.legend(model_handles, model_labels, loc=legend[0], fontsize=10., frameon=False)
else:
leg_1 = ax1.legend(model_handles, model_labels, **legend[0])
else:
leg_1 = None
if legend[1] is not None:
if isinstance(legend[1], (str, tuple)):
leg_2 = ax1.legend(data_handles, data_labels, loc=legend[1], fontsize=8, frameon=False)
else:
leg_2 = ax1.legend(data_handles, data_labels, **legend[1])
if leg_1 is not None:
ax1.add_artist(leg_1)
elif isinstance(legend, (str, tuple)):
ax1.legend(loc=legend, fontsize=8, frameon=False)
else:
ax1.legend(**legend)
# filters = ['Paranal/SPHERE.ZIMPOL_N_Ha',
# 'MUSE/Hbeta',
# 'ALMA/855']
#
# filters = ['Paranal/SPHERE.IRDIS_B_Y',
# 'MKO/NSFCam.J',
# 'Paranal/SPHERE.IRDIS_D_H23_2',
# 'Paranal/SPHERE.IRDIS_D_H23_3',
# 'Paranal/SPHERE.IRDIS_D_K12_1',
# 'Paranal/SPHERE.IRDIS_D_K12_2',
# 'Paranal/NACO.Lp',
# 'Paranal/NACO.NB405',
# 'Paranal/NACO.Mp']
#
# for i, item in enumerate(filters):
# readfilter = read_filter.ReadFilter(item)
# filter_wavelength = readfilter.mean_wavelength()
# filter_width = readfilter.filter_fwhm()
#
# # if i == 5:
# # ax1.errorbar(filter_wavelength, 1.3e4, xerr=filter_width/2., color='dimgray', elinewidth=2.5, zorder=10)
# # else:
# # ax1.errorbar(filter_wavelength, 6e3, xerr=filter_width/2., color='dimgray', elinewidth=2.5, zorder=10)
#
# if i == 0:
# ax1.text(filter_wavelength, 1e-2, r'H$\alpha$', ha='center', va='center', fontsize=10, color='black')
# elif i == 1:
# ax1.text(filter_wavelength, 1e-2, r'H$\beta$', ha='center', va='center', fontsize=10, color='black')
# elif i == 2:
# ax1.text(filter_wavelength, 1e-2, 'ALMA\nband 7 rms', ha='center', va='center', fontsize=8, color='black')
#
# if i == 0:
# ax1.text(filter_wavelength, 1.4, 'Y', ha='center', va='center', fontsize=10, color='black')
# elif i == 1:
# ax1.text(filter_wavelength, 1.4, 'J', ha='center', va='center', fontsize=10, color='black')
# elif i == 2:
# ax1.text(filter_wavelength-0.04, 1.4, 'H2', ha='center', va='center', fontsize=10, color='black')
# elif i == 3:
# ax1.text(filter_wavelength+0.04, 1.4, 'H3', ha='center', va='center', fontsize=10, color='black')
# elif i == 4:
# ax1.text(filter_wavelength, 1.4, 'K1', ha='center', va='center', fontsize=10, color='black')
# elif i == 5:
# ax1.text(filter_wavelength, 1.4, 'K2', ha='center', va='center', fontsize=10, color='black')
# elif i == 6:
# ax1.text(filter_wavelength, 1.4, 'L$\'$', ha='center', va='center', fontsize=10, color='black')
# elif i == 7:
# ax1.text(filter_wavelength, 1.4, 'NB4.05', ha='center', va='center', fontsize=10, color='black')
# elif i == 8:
# ax1.text(filter_wavelength, 1.4, 'M$\'}$', ha='center', va='center', fontsize=10, color='black')
#
# ax1.text(1.26, 0.58, 'VLT/SPHERE', ha='center', va='center', fontsize=8., color='slateblue', rotation=43.)
# ax1.text(2.5, 1.28, 'VLT/SINFONI', ha='left', va='center', fontsize=8., color='darkgray')
plt.savefig(os.getcwd()+'/'+output, bbox_inches='tight')
plt.clf()
plt.close()
print(' [DONE]')
|
[
"matplotlib.pyplot.subplot",
"numpy.abs",
"matplotlib.pyplot.clf",
"math.ceil",
"matplotlib.pyplot.close",
"os.getcwd",
"species.read.read_filter.ReadFilter",
"numpy.isnan",
"numpy.argsort",
"numpy.log10",
"matplotlib.pyplot.figure",
"matplotlib.ticker.AutoMinorLocator",
"math.log10",
"matplotlib.pyplot.rc",
"numpy.array",
"warnings.warn",
"matplotlib.gridspec.GridSpec"
] |
[((5193, 5241), 'matplotlib.pyplot.rc', 'plt.rc', (['"""axes"""'], {'edgecolor': '"""black"""', 'linewidth': '(2.2)'}), "('axes', edgecolor='black', linewidth=2.2)\n", (5199, 5241), True, 'import matplotlib.pyplot as plt\n'), ((33899, 33908), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (33906, 33908), True, 'import matplotlib.pyplot as plt\n'), ((33913, 33924), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (33922, 33924), True, 'import matplotlib.pyplot as plt\n'), ((5649, 5679), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {'figsize': 'figsize'}), '(1, figsize=figsize)\n', (5659, 5679), True, 'import matplotlib.pyplot as plt\n'), ((5697, 5749), 'matplotlib.gridspec.GridSpec', 'mpl.gridspec.GridSpec', (['(3)', '(1)'], {'height_ratios': '[1, 3, 1]'}), '(3, 1, height_ratios=[1, 3, 1])\n', (5718, 5749), True, 'import matplotlib as mpl\n'), ((5841, 5866), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gridsp[1, 0]'], {}), '(gridsp[1, 0])\n', (5852, 5866), True, 'import matplotlib.pyplot as plt\n'), ((5881, 5906), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gridsp[0, 0]'], {}), '(gridsp[0, 0])\n', (5892, 5906), True, 'import matplotlib.pyplot as plt\n'), ((5921, 5946), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gridsp[2, 0]'], {}), '(gridsp[2, 0])\n', (5932, 5946), True, 'import matplotlib.pyplot as plt\n'), ((28610, 28634), 'math.ceil', 'math.ceil', (['(1.1 * res_max)'], {}), '(1.1 * res_max)\n', (28619, 28634), False, 'import math\n'), ((5988, 6018), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {'figsize': 'figsize'}), '(1, figsize=figsize)\n', (5998, 6018), True, 'import matplotlib.pyplot as plt\n'), ((6036, 6085), 'matplotlib.gridspec.GridSpec', 'mpl.gridspec.GridSpec', (['(2)', '(1)'], {'height_ratios': '[4, 1]'}), '(2, 1, height_ratios=[4, 1])\n', (6057, 6085), True, 'import matplotlib as mpl\n'), ((6177, 6202), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gridsp[0, 0]'], {}), '(gridsp[0, 0])\n', (6188, 6202), True, 'import matplotlib.pyplot as plt\n'), ((6236, 6261), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gridsp[1, 0]'], {}), '(gridsp[1, 0])\n', (6247, 6261), True, 'import matplotlib.pyplot as plt\n'), ((8746, 8765), 'matplotlib.ticker.AutoMinorLocator', 'AutoMinorLocator', (['(5)'], {}), '(5)\n', (8762, 8765), False, 'from matplotlib.ticker import AutoMinorLocator, MultipleLocator\n'), ((8833, 8852), 'matplotlib.ticker.AutoMinorLocator', 'AutoMinorLocator', (['(5)'], {}), '(5)\n', (8849, 8852), False, 'from matplotlib.ticker import AutoMinorLocator, MultipleLocator\n'), ((9039, 9058), 'matplotlib.ticker.AutoMinorLocator', 'AutoMinorLocator', (['(5)'], {}), '(5)\n', (9055, 9058), False, 'from matplotlib.ticker import AutoMinorLocator, MultipleLocator\n'), ((9152, 9171), 'matplotlib.ticker.AutoMinorLocator', 'AutoMinorLocator', (['(5)'], {}), '(5)\n', (9168, 9171), False, 'from matplotlib.ticker import AutoMinorLocator, MultipleLocator\n'), ((26258, 26286), 'species.read.read_filter.ReadFilter', 'read_filter.ReadFilter', (['item'], {}), '(item)\n', (26280, 26286), False, 'from species.read import read_filter\n'), ((6301, 6331), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {'figsize': 'figsize'}), '(1, figsize=figsize)\n', (6311, 6331), True, 'import matplotlib.pyplot as plt\n'), ((6349, 6398), 'matplotlib.gridspec.GridSpec', 'mpl.gridspec.GridSpec', (['(2)', '(1)'], {'height_ratios': '[1, 4]'}), '(2, 1, height_ratios=[1, 4])\n', (6370, 6398), True, 'import matplotlib as mpl\n'), ((6490, 6515), 'matplotlib.pyplot.subplot', 
'plt.subplot', (['gridsp[1, 0]'], {}), '(gridsp[1, 0])\n', (6501, 6515), True, 'import matplotlib.pyplot as plt\n'), ((6530, 6555), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gridsp[0, 0]'], {}), '(gridsp[0, 0])\n', (6541, 6555), True, 'import matplotlib.pyplot as plt\n'), ((6594, 6624), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {'figsize': 'figsize'}), '(1, figsize=figsize)\n', (6604, 6624), True, 'import matplotlib.pyplot as plt\n'), ((6642, 6669), 'matplotlib.gridspec.GridSpec', 'mpl.gridspec.GridSpec', (['(1)', '(1)'], {}), '(1, 1)\n', (6663, 6669), True, 'import matplotlib as mpl\n'), ((6761, 6786), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gridsp[0, 0]'], {}), '(gridsp[0, 0])\n', (6772, 6786), True, 'import matplotlib.pyplot as plt\n'), ((10410, 10429), 'math.log10', 'math.log10', (['ylim[1]'], {}), '(ylim[1])\n', (10420, 10429), False, 'import math\n'), ((12879, 12911), 'numpy.array', 'np.array', (['flux'], {'dtype': 'np.float64'}), '(flux, dtype=np.float64)\n', (12887, 12911), True, 'import numpy as np\n'), ((33850, 33861), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (33859, 33861), False, 'import os\n'), ((16940, 16972), 'numpy.array', 'np.array', (['flux'], {'dtype': 'np.float64'}), '(flux, dtype=np.float64)\n', (16948, 16972), True, 'import numpy as np\n'), ((28010, 28047), 'numpy.abs', 'np.abs', (['residuals.photometry[item][1]'], {}), '(residuals.photometry[item][1])\n', (28016, 28047), True, 'import numpy as np\n'), ((28493, 28512), 'numpy.abs', 'np.abs', (['value[:, 1]'], {}), '(value[:, 1])\n', (28499, 28512), True, 'import numpy as np\n'), ((12960, 12974), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (12968, 12974), True, 'import numpy as np\n'), ((16274, 16309), 'numpy.array', 'np.array', (['flux[i]'], {'dtype': 'np.float64'}), '(flux[i], dtype=np.float64)\n', (16282, 16309), True, 'import numpy as np\n'), ((17565, 17611), 'species.read.read_filter.ReadFilter', 'read_filter.ReadFilter', (['boxitem.filter_name[i]'], {}), '(boxitem.filter_name[i])\n', (17587, 17611), False, 'from species.read import read_filter\n'), ((30037, 30112), 'warnings.warn', 'warnings.warn', (['f"""The object type {item} is not implemented for the legend."""'], {}), "(f'The object type {item} is not implemented for the legend.')\n", (30050, 30112), False, 'import warnings\n'), ((17021, 17035), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (17029, 17035), True, 'import numpy as np\n'), ((19453, 19475), 'numpy.argsort', 'np.argsort', (['wavel_list'], {}), '(wavel_list)\n', (19463, 19475), True, 'import numpy as np\n'), ((21097, 21119), 'numpy.argsort', 'np.argsort', (['wavel_list'], {}), '(wavel_list)\n', (21107, 21119), True, 'import numpy as np\n'), ((16362, 16376), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (16370, 16376), True, 'import numpy as np\n'), ((20926, 20954), 'species.read.read_filter.ReadFilter', 'read_filter.ReadFilter', (['item'], {}), '(item)\n', (20948, 20954), False, 'from species.read import read_filter\n'), ((21347, 21375), 'species.read.read_filter.ReadFilter', 'read_filter.ReadFilter', (['item'], {}), '(item)\n', (21369, 21375), False, 'from species.read import read_filter\n'), ((24531, 24559), 'species.read.read_filter.ReadFilter', 'read_filter.ReadFilter', (['item'], {}), '(item)\n', (24553, 24559), False, 'from species.read import read_filter\n'), ((19772, 19806), 'numpy.isnan', 'np.isnan', (['boxitem.spectrum[key][0]'], {}), '(boxitem.spectrum[key][0])\n', (19780, 19806), True, 'import numpy as np\n'), ((14716, 14737), 
'numpy.log10', 'np.log10', (['param[item]'], {}), '(param[item])\n', (14724, 14737), True, 'import numpy as np\n')]
|
from btclib_node.p2p.messages import get_payload
from btclib_node.p2p.messages.errors import Notfound, Reject, RejectCode
def test_not_found():
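    # round-trip check: serialize the message, prepend four placeholder bytes
    # (presumably the network-magic prefix that get_payload strips), parse back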
msg = Notfound([(1, "00" * 32)])
msg_bytes = bytes.fromhex("00" * 4) + msg.serialize()
assert msg == Notfound.deserialize(get_payload(msg_bytes)[1])
def test_reject():
msg = Reject("tx", RejectCode(0x42), "", "00" * 32)
msg_bytes = bytes.fromhex("00" * 4) + msg.serialize()
assert msg == Reject.deserialize(get_payload(msg_bytes)[1])
|
[
"btclib_node.p2p.messages.errors.RejectCode",
"btclib_node.p2p.messages.get_payload",
"btclib_node.p2p.messages.errors.Notfound"
] |
[((156, 182), 'btclib_node.p2p.messages.errors.Notfound', 'Notfound', (["[(1, '00' * 32)]"], {}), "([(1, '00' * 32)])\n", (164, 182), False, 'from btclib_node.p2p.messages.errors import Notfound, Reject, RejectCode\n'), ((351, 365), 'btclib_node.p2p.messages.errors.RejectCode', 'RejectCode', (['(66)'], {}), '(66)\n', (361, 365), False, 'from btclib_node.p2p.messages.errors import Notfound, Reject, RejectCode\n'), ((280, 302), 'btclib_node.p2p.messages.get_payload', 'get_payload', (['msg_bytes'], {}), '(msg_bytes)\n', (291, 302), False, 'from btclib_node.p2p.messages import get_payload\n'), ((479, 501), 'btclib_node.p2p.messages.get_payload', 'get_payload', (['msg_bytes'], {}), '(msg_bytes)\n', (490, 501), False, 'from btclib_node.p2p.messages import get_payload\n')]
|
from meraki_sdk.meraki_sdk_client import MerakiSdkClient
from tools.api_key import key
from get_network_id import get_network_id
meraki = MerakiSdkClient(key)
# net_id = get_network_id()
net_id = 'L_594475150812909110'
client_id = 'k01816e'
clients_controller = meraki.clients
params = {}
params['network_id'] = net_id
params['client_id'] = client_id
client = clients_controller.get_network_client_traffic_history(params)
print(client)
|
[
"meraki_sdk.meraki_sdk_client.MerakiSdkClient"
] |
[((139, 159), 'meraki_sdk.meraki_sdk_client.MerakiSdkClient', 'MerakiSdkClient', (['key'], {}), '(key)\n', (154, 159), False, 'from meraki_sdk.meraki_sdk_client import MerakiSdkClient\n')]
|
import numpy as np
import cv2
def preprocess(img, side):
img = cv2.rotate(img, cv2.ROTATE_90_COUNTERCLOCKWISE)
img = cv2.transpose(img)
size_y, size_x, _ = img.shape
img_crop_size = (480, 480)
min_resize = max(img_crop_size[0] / size_x, img_crop_size[1] / size_y)
img = cv2.resize(img, (int(size_x * min_resize), int(size_y * min_resize))) # keeps the same aspect ratio
size_y, size_x, _ = img.shape
if side == 1:
# road is on the left so crop it there
img = img[(size_y - img_crop_size[1]):size_y, 0:img_crop_size[0]]
elif side == -1:
# road is on the right so crop it there
img = img[(size_y - img_crop_size[1]):size_y, (size_x - img_crop_size[0]):size_x]
else:
img = img[(size_y - img_crop_size[1]):size_y, int((size_x - img_crop_size[0]) / 2):int(size_x - (size_x - img_crop_size[0]) / 2)]
edges = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
edges = cv2.Canny(edges, 200, 300)
edges = cv2.GaussianBlur(edges, (3, 3), 0)
img = cv2.medianBlur(img, 5)
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
edges = cv2.dilate(edges, kernel)
    # replace the green channel with the edge map (blue and red are kept)
b, g, r = cv2.split(img)
img = cv2.merge((b, edges, r))
cv2.imshow('test2', img)
cv2.waitKey(1)
img = img / 255
return img
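# Hypothetical usage sketch (not part of the original module; file name assumed):
# frame = cv2.imread('frame.jpg')            # any BGR frame from the camera
# model_input = preprocess(frame, side=0)    # road assumed centred in view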
|
[
"cv2.GaussianBlur",
"cv2.Canny",
"cv2.rotate",
"cv2.medianBlur",
"cv2.cvtColor",
"cv2.getStructuringElement",
"cv2.dilate",
"cv2.waitKey",
"cv2.transpose",
"cv2.split",
"cv2.merge",
"cv2.imshow"
] |
[((69, 116), 'cv2.rotate', 'cv2.rotate', (['img', 'cv2.ROTATE_90_COUNTERCLOCKWISE'], {}), '(img, cv2.ROTATE_90_COUNTERCLOCKWISE)\n', (79, 116), False, 'import cv2\n'), ((127, 145), 'cv2.transpose', 'cv2.transpose', (['img'], {}), '(img)\n', (140, 145), False, 'import cv2\n'), ((890, 927), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2GRAY'], {}), '(img, cv2.COLOR_BGR2GRAY)\n', (902, 927), False, 'import cv2\n'), ((940, 966), 'cv2.Canny', 'cv2.Canny', (['edges', '(200)', '(300)'], {}), '(edges, 200, 300)\n', (949, 966), False, 'import cv2\n'), ((979, 1013), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['edges', '(3, 3)', '(0)'], {}), '(edges, (3, 3), 0)\n', (995, 1013), False, 'import cv2\n'), ((1025, 1047), 'cv2.medianBlur', 'cv2.medianBlur', (['img', '(5)'], {}), '(img, 5)\n', (1039, 1047), False, 'import cv2\n'), ((1062, 1114), 'cv2.getStructuringElement', 'cv2.getStructuringElement', (['cv2.MORPH_ELLIPSE', '(3, 3)'], {}), '(cv2.MORPH_ELLIPSE, (3, 3))\n', (1087, 1114), False, 'import cv2\n'), ((1127, 1152), 'cv2.dilate', 'cv2.dilate', (['edges', 'kernel'], {}), '(edges, kernel)\n', (1137, 1152), False, 'import cv2\n'), ((1223, 1237), 'cv2.split', 'cv2.split', (['img'], {}), '(img)\n', (1232, 1237), False, 'import cv2\n'), ((1248, 1272), 'cv2.merge', 'cv2.merge', (['(b, edges, r)'], {}), '((b, edges, r))\n', (1257, 1272), False, 'import cv2\n'), ((1278, 1302), 'cv2.imshow', 'cv2.imshow', (['"""test2"""', 'img'], {}), "('test2', img)\n", (1288, 1302), False, 'import cv2\n'), ((1307, 1321), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1318, 1321), False, 'import cv2\n')]
|
#!/usr/bin/python3
#
# Copyright (C) 2019 Trinity College of Dublin, the University of Dublin.
# Copyright (c) 2019 <NAME>
# Author: <NAME> <<EMAIL>>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
'''This module provides controller-specific helper functions and tests.'''
import sys
import time
import argparse
import traceback
import random
from pyndn import Name
from pyndn import Face
from pyndn.security import KeyChain
from oscommand import OSCommand
from ofmsg import OFMSG
from featurereq import FeatureReq
from packetout import PacketOutMsg
from facemod import FaceModMsg
from node_prefix_table import NodePrefixTable
class Controller_Setter(object):
def __init__(self):
pass
#self.keyChain = KeyChain()
#self.isDone = False
#self.ofmsg = OFMSG()
#self.nodeid = OSCommand.getnodeid()
#self.face = Face()
#self.featurereq = FeatureReq()
#self.helloreq_name_list = []
def run(self):
pass
def packetoutsender(self):
'''This section is used to send packetout msg if necessary'''
PacketOut_suffix = "all---all---/Ireland/Dublin/TCD/---2---0---3600---36000---0x0001---faceid255---0x0001"
PacketOutMsg().run(PacketOut_suffix)
def facemodsender(self):
'''This section is used to send facemod msg if necessary'''
facemod_suffix = "255---0x0001" # "faceid---Action"; Action ={create=0x0000, destroy=0x0001}
FaceModMsg().run(facemod_suffix)
|
[
"packetout.PacketOutMsg",
"facemod.FaceModMsg"
] |
[((1713, 1727), 'packetout.PacketOutMsg', 'PacketOutMsg', ([], {}), '()\n', (1725, 1727), False, 'from packetout import PacketOutMsg\n'), ((1958, 1970), 'facemod.FaceModMsg', 'FaceModMsg', ([], {}), '()\n', (1968, 1970), False, 'from facemod import FaceModMsg\n')]
|
import datetime
import re
from django.http import HttpResponse
from django.utils.dateparse import parse_datetime
from mtp_common.utils import format_currency
from openpyxl import Workbook
from security.models import credit_sources, disbursement_methods
from security.templatetags.security import (
format_card_number, format_sort_code,
format_resolution, format_disbursement_resolution,
list_prison_names,
)
from security.utils import EmailSet, NameSet
class ObjectListXlsxResponse(HttpResponse):
def __init__(self, object_list, object_type, attachment_name='export.xlsx', **kwargs):
kwargs.setdefault(
'content_type',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
)
super().__init__(**kwargs)
self['Content-Disposition'] = 'attachment; filename="%s"' % attachment_name
serialiser = ObjectListSerialiser.serialiser_for(object_type)
workbook = serialiser.make_workbook(object_list)
workbook.save(self)
class ObjectListSerialiser:
serialisers = {}
headers = []
def __init_subclass__(cls, object_type):
cls.serialisers[object_type] = cls
@classmethod
def serialiser_for(cls, object_type):
try:
return cls.serialisers[object_type]()
except KeyError:
raise NotImplementedError(f'Cannot export {object_type}')
def make_workbook(self, object_list):
workbook = Workbook(write_only=True)
worksheet = workbook.create_sheet()
worksheet.append(self.headers)
for record in object_list:
serialised_record = self.serialise(record)
worksheet.append([
escape_formulae(serialised_record.get(field))
for field in self.headers
])
return workbook
def serialise(self, record):
raise NotImplementedError
class CreditListSerialiser(ObjectListSerialiser, object_type='credits'):
headers = [
'Internal ID',
'Date started', 'Date received', 'Date credited',
'Amount',
'Prisoner number', 'Prisoner name', 'Prison',
'Sender name', 'Payment method',
'Bank transfer sort code', 'Bank transfer account', 'Bank transfer roll number',
'Debit card number', 'Debit card expiry', 'Debit card billing address',
'Sender email', 'Sender IP address',
'Status',
'NOMIS transaction',
]
def serialise(self, record):
return {
'Internal ID': record['id'],
'Date started': record['started_at'],
'Date received': (
record['received_at'].strftime('%Y-%m-%d')
if record['source'] == 'bank_transfer' else record['received_at']
),
'Date credited': record['credited_at'],
'Amount': format_currency(record['amount']),
'Prisoner number': record['prisoner_number'],
'Prisoner name': record['prisoner_name'],
'Prison': record['prison_name'],
'Sender name': record['sender_name'],
'Payment method': str(credit_sources.get(record['source'], record['source'])),
'Bank transfer sort code': (
format_sort_code(record['sender_sort_code']) if record['sender_sort_code'] else None
),
'Bank transfer account': record['sender_account_number'],
'Bank transfer roll number': record['sender_roll_number'],
'Debit card number': (
f'{record["card_number_first_digits"] or "******"}******{record["card_number_last_digits"]}'
if record['card_number_last_digits']
else None
),
'Debit card expiry': record['card_expiry_date'],
'Debit card billing address': credit_address_for_export(record['billing_address']),
'Sender email': record['sender_email'],
'Sender IP address': record['ip_address'],
'Status': str(format_resolution(record['resolution'])),
'NOMIS transaction': record['nomis_transaction_id'],
}
class DisbursementListSerialiser(ObjectListSerialiser, object_type='disbursements'):
headers = [
'Internal ID',
'Date entered', 'Date confirmed', 'Date sent',
'Amount',
'Prisoner number', 'Prisoner name', 'Prison',
'Recipient name', 'Payment method',
'Bank transfer sort code', 'Bank transfer account', 'Bank transfer roll number',
'Recipient address', 'Recipient email',
'Status',
'NOMIS transaction', 'SOP invoice number',
]
def serialise(self, record):
last_action_dates = {
log_item['action']: parse_datetime(log_item['created'])
for log_item in record['log_set']
}
return {
'Internal ID': record['id'],
'Date entered': record['created'],
'Date confirmed': last_action_dates.get('confirmed', ''),
'Date sent': last_action_dates.get('sent', ''),
'Amount': format_currency(record['amount']),
'Prisoner number': record['prisoner_number'],
'Prisoner name': record['prisoner_name'],
'Prison': record['prison_name'],
'Recipient name': f'{record["recipient_first_name"]} {record["recipient_last_name"]}'.strip(),
'Payment method': str(disbursement_methods.get(record['method'], record['method'])),
'Bank transfer sort code': (
format_sort_code(record['sort_code']) if record['sort_code'] else ''
),
'Bank transfer account': record['account_number'],
'Bank transfer roll number': record['roll_number'],
'Recipient address': disbursement_address_for_export(record),
'Recipient email': record['recipient_email'],
'Status': str(format_disbursement_resolution(record['resolution'])),
'NOMIS transaction': record['nomis_transaction_id'],
'SOP invoice number': record['invoice_number'],
}
class SenderListSerialiser(ObjectListSerialiser, object_type='senders'):
headers = [
'Sender name', 'Payment method',
'Credits sent', 'Total amount sent',
'Prisoners sent to', 'Prisons sent to',
'Bank transfer sort code', 'Bank transfer account', 'Bank transfer roll number',
'Debit card number', 'Debit card expiry', 'Debit card postcode',
'Other cardholder names', 'Cardholder emails',
]
def serialise(self, record):
serialised_record = {
'Credits sent': record['credit_count'],
'Total amount sent': format_currency(record['credit_total']),
'Prisoners sent to': record['prisoner_count'],
'Prisons sent to': record['prison_count'],
}
if record.get('bank_transfer_details'):
bank_transfer = record['bank_transfer_details'][0]
return {
**serialised_record,
'Sender name': bank_transfer['sender_name'],
'Payment method': 'Bank transfer',
'Bank transfer sort code': format_sort_code(bank_transfer['sender_sort_code']),
'Bank transfer account': bank_transfer['sender_account_number'],
'Bank transfer roll number': bank_transfer['sender_roll_number'],
}
if record.get('debit_card_details'):
debit_card = record['debit_card_details'][0]
try:
sender_name = debit_card['cardholder_names'][0]
except IndexError:
sender_name = 'Unknown'
other_sender_names = NameSet(debit_card['cardholder_names'])
if sender_name in other_sender_names:
other_sender_names.remove(sender_name)
return {
**serialised_record,
'Sender name': sender_name,
'Payment method': 'Debit card',
'Debit card number': format_card_number(debit_card),
'Debit card expiry': debit_card['card_expiry_date'],
'Debit card postcode': debit_card['postcode'] or 'Unknown',
'Other cardholder names': ', '.join(other_sender_names),
'Cardholder emails': ', '.join(EmailSet(debit_card['sender_emails'])),
}
return {
**serialised_record,
'Sender name': '(Unknown)',
'Payment method': '(Unknown)',
}
class PrisonerListSerialiser(ObjectListSerialiser, object_type='prisoners'):
headers = [
'Prisoner number',
'Prisoner name',
'Date of birth',
'Credits received',
'Total amount received',
'Payment sources',
'Disbursements sent',
'Total amount sent',
'Recipients',
'Current prison',
'All known prisons',
'Names given by senders',
]
def serialise(self, record):
if record['current_prison']:
current_prison = record['current_prison']['name']
else:
current_prison = 'Not in a public prison'
provided_names = NameSet(record['provided_names'])
return {
'Prisoner number': record['prisoner_number'],
'Prisoner name': record['prisoner_name'],
'Date of birth': record['prisoner_dob'],
'Credits received': record['credit_count'],
'Total amount received': format_currency(record['credit_total']),
'Payment sources': record['sender_count'],
'Disbursements sent': record['disbursement_count'],
'Total amount sent': format_currency(record['disbursement_total']),
'Recipients': record['recipient_count'],
'Current prison': current_prison,
'All known prisons': list_prison_names(record['prisons']),
'Names given by senders': ', '.join(provided_names),
}
def escape_formulae(value):
"""
Escapes formulae (strings that start with =) to prevent
spreadsheet software vulnerabilities being exploited
    :param value: the value being added to a spreadsheet cell
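    For example, escape_formulae('=1+2') returns "'=1+2", which spreadsheet
    software displays as literal text instead of evaluating it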
"""
if isinstance(value, str) and value.startswith('='):
return "'" + value
if isinstance(value, datetime.datetime):
return value.strftime('%Y-%m-%d %H:%M:%S')
if isinstance(value, datetime.date):
return value.strftime('%Y-%m-%d')
return value
def credit_address_for_export(address):
if not address:
return ''
whitespace = re.compile(r'\s+')
keys = ('line1', 'line2', 'city', 'postcode', 'country')
lines = (whitespace.sub(' ', address[key]).strip() for key in keys if address.get(key))
return ', '.join(lines)
def disbursement_address_for_export(disbursement):
whitespace = re.compile(r'\s+')
keys = ('address_line1', 'address_line2', 'city', 'postcode', 'country')
lines = (whitespace.sub(' ', disbursement[key]).strip() for key in keys if disbursement.get(key))
return ', '.join(lines)
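# Hypothetical usage sketch (view and queryset names assumed, not part of this
# module): a Django view can return the response directly, and the serialiser
# registry dispatches on object_type:
#     return ObjectListXlsxResponse(credit_list, object_type='credits',
#                                   attachment_name='credits.xlsx')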
|
[
"security.models.disbursement_methods.get",
"security.models.credit_sources.get",
"security.utils.NameSet",
"openpyxl.Workbook",
"security.templatetags.security.format_disbursement_resolution",
"security.utils.EmailSet",
"mtp_common.utils.format_currency",
"security.templatetags.security.format_card_number",
"security.templatetags.security.list_prison_names",
"django.utils.dateparse.parse_datetime",
"security.templatetags.security.format_resolution",
"security.templatetags.security.format_sort_code",
"re.compile"
] |
[((10580, 10598), 're.compile', 're.compile', (['"""\\\\s+"""'], {}), "('\\\\s+')\n", (10590, 10598), False, 'import re\n'), ((10850, 10868), 're.compile', 're.compile', (['"""\\\\s+"""'], {}), "('\\\\s+')\n", (10860, 10868), False, 'import re\n'), ((1460, 1485), 'openpyxl.Workbook', 'Workbook', ([], {'write_only': '(True)'}), '(write_only=True)\n', (1468, 1485), False, 'from openpyxl import Workbook\n'), ((9192, 9225), 'security.utils.NameSet', 'NameSet', (["record['provided_names']"], {}), "(record['provided_names'])\n", (9199, 9225), False, 'from security.utils import EmailSet, NameSet\n'), ((2856, 2889), 'mtp_common.utils.format_currency', 'format_currency', (["record['amount']"], {}), "(record['amount'])\n", (2871, 2889), False, 'from mtp_common.utils import format_currency\n'), ((4737, 4772), 'django.utils.dateparse.parse_datetime', 'parse_datetime', (["log_item['created']"], {}), "(log_item['created'])\n", (4751, 4772), False, 'from django.utils.dateparse import parse_datetime\n'), ((5086, 5119), 'mtp_common.utils.format_currency', 'format_currency', (["record['amount']"], {}), "(record['amount'])\n", (5101, 5119), False, 'from mtp_common.utils import format_currency\n'), ((6695, 6734), 'mtp_common.utils.format_currency', 'format_currency', (["record['credit_total']"], {}), "(record['credit_total'])\n", (6710, 6734), False, 'from mtp_common.utils import format_currency\n'), ((7703, 7742), 'security.utils.NameSet', 'NameSet', (["debit_card['cardholder_names']"], {}), "(debit_card['cardholder_names'])\n", (7710, 7742), False, 'from security.utils import EmailSet, NameSet\n'), ((9501, 9540), 'mtp_common.utils.format_currency', 'format_currency', (["record['credit_total']"], {}), "(record['credit_total'])\n", (9516, 9540), False, 'from mtp_common.utils import format_currency\n'), ((9694, 9739), 'mtp_common.utils.format_currency', 'format_currency', (["record['disbursement_total']"], {}), "(record['disbursement_total'])\n", (9709, 9739), False, 'from mtp_common.utils import format_currency\n'), ((9873, 9909), 'security.templatetags.security.list_prison_names', 'list_prison_names', (["record['prisons']"], {}), "(record['prisons'])\n", (9890, 9909), False, 'from security.templatetags.security import format_card_number, format_sort_code, format_resolution, format_disbursement_resolution, list_prison_names\n'), ((3132, 3186), 'security.models.credit_sources.get', 'credit_sources.get', (["record['source']", "record['source']"], {}), "(record['source'], record['source'])\n", (3150, 3186), False, 'from security.models import credit_sources, disbursement_methods\n'), ((3246, 3290), 'security.templatetags.security.format_sort_code', 'format_sort_code', (["record['sender_sort_code']"], {}), "(record['sender_sort_code'])\n", (3262, 3290), False, 'from security.templatetags.security import format_card_number, format_sort_code, format_resolution, format_disbursement_resolution, list_prison_names\n'), ((4015, 4054), 'security.templatetags.security.format_resolution', 'format_resolution', (["record['resolution']"], {}), "(record['resolution'])\n", (4032, 4054), False, 'from security.templatetags.security import format_card_number, format_sort_code, format_resolution, format_disbursement_resolution, list_prison_names\n'), ((5419, 5479), 'security.models.disbursement_methods.get', 'disbursement_methods.get', (["record['method']", "record['method']"], {}), "(record['method'], record['method'])\n", (5443, 5479), False, 'from security.models import credit_sources, disbursement_methods\n'), ((5539, 5576), 
'security.templatetags.security.format_sort_code', 'format_sort_code', (["record['sort_code']"], {}), "(record['sort_code'])\n", (5555, 5576), False, 'from security.templatetags.security import format_card_number, format_sort_code, format_resolution, format_disbursement_resolution, list_prison_names\n'), ((5908, 5960), 'security.templatetags.security.format_disbursement_resolution', 'format_disbursement_resolution', (["record['resolution']"], {}), "(record['resolution'])\n", (5938, 5960), False, 'from security.templatetags.security import format_card_number, format_sort_code, format_resolution, format_disbursement_resolution, list_prison_names\n'), ((7185, 7236), 'security.templatetags.security.format_sort_code', 'format_sort_code', (["bank_transfer['sender_sort_code']"], {}), "(bank_transfer['sender_sort_code'])\n", (7201, 7236), False, 'from security.templatetags.security import format_card_number, format_sort_code, format_resolution, format_disbursement_resolution, list_prison_names\n'), ((8035, 8065), 'security.templatetags.security.format_card_number', 'format_card_number', (['debit_card'], {}), '(debit_card)\n', (8053, 8065), False, 'from security.templatetags.security import format_card_number, format_sort_code, format_resolution, format_disbursement_resolution, list_prison_names\n'), ((8332, 8369), 'security.utils.EmailSet', 'EmailSet', (["debit_card['sender_emails']"], {}), "(debit_card['sender_emails'])\n", (8340, 8369), False, 'from security.utils import EmailSet, NameSet\n')]
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'designer/qcode.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_DialogShowQrCode(object):
def setupUi(self, DialogShowQrCode):
DialogShowQrCode.setObjectName("DialogShowQrCode")
DialogShowQrCode.resize(480, 320)
self.buttonBox = QtWidgets.QDialogButtonBox(DialogShowQrCode)
self.buttonBox.setGeometry(QtCore.QRect(326, 268, 150, 50))
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.buttonBox.sizePolicy().hasHeightForWidth())
self.buttonBox.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(24)
self.buttonBox.setFont(font)
self.buttonBox.setStyleSheet("background-color: lightgrey;\n"
"font: 24pt \"Arial\";")
self.buttonBox.setOrientation(QtCore.Qt.Vertical)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.top_right_logo = QtWidgets.QLabel(DialogShowQrCode)
self.top_right_logo.setGeometry(QtCore.QRect(430, 2, 40, 60))
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.top_right_logo.sizePolicy().hasHeightForWidth())
self.top_right_logo.setSizePolicy(sizePolicy)
self.top_right_logo.setText("")
self.top_right_logo.setPixmap(QtGui.QPixmap(":/RaspiBlitz/images/RaspiBlitz_Logo_Berry.png"))
self.top_right_logo.setScaledContents(True)
self.top_right_logo.setAlignment(QtCore.Qt.AlignCenter)
self.top_right_logo.setObjectName("top_right_logo")
self.frame = QtWidgets.QFrame(DialogShowQrCode)
self.frame.setGeometry(QtCore.QRect(0, 0, 320, 320))
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame.sizePolicy().hasHeightForWidth())
self.frame.setSizePolicy(sizePolicy)
self.frame.setStyleSheet("background-color: rgb(255, 255, 255);")
self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame.setObjectName("frame")
self.qcode = QtWidgets.QLabel(self.frame)
self.qcode.setGeometry(QtCore.QRect(1, 1, 318, 318))
self.qcode.setStyleSheet("background-color: white")
self.qcode.setText("")
self.qcode.setPixmap(QtGui.QPixmap(":/RaspiBlitz/images/RaspiBlitz_Logo_Stacked.png"))
self.qcode.setScaledContents(True)
self.qcode.setAlignment(QtCore.Qt.AlignCenter)
self.qcode.setObjectName("qcode")
self.label = QtWidgets.QLabel(DialogShowQrCode)
self.label.setGeometry(QtCore.QRect(330, 4, 88, 60))
self.label.setText("")
self.label.setPixmap(QtGui.QPixmap(":/RaspiBlitz/images/RaspiBlitz_Logo_Stacked.png"))
self.label.setScaledContents(True)
self.label.setAlignment(QtCore.Qt.AlignCenter)
self.label.setObjectName("label")
self.horizontalLayoutWidget = QtWidgets.QWidget(DialogShowQrCode)
self.horizontalLayoutWidget.setGeometry(QtCore.QRect(320, 70, 161, 191))
self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.horizontalLayoutWidget)
self.verticalLayout.setContentsMargins(6, 0, 6, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.line = QtWidgets.QFrame(self.horizontalLayoutWidget)
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.verticalLayout.addWidget(self.line)
self.memo_key = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
font.setBold(True)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(75)
self.memo_key.setFont(font)
self.memo_key.setScaledContents(False)
self.memo_key.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.memo_key.setWordWrap(True)
self.memo_key.setObjectName("memo_key")
self.verticalLayout.addWidget(self.memo_key)
self.memo_value = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
self.memo_value.setFont(font)
self.memo_value.setScaledContents(False)
self.memo_value.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.memo_value.setWordWrap(True)
self.memo_value.setObjectName("memo_value")
self.verticalLayout.addWidget(self.memo_value)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.status_key = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
font.setBold(True)
font.setUnderline(False)
font.setWeight(75)
self.status_key.setFont(font)
self.status_key.setScaledContents(False)
self.status_key.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.status_key.setWordWrap(True)
self.status_key.setObjectName("status_key")
self.horizontalLayout.addWidget(self.status_key)
self.status_value = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
self.status_value.setFont(font)
self.status_value.setScaledContents(False)
self.status_value.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.status_value.setWordWrap(True)
self.status_value.setObjectName("status_value")
self.horizontalLayout.addWidget(self.status_value)
self.verticalLayout.addLayout(self.horizontalLayout)
self.inv_amt_key = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.inv_amt_key.setFont(font)
self.inv_amt_key.setObjectName("inv_amt_key")
self.verticalLayout.addWidget(self.inv_amt_key)
self.inv_amt_value = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.inv_amt_value.setFont(font)
self.inv_amt_value.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.inv_amt_value.setObjectName("inv_amt_value")
self.verticalLayout.addWidget(self.inv_amt_value)
self.amt_paid_key = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.amt_paid_key.setFont(font)
self.amt_paid_key.setObjectName("amt_paid_key")
self.verticalLayout.addWidget(self.amt_paid_key)
self.amt_paid_value = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
self.amt_paid_value.setFont(font)
self.amt_paid_value.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.amt_paid_value.setObjectName("amt_paid_value")
self.verticalLayout.addWidget(self.amt_paid_value)
self.spinner = QtWidgets.QWidget(DialogShowQrCode)
self.spinner.setGeometry(QtCore.QRect(440, 0, 40, 40))
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.spinner.sizePolicy().hasHeightForWidth())
self.spinner.setSizePolicy(sizePolicy)
self.spinner.setObjectName("spinner")
self.spinner.raise_()
self.buttonBox.raise_()
self.top_right_logo.raise_()
self.frame.raise_()
self.label.raise_()
self.horizontalLayoutWidget.raise_()
self.retranslateUi(DialogShowQrCode)
self.buttonBox.accepted.connect(DialogShowQrCode.accept)
QtCore.QMetaObject.connectSlotsByName(DialogShowQrCode)
def retranslateUi(self, DialogShowQrCode):
_translate = QtCore.QCoreApplication.translate
DialogShowQrCode.setWindowTitle(_translate("DialogShowQrCode", "Dialog"))
self.memo_key.setText(_translate("DialogShowQrCode", "Memo"))
self.memo_value.setText(_translate("DialogShowQrCode", "RB-Vivid-Badger"))
self.status_key.setText(_translate("DialogShowQrCode", "Status"))
self.status_value.setText(_translate("DialogShowQrCode", "Open/Paid"))
self.inv_amt_key.setText(_translate("DialogShowQrCode", "Invoice Amount"))
self.inv_amt_value.setText(_translate("DialogShowQrCode", "123456798"))
self.amt_paid_key.setText(_translate("DialogShowQrCode", "Amount Paid"))
self.amt_paid_value.setText(_translate("DialogShowQrCode", "N/A"))
from . import resources_rc
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
DialogShowQrCode = QtWidgets.QDialog()
ui = Ui_DialogShowQrCode()
ui.setupUi(DialogShowQrCode)
DialogShowQrCode.show()
sys.exit(app.exec_())
|
[
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QSizePolicy",
"PyQt5.QtWidgets.QFrame",
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtCore.QRect",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtWidgets.QDialog",
"PyQt5.QtGui.QFont",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtGui.QPixmap",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtWidgets.QDialogButtonBox"
] |
[((10059, 10091), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (10081, 10091), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10115, 10134), 'PyQt5.QtWidgets.QDialog', 'QtWidgets.QDialog', ([], {}), '()\n', (10132, 10134), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((449, 493), 'PyQt5.QtWidgets.QDialogButtonBox', 'QtWidgets.QDialogButtonBox', (['DialogShowQrCode'], {}), '(DialogShowQrCode)\n', (475, 493), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((583, 662), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n', (604, 662), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((897, 910), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (908, 910), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1316, 1350), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['DialogShowQrCode'], {}), '(DialogShowQrCode)\n', (1332, 1350), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1442, 1521), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n', (1463, 1521), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2090, 2124), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['DialogShowQrCode'], {}), '(DialogShowQrCode)\n', (2106, 2124), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2207, 2286), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n', (2228, 2286), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2757, 2785), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.frame'], {}), '(self.frame)\n', (2773, 2785), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3194, 3228), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['DialogShowQrCode'], {}), '(DialogShowQrCode)\n', (3210, 3228), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3594, 3629), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['DialogShowQrCode'], {}), '(DialogShowQrCode)\n', (3611, 3629), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3817, 3867), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.horizontalLayoutWidget'], {}), '(self.horizontalLayoutWidget)\n', (3838, 3867), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4007, 4052), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.horizontalLayoutWidget'], {}), '(self.horizontalLayoutWidget)\n', (4023, 4052), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4280, 4325), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.horizontalLayoutWidget'], {}), '(self.horizontalLayoutWidget)\n', (4296, 4325), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4341, 4354), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (4352, 4354), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4882, 4927), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.horizontalLayoutWidget'], {}), '(self.horizontalLayoutWidget)\n', (4898, 4927), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4943, 4956), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (4954, 4956), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5389, 5412), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (5410, 5412), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5503, 5548), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.horizontalLayoutWidget'], {}), '(self.horizontalLayoutWidget)\n', (5519, 5548), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5564, 5577), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (5575, 5577), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6093, 6138), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.horizontalLayoutWidget'], {}), '(self.horizontalLayoutWidget)\n', (6109, 6138), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6154, 6167), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (6165, 6167), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6672, 6717), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.horizontalLayoutWidget'], {}), '(self.horizontalLayoutWidget)\n', (6688, 6717), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6733, 6746), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (6744, 6746), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7041, 7086), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.horizontalLayoutWidget'], {}), '(self.horizontalLayoutWidget)\n', (7057, 7086), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7102, 7115), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (7113, 7115), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7527, 7572), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.horizontalLayoutWidget'], {}), '(self.horizontalLayoutWidget)\n', (7543, 7572), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7588, 7601), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (7599, 7601), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7901, 7946), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.horizontalLayoutWidget'], {}), '(self.horizontalLayoutWidget)\n', (7917, 7946), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7962, 7975), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (7973, 7975), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8332, 8367), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['DialogShowQrCode'], {}), '(DialogShowQrCode)\n', (8349, 8367), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8452, 8531), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n', (8473, 8531), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9112, 9167), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['DialogShowQrCode'], {}), '(DialogShowQrCode)\n', (9149, 9167), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((529, 560), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(326)', '(268)', '(150)', '(50)'], {}), '(326, 268, 150, 50)\n', (541, 560), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1391, 1419), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(430)', '(2)', '(40)', '(60)'], {}), '(430, 2, 40, 60)\n', (1403, 1419), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1829, 1891), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['""":/RaspiBlitz/images/RaspiBlitz_Logo_Berry.png"""'], {}), "(':/RaspiBlitz/images/RaspiBlitz_Logo_Berry.png')\n", (1842, 1891), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2156, 2184), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(320)', '(320)'], {}), '(0, 0, 320, 320)\n', (2168, 2184), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2817, 2845), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(1)', '(1)', '(318)', '(318)'], {}), '(1, 1, 318, 318)\n', (2829, 2845), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2967, 3031), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['""":/RaspiBlitz/images/RaspiBlitz_Logo_Stacked.png"""'], {}), "(':/RaspiBlitz/images/RaspiBlitz_Logo_Stacked.png')\n", (2980, 3031), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3260, 3288), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(330)', '(4)', '(88)', '(60)'], {}), '(330, 4, 88, 60)\n', (3272, 3288), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3350, 3414), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['""":/RaspiBlitz/images/RaspiBlitz_Logo_Stacked.png"""'], {}), "(':/RaspiBlitz/images/RaspiBlitz_Logo_Stacked.png')\n", (3363, 3414), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3678, 3709), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(320)', '(70)', '(161)', '(191)'], {}), '(320, 70, 161, 191)\n', (3690, 3709), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8401, 8429), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(440)', '(0)', '(40)', '(40)'], {}), '(440, 0, 40, 40)\n', (8413, 8429), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')]
|
from Client.interfaces import IClient
from Command.base import BaseCommand, CommandType
from Database.interfaces import IDatabase
from Timer.event import TimeoutEvent
from Timer.timestamp import Timestamp
from Conf.command import CMD_RES
class Set(BaseCommand):
args_order = ['key', 'value', 'expires_time']
min_args = 2
cmd_type = CommandType.CMD_WRITE
def handle(self, args, kwargs):
db: IDatabase = self.client.get_database()
expires_time = kwargs.get('expires_time')
if expires_time is None:
db.store(kwargs['key'], kwargs['value'])
else:
expires_time = int(expires_time)
db.store(kwargs['key'], kwargs['value'])
self.set_expires_timer(kwargs['key'], expires_time)
return CMD_RES.OK
def set_expires_timer(self, key, expires_time):
db: IDatabase = self.client.get_database()
timestamp = Timestamp(expires_time, 's')
db.store_expires(key, timestamp.get_time())
timeout_event = ExpiresKeyRemoveEvent(timestamp)
timeout_event.set_extra_data({
"client": self.client,
"expires_key": key
})
server = self.client.get_server()
reactor = server.get_loop()
reactor.create_timeout_event(timeout_event)
        print('expire event built')
class ExpiresKeyRemoveEvent(TimeoutEvent):
def handle_event(self, reactor):
extra_data = self.extra_data
        print('expire event activated')
client: IClient = extra_data['client']
db: IDatabase = client.get_database()
db.remove_expires(extra_data['expires_key'])
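# A minimal interaction sketch (hypothetical session; assumes the server
# dispatches a parsed "SET key value [expires_time]" line to Set.handle):
#
#   SET greeting hello      -> CMD_RES.OK, key stored with no expiry
#   SET greeting hello 10   -> CMD_RES.OK, and the ExpiresKeyRemoveEvent
#                              removes the key roughly 10 seconds later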
|
[
"Timer.timestamp.Timestamp"
] |
[((920, 948), 'Timer.timestamp.Timestamp', 'Timestamp', (['expires_time', '"""s"""'], {}), "(expires_time, 's')\n", (929, 948), False, 'from Timer.timestamp import Timestamp\n')]
|
"""
The Atari environment (env) wrappers. Some envs need extra per-game configuration, which the wrapper subclasses below provide.
"""
import gym
import numpy as np
from scipy.misc import imresize  # NOTE: scipy.misc.imresize was removed in SciPy 1.3; this module requires SciPy < 1.3 with Pillow installed
class AtariWrapper():
def __init__(self, env):
self.env = env
self.observation_space = self.env.observation_space
self.reward_range = self.env.reward_range
self.metadata = self.env.metadata
self.spec = self.env.spec
def step(self, *args, **kwargs):
state, reward, done, info = self.env.step(*args, **kwargs)
info['org_obs'] = state
state = self.process_atari_image(state)
return state, reward, done, info
@property
def action_space(self):
return self.env.action_space
def close(self, *args, **kwargs):
return self.env.close(*args, **kwargs)
def render(self, mode='human', inspect=False, img=None):
if not inspect:
return self.env.render(mode)
else:
if mode == 'rgb_array':
return img
elif mode == 'human':
from gym.envs.classic_control import rendering
if self.env.env.viewer is None:
self.env.env.viewer = rendering.SimpleImageViewer()
self.env.env.viewer.imshow(img)
return self.env.env.viewer.isopen
def reset(self, inspect=False):
state = self.env.reset()
if inspect:
return self.process_atari_image(state), state
else:
return self.process_atari_image(state)
def seed(self, *args, **kwargs):
return self.env.seed(*args, **kwargs)
@staticmethod
def process_atari_image(img):
return imresize(img[5:195].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class Crop35And195(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@staticmethod
def process_atari_image(img):
return imresize(img[35:195].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class Crop15And195(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@staticmethod
def process_atari_image(img):
        return imresize(img[15:195].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0  # crop rows 15:195, as the class name indicates
class PongWrapper(Crop35And195):
def __init__(self, env):
Crop35And195.__init__(self, env)
def step(self, action):
if action > 2:
raise Exception('Unknown Action')
if action == 1:
action = 4
elif action == 2:
action = 5
state, reward, done, info = self.env.step(action)
info['org_obs'] = state
state = self.process_atari_image(state)
return state, reward, done, info
@property
def action_space(self):
return gym.spaces.discrete.Discrete(3)
class SpaceInvaderWrapper(Crop15And195):
def __init__(self, env):
Crop15And195.__init__(self, env)
@property
def action_space(self):
return gym.spaces.discrete.Discrete(4)
class EnduroWrapper(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@staticmethod
def process_atari_image(img):
return imresize(img[0:155, 10:].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class BeamRiderWrapper(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@staticmethod
def process_atari_image(img):
return imresize(img[30:180, 10:].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class FreewayWrapper(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@staticmethod
def process_atari_image(img):
return imresize(img[25:195, 10:].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class BoxingWrapper(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@staticmethod
def process_atari_image(img):
return imresize(img[15:180, 30:130].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class BreakoutWrapper(Crop35And195):
def __init__(self, env):
Crop35And195.__init__(self, env)
class QbertWrapper(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@staticmethod
def process_atari_image(img):
return imresize(img[30:190, 10:150].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class BowlingWrapper(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@property
def action_space(self):
return gym.spaces.discrete.Discrete(4)
@staticmethod
def process_atari_image(img):
return imresize(img[105:172, :].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class ElevatorActionWrapper(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
def atari_wrapper(env_name):
x = env_name.lower()
x = x.split('-')[0]
if x.__contains__('pong'):
env = PongWrapper(gym.make(env_name))
elif x.__contains__('spaceinvaders'):
env = SpaceInvaderWrapper(gym.make(env_name))
elif x.__contains__('enduro'):
env = EnduroWrapper(gym.make(env_name))
elif x.__contains__('beamrider'):
env = BeamRiderWrapper(gym.make(env_name))
elif x.__contains__('freeway'):
env = FreewayWrapper(gym.make(env_name))
elif x.__contains__('boxing'):
env = BoxingWrapper(gym.make(env_name))
elif x.__contains__('breakout'):
env = BreakoutWrapper(gym.make(env_name))
elif x.__contains__('qbert'):
env = QbertWrapper(gym.make(env_name))
elif x.__contains__('bowling'):
env = BowlingWrapper(gym.make(env_name))
elif x.__contains__('elevatoraction'):
env = ElevatorActionWrapper(gym.make(env_name))
else:
env = AtariWrapper(gym.make(env_name))
return env
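# A minimal usage sketch (assumes gym with the Atari ROMs installed; the
# env id is illustrative):
#
# env = atari_wrapper('Pong-v0')
# state = env.reset()  # shape (1, 80, 80), float32 scaled to [0, 1]
# state, reward, done, info = env.step(env.action_space.sample())
# env.close()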
|
[
"gym.envs.classic_control.rendering.SimpleImageViewer",
"gym.make",
"gym.spaces.discrete.Discrete"
] |
[((2844, 2875), 'gym.spaces.discrete.Discrete', 'gym.spaces.discrete.Discrete', (['(3)'], {}), '(3)\n', (2872, 2875), False, 'import gym\n'), ((3047, 3078), 'gym.spaces.discrete.Discrete', 'gym.spaces.discrete.Discrete', (['(4)'], {}), '(4)\n', (3075, 3078), False, 'import gym\n'), ((4692, 4723), 'gym.spaces.discrete.Discrete', 'gym.spaces.discrete.Discrete', (['(4)'], {}), '(4)\n', (4720, 4723), False, 'import gym\n'), ((5134, 5152), 'gym.make', 'gym.make', (['env_name'], {}), '(env_name)\n', (5142, 5152), False, 'import gym\n'), ((5230, 5248), 'gym.make', 'gym.make', (['env_name'], {}), '(env_name)\n', (5238, 5248), False, 'import gym\n'), ((5313, 5331), 'gym.make', 'gym.make', (['env_name'], {}), '(env_name)\n', (5321, 5331), False, 'import gym\n'), ((1215, 1244), 'gym.envs.classic_control.rendering.SimpleImageViewer', 'rendering.SimpleImageViewer', ([], {}), '()\n', (1242, 1244), False, 'from gym.envs.classic_control import rendering\n'), ((5402, 5420), 'gym.make', 'gym.make', (['env_name'], {}), '(env_name)\n', (5410, 5420), False, 'import gym\n'), ((5487, 5505), 'gym.make', 'gym.make', (['env_name'], {}), '(env_name)\n', (5495, 5505), False, 'import gym\n'), ((5570, 5588), 'gym.make', 'gym.make', (['env_name'], {}), '(env_name)\n', (5578, 5588), False, 'import gym\n'), ((5657, 5675), 'gym.make', 'gym.make', (['env_name'], {}), '(env_name)\n', (5665, 5675), False, 'import gym\n'), ((5738, 5756), 'gym.make', 'gym.make', (['env_name'], {}), '(env_name)\n', (5746, 5756), False, 'import gym\n'), ((5823, 5841), 'gym.make', 'gym.make', (['env_name'], {}), '(env_name)\n', (5831, 5841), False, 'import gym\n'), ((5922, 5940), 'gym.make', 'gym.make', (['env_name'], {}), '(env_name)\n', (5930, 5940), False, 'import gym\n'), ((5979, 5997), 'gym.make', 'gym.make', (['env_name'], {}), '(env_name)\n', (5987, 5997), False, 'import gym\n')]
|
import os
import shutil
from pathlib import Path
from pydantic import DirectoryPath, FilePath
from pydantic.validators import path_validator
__all__ = [
'FilePath',
'DirectoryPath',
'AutoCreateDirectoryPath',
'DirectoryFindUp',
'PathExpandUser',
'ExecutablePath',
]
class PathExpandUser(DirectoryPath):
@staticmethod
def _expand_user(path: Path):
path = path.expanduser()
return path
@classmethod
def __get_validators__(cls):
yield path_validator
yield cls._expand_user
        yield from super().__get_validators__()
class AutoCreateDirectoryPath(PathExpandUser):
@staticmethod
    def _ensure_exists(path: Path):
if not path.exists():
os.makedirs(path)
return path
@classmethod
def __get_validators__(cls):
yield path_validator
yield cls._expand_user
        yield cls._ensure_exists
        yield from super().__get_validators__()
class DirectoryFindUp(DirectoryPath):
@staticmethod
def __find_up(path: Path):
if path.exists():
return path
else:
start_dir = Path(os.getcwd())
for parent_dir in start_dir.parents:
parent_path = Path(parent_dir, path)
if parent_path.exists():
return parent_path
raise FileNotFoundError(f"{path} not found in {start_dir} or parents")
@classmethod
def __get_validators__(cls):
yield path_validator
yield cls.__find_up
yield cls.validate
class ExecutablePath(Path):
@staticmethod
def __find_in_system_path(path: Path):
full_path = shutil.which(path)
if full_path is None:
raise FileNotFoundError(f"{path} not found")
# Note: on Windows any existing file appears as executable
        elif not os.access(path, os.X_OK):
            raise ValueError(f"{path} found but is not executable")
        return path  # a validator must return the value it validated
@classmethod
def __get_validators__(cls):
yield path_validator
yield cls.__find_in_system_path
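# A minimal usage sketch (assumes pydantic v1, which provides
# pydantic.validators.path_validator; the field values are illustrative):
#
# from pydantic import BaseModel
#
# class Settings(BaseModel):
#     cache_dir: AutoCreateDirectoryPath  # '~' expanded, created if missing
#     python_bin: ExecutablePath          # must resolve via shutil.which
#
# settings = Settings(cache_dir='~/.myapp/cache', python_bin='python3')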
|
[
"os.makedirs",
"os.getcwd",
"shutil.which",
"pathlib.Path",
"os.access"
] |
[((1653, 1671), 'shutil.which', 'shutil.which', (['path'], {}), '(path)\n', (1665, 1671), False, 'import shutil\n'), ((728, 745), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (739, 745), False, 'import os\n'), ((1129, 1140), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1138, 1140), False, 'import os\n'), ((1221, 1243), 'pathlib.Path', 'Path', (['parent_dir', 'path'], {}), '(parent_dir, path)\n', (1225, 1243), False, 'from pathlib import Path\n'), ((1843, 1867), 'os.access', 'os.access', (['path', 'os.X_OK'], {}), '(path, os.X_OK)\n', (1852, 1867), False, 'import os\n')]
|
import unittest
from ishell.console import Console
from ishell.command import Command
import io
from contextlib import redirect_stdout
class TestConsole(unittest.TestCase):
def test_console_creation(self):
"""Console must be created."""
c = Console()
assert isinstance(c, Console)
def test_console_has_prompt(self):
"""Console should have a default prompt string."""
c = Console()
assert c.prompt == "Prompt"
assert c.prompt_delim == ">"
class TestCommand(unittest.TestCase):
def test_command_creation(self):
"""Command must be created with name and default help message."""
cmd = Command('configure')
assert cmd.name == 'configure'
assert cmd.help == 'No help provided'
assert cmd.dynamic_args == False
def test_simple_completion(self):
"""Command must complete with only one option."""
cmd1 = Command('configure')
cmd2 = Command('terminal')
cmd1.addChild(cmd2)
candidates = cmd1.complete('', '', 0, run=False, full_line='configure ')
assert 'terminal ' == candidates
candidates = cmd1.complete('', '', 1, run=False, full_line='configure ')
assert None == candidates
def test_double_completion(self):
"""Command must complete with two options."""
cmd1 = Command('configure')
cmd2 = Command('terminal')
cmd3 = Command('interface')
cmd1.addChild(cmd2)
cmd1.addChild(cmd3)
# State 0 must print all commands followed by help message
# and return None as candidates
candidates = cmd1.complete('', '', 0, run=False, full_line='configure ')
assert None == candidates
candidates = cmd1.complete('', 'in', 0, run=False, full_line='configure in')
assert 'interface ' == candidates
candidates = cmd1.complete('', 't', 0, run=False, full_line='configure t')
assert 'terminal ' == candidates
def test_double_overlapping_completion(self):
"""Command must complete with two overlapping options."""
cmd1 = Command('configure')
cmd2 = Command('terminal')
cmd2.run = lambda l: "terminal output"
cmd3 = Command('terminal_1')
cmd3.run = lambda l: "terminal_1 output"
cmd1.addChild(cmd2)
cmd1.addChild(cmd3)
# State 0 must print all commands followed by help message
# and return None as candidates
candidates = cmd1.complete('', '', 0, run=False, full_line='configure ')
assert None == candidates
candidates = cmd1.complete('', 't', 0, run=False, full_line='configure t')
assert 'terminal ' == candidates
candidates = cmd1.complete('', 't', 1, run=False, full_line='configure t')
assert 'terminal_1 ' == candidates
# user pressing tab on ambiguous command
candidates = cmd1.complete(["terminal"], 'terminal', 0, run=False, full_line=None)
assert "terminal " == candidates
candidates = cmd1.complete(["terminal"], 'terminal', 1, run=False, full_line=None)
assert "terminal_1 " == candidates
output = cmd1.complete(["terminal"], 'configure terminal', 0, run=True, full_line='configure terminal')
assert 'terminal output' == output
output = cmd1.complete(["terminal_1"], 'configure terminal_1', 0, run=True, full_line='configure terminal_1')
assert 'terminal_1 output' == output
def test_double_overlapping_nested_completion(self):
"""Command must complete with two overlapping nested options."""
cmd1 = Command('configure')
cmd2 = Command('terminal')
cmd3 = Command('terminal_1')
cmd1.addChild(cmd2)
cmd1.addChild(cmd3)
cmd4 = Command("option")
cmd2.addChild(cmd4)
cmd3.addChild(cmd4)
cmd5 = Command("Aaaa")
cmd4.addChild(cmd5)
cmd6 = Command("B")
cmd4.addChild(cmd6)
# show help for A, B (NOT terminal terminal_1) tab complete
with io.StringIO() as buf, redirect_stdout(buf):
output = cmd1.complete(["terminal", "option"], '', 0, run=False, full_line=None)
assert output is None
help_output = buf.getvalue()
assert help_output == '\rPossible Completions:\n\r Aaaa No help provided\n\r B No help provided\n'
# show help for A, B (NOT terminal terminal_1) enter
with io.StringIO() as buf, redirect_stdout(buf):
output = cmd1.complete(["terminal", "option"], 'terminal option', 0, run=True, full_line='terminal option')
assert output is None
help_output = buf.getvalue()
assert help_output == '\nIncomplete Command: terminal option\n\nHelp:\nAaaa - No help provided\n B - No help provided\n\n'
def test_completion_with_buffer(self):
"""Command must complete correctly with buffer provided."""
cmd1 = Command('configure')
cmd2 = Command('terminal')
cmd1.addChild(cmd2)
candidates = cmd1.complete(['t'], 't', 0, run=False, full_line='configure ')
assert 'terminal ' == candidates
candidates = cmd1.complete(['t'], 't', 1, run=False, full_line='configure ')
assert None == candidates
def test_completion_with_dynamic_arg(self):
cmd1 = Command('show')
cmd2 = Command('call', dynamic_args=True)
cmd3 = Command('calls', dynamic_args=True)
cmd2.args = lambda: ['100', '101']
cmd3.args = lambda: ['continuous', 'raw']
cmd1.addChild(cmd2)
cmd1.addChild(cmd3)
candidates = cmd1.complete(['c'], '', 0, run=False, full_line='show calls')
self.assertEqual(None, candidates)
candidates = cmd1.complete(['c'], 'c', 0, run=False, full_line='show calls')
self.assertEqual('call ', candidates)
candidates = cmd1.complete(['c'], 'c', 1, run=False, full_line='show calls')
self.assertEqual('calls ', candidates)
candidates = cmd2.complete([''], '', 0, run=False, full_line='show calls')
self.assertEqual(None, candidates)
candidates = cmd2.complete([''], '1', 0, run=False, full_line='show calls')
self.assertEqual('100', candidates)
candidates = cmd2.complete([''], '1', 1, run=False, full_line='show calls')
self.assertEqual('101', candidates)
candidates = cmd3.complete([''], '', 0, run=False, full_line='show calls c')
self.assertEqual(None, candidates)
candidates = cmd3.complete([''], 'c', 0, run=False, full_line='show calls c')
self.assertEqual('continuous', candidates)
candidates = cmd3.complete([''], 'r', 0, run=False, full_line='show calls c')
self.assertEqual('raw', candidates)
candidates = cmd1.complete(['calls', 'c'], 'c', 0, run=False, full_line='show calls c')
self.assertEqual('continuous', candidates)
candidates = cmd2.complete(['1'], '1', 0, run=False, full_line='show calls c')
self.assertEqual('100', candidates)
candidates = cmd2.complete(['1'], '1', 1, run=False, full_line='show calls c')
self.assertEqual('101', candidates)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"io.StringIO",
"contextlib.redirect_stdout",
"ishell.command.Command",
"ishell.console.Console"
] |
[((7223, 7238), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7236, 7238), False, 'import unittest\n'), ((263, 272), 'ishell.console.Console', 'Console', ([], {}), '()\n', (270, 272), False, 'from ishell.console import Console\n'), ((422, 431), 'ishell.console.Console', 'Console', ([], {}), '()\n', (429, 431), False, 'from ishell.console import Console\n'), ((670, 690), 'ishell.command.Command', 'Command', (['"""configure"""'], {}), "('configure')\n", (677, 690), False, 'from ishell.command import Command\n'), ((929, 949), 'ishell.command.Command', 'Command', (['"""configure"""'], {}), "('configure')\n", (936, 949), False, 'from ishell.command import Command\n'), ((965, 984), 'ishell.command.Command', 'Command', (['"""terminal"""'], {}), "('terminal')\n", (972, 984), False, 'from ishell.command import Command\n'), ((1358, 1378), 'ishell.command.Command', 'Command', (['"""configure"""'], {}), "('configure')\n", (1365, 1378), False, 'from ishell.command import Command\n'), ((1394, 1413), 'ishell.command.Command', 'Command', (['"""terminal"""'], {}), "('terminal')\n", (1401, 1413), False, 'from ishell.command import Command\n'), ((1429, 1449), 'ishell.command.Command', 'Command', (['"""interface"""'], {}), "('interface')\n", (1436, 1449), False, 'from ishell.command import Command\n'), ((2111, 2131), 'ishell.command.Command', 'Command', (['"""configure"""'], {}), "('configure')\n", (2118, 2131), False, 'from ishell.command import Command\n'), ((2147, 2166), 'ishell.command.Command', 'Command', (['"""terminal"""'], {}), "('terminal')\n", (2154, 2166), False, 'from ishell.command import Command\n'), ((2229, 2250), 'ishell.command.Command', 'Command', (['"""terminal_1"""'], {}), "('terminal_1')\n", (2236, 2250), False, 'from ishell.command import Command\n'), ((3609, 3629), 'ishell.command.Command', 'Command', (['"""configure"""'], {}), "('configure')\n", (3616, 3629), False, 'from ishell.command import Command\n'), ((3645, 3664), 'ishell.command.Command', 'Command', (['"""terminal"""'], {}), "('terminal')\n", (3652, 3664), False, 'from ishell.command import Command\n'), ((3680, 3701), 'ishell.command.Command', 'Command', (['"""terminal_1"""'], {}), "('terminal_1')\n", (3687, 3701), False, 'from ishell.command import Command\n'), ((3773, 3790), 'ishell.command.Command', 'Command', (['"""option"""'], {}), "('option')\n", (3780, 3790), False, 'from ishell.command import Command\n'), ((3862, 3877), 'ishell.command.Command', 'Command', (['"""Aaaa"""'], {}), "('Aaaa')\n", (3869, 3877), False, 'from ishell.command import Command\n'), ((3921, 3933), 'ishell.command.Command', 'Command', (['"""B"""'], {}), "('B')\n", (3928, 3933), False, 'from ishell.command import Command\n'), ((4951, 4971), 'ishell.command.Command', 'Command', (['"""configure"""'], {}), "('configure')\n", (4958, 4971), False, 'from ishell.command import Command\n'), ((4987, 5006), 'ishell.command.Command', 'Command', (['"""terminal"""'], {}), "('terminal')\n", (4994, 5006), False, 'from ishell.command import Command\n'), ((5344, 5359), 'ishell.command.Command', 'Command', (['"""show"""'], {}), "('show')\n", (5351, 5359), False, 'from ishell.command import Command\n'), ((5375, 5409), 'ishell.command.Command', 'Command', (['"""call"""'], {'dynamic_args': '(True)'}), "('call', dynamic_args=True)\n", (5382, 5409), False, 'from ishell.command import Command\n'), ((5425, 5460), 'ishell.command.Command', 'Command', (['"""calls"""'], {'dynamic_args': '(True)'}), "('calls', dynamic_args=True)\n", (5432, 5460), False, 'from ishell.command import Command\n'), ((4044, 4057), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (4055, 4057), False, 'import io\n'), ((4066, 4086), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (4081, 4086), False, 'from contextlib import redirect_stdout\n'), ((4447, 4460), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (4458, 4460), False, 'import io\n'), ((4469, 4489), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (4484, 4489), False, 'from contextlib import redirect_stdout\n')]
|
#!/usr/bin/env python3
#
# Tests if the Fitzhugh-Nagumo toy model runs.
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
import unittest
import pints
import pints.toy
import numpy as np
class TestFitzhughNagumoModel(unittest.TestCase):
"""
Tests if the Fitzhugh-Nagumo toy model runs.
"""
def test_run(self):
# Test basic properties
model = pints.toy.FitzhughNagumoModel()
self.assertEqual(model.n_parameters(), 3)
self.assertEqual(model.n_outputs(), 2)
# Test simulation
x = model.suggested_parameters()
times = model.suggested_times()
values = model.simulate(x, times)
self.assertEqual(values.shape, (len(times), 2))
# Simulation with sensitivities
values, dvalues_dp = model.simulateS1(x, times)
self.assertEqual(values.shape, (len(times), 2))
self.assertEqual(dvalues_dp.shape, (len(times), 2, 3))
# Test alternative starting position
model = pints.toy.FitzhughNagumoModel([0.1, 0.1])
values = model.simulate(x, times)
self.assertEqual(values.shape, (len(times), 2))
# Times can't be negative
times = [-1, 2, 3, 4]
self.assertRaises(ValueError, model.simulate, x, times)
# Initial value must have size 2
pints.toy.FitzhughNagumoModel([1, 1])
self.assertRaises(ValueError, pints.toy.FitzhughNagumoModel, [1])
def test_values(self):
# value-based tests of Fitzhugh-Nagumo model
parameters = [0.2, 0.4, 2.5]
y0 = [-2, 1.5]
times = np.linspace(0, 20, 201)
model = pints.toy.FitzhughNagumoModel(y0)
values = model.simulate(parameters, times)
self.assertAlmostEqual(values[200, 0], 1.675726, places=6)
self.assertAlmostEqual(values[200, 1], -0.226142, places=6)
def test_sensitivities(self):
# compares sensitivities against standards
model = pints.toy.FitzhughNagumoModel([2, 3])
parameters = [0.2, 0.7, 2.8]
# Test with initial point t=0 included in range
sols, sens = model.simulateS1(parameters, [0, 7, 12])
self.assertAlmostEqual(sens[1, 0, 2], 5.01378, 5)
self.assertAlmostEqual(sens[2, 1, 1], 0.82883, 4)
# Test without initial point in range
sols, sens = model.simulateS1(parameters, [7, 12])
self.assertAlmostEqual(sens[0, 0, 2], 5.01378, 5)
self.assertAlmostEqual(sens[1, 1, 1], 0.82883, 4)
# Test without any points in range
sols, sens = model.simulateS1(parameters, [])
self.assertEqual(sols.shape, (0, 2))
self.assertEqual(sens.shape, (0, 2, 3))
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"numpy.linspace",
"pints.toy.FitzhughNagumoModel"
] |
[((2848, 2863), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2861, 2863), False, 'import unittest\n'), ((529, 560), 'pints.toy.FitzhughNagumoModel', 'pints.toy.FitzhughNagumoModel', ([], {}), '()\n', (558, 560), False, 'import pints\n'), ((1142, 1183), 'pints.toy.FitzhughNagumoModel', 'pints.toy.FitzhughNagumoModel', (['[0.1, 0.1]'], {}), '([0.1, 0.1])\n', (1171, 1183), False, 'import pints\n'), ((1461, 1498), 'pints.toy.FitzhughNagumoModel', 'pints.toy.FitzhughNagumoModel', (['[1, 1]'], {}), '([1, 1])\n', (1490, 1498), False, 'import pints\n'), ((1730, 1753), 'numpy.linspace', 'np.linspace', (['(0)', '(20)', '(201)'], {}), '(0, 20, 201)\n', (1741, 1753), True, 'import numpy as np\n'), ((1770, 1803), 'pints.toy.FitzhughNagumoModel', 'pints.toy.FitzhughNagumoModel', (['y0'], {}), '(y0)\n', (1799, 1803), False, 'import pints\n'), ((2092, 2129), 'pints.toy.FitzhughNagumoModel', 'pints.toy.FitzhughNagumoModel', (['[2, 3]'], {}), '([2, 3])\n', (2121, 2129), False, 'import pints\n')]
|
from sklearn.metrics import matthews_corrcoef
y_true = [+1, +1, +1, -1]
y_pred = [+1, -1, +1, +1]
matthews_corrcoef(y_true, y_pred)
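# For reference, MCC can be recomputed from the confusion counts of the
# example above (TP=2, TN=0, FP=1, FN=1); both give -1/3:
import math
tp, tn, fp, fn = 2, 0, 1, 1
mcc = (tp * tn - fp * fn) / math.sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn))
print(mcc)  # -0.333...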
|
[
"sklearn.metrics.matthews_corrcoef"
] |
[((98, 131), 'sklearn.metrics.matthews_corrcoef', 'matthews_corrcoef', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (115, 131), False, 'from sklearn.metrics import matthews_corrcoef\n')]
|
from chatterbot import ChatBot
from chatterbot.trainers import ListTrainer
from spacy.cli import download
download('en_core_web_sm')
class ENGSM:
ISO_639_1 = 'en_core_web_sm'
chatbot = ChatBot('Botencio', tagger_language=ENGSM)
conversa = [
'Olá',
'Eai',
'Como você está?',
'Estou bem, e você?',
'Estou bem também',
'Então, alguma novidade ai?',
'Sim, aprendendo várias coisas',
'Legal, tipo o que?',
'Hoje to aprendendo sobre chatbots.',
'Interessante!',
'Bastante, e você aprendendo algo?',
'Várias coisas também, mas são coisas relativas',
'Bacana, importante é manter o aprendizado constante né!',
'Isso mesmo.',
'Então é isso',
'Tamo ai'
]
trainer = ListTrainer(chatbot)
trainer.train(conversa)
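# A minimal usage sketch once training has finished (get_response is part
# of the public ChatterBot API):
# print(chatbot.get_response('Como você está?'))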
|
[
"spacy.cli.download",
"chatterbot.trainers.ListTrainer",
"chatterbot.ChatBot"
] |
[((108, 134), 'spacy.cli.download', 'download', (['"""en_core_web_sm"""'], {}), "('en_core_web_sm')\n", (116, 134), False, 'from spacy.cli import download\n'), ((194, 237), 'chatterbot.ChatBot', 'ChatBot', (['"""Botencio"""'], {'tagger_langugage': 'ENGSM'}), "('Botencio', tagger_langugage=ENGSM)\n", (201, 237), False, 'from chatterbot import ChatBot\n'), ((732, 752), 'chatterbot.trainers.ListTrainer', 'ListTrainer', (['chatbot'], {}), '(chatbot)\n', (743, 752), False, 'from chatterbot.trainers import ListTrainer\n')]
|
from inspect import iscoroutinefunction
from typing import Any, Callable, Optional
from ..protocol import Observable, Observer, Subscription, rx_observer_from
from .rx_create import rx_create
__all__ = ["rx_map"]
def rx_map(
observable: Observable, transform: Callable, expand_arg_parameters: Optional[bool] = False, expand_kwarg_parameters: Optional[bool] = False
) -> Observable:
"""Map operator.
    The map operator turns an Observable<A> into an Observable<B> given a function of type A->B.
    For example, applying the function x => 10 * x to the values 1, 2, 3 yields 10, 20, 30.
    Note that in this example the element type of the Observable is unchanged; only the values are.
Args:
observable (Observable): an observable instance
transform (Callable): transform function (sync or async)
        expand_arg_parameters (Optional[bool]): if true, each item is unpacked as positional args before calling transform
            (implies expand_kwarg_parameters = False).
        expand_kwarg_parameters (Optional[bool]): if true, each item is unpacked as keyword args before calling transform.
Returns:
(Observable): observable instance
"""
_is_awaitable = iscoroutinefunction(transform)
async def _subscribe(an_observer: Observer) -> Subscription:
async def _on_next(item: Any):
nonlocal _is_awaitable
if expand_kwarg_parameters:
_next_item = await transform(**item) if _is_awaitable else transform(**item)
elif expand_arg_parameters:
_next_item = await transform(*item) if _is_awaitable else transform(*item)
else:
_next_item = await transform(item) if _is_awaitable else transform(item)
await an_observer.on_next(_next_item)
return await observable.subscribe(rx_observer_from(observer=an_observer, on_next=_on_next))
return rx_create(subscribe=_subscribe)
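# A minimal usage sketch (rx_from as an illustrative source observable is
# an assumption about the surrounding library; only rx_map is defined here):
#
#   doubled = rx_map(rx_from([1, 2, 3]), transform=lambda x: 10 * x)
#   # subscribing an observer to `doubled` then emits 10, 20, 30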
|
[
"inspect.iscoroutinefunction"
] |
[((1226, 1256), 'inspect.iscoroutinefunction', 'iscoroutinefunction', (['transform'], {}), '(transform)\n', (1245, 1256), False, 'from inspect import iscoroutinefunction\n')]
|
import os
from sendgrid import sendgrid, Email, Content, Mail, To
from django.conf import settings
class SendGridAPIError(Exception):
pass
class EmailClient(object):
def __init__(self, from_email=None, api_key=None):
self.from_email = from_email if from_email else settings.DEFAULT_FROM_EMAIL
if not api_key:
try:
                # Can't just give .get() a default here: if the variable is set to an
                # empty string, .get() would return '' instead of the fallback
self.api_key = os.environ['SENDGRID_API_KEY'] \
if os.environ.get('SENDGRID_API_KEY') \
else 'sendgrid-api-key-not-set'
except KeyError:
raise SendGridAPIError("SENDGRID_API_KEY must be passed as an argument or"
" set as an environment variable")
else:
self.api_key = api_key
self.sendgrid_client = sendgrid.SendGridAPIClient(self.api_key)
def send(self, to_email=None, subject=None, content=None, content_type=None):
from_email = Email(self.from_email)
to_email = To(to_email)
content = Content(content_type, content)
mail = Mail(from_email, to_email, subject, content)
return self.sendgrid_client.client.mail.send.post(request_body=mail.get())
if __name__ == '__main__':
# apikey_message = "Current API Key: " + os.environ.get('SENDGRID_API_KEY')
# print("-"*(len(apikey_message)+1))
# print("Current API Key: ", os.environ.get('SENDGRID_API_KEY'))
c = EmailClient()
print("From Email: ", c.from_email)
# print("-"*(len(apikey_message)+1))
result = c.send(to_email='<EMAIL>', subject='test test',
content='hi', content_type='text/plain')
print("Message Sent")
print("Status Code: ", result.status_code)
# print("-"*(len(apikey_message)+1))
|
[
"sendgrid.Mail",
"sendgrid.Email",
"sendgrid.Content",
"os.environ.get",
"sendgrid.To",
"sendgrid.sendgrid.SendGridAPIClient"
] |
[((916, 956), 'sendgrid.sendgrid.SendGridAPIClient', 'sendgrid.SendGridAPIClient', (['self.api_key'], {}), '(self.api_key)\n', (942, 956), False, 'from sendgrid import sendgrid, Email, Content, Mail, To\n'), ((1061, 1083), 'sendgrid.Email', 'Email', (['self.from_email'], {}), '(self.from_email)\n', (1066, 1083), False, 'from sendgrid import sendgrid, Email, Content, Mail, To\n'), ((1103, 1115), 'sendgrid.To', 'To', (['to_email'], {}), '(to_email)\n', (1105, 1115), False, 'from sendgrid import sendgrid, Email, Content, Mail, To\n'), ((1134, 1164), 'sendgrid.Content', 'Content', (['content_type', 'content'], {}), '(content_type, content)\n', (1141, 1164), False, 'from sendgrid import sendgrid, Email, Content, Mail, To\n'), ((1180, 1224), 'sendgrid.Mail', 'Mail', (['from_email', 'to_email', 'subject', 'content'], {}), '(from_email, to_email, subject, content)\n', (1184, 1224), False, 'from sendgrid import sendgrid, Email, Content, Mail, To\n'), ((553, 587), 'os.environ.get', 'os.environ.get', (['"""SENDGRID_API_KEY"""'], {}), "('SENDGRID_API_KEY')\n", (567, 587), False, 'import os\n')]
|
import skrf
import tkinter as tk
from matplotlib.figure import Figure
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
import numpy as np
import CircuitFig
from PIL import ImageTk, Image, ImageDraw
import io
import MatchCal
l2z = lambda l: l[0] + 1j * l[1]
s4cmp = lambda sf: 'nH' if sf == 'l' else 'pF'
def ld4img2gui(label: tk.Label,
color: str, stage: int, sh_se: bool,
cmp_l: list, cmp_v: list, z_val: str = '50+0j',
valid: bool = True):
cr_cfg = CircuitFig.CircuitFig(color, stage, sh_se, cmp_l, cmp_v, z_val)
image = Image.open(io.BytesIO(cr_cfg.image_data)).resize((300, 180), Image.ANTIALIAS)
im = Image.new('RGBA', (300, 180), (255, 255, 255, 255))
draw = ImageDraw.Draw(im)
im.paste(image, (0, 0))
if not valid:
draw.line((0, 0, 300, 180), fill=(255, 0, 0, 255), width=5)
draw.line((0, 180, 300, 0), fill=(255, 0, 0, 255), width=5)
label.image = ImageTk.PhotoImage(im)
label.configure(image=label.image)
class TkGui:
def __init__(self, master):
self.master = master
self.top_frame = tk.Frame(self.master)
self.top_frame.pack(side=tk.LEFT)
self.right_frame = tk.Frame(self.master)
self.right_frame.pack(side=tk.LEFT, fill=tk.BOTH)
self.upper_sch_f = tk.Frame(self.right_frame)
self.upper_sch_f.grid(row=0, padx=(0, 5), pady=(5, 0), sticky="nsew")
self.lower_ety_f = tk.Frame(self.right_frame)
self.lower_ety_f.grid(row=1, padx=(0, 5), pady=(0, 5), sticky="nsew")
self.fig = Figure(figsize=(5, 6), dpi=100)
self.fig_cvs = FigureCanvasTkAgg(self.fig, master=self.top_frame)
        self.ax = self.fig.gca()  # gca() returns the figure's Axes
self.fig_cvs.get_tk_widget().pack(side=tk.LEFT, padx=5, pady=5)
try:
with open('ring slot.s1p', 'r'):
pass
except IOError:
with open('ring slot.s1p', 'a+') as wf:
wf.write("""!Created with skrf (http://scikit-rf.org).
# GHz S RI R 50.0
!freq ReS11 ImS11
75.0 -0.503723180993 0.457844804761""")
self.my_slot = skrf.Network('ring slot.s1p')
self.to_match_z = [50, 0]
self.ser_match_z = [50, 0]
self.shu_match_z = [50, 0]
self.shu_ser_match_z_a = [50, 0]
self.shu_ser_match_z_b = [50, 0]
self.ser_shu_match_z_a = [50, 0]
self.ser_shu_match_z_b = [50, 0]
self.plt_z0 = 50 + 0j
self.plt_freq = 2.45e9
self.up2chart()
self.lb1 = tk.Label(self.upper_sch_f, relief="ridge")
self.lb1_tit = tk.Label(
self.upper_sch_f, text='Shunt Matching', relief="raised").grid(
row=0, column=0, sticky="nsew")
self.lb1.grid(row=1, column=0)
self.lb2 = tk.Label(self.upper_sch_f, relief="ridge")
self.lb2_tit = tk.Label(
self.upper_sch_f, text='Series Matching', relief="raised").grid(
row=0, column=1, sticky="nsew")
self.lb2.grid(row=1, column=1)
self.lb3 = tk.Label(self.upper_sch_f, relief="ridge")
self.lb3_tit = tk.Label(
self.upper_sch_f, text='Shunt-Series Matching', relief="raised").grid(
row=2, column=0, sticky="nsew")
self.lb3.grid(row=3, column=0)
self.lb4 = tk.Label(self.upper_sch_f, relief="ridge")
self.lb4_tit = tk.Label(
self.upper_sch_f, text='Shunt-Series Matching', relief="raised").grid(
row=2, column=1, sticky="nsew")
self.lb4.grid(row=3, column=1)
self.lb5 = tk.Label(self.upper_sch_f, relief="ridge")
self.lb5_tit = tk.Label(
self.upper_sch_f, text='Series-Shunt Matching', relief="raised").grid(
row=4, column=0, sticky="nsew")
self.lb5.grid(row=5, column=0)
self.lb6 = tk.Label(self.upper_sch_f, relief="ridge")
self.lb6_tit = tk.Label(
self.upper_sch_f, text='Series-Shunt Matching', relief="raised").grid(
row=4, column=1, sticky="nsew")
self.lb6.grid(row=5, column=1)
ld4img2gui(self.lb1, 'b', 1, False, ['c', 'l', 'c'], ['NC', 'SHORT', ''])
ld4img2gui(self.lb2, 'y', 1, True, ['c', 'l', 'c'], ['', 'SHORT', ''])
ld4img2gui(self.lb3, 'g', 2, False, ['c', 'l', 'c'], ['NC', 'SHORT', ''])
ld4img2gui(self.lb4, 'purple', 2, False, ['c', 'l', 'c'], ['NC', 'SHORT', ''])
ld4img2gui(self.lb5, 'orange', 2, True, ['c', 'l', 'c'], ['', 'SHORT', 'NC'])
ld4img2gui(self.lb6, 'brown', 2, True, ['c', 'l', 'c'], ['', 'SHORT', 'NC'])
###################################################################
self.to_match_r = tk.StringVar(value=str(self.to_match_z[0]))
self.to_match_i = tk.StringVar(value=str(self.to_match_z[1]))
self.ety_lb1 = tk.Label(self.lower_ety_f, text='To Match Complex Value')
self.ety_lb1.pack(side=tk.TOP)
self.ety_lb1b = tk.Label(self.lower_ety_f, text='Z = ')
self.ety_lb1b.pack(side=tk.LEFT)
self.ety1_r = tk.Entry(self.lower_ety_f, textvariable=self.to_match_r)
self.ety1_r.pack(side=tk.LEFT)
self.ety_lb1c = tk.Label(self.lower_ety_f, text=' + ')
self.ety_lb1c.pack(side=tk.LEFT)
self.ety1_i = tk.Entry(self.lower_ety_f, textvariable=self.to_match_i)
self.ety1_i.pack(side=tk.LEFT)
self.ety_lb1c = tk.Label(self.lower_ety_f, text='j')
self.ety_lb1c.pack(side=tk.LEFT)
self.enter = tk.Button(self.lower_ety_f, text="Start Auto Solver",
command=self.ld2chart)
self.enter.pack(side=tk.LEFT)
def ld2chart(self):
self.to_match_z = [float(self.ety1_r.get()), float(self.ety1_i.get())]
tmp_cal = MatchCal.MatchCal()
tmp_cal.tar_freq = self.plt_freq
to_mat = float(self.ety1_r.get()) + 1j * float(self.ety1_i.get())
tmp_cal.shu_0_sol(to_mat)
disp_str = f'{tmp_cal.shu:.2f} {s4cmp(tmp_cal.shu_t)}' if tmp_cal.shu else 'NC'
ld4img2gui(self.lb1, 'b', 1, False, [tmp_cal.shu_t, 'l', 'c'],
[disp_str, 'SHORT', ''],
f'{int(tmp_cal.tmp_z.real)}+{int(tmp_cal.tmp_z.imag)}j',
tmp_cal.sol_valid)
self.ser_match_z = [tmp_cal.tmp_z.real, tmp_cal.tmp_z.imag]
tmp_cal.ser_0_sol(to_mat)
disp_str = f'{tmp_cal.ser:.2f} {s4cmp(tmp_cal.ser_t)}' if tmp_cal.ser else 'SHORT'
ld4img2gui(self.lb2, 'y', 1, True, ['c', tmp_cal.ser_t, 'c'],
['', disp_str, ''],
f'{int(tmp_cal.tmp_z.real)}+{int(tmp_cal.tmp_z.imag)}j',
tmp_cal.sol_valid)
self.shu_match_z = [tmp_cal.tmp_z.real, tmp_cal.tmp_z.imag]
tmp_cal.sol_2stage(to_mat, True)
disp_str1 = f'{tmp_cal.ser:.2f} {s4cmp(tmp_cal.ser_t)}' if tmp_cal.ser else 'SHORT'
disp_str2 = f'{tmp_cal.shu:.2f} {s4cmp(tmp_cal.shu_t)}' if tmp_cal.shu else 'NC'
ld4img2gui(self.lb3, 'g', 2, False, [tmp_cal.shu_t, tmp_cal.ser_t, 'c'],
[disp_str2, disp_str1, ''],
f'{int(tmp_cal.tmp_z.real)}+{int(tmp_cal.tmp_z.imag)}j',
tmp_cal.sol_valid)
self.shu_ser_match_z_a = [tmp_cal.tmp_z.real, tmp_cal.tmp_z.imag]
tmp_cal.sol_2stage(to_mat, True, True)
disp_str1 = f'{tmp_cal.ser:.2f} {s4cmp(tmp_cal.ser_t)}' if tmp_cal.ser else 'SHORT'
disp_str2 = f'{tmp_cal.shu:.2f} {s4cmp(tmp_cal.shu_t)}' if tmp_cal.shu else 'NC'
ld4img2gui(self.lb4, 'purple', 2, False, [tmp_cal.shu_t, tmp_cal.ser_t, 'c'],
[disp_str2, disp_str1, ''],
f'{int(tmp_cal.tmp_z.real)}+{int(tmp_cal.tmp_z.imag)}j',
tmp_cal.sol_valid)
self.shu_ser_match_z_b = [tmp_cal.tmp_z.real, tmp_cal.tmp_z.imag]
tmp_cal.sol_2stage(to_mat)
disp_str1 = f'{tmp_cal.ser:.2f} {s4cmp(tmp_cal.ser_t)}' if tmp_cal.ser else 'SHORT'
disp_str2 = f'{tmp_cal.shu:.2f} {s4cmp(tmp_cal.shu_t)}' if tmp_cal.shu else 'NC'
ld4img2gui(self.lb5, 'orange', 2, True, ['c', tmp_cal.ser_t, tmp_cal.shu_t],
['', disp_str1, disp_str2],
f'{int(tmp_cal.tmp_z.real)}+{int(tmp_cal.tmp_z.imag)}j',
tmp_cal.sol_valid)
self.ser_shu_match_z_a = [tmp_cal.tmp_z.real, tmp_cal.tmp_z.imag]
tmp_cal.sol_2stage(to_mat, ans_sel=True)
disp_str1 = f'{tmp_cal.ser:.2f} {s4cmp(tmp_cal.ser_t)}' if tmp_cal.ser else 'SHORT'
disp_str2 = f'{tmp_cal.shu:.2f} {s4cmp(tmp_cal.shu_t)}' if tmp_cal.shu else 'NC'
ld4img2gui(self.lb6, 'brown', 2, True, ['c', tmp_cal.ser_t, tmp_cal.shu_t],
['', disp_str1, disp_str2],
f'{int(tmp_cal.tmp_z.real)}+{int(tmp_cal.tmp_z.imag)}j',
tmp_cal.sol_valid)
self.ser_shu_match_z_b = [tmp_cal.tmp_z.real, tmp_cal.tmp_z.imag]
self.up2chart()
def up2chart(self):
self.ax.clear()
self.fig2gui(np.array([[[l2z(self.to_match_z)]]]), 'To Match', 'r', 's')
self.fig2gui(np.array([[[l2z(self.ser_match_z)]]]), 'After Match', 'b', 'o')
self.fig2gui(np.array([[[l2z(self.shu_match_z)]]]), 'After Match', 'y', 'o')
self.fig2gui(np.array([[[l2z(self.shu_ser_match_z_a)]]]), 'After Match', 'g', 'o')
self.fig2gui(np.array([[[l2z(self.shu_ser_match_z_b)]]]), 'After Match', 'purple', 'o')
self.fig2gui(np.array([[[l2z(self.ser_shu_match_z_a)]]]), 'After Match', 'orange', 'o')
self.fig2gui(np.array([[[l2z(self.ser_shu_match_z_b)]]]), 'After Match', 'brown', 'o')
    def fig2gui(self, plt_data: np.ndarray,
label: str = '', color: str = 'r', mark: str = 's',
plt_sel: bool = False) -> None:
self.my_slot.frequency = self.plt_freq
self.my_slot.z0 = self.plt_z0
self.my_slot.z = plt_data
if plt_sel:
self.my_slot.plot_s_db(ax=self.ax)
else:
self.my_slot.plot_s_smith(ax=self.ax, draw_labels=True, show_legend=False,
label=label, color=color, chart_type='zy', marker=mark)
self.ax.legend(bbox_to_anchor=(0.5, 1.05), loc='lower center', ncol=3,
fancybox=True, shadow=True)
self.fig_cvs.draw()
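# A minimal launcher sketch (assumes the CircuitFig and MatchCal modules
# imported above are importable, plus a writable working directory for
# the 'ring slot.s1p' fallback file):
if __name__ == '__main__':
    root = tk.Tk()
    TkGui(root)
    root.mainloop()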
|
[
"skrf.Network",
"PIL.Image.new",
"PIL.ImageTk.PhotoImage",
"io.BytesIO",
"tkinter.Button",
"CircuitFig.CircuitFig",
"tkinter.Entry",
"MatchCal.MatchCal",
"matplotlib.figure.Figure",
"tkinter.Frame",
"PIL.ImageDraw.Draw",
"tkinter.Label",
"matplotlib.backends.backend_tkagg.FigureCanvasTkAgg"
] |
[((519, 582), 'CircuitFig.CircuitFig', 'CircuitFig.CircuitFig', (['color', 'stage', 'sh_se', 'cmp_l', 'cmp_v', 'z_val'], {}), '(color, stage, sh_se, cmp_l, cmp_v, z_val)\n', (540, 582), False, 'import CircuitFig\n'), ((683, 734), 'PIL.Image.new', 'Image.new', (['"""RGBA"""', '(300, 180)', '(255, 255, 255, 255)'], {}), "('RGBA', (300, 180), (255, 255, 255, 255))\n", (692, 734), False, 'from PIL import ImageTk, Image, ImageDraw\n'), ((746, 764), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['im'], {}), '(im)\n', (760, 764), False, 'from PIL import ImageTk, Image, ImageDraw\n'), ((965, 987), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['im'], {}), '(im)\n', (983, 987), False, 'from PIL import ImageTk, Image, ImageDraw\n'), ((1130, 1151), 'tkinter.Frame', 'tk.Frame', (['self.master'], {}), '(self.master)\n', (1138, 1151), True, 'import tkinter as tk\n'), ((1221, 1242), 'tkinter.Frame', 'tk.Frame', (['self.master'], {}), '(self.master)\n', (1229, 1242), True, 'import tkinter as tk\n'), ((1329, 1355), 'tkinter.Frame', 'tk.Frame', (['self.right_frame'], {}), '(self.right_frame)\n', (1337, 1355), True, 'import tkinter as tk\n'), ((1461, 1487), 'tkinter.Frame', 'tk.Frame', (['self.right_frame'], {}), '(self.right_frame)\n', (1469, 1487), True, 'import tkinter as tk\n'), ((1586, 1617), 'matplotlib.figure.Figure', 'Figure', ([], {'figsize': '(5, 6)', 'dpi': '(100)'}), '(figsize=(5, 6), dpi=100)\n', (1592, 1617), False, 'from matplotlib.figure import Figure\n'), ((1641, 1691), 'matplotlib.backends.backend_tkagg.FigureCanvasTkAgg', 'FigureCanvasTkAgg', (['self.fig'], {'master': 'self.top_frame'}), '(self.fig, master=self.top_frame)\n', (1658, 1691), False, 'from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg\n'), ((2156, 2185), 'skrf.Network', 'skrf.Network', (['"""ring slot.s1p"""'], {}), "('ring slot.s1p')\n", (2168, 2185), False, 'import skrf\n'), ((2560, 2602), 'tkinter.Label', 'tk.Label', (['self.upper_sch_f'], {'relief': '"""ridge"""'}), "(self.upper_sch_f, relief='ridge')\n", (2568, 2602), True, 'import tkinter as tk\n'), ((2815, 2857), 'tkinter.Label', 'tk.Label', (['self.upper_sch_f'], {'relief': '"""ridge"""'}), "(self.upper_sch_f, relief='ridge')\n", (2823, 2857), True, 'import tkinter as tk\n'), ((3071, 3113), 'tkinter.Label', 'tk.Label', (['self.upper_sch_f'], {'relief': '"""ridge"""'}), "(self.upper_sch_f, relief='ridge')\n", (3079, 3113), True, 'import tkinter as tk\n'), ((3333, 3375), 'tkinter.Label', 'tk.Label', (['self.upper_sch_f'], {'relief': '"""ridge"""'}), "(self.upper_sch_f, relief='ridge')\n", (3341, 3375), True, 'import tkinter as tk\n'), ((3595, 3637), 'tkinter.Label', 'tk.Label', (['self.upper_sch_f'], {'relief': '"""ridge"""'}), "(self.upper_sch_f, relief='ridge')\n", (3603, 3637), True, 'import tkinter as tk\n'), ((3857, 3899), 'tkinter.Label', 'tk.Label', (['self.upper_sch_f'], {'relief': '"""ridge"""'}), "(self.upper_sch_f, relief='ridge')\n", (3865, 3899), True, 'import tkinter as tk\n'), ((4842, 4899), 'tkinter.Label', 'tk.Label', (['self.lower_ety_f'], {'text': '"""To Match Complex Value"""'}), "(self.lower_ety_f, text='To Match Complex Value')\n", (4850, 4899), True, 'import tkinter as tk\n'), ((4963, 5002), 'tkinter.Label', 'tk.Label', (['self.lower_ety_f'], {'text': '"""Z = """'}), "(self.lower_ety_f, text='Z = ')\n", (4971, 5002), True, 'import tkinter as tk\n'), ((5066, 5122), 'tkinter.Entry', 'tk.Entry', (['self.lower_ety_f'], {'textvariable': 'self.to_match_r'}), '(self.lower_ety_f, textvariable=self.to_match_r)\n', (5074, 5122), True, 'import tkinter as tk\n'), ((5186, 5224), 'tkinter.Label', 'tk.Label', (['self.lower_ety_f'], {'text': '""" + """'}), "(self.lower_ety_f, text=' + ')\n", (5194, 5224), True, 'import tkinter as tk\n'), ((5288, 5344), 'tkinter.Entry', 'tk.Entry', (['self.lower_ety_f'], {'textvariable': 'self.to_match_i'}), '(self.lower_ety_f, textvariable=self.to_match_i)\n', (5296, 5344), True, 'import tkinter as tk\n'), ((5408, 5444), 'tkinter.Label', 'tk.Label', (['self.lower_ety_f'], {'text': '"""j"""'}), "(self.lower_ety_f, text='j')\n", (5416, 5444), True, 'import tkinter as tk\n'), ((5508, 5584), 'tkinter.Button', 'tk.Button', (['self.lower_ety_f'], {'text': '"""Start Auto Solver"""', 'command': 'self.ld2chart'}), "(self.lower_ety_f, text='Start Auto Solver', command=self.ld2chart)\n", (5517, 5584), True, 'import tkinter as tk\n'), ((5777, 5796), 'MatchCal.MatchCal', 'MatchCal.MatchCal', ([], {}), '()\n', (5794, 5796), False, 'import MatchCal\n'), ((606, 635), 'io.BytesIO', 'io.BytesIO', (['cr_cfg.image_data'], {}), '(cr_cfg.image_data)\n', (616, 635), False, 'import io\n'), ((2626, 2692), 'tkinter.Label', 'tk.Label', (['self.upper_sch_f'], {'text': '"""Shunt Matching"""', 'relief': '"""raised"""'}), "(self.upper_sch_f, text='Shunt Matching', relief='raised')\n", (2634, 2692), True, 'import tkinter as tk\n'), ((2881, 2948), 'tkinter.Label', 'tk.Label', (['self.upper_sch_f'], {'text': '"""Series Matching"""', 'relief': '"""raised"""'}), "(self.upper_sch_f, text='Series Matching', relief='raised')\n", (2889, 2948), True, 'import tkinter as tk\n'), ((3137, 3210), 'tkinter.Label', 'tk.Label', (['self.upper_sch_f'], {'text': '"""Shunt-Series Matching"""', 'relief': '"""raised"""'}), "(self.upper_sch_f, text='Shunt-Series Matching', relief='raised')\n", (3145, 3210), True, 'import tkinter as tk\n'), ((3399, 3472), 'tkinter.Label', 'tk.Label', (['self.upper_sch_f'], {'text': '"""Shunt-Series Matching"""', 'relief': '"""raised"""'}), "(self.upper_sch_f, text='Shunt-Series Matching', relief='raised')\n", (3407, 3472), True, 'import tkinter as tk\n'), ((3661, 3734), 'tkinter.Label', 'tk.Label', (['self.upper_sch_f'], {'text': '"""Series-Shunt Matching"""', 'relief': '"""raised"""'}), "(self.upper_sch_f, text='Series-Shunt Matching', relief='raised')\n", (3669, 3734), True, 'import tkinter as tk\n'), ((3923, 3996), 'tkinter.Label', 'tk.Label', (['self.upper_sch_f'], {'text': '"""Series-Shunt Matching"""', 'relief': '"""raised"""'}), "(self.upper_sch_f, text='Series-Shunt Matching', relief='raised')\n", (3931, 3996), True, 'import tkinter as tk\n')]
|
from multiprocessing import Process, Queue, Pipe
from baselines import deepq
import gym
from deepq.asyn_sec.actor_interact_env import actor_inter
from deepq.asyn_sec.simple_multi_agent import learn
# from deepq.asyn_trainer_actor.new_models import mlp
from deepq.models import mlp
# from p2os_test.src.seventh_edition_gpw import set_gpw_num
from p2os_test.src.sixth_edition_gpw import set_gpw_num
from baselines.common import set_global_seeds
def trainer(in_actor_deque, in_action_pipes):
    # a termination condition for the learn process could be set here
env = gym.make("GpwTrainer-v0")
# env = gym.make("MountainCar-v0")
# model = deepq.models.mlp([64])
model = mlp([256, 256, 128], layer_norm=True)
act = learn(
env,
actor_deque=in_actor_deque,
action_pipes=in_action_pipes,
q_func=model,
lr=1e-4, # 1e-3
max_timesteps=5000000,
buffer_size=1000000, # 300000
exploration_fraction=0.30,
exploration_final_eps=0.05, # 0.02
train_freq=1,
batch_size=32,
        print_freq=30000,  # print_freq here is tied to steps, while the actor's is tied to episodes
# checkpoint_freq=10000,
# checkpoint_path=None,
learning_starts=1000,
gamma=1.0,
target_network_update_freq=500,
prioritized_replay=True,
prioritized_replay_alpha=0.6,
# prioritized_replay_beta0=0.4,
# param_noise=True,
)
print("All end")
print("Saving model")
act.save("asyn_rob_model.pkl")
env.close()
def actor(in_ac_num, in_actor_deque, in_action_pipes):
set_global_seeds(0)
    set_gpw_num(in_ac_num)  # set the environment number
acenv = gym.make("SixthRobGpw-v0")
# acenv = gym.make("MountainCar-v0")
actor_inter(env=acenv, ac_num=in_ac_num, actor_deque=in_actor_deque,
action_pipes=in_action_pipes, print_freq=20)
acenv.close()
def main():
pipes = [Pipe(duplex=False) for x in range(0, 4)]
pipes_conn1 = [pipes[i][1] for i in range(0, 4)]
actor_inf_queue = Queue(maxsize=5)
train_process = Process(target=trainer, args=(actor_inf_queue, pipes_conn1))
actor_process_01 = Process(target=actor, args=(1, actor_inf_queue, pipes[0][0]))
actor_process_02 = Process(target=actor, args=(2, actor_inf_queue, pipes[1][0]))
actor_process_03 = Process(target=actor, args=(3, actor_inf_queue, pipes[2][0]))
actor_process_04 = Process(target=actor, args=(4, actor_inf_queue, pipes[3][0]))
# actor_process_05 = Process(target=actor, args=(lock1, net_list, mem_queue, total_step, update_flag, 5))
# actor_process_06 = Process(target=actor, args=(lock1, net_list, mem_queue, total_step, update_flag, 6))
# actor_process_07 = Process(target=actor, args=(lock1, net_list, mem_queue, total_step, update_flag, 7))
# actor_process_08 = Process(target=actor, args=(lock1, net_list, mem_queue, total_step, update_flag, 8))
train_process.start()
actor_process_01.start()
actor_process_02.start()
actor_process_03.start()
actor_process_04.start()
# actor_process_05.start()
# actor_process_06.start()
# actor_process_07.start()
# actor_process_08.start()
train_process.join()
actor_process_01.join()
actor_process_02.join()
actor_process_03.join()
actor_process_04.join()
# actor_process_05.join()
# actor_process_06.join()
# actor_process_07.join()
# actor_process_08.join()
if __name__ == '__main__':
main()
|
[
"deepq.models.mlp",
"gym.make",
"deepq.asyn_sec.simple_multi_agent.learn",
"baselines.common.set_global_seeds",
"p2os_test.src.sixth_edition_gpw.set_gpw_num",
"deepq.asyn_sec.actor_interact_env.actor_inter",
"multiprocessing.Pipe",
"multiprocessing.Queue",
"multiprocessing.Process"
] |
[((525, 550), 'gym.make', 'gym.make', (['"""GpwTrainer-v0"""'], {}), "('GpwTrainer-v0')\n", (533, 550), False, 'import gym\n'), ((639, 676), 'deepq.models.mlp', 'mlp', (['[256, 256, 128]'], {'layer_norm': '(True)'}), '([256, 256, 128], layer_norm=True)\n', (642, 676), False, 'from deepq.models import mlp\n'), ((687, 1066), 'deepq.asyn_sec.simple_multi_agent.learn', 'learn', (['env'], {'actor_deque': 'in_actor_deque', 'action_pipes': 'in_action_pipes', 'q_func': 'model', 'lr': '(0.0001)', 'max_timesteps': '(5000000)', 'buffer_size': '(1000000)', 'exploration_fraction': '(0.3)', 'exploration_final_eps': '(0.05)', 'train_freq': '(1)', 'batch_size': '(32)', 'print_freq': '(30000)', 'learning_starts': '(1000)', 'gamma': '(1.0)', 'target_network_update_freq': '(500)', 'prioritized_replay': '(True)', 'prioritized_replay_alpha': '(0.6)'}), '(env, actor_deque=in_actor_deque, action_pipes=in_action_pipes, q_func\n =model, lr=0.0001, max_timesteps=5000000, buffer_size=1000000,\n exploration_fraction=0.3, exploration_final_eps=0.05, train_freq=1,\n batch_size=32, print_freq=30000, learning_starts=1000, gamma=1.0,\n target_network_update_freq=500, prioritized_replay=True,\n prioritized_replay_alpha=0.6)\n', (692, 1066), False, 'from deepq.asyn_sec.simple_multi_agent import learn\n'), ((1549, 1568), 'baselines.common.set_global_seeds', 'set_global_seeds', (['(0)'], {}), '(0)\n', (1565, 1568), False, 'from baselines.common import set_global_seeds\n'), ((1573, 1595), 'p2os_test.src.sixth_edition_gpw.set_gpw_num', 'set_gpw_num', (['in_ac_num'], {}), '(in_ac_num)\n', (1584, 1595), False, 'from p2os_test.src.sixth_edition_gpw import set_gpw_num\n'), ((1617, 1643), 'gym.make', 'gym.make', (['"""SixthRobGpw-v0"""'], {}), "('SixthRobGpw-v0')\n", (1625, 1643), False, 'import gym\n'), ((1689, 1806), 'deepq.asyn_sec.actor_interact_env.actor_inter', 'actor_inter', ([], {'env': 'acenv', 'ac_num': 'in_ac_num', 'actor_deque': 'in_actor_deque', 'action_pipes': 'in_action_pipes', 'print_freq': '(20)'}), '(env=acenv, ac_num=in_ac_num, actor_deque=in_actor_deque,\n action_pipes=in_action_pipes, print_freq=20)\n', (1700, 1806), False, 'from deepq.asyn_sec.actor_interact_env import actor_inter\n'), ((1980, 1996), 'multiprocessing.Queue', 'Queue', ([], {'maxsize': '(5)'}), '(maxsize=5)\n', (1985, 1996), False, 'from multiprocessing import Process, Queue, Pipe\n'), ((2017, 2077), 'multiprocessing.Process', 'Process', ([], {'target': 'trainer', 'args': '(actor_inf_queue, pipes_conn1)'}), '(target=trainer, args=(actor_inf_queue, pipes_conn1))\n', (2024, 2077), False, 'from multiprocessing import Process, Queue, Pipe\n'), ((2101, 2162), 'multiprocessing.Process', 'Process', ([], {'target': 'actor', 'args': '(1, actor_inf_queue, pipes[0][0])'}), '(target=actor, args=(1, actor_inf_queue, pipes[0][0]))\n', (2108, 2162), False, 'from multiprocessing import Process, Queue, Pipe\n'), ((2186, 2247), 'multiprocessing.Process', 'Process', ([], {'target': 'actor', 'args': '(2, actor_inf_queue, pipes[1][0])'}), '(target=actor, args=(2, actor_inf_queue, pipes[1][0]))\n', (2193, 2247), False, 'from multiprocessing import Process, Queue, Pipe\n'), ((2271, 2332), 'multiprocessing.Process', 'Process', ([], {'target': 'actor', 'args': '(3, actor_inf_queue, pipes[2][0])'}), '(target=actor, args=(3, actor_inf_queue, pipes[2][0]))\n', (2278, 2332), False, 'from multiprocessing import Process, Queue, Pipe\n'), ((2356, 2417), 'multiprocessing.Process', 'Process', ([], {'target': 'actor', 'args': '(4, actor_inf_queue, pipes[3][0])'}), '(target=actor, args=(4, actor_inf_queue, pipes[3][0]))\n', (2363, 2417), False, 'from multiprocessing import Process, Queue, Pipe\n'), ((1864, 1882), 'multiprocessing.Pipe', 'Pipe', ([], {'duplex': '(False)'}), '(duplex=False)\n', (1868, 1882), False, 'from multiprocessing import Process, Queue, Pipe\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-10-07 12:48
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.core.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtailcore', '0028_merge'),
]
operations = [
migrations.CreateModel(
name='SimpleGalleryIndex',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('intro_title', models.CharField(blank=True, help_text='Optional H1 title for the gallery page.', max_length=250)),
('intro_text', wagtail.core.fields.RichTextField(blank=True, help_text='Optional text to go with the intro text.')),
('images_per_page', models.IntegerField(default=8, help_text='How many images there should be on one page.')),
('use_lightbox', models.BooleanField(default=True, help_text='Use lightbox to view larger images when clicking the thumbnail.')),
('collection', models.ForeignKey(help_text='Show images in this collection in the gallery view.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Collection')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
]
|
[
"django.db.models.OneToOneField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.IntegerField"
] |
[((483, 653), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': '"""wagtailcore.Page"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, serialize=False, to=\n 'wagtailcore.Page')\n", (503, 653), False, 'from django.db import migrations, models\n'), ((678, 780), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'help_text': '"""Optional H1 title for the gallery page."""', 'max_length': '(250)'}), "(blank=True, help_text=\n 'Optional H1 title for the gallery page.', max_length=250)\n", (694, 780), False, 'from django.db import migrations, models\n'), ((947, 1040), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(8)', 'help_text': '"""How many images there should be on one page."""'}), "(default=8, help_text=\n 'How many images there should be on one page.')\n", (966, 1040), False, 'from django.db import migrations, models\n'), ((1071, 1186), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'help_text': '"""Use lightbox to view larger images when clicking the thumbnail."""'}), "(default=True, help_text=\n 'Use lightbox to view larger images when clicking the thumbnail.')\n", (1090, 1186), False, 'from django.db import migrations, models\n'), ((1215, 1415), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'help_text': '"""Show images in this collection in the gallery view."""', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""+"""', 'to': '"""wagtailcore.Collection"""'}), "(help_text=\n 'Show images in this collection in the gallery view.', null=True,\n on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=\n 'wagtailcore.Collection')\n", (1232, 1415), False, 'from django.db import migrations, models\n')]
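
Each extract_api row above is a flat list of positional tuples; a minimal sketch of how to read one back against its record's code (the helper name and the field interpretation are my assumptions, not part of any dataset spec):

# Assumed layout per tuple: ((start, end), qualified_api, local_name,
# parsed_args, args_source, (args_start, args_end), aliased, import_stmt),
# where both spans look like character offsets into the record's code string.
def calls_in(code, row):
    for (start, end), api, local, *_ in row:
        yield api, code[start:end]  # the call expression as written in the source

Run over the migration record above, this should yield pairs such as ('django.db.models.IntegerField', 'models.IntegerField(default=8, ...)'), assuming the offsets do index the raw code string.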
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 by <NAME>. All rights reserved. This file is part of
# the Robot OS project and is released under the "Apache Licence, Version 2.0".
# Please see the LICENSE file included as part of this package.
#
# author: <NAME>
# created: 2020-01-18
# modified: 2020-03-26
#
import time, threading
from colorama import init, Fore, Style
init()
from lib.abstract_task import AbstractTask
from lib.event import Event
from lib.message import Message
try:
from gpiozero import Button as GpioButton
print('import :' + Fore.BLACK + ' INFO : successfully imported gpiozero Button.' + Style.RESET_ALL)
except ImportError:
print('import :' + Fore.RED + ' ERROR : failed to import gpiozero Button, using mock...' + Style.RESET_ALL)
from .mock_gpiozero import Button as GpioButton
ms = 50 / 1000 # 50ms loop delay
# ..............................................................................
class Button(AbstractTask):
'''
Button Task: reacts to pressing the red button.
Usage:
TOGGLE = False
button = Button(TOGGLE, mutex)
button.start()
value = button.get()
'''
button_priority = 6
def __init__(self, config, queue, mutex):
'''
Parameters:
config: the YAML-based application configuration
queue: the message queue to receive messages from this task
mutex: vs godzilla
'''
super().__init__("button", queue, None, Button.button_priority, mutex)
if config is None:
raise ValueError('no configuration provided.')
self._queue = queue
_config = config['ros'].get('button')
_pin = _config.get('pin')
self._toggle = _config.get('toggle') # if true, the value toggles when the button is pushed rather than acting as a momentary button
self._log.info('initialising button on pin {:d}; toggle={}'.format(_pin, self._toggle))
self._queue = queue
self._button = GpioButton(_pin)
self._value = False
self._log.debug('ready.')
# ......................................................
def run(self):
super(AbstractTask, self).run()
self.enable()
if self._toggle:
self._button.when_released = self.toggle_state
else:
self.polling = threading.Thread(target=Button.poll, args=[self,])
self.polling.start()
# ......................................................
def toggle_state(self):
self._value = not self._value
self._log.info("button toggle: {}".format(self._value))
_message = Message(Event.BUTTON)
_message.set_value(self._value)
if self._queue:
self._queue.add(_message)
# ......................................................
def poll(self):
while self.is_enabled():
if not self._toggle:
self._value = self._button.is_pressed
self._log.debug('button poll.')
time.sleep(ms)
# ..........................................................................
def enable(self):
super().enable()
# ..........................................................................
def disable(self):
super().disable()
# ......................................................
def get(self):
return self._value
# ......................................................
def close(self):
super().close()
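
The Button task above supports the two wiring styles its docstring describes; here are the same two gpiozero idioms in isolation (the BCM pin number is arbitrary and the `state` holder is mine):

from gpiozero import Button as GpioButton

btn = GpioButton(21)  # arbitrary pin, for illustration only

# toggle style: flip a value on each release via the callback hook
state = {'on': False}
btn.when_released = lambda: state.update(on=not state['on'])

# momentary style: sample the current level from a polling loop instead
pressed_now = btn.is_pressed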
|
[
"colorama.init",
"threading.Thread",
"gpiozero.Button",
"lib.message.Message",
"time.sleep"
] |
[((395, 401), 'colorama.init', 'init', ([], {}), '()\n', (399, 401), False, 'from colorama import init, Fore, Style\n'), ((2109, 2125), 'gpiozero.Button', 'GpioButton', (['_pin'], {}), '(_pin)\n', (2119, 2125), True, 'from gpiozero import Button as GpioButton\n'), ((2753, 2774), 'lib.message.Message', 'Message', (['Event.BUTTON'], {}), '(Event.BUTTON)\n', (2760, 2774), False, 'from lib.message import Message\n'), ((2457, 2506), 'threading.Thread', 'threading.Thread', ([], {'target': 'Button.poll', 'args': '[self]'}), '(target=Button.poll, args=[self])\n', (2473, 2506), False, 'import time, threading\n'), ((3140, 3154), 'time.sleep', 'time.sleep', (['ms'], {}), '(ms)\n', (3150, 3154), False, 'import time, threading\n')]
|
import math
import numpy as np
## Real Data:
# %% Kinect Color Camera
color_cam_matrix = np.array([ 1.0526303338534365e+03, 0., 9.3528526085572480e+02, 0., 1.0534191001014469e+03, 5.2225718970556716e+02, 0., 0., 1. ]).reshape(3,3)
color_distortion_coeffs = np.array([ 4.5467150011699140e-02, -7.4470107942918126e-02, -6.1697129558609537e-03, -2.5667037404509380e-03, -1.4503959457133547e-02 ]).reshape(1,5)
color_rotation = np.eye(3)
color_projection = np.array([ 1.0526303338534365e+03, 0., 9.3528526085572480e+02, 0., 0., 1.0534191001014469e+03, 5.2225718970556716e+02, 0., 0., 0., 1., 0., 0., 0., 0., 1. ]).reshape(4,4)
# %% Kinect IR Camera
ir_cam_matrix = np.array([ 3.5706872738709285e+02, 0., 2.5037220752105404e+02, 0., 3.5700920458183873e+02, 2.0803230739018434e+02, 0., 0., 1. ]).reshape(3,3)
ir_distortion_coeffs = np.array([ 5.5998048975189132e-02, -2.5691440815038830e-01, -5.3889184410447575e-03, -1.6922667364749613e-03, 1.9674519800098919e-01 ]).reshape(1,5)
ir_rotation = np.eye(3)
ir_projection = np.array([ 3.5706872738709285e+02, 0., 2.5037220752105404e+02, 0., 0., 3.5700920458183873e+02, 2.0803230739018434e+02, 0., 0., 0., 1., 0., 0., 0., 0., 1. ]).reshape(4,4)
depthShift = -2.7989551644219979e+01
# %% Pose Calibration between depth and color
rotation = np.array([ 9.9997222955499243e-01, -7.4399336788120839e-03, 4.3301925190808763e-04, 7.4347723554060875e-03, 9.9991294780487039e-01, 1.0900503300210780e-02, -5.1408057825089366e-04, -1.0896981188819882e-02, 9.9994049399058227e-01 ]).reshape(3,3)
translation = np.array([ -5.2291985456630448e-02, -1.9227292627499695e-04, 1.7173350151375650e-03 ]).reshape(3,1)
essential = np.array([ -1.2669151118394222e-05, -1.7150903228939863e-03, -2.1098130088050980e-04, 1.6904050298585356e-03, -5.8260164046387006e-04, 5.2289617408374921e-02, -1.9651142111198186e-04, -5.2288863822328481e-02, -5.6992570216587654e-04 ]).reshape(3,3)
fundamental = np.array([ -8.8142664830290771e-09, -1.1934330447023842e-06, 1.9806702972926870e-04, 1.1751792885051283e-06, -4.0509553642475600e-07, 1.2770218257581496e-02, -7.4941574482561516e-04, -3.6972004067303506e-02, 1. ]).reshape(3,3)
# %% Color Params
color_height = 1080
color_width = 1920
color_fov_x = 360 / math.pi * math.atan2(color_width, 2 * color_cam_matrix[0,0])
color_fov_y = 360 / math.pi * math.atan2(color_height, 2 * color_cam_matrix[1,1] )
color_fx = color_cam_matrix[0,0]
color_fy = color_cam_matrix[1,1]
color_cx = color_cam_matrix[0,2]
color_cy = color_cam_matrix[1,2]
color_fx
color_fy
color_fov_x
color_fov_y
# %% IR Field of View, Width, Height computation
ir_width = 512
ir_height = 424
ir_aspect = ir_width / ir_height
depth_fov_x = 360 / math.pi * math.atan2(ir_width, 2 * color_cam_matrix[0,0])
depth_fov_y = 360 / math.pi * math.atan2(ir_height, 2 * color_cam_matrix[1,1])
ir_fx = ir_cam_matrix[0,0]
ir_fy = ir_cam_matrix[1,1]
ir_cx = ir_cam_matrix[0,2]
ir_cy = ir_cam_matrix[1,2]
## transform into camera frame. useful for reconstruction!
T_magic_to_cam = np.array([ [0. ,-1. , 0. , 0. ],
[0. , 0. ,-1. , 0. ],
[1. , 0. , 0. , 0. ],
[0. , 0. , 0. , 1.0]])
## Simulation Camera Params
# %%
znear = 0.1
zfar = 12
sim_width = 192
sim_height = 108
# sim_width = 720 * 4
# sim_height = 405 * 4
old_sim_fovy = 60 * math.pi / 180
old_sim_fovx = 2 * math.atan(math.tan(old_sim_fovy / 2) * sim_width / sim_height)
old_sim_fovy * 180 / math.pi
old_sim_fovx * 180 / math.pi
old_sim_focal_y = (sim_height / 2) / math.tan(old_sim_fovy / 2)
old_sim_focal_x = (sim_width / 2 ) / math.tan(old_sim_fovx / 2)
old_sim_proj_matrix = np.array([[old_sim_focal_x, 0, sim_width / 2],
[0, old_sim_focal_y, sim_height / 2],
[0, 0, 1]])
# new sim cam Params, using color fov_y
sim_focal_y = (sim_height / 2) / math.tan(color_fov_y * 3.14 / 180.0 / 2)
sim_focal_x = sim_focal_y
sim_proj_matrix = np.array([[sim_focal_x, 0, sim_width / 2],
[0, sim_focal_y, sim_height / 2],
[0, 0, 1]])
# checking that these are reasonable
color_fov_x = 360 / math.pi * math.atan2(color_width, 2 * color_cam_matrix[0,0])
color_fov_y = 360 / math.pi * math.atan2(color_height, 2 * color_cam_matrix[1,1] )
color_fov_x
color_fov_y
test_sim_fov_y = 360 / math.pi * math.atan2(sim_height, 2 * sim_proj_matrix[1,1] )
test_sim_fov_x = 360 / math.pi * math.atan2(sim_width, 2 * sim_proj_matrix[0,0] )
# fake real sim cam Params (ie, size is the full 1920 x 1080)
fake_focal_y = (color_height / 2) / math.tan(color_fov_y * 3.14 / 180.0 / 2)
fake_focal_x = (color_width / 2) / math.tan(color_fov_x * 3.14 / 180.0 / 2)
fake_proj_matrix = np.array([[fake_focal_x, 0, color_width / 2],
[0, fake_focal_y, color_height / 2],
[0, 0, 1]])
if __name__ == '__main__':
np.set_printoptions(suppress=True)
print(' \n simulated cam matrix: \n\t', str(np.round(fake_proj_matrix,0)).replace('\n', '\n\t'))
print(' \n real cam matrix: \n\t', str(np.round(color_cam_matrix,0)).replace('\n', '\n\t'))
print(' \n ')
    print(color_fov_y)
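
The pinhole relation fov = 2 * atan2(extent, 2 * focal) recurs throughout the script above; a small helper making the formula explicit (the function name is mine):

import math

def fov_degrees(extent_px, focal_px):
    # tan(fov / 2) = (extent / 2) / focal  =>  fov = 2 * atan2(extent, 2 * focal)
    return 360.0 / math.pi * math.atan2(extent_px, 2.0 * focal_px)

# e.g. fov_degrees(1920, 1052.63) is roughly 84.7 degrees, matching color_fov_x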
|
[
"numpy.set_printoptions",
"math.atan2",
"math.tan",
"numpy.array",
"numpy.eye",
"numpy.round"
] |
[((425, 434), 'numpy.eye', 'np.eye', (['(3)'], {}), '(3)\n', (431, 434), True, 'import numpy as np\n'), ((991, 1000), 'numpy.eye', 'np.eye', (['(3)'], {}), '(3)\n', (997, 1000), True, 'import numpy as np\n'), ((3004, 3109), 'numpy.array', 'np.array', (['[[0.0, -1.0, 0.0, 0.0], [0.0, 0.0, -1.0, 0.0], [1.0, 0.0, 0.0, 0.0], [0.0, \n 0.0, 0.0, 1.0]]'], {}), '([[0.0, -1.0, 0.0, 0.0], [0.0, 0.0, -1.0, 0.0], [1.0, 0.0, 0.0, 0.0\n ], [0.0, 0.0, 0.0, 1.0]])\n', (3012, 3109), True, 'import numpy as np\n'), ((3649, 3750), 'numpy.array', 'np.array', (['[[old_sim_focal_x, 0, sim_width / 2], [0, old_sim_focal_y, sim_height / 2],\n [0, 0, 1]]'], {}), '([[old_sim_focal_x, 0, sim_width / 2], [0, old_sim_focal_y, \n sim_height / 2], [0, 0, 1]])\n', (3657, 3750), True, 'import numpy as np\n'), ((3970, 4062), 'numpy.array', 'np.array', (['[[sim_focal_x, 0, sim_width / 2], [0, sim_focal_y, sim_height / 2], [0, 0, 1]]'], {}), '([[sim_focal_x, 0, sim_width / 2], [0, sim_focal_y, sim_height / 2],\n [0, 0, 1]])\n', (3978, 4062), True, 'import numpy as np\n'), ((4753, 4852), 'numpy.array', 'np.array', (['[[fake_focal_x, 0, color_width / 2], [0, fake_focal_y, color_height / 2], [\n 0, 0, 1]]'], {}), '([[fake_focal_x, 0, color_width / 2], [0, fake_focal_y, \n color_height / 2], [0, 0, 1]])\n', (4761, 4852), True, 'import numpy as np\n'), ((2239, 2290), 'math.atan2', 'math.atan2', (['color_width', '(2 * color_cam_matrix[0, 0])'], {}), '(color_width, 2 * color_cam_matrix[0, 0])\n', (2249, 2290), False, 'import math\n'), ((2320, 2372), 'math.atan2', 'math.atan2', (['color_height', '(2 * color_cam_matrix[1, 1])'], {}), '(color_height, 2 * color_cam_matrix[1, 1])\n', (2330, 2372), False, 'import math\n'), ((2692, 2740), 'math.atan2', 'math.atan2', (['ir_width', '(2 * color_cam_matrix[0, 0])'], {}), '(ir_width, 2 * color_cam_matrix[0, 0])\n', (2702, 2740), False, 'import math\n'), ((2770, 2819), 'math.atan2', 'math.atan2', (['ir_height', '(2 * color_cam_matrix[1, 1])'], {}), '(ir_height, 2 * color_cam_matrix[1, 1])\n', (2780, 2819), False, 'import math\n'), ((3536, 3562), 'math.tan', 'math.tan', (['(old_sim_fovy / 2)'], {}), '(old_sim_fovy / 2)\n', (3544, 3562), False, 'import math\n'), ((3600, 3626), 'math.tan', 'math.tan', (['(old_sim_fovx / 2)'], {}), '(old_sim_fovx / 2)\n', (3608, 3626), False, 'import math\n'), ((3885, 3925), 'math.tan', 'math.tan', (['(color_fov_y * 3.14 / 180.0 / 2)'], {}), '(color_fov_y * 3.14 / 180.0 / 2)\n', (3893, 3925), False, 'import math\n'), ((4191, 4242), 'math.atan2', 'math.atan2', (['color_width', '(2 * color_cam_matrix[0, 0])'], {}), '(color_width, 2 * color_cam_matrix[0, 0])\n', (4201, 4242), False, 'import math\n'), ((4272, 4324), 'math.atan2', 'math.atan2', (['color_height', '(2 * color_cam_matrix[1, 1])'], {}), '(color_height, 2 * color_cam_matrix[1, 1])\n', (4282, 4324), False, 'import math\n'), ((4385, 4434), 'math.atan2', 'math.atan2', (['sim_height', '(2 * sim_proj_matrix[1, 1])'], {}), '(sim_height, 2 * sim_proj_matrix[1, 1])\n', (4395, 4434), False, 'import math\n'), ((4469, 4517), 'math.atan2', 'math.atan2', (['sim_width', '(2 * sim_proj_matrix[0, 0])'], {}), '(sim_width, 2 * sim_proj_matrix[0, 0])\n', (4479, 4517), False, 'import math\n'), ((4617, 4657), 'math.tan', 'math.tan', (['(color_fov_y * 3.14 / 180.0 / 2)'], {}), '(color_fov_y * 3.14 / 180.0 / 2)\n', (4625, 4657), False, 'import math\n'), ((4693, 4733), 'math.tan', 'math.tan', (['(color_fov_x * 3.14 / 180.0 / 2)'], {}), '(color_fov_x * 3.14 / 180.0 / 2)\n', (4701, 4733), False, 'import math\n'), ((4944, 4978), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'suppress': '(True)'}), '(suppress=True)\n', (4963, 4978), True, 'import numpy as np\n'), ((90, 207), 'numpy.array', 'np.array', (['[1052.6303338534365, 0.0, 935.2852608557248, 0.0, 1053.419100101447, \n 522.2571897055672, 0.0, 0.0, 1.0]'], {}), '([1052.6303338534365, 0.0, 935.2852608557248, 0.0, \n 1053.419100101447, 522.2571897055672, 0.0, 0.0, 1.0])\n', (98, 207), True, 'import numpy as np\n'), ((258, 384), 'numpy.array', 'np.array', (['[0.04546715001169914, -0.07447010794291813, -0.006169712955860954, -\n 0.002566703740450938, -0.014503959457133547]'], {}), '([0.04546715001169914, -0.07447010794291813, -0.006169712955860954,\n -0.002566703740450938, -0.014503959457133547])\n', (266, 384), True, 'import numpy as np\n'), ((454, 610), 'numpy.array', 'np.array', (['[1052.6303338534365, 0.0, 935.2852608557248, 0.0, 0.0, 1053.419100101447, \n 522.2571897055672, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0]'], {}), '([1052.6303338534365, 0.0, 935.2852608557248, 0.0, 0.0, \n 1053.419100101447, 522.2571897055672, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0])\n', (462, 610), True, 'import numpy as np\n'), ((663, 783), 'numpy.array', 'np.array', (['[357.06872738709285, 0.0, 250.37220752105404, 0.0, 357.00920458183873, \n 208.03230739018434, 0.0, 0.0, 1.0]'], {}), '([357.06872738709285, 0.0, 250.37220752105404, 0.0, \n 357.00920458183873, 208.03230739018434, 0.0, 0.0, 1.0])\n', (671, 783), True, 'import numpy as np\n'), ((828, 952), 'numpy.array', 'np.array', (['[0.05599804897518913, -0.2569144081503883, -0.0053889184410447575, -\n 0.0016922667364749613, 0.1967451980009892]'], {}), '([0.05599804897518913, -0.2569144081503883, -0.0053889184410447575,\n -0.0016922667364749613, 0.1967451980009892])\n', (836, 952), True, 'import numpy as np\n'), ((1017, 1177), 'numpy.array', 'np.array', (['[357.06872738709285, 0.0, 250.37220752105404, 0.0, 0.0, 357.00920458183873,\n 208.03230739018434, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0]'], {}), '([357.06872738709285, 0.0, 250.37220752105404, 0.0, 0.0, \n 357.00920458183873, 208.03230739018434, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, \n 0.0, 0.0, 1.0])\n', (1025, 1177), True, 'import numpy as np\n'), ((1282, 1499), 'numpy.array', 'np.array', (['[0.9999722295549924, -0.007439933678812084, 0.00043301925190808763, \n 0.0074347723554060875, 0.9999129478048704, 0.01090050330021078, -\n 0.0005140805782508937, -0.010896981188819882, 0.9999404939905823]'], {}), '([0.9999722295549924, -0.007439933678812084, 0.00043301925190808763,\n 0.0074347723554060875, 0.9999129478048704, 0.01090050330021078, -\n 0.0005140805782508937, -0.010896981188819882, 0.9999404939905823])\n', (1290, 1499), True, 'import numpy as np\n'), ((1549, 1628), 'numpy.array', 'np.array', (['[-0.05229198545663045, -0.00019227292627499695, 0.001717335015137565]'], {}), '([-0.05229198545663045, -0.00019227292627499695, 0.001717335015137565])\n', (1557, 1628), True, 'import numpy as np\n'), ((1661, 1898), 'numpy.array', 'np.array', (['[-1.2669151118394222e-05, -0.0017150903228939863, -0.0002109813008805098, \n 0.0016904050298585356, -0.0005826016404638701, 0.05228961740837492, -\n 0.00019651142111198186, -0.05228886382232848, -0.0005699257021658765]'], {}), '([-1.2669151118394222e-05, -0.0017150903228939863, -\n 0.0002109813008805098, 0.0016904050298585356, -0.0005826016404638701, \n 0.05228961740837492, -0.00019651142111198186, -0.05228886382232848, -\n 0.0005699257021658765])\n', (1669, 1898), True, 'import numpy as np\n'), ((1924, 2137), 'numpy.array', 'np.array', (['[-8.814266483029077e-09, -1.1934330447023842e-06, 0.0001980670297292687, \n 1.1751792885051283e-06, -4.05095536424756e-07, 0.012770218257581496, -\n 0.0007494157448256152, -0.036972004067303506, 1.0]'], {}), '([-8.814266483029077e-09, -1.1934330447023842e-06, \n 0.0001980670297292687, 1.1751792885051283e-06, -4.05095536424756e-07, \n 0.012770218257581496, -0.0007494157448256152, -0.036972004067303506, 1.0])\n', (1932, 2137), True, 'import numpy as np\n'), ((3386, 3412), 'math.tan', 'math.tan', (['(old_sim_fovy / 2)'], {}), '(old_sim_fovy / 2)\n', (3394, 3412), False, 'import math\n'), ((5027, 5056), 'numpy.round', 'np.round', (['fake_proj_matrix', '(0)'], {}), '(fake_proj_matrix, 0)\n', (5035, 5056), True, 'import numpy as np\n'), ((5123, 5152), 'numpy.round', 'np.round', (['color_cam_matrix', '(0)'], {}), '(color_cam_matrix, 0)\n', (5131, 5152), True, 'import numpy as np\n')]
|
# pylint: disable=missing-module-docstring
# pylint: disable=missing-class-docstring
# pylint: disable=missing-function-docstring
import pytest
from quantify_scheduler.schemas.examples import utils
@pytest.mark.parametrize(
"filename",
[
"qblox_test_mapping.json",
"transmon_test_config.json",
"zhinst_test_mapping.json",
],
)
def test_load_json_example_scheme(filename: str):
utils.load_json_example_scheme(filename)
|
[
"quantify_scheduler.schemas.examples.utils.load_json_example_scheme",
"pytest.mark.parametrize"
] |
[((202, 327), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""filename"""', "['qblox_test_mapping.json', 'transmon_test_config.json',\n 'zhinst_test_mapping.json']"], {}), "('filename', ['qblox_test_mapping.json',\n 'transmon_test_config.json', 'zhinst_test_mapping.json'])\n", (225, 327), False, 'import pytest\n'), ((420, 460), 'quantify_scheduler.schemas.examples.utils.load_json_example_scheme', 'utils.load_json_example_scheme', (['filename'], {}), '(filename)\n', (450, 460), False, 'from quantify_scheduler.schemas.examples import utils\n')]
|
from deeplearning import tf_util as U
from init import make_env_fn, make_model_fn
from collections import namedtuple
import os, argparse, json
import numpy as np
def eval_robot(args, env, pi):
rewards = []
lengths = []
for j in range(args.nepisodes):
rewards.append(0)
lengths.append(0)
done = False
ob = env.reset()
while not done:
ac = pi.actor.mode(ob[None])[0]
ob, rew, done, _ = env.step(ac)
rewards[-1] += rew
lengths[-1] += 1
return np.mean(lengths), np.mean(rewards)
def main(args):
U.reset()
with open(os.path.join(args.logdir, 'hyps.json'), 'r') as f:
hyps = json.load(f)
train_args = namedtuple('Args', hyps.keys())(**hyps)
env_fn = make_env_fn(train_args)
model_fn = make_model_fn(train_args)
env = env_fn(0)
model = model_fn(env)
model.build('model', 1, 1)
model.sampler.build('model', 1, 1)
sess = U.make_session()
sess.__enter__()
U.initialize()
t = U.Experiment(args.logdir).load(args.ckpt)
ls = []
rs = []
for i in range(args.samples):
env.update_robot(model.sampler.sample(args.stochastic)[0])
l,r = eval_robot(args, env, model)
ls.append(l)
rs.append(r)
if not args.stochastic:
break
os.makedirs(os.path.join(args.logdir, 'eval'), exist_ok=True)
with open(os.path.join(args.logdir, 'eval', '{}.json'.format(t)), 'w') as f:
json.dump({'l':ls, 'r':rs}, f)
sess.__exit__(None, None, None)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Evaluate a Checkpoint')
parser.add_argument('logdir', type=str, help='log directory')
parser.add_argument('-t', '--ckpt', type=int, default=None, help='which checkpoint file to use')
parser.add_argument('-n', '--nepisodes', type=int, default=1, help='n episodes to show')
parser.add_argument('-s', '--samples', type=int, default=1, help='# of robots to sample')
parser.add_argument('--stochastic', type=bool, default=True, help='If false, eval the mode of the robot distribution')
main(parser.parse_args())
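
One caveat in the CLI above: `type=bool` does not parse strings the way the `--stochastic` help text implies, because `bool()` of any non-empty string, including "False", is truthy. A sketch of a parser that behaves as documented (the `str2bool` helper is mine):

import argparse

def str2bool(s):
    return s.lower() not in ('false', '0', 'no')

demo_parser = argparse.ArgumentParser()
demo_parser.add_argument('--stochastic', type=str2bool, default=True)
assert demo_parser.parse_args(['--stochastic', 'False']).stochastic is False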
|
[
"json.dump",
"json.load",
"deeplearning.tf_util.initialize",
"argparse.ArgumentParser",
"init.make_env_fn",
"deeplearning.tf_util.reset",
"deeplearning.tf_util.make_session",
"numpy.mean",
"init.make_model_fn",
"os.path.join",
"deeplearning.tf_util.Experiment"
] |
[((601, 610), 'deeplearning.tf_util.reset', 'U.reset', ([], {}), '()\n', (608, 610), True, 'from deeplearning import tf_util as U\n'), ((775, 798), 'init.make_env_fn', 'make_env_fn', (['train_args'], {}), '(train_args)\n', (786, 798), False, 'from init import make_env_fn, make_model_fn\n'), ((814, 839), 'init.make_model_fn', 'make_model_fn', (['train_args'], {}), '(train_args)\n', (827, 839), False, 'from init import make_env_fn, make_model_fn\n'), ((969, 985), 'deeplearning.tf_util.make_session', 'U.make_session', ([], {}), '()\n', (983, 985), True, 'from deeplearning import tf_util as U\n'), ((1011, 1025), 'deeplearning.tf_util.initialize', 'U.initialize', ([], {}), '()\n', (1023, 1025), True, 'from deeplearning import tf_util as U\n'), ((1602, 1662), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Evaluate a Checkpoint"""'}), "(description='Evaluate a Checkpoint')\n", (1625, 1662), False, 'import os, argparse, json\n'), ((545, 561), 'numpy.mean', 'np.mean', (['lengths'], {}), '(lengths)\n', (552, 561), True, 'import numpy as np\n'), ((563, 579), 'numpy.mean', 'np.mean', (['rewards'], {}), '(rewards)\n', (570, 579), True, 'import numpy as np\n'), ((692, 704), 'json.load', 'json.load', (['f'], {}), '(f)\n', (701, 704), False, 'import os, argparse, json\n'), ((1355, 1388), 'os.path.join', 'os.path.join', (['args.logdir', '"""eval"""'], {}), "(args.logdir, 'eval')\n", (1367, 1388), False, 'import os, argparse, json\n'), ((1494, 1526), 'json.dump', 'json.dump', (["{'l': ls, 'r': rs}", 'f'], {}), "({'l': ls, 'r': rs}, f)\n", (1503, 1526), False, 'import os, argparse, json\n'), ((626, 664), 'os.path.join', 'os.path.join', (['args.logdir', '"""hyps.json"""'], {}), "(args.logdir, 'hyps.json')\n", (638, 664), False, 'import os, argparse, json\n'), ((1034, 1059), 'deeplearning.tf_util.Experiment', 'U.Experiment', (['args.logdir'], {}), '(args.logdir)\n', (1046, 1059), True, 'from deeplearning import tf_util as U\n')]
|
import logging
from albow.widgets.Control import Control
from albow.input.TextEditor import TextEditor
class Field(Control, TextEditor):
"""
Field is an abstract base class for controls that edit a value with a textual representation. It provides
facilities for
    - converting between the text and internal representations of the value,
    - specifying minimum and maximum allowed values, and
    - controlling whether the value is allowed to be empty and what representation to use for an empty value.
A Field can be in two states, _editing_ and _non-editing_. In the non-editing state, the control displays
the value to which it is linked via its `ref` attribute. When the user focuses the control and begins typing,
it switches to the editing state. In this state, the text may be edited but the associated value is not yet
updated. When the `Return`, `Enter` or `Tab key` is pressed, or a mouse click occurs anywhere outside the field,
the value is updated and the control returns to the non-editing state. Updating of the value can also be
forced by calling the `commit()` method.
"""
DEFAULT_WIDTH = 100
empty = NotImplemented
"""
Internal value to use when the field is empty. If set to NotImplemented, the user is not allowed to enter
an empty value.
"""
format = "%s"
"""
Format string to use when converting the internal representation to text. See also format_value() below.
"""
min: int = None
"""
Minimum allowable value. If `None`, no minimum value will be enforced.
"""
max: int = None
"""
Maximum allowable value. If `None`, no maximum value will be enforced.
"""
type = None
"""
A function for converting from text to the internal representation. Typically a type object, but
can be any callable object.
"""
editing: bool = None
"""
_Read only_. A boolean which is true when the control is in the editing state.
"""
insertion_point = None
def __init__(self, width=None, **kwds):
"""
Args:
width: The width may be an integer or a string, as for TextEditor. If no width is specified, but a
value for min and/or max is specified at construction time, the width will be determined from
the min or max value. If no other way of determining the width is available, it defaults to 100.
**kwds:
"""
self.logger = logging.getLogger(__name__)
if 'format' in kwds:
self.format = kwds.pop('format')
if 'empty' in kwds:
self.empty = kwds.pop('empty')
self.editing = False
predictedWidth = self._predictWidth(kwds, width)
TextEditor.__init__(self, width=predictedWidth, **kwds)
def _predictWidth(self, kwds, theWidth):
minimum = self.predict_attr(kwds, 'min')
maximum = self.predict_attr(kwds, 'max')
predictedWidth = theWidth
if theWidth is None:
w1 = 0
w2 = 0
if minimum is not None:
w1 = minimum
if maximum is not None:
w2 = maximum
if w1 > w2:
predictedWidth = w1
else:
predictedWidth = w2
if predictedWidth == 0 and theWidth is None:
predictedWidth = Field.DEFAULT_WIDTH
self.logger.debug(f"predictedWidth: {predictedWidth}")
return predictedWidth
def format_value(self, theValueToFormat):
"""
This method is called to format the value for display. By default it uses the format string specified by
the format attribute. You can override this method to format the value in a different way.
Args:
theValueToFormat: The value
Returns: The formatted value
"""
if theValueToFormat == self.empty:
return ""
else:
return self.format % theValueToFormat
def get_text(self):
if self.editing:
return self._text
else:
return self.format_value(self.value)
def set_text(self, theNewText):
self.editing = True
self._text = theNewText
def enter_action(self):
if self.editing:
self.commit()
return 'pass'
def escape_action(self):
if self.editing:
self.editing = False
self.insertion_point = None
else:
return 'pass'
def attention_lost(self):
self.commit()
def commit(self):
"""
When in the editing state, causes the control's value to be updated and places the control
in the non-editing state.
"""
if self.editing:
text = self._text
if text:
try:
value = self.type(text)
except ValueError:
return
if self.min is not None:
value = max(self.min, value)
if self.max is not None:
value = min(self.max, value)
else:
value = self.empty
if value is NotImplemented:
return
self.value = value
self.editing = False
self.insertion_point = None
else:
self.insertion_point = None
|
[
"albow.input.TextEditor.TextEditor.__init__",
"logging.getLogger"
] |
[((2501, 2528), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (2518, 2528), False, 'import logging\n'), ((2770, 2825), 'albow.input.TextEditor.TextEditor.__init__', 'TextEditor.__init__', (['self'], {'width': 'predictedWidth'}), '(self, width=predictedWidth, **kwds)\n', (2789, 2825), False, 'from albow.input.TextEditor import TextEditor\n')]
|
# This Python file uses the following encoding: utf-8
# It has been edited by fix-complaints.py .
#############################################################################
##
## Copyright (C) 2019 The Qt Company Ltd.
## Contact: https://www.qt.io/licensing/
##
## This file is part of Qt for Python.
##
## $QT_BEGIN_LICENSE:LGPL$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms
## and conditions see https://www.qt.io/terms-conditions. For further
## information use the contact form at https://www.qt.io/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 3 as published by the Free Software
## Foundation and appearing in the file LICENSE.LGPL3 included in the
## packaging of this file. Please review the following information to
## ensure the GNU Lesser General Public License version 3 requirements
## will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 2.0 or (at your option) the GNU General
## Public license version 3 or any later version approved by the KDE Free
## Qt Foundation. The licenses are as published by the Free Software
## Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
## included in the packaging of this file. Please review the following
## information to ensure the GNU General Public License requirements will
## be met: https://www.gnu.org/licenses/gpl-2.0.html and
## https://www.gnu.org/licenses/gpl-3.0.html.
##
## $QT_END_LICENSE$
##
#############################################################################
"""
hello.py
--------
This simple script shows a label with changing "Hello World" messages.
It can be used directly as a script, but we use it also to automatically
test PyInstaller. See testing/wheel_tester.py .
When used with PyInstaller, it automatically stops its execution after
2 seconds.
"""
from __future__ import print_function
import sys
import random
import platform
import time
from PySide2.QtWidgets import (QApplication, QLabel, QPushButton,
QVBoxLayout, QWidget)
from PySide2.QtCore import Slot, Qt, QTimer
class MyWidget(QWidget):
def __init__(self):
QWidget.__init__(self)
self.hello = ["<NAME>", "你好,世界", "<NAME>",
"<NAME>", "Привет мир"]
self.button = QPushButton("Click me!")
self.text = QLabel("Hello World embedded={}".format(sys.pyside_uses_embedding))
self.text.setAlignment(Qt.AlignCenter)
self.layout = QVBoxLayout()
self.layout.addWidget(self.text)
self.layout.addWidget(self.button)
self.setLayout(self.layout)
# Connecting the signal
self.button.clicked.connect(self.magic)
@Slot()
def magic(self):
self.text.setText(random.choice(self.hello))
if __name__ == "__main__":
print("Start of hello.py ", time.ctime())
print(" sys.version = {}".format(sys.version.splitlines()[0]))
print(" platform.platform() = {}".format(platform.platform()))
app = QApplication()
widget = MyWidget()
widget.resize(800, 600)
widget.show()
if sys.pyside_uses_embedding:
        milliseconds = 2 * 1000  # run for 2 seconds
QTimer.singleShot(milliseconds, app.quit)
retcode = app.exec_()
print("End of hello.py ", time.ctime())
sys.exit(retcode)
|
[
"PySide2.QtCore.Slot",
"PySide2.QtWidgets.QApplication",
"PySide2.QtWidgets.QWidget.__init__",
"time.ctime",
"PySide2.QtCore.QTimer.singleShot",
"random.choice",
"platform.platform",
"PySide2.QtWidgets.QVBoxLayout",
"PySide2.QtWidgets.QPushButton",
"sys.version.splitlines",
"sys.exit"
] |
[((3152, 3158), 'PySide2.QtCore.Slot', 'Slot', ([], {}), '()\n', (3156, 3158), False, 'from PySide2.QtCore import Slot, Qt, QTimer\n'), ((3467, 3481), 'PySide2.QtWidgets.QApplication', 'QApplication', ([], {}), '()\n', (3479, 3481), False, 'from PySide2.QtWidgets import QApplication, QLabel, QPushButton, QVBoxLayout, QWidget\n'), ((3765, 3782), 'sys.exit', 'sys.exit', (['retcode'], {}), '(retcode)\n', (3773, 3782), False, 'import sys\n'), ((2614, 2636), 'PySide2.QtWidgets.QWidget.__init__', 'QWidget.__init__', (['self'], {}), '(self)\n', (2630, 2636), False, 'from PySide2.QtWidgets import QApplication, QLabel, QPushButton, QVBoxLayout, QWidget\n'), ((2748, 2772), 'PySide2.QtWidgets.QPushButton', 'QPushButton', (['"""Click me!"""'], {}), "('Click me!')\n", (2759, 2772), False, 'from PySide2.QtWidgets import QApplication, QLabel, QPushButton, QVBoxLayout, QWidget\n'), ((2931, 2944), 'PySide2.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (2942, 2944), False, 'from PySide2.QtWidgets import QApplication, QLabel, QPushButton, QVBoxLayout, QWidget\n'), ((3298, 3310), 'time.ctime', 'time.ctime', ([], {}), '()\n', (3308, 3310), False, 'import time\n'), ((3642, 3683), 'PySide2.QtCore.QTimer.singleShot', 'QTimer.singleShot', (['milliseconds', 'app.quit'], {}), '(milliseconds, app.quit)\n', (3659, 3683), False, 'from PySide2.QtCore import Slot, Qt, QTimer\n'), ((3747, 3759), 'time.ctime', 'time.ctime', ([], {}), '()\n', (3757, 3759), False, 'import time\n'), ((3206, 3231), 'random.choice', 'random.choice', (['self.hello'], {}), '(self.hello)\n', (3219, 3231), False, 'import random\n'), ((3434, 3453), 'platform.platform', 'platform.platform', ([], {}), '()\n', (3451, 3453), False, 'import platform\n'), ((3358, 3382), 'sys.version.splitlines', 'sys.version.splitlines', ([], {}), '()\n', (3380, 3382), False, 'import sys\n')]
|
# -*- coding: utf-8 -*-
import re
import time
from ..base.account import BaseAccount
class DepositfilesCom(BaseAccount):
__name__ = "DepositfilesCom"
__type__ = "account"
__version__ = "0.39"
__status__ = "testing"
__pyload_version__ = "0.5"
__description__ = """Depositfiles.com account plugin"""
__license__ = "GPLv3"
__authors__ = [
("mkaay", "<EMAIL>"),
("stickell", "<EMAIL>"),
("<NAME>", "<EMAIL>"),
]
def grab_info(self, user, password, data):
html = self.load("https://dfiles.eu/de/gold/")
validuntil = re.search(
r"Sie haben Gold Zugang bis: <b>(.*?)</b></div>", html
).group(1)
validuntil = time.mktime(time.strptime(validuntil, "%Y-%m-%d %H:%M:%S"))
return {"validuntil": validuntil, "trafficleft": -1}
def signin(self, user, password, data):
html = self.load(
"https://dfiles.eu/de/login.php",
get={"return": "/de/gold/payment.php"},
post={"login": user, "password": password},
)
if (
r'<div class="error_message">Sie haben eine falsche Benutzername-Passwort-Kombination verwendet.</div>'
in html
):
self.fail_login()
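
grab_info above turns the scraped expiry string into an epoch timestamp in two steps; the same conversion in isolation (the sample date is made up):

import time

# strptime parses the text into a struct_time; mktime interprets that
# struct_time in the local timezone and returns seconds since the epoch
expiry = time.mktime(time.strptime("2024-05-01 12:00:00", "%Y-%m-%d %H:%M:%S"))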
|
[
"time.strptime",
"re.search"
] |
[((729, 775), 'time.strptime', 'time.strptime', (['validuntil', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(validuntil, '%Y-%m-%d %H:%M:%S')\n", (742, 775), False, 'import time\n'), ((598, 662), 're.search', 're.search', (['"""Sie haben Gold Zugang bis: <b>(.*?)</b></div>"""', 'html'], {}), "('Sie haben Gold Zugang bis: <b>(.*?)</b></div>', html)\n", (607, 662), False, 'import re\n')]
|
# Copyright Contributors to the Packit project.
# SPDX-License-Identifier: MIT
from requre.import_system import UpgradeImportSystem
from requre.simple_object import Simple
FILTERS = UpgradeImportSystem().decorate("time.sleep", Simple.decorator_plain())
|
[
"requre.import_system.UpgradeImportSystem",
"requre.simple_object.Simple.decorator_plain"
] |
[((229, 253), 'requre.simple_object.Simple.decorator_plain', 'Simple.decorator_plain', ([], {}), '()\n', (251, 253), False, 'from requre.simple_object import Simple\n'), ((184, 205), 'requre.import_system.UpgradeImportSystem', 'UpgradeImportSystem', ([], {}), '()\n', (203, 205), False, 'from requre.import_system import UpgradeImportSystem\n')]
|
"""Added tags table
Revision ID: 42b486977799
Revises: <PASSWORD>
Create Date: 2014-02-05 23:57:37.029556
"""
# revision identifiers, used by Alembic.
revision = '42b486977799'
down_revision = '<PASSWORD>'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table('tags',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('tag_name', sa.UnicodeText, nullable=False),
sa.Column('tag_type', sa.Enum('PERSON', 'LOCATION', 'OTHER', name="tag_types")))
op.create_table('news_tags',
sa.Column('tag_id', sa.Integer, sa.ForeignKey('tags.id', ondelete="CASCADE")),
sa.Column('news_id', sa.String, sa.ForeignKey('news.id', ondelete="CASCADE")))
op.create_index('tags_name', 'tags', ['tag_name'])
def downgrade():
op.drop_index('tags_name')
op.drop_table('news_tags')
op.drop_table('tags')
|
[
"alembic.op.drop_table",
"sqlalchemy.Enum",
"alembic.op.drop_index",
"alembic.op.create_index",
"sqlalchemy.ForeignKey",
"sqlalchemy.Column"
] |
[((780, 830), 'alembic.op.create_index', 'op.create_index', (['"""tags_name"""', '"""tags"""', "['tag_name']"], {}), "('tags_name', 'tags', ['tag_name'])\n", (795, 830), False, 'from alembic import op\n'), ((854, 880), 'alembic.op.drop_index', 'op.drop_index', (['"""tags_name"""'], {}), "('tags_name')\n", (867, 880), False, 'from alembic import op\n'), ((885, 911), 'alembic.op.drop_table', 'op.drop_table', (['"""news_tags"""'], {}), "('news_tags')\n", (898, 911), False, 'from alembic import op\n'), ((916, 937), 'alembic.op.drop_table', 'op.drop_table', (['"""tags"""'], {}), "('tags')\n", (929, 937), False, 'from alembic import op\n'), ((322, 367), 'sqlalchemy.Column', 'sa.Column', (['"""id"""', 'sa.Integer'], {'primary_key': '(True)'}), "('id', sa.Integer, primary_key=True)\n", (331, 367), True, 'import sqlalchemy as sa\n'), ((389, 442), 'sqlalchemy.Column', 'sa.Column', (['"""tag_name"""', 'sa.UnicodeText'], {'nullable': '(False)'}), "('tag_name', sa.UnicodeText, nullable=False)\n", (398, 442), True, 'import sqlalchemy as sa\n'), ((486, 542), 'sqlalchemy.Enum', 'sa.Enum', (['"""PERSON"""', '"""LOCATION"""', '"""OTHER"""'], {'name': '"""tag_types"""'}), "('PERSON', 'LOCATION', 'OTHER', name='tag_types')\n", (493, 542), True, 'import sqlalchemy as sa\n'), ((630, 674), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""tags.id"""'], {'ondelete': '"""CASCADE"""'}), "('tags.id', ondelete='CASCADE')\n", (643, 674), True, 'import sqlalchemy as sa\n'), ((729, 773), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""news.id"""'], {'ondelete': '"""CASCADE"""'}), "('news.id', ondelete='CASCADE')\n", (742, 773), True, 'import sqlalchemy as sa\n')]
|
"""
Bot that wishes happy birthday
"""
import datetime
import json
import logging
import math
import os
import requests
import sys
from dotenv import load_dotenv
from telegram import Update
from telegram.ext import Updater, CommandHandler, CallbackContext
# Load .env file
load_dotenv()
TOKEN = os.getenv("TOKEN")
# Get ordinal function
ordinal = lambda n: "%d%s" % (
n,
"tsnrhtdd"[(math.floor(n / 10) % 10 != 1) * (n % 10 < 4) * n % 10 :: 4],
)
# Enabling logging
logging.basicConfig(
level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger()
def start_handler(update: Update, context: CallbackContext):
"""Handles the /start command"""
logger.info("User {} started bot".format(update.effective_user["id"]))
update.message.reply_text(
"Hello there! You have succesfully initiated the birthday wishing bot"
)
context.job_queue.run_once(
wishHB, 0, context={"context": update.message.chat_id, "first": True}
)
    nextHour = (datetime.datetime.utcnow().hour + 1) % 24  # wrap 24 -> 0; datetime.time() rejects hour 24
context.job_queue.run_repeating(
wishHB,
900,
context={"context": update.message.chat_id, "first": False},
first=datetime.time(nextHour),
) # Timezones can have offsets of 15 minutes and 15min = 900s
def wishHB(context: CallbackContext):
"""Wishes happy birthday"""
bdays = getBdays()
job = context.job
now = datetime.datetime.utcnow()
logger.info("RUN")
for p in bdays:
month = [p["utc_dob"].month, now.month]
day = [p["utc_dob"].day, now.day]
hour = [p["utc_dob"].hour, now.hour]
minute = [p["utc_dob"].minute, now.minute]
checkArr = [month, day, hour, minute]
if job.context["first"]:
there = now + p["delta"]
if there.day == p["dob"].day:
checkArr = [checkArr[0],]
if any(l[0] != l[1] for l in checkArr):
continue
age = now.year - p["utc_dob"].year
logger.info(
"Found birthday for {}! Wishing...".format(
p["username"] if len(p["username"]) else p["name"]
)
)
context.bot.send_message(
job.context["context"],
"Happy {} birthday {}!".format(
ordinal(age), p["username"] if len(p["username"]) else p["name"]
),
)
def getBdays():
"""Parses the birthdays.json file"""
# data = requests.get(
# "https://raw.githubusercontent.com/diogoscf/telegram-birthday-bot/master/birthdays.json"
# ).json()
data = json.load(open("birthdays.json", "r", encoding="utf-8"))
output = []
for p in data:
diff = [int(x) for x in p["tz"].replace("UTC", "").split(":")]
delta = datetime.timedelta(hours=diff[0], minutes=diff[1])
output.append(
{
"name": p["name"],
"dob": datetime.datetime.strptime(p["dob"], "%d.%m.%Y"),
"utc_dob": datetime.datetime.strptime(p["dob"], "%d.%m.%Y") - delta,
"username": p["username"],
"delta": delta,
}
)
return output
if __name__ == "__main__":
logger.info("Starting script")
updater = Updater(TOKEN, use_context=True)
updater.dispatcher.add_handler(CommandHandler("start", start_handler))
# updater.dispatcher.add_handler(CommandHandler('stop', Stop_timer, pass_job_queue=True))
updater.start_polling()
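
The `ordinal` lambda above is the compact slice trick for English ordinal suffixes; an equivalent explicit version (the name is mine) that behaves identically for non-negative n, including the 11th-13th special case:

def ordinal_explicit(n):
    if n // 10 % 10 == 1:  # tens digit 1: 11th, 12th, 13th, 111th, ...
        suffix = 'th'
    else:
        suffix = {1: 'st', 2: 'nd', 3: 'rd'}.get(n % 10, 'th')
    return '%d%s' % (n, suffix)

# ordinal_explicit(1) == '1st'; ordinal_explicit(12) == '12th'; ordinal_explicit(23) == '23rd'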
|
[
"logging.basicConfig",
"math.floor",
"dotenv.load_dotenv",
"datetime.datetime.utcnow",
"telegram.ext.Updater",
"datetime.datetime.strptime",
"datetime.timedelta",
"datetime.time",
"telegram.ext.CommandHandler",
"os.getenv",
"logging.getLogger"
] |
[((276, 289), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (287, 289), False, 'from dotenv import load_dotenv\n'), ((299, 317), 'os.getenv', 'os.getenv', (['"""TOKEN"""'], {}), "('TOKEN')\n", (308, 317), False, 'import os\n'), ((479, 575), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(asctime)s - %(levelname)s - %(message)s"""'}), "(level=logging.INFO, format=\n '%(asctime)s - %(levelname)s - %(message)s')\n", (498, 575), False, 'import logging\n'), ((586, 605), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (603, 605), False, 'import logging\n'), ((1433, 1459), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (1457, 1459), False, 'import datetime\n'), ((3255, 3287), 'telegram.ext.Updater', 'Updater', (['TOKEN'], {'use_context': '(True)'}), '(TOKEN, use_context=True)\n', (3262, 3287), False, 'from telegram.ext import Updater, CommandHandler, CallbackContext\n'), ((2779, 2829), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': 'diff[0]', 'minutes': 'diff[1]'}), '(hours=diff[0], minutes=diff[1])\n', (2797, 2829), False, 'import datetime\n'), ((3324, 3362), 'telegram.ext.CommandHandler', 'CommandHandler', (['"""start"""', 'start_handler'], {}), "('start', start_handler)\n", (3338, 3362), False, 'from telegram.ext import Updater, CommandHandler, CallbackContext\n'), ((1029, 1055), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (1053, 1055), False, 'import datetime\n'), ((1214, 1237), 'datetime.time', 'datetime.time', (['nextHour'], {}), '(nextHour)\n', (1227, 1237), False, 'import datetime\n'), ((2925, 2973), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["p['dob']", '"""%d.%m.%Y"""'], {}), "(p['dob'], '%d.%m.%Y')\n", (2951, 2973), False, 'import datetime\n'), ((3002, 3050), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["p['dob']", '"""%d.%m.%Y"""'], {}), "(p['dob'], '%d.%m.%Y')\n", (3028, 3050), False, 'import datetime\n'), ((396, 414), 'math.floor', 'math.floor', (['(n / 10)'], {}), '(n / 10)\n', (406, 414), False, 'import math\n')]
|
from flask import Flask, jsonify
from flask import request, render_template
from flask_cors import CORS
import json, os, glob, requests
import base64
from settings import *
from bs4 import BeautifulSoup
import yaml
import re
import string, random
import uuid
app = Flask(__name__)
CORS(app)
annotations = []
@app.route('/create_annotations/', methods=['POST'])
def create_anno():
response = json.loads(request.data)
data_object = response['json']
list_file_path = get_list_filepath(data_object)
uniqid = str(uuid.uuid1())
data_object['@id'] = "{}{}.json".format(origin_url, uniqid)
cleanobject = cleananno(data_object)
updatelistdata(list_file_path, cleanobject)
file_path = os.path.join(filepath, uniqid) + '.json'
writeannos(file_path, cleanobject)
return jsonify(data_object), 201
@app.route('/update_annotations/', methods=['POST'])
def update_anno():
response = json.loads(request.data)
data_object = response['json']
id = cleanid(data_object['@id'])
origin_url_id = "{}{}".format(origin_url, id)
data_object['@id'] = origin_url_id if data_object['@id'] != origin_url_id else data_object['@id']
cleanobject = cleananno(data_object)
file_path = os.path.join(filepath, id)
list_file_path = get_list_filepath(cleanobject)
writeannos(file_path, cleanobject)
newlist = updatelistdata(list_file_path, cleanobject)
return jsonify(data_object), 201
@app.route('/delete_annotations/', methods=['DELETE', 'POST'])
def delete_anno():
response = json.loads(request.data)
id = cleanid(response['id'])
deletefiles = [os.path.join(filepath, id), os.path.join(search_filepath, id).replace('.json', '.md')]
list_file_path = get_list_filepath(str(response['listuri']))
listlength = updatelistdata(list_file_path, {'@id': response['id'], 'delete': True})
if listlength <= 0:
deletefiles.append(list_file_path)
delete_annos(deletefiles)
return jsonify({"File Removed": True}), 201
@app.route('/write_annotation/', methods=['POST'])
def write_annotation():
data = json.loads(request.data)
json_data = data['json']
file = filepath if data['type'] == 'annotation' else '_ranges'
filename = os.path.join(file, data['filename'])
for id in data['deleteids']:
fileid = cleanid(id)
deletefiles = [os.path.join(filepath, fileid), os.path.join(search_filepath, fileid).replace('.json', '.md')]
delete_annos(deletefiles)
if 'list' in json_data['@type'].lower() or 'page' in json_data['@type'].lower():
for anno in json_data['resources']:
id = cleanid(anno['@id'])
single_filename = os.path.join(file, id)
writeannos(single_filename, anno)
writeannos(filename, json_data)
return jsonify({"Annotations Written": True}), 201
def cleananno(data_object):
field = 'resource' if 'resource' in data_object.keys() else 'body'
charfield = 'chars' if 'resource' in data_object.keys() else 'value'
if field in data_object.keys():
for item in data_object[field]:
replace = re.finditer(r'<iiif-(.*?)><\/iiif-(.*?)>', item[charfield])
for rep in replace:
                replacestring = rep.group().replace("<", "&lt;").replace(">", "&gt;").replace('"', "&quot;")
item[charfield] = item[charfield].replace(rep.group(), replacestring)
return data_object
def cleanid(id):
return id.split('/')[-1].replace('.json', '') + '.json'
def delete_annos(annolist):
for anno in annolist:
if github_repo == "":
os.remove(anno)
else:
existing = github_get_existing(anno)
if 'sha' in existing:
data = createdatadict(anno, 'delete', existing['sha'])
payload = {'ref': github_branch}
requests.delete("{}/{}".format(github_url, anno), headers={'Authorization': 'token {}'.format(github_token)}, data=json.dumps(data), params=payload)
def get_list_filepath(data_object):
if type(data_object) == str:
targetid = data_object
elif 'on' in data_object.keys():
targetid = data_object['on'][0]['full']
else:
targetid = data_object['target']['id']
regex = re.compile('[0-9]')
numbitems = [item for item in targetid.split('/') if bool(regex.search(item)) and len(item) > 2 and ':5555' not in item]
targetid = '-'.join(numbitems) if len(numbitems) > 0 else targetid
targetid = targetid.split("#xywh")[0]
listid = targetid.split('/')[-1].replace("_", "-").replace(":", "").replace(".json", "").replace(".", "").lower()
listfilename = "{}-list.json".format(listid)
list_file_path = os.path.join(filepath, listfilename)
return list_file_path
def github_get_existing(filename):
full_url = github_url + "/{}".format(filename)
payload = {'ref': github_branch}
existing = requests.get(full_url, headers={'Authorization': 'token {}'.format(github_token)}, params=payload).json()
return existing
def get_list_data(filepath):
if github_repo == "":
if os.path.exists(filepath):
filecontents = open(filepath).read()
jsoncontent = json.loads(filecontents.split("---")[-1].strip())
return jsoncontent
else:
return False
else:
existing = github_get_existing(filepath)
if 'content' in existing.keys():
content = base64.b64decode(existing['content']).split("---")[-1].strip()
jsoncontent = json.loads(content)
return jsoncontent
else:
return False
def updatelistdata(list_file_path, newannotation):
listdata = get_list_data(list_file_path)
newannoid = newannotation['@id'].split('/')[-1]
if listdata:
listindex = [i for i, res in enumerate(listdata['resources']) if res['@id'].split('/')[-1] == newannoid ]
listindex = listindex[0] if len(listindex) > 0 else None
if 'delete' in newannotation.keys() and listindex != None:
del listdata['resources'][listindex]
elif listindex != None:
listdata['resources'][listindex] = newannotation
else:
listdata['resources'].append(newannotation)
listdata = updatelistdate(newannotation, listdata)
elif 'delete' not in newannotation.keys():
listdata = create_list([newannotation], newannotation['@context'], newannoid)
listdata = updatelistdate(newannotation, listdata, True)
if listdata:
writeannos(list_file_path, listdata)
length = len(listdata['resources']) if listdata else 1
return length
def updatelistdate(singleanno, annolist, created=False):
if created and 'created' in singleanno.keys():
annolist['created'] = singleanno['created']
elif 'created' in singleanno.keys():
annolist['modified'] = singleanno['created']
if created and 'oa:annotatedAt' in singleanno.keys():
annolist['oa:annotatedAt'] = singleanno['oa:annotatedAt']
elif 'oa:annotatedAt' in singleanno.keys():
annolist['oa:serializedAt'] = singleanno['oa:annotatedAt']
if 'modified' in singleanno.keys():
annolist['modified'] = singleanno['modified']
if 'oa:serializedAt' in singleanno.keys():
annolist['oa:serializedAt'] = singleanno['oa:serializedAt']
return annolist
def writeannos(file_path, data_object):
if 'list' not in file_path and 'ranges' not in file_path:
get_search(data_object, file_path)
if github_repo == '':
writetofile(file_path, data_object)
else:
writetogithub(file_path, data_object)
def create_list(annotation, context, id):
if 'w3.org' in context:
formated_annotation = {"@context":"http://www.w3.org/ns/anno.jsonld",
"@type": "AnnotationPage", "id": "%s%s-list.json"% (origin_url, id), "resources": annotation}
else:
formated_annotation = {"@context":"http://iiif.io/api/presentation/2/context.json",
"@type": "sc:AnnotationList", "@id": "%s%s-list.json"% (origin_url, id), "resources": annotation }
return formated_annotation
def writetogithub(filename, annotation, yaml=False):
full_url = github_url + "/{}".format(filename)
sha = ''
existing = github_get_existing(filename)
if 'sha' in existing.keys():
sha = existing['sha']
anno_text = annotation if yaml else "---\nlayout: null\n---\n" + json.dumps(annotation)
data = createdatadict(filename, anno_text, sha)
response = requests.put(full_url, data=json.dumps(data), headers={'Authorization': 'token {}'.format(github_token), 'charset': 'utf-8'})
def createdatadict(filename, text, sha):
writeordelete = "write" if text != 'delete' else "delete"
message = "{} {}".format(writeordelete, filename)
data = {"message":message, "content": base64.b64encode(text), "branch": github_branch }
if sha != '':
data['sha'] = sha
return data
def writetofile(filename, annotation, yaml=False):
anno_text = annotation if yaml else "---\nlayout: null\n---\n" + json.dumps(annotation)
with open(filename, 'w') as outfile:
outfile.write(anno_text)
def get_search(anno, filename):
imagescr = '<iiif-annotation annotationurl="{}" styling="image_only:true"></iiif-annotation>'.format(anno['@id'])
listname = get_list_filepath(anno).split('/')[-1]
annodata_data = {'tags': [], 'layout': 'searchview', 'listname': listname, 'content': [], 'imagescr': imagescr, 'datecreated':'', 'datemodified': ''}
if 'oa:annotatedAt' in anno.keys():
annodata_data['datecreated'] = encodedecode(anno['oa:annotatedAt'])
if 'created' in anno.keys():
annodata_data['datecreated'] = encodedecode(anno['created'])
if 'oa:serializedAt' in anno.keys():
annodata_data['datemodified'] = encodedecode(anno['oa:serializedAt'])
if 'modified' in anno.keys():
annodata_data['datemodified'] = encodedecode(anno['modified'])
annodata_filename = os.path.join(search_filepath, filename.split('/')[-1].replace('.json', '')) + '.md'
textdata = anno['resource'] if 'resource' in anno.keys() else anno['body']
textdata = textdata if type(textdata) == list else [textdata]
for resource in textdata:
chars = BeautifulSoup(resource['chars'], 'html.parser').get_text() if 'chars' in resource.keys() else ''
chars = encodedecode(chars)
if chars and 'tag' in resource['@type'].lower():
annodata_data['tags'].append(chars)
elif 'purpose' in resource.keys() and 'tag' in resource['purpose']:
tags_data = chars if chars else resource['value']
annodata_data['tags'].append(encodedecode(tags_data))
elif chars:
annodata_data['content'].append(chars)
elif 'items' in resource.keys():
field = 'value' if 'value' in resource['items'][0].keys() else 'chars'
fieldvalues = " ".join([encodedecode(item[field]) for item in resource['items']])
annodata_data['content'].append(fieldvalues)
elif 'value' in resource:
annodata_data['content'].append(encodedecode(resource['value']))
contentvalue = annodata_data.pop('content')
try:
content = '\n'.join(contentvalue)
    except TypeError:  # some list entries may still be bytes rather than str
decodedvalue = [item.decode("utf-8") for item in contentvalue]
content = '\n'.join(decodedvalue)
annodata_yaml = "---\n{}---\n{}".format(yaml.dump(annodata_data), content)
if github_repo == '':
writetofile(annodata_filename, annodata_yaml, True)
else:
writetogithub(annodata_filename, annodata_yaml, True)
def encodedecode(chars):
if type(chars) == str:
return chars
else:
return chars.encode('utf8')
if __name__ == "__main__":
app.run()
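
get_list_filepath above compresses an annotation target URI into a flat per-canvas list filename; a trace on a made-up target (the URI and filename are illustrative, not from the source):

target = 'https://example.org/iiif/book1/canvas/p0001.json#xywh=10,10,100,100'
# digit-bearing path segments longer than 2 chars -> ['book1', 'p0001.json#xywh=10,10,100,100']
# joined with '-' and clipped at '#xywh'          -> 'book1-p0001.json'
# cleaned and lower-cased list id                 -> 'book1-p0001'
assert get_list_filepath(target).endswith('book1-p0001-list.json')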
|
[
"os.remove",
"json.loads",
"flask_cors.CORS",
"re.finditer",
"flask.Flask",
"os.path.exists",
"yaml.dump",
"json.dumps",
"base64.b64decode",
"uuid.uuid1",
"flask.jsonify",
"base64.b64encode",
"bs4.BeautifulSoup",
"os.path.join",
"re.compile"
] |
[((266, 281), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (271, 281), False, 'from flask import Flask, jsonify\n'), ((282, 291), 'flask_cors.CORS', 'CORS', (['app'], {}), '(app)\n', (286, 291), False, 'from flask_cors import CORS\n'), ((397, 421), 'json.loads', 'json.loads', (['request.data'], {}), '(request.data)\n', (407, 421), False, 'import json, os, glob, requests\n'), ((914, 938), 'json.loads', 'json.loads', (['request.data'], {}), '(request.data)\n', (924, 938), False, 'import json, os, glob, requests\n'), ((1221, 1247), 'os.path.join', 'os.path.join', (['filepath', 'id'], {}), '(filepath, id)\n', (1233, 1247), False, 'import json, os, glob, requests\n'), ((1532, 1556), 'json.loads', 'json.loads', (['request.data'], {}), '(request.data)\n', (1542, 1556), False, 'import json, os, glob, requests\n'), ((2083, 2107), 'json.loads', 'json.loads', (['request.data'], {}), '(request.data)\n', (2093, 2107), False, 'import json, os, glob, requests\n'), ((2219, 2255), 'os.path.join', 'os.path.join', (['file', "data['filename']"], {}), "(file, data['filename'])\n", (2231, 2255), False, 'import json, os, glob, requests\n'), ((4248, 4267), 're.compile', 're.compile', (['"""[0-9]"""'], {}), "('[0-9]')\n", (4258, 4267), False, 'import re\n'), ((4694, 4730), 'os.path.join', 'os.path.join', (['filepath', 'listfilename'], {}), '(filepath, listfilename)\n', (4706, 4730), False, 'import json, os, glob, requests\n'), ((526, 538), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (536, 538), False, 'import uuid\n'), ((709, 739), 'os.path.join', 'os.path.join', (['filepath', 'uniqid'], {}), '(filepath, uniqid)\n', (721, 739), False, 'import json, os, glob, requests\n'), ((800, 820), 'flask.jsonify', 'jsonify', (['data_object'], {}), '(data_object)\n', (807, 820), False, 'from flask import Flask, jsonify\n'), ((1408, 1428), 'flask.jsonify', 'jsonify', (['data_object'], {}), '(data_object)\n', (1415, 1428), False, 'from flask import Flask, jsonify\n'), ((1609, 1635), 'os.path.join', 'os.path.join', (['filepath', 'id'], {}), '(filepath, id)\n', (1621, 1635), False, 'import json, os, glob, requests\n'), ((1959, 1990), 'flask.jsonify', 'jsonify', (["{'File Removed': True}"], {}), "({'File Removed': True})\n", (1966, 1990), False, 'from flask import Flask, jsonify\n'), ((2783, 2821), 'flask.jsonify', 'jsonify', (["{'Annotations Written': True}"], {}), "({'Annotations Written': True})\n", (2790, 2821), False, 'from flask import Flask, jsonify\n'), ((5089, 5113), 'os.path.exists', 'os.path.exists', (['filepath'], {}), '(filepath)\n', (5103, 5113), False, 'import json, os, glob, requests\n'), ((8833, 8855), 'base64.b64encode', 'base64.b64encode', (['text'], {}), '(text)\n', (8849, 8855), False, 'import base64\n'), ((11428, 11452), 'yaml.dump', 'yaml.dump', (['annodata_data'], {}), '(annodata_data)\n', (11437, 11452), False, 'import yaml\n'), ((2341, 2371), 'os.path.join', 'os.path.join', (['filepath', 'fileid'], {}), '(filepath, fileid)\n', (2353, 2371), False, 'import json, os, glob, requests\n'), ((2667, 2689), 'os.path.join', 'os.path.join', (['file', 'id'], {}), '(file, id)\n', (2679, 2689), False, 'import json, os, glob, requests\n'), ((3098, 3169), 're.finditer', 're.finditer', (['"""<iiif-(.*?)><\\\\/iiif-(.*?)>"""', 'item[charfield]'], {}), "('<iiif-(.*?)><\\\\/iiif-(.*?)>', item[charfield])\n", (3109, 3169), False, 'import re\n'), ((3595, 3610), 'os.remove', 'os.remove', (['anno'], {}), '(anno)\n', (3604, 3610), False, 'import json, os, glob, requests\n'), ((5521, 5540), 'json.loads', 
'json.loads', (['content'], {}), '(content)\n', (5531, 5540), False, 'import json, os, glob, requests\n'), ((8416, 8438), 'json.dumps', 'json.dumps', (['annotation'], {}), '(annotation)\n', (8426, 8438), False, 'import json, os, glob, requests\n'), ((8534, 8550), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (8544, 8550), False, 'import json, os, glob, requests\n'), ((9064, 9086), 'json.dumps', 'json.dumps', (['annotation'], {}), '(annotation)\n', (9074, 9086), False, 'import json, os, glob, requests\n'), ((1637, 1670), 'os.path.join', 'os.path.join', (['search_filepath', 'id'], {}), '(search_filepath, id)\n', (1649, 1670), False, 'import json, os, glob, requests\n'), ((2373, 2410), 'os.path.join', 'os.path.join', (['search_filepath', 'fileid'], {}), '(search_filepath, fileid)\n', (2385, 2410), False, 'import json, os, glob, requests\n'), ((10261, 10308), 'bs4.BeautifulSoup', 'BeautifulSoup', (["resource['chars']", '"""html.parser"""'], {}), "(resource['chars'], 'html.parser')\n", (10274, 10308), False, 'from bs4 import BeautifulSoup\n'), ((3959, 3975), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (3969, 3975), False, 'import json, os, glob, requests\n'), ((5432, 5469), 'base64.b64decode', 'base64.b64decode', (["existing['content']"], {}), "(existing['content'])\n", (5448, 5469), False, 'import base64\n')]
|
from authlib.common.encoding import json_dumps
from didcomm.common.types import (
VerificationMethodType,
VerificationMaterial,
VerificationMaterialFormat,
)
from didcomm.did_doc.did_doc import VerificationMethod, DIDDoc, DIDCommService
from didcomm.protocols.routing.forward import (
PROFILE_DIDCOMM_V2,
PROFILE_DIDCOMM_AIP2_ENV_RFC587,
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_1 = VerificationMethod(
id="did:example:bob#key-x25519-1",
controller="did:example:bob#key-x25519-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "OKP",
"crv": "X25519",
"x": "GDTrI66K0pFfO54tlCSvfjjNapIs44dzpneBgyx0S3E",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_2 = VerificationMethod(
id="did:example:bob#key-x25519-2",
controller="did:example:bob#key-x25519-2",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "OKP",
"crv": "X25519",
"x": "UT9S3F5ep16KSNBBShU2wh3qSfqYjlasZimn0mB8_VM",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_3 = VerificationMethod(
id="did:example:bob#key-x25519-3",
controller="did:example:bob#key-x25519-3",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "OKP",
"crv": "X25519",
"x": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_NOT_IN_SECRETS_1 = VerificationMethod(
id="did:example:bob#key-x25519-not-secrets-1",
controller="did:example:bob#key-x25519-not-secrets-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "OKP",
"crv": "X25519",
"x": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_1 = VerificationMethod(
id="did:example:bob#key-p256-1",
controller="did:example:bob#key-p256-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-256",
"x": "<KEY>",
"y": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_2 = VerificationMethod(
id="did:example:bob#key-p256-2",
controller="did:example:bob#key-p256-2",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-256",
"x": "<KEY>",
"y": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_NOT_IN_SECRETS_1 = VerificationMethod(
id="did:example:bob#key-p256-not-secrets-1",
controller="did:example:bob#key-p256-not-secrets-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-256",
"x": "<KEY>",
"y": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_1 = VerificationMethod(
id="did:example:bob#key-p384-1",
controller="did:example:bob#key-p384-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-384",
"x": "MvnE_OwKoTcJVfHyTX-DLSRhhNwlu5LNoQ5UWD9Jmgtdxp_kpjsMuTTBnxg5RF_Y",
"y": "X_3HJBcKFQEG35PZbEOBn8u9_z8V1F9V1Kv-Vh0aSzmH-y9aOuDJUE3D4Hvmi5l7",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_2 = VerificationMethod(
id="did:example:bob#key-p384-2",
controller="did:example:bob#key-p384-2",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-384",
"x": "<KEY>",
"y": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_NOT_IN_SECRETS_1 = VerificationMethod(
id="did:example:bob#key-p384-not-secrets-1",
controller="did:example:bob#key-p384-not-secrets-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-384",
"x": "2x3HOTvR8e-Tu6U4UqMd1wUWsNXMD0RgIunZTMcZsS-zWOwDgsrhYVHmv3k_DjV3",
"y": "W9LLaBjlWYcXUxOf6ECSfcXKaC3-K9z4hCoP0PS87Q_4ExMgIwxVCXUEB6nf0GDd",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_1 = VerificationMethod(
id="did:example:bob#key-p521-1",
controller="did:example:bob#key-p521-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-521",
"x": "<KEY>",
"y": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_2 = VerificationMethod(
id="did:example:bob#key-p521-2",
controller="did:example:bob#key-p521-2",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-521",
"x": "<KEY>",
"y": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_NOT_IN_SECRETS_1 = VerificationMethod(
id="did:example:bob#key-p521-not-secrets-1",
controller="did:example:bob#key-p521-not-secrets-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-521",
"x": "<KEY>",
"y": "<KEY>",
}
),
),
)
DID_DOC_BOB_SPEC_TEST_VECTORS = DIDDoc(
did="did:example:bob",
authentication_kids=[],
key_agreement_kids=[
"did:example:bob#key-x25519-1",
"did:example:bob#key-x25519-2",
"did:example:bob#key-x25519-3",
"did:example:bob#key-p256-1",
"did:example:bob#key-p256-2",
"did:example:bob#key-p384-1",
"did:example:bob#key-p384-2",
"did:example:bob#key-p521-1",
"did:example:bob#key-p521-2",
],
didcomm_services=[],
verification_methods=[
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_2,
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_3,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_2,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_2,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_2,
],
)
DID_DOC_BOB_WITH_NO_SECRETS = DIDDoc(
did="did:example:bob",
authentication_kids=[],
key_agreement_kids=[
"did:example:bob#key-x25519-1",
"did:example:bob#key-x25519-2",
"did:example:bob#key-x25519-3",
"did:example:bob#key-x25519-not-secrets-1",
"did:example:bob#key-p256-1",
"did:example:bob#key-p256-2",
"did:example:bob#key-p256-not-secrets-1",
"did:example:bob#key-p384-1",
"did:example:bob#key-p384-2",
"did:example:bob#key-p384-not-secrets-1",
"did:example:bob#key-p521-1",
"did:example:bob#key-p521-2",
"did:example:bob#key-p521-not-secrets-1",
],
didcomm_services=[
DIDCommService(
id="did:example:123456789abcdefghi#didcomm-1",
service_endpoint="http://example.com/path",
accept=[PROFILE_DIDCOMM_V2, PROFILE_DIDCOMM_AIP2_ENV_RFC587],
routing_keys=["did:example:mediator1#key-x25519-1"],
)
],
verification_methods=[
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_2,
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_3,
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_NOT_IN_SECRETS_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_2,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_NOT_IN_SECRETS_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_2,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_NOT_IN_SECRETS_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_2,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_NOT_IN_SECRETS_1,
],
)
|
[
"didcomm.did_doc.did_doc.DIDCommService",
"authlib.common.encoding.json_dumps",
"didcomm.did_doc.did_doc.DIDDoc"
] |
[((7106, 7950), 'didcomm.did_doc.did_doc.DIDDoc', 'DIDDoc', ([], {'did': '"""did:example:bob"""', 'authentication_kids': '[]', 'key_agreement_kids': "['did:example:bob#key-x25519-1', 'did:example:bob#key-x25519-2',\n 'did:example:bob#key-x25519-3', 'did:example:bob#key-p256-1',\n 'did:example:bob#key-p256-2', 'did:example:bob#key-p384-1',\n 'did:example:bob#key-p384-2', 'did:example:bob#key-p521-1',\n 'did:example:bob#key-p521-2']", 'didcomm_services': '[]', 'verification_methods': '[BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_1,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_2,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_3,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_1,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_2,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_1,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_2,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_1,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_2]'}), "(did='did:example:bob', authentication_kids=[], key_agreement_kids=[\n 'did:example:bob#key-x25519-1', 'did:example:bob#key-x25519-2',\n 'did:example:bob#key-x25519-3', 'did:example:bob#key-p256-1',\n 'did:example:bob#key-p256-2', 'did:example:bob#key-p384-1',\n 'did:example:bob#key-p384-2', 'did:example:bob#key-p521-1',\n 'did:example:bob#key-p521-2'], didcomm_services=[],\n verification_methods=[BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_1,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_2,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_3,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_1,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_2,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_1,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_2,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_1,\n BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_2])\n", (7112, 7950), False, 'from didcomm.did_doc.did_doc import VerificationMethod, DIDDoc, DIDCommService\n'), ((8782, 9015), 'didcomm.did_doc.did_doc.DIDCommService', 'DIDCommService', ([], {'id': '"""did:example:123456789abcdefghi#didcomm-1"""', 'service_endpoint': '"""http://example.com/path"""', 'accept': '[PROFILE_DIDCOMM_V2, PROFILE_DIDCOMM_AIP2_ENV_RFC587]', 'routing_keys': "['did:example:mediator1#key-x25519-1']"}), "(id='did:example:123456789abcdefghi#didcomm-1',\n service_endpoint='http://example.com/path', accept=[PROFILE_DIDCOMM_V2,\n PROFILE_DIDCOMM_AIP2_ENV_RFC587], routing_keys=[\n 'did:example:mediator1#key-x25519-1'])\n", (8796, 9015), False, 'from didcomm.did_doc.did_doc import VerificationMethod, DIDDoc, DIDCommService\n'), ((674, 773), 'authlib.common.encoding.json_dumps', 'json_dumps', (["{'kty': 'OKP', 'crv': 'X25519', 'x':\n 'GDTrI66K0pFfO54tlCSvfjjNapIs44dzpneBgyx0S3E'}"], {}), "({'kty': 'OKP', 'crv': 'X25519', 'x':\n 'GDTrI66K0pFfO54tlCSvfjjNapIs44dzpneBgyx0S3E'})\n", (684, 773), False, 'from authlib.common.encoding import json_dumps\n'), ((1177, 1276), 'authlib.common.encoding.json_dumps', 'json_dumps', (["{'kty': 'OKP', 'crv': 'X25519', 'x':\n 'UT9S3F5ep16KSNBBShU2wh3qSfqYjlasZimn0mB8_VM'}"], {}), "({'kty': 'OKP', 'crv': 'X25519', 'x':\n 'UT9S3F5ep16KSNBBShU2wh3qSfqYjlasZimn0mB8_VM'})\n", (1187, 1276), False, 'from authlib.common.encoding import json_dumps\n'), ((1680, 1737), 'authlib.common.encoding.json_dumps', 'json_dumps', (["{'kty': 'OKP', 'crv': 'X25519', 'x': '<KEY>'}"], {}), "({'kty': 'OKP', 'crv': 'X25519', 'x': '<KEY>'})\n", (1690, 1737), False, 'from authlib.common.encoding import json_dumps\n'), ((2184, 2241), 'authlib.common.encoding.json_dumps', 'json_dumps', (["{'kty': 'OKP', 'crv': 'X25519', 'x': '<KEY>'}"], {}), "({'kty': 'OKP', 'crv': 'X25519', 'x': '<KEY>'})\n", 
(2194, 2241), False, 'from authlib.common.encoding import json_dumps\n'), ((2643, 2712), 'authlib.common.encoding.json_dumps', 'json_dumps', (["{'kty': 'EC', 'crv': 'P-256', 'x': '<KEY>', 'y': '<KEY>'}"], {}), "({'kty': 'EC', 'crv': 'P-256', 'x': '<KEY>', 'y': '<KEY>'})\n", (2653, 2712), False, 'from authlib.common.encoding import json_dumps\n'), ((3130, 3199), 'authlib.common.encoding.json_dumps', 'json_dumps', (["{'kty': 'EC', 'crv': 'P-256', 'x': '<KEY>', 'y': '<KEY>'}"], {}), "({'kty': 'EC', 'crv': 'P-256', 'x': '<KEY>', 'y': '<KEY>'})\n", (3140, 3199), False, 'from authlib.common.encoding import json_dumps\n'), ((3656, 3725), 'authlib.common.encoding.json_dumps', 'json_dumps', (["{'kty': 'EC', 'crv': 'P-256', 'x': '<KEY>', 'y': '<KEY>'}"], {}), "({'kty': 'EC', 'crv': 'P-256', 'x': '<KEY>', 'y': '<KEY>'})\n", (3666, 3725), False, 'from authlib.common.encoding import json_dumps\n'), ((4143, 4338), 'authlib.common.encoding.json_dumps', 'json_dumps', (["{'kty': 'EC', 'crv': 'P-384', 'x':\n 'MvnE_OwKoTcJVfHyTX-DLSRhhNwlu5LNoQ5UWD9Jmgtdxp_kpjsMuTTBnxg5RF_Y', 'y':\n 'X_3HJBcKFQEG35PZbEOBn8u9_z8V1F9V1Kv-Vh0aSzmH-y9aOuDJUE3D4Hvmi5l7'}"], {}), "({'kty': 'EC', 'crv': 'P-384', 'x':\n 'MvnE_OwKoTcJVfHyTX-DLSRhhNwlu5LNoQ5UWD9Jmgtdxp_kpjsMuTTBnxg5RF_Y', 'y':\n 'X_3HJBcKFQEG35PZbEOBn8u9_z8V1F9V1Kv-Vh0aSzmH-y9aOuDJUE3D4Hvmi5l7'})\n", (4153, 4338), False, 'from authlib.common.encoding import json_dumps\n'), ((4748, 4817), 'authlib.common.encoding.json_dumps', 'json_dumps', (["{'kty': 'EC', 'crv': 'P-384', 'x': '<KEY>', 'y': '<KEY>'}"], {}), "({'kty': 'EC', 'crv': 'P-384', 'x': '<KEY>', 'y': '<KEY>'})\n", (4758, 4817), False, 'from authlib.common.encoding import json_dumps\n'), ((5274, 5469), 'authlib.common.encoding.json_dumps', 'json_dumps', (["{'kty': 'EC', 'crv': 'P-384', 'x':\n '2x3HOTvR8e-Tu6U4UqMd1wUWsNXMD0RgIunZTMcZsS-zWOwDgsrhYVHmv3k_DjV3', 'y':\n 'W9LLaBjlWYcXUxOf6ECSfcXKaC3-K9z4hCoP0PS87Q_4ExMgIwxVCXUEB6nf0GDd'}"], {}), "({'kty': 'EC', 'crv': 'P-384', 'x':\n '2x3HOTvR8e-Tu6U4UqMd1wUWsNXMD0RgIunZTMcZsS-zWOwDgsrhYVHmv3k_DjV3', 'y':\n 'W9LLaBjlWYcXUxOf6ECSfcXKaC3-K9z4hCoP0PS87Q_4ExMgIwxVCXUEB6nf0GDd'})\n", (5284, 5469), False, 'from authlib.common.encoding import json_dumps\n'), ((5879, 5948), 'authlib.common.encoding.json_dumps', 'json_dumps', (["{'kty': 'EC', 'crv': 'P-521', 'x': '<KEY>', 'y': '<KEY>'}"], {}), "({'kty': 'EC', 'crv': 'P-521', 'x': '<KEY>', 'y': '<KEY>'})\n", (5889, 5948), False, 'from authlib.common.encoding import json_dumps\n'), ((6366, 6435), 'authlib.common.encoding.json_dumps', 'json_dumps', (["{'kty': 'EC', 'crv': 'P-521', 'x': '<KEY>', 'y': '<KEY>'}"], {}), "({'kty': 'EC', 'crv': 'P-521', 'x': '<KEY>', 'y': '<KEY>'})\n", (6376, 6435), False, 'from authlib.common.encoding import json_dumps\n'), ((6892, 6961), 'authlib.common.encoding.json_dumps', 'json_dumps', (["{'kty': 'EC', 'crv': 'P-521', 'x': '<KEY>', 'y': '<KEY>'}"], {}), "({'kty': 'EC', 'crv': 'P-521', 'x': '<KEY>', 'y': '<KEY>'})\n", (6902, 6961), False, 'from authlib.common.encoding import json_dumps\n')]
|
#!/usr/bin/python
import numpy as np
# Construct an array by executing a function over each coordinate.
def f(x, y):
return 2*x + y + 1
a = np.fromfunction(f, (5, 4), dtype=int)
print(a)
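# Expected output:
# [[ 1  2  3  4]
#  [ 3  4  5  6]
#  [ 5  6  7  8]
#  [ 7  8  9 10]
#  [ 9 10 11 12]]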
# anonymous function
b = np.fromfunction(lambda x, y: 2*x + y, (2, 2))
print(b)
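# Expected output (default dtype is float; exact spacing depends on numpy version):
# [[0. 1.]
#  [2. 3.]]
# A further (added) illustration: fromfunction also works with boolean
# expressions, e.g. an identity mask.
c = np.fromfunction(lambda i, j: i == j, (3, 3))
print(c)
# [[ True False False]
#  [False  True False]
#  [False False  True]]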
|
[
"numpy.fromfunction"
] |
[((148, 185), 'numpy.fromfunction', 'np.fromfunction', (['f', '(5, 4)'], {'dtype': 'int'}), '(f, (5, 4), dtype=int)\n', (163, 185), True, 'import numpy as np\n'), ((221, 268), 'numpy.fromfunction', 'np.fromfunction', (['(lambda x, y: 2 * x + y)', '(2, 2)'], {}), '(lambda x, y: 2 * x + y, (2, 2))\n', (236, 268), True, 'import numpy as np\n')]
|
import math
import random
import json
import os
from copy import deepcopy
from datetime import date, timedelta
from itertools import cycle
from dateutil.relativedelta import relativedelta
from django.contrib.auth.models import User
from django.utils import timezone
from indicators.models import (
Indicator,
IndicatorType,
Result,
PeriodicTarget,
Level,
DisaggregationType,
DisaggregatedValue,
LevelTier,
)
from workflow.models import Program, Country, Organization, TolaUser, SiteProfile, Sector
from indicators.views.views_indicators import generate_periodic_targets
class ProgramFactory:
module_location = os.path.dirname(__file__)
with open(os.path.join(module_location, 'sample_levels.json'), 'r') as fh:
sample_levels = json.loads(fh.read())
def __init__(self, country):
self.country = country
self.org = Organization.mercy_corps()
self.default_start_date = (date.today() + relativedelta(months=-18)).replace(day=1)
self.default_end_date = (self.default_start_date + relativedelta(months=+32)).replace(day=1) - timedelta(days=1)
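        # i.e. reporting starts on the first day of the month 18 months back and
        # spans 32 full months, ending on the last day of a month.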
def create_program(
self, name, start_date=False, end_date=False, post_satsuma=True, multi_country=False, create_levels=True):
if not start_date:
start_date = self.default_start_date
if not end_date:
end_date = self.default_end_date
program = Program.objects.create(**{
'name': name,
'reporting_period_start': start_date,
'reporting_period_end': end_date,
'funding_status': 'Funded',
'gaitid': 'fake_gait_id_{}'.format(random.randint(1, 9999)),
'_using_results_framework': Program.RF_ALWAYS if post_satsuma else Program.NOT_MIGRATED,
})
program.country.add(self.country)
if multi_country:
country2 = Country.objects.get(country="United States")
program.country.add(country2)
if create_levels:
self.create_levels(program, deepcopy(self.sample_levels))
return program
@staticmethod
def create_levels(program, level_template):
level_data = deepcopy(level_template)
tier_labels = LevelTier.get_templates()['mc_standard']['tiers']
for i, tier in enumerate(tier_labels):
t = LevelTier(name=tier, tier_depth=i + 1, program=program)
t.save()
level_map = {}
for level_fix in level_data:
parent = None
if 'parent_id' in level_fix['fields']:
parent = level_map[level_fix['fields'].pop('parent_id')]
level = Level(**level_fix['fields'])
level.parent = parent
level.program = program
level.save()
level_map[level_fix['pk']] = level
class IndicatorFactory:
standard_params_base = []
for freq in Indicator.TARGET_FREQUENCIES:
for uom_type in (Indicator.NUMBER, Indicator.PERCENTAGE):
for is_cumulative in (True, False):
for direction in (Indicator.DIRECTION_OF_CHANGE_POSITIVE, Indicator.DIRECTION_OF_CHANGE_NEGATIVE):
# Don't create indicators that are LoP|cumulative or percent|non-cumulative
# since we don't support those combinations
if (freq[0] == Indicator.LOP and is_cumulative) or \
(uom_type == Indicator.PERCENTAGE and not is_cumulative):
continue
standard_params_base.append({
'freq': freq[0], 'uom_type': uom_type, 'is_cumulative': is_cumulative,
'direction': direction, 'null_level': None})
null_supplements_params = [
{'freq': Indicator.ANNUAL, 'uom_type': Indicator.NUMBER, 'is_cumulative': False,
'direction': Indicator.DIRECTION_OF_CHANGE_POSITIVE, 'null_level': 'targets'},
{'freq': Indicator.QUARTERLY, 'uom_type': Indicator.PERCENTAGE, 'is_cumulative': True,
'direction': Indicator.DIRECTION_OF_CHANGE_NONE, 'null_level': 'results'},
{'freq': Indicator.LOP, 'uom_type': Indicator.NUMBER, 'is_cumulative': False,
'direction': Indicator.DIRECTION_OF_CHANGE_NONE, 'null_level': 'results'},
{'freq': Indicator.EVENT, 'uom_type': Indicator.PERCENTAGE, 'is_cumulative': True,
'direction': Indicator.DIRECTION_OF_CHANGE_NEGATIVE, 'null_level': 'evidence'},
{'freq': Indicator.MID_END, 'uom_type': Indicator.NUMBER, 'is_cumulative': False,
'direction': Indicator.DIRECTION_OF_CHANGE_POSITIVE, 'null_level': 'evidence'},
]
frequency_labels = {
Indicator.LOP: 'LoP only',
Indicator.MID_END: 'Midline and endline',
Indicator.EVENT: 'Event',
Indicator.ANNUAL: 'Annual',
Indicator.SEMI_ANNUAL: 'Semi-annual',
Indicator.TRI_ANNUAL: 'Tri-annual',
Indicator.QUARTERLY: 'Quarterly',
Indicator.MONTHLY: 'Monthly',
}
uom_labels = {
Indicator.NUMBER: 'Number (#)',
Indicator.PERCENTAGE: "Percentage (%)",
}
direction_labels = {
Indicator.DIRECTION_OF_CHANGE_NONE: "Direction of change NA",
Indicator.DIRECTION_OF_CHANGE_POSITIVE: "Increase (+)",
Indicator.DIRECTION_OF_CHANGE_NEGATIVE: "Decrease (-)",
}
def __init__(self, program, country):
self.program = program
self.country = country
self.sadd_disagg_obj = DisaggregationType.objects.get(
pk=109, disaggregation_type="Sex and Age Disaggregated Data (SADD)")
self.sadd_disagg_labels = self.sadd_disagg_obj.disaggregationlabel_set.all()
def create_standard_indicators(self, **kwargs):
passed_apply_skips = kwargs.pop('apply_skips', None)
        apply_skips_main = passed_apply_skips if passed_apply_skips is not None else True
        apply_skips_supplement = passed_apply_skips if passed_apply_skips is not None else False
indicator_ids = self.create_indicators(self.standard_params_base, apply_skips=apply_skips_main, **kwargs)
indicator_ids.extend(self.create_indicators(
self.null_supplements_params, apply_skips=apply_skips_supplement, **kwargs))
return indicator_ids
def create_indicators(
self, param_sets, indicator_suffix='', apply_skips=True, apply_rf_skips=False,
personal_indicator=False, indicatorless_levels=None):
indicatorless_levels = [] if not indicatorless_levels else indicatorless_levels
indicator_ids = []
old_levels = list(Indicator.objects.filter(old_level__isnull=False).order_by('old_level')
.distinct().values_list('old_level', flat=True))
old_levels.append(None)
old_level_cycle = cycle(old_levels)
rf_levels = list(Level.objects.filter(program__id=self.program.id).exclude(id__in=indicatorless_levels))
if apply_rf_skips:
rf_levels.append(None)
rf_level_cycle = cycle(rf_levels)
indicator_types = list(IndicatorType.objects.all())
if apply_skips:
indicator_types.append(None)
type_cycle = cycle(indicator_types)
sectors = list(Sector.objects.all()[:5])
if apply_skips:
sectors.append(None)
sector_cycle = cycle(sectors)
sites = list(SiteProfile.objects.filter(country__country="Tolaland"))
if apply_skips:
sites.append(None)
site_cycle = cycle(sites)
result_skip_cycle = cycle([False, False, False, False, True, False, False])
extra_result_cycle = cycle([True, False, False, True, False, False, False])
evidence_skip_cycle = cycle([False, False, True, False, False, False, False])
# Determines how many country disaggs an indicator will have assigned to it
country_disagg_cycle = cycle([0, 1, 2])
        # Determines whether the country-level SADD disagg will be assigned to an indicator
sadd_disagg_cycle = cycle([True, True, True, False])
        # Regardless of what disaggs an indicator has assigned, this controls how many disaggs actually get
        # used by a result. That way, there are potentially some results that don't have disagg values
        # even though the indicator has been assigned a particular disagg type. The 'one' and 'two'
        # values indicate that one or two disagg types should be used, but not the SADD type.
result_disagg_cycle = cycle(['sadd', 'one', 'two', 'none', 'all', 'all', 'all', 'none'])
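        # e.g. cycle([0, 1, 2]) above gives the first indicator 0 country disaggs,
        # the second 1, the third 2, and then the pattern repeats.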
for n, params in enumerate(param_sets):
if params['is_cumulative']:
cumulative_text = 'Cumulative'
else:
cumulative_text = 'Non-cumulative'
indicator_disagg_count = next(country_disagg_cycle)
sadd_disagg_flag = next(sadd_disagg_cycle)
result_disagg_type = next(result_disagg_cycle)
indicator_name_list = [
self.frequency_labels[params['freq']],
self.uom_labels[params['uom_type']],
cumulative_text,
self.direction_labels[params['direction']],
f"Disagg type - SADD:{sadd_disagg_flag}, Country:{indicator_disagg_count}",
]
if params['null_level']:
indicator_name_list.append(f"| No {params['null_level']}")
else:
result_text_list = []
result_text_list.append(f"SADD:{result_disagg_type in ('all', 'sadd')}") if sadd_disagg_flag else None
result_text_list.append(
f"Country:{result_disagg_type in ('one', 'two', 'all')}"
) if indicator_disagg_count > 0 else None
if len(result_text_list) > 0:
result_text = ", ".join(result_text_list)
else:
result_text = "None"
indicator_name_list.append(
f"Disaggs applied - {result_text}")
if indicator_suffix:
indicator_name_list.append(indicator_suffix)
indicator_name = ' | '.join(indicator_name_list)
frequency = params['freq']
if params['null_level'] == 'targets':
frequency = None
indicator = Indicator(
name=indicator_name,
is_cumulative=params['is_cumulative'],
target_frequency=frequency,
unit_of_measure='This is a UOM',
baseline=0,
unit_of_measure_type=params['uom_type'],
direction_of_change=params['direction'],
program=self.program,
old_level=None if self.program.results_framework else next(old_level_cycle),
level=next(rf_level_cycle),
sector=None if not personal_indicator else next(sector_cycle),
baseline_na=False,
definition="",
means_of_verification="",
data_collection_method="",
method_of_analysis=""
)
indicator.save()
country_assigned_disagg_labelsets = []
for disagg in self.country.disaggregationtype_set.order_by('?').all()[:indicator_disagg_count]:
indicator.disaggregation.add(disagg)
country_assigned_disagg_labelsets.append(list(disagg.disaggregationlabel_set.all()))
if sadd_disagg_flag:
indicator.disaggregation.add(self.sadd_disagg_obj)
i_type = next(type_cycle)
if personal_indicator and i_type:
indicator.indicator_type.add(i_type)
indicator.save()
indicator_ids.append(indicator.id)
if params['null_level'] == 'targets':
indicator.lop_target = 100
indicator.save()
continue
self.make_targets(self.program, indicator)
periodic_targets = PeriodicTarget.objects.filter(indicator__id=indicator.id)
incrementors = self.calc_target_and_achieved_base(
params['uom_type'], params['direction'], params['is_cumulative'], len(periodic_targets))
lop_target = 0
for i, pt in enumerate(periodic_targets):
pt.target = incrementors['target_start'] + incrementors['target_increment'] * i
pt.save()
if params['is_cumulative']:
lop_target = pt.target
else:
lop_target += pt.target
indicator.lop_target = lop_target
indicator.save()
result_factory = ResultFactory(
indicator, self.program, country_assigned_disagg_labelsets, self.sadd_disagg_labels,
result_disagg_type, params['uom_type'], params['null_level'], site_cycle, personal_indicator,
apply_skips)
result_factory.make_results(
periodic_targets, incrementors, evidence_skip_cycle, result_skip_cycle, extra_result_cycle)
return indicator_ids
@staticmethod
def make_targets(program, indicator):
if indicator.target_frequency == Indicator.LOP:
PeriodicTarget.objects.create(**{
'indicator': indicator,
'customsort': 1,
'edit_date': timezone.now(),
'period': 'LOP target',
})
return
elif indicator.target_frequency == Indicator.EVENT:
for i in range(3):
PeriodicTarget.objects.create(**{
'indicator': indicator,
'customsort': i,
'edit_date': timezone.now(),
'period': 'Event {}'.format(i + 1),
})
return
target_generator = PeriodicTarget.generate_for_frequency(indicator.target_frequency)
num_periods = len([p for p in target_generator(program.reporting_period_start, program.reporting_period_end)])
targets_json = generate_periodic_targets(
tf=indicator.target_frequency, start_date=program.reporting_period_start, numTargets=num_periods)
for i, pt in enumerate(targets_json):
if indicator.target_frequency in [Indicator.LOP, Indicator.MID_END]:
PeriodicTarget.objects.create(**{
'indicator': indicator,
'customsort': i,
'edit_date': timezone.now(),
'period': 'Period {}'.format(i + 1),
})
else:
PeriodicTarget.objects.create(**{
'indicator': indicator,
'customsort': i,
'edit_date': timezone.now(),
'period': 'Period {}'.format(i + 1),
'start_date': pt['start_date'],
'end_date': pt['end_date'],
})
@staticmethod
def calc_target_and_achieved_base(uom_type, direction, is_cumulative, pt_count):
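        # Returns starting values and per-period increments for targets and achieved
        # values; e.g. a NUMBER / increasing indicator yields targets 100, 200, 300, ...
        # since target_increment == target_start.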
if uom_type == Indicator.NUMBER:
if direction == Indicator.DIRECTION_OF_CHANGE_POSITIVE:
if is_cumulative:
target_start = 100
target_increment = target_start
achieved_start = 90
achieved_increment = int(achieved_start * 1.1)
else:
target_start = 100
target_increment = target_start
achieved_start = 90
achieved_increment = int(achieved_start * 1.1)
else:
if is_cumulative:
target_start = 500
target_increment = -int(math.floor((target_start / pt_count) / 10) * 10)
achieved_start = 400
achieved_increment = target_increment + 2
else:
target_start = 500
target_increment = -int(math.floor((target_start / pt_count) / 10) * 10)
achieved_start = 400
achieved_increment = target_increment * .8
else:
if direction == Indicator.DIRECTION_OF_CHANGE_POSITIVE:
# Don't need to check non-cumulative because we don't really handle it
target_start = 10
target_increment = 3
achieved_start = 7
achieved_increment = 4
else:
# Don't need to check non-cumulative because we don't really handle it
target_start = 90
target_increment = max(-math.floor(target_start / pt_count), -2)
achieved_start = 95
achieved_increment = target_increment - 1
return {
"target_start": target_start, "target_increment": target_increment,
"achieved_start": achieved_start, "achieved_increment": achieved_increment}
class ResultFactory:
def __init__(
self, indicator, program, country_assigned_disagg_labelsets, sadd_disagg_labels, result_disagg,
uom_type, null_level, site_cycle, personal_indicator, apply_skips):
self.program = program
self.indicator = indicator
self.sadd_disagg_labels = sadd_disagg_labels
self.indicator_disagg_labelsets = country_assigned_disagg_labelsets
self.result_disagg = result_disagg
self.uom_type = uom_type
self.null_level = null_level
self.site_cycle = site_cycle
self.personal_indicator = personal_indicator
self.apply_skips = apply_skips
def make_results(self, periodic_targets, incrementors, evidence_skip_cycle, result_skip_cycle, extra_result_cycle):
day_offset = timedelta(days=2)
for i, pt in enumerate(periodic_targets):
# Users shouldn't put in results with a date in the future, so neither should we.
if pt.start_date and date.today() < pt.start_date + day_offset:
continue
# Skip creating a result if the null_level is result or if
# the number of results has reached the arbitrary skip point.
result_skip = next(result_skip_cycle)
extra_result = next(extra_result_cycle)
if (self.apply_skips and result_skip) or self.null_level == 'results':
continue
achieved_value = incrementors['achieved_start'] + (incrementors['achieved_increment'] * i)
results_to_create = 1
if self.apply_skips and extra_result:
results_to_create = 2
if self.uom_type == Indicator.NUMBER:
achieved_value = int(achieved_value * .4)
else:
achieved_value = int(achieved_value * .9)
# Now create the Results and their related Records
if pt.start_date:
date_collected = pt.start_date + day_offset
else:
date_collected = date.today()
for c in range(results_to_create):
rs = Result(
periodic_target=pt,
indicator=self.indicator,
program=self.program,
achieved=achieved_value,
date_collected=date_collected)
rs.save()
if self.result_disagg != 'none':
self.disaggregate_result(rs, self.result_disagg, self.indicator)
date_collected = date_collected + day_offset
if self.uom_type == Indicator.NUMBER:
achieved_value = int(achieved_value * 1.5)
else:
achieved_value = int(achieved_value * 1.15)
if self.null_level == 'evidence':
continue
if self.apply_skips and next(evidence_skip_cycle):
continue
rs.record_name = 'Evidence for result id {}'.format(rs.id)
rs.evidence_url = 'https://www.pinterest.ca/search/pins/?q=cute%20animals'
r_site = next(self.site_cycle)
# TODO: remove personal indicator?
if self.personal_indicator and r_site:
rs.site.add(r_site)
rs.save()
def disaggregate_result(self, result, result_disagg_type, indicator):
label_sets = []
if result_disagg_type == 'sadd':
label_sets.append(self.sadd_disagg_labels)
elif result_disagg_type == 'one' and len(self.indicator_disagg_labelsets) > 0:
try:
label_sets.append(random.choice(self.indicator_disagg_labelsets))
except ValueError:
pass
elif result_disagg_type == 'two' and indicator.disaggregation.all().count() > 1:
try:
label_sets.extend(random.sample(self.indicator_disagg_labelsets, k=2))
except ValueError:
label_sets.extend(self.indicator_disagg_labelsets)
elif result_disagg_type == 'all':
label_sets.append(self.sadd_disagg_labels)
label_sets.extend(self.indicator_disagg_labelsets)
if len(label_sets) < 1:
return
for label_set in label_sets:
# Calculate how many of the labels we will use (k) and then randomly select that number of label indexes
k = random.randrange(1, len(label_set) + 1)
label_indexes = random.sample(list(range(len(label_set))), k)
values = self.make_random_disagg_values(result.achieved, len(label_indexes))
value_objects = []
for label_index, value in zip(label_indexes, values):
label = label_set[label_index]
value_objects.append(DisaggregatedValue(category=label, value=value, result=result))
DisaggregatedValue.objects.bulk_create(value_objects)
@staticmethod
def make_random_disagg_values(aggregate_value, total_slot_count):
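        # Randomly partition aggregate_value across total_slot_count slots, then
        # rebalance below so the slot values sum exactly to aggregate_value.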
filled = []
for slot_index in range(total_slot_count):
slots_available_count = total_slot_count - len(filled)
max_value = aggregate_value - sum(filled) - slots_available_count + 1
if max_value <= 1:
filled.extend([1] * slots_available_count)
break
elif slot_index == total_slot_count - 1:
filled.append(aggregate_value - sum(filled))
else:
filled.append(random.randrange(0, max_value))
if sum(filled) < aggregate_value:
filled[0] += aggregate_value - sum(filled)
if sum(filled) > aggregate_value:
reduction_amount = sum(filled) - aggregate_value
while reduction_amount > 0:
i = filled.index(max(filled))
if filled[i] >= reduction_amount:
filled[i] -= reduction_amount
reduction_amount = 0
else:
reduction_amount -= filled[i]
filled[i] = 0
if sum(filled) != aggregate_value:
raise NotImplementedError('You wrote a bad algorithm')
random.shuffle(filled)
return filled
class Cleaner:
@classmethod
def clean(cls, *args):
if 'clean_all' in args:
cls.clean_programs()
cls.clean_tolaland()
cls.clean_test_users()
else:
if 'clean_tolaland' in args:
cls.clean_tolaland()
if 'clean_programs' in args:
cls.clean_programs()
if 'clean_test_users' in args:
cls.clean_test_users()
@staticmethod
def clean_test_users():
auth_users = User.objects.filter(username__in=user_profiles.keys())
tola_users = TolaUser.objects.filter(user__in=auth_users)
message = f"{auth_users.count()} Auth Users and {tola_users.count()} Tola Users deleted"
tola_users.delete()
auth_users.delete()
print(message)
@staticmethod
def clean_tolaland():
try:
country = Country.objects.get(country='Tolaland')
print("Deleting country: {}".format(country))
disaggregations = DisaggregationType.objects.filter(country=country)
disaggregations.delete()
country.delete()
except Country.DoesNotExist:
pass
@staticmethod
def clean_programs():
programs = Program.objects.filter(name__icontains='QA program -')
if programs.count() > 0:
print("Delete these programs?\n{}".format('\n'.join(p.name for p in programs)))
confirm = input('[yes/no]: ')
if confirm == 'yes':
for program in programs:
print('Deleting program: {}'.format(program))
for indicator in program.indicator_set.all():
indicator.delete()
program.delete()
else:
print('\nPrograms not deleted')
standard_countries = ['Afghanistan', 'Haiti', 'Jordan', 'Tolaland', 'United States']
TEST_ORG, created = Organization.objects.get_or_create(name='Test')
MC_ORG = Organization.objects.get(name='Mercy Corps')
user_profiles = {
'mc-low': {
'first_last': ['mc-low-first', 'mc-low-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'low',
'home_country': 'United States',
'org': MC_ORG,
},
'mc-medium': {
'first_last': ['mc-med-first', 'mc-med-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'medium',
'home_country': 'United States',
'org': MC_ORG,
},
'mc-high': {
'first_last': ['mc-high-first', 'mc-high-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'high',
'home_country': 'United States',
'org': MC_ORG,
},
'mc-basicadmin': {
'first_last': ['mc-basicadmin-first', 'mc-basicadmin-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'high',
'home_country': 'United States',
'org': MC_ORG,
'admin': 'all'
},
'gmail-low': {
'first_last': ['gmail-low-first', 'gmail-low-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'low',
'home_country': None,
'org': TEST_ORG,
},
'gmail-medium': {
'first_last': ['gmail-med-first', 'gmail-med-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'medium',
'home_country': None,
'org': TEST_ORG,
},
'gmail-high': {
'first_last': ['gmail-high-first', 'gmail-high-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'high',
'home_country': None,
'org': TEST_ORG,
},
'external-low': {
'first_last': ['external-low-first', 'external-low-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'low',
'home_country': None,
'org': TEST_ORG,
},
'external-medium': {
'first_last': ['external-med-first', 'external-med-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'medium',
'home_country': None,
'org': TEST_ORG,
},
'external-high': {
'first_last': ['external-high-first', 'external-high-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'high',
'home_country': None,
'org': TEST_ORG,
},
'demo1': {
'first_last': ['demo', 'one'],
'email': '<EMAIL>',
'accessible_countries': ['Ethiopia'],
'permission_level': 'low',
'home_country': 'Ethiopia',
'org': MC_ORG,
},
'demo2': {
'first_last': ['demo', 'two'],
'email': '<EMAIL>',
'accessible_countries': [],
'permission_level': 'medium',
'home_country': None,
'org': TEST_ORG,
'program_access': [('Ethiopia', 'Collaboration in Cross-Border Areas', 'medium')]
},
'demo3': {
'first_last': ['demo', 'three'],
'email': '<EMAIL>',
'accessible_countries': [],
'permission_level': 'high',
'home_country': None,
'org': TEST_ORG,
'program_access': [('Ethiopia', 'Collaboration in Cross-Border Areas', 'high')]
},
}
|
[
"random.sample",
"random.shuffle",
"indicators.views.views_indicators.generate_periodic_targets",
"workflow.models.Organization.objects.get",
"itertools.cycle",
"workflow.models.Organization.objects.get_or_create",
"indicators.models.DisaggregationType.objects.filter",
"os.path.join",
"workflow.models.Program.objects.filter",
"workflow.models.Country.objects.get",
"indicators.models.PeriodicTarget.objects.filter",
"random.randint",
"django.utils.timezone.now",
"os.path.dirname",
"workflow.models.TolaUser.objects.filter",
"dateutil.relativedelta.relativedelta",
"indicators.models.DisaggregatedValue",
"datetime.timedelta",
"workflow.models.SiteProfile.objects.filter",
"copy.deepcopy",
"indicators.models.Result",
"indicators.models.LevelTier",
"indicators.models.Indicator.objects.filter",
"datetime.date.today",
"indicators.models.DisaggregationType.objects.get",
"indicators.models.Level",
"indicators.models.PeriodicTarget.generate_for_frequency",
"indicators.models.LevelTier.get_templates",
"math.floor",
"random.choice",
"workflow.models.Sector.objects.all",
"random.randrange",
"indicators.models.DisaggregatedValue.objects.bulk_create",
"indicators.models.Level.objects.filter",
"indicators.models.IndicatorType.objects.all",
"workflow.models.Organization.mercy_corps"
] |
[((25237, 25284), 'workflow.models.Organization.objects.get_or_create', 'Organization.objects.get_or_create', ([], {'name': '"""Test"""'}), "(name='Test')\n", (25271, 25284), False, 'from workflow.models import Program, Country, Organization, TolaUser, SiteProfile, Sector\n'), ((25294, 25338), 'workflow.models.Organization.objects.get', 'Organization.objects.get', ([], {'name': '"""Mercy Corps"""'}), "(name='Mercy Corps')\n", (25318, 25338), False, 'from workflow.models import Program, Country, Organization, TolaUser, SiteProfile, Sector\n'), ((650, 675), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (665, 675), False, 'import os\n'), ((885, 911), 'workflow.models.Organization.mercy_corps', 'Organization.mercy_corps', ([], {}), '()\n', (909, 911), False, 'from workflow.models import Program, Country, Organization, TolaUser, SiteProfile, Sector\n'), ((2195, 2219), 'copy.deepcopy', 'deepcopy', (['level_template'], {}), '(level_template)\n', (2203, 2219), False, 'from copy import deepcopy\n'), ((5491, 5595), 'indicators.models.DisaggregationType.objects.get', 'DisaggregationType.objects.get', ([], {'pk': '(109)', 'disaggregation_type': '"""Sex and Age Disaggregated Data (SADD)"""'}), "(pk=109, disaggregation_type=\n 'Sex and Age Disaggregated Data (SADD)')\n", (5521, 5595), False, 'from indicators.models import Indicator, IndicatorType, Result, PeriodicTarget, Level, DisaggregationType, DisaggregatedValue, LevelTier\n'), ((6735, 6752), 'itertools.cycle', 'cycle', (['old_levels'], {}), '(old_levels)\n', (6740, 6752), False, 'from itertools import cycle\n'), ((6954, 6970), 'itertools.cycle', 'cycle', (['rf_levels'], {}), '(rf_levels)\n', (6959, 6970), False, 'from itertools import cycle\n'), ((7118, 7140), 'itertools.cycle', 'cycle', (['indicator_types'], {}), '(indicator_types)\n', (7123, 7140), False, 'from itertools import cycle\n'), ((7271, 7285), 'itertools.cycle', 'cycle', (['sectors'], {}), '(sectors)\n', (7276, 7285), False, 'from itertools import cycle\n'), ((7441, 7453), 'itertools.cycle', 'cycle', (['sites'], {}), '(sites)\n', (7446, 7453), False, 'from itertools import cycle\n'), ((7483, 7538), 'itertools.cycle', 'cycle', (['[False, False, False, False, True, False, False]'], {}), '([False, False, False, False, True, False, False])\n', (7488, 7538), False, 'from itertools import cycle\n'), ((7568, 7622), 'itertools.cycle', 'cycle', (['[True, False, False, True, False, False, False]'], {}), '([True, False, False, True, False, False, False])\n', (7573, 7622), False, 'from itertools import cycle\n'), ((7653, 7708), 'itertools.cycle', 'cycle', (['[False, False, True, False, False, False, False]'], {}), '([False, False, True, False, False, False, False])\n', (7658, 7708), False, 'from itertools import cycle\n'), ((7825, 7841), 'itertools.cycle', 'cycle', (['[0, 1, 2]'], {}), '([0, 1, 2])\n', (7830, 7841), False, 'from itertools import cycle\n'), ((7962, 7994), 'itertools.cycle', 'cycle', (['[True, True, True, False]'], {}), '([True, True, True, False])\n', (7967, 7994), False, 'from itertools import cycle\n'), ((8418, 8484), 'itertools.cycle', 'cycle', (["['sadd', 'one', 'two', 'none', 'all', 'all', 'all', 'none']"], {}), "(['sadd', 'one', 'two', 'none', 'all', 'all', 'all', 'none'])\n", (8423, 8484), False, 'from itertools import cycle\n'), ((13815, 13880), 'indicators.models.PeriodicTarget.generate_for_frequency', 'PeriodicTarget.generate_for_frequency', (['indicator.target_frequency'], {}), '(indicator.target_frequency)\n', (13852, 13880), False, 'from 
indicators.models import Indicator, IndicatorType, Result, PeriodicTarget, Level, DisaggregationType, DisaggregatedValue, LevelTier\n'), ((14024, 14152), 'indicators.views.views_indicators.generate_periodic_targets', 'generate_periodic_targets', ([], {'tf': 'indicator.target_frequency', 'start_date': 'program.reporting_period_start', 'numTargets': 'num_periods'}), '(tf=indicator.target_frequency, start_date=program\n .reporting_period_start, numTargets=num_periods)\n', (14049, 14152), False, 'from indicators.views.views_indicators import generate_periodic_targets\n'), ((17752, 17769), 'datetime.timedelta', 'timedelta', ([], {'days': '(2)'}), '(days=2)\n', (17761, 17769), False, 'from datetime import date, timedelta\n'), ((23255, 23277), 'random.shuffle', 'random.shuffle', (['filled'], {}), '(filled)\n', (23269, 23277), False, 'import random\n'), ((23892, 23936), 'workflow.models.TolaUser.objects.filter', 'TolaUser.objects.filter', ([], {'user__in': 'auth_users'}), '(user__in=auth_users)\n', (23915, 23936), False, 'from workflow.models import Program, Country, Organization, TolaUser, SiteProfile, Sector\n'), ((24556, 24610), 'workflow.models.Program.objects.filter', 'Program.objects.filter', ([], {'name__icontains': '"""QA program -"""'}), "(name__icontains='QA program -')\n", (24578, 24610), False, 'from workflow.models import Program, Country, Organization, TolaUser, SiteProfile, Sector\n'), ((690, 741), 'os.path.join', 'os.path.join', (['module_location', '"""sample_levels.json"""'], {}), "(module_location, 'sample_levels.json')\n", (702, 741), False, 'import os\n'), ((1107, 1124), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1116, 1124), False, 'from datetime import date, timedelta\n'), ((1899, 1943), 'workflow.models.Country.objects.get', 'Country.objects.get', ([], {'country': '"""United States"""'}), "(country='United States')\n", (1918, 1943), False, 'from workflow.models import Program, Country, Organization, TolaUser, SiteProfile, Sector\n'), ((2355, 2410), 'indicators.models.LevelTier', 'LevelTier', ([], {'name': 'tier', 'tier_depth': '(i + 1)', 'program': 'program'}), '(name=tier, tier_depth=i + 1, program=program)\n', (2364, 2410), False, 'from indicators.models import Indicator, IndicatorType, Result, PeriodicTarget, Level, DisaggregationType, DisaggregatedValue, LevelTier\n'), ((2664, 2692), 'indicators.models.Level', 'Level', ([], {}), "(**level_fix['fields'])\n", (2669, 2692), False, 'from indicators.models import Indicator, IndicatorType, Result, PeriodicTarget, Level, DisaggregationType, DisaggregatedValue, LevelTier\n'), ((7003, 7030), 'indicators.models.IndicatorType.objects.all', 'IndicatorType.objects.all', ([], {}), '()\n', (7028, 7030), False, 'from indicators.models import Indicator, IndicatorType, Result, PeriodicTarget, Level, DisaggregationType, DisaggregatedValue, LevelTier\n'), ((7308, 7363), 'workflow.models.SiteProfile.objects.filter', 'SiteProfile.objects.filter', ([], {'country__country': '"""Tolaland"""'}), "(country__country='Tolaland')\n", (7334, 7363), False, 'from workflow.models import Program, Country, Organization, TolaUser, SiteProfile, Sector\n'), ((11942, 11999), 'indicators.models.PeriodicTarget.objects.filter', 'PeriodicTarget.objects.filter', ([], {'indicator__id': 'indicator.id'}), '(indicator__id=indicator.id)\n', (11971, 11999), False, 'from indicators.models import Indicator, IndicatorType, Result, PeriodicTarget, Level, DisaggregationType, DisaggregatedValue, LevelTier\n'), ((21934, 21987), 
'indicators.models.DisaggregatedValue.objects.bulk_create', 'DisaggregatedValue.objects.bulk_create', (['value_objects'], {}), '(value_objects)\n', (21972, 21987), False, 'from indicators.models import Indicator, IndicatorType, Result, PeriodicTarget, Level, DisaggregationType, DisaggregatedValue, LevelTier\n'), ((24193, 24232), 'workflow.models.Country.objects.get', 'Country.objects.get', ([], {'country': '"""Tolaland"""'}), "(country='Tolaland')\n", (24212, 24232), False, 'from workflow.models import Program, Country, Organization, TolaUser, SiteProfile, Sector\n'), ((24321, 24371), 'indicators.models.DisaggregationType.objects.filter', 'DisaggregationType.objects.filter', ([], {'country': 'country'}), '(country=country)\n', (24354, 24371), False, 'from indicators.models import Indicator, IndicatorType, Result, PeriodicTarget, Level, DisaggregationType, DisaggregatedValue, LevelTier\n'), ((2053, 2081), 'copy.deepcopy', 'deepcopy', (['self.sample_levels'], {}), '(self.sample_levels)\n', (2061, 2081), False, 'from copy import deepcopy\n'), ((2242, 2267), 'indicators.models.LevelTier.get_templates', 'LevelTier.get_templates', ([], {}), '()\n', (2265, 2267), False, 'from indicators.models import Indicator, IndicatorType, Result, PeriodicTarget, Level, DisaggregationType, DisaggregatedValue, LevelTier\n'), ((7165, 7185), 'workflow.models.Sector.objects.all', 'Sector.objects.all', ([], {}), '()\n', (7183, 7185), False, 'from workflow.models import Program, Country, Organization, TolaUser, SiteProfile, Sector\n'), ((19003, 19015), 'datetime.date.today', 'date.today', ([], {}), '()\n', (19013, 19015), False, 'from datetime import date, timedelta\n'), ((19085, 19219), 'indicators.models.Result', 'Result', ([], {'periodic_target': 'pt', 'indicator': 'self.indicator', 'program': 'self.program', 'achieved': 'achieved_value', 'date_collected': 'date_collected'}), '(periodic_target=pt, indicator=self.indicator, program=self.program,\n achieved=achieved_value, date_collected=date_collected)\n', (19091, 19219), False, 'from indicators.models import Indicator, IndicatorType, Result, PeriodicTarget, Level, DisaggregationType, DisaggregatedValue, LevelTier\n'), ((947, 959), 'datetime.date.today', 'date.today', ([], {}), '()\n', (957, 959), False, 'from datetime import date, timedelta\n'), ((962, 987), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(-18)'}), '(months=-18)\n', (975, 987), False, 'from dateutil.relativedelta import relativedelta\n'), ((6779, 6828), 'indicators.models.Level.objects.filter', 'Level.objects.filter', ([], {'program__id': 'self.program.id'}), '(program__id=self.program.id)\n', (6799, 6828), False, 'from indicators.models import Indicator, IndicatorType, Result, PeriodicTarget, Level, DisaggregationType, DisaggregatedValue, LevelTier\n'), ((17947, 17959), 'datetime.date.today', 'date.today', ([], {}), '()\n', (17957, 17959), False, 'from datetime import date, timedelta\n'), ((21857, 21919), 'indicators.models.DisaggregatedValue', 'DisaggregatedValue', ([], {'category': 'label', 'value': 'value', 'result': 'result'}), '(category=label, value=value, result=result)\n', (21875, 21919), False, 'from indicators.models import Indicator, IndicatorType, Result, PeriodicTarget, Level, DisaggregationType, DisaggregatedValue, LevelTier\n'), ((1063, 1088), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(+32)'}), '(months=+32)\n', (1076, 1088), False, 'from dateutil.relativedelta import relativedelta\n'), ((1670, 1693), 'random.randint', 
'random.randint', (['(1)', '(9999)'], {}), '(1, 9999)\n', (1684, 1693), False, 'import random\n'), ((13332, 13346), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (13344, 13346), False, 'from django.utils import timezone\n'), ((16621, 16656), 'math.floor', 'math.floor', (['(target_start / pt_count)'], {}), '(target_start / pt_count)\n', (16631, 16656), False, 'import math\n'), ((20700, 20746), 'random.choice', 'random.choice', (['self.indicator_disagg_labelsets'], {}), '(self.indicator_disagg_labelsets)\n', (20713, 20746), False, 'import random\n'), ((22571, 22601), 'random.randrange', 'random.randrange', (['(0)', 'max_value'], {}), '(0, max_value)\n', (22587, 22601), False, 'import random\n'), ((14452, 14466), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (14464, 14466), False, 'from django.utils import timezone\n'), ((14726, 14740), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (14738, 14740), False, 'from django.utils import timezone\n'), ((20940, 20991), 'random.sample', 'random.sample', (['self.indicator_disagg_labelsets'], {'k': '(2)'}), '(self.indicator_disagg_labelsets, k=2)\n', (20953, 20991), False, 'import random\n'), ((13677, 13691), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (13689, 13691), False, 'from django.utils import timezone\n'), ((15718, 15758), 'math.floor', 'math.floor', (['(target_start / pt_count / 10)'], {}), '(target_start / pt_count / 10)\n', (15728, 15758), False, 'import math\n'), ((15975, 16015), 'math.floor', 'math.floor', (['(target_start / pt_count / 10)'], {}), '(target_start / pt_count / 10)\n', (15985, 16015), False, 'import math\n'), ((6530, 6579), 'indicators.models.Indicator.objects.filter', 'Indicator.objects.filter', ([], {'old_level__isnull': '(False)'}), '(old_level__isnull=False)\n', (6554, 6579), False, 'from indicators.models import Indicator, IndicatorType, Result, PeriodicTarget, Level, DisaggregationType, DisaggregatedValue, LevelTier\n')]
|
# this file is for custom template tags
from django import template
from ..models import Post
from django.db.models import Count
# the two imports below are used for custom filters
from django.utils.safestring import mark_safe
import markdown
register = template.Library()
'''
Each module that contains template tags needs to define a variable called
register to be a valid tag library. This variable is an instance of template.Library,
and it's used to register your own template tags and filters.
'''
@register.simple_tag
def total_posts():  # return the number of posts published
return Post.published.count()
@register.inclusion_tag('blog/post/latest_posts.html')  # you should create this template file to render the returned data
def show_latest_posts(count=5):
latest_posts = Post.published.order_by('-publish')[:count]
return {'latest_posts': latest_posts}
@register.simple_tag
def get_most_commented_posts(count=5):
return Post.published.annotate(
total_comments=Count('comments')
).order_by('-total_comments')[:count]
'''
After adding a new template tags module, you will need to restart
the Django development server in order to use the new tags and
filters in templates.
'''
'''
custom tags types :
1-simple_tag: Processes the data and returns a string
2-inclusion_tag: Processes the data and returns a rendered template
'''
'''
To use custom tags in HTML templates, load this module with {% load blog_tags %} at the top of the file.
To call any tag inside an HTML file, just use {% functionName %} anywhere in the template.
If the tag takes parameters, call it as {% functionName param1 param2 %} and so on (see the example block below).
'''
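'''
Example usage in a template (illustrative; it assumes this module is saved as
blog_tags.py inside a templatetags package, and that `post` is a Post object):
{% load blog_tags %}
<p>{% total_posts %} posts published so far.</p>
{% show_latest_posts 3 %}
{{ post.body|markdown }}
'''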
@register.filter(name='markdown')
def markdown_format(text):
return mark_safe(markdown.markdown(text))
|
[
"django.db.models.Count",
"django.template.Library",
"markdown.markdown"
] |
[((242, 260), 'django.template.Library', 'template.Library', ([], {}), '()\n', (258, 260), False, 'from django import template\n'), ((1813, 1836), 'markdown.markdown', 'markdown.markdown', (['text'], {}), '(text)\n', (1830, 1836), False, 'import markdown\n'), ((1031, 1048), 'django.db.models.Count', 'Count', (['"""comments"""'], {}), "('comments')\n", (1036, 1048), False, 'from django.db.models import Count\n')]
|
import logging
import re
from typing import Optional
import ujson
from IPy import IP
from ordered_set import OrderedSet
from irrd import __version__
from irrd.conf import get_setting, RPKI_IRR_PSEUDO_SOURCE
from irrd.mirroring.nrtm_generator import NRTMGenerator, NRTMGeneratorException
from irrd.rpki.status import RPKIStatus
from irrd.rpsl.rpsl_objects import (OBJECT_CLASS_MAPPING, RPKI_RELEVANT_OBJECT_CLASSES)
from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException
from irrd.storage.database_handler import DatabaseHandler, RPSLDatabaseResponse
from irrd.storage.preload import Preloader
from irrd.storage.queries import DatabaseStatusQuery
from irrd.utils.validators import parse_as_number, ValidationError
from .query_response import WhoisQueryResponseType, WhoisQueryResponseMode, WhoisQueryResponse
from ..access_check import is_client_permitted
logger = logging.getLogger(__name__)
class WhoisQueryParser:
"""
Parser for all whois-style queries.
This parser distinguishes RIPE-style, e.g. '-K 192.0.2.1' or '-i mnt-by FOO'
from IRRD-style, e.g. '!oFOO'. Query processing is mostly handled by
QueryResolver, with a few exceptions that are whois-specific.
Some query flags, particularly -k/!! and -s/!s retain state across queries,
so a single instance of this object should be created per session, with
handle_query() being called for each individual query.
"""
def __init__(self, client_ip: str, client_str: str, preloader: Preloader,
database_handler: DatabaseHandler) -> None:
self.multiple_command_mode = False
self.timeout = 30
self.key_fields_only = False
self.client_ip = client_ip
self.client_str = client_str
self.database_handler = database_handler
self.query_resolver = QueryResolver(
preloader=preloader,
database_handler=database_handler,
)
def handle_query(self, query: str) -> WhoisQueryResponse:
"""
Process a single query. Always returns a WhoisQueryResponse object.
Not thread safe - only one call must be made to this method at the same time.
"""
self.key_fields_only = False
if query.startswith('!'):
try:
return self.handle_irrd_command(query[1:])
except InvalidQueryException as exc:
logger.info(f'{self.client_str}: encountered parsing error while parsing query "{query}": {exc}')
return WhoisQueryResponse(
response_type=WhoisQueryResponseType.ERROR_USER,
mode=WhoisQueryResponseMode.IRRD,
result=str(exc)
)
except Exception as exc:
logger.error(f'An exception occurred while processing whois query "{query}": {exc}', exc_info=exc)
return WhoisQueryResponse(
response_type=WhoisQueryResponseType.ERROR_INTERNAL,
mode=WhoisQueryResponseMode.IRRD,
result='An internal error occurred while processing this query.'
)
try:
return self.handle_ripe_command(query)
except InvalidQueryException as exc:
logger.info(f'{self.client_str}: encountered parsing error while parsing query "{query}": {exc}')
return WhoisQueryResponse(
response_type=WhoisQueryResponseType.ERROR_USER,
mode=WhoisQueryResponseMode.RIPE,
result=str(exc)
)
except Exception as exc:
logger.error(f'An exception occurred while processing whois query "{query}": {exc}', exc_info=exc)
return WhoisQueryResponse(
response_type=WhoisQueryResponseType.ERROR_INTERNAL,
mode=WhoisQueryResponseMode.RIPE,
result='An internal error occurred while processing this query.'
)
def handle_irrd_command(self, full_command: str) -> WhoisQueryResponse:
"""Handle an IRRD-style query. full_command should not include the first exclamation mark. """
if not full_command:
raise InvalidQueryException('Missing IRRD command')
command = full_command[0]
parameter = full_command[1:]
response_type = WhoisQueryResponseType.SUCCESS
result = None
        # 'a' is not tested here because a missing parameter is already handled in handle_irrd_routes_for_as_set
queries_with_parameter = list('tg6ijmnors')
if command in queries_with_parameter and not parameter:
raise InvalidQueryException(f'Missing parameter for {command} query')
if command == '!':
self.multiple_command_mode = True
result = None
response_type = WhoisQueryResponseType.NO_RESPONSE
elif full_command.upper() == 'FNO-RPKI-FILTER':
self.query_resolver.disable_rpki_filter()
result = 'Filtering out RPKI invalids is disabled for !r and RIPE style ' \
'queries for the rest of this connection.'
elif full_command.upper() == 'FNO-SCOPE-FILTER':
self.query_resolver.disable_out_of_scope_filter()
result = 'Filtering out out-of-scope objects is disabled for !r and RIPE style ' \
'queries for the rest of this connection.'
elif command == 'v':
result = self.handle_irrd_version()
elif command == 't':
self.handle_irrd_timeout_update(parameter)
elif command == 'g':
result = self.handle_irrd_routes_for_origin_v4(parameter)
if not result:
response_type = WhoisQueryResponseType.KEY_NOT_FOUND
elif command == '6':
result = self.handle_irrd_routes_for_origin_v6(parameter)
if not result:
response_type = WhoisQueryResponseType.KEY_NOT_FOUND
elif command == 'a':
result = self.handle_irrd_routes_for_as_set(parameter)
if not result:
response_type = WhoisQueryResponseType.KEY_NOT_FOUND
elif command == 'i':
result = self.handle_irrd_set_members(parameter)
if not result:
response_type = WhoisQueryResponseType.KEY_NOT_FOUND
elif command == 'j':
result = self.handle_irrd_database_serial_range(parameter)
elif command == 'J':
result = self.handle_irrd_database_status(parameter)
elif command == 'm':
result = self.handle_irrd_exact_key(parameter)
if not result:
response_type = WhoisQueryResponseType.KEY_NOT_FOUND
elif command == 'n':
self.handle_user_agent(parameter)
elif command == 'o':
result = self.handle_inverse_attr_search('mnt-by', parameter)
if not result:
response_type = WhoisQueryResponseType.KEY_NOT_FOUND
elif command == 'r':
result = self.handle_irrd_route_search(parameter)
if not result:
response_type = WhoisQueryResponseType.KEY_NOT_FOUND
elif command == 's':
result = self.handle_irrd_sources_list(parameter)
else:
raise InvalidQueryException(f'Unrecognised command: {command}')
return WhoisQueryResponse(
response_type=response_type,
mode=WhoisQueryResponseMode.IRRD,
result=result,
)
def handle_irrd_timeout_update(self, timeout: str) -> None:
"""!timeout query - update timeout in connection"""
try:
timeout_value = int(timeout)
except ValueError:
raise InvalidQueryException(f'Invalid value for timeout: {timeout}')
if timeout_value > 0 and timeout_value <= 1000:
self.timeout = timeout_value
else:
raise InvalidQueryException(f'Invalid value for timeout: {timeout}')
def handle_irrd_routes_for_origin_v4(self, origin: str) -> str:
"""!g query - find all originating IPv4 prefixes from an origin, e.g. !gAS65537"""
return self._routes_for_origin(origin, 4)
def handle_irrd_routes_for_origin_v6(self, origin: str) -> str:
"""!6 query - find all originating IPv6 prefixes from an origin, e.g. !6as65537"""
return self._routes_for_origin(origin, 6)
def _routes_for_origin(self, origin: str, ip_version: Optional[int]=None) -> str:
"""
Resolve all route(6)s prefixes for an origin, returning a space-separated list
of all originating prefixes, not including duplicates.
"""
try:
origin_formatted, _ = parse_as_number(origin)
except ValidationError as ve:
raise InvalidQueryException(str(ve))
prefixes = self.query_resolver.routes_for_origin(origin_formatted, ip_version)
return ' '.join(prefixes)
def handle_irrd_routes_for_as_set(self, set_name: str) -> str:
"""
!a query - find all originating prefixes for all members of an AS-set, e.g. !a4AS-FOO or !a6AS-FOO
"""
ip_version: Optional[int] = None
if set_name.startswith('4'):
set_name = set_name[1:]
ip_version = 4
elif set_name.startswith('6'):
set_name = set_name[1:]
ip_version = 6
if not set_name:
raise InvalidQueryException('Missing required set name for A query')
prefixes = self.query_resolver.routes_for_as_set(set_name, ip_version)
return ' '.join(prefixes)
def handle_irrd_set_members(self, parameter: str) -> str:
"""
!i query - find all members of an as-set or route-set, possibly recursively.
e.g. !iAS-FOO for non-recursive, !iAS-FOO,1 for recursive
"""
recursive = False
if parameter.endswith(',1'):
recursive = True
parameter = parameter[:-2]
members = self.query_resolver.members_for_set(parameter, recursive=recursive)
return ' '.join(members)
def handle_irrd_database_serial_range(self, parameter: str) -> str:
"""
!j query - database serial range
This query is legacy and only available in whois, so resolved
directly here instead of in the query resolver.
"""
if parameter == '-*':
sources = self.query_resolver.sources_default if self.query_resolver.sources_default else self.query_resolver.all_valid_sources
else:
sources = [s.upper() for s in parameter.split(',')]
invalid_sources = [s for s in sources if s not in self.query_resolver.all_valid_sources]
query = DatabaseStatusQuery().sources(sources)
query_results = self.database_handler.execute_query(query, refresh_on_error=True)
result_txt = ''
for query_result in query_results:
source = query_result['source'].upper()
keep_journal = 'Y' if get_setting(f'sources.{source}.keep_journal') else 'N'
serial_newest = query_result['serial_newest_mirror']
fields = [
source,
keep_journal,
f'0-{serial_newest}' if serial_newest else '-',
]
if query_result['serial_last_export']:
fields.append(str(query_result['serial_last_export']))
result_txt += ':'.join(fields) + '\n'
for invalid_source in invalid_sources:
result_txt += f'{invalid_source.upper()}:X:Database unknown\n'
return result_txt.strip()
def handle_irrd_database_status(self, parameter: str) -> str:
"""!J query - database status"""
if parameter == '-*':
sources = None
else:
sources = [s.upper() for s in parameter.split(',')]
results = self.query_resolver.database_status(sources)
return ujson.dumps(results, indent=4)
def handle_irrd_exact_key(self, parameter: str):
"""!m query - exact object key lookup, e.g. !maut-num,AS65537"""
try:
object_class, rpsl_pk = parameter.split(',', maxsplit=1)
except ValueError:
raise InvalidQueryException(f'Invalid argument for object lookup: {parameter}')
query = self.query_resolver.key_lookup(object_class, rpsl_pk)
return self._flatten_query_output(query)
def handle_irrd_route_search(self, parameter: str):
"""
!r query - route search with various options:
        !r192.0.2.0/24 returns all exact matching objects
!r192.0.2.0/24,o returns space-separated origins of all exact matching objects
!r192.0.2.0/24,l returns all one-level less specific objects, not including exact
!r192.0.2.0/24,L returns all less specific objects, including exact
!r192.0.2.0/24,M returns all more specific objects, not including exact
"""
option: Optional[str] = None
if ',' in parameter:
address, option = parameter.split(',')
else:
address = parameter
try:
address = IP(address)
except ValueError:
raise InvalidQueryException(f'Invalid input for route search: {parameter}')
lookup_types = {
None: RouteLookupType.EXACT,
'o': RouteLookupType.EXACT,
'l': RouteLookupType.LESS_SPECIFIC_ONE_LEVEL,
'L': RouteLookupType.LESS_SPECIFIC_WITH_EXACT,
'M': RouteLookupType.MORE_SPECIFIC_WITHOUT_EXACT,
}
try:
lookup_type = lookup_types[option]
except KeyError:
raise InvalidQueryException(f'Invalid route search option: {option}')
result = self.query_resolver.route_search(address, lookup_type)
if option == 'o':
prefixes = [r['parsed_data']['origin'] for r in result]
return ' '.join(prefixes)
return self._flatten_query_output(result)
def handle_irrd_sources_list(self, parameter: str) -> Optional[str]:
"""
!s query - set used sources
!s-lc returns all enabled sources, space separated
!sripe,nttcom limits sources to ripe and nttcom
"""
if parameter == '-lc':
return ','.join(self.query_resolver.sources)
sources = parameter.upper().split(',')
self.query_resolver.set_query_sources(sources)
return None
def handle_irrd_version(self):
"""!v query - return version"""
return f'IRRd -- version {__version__}'
def handle_ripe_command(self, full_query: str) -> WhoisQueryResponse:
"""
Process RIPE-style queries. Any query that is not explicitly an IRRD-style
query (i.e. starts with exclamation mark) is presumed to be a RIPE query.
"""
full_query = re.sub(' +', ' ', full_query)
components = full_query.strip().split(' ')
result = None
response_type = WhoisQueryResponseType.SUCCESS
while len(components):
component = components.pop(0)
if component.startswith('-'):
command = component[1:]
try:
if command == 'k':
self.multiple_command_mode = True
elif command in ['l', 'L', 'M', 'x']:
result = self.handle_ripe_route_search(command, components.pop(0))
if not result:
response_type = WhoisQueryResponseType.KEY_NOT_FOUND
break
elif command == 'i':
result = self.handle_inverse_attr_search(components.pop(0), components.pop(0))
if not result:
response_type = WhoisQueryResponseType.KEY_NOT_FOUND
break
elif command == 's':
self.handle_ripe_sources_list(components.pop(0))
elif command == 'a':
self.handle_ripe_sources_list(None)
elif command == 'T':
self.handle_ripe_restrict_object_class(components.pop(0))
elif command == 't':
result = self.handle_ripe_request_object_template(components.pop(0))
break
elif command == 'K':
self.handle_ripe_key_fields_only()
elif command == 'V':
self.handle_user_agent(components.pop(0))
elif command == 'g':
result = self.handle_nrtm_request(components.pop(0))
elif command in ['F', 'r']:
                        continue  # These flags disable recursion, but IRRd never performs recursion anyway
else:
raise InvalidQueryException(f'Unrecognised flag/search: {command}')
except IndexError:
raise InvalidQueryException(f'Missing argument for flag/search: {command}')
else: # assume query to be a free text search
result = self.handle_ripe_text_search(component)
return WhoisQueryResponse(
response_type=response_type,
mode=WhoisQueryResponseMode.RIPE,
result=result,
)
def handle_ripe_route_search(self, command: str, parameter: str) -> str:
"""
-l/L/M/x query - route search for:
        -x 192.0.2.0/24 returns all exact matching objects
        -l 192.0.2.0/24 returns all one-level less specific objects, not including exact
        -L 192.0.2.0/24 returns all less specific objects, including exact
        -M 192.0.2.0/24 returns all more specific objects, not including exact
"""
try:
address = IP(parameter)
except ValueError:
raise InvalidQueryException(f'Invalid input for route search: {parameter}')
lookup_types = {
'x': RouteLookupType.EXACT,
'l': RouteLookupType.LESS_SPECIFIC_ONE_LEVEL,
'L': RouteLookupType.LESS_SPECIFIC_WITH_EXACT,
'M': RouteLookupType.MORE_SPECIFIC_WITHOUT_EXACT,
}
lookup_type = lookup_types[command]
result = self.query_resolver.route_search(address, lookup_type)
return self._flatten_query_output(result)
def handle_ripe_sources_list(self, sources_list: Optional[str]) -> None:
"""-s/-a parameter - set sources list. Empty list enables all sources. """
if sources_list:
sources = sources_list.upper().split(',')
self.query_resolver.set_query_sources(sources)
else:
self.query_resolver.set_query_sources(None)
def handle_ripe_restrict_object_class(self, object_classes) -> None:
"""-T parameter - restrict object classes for this query, comma-seperated"""
self.query_resolver.set_object_class_filter_next_query(object_classes.split(','))
def handle_ripe_request_object_template(self, object_class) -> str:
"""-t query - return the RPSL template for an object class"""
return self.query_resolver.rpsl_object_template(object_class)
def handle_ripe_key_fields_only(self) -> None:
"""-K paramater - only return primary key and members fields"""
self.key_fields_only = True
def handle_ripe_text_search(self, value: str) -> str:
result = self.query_resolver.rpsl_text_search(value)
return self._flatten_query_output(result)
def handle_user_agent(self, user_agent: str):
"""-V/!n parameter/query - set a user agent for the client"""
self.query_resolver.user_agent = user_agent
logger.info(f'{self.client_str}: user agent set to: {user_agent}')
def handle_nrtm_request(self, param):
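        # param is expected as SOURCE:version:serial_range, e.g. 'EXAMPLE:3:1-LAST'
        # (the source name here is illustrative; it must be a configured source)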
try:
source, version, serial_range = param.split(':')
except ValueError:
raise InvalidQueryException('Invalid parameter: must contain three elements')
try:
serial_start, serial_end = serial_range.split('-')
serial_start = int(serial_start)
if serial_end == 'LAST':
serial_end = None
else:
serial_end = int(serial_end)
except ValueError:
raise InvalidQueryException(f'Invalid serial range: {serial_range}')
if version not in ['1', '3']:
raise InvalidQueryException(f'Invalid NRTM version: {version}')
source = source.upper()
if source not in self.query_resolver.all_valid_sources:
raise InvalidQueryException(f'Unknown source: {source}')
if not is_client_permitted(self.client_ip, f'sources.{source}.nrtm_access_list'):
raise InvalidQueryException('Access denied')
try:
return NRTMGenerator().generate(source, version, serial_start, serial_end, self.database_handler)
except NRTMGeneratorException as nge:
raise InvalidQueryException(str(nge))
def handle_inverse_attr_search(self, attribute: str, value: str) -> str:
"""
-i/!o query - inverse search for attribute values
e.g. `-i mnt-by FOO` finds all objects where (one of the) maintainer(s) is FOO,
as does `!oFOO`. Restricted to designated lookup fields.
"""
result = self.query_resolver.rpsl_attribute_search(attribute, value)
return self._flatten_query_output(result)
def _flatten_query_output(self, query_response: RPSLDatabaseResponse) -> str:
"""
Flatten an RPSL database response into a string with object text
for easy passing to a WhoisQueryResponse.
"""
if self.key_fields_only:
result = self._filter_key_fields(query_response)
else:
result = ''
for obj in query_response:
result += obj['object_text']
if (
self.query_resolver.rpki_aware and
obj['source'] != RPKI_IRR_PSEUDO_SOURCE and
obj['object_class'] in RPKI_RELEVANT_OBJECT_CLASSES
):
comment = ''
if obj['rpki_status'] == RPKIStatus.not_found:
comment = ' # No ROAs found, or RPKI validation not enabled for source'
result += f'rpki-ov-state: {obj["rpki_status"].name}{comment}\n'
result += '\n'
return result.strip('\n\r')
def _filter_key_fields(self, query_response) -> str:
results: OrderedSet[str] = OrderedSet()
for obj in query_response:
result = ''
rpsl_object_class = OBJECT_CLASS_MAPPING[obj['object_class']]
fields_included = rpsl_object_class.pk_fields + ['members', 'mp-members']
for field_name in fields_included:
field_data = obj['parsed_data'].get(field_name)
if field_data:
if isinstance(field_data, list):
for item in field_data:
result += f'{field_name}: {item}\n'
else:
result += f'{field_name}: {field_data}\n'
results.add(result)
return '\n'.join(results)
|
[
"irrd.utils.validators.parse_as_number",
"irrd.server.query_resolver.InvalidQueryException",
"irrd.mirroring.nrtm_generator.NRTMGenerator",
"irrd.conf.get_setting",
"ordered_set.OrderedSet",
"irrd.server.query_resolver.QueryResolver",
"IPy.IP",
"irrd.storage.queries.DatabaseStatusQuery",
"ujson.dumps",
"re.sub",
"logging.getLogger"
] |
[((905, 932), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (922, 932), False, 'import logging\n'), ((1848, 1917), 'irrd.server.query_resolver.QueryResolver', 'QueryResolver', ([], {'preloader': 'preloader', 'database_handler': 'database_handler'}), '(preloader=preloader, database_handler=database_handler)\n', (1861, 1917), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((11927, 11957), 'ujson.dumps', 'ujson.dumps', (['results'], {'indent': '(4)'}), '(results, indent=4)\n', (11938, 11957), False, 'import ujson\n'), ((14868, 14897), 're.sub', 're.sub', (['""" +"""', '""" """', 'full_query'], {}), "(' +', ' ', full_query)\n", (14874, 14897), False, 'import re\n'), ((22677, 22689), 'ordered_set.OrderedSet', 'OrderedSet', ([], {}), '()\n', (22687, 22689), False, 'from ordered_set import OrderedSet\n'), ((4202, 4247), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['"""Missing IRRD command"""'], {}), "('Missing IRRD command')\n", (4223, 4247), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((4625, 4688), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['f"""Missing parameter for {command} query"""'], {}), "(f'Missing parameter for {command} query')\n", (4646, 4688), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((7918, 7980), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['f"""Invalid value for timeout: {timeout}"""'], {}), "(f'Invalid value for timeout: {timeout}')\n", (7939, 7980), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((8709, 8732), 'irrd.utils.validators.parse_as_number', 'parse_as_number', (['origin'], {}), '(origin)\n', (8724, 8732), False, 'from irrd.utils.validators import parse_as_number, ValidationError\n'), ((9428, 9490), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['"""Missing required set name for A query"""'], {}), "('Missing required set name for A query')\n", (9449, 9490), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((13148, 13159), 'IPy.IP', 'IP', (['address'], {}), '(address)\n', (13150, 13159), False, 'from IPy import IP\n'), ((17900, 17913), 'IPy.IP', 'IP', (['parameter'], {}), '(parameter)\n', (17902, 17913), False, 'from IPy import IP\n'), ((20514, 20571), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['f"""Invalid NRTM version: {version}"""'], {}), "(f'Invalid NRTM version: {version}')\n", (20535, 20571), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((20687, 20737), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['f"""Unknown source: {source}"""'], {}), "(f'Unknown source: {source}')\n", (20708, 20737), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((20847, 20885), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['"""Access denied"""'], {}), "('Access denied')\n", (20868, 20885), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((7725, 7787), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['f"""Invalid value for timeout: {timeout}"""'], {}), "(f'Invalid value for timeout: {timeout}')\n", (7746, 7787), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((10719, 10740), 'irrd.storage.queries.DatabaseStatusQuery', 'DatabaseStatusQuery', ([], {}), '()\n', (10738, 10740), False, 'from irrd.storage.queries import DatabaseStatusQuery\n'), ((11002, 11047), 'irrd.conf.get_setting', 'get_setting', (['f"""sources.{source}.keep_journal"""'], {}), "(f'sources.{source}.keep_journal')\n", (11013, 11047), False, 'from irrd.conf import get_setting, RPKI_IRR_PSEUDO_SOURCE\n'), ((12212, 12285), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['f"""Invalid argument for object lookup: {parameter}"""'], {}), "(f'Invalid argument for object lookup: {parameter}')\n", (12233, 12285), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((13205, 13274), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['f"""Invalid input for route search: {parameter}"""'], {}), "(f'Invalid input for route search: {parameter}')\n", (13226, 13274), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((13674, 13737), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['f"""Invalid route search option: {option}"""'], {}), "(f'Invalid route search option: {option}')\n", (13695, 13737), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((17959, 18028), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['f"""Invalid input for route search: {parameter}"""'], {}), "(f'Invalid input for route search: {parameter}')\n", (17980, 18028), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((20021, 20092), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['"""Invalid parameter: must contain three elements"""'], {}), "('Invalid parameter: must contain three elements')\n", (20042, 20092), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((20394, 20456), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['f"""Invalid serial range: {serial_range}"""'], {}), "(f'Invalid serial range: {serial_range}')\n", (20415, 20456), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((20919, 20934), 'irrd.mirroring.nrtm_generator.NRTMGenerator', 'NRTMGenerator', ([], {}), '()\n', (20932, 20934), False, 'from irrd.mirroring.nrtm_generator import NRTMGenerator, NRTMGeneratorException\n'), ((17056, 17125), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['f"""Missing argument for flag/search: {command}"""'], {}), "(f'Missing argument for flag/search: {command}')\n", (17077, 17125), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((16933, 16994), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['f"""Unrecognised flag/search: {command}"""'], {}), "(f'Unrecognised flag/search: {command}')\n", (16954, 16994), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n'), ((7283, 7340), 'irrd.server.query_resolver.InvalidQueryException', 'InvalidQueryException', (['f"""Unrecognised command: {command}"""'], {}), "(f'Unrecognised command: {command}')\n", (7304, 7340), False, 'from irrd.server.query_resolver import QueryResolver, RouteLookupType, InvalidQueryException\n')]
|
#!/usr/bin/env python3
"""
Delete old deployments and services with test-prefixed names. This is used to
clean up the Telepresence test cluster, as Telepresence tests currently leak.
"""
import argparse
import datetime
import json
from subprocess import check_output
from typing import Dict, List
def get_kubectl() -> List[str]:
"""Get correct kubectl command"""
k8s_namespace = str(
check_output([
"kubectl", "config", "view", "--minify=true",
"-o=jsonpath={.contexts[0].context.namespace}"
]).strip(), "ascii"
)
if k8s_namespace:
return ["kubectl", "--namespace", k8s_namespace]
return ["kubectl"]
KUBECTL = get_kubectl()
def get_now() -> datetime.datetime:
"""Get current date/time in UTC"""
return datetime.datetime.now(tz=datetime.timezone.utc)
def parse_k8s_timestamp(timestamp: str) -> datetime.datetime:
"""Get date/time in UTC from k8s timestamp"""
fmt = "%Y-%m-%dT%H:%M:%SZ"
naive = datetime.datetime.strptime(timestamp, fmt)
return naive.replace(tzinfo=datetime.timezone.utc)
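# Example (hypothetical timestamp): parse_k8s_timestamp("2022-01-02T03:04:05Z")
# returns datetime.datetime(2022, 1, 2, 3, 4, 5, tzinfo=datetime.timezone.utc).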
def get_kubectl_json(cmd: List[str]) -> Dict:
"""Call kubectl and parse resulting JSON"""
output = str(check_output(KUBECTL + cmd + ["-o", "json"]), "utf-8")
return json.loads(output)
def get_resources(kind: str, prefix="",
min_age=datetime.timedelta(seconds=0)) -> List[str]:
"""
Return names of k8s resources with the given name prefix and minimum age
"""
now = get_now()
resources = get_kubectl_json(["get", kind])["items"]
names = []
for resource in resources:
name = resource["metadata"]["name"]
if kind == "svc" and name == "kubernetes":
continue
if not name.startswith(prefix):
continue
timestamp_str = resource["metadata"]["creationTimestamp"]
timestamp = parse_k8s_timestamp(timestamp_str)
age = now - timestamp
if age < min_age:
continue
names.append("{}/{}".format(kind, name))
return names
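# Example (hypothetical resource name): get_resources("deploy", "testing-",
# datetime.timedelta(hours=1)) might return ["deploy/testing-abc"].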
def seconds(value: str) -> datetime.timedelta:
"""Return a timedelta with the given number of seconds"""
try:
return datetime.timedelta(seconds=int(value))
except ValueError:
message = "Invalid age in seconds: {}".format(value)
raise argparse.ArgumentTypeError(message)
def main():
"""Clean up the current Kubernetes cluster"""
parser = argparse.ArgumentParser(
        allow_abbrev=False,  # abbreviated flags could make adding new options backwards-incompatible
description=__doc__
)
parser.add_argument(
"--prefix",
default="testing-",
help="prefix for resource name [testing-]"
)
parser.add_argument(
"--min-age",
type=seconds,
default="86400",
help="minimum age in seconds"
)
parser.add_argument(
"--dry-run", action="store_true", help="don't really delete anything"
)
args = parser.parse_args()
names = [
name
for kind in ("svc", "deploy")
for name in get_resources(kind, args.prefix, args.min_age)
]
if not names:
print("Nothing to clean up.")
return
if args.dry_run:
print("Would clean up:")
else:
print("Cleaning up:")
for name in names:
print(" {}".format(name))
if not args.dry_run:
check_output(KUBECTL + ["delete"] + names)
if __name__ == "__main__":
main()
|
[
"argparse.ArgumentParser",
"json.loads",
"subprocess.check_output",
"datetime.datetime.strptime",
"datetime.timedelta",
"datetime.datetime.now",
"argparse.ArgumentTypeError"
] |
[((785, 832), 'datetime.datetime.now', 'datetime.datetime.now', ([], {'tz': 'datetime.timezone.utc'}), '(tz=datetime.timezone.utc)\n', (806, 832), False, 'import datetime\n'), ((990, 1032), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['timestamp', 'fmt'], {}), '(timestamp, fmt)\n', (1016, 1032), False, 'import datetime\n'), ((1267, 1285), 'json.loads', 'json.loads', (['output'], {}), '(output)\n', (1277, 1285), False, 'import json\n'), ((1354, 1383), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(0)'}), '(seconds=0)\n', (1372, 1383), False, 'import datetime\n'), ((2441, 2505), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'allow_abbrev': '(False)', 'description': '__doc__'}), '(allow_abbrev=False, description=__doc__)\n', (2464, 2505), False, 'import argparse\n'), ((1201, 1245), 'subprocess.check_output', 'check_output', (["(KUBECTL + cmd + ['-o', 'json'])"], {}), "(KUBECTL + cmd + ['-o', 'json'])\n", (1213, 1245), False, 'from subprocess import check_output\n'), ((3383, 3425), 'subprocess.check_output', 'check_output', (["(KUBECTL + ['delete'] + names)"], {}), "(KUBECTL + ['delete'] + names)\n", (3395, 3425), False, 'from subprocess import check_output\n'), ((2328, 2363), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['message'], {}), '(message)\n', (2354, 2363), False, 'import argparse\n'), ((403, 515), 'subprocess.check_output', 'check_output', (["['kubectl', 'config', 'view', '--minify=true',\n '-o=jsonpath={.contexts[0].context.namespace}']"], {}), "(['kubectl', 'config', 'view', '--minify=true',\n '-o=jsonpath={.contexts[0].context.namespace}'])\n", (415, 515), False, 'from subprocess import check_output\n')]
|
"""
# Copyright 2022 Red Hat
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
import logging
from cibyl.models.attribute import AttributeDictValue
LOG = logging.getLogger(__name__)
def subset(dictionary, keys):
"""Creates a new dictionary from items from another one. A new
dictionary is formed by extracting the keys explicitly indicated. If one of
the given keys is not present on the dictionary, it is ignored. The
original dictionary is left untouched.
:param dictionary: The dictionary to extract items from.
:type dictionary: dict
:param keys: The keys to get from the dictionary.
:type keys: list
:return: The new dictionary.
:rtype: dict
"""
result = {}
for key in keys:
# Do not crash if a key is not present
if key not in dictionary:
message = "Ignoring key '%s' not found in dictionary: %s"
LOG.debug(message, key, dictionary)
continue
result[key] = dictionary[key]
return result
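# Example: subset({'a': 1, 'b': 2, 'c': 3}, ['a', 'c', 'x']) returns
# {'a': 1, 'c': 3}; the absent key 'x' is logged and ignored.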
def nsubset(dictionary, keys):
"""Creates a new dictionary from items from another one. The 'n' stands
for 'negative', meaning that the keys form an excluded list. All keys
from the other dictionary will be extracted except for the ones explicitly
indicated. The original dictionary is left untouched.
:param dictionary: The dictionary to extract items from.
:type dictionary: dict
:param keys: The keys to not get from the dictionary.
:type keys: list
:return: The new dictionary.
:rtype: dict
"""
result = {}
for key in dictionary.keys():
# Ignore keys on the excluded list
if key in keys:
continue
result[key] = dictionary[key]
return result
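# Example: nsubset({'a': 1, 'b': 2, 'c': 3}, ['b']) returns {'a': 1, 'c': 3}.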
def chunk_dictionary_into_lists(dictionary: dict, size: int = 300) -> list:
"""It returns a list of sub lists. Each one with the size indicated
in the 'size' parameter where every element is the key of the dictionary
provided. If the size is less than the quantity provided, it creates
just one sublist with those keys.
"""
chunked_list = []
for chunk_max_value in range(
0,
len(list(dictionary.keys())),
size
):
chunked_list.append(
list(
dictionary.keys()
)[chunk_max_value:chunk_max_value + size]
)
return chunked_list
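# Example: chunk_dictionary_into_lists({'a': 1, 'b': 2, 'c': 3}, size=2)
# returns [['a', 'b'], ['c']].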
def intersect_models(dict1, dict2):
"""Combine two dictionaries that are returned from a source method call to
keep only those models that are present in both. It assumes that the models
present in both dictionaries are identical and takes them for the first
input dictionary.
:param dict1: The first dictionary with models.
:type dict1: dict
:param dict2: The second dictionary with models.
:type dict2: dict
:return: A new dictionary that contains only the models present in both
input dictionaries.
:rtype: dict
"""
intersection = dict1.keys() & dict2.keys()
models = {key: dict1[key] for key in intersection}
for key, model in models.items():
# make sure that all the information present in models present in both
# dictionaries is incorporated
model.merge(dict2[key])
return AttributeDictValue(dict1.name, attr_type=dict1.attr_type,
value=models)
|
[
"cibyl.models.attribute.AttributeDictValue",
"logging.getLogger"
] |
[((687, 714), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (704, 714), False, 'import logging\n'), ((3813, 3884), 'cibyl.models.attribute.AttributeDictValue', 'AttributeDictValue', (['dict1.name'], {'attr_type': 'dict1.attr_type', 'value': 'models'}), '(dict1.name, attr_type=dict1.attr_type, value=models)\n', (3831, 3884), False, 'from cibyl.models.attribute import AttributeDictValue\n')]
|
"""
15N T1
======
Analyzes 15N T1 experiments. This keeps the spin system purely in-phase
throughout, and is calculated using the (1n)×(1n), single-spin matrix,
where n is the number of states::
{ Iz(a), Iz(b), ... }
References
----------
Kay, Nicholson, Delaglio, Bax, and Torchia. J Mag Reson (1992) 97:359-375
Note
----
A sample configuration file for this module is available using the command::
$ chemex config relaxation_nz
"""
import functools as ft
import numpy as np
import chemex.experiments.helper as ceh
import chemex.helper as ch
import chemex.nmr.liouvillian as cnl
_SCHEMA = {
"type": "object",
"properties": {
"experiment": {
"type": "object",
"properties": {
"observed_state": {"type": "string", "pattern": "[a-z]", "default": "a"}
},
}
},
}
def read(config):
ch.validate(config, _SCHEMA)
config["basis"] = cnl.Basis(type="iz", spin_system="nh")
config["fit"] = _fit_this()
return ceh.load_experiment(config=config, pulse_seq_cls=PulseSeq)
def _fit_this():
return {
"rates": ["r1_i_{observed_state}"],
"model_free": ["tauc_{observed_state}", "s2_{observed_state}"],
}
class PulseSeq:
def __init__(self, config, propagator):
self.prop = propagator
settings = config["experiment"]
self.prop.detection = f"[iz_{settings['observed_state']}]"
@ft.lru_cache(maxsize=10000)
def calculate(self, times, params_local):
self.prop.update(params_local)
start = self.prop.get_equilibrium()
delays = self.prop.delays(times)
return np.array([self.prop.detect(delay @ start) for delay in delays])
|
[
"functools.lru_cache",
"chemex.helper.validate",
"chemex.nmr.liouvillian.Basis",
"chemex.experiments.helper.load_experiment"
] |
[((884, 912), 'chemex.helper.validate', 'ch.validate', (['config', '_SCHEMA'], {}), '(config, _SCHEMA)\n', (895, 912), True, 'import chemex.helper as ch\n'), ((935, 973), 'chemex.nmr.liouvillian.Basis', 'cnl.Basis', ([], {'type': '"""iz"""', 'spin_system': '"""nh"""'}), "(type='iz', spin_system='nh')\n", (944, 973), True, 'import chemex.nmr.liouvillian as cnl\n'), ((1017, 1075), 'chemex.experiments.helper.load_experiment', 'ceh.load_experiment', ([], {'config': 'config', 'pulse_seq_cls': 'PulseSeq'}), '(config=config, pulse_seq_cls=PulseSeq)\n', (1036, 1075), True, 'import chemex.experiments.helper as ceh\n'), ((1436, 1463), 'functools.lru_cache', 'ft.lru_cache', ([], {'maxsize': '(10000)'}), '(maxsize=10000)\n', (1448, 1463), True, 'import functools as ft\n')]
|
import fortnitepy, fortniteAPI
async def SetCosmeticMSG(self,message):
msg = message.content.upper().strip()
args = msg.split(" ")
Lang = self.DefaultLang
if "--LANG=" in msg:
msg = msg + " "
Lang = GetValue(msg,"--LANG="," ")
msg = msg.replace("--LANG=" + Lang, "").strip()
Lang = Lang.lower()
if args[0] == "!SKIN":
Item = GetName("!SKIN",msg)
Item = await fortniteAPI.GetSkin(Item,Lang)
elif args[0] == "!BACKPACK":
Item = GetName("!BACKPACK",msg)
Item = await fortniteAPI.GetBackpack(Item,Lang)
elif args[0] == "!PICKAXE":
Item = GetName("!PICKAXE",msg)
Item = await fortniteAPI.GetPickaxe(Item,Lang)
elif args[0] == "!EMOJI":
Item = GetName("!EMOJI",msg)
Item = await fortniteAPI.GetEmoji(Item,Lang)
elif args[0] == "!EMOTE":
Item = GetName("!EMOTE",msg)
Item = await fortniteAPI.GetEmote(Item,Lang)
if "status" in Item:
await message.reply("Can't find this item")
return
else:
v = []
if msg.count("--") != 0:
if Item["variants"][Lang]: #Make sure that the item has variants
for Variant in GetValues(msg):
VariantChannelName = (Variant.split("=")[0])[2:]
Variant = Variant.split("=")[1]
for variant in Item["variants"][Lang]:
if variant["type"].upper() == VariantChannelName:
for tag in variant["options"]:
if tag["name"].upper() == Variant:
v.append(create_variant(variant["channel"],tag["tag"],item=Item["backendType"]))
            else: # the item has no variants
await message.reply("Can't find any variants for this item")
asset=f'{str(Item["path"]).replace("FortniteGame/Content","/Game")}.{Item["id"]}'
if args[0] == "!SKIN":
await self.user.party.me.set_outfit(asset=asset,variants=v)
elif args[0] == "!BACKPACK":
await self.user.party.me.set_backpack(asset=asset,variants=v)
elif args[0] == "!PICKAXE":
await self.user.party.me.set_pickaxe(asset=asset,variants=v)
elif args[0] == "!EMOJI":
await self.user.party.me.set_emote(asset=asset)
elif args[0] == "!EMOTE":
await self.user.party.me.set_emote(asset=asset)
await message.reply(f'{Item["type"].capitalize()} set to {Item["Names"][Lang]}')
def GetName(Name,Message):
if Message.count("--") != 0:
Item = GetValue(Message,f'{Name} ',"--")
else:
Item = Message[(len(Name) + 1):]
return Item.strip()
def create_variant(VariantChannelName,Variant,item="AthenaCharacter"):
return {'item': item,'channel': VariantChannelName,'variant': Variant}
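# Example: create_variant("Material", "Mat2") returns
# {'item': 'AthenaCharacter', 'channel': 'Material', 'variant': 'Mat2'}
# (the channel and tag names here are illustrative).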
def GetValue(fullLine,startWith,endWith):
startIndex = fullLine.index(startWith) + len(startWith)
endIndex = fullLine[startIndex:].index(endWith) + startIndex
return fullLine[startIndex:endIndex]
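# Example (hypothetical message): GetValue("!SKIN Raider --LANG=en ", "--LANG=", " ")
# returns 'en' -- the text between the start marker and the next end marker.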
def GetValues(fullLine):
Variants = []
for Variant in range(0,fullLine.count("--")):
try:
startIndex = fullLine.index("--")
ValueStartIndex = fullLine[startIndex:].index("=") + startIndex + 1
try:
endIndex = fullLine[ValueStartIndex:].index("--") + ValueStartIndex
            except ValueError: # no later "--": this value runs to the end of the line
endIndex = len(fullLine)
Variants.append(fullLine[startIndex:endIndex])
fullLine = fullLine.replace(fullLine[startIndex:endIndex],"")
        except ValueError: # malformed option (missing "--" or "="), give up
return None
return Variants
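# Example: GetValues("!SKIN X --STYLE=GOLD --LANG=EN") returns
# ['--STYLE=GOLD ', '--LANG=EN'] (note the trailing space kept on the first
# element; the option names here are illustrative).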
|
[
"fortniteAPI.GetEmote",
"fortniteAPI.GetEmoji",
"fortniteAPI.GetBackpack",
"fortniteAPI.GetPickaxe",
"fortniteAPI.GetSkin"
] |
[((444, 475), 'fortniteAPI.GetSkin', 'fortniteAPI.GetSkin', (['Item', 'Lang'], {}), '(Item, Lang)\n', (463, 475), False, 'import fortnitepy, fortniteAPI\n'), ((572, 607), 'fortniteAPI.GetBackpack', 'fortniteAPI.GetBackpack', (['Item', 'Lang'], {}), '(Item, Lang)\n', (595, 607), False, 'import fortnitepy, fortniteAPI\n'), ((702, 736), 'fortniteAPI.GetPickaxe', 'fortniteAPI.GetPickaxe', (['Item', 'Lang'], {}), '(Item, Lang)\n', (724, 736), False, 'import fortnitepy, fortniteAPI\n'), ((827, 859), 'fortniteAPI.GetEmoji', 'fortniteAPI.GetEmoji', (['Item', 'Lang'], {}), '(Item, Lang)\n', (847, 859), False, 'import fortnitepy, fortniteAPI\n'), ((950, 982), 'fortniteAPI.GetEmote', 'fortniteAPI.GetEmote', (['Item', 'Lang'], {}), '(Item, Lang)\n', (970, 982), False, 'import fortnitepy, fortniteAPI\n')]
|
from pyclk import Sig, Reg, In, Out, List, Module
from .memory import memory
from .ddr2fpga import ddr2fpga
from .fpga2ddr import fpga2ddr
from .iterator import iterator
from .functions import func
from .fpga_state import FPGA_state
from random import randint
import asyncio
import numpy as np
class Simu(Module):
def __init__(self, fpga_config):
self.func_layout = fpga_config.func_layout
self.mem_nb = fpga_config.config['mem_nb']
self.ddr2fpga_nb = fpga_config.config['ddr2fpga_nb']
self.fpga2ddr_nb = fpga_config.config['fpga2ddr_nb']
self.func_nb = fpga_config.config['func_nb']
self.iter_nb = fpga_config.config['iter_nb']
self.mem_depth = fpga_config.config['mem_depth']
self.chunk_array = [[0 for j in range(fpga_config.config['mem_depth'])] for i in range(fpga_config.config['mem_nb'])]
#self.chunk_array = [np.zeros(fpga_config.config['mem_depth'], dtype=np.uint64) for i in range(fpga_config.config['mem_nb'])]
self.cycle_nb = -1
self.randmax = 2
self.trace = None
# memories
self.u_mem = List()
self.s_mem_wena = List()
self.s_mem_addr = List()
self.s_mem_din = List()
self.s_mem_dout = List()
for i in range(self.mem_nb):
self.s_mem_wena[i] = Sig()
self.s_mem_addr[i] = Sig()
self.s_mem_din[i] = Sig()
self.s_mem_dout[i] = Sig()
self.u_mem[i] = _ = memory(self.mem_depth)
_.i_wena (self.s_mem_wena[i])
_.i_addr (self.s_mem_addr[i])
_.i_din (self.s_mem_din[i])
_.o_dout (self.s_mem_dout[i])
# ddr2fpga
self.u_ddr2fpga = List()
self.s_ddr2fpga_mem_i = List()
self.s_ddr2fpga_data_nb = List()
self.s_ddr2fpga_done = List()
self.s_ddr2fpga_wena = List()
self.s_ddr2fpga_addr = List()
self.s_ddr2fpga_din = List()
for i in range(self.ddr2fpga_nb):
self.s_ddr2fpga_mem_i[i] = Sig()
self.s_ddr2fpga_data_nb[i] = Sig()
self.s_ddr2fpga_done[i] = Sig()
self.s_ddr2fpga_wena[i] = Sig()
self.s_ddr2fpga_addr[i] = Sig()
self.s_ddr2fpga_din[i] = Sig()
self.u_ddr2fpga[i] = _ = ddr2fpga()
_.i_data_nb (self.s_ddr2fpga_data_nb[i])
_.o_done (self.s_ddr2fpga_done[i])
_.o_mem_wena (self.s_ddr2fpga_wena[i])
_.o_mem_addr (self.s_ddr2fpga_addr[i])
_.o_mem_din (self.s_ddr2fpga_din[i])
# fpga2ddr
self.s_fpga2ddr_mem_i = List()
self.s_fpga2ddr_data_nb = List()
self.s_fpga2ddr_done = List()
self.s_fpga2ddr_addr = List()
self.s_fpga2ddr_mem_dout = List()
self.u_fpga2ddr = List()
for i in range(self.fpga2ddr_nb):
self.s_fpga2ddr_mem_dout[i] = Sig()
self.s_fpga2ddr_addr[i] = Sig()
self.s_fpga2ddr_mem_i[i] = Sig()
self.s_fpga2ddr_data_nb[i] = Sig()
self.s_fpga2ddr_done[i] = Sig()
self.u_fpga2ddr[i] = _ = fpga2ddr()
_.i_data_nb (self.s_fpga2ddr_data_nb[i])
_.o_done (self.s_fpga2ddr_done[i])
_.o_mem_addr (self.s_fpga2ddr_addr[i])
_.i_mem_dout (self.s_fpga2ddr_mem_dout[i])
# iterators
self.u_iter = List()
self.s_iter_data_nb = List()
self.s_iter_done = List()
self.s_iter_raddr = List()
self.s_iter_waddr = List()
self.s_iter_wena = List()
self.s_iter_arg_valid = List()
self.s_iter_res_valid = List()
for i in range(self.iter_nb):
self.s_iter_data_nb[i] = Sig()
self.s_iter_done[i] = Sig()
self.s_iter_raddr[i] = Sig()
self.s_iter_waddr[i] = Sig()
self.s_iter_wena[i] = Sig()
self.s_iter_arg_valid[i] = Sig()
self.s_iter_res_valid[i] = Sig()
self.u_iter[i] = _ = iterator()
_.i_data_nb (self.s_iter_data_nb[i])
_.o_done (self.s_iter_done[i])
_.o_raddr (self.s_iter_raddr[i])
_.o_waddr (self.s_iter_waddr[i])
_.o_wena (self.s_iter_wena[i])
_.o_arg_valid (self.s_iter_arg_valid[i])
_.i_res_valid (self.s_iter_res_valid[i])
# functions
self.u_func = List()
self.s_func_arg0 = List()
self.s_func_arg1 = List()
self.s_func_arg_valid = List()
self.s_func_res = List()
self.s_func_res_valid = List()
i = 0
for fname, fnb in self.func_layout.items():
for j in range(fnb):
self.s_func_arg0[i] = Sig()
self.s_func_arg1[i] = Sig()
self.s_func_arg_valid[i] = Sig()
self.s_func_res[i] = Sig()
self.s_func_res_valid[i] = Sig()
self.u_func[i] = _ = func(fname)
_.i_arg0 (self.s_func_arg0[i])
_.i_arg1 (self.s_func_arg1[i])
_.i_arg_valid (self.s_func_arg_valid[i])
_.o_res (self.s_func_res[i])
_.o_res_valid (self.s_func_res_valid[i])
i += 1
self.s_iter_rmem0_i = List()
self.s_iter_rmem1_i = List()
self.s_iter_wmem_i = List()
self.s_iter_func_i = List()
for i in range(self.iter_nb):
self.s_iter_rmem0_i[i] = Sig()
self.s_iter_rmem1_i[i] = Sig()
self.s_iter_wmem_i[i] = Sig()
self.s_iter_func_i[i] = Sig()
self.state = FPGA_state(fpga_config)
self.config = fpga_config.config
def logic(self):
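        # combinational wiring, evaluated every simulation cycle: fans signals
        # between the DDR movers, the iterators, the functions and the memories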
# DDR <-> memory
for i in range(self.mem_nb):
self.s_mem_addr[i].d = 0
self.s_mem_din[i].d = 0
self.s_mem_wena[i].d = 0
for i in range(self.fpga2ddr_nb):
self.s_mem_addr[self.s_fpga2ddr_mem_i[i].d].d += self.s_fpga2ddr_addr[i].d
self.s_fpga2ddr_mem_dout[i].d = self.s_mem_dout[self.s_fpga2ddr_mem_i[i].d].d
for i in range(self.ddr2fpga_nb):
self.s_mem_wena[self.s_ddr2fpga_mem_i[i].d].d += self.s_ddr2fpga_wena[i].d
self.s_mem_addr[self.s_ddr2fpga_mem_i[i].d].d += self.s_ddr2fpga_addr[i].d
self.s_mem_din[self.s_ddr2fpga_mem_i[i].d].d += self.s_ddr2fpga_din[i].d
# memory <-> iterator <-> function
for i in range(self.func_nb):
self.s_func_arg_valid[i].d = 0
self.s_func_arg0[i].d = 0
self.s_func_arg1[i].d = 0
for i in range(self.iter_nb):
self.s_mem_addr[self.s_iter_rmem0_i[i].d].d += self.s_iter_raddr[i].d
self.s_mem_addr[self.s_iter_rmem1_i[i].d].d += self.s_iter_raddr[i].d
self.s_mem_addr[self.s_iter_wmem_i[i].d].d += self.s_iter_waddr[i].d
self.s_mem_wena[self.s_iter_wmem_i[i].d].d += self.s_iter_wena[i].d
self.s_func_arg_valid[self.s_iter_func_i[i].d].d += self.s_iter_arg_valid[i].d
self.s_iter_res_valid[i].d = self.s_func_res_valid[self.s_iter_func_i[i].d].d
if self.s_iter_data_nb[i].d != 0:
self.s_mem_din[self.s_iter_wmem_i[i].d].d += self.s_func_res[self.s_iter_func_i[i].d].d
if self.s_iter_arg_valid[i].d == 1:
self.s_func_arg0[self.s_iter_func_i[i].d].d += self.s_mem_dout[self.s_iter_rmem0_i[i].d].d
self.s_func_arg1[self.s_iter_func_i[i].d].d += self.s_mem_dout[self.s_iter_rmem1_i[i].d].d
def set_cycle_nb(self, cycle_nb=-1):
self.cycle_nb = cycle_nb
def set_trace(self, trace):
self.trace = trace
async def op(self, iter_i, func_i, rmem0_i, rmem1_i, wmem_i, data_nb):
# operation request
self.s_iter_data_nb[iter_i].d = data_nb
self.s_iter_func_i[iter_i].d = func_i
self.s_iter_rmem0_i[iter_i].d = rmem0_i
self.s_iter_rmem1_i[iter_i].d = rmem1_i
self.s_iter_wmem_i[iter_i].d = wmem_i
clkNb = randint(1, self.randmax)
self.run(clkNb=clkNb, trace=self.trace)
# operation completion check
# software is polling, run the FPGA
done = False
while not done:
if (self.cycle_nb >= 0) and (self.time >= self.cycle_nb):
return
if self.s_iter_done[iter_i].d == 1:
self.s_iter_data_nb[iter_i].d = 0
done = True
else:
done = False
clkNb = randint(1, self.randmax)
self.run(clkNb=clkNb, trace=self.trace)
await asyncio.sleep(0)
async def ddr2fpga(self, ddr2fpga_i, mem_i, array_ptr, data_nb):
# memory write
self.s_ddr2fpga_mem_i[ddr2fpga_i].d = mem_i
self.s_ddr2fpga_data_nb[ddr2fpga_i].d = data_nb
self.u_ddr2fpga[ddr2fpga_i].array_ptr = array_ptr
clkNb = randint(1, self.randmax)
self.run(clkNb=clkNb, trace=self.trace)
# memory copy completion check
# software is polling, run the FPGA
done = False
while not done:
if (self.cycle_nb >= 0) and (self.time >= self.cycle_nb):
return
if self.s_ddr2fpga_done[ddr2fpga_i].d == 1:
self.s_ddr2fpga_data_nb[ddr2fpga_i].d = 0
done = True
else:
done = False
clkNb = randint(1, self.randmax)
self.run(clkNb=clkNb, trace=self.trace)
await asyncio.sleep(0)
async def fpga2ddr(self, fpga2ddr_i, mem_i, array_ptr, data_nb):
# memory read
self.s_fpga2ddr_mem_i[fpga2ddr_i].d = mem_i
self.s_fpga2ddr_data_nb[fpga2ddr_i].d = data_nb
self.u_fpga2ddr[fpga2ddr_i].array_ptr = array_ptr
clkNb = randint(1, self.randmax)
self.run(clkNb=clkNb, trace=self.trace)
# memory copy completion check
# software is polling, run the FPGA
done = False
while not done:
if (self.cycle_nb >= 0) and (self.time >= self.cycle_nb):
return
if self.s_fpga2ddr_done[fpga2ddr_i].d == 1:
self.s_fpga2ddr_data_nb[fpga2ddr_i].d = 0
done = True
else:
done = False
clkNb = randint(1, self.randmax)
self.run(clkNb=clkNb, trace=self.trace)
await asyncio.sleep(0)
|
[
"pyclk.List",
"asyncio.sleep",
"random.randint",
"pyclk.Sig"
] |
[((1123, 1129), 'pyclk.List', 'List', ([], {}), '()\n', (1127, 1129), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((1156, 1162), 'pyclk.List', 'List', ([], {}), '()\n', (1160, 1162), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((1189, 1195), 'pyclk.List', 'List', ([], {}), '()\n', (1193, 1195), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((1221, 1227), 'pyclk.List', 'List', ([], {}), '()\n', (1225, 1227), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((1254, 1260), 'pyclk.List', 'List', ([], {}), '()\n', (1258, 1260), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((1734, 1740), 'pyclk.List', 'List', ([], {}), '()\n', (1738, 1740), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((1773, 1779), 'pyclk.List', 'List', ([], {}), '()\n', (1777, 1779), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((1814, 1820), 'pyclk.List', 'List', ([], {}), '()\n', (1818, 1820), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((1852, 1858), 'pyclk.List', 'List', ([], {}), '()\n', (1856, 1858), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((1890, 1896), 'pyclk.List', 'List', ([], {}), '()\n', (1894, 1896), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((1928, 1934), 'pyclk.List', 'List', ([], {}), '()\n', (1932, 1934), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((1965, 1971), 'pyclk.List', 'List', ([], {}), '()\n', (1969, 1971), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((2655, 2661), 'pyclk.List', 'List', ([], {}), '()\n', (2659, 2661), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((2696, 2702), 'pyclk.List', 'List', ([], {}), '()\n', (2700, 2702), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((2734, 2740), 'pyclk.List', 'List', ([], {}), '()\n', (2738, 2740), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((2772, 2778), 'pyclk.List', 'List', ([], {}), '()\n', (2776, 2778), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((2814, 2820), 'pyclk.List', 'List', ([], {}), '()\n', (2818, 2820), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((2847, 2853), 'pyclk.List', 'List', ([], {}), '()\n', (2851, 2853), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3428, 3434), 'pyclk.List', 'List', ([], {}), '()\n', (3432, 3434), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3465, 3471), 'pyclk.List', 'List', ([], {}), '()\n', (3469, 3471), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3499, 3505), 'pyclk.List', 'List', ([], {}), '()\n', (3503, 3505), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3534, 3540), 'pyclk.List', 'List', ([], {}), '()\n', (3538, 3540), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3569, 3575), 'pyclk.List', 'List', ([], {}), '()\n', (3573, 3575), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3603, 3609), 'pyclk.List', 'List', ([], {}), '()\n', (3607, 3609), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3642, 3648), 'pyclk.List', 'List', ([], {}), '()\n', (3646, 3648), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3681, 3687), 'pyclk.List', 'List', ([], {}), '()\n', (3685, 3687), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((4474, 4480), 'pyclk.List', 'List', ([], {}), '()\n', (4478, 4480), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((4508, 4514), 'pyclk.List', 'List', ([], {}), '()\n', (4512, 4514), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((4542, 4548), 'pyclk.List', 'List', ([], {}), '()\n', (4546, 4548), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((4581, 4587), 'pyclk.List', 'List', ([], {}), '()\n', (4585, 4587), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((4614, 4620), 'pyclk.List', 'List', ([], {}), '()\n', (4618, 4620), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((4653, 4659), 'pyclk.List', 'List', ([], {}), '()\n', (4657, 4659), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((5372, 5378), 'pyclk.List', 'List', ([], {}), '()\n', (5376, 5378), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((5409, 5415), 'pyclk.List', 'List', ([], {}), '()\n', (5413, 5415), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((5445, 5451), 'pyclk.List', 'List', ([], {}), '()\n', (5449, 5451), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((5481, 5487), 'pyclk.List', 'List', ([], {}), '()\n', (5485, 5487), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((8272, 8296), 'random.randint', 'randint', (['(1)', 'self.randmax'], {}), '(1, self.randmax)\n', (8279, 8296), False, 'from random import randint\n'), ((9144, 9168), 'random.randint', 'randint', (['(1)', 'self.randmax'], {}), '(1, self.randmax)\n', (9151, 9168), False, 'from random import randint\n'), ((10033, 10057), 'random.randint', 'randint', (['(1)', 'self.randmax'], {}), '(1, self.randmax)\n', (10040, 10057), False, 'from random import randint\n'), ((1331, 1336), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (1334, 1336), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((1370, 1375), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (1373, 1375), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((1408, 1413), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (1411, 1413), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((1447, 1452), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (1450, 1452), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((2054, 2059), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (2057, 2059), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((2101, 2106), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (2104, 2106), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((2145, 2150), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (2148, 2150), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((2189, 2194), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (2192, 2194), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((2233, 2238), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (2236, 2238), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((2276, 2281), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (2279, 2281), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((2939, 2944), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (2942, 2944), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((2983, 2988), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (2986, 2988), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3028, 3033), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (3031, 3033), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3075, 3080), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (3078, 3080), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3119, 3124), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (3122, 3124), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3763, 3768), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (3766, 3768), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3803, 3808), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (3806, 3808), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3844, 3849), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (3847, 3849), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3885, 3890), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (3888, 3890), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3925, 3930), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (3928, 3930), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((3970, 3975), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (3973, 3975), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((4015, 4020), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (4018, 4020), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((5564, 5569), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (5567, 5569), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((5607, 5612), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (5610, 5612), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((5649, 5654), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (5652, 5654), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((5691, 5696), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (5694, 5696), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((8757, 8781), 'random.randint', 'randint', (['(1)', 'self.randmax'], {}), '(1, self.randmax)\n', (8764, 8781), False, 'from random import randint\n'), ((9647, 9671), 'random.randint', 'randint', (['(1)', 'self.randmax'], {}), '(1, self.randmax)\n', (9654, 9671), False, 'from random import randint\n'), ((10536, 10560), 'random.randint', 'randint', (['(1)', 'self.randmax'], {}), '(1, self.randmax)\n', (10543, 10560), False, 'from random import randint\n'), ((4797, 4802), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (4800, 4802), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((4841, 4846), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (4844, 4846), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((4890, 4895), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (4893, 4895), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((4933, 4938), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (4936, 4938), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((4982, 4987), 'pyclk.Sig', 'Sig', ([], {}), '()\n', (4985, 4987), False, 'from pyclk import Sig, Reg, In, Out, List, Module\n'), ((8852, 8868), 'asyncio.sleep', 'asyncio.sleep', (['(0)'], {}), '(0)\n', (8865, 8868), False, 'import asyncio\n'), ((9742, 9758), 'asyncio.sleep', 'asyncio.sleep', (['(0)'], {}), '(0)\n', (9755, 9758), False, 'import asyncio\n'), ((10631, 10647), 'asyncio.sleep', 'asyncio.sleep', (['(0)'], {}), '(0)\n', (10644, 10647), False, 'import asyncio\n')]
|
from datetime import datetime
import os.path as op
import re
from uuid import uuid4
from . import models
from .pynwb_utils import (
_get_pynwb_metadata,
get_neurodata_types,
get_nwb_version,
ignore_benign_pynwb_warnings,
metadata_cache,
)
from .utils import ensure_datetime
from . import __version__, get_logger
from .dandiset import Dandiset
lgr = get_logger()
@metadata_cache.memoize_path
def get_metadata(path):
"""Get selected metadata from a .nwb file or a dandiset directory
    If a directory is given and it is not a Dandiset, None is returned
Parameters
----------
path: str or Path
Returns
-------
dict
"""
# when we run in parallel, these annoying warnings appear
ignore_benign_pynwb_warnings()
path = str(path) # for Path
meta = dict()
if op.isdir(path):
try:
dandiset = Dandiset(path)
return dandiset.metadata
except ValueError as exc:
lgr.debug("Failed to get metadata for %s: %s", path, exc)
return None
# First read out possibly available versions of specifications for NWB(:N)
meta["nwb_version"] = get_nwb_version(path)
# PyNWB might fail to load because of missing extensions.
# There is a new initiative of establishing registry of such extensions.
# Not yet sure if PyNWB is going to provide "native" support for needed
# functionality: https://github.com/NeurodataWithoutBorders/pynwb/issues/1143
# So meanwhile, hard-coded workaround for data types we care about
ndtypes_registry = {
"AIBS_ecephys": "allensdk.brain_observatory.ecephys.nwb",
"ndx-labmetadata-abf": "ndx_dandi_icephys",
}
tried_imports = set()
while True:
try:
meta.update(_get_pynwb_metadata(path))
break
except KeyError as exc: # ATM there is
lgr.debug("Failed to read %s: %s", path, exc)
res = re.match(r"^['\"\\]+(\S+). not a namespace", str(exc))
if not res:
raise
ndtype = res.groups()[0]
if ndtype not in ndtypes_registry:
raise ValueError(
"We do not know which extension provides %s. "
"Original exception was: %s. " % (ndtype, exc)
)
import_mod = ndtypes_registry[ndtype]
lgr.debug("Importing %r which should provide %r", import_mod, ndtype)
if import_mod in tried_imports:
raise RuntimeError(
"We already tried importing %s to provide %s, but it seems it didn't help"
% (import_mod, ndtype)
)
tried_imports.add(import_mod)
__import__(import_mod)
meta["nd_types"] = get_neurodata_types(path)
return meta
def parse_age(age):
"""
Convert a human-friendly duration string into an ISO 8601 duration
Parameters
----------
age : str
Returns
-------
str
"""
m = re.fullmatch(r"(\d+)\s*(y(ear)?|m(onth)?|w(eek)?|d(ay)?)s?", age, flags=re.I)
if m:
qty = int(m.group(1))
unit = m.group(2)[0].upper()
return f"P{qty}{unit}"
else:
raise ValueError(age)
def extract_age(metadata):
try:
dob = ensure_datetime(metadata["date_of_birth"])
start = ensure_datetime(metadata["session_start_time"])
except (KeyError, TypeError, ValueError):
try:
duration = parse_age(metadata["age"])
except (KeyError, TypeError, ValueError):
return ...
else:
if start < dob:
raise ValueError("session_start_time precedes date_of_birth")
duration = timedelta2duration(start - dob)
return models.PropertyValue(value=duration, unitText="Years from birth")
def timedelta2duration(delta):
"""
Convert a datetime.timedelta to ISO 8601 duration format
Parameters
----------
delta : datetime.timedelta
Returns
-------
str
"""
s = "P"
if delta.days:
s += f"{delta.days}D"
if delta.seconds or delta.microseconds:
sec = delta.seconds
if delta.microseconds:
            # Only add the microseconds when they are nonzero, so that sec stays an int otherwise
sec += delta.microseconds / 1000000
s += f"T{sec}S"
if s == "P":
s += "0D"
return s
def extract_sex(metadata):
value = metadata.get("sex", None)
if value is not None:
value = value.lower()
if value in ["m", "male"]:
value_id = "http://purl.obolibrary.org/obo/PATO_0000384"
value = "Male"
elif value in ["f", "female"]:
value_id = "http://purl.obolibrary.org/obo/PATO_0000383"
value = "Female"
elif value in ["unknown"]:
value_id = None
value = "Unknown"
elif value in ["other"]:
value_id = None
value = "Other"
elif value.startswith("http"):
value_id = value
value = None
else:
raise ValueError(f"Cannot interpret sex field: {value}")
return models.SexType(identifier=value_id, name=value)
else:
return ...
def extract_species(metadata):
value = metadata.get("species", None)
if value is not None:
value = value.lower()
if "mouse" in value or value.startswith("mus"):
value_id = "http://purl.obolibrary.org/obo/NCBITaxon_10090"
value = "House mouse"
elif "human" in value or value.startswith("homo"):
value_id = "http://purl.obolibrary.org/obo/NCBITaxon_9606"
value = "Human"
elif "rat" in value:
value_id = "http://purl.obolibrary.org/obo/NCBITaxon_10117"
value = "House rat"
elif "mulatta" in value or "rhesus" in value:
value_id = "http://purl.obolibrary.org/obo/NCBITaxon_9544"
value = "Rhesus monkey"
elif "jacchus" in value:
value_id = "http://purl.obolibrary.org/obo/NCBITaxon_9483"
value = "Common marmoset"
elif "melanogaster" in value or "fruit fly" in value:
value_id = "http://purl.obolibrary.org/obo/NCBITaxon_7227"
value = "Common fruit fly"
elif value.startswith("http"):
value_id = value
value = None
else:
raise ValueError(f"Cannot interpret species field: {value}")
return models.SpeciesType(identifier=value_id, name=value.capitalize())
else:
return ...
def extract_assay_type(metadata):
if "assayType" in metadata:
return [models.AssayType(identifier="assayType", name=metadata["assayType"])]
else:
return ...
def extract_anatomy(metadata):
if "anatomy" in metadata:
return [models.Anatomy(identifier="anatomy", name=metadata["anatomy"])]
else:
return ...
def extract_model(modelcls, metadata, **kwargs):
m = modelcls.unvalidated()
for field in m.__fields__.keys():
value = kwargs.get(field, extract_field(field, metadata))
if value is not Ellipsis:
setattr(m, field, value)
# return modelcls(**m.dict())
return m
def extract_wasDerivedFrom(metadata):
return [
extract_model(models.BioSample, metadata, identifier=metadata.get("subject_id"))
]
def extract_digest(metadata):
if "digest" in metadata:
return models.Digest(
value=metadata["digest"],
cryptoType=models.DigestType[metadata["digest_type"]],
)
else:
return ...
FIELD_EXTRACTORS = {
"wasDerivedFrom": extract_wasDerivedFrom,
"age": extract_age,
"sex": extract_sex,
"assayType": extract_assay_type,
"anatomy": extract_anatomy,
"digest": extract_digest,
"species": extract_species,
}
def extract_field(field, metadata):
if field in FIELD_EXTRACTORS:
return FIELD_EXTRACTORS[field](metadata)
else:
return metadata.get(field, ...)
def nwb2asset(nwb_path, digest=None, digest_type=None):
start_time = datetime.now().astimezone()
metadata = get_metadata(nwb_path)
if digest is not None:
metadata["digest"] = digest
metadata["digest_type"] = digest_type
metadata["contentSize"] = op.getsize(nwb_path)
metadata["encodingFormat"] = "application/x-nwb"
asset = metadata2asset(metadata)
end_time = datetime.now().astimezone()
asset.wasGeneratedBy = models.Activity(
identifier=str(uuid4()),
name="Metadata generation",
description="Metadata generated by DANDI cli",
wasAssociatedWith=models.Software(
identifier={"propertyID": "RRID", "value": "SCR_019009"},
name="DANDI Command Line Interface",
description=f"dandi-cli {__version__}",
version=__version__,
url="https://github.com/dandi/dandi-cli",
),
startedAt=start_time,
endedAt=end_time,
)
return asset
def metadata2asset(metadata):
return extract_model(models.AssetMeta, metadata)
"""
The following section converts metadata schema from the current girder dandiset
model to the new schema in dandi-cli. This section should be removed
after the migration to the new schema is finished.
"""
mapping = {
"identifier": ["identifier"],
"name": ["name"],
"description": ["description"],
"contributors": ["contributor"],
"sponsors": ["contributor", ["Sponsor"]],
"license": ["license"],
"keywords": ["keywords"],
"project": ["generatedBy"],
"conditions_studied": ["about"],
"associated_anatomy": ["about"],
"protocols": ["protocol"],
"ethicsApprovals": ["ethicsApproval"],
"access": ["access"],
"associatedData": ["relatedResource", "IsDerivedFrom"],
"publications": ["relatedResource", "IsDescribedBy"],
"age": ["variableMeasured"],
"organism": ["variableMeasured"],
"sex": ["variableMeasured"],
"number_of_subjects": ["assetsSummary", "numberOfSubjects"],
"number_of_cells": ["assetsSummary", "numberOfCells"],
"number_of_tissue_samples": ["assetsSummary", "numberOfSamples"],
}
def toContributor(value):
if not isinstance(value, list):
value = [value]
out = []
for item in value:
contrib = {}
if "name" in item:
name = item["name"].split()
item["name"] = f"{name[-1]}, {' '.join(name[:-1])}"
if "roles" in item:
roles = []
for role in item["roles"]:
tmp = role.split()
if len(tmp) > 1:
roles.append("".join([val.capitalize() for val in tmp]))
else:
roles.append(tmp.pop())
contrib["roleName"] = roles
del item["roles"]
if "awardNumber" in item:
contrib["awardNumber"] = item["awardNumber"]
del item["awardNumber"]
if "orcid" in item:
if item["orcid"]:
contrib["identifier"] = models.PropertyValue(
value=item["orcid"], propertyID="ORCID"
)
else:
contrib["identifier"] = models.PropertyValue()
del item["orcid"]
if "affiliations" in item:
item["affiliation"] = item["affiliations"]
del item["affiliations"]
contrib.update(**{f"{k}": v for k, v in item.items()})
out.append(contrib)
return out
def convertv1(data):
oldmeta = data["dandiset"] if "dandiset" in data else data
newmeta = {}
for oldkey, value in oldmeta.items():
if oldkey in ["language", "altid", "number_of_slices"]:
continue
if oldkey not in mapping:
raise KeyError(f"Could not find {oldkey}")
if len(mapping[oldkey]) == 0:
newkey = f"schema:{oldkey}"
else:
newkey = mapping[oldkey][0]
if oldkey in ["contributors", "sponsors"]:
value = toContributor(value)
if oldkey == "access":
value = [
models.AccessRequirements(
status=models.AccessType.Open, email=value["access_contact_email"]
)
]
if oldkey == "identifier":
value = models.PropertyValue(value=value, propertyID="DANDI")
if len(mapping[oldkey]) == 2:
extra = mapping[oldkey][1]
if newkey == "contributor":
extrakey = "roleName"
if oldkey == "sponsors":
extrakey = "roleName"
if oldkey in ["publications", "associatedData"]:
extrakey = "relation"
if not isinstance(value, list):
value = [value]
out = []
for item in value:
if isinstance(item, dict):
out.append({k: v for k, v in item.items()})
else:
present = False
for val in out:
if item in val.values():
present = True
if not present:
out.append({"url": item})
value = out
if oldkey in [
"number_of_subjects",
"number_of_cells",
"number_of_tissue_samples",
]:
value = {extra: value}
extrakey = None
if isinstance(value, list):
for val in value:
if extrakey:
val[extrakey] = extra
if isinstance(value, dict):
if extrakey:
value[extrakey] = extra
if newkey == "variableMeasured":
if oldkey in ["age", "sex"]:
vm = {"name": oldkey}
if oldkey == "sex":
vm["value"] = value
else:
if "maximum" in value:
if "days" in value["maximum"]:
value["units"] = "days"
if "Gestational" in value["maximum"]:
value["units"] = "Gestational Week"
value["maximum"] = value["maximum"].split()[-1]
if value["maximum"].startswith("P"):
value["maximum"] = value["maximum"][1:-1]
value["units"] = value["maximum"][-1]
if "None" not in value["maximum"]:
value["maximum"] = float(value["maximum"].split()[0])
if "minimum" in value:
if "days" in value["minimum"]:
value["units"] = "days"
if "Gestational" in value["minimum"]:
value["units"] = "Gestational Week"
value["minimum"] = value["minimum"].split()[-1]
if value["minimum"].startswith("P"):
value["minimum"] = value["minimum"][1:-1]
value["units"] = value["minimum"][-1]
if "None" not in value["minimum"]:
value["minimum"] = float(value["minimum"].split()[0])
value["unitText"] = value["units"]
del value["units"]
vm.update(**value)
else:
newvalues = []
for val in value:
if "species" in val:
newvalues.append(val["species"])
vm = {"name": "species", "value": newvalues}
value = vm
if newkey not in newmeta:
newmeta[newkey] = value
else:
curvalue = newmeta[newkey]
if not isinstance(curvalue, list):
newmeta[newkey] = [curvalue]
if not isinstance(value, list):
value = [value]
newmeta[newkey].extend(value)
if "assetsSummary" in newmeta:
del newmeta["assetsSummary"]
if "variableMeasured" in newmeta:
del newmeta["variableMeasured"]
return newmeta
def migrate2newschema(meta):
newmeta = convertv1(meta)
dandimeta = models.DandiMeta.unvalidated(**newmeta)
return dandimeta
|
[
"re.fullmatch",
"uuid.uuid4",
"os.path.isdir",
"os.path.getsize",
"datetime.datetime.now"
] |
[((829, 843), 'os.path.isdir', 'op.isdir', (['path'], {}), '(path)\n', (837, 843), True, 'import os.path as op\n'), ((3056, 3134), 're.fullmatch', 're.fullmatch', (['"""(\\\\d+)\\\\s*(y(ear)?|m(onth)?|w(eek)?|d(ay)?)s?"""', 'age'], {'flags': 're.I'}), "('(\\\\d+)\\\\s*(y(ear)?|m(onth)?|w(eek)?|d(ay)?)s?', age, flags=re.I)\n", (3068, 3134), False, 'import re\n'), ((8360, 8380), 'os.path.getsize', 'op.getsize', (['nwb_path'], {}), '(nwb_path)\n', (8370, 8380), True, 'import os.path as op\n'), ((8155, 8169), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (8167, 8169), False, 'from datetime import datetime\n'), ((8486, 8500), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (8498, 8500), False, 'from datetime import datetime\n'), ((8581, 8588), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (8586, 8588), False, 'from uuid import uuid4\n')]
|
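The two duration helpers above (parse_age and timedelta2duration) both target ISO 8601 durations; here is a minimal sanity-check sketch, assuming the module above is importable as metadata (the module name is an assumption):
from datetime import timedelta

from metadata import parse_age, timedelta2duration  # hypothetical module name

assert parse_age("4 weeks") == "P4W"   # the unit collapses to its upper-cased first letter
assert parse_age("12m") == "P12M"      # whitespace is optional, matching is case-insensitive
assert timedelta2duration(timedelta(days=3)) == "P3D"
assert timedelta2duration(timedelta(seconds=90)) == "PT90S"
assert timedelta2duration(timedelta(0)) == "P0D"  # a zero delta is spelled out explicitly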
import logging
import os
from google.appengine.api import memcache
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
from twimonial.models import User, Twimonial
from twimonial.ui import render_write
import config
class UserPage(webapp.RequestHandler):
def get(self, screen_name):
if config.CACHE:
# Check cache first
cached_page = memcache.get(screen_name, 'userpage')
if cached_page:
self.response.out.write(cached_page)
return
user = User.get_by_screen_name(screen_name)
if not user:
self.error(404)
      render_write({'screen_name': screen_name},
          'user_404.html', self.request, self.response)
return
user.check_profile_image()
tmpl_values = {
'user': user.dictize(),
}
# Send out and cache it
rendered_page = render_write(tmpl_values, 'user.html', self.request,
self.response)
if config.CACHE:
memcache.set(screen_name, rendered_page, config.CACHE_TIME_USERPAGE,
namespace='userpage')
def head(self, screen_name):
pass
class UserListPage(webapp.RequestHandler):
def get(self, screen_name, screen_names_string):
limit = 10
screen_names = [name for name in screen_names_string.split('-') if name][:limit]
screen_names.sort()
screen_names_string = '-'.join(screen_names)
if config.CACHE:
# Check cache first
cached_page = memcache.get(screen_names_string, 'userlist_%s' % screen_name)
if cached_page:
self.response.out.write(cached_page)
return
user = User.get_by_screen_name(screen_name)
if not user:
self.error(404)
      render_write({'screen_name': screen_name},
          'user_404.html', self.request, self.response)
return
twimonials = [t.dictize() for t in Twimonial.get_tos_from(screen_names, user)]
missings = []
t_screen_names = [t['to_user']['screen_name'].lower() for t in twimonials]
for name in screen_names:
if name.lower() not in t_screen_names:
missings.append(name)
tmpl_values = {
'user': user,
'twimonials': twimonials,
'missings': ', '.join(missings),
}
# Send out and cache it
rendered_page = render_write(tmpl_values, 'userlist.html', self.request,
self.response)
if config.CACHE:
memcache.set(screen_names_string, rendered_page,
config.CACHE_TIME_USERLISTPAGE, namespace='userlist_%s' % screen_name)
application = webapp.WSGIApplication([
('/user/([_a-zA-Z0-9]+)', UserPage),
('/userlist/([_a-zA-Z0-9]+)/([-_a-zA-Z0-9]+)', UserListPage),
],
debug=config.DEBUG)
def main():
run_wsgi_app(application)
if __name__ == "__main__":
main()
|
[
"google.appengine.ext.webapp.util.run_wsgi_app",
"twimonial.ui.render_write",
"twimonial.models.User.get_by_screen_name",
"twimonial.models.Twimonial.get_tos_from",
"google.appengine.ext.webapp.WSGIApplication",
"google.appengine.api.memcache.set",
"google.appengine.api.memcache.get"
] |
[((2607, 2760), 'google.appengine.ext.webapp.WSGIApplication', 'webapp.WSGIApplication', (["[('/user/([_a-zA-Z0-9]+)', UserPage), (\n '/userlist/([_a-zA-Z0-9]+)/([-_a-zA-Z0-9]+)', UserListPage)]"], {'debug': 'config.DEBUG'}), "([('/user/([_a-zA-Z0-9]+)', UserPage), (\n '/userlist/([_a-zA-Z0-9]+)/([-_a-zA-Z0-9]+)', UserListPage)], debug=\n config.DEBUG)\n", (2629, 2760), False, 'from google.appengine.ext import webapp\n'), ((2789, 2814), 'google.appengine.ext.webapp.util.run_wsgi_app', 'run_wsgi_app', (['application'], {}), '(application)\n', (2801, 2814), False, 'from google.appengine.ext.webapp.util import run_wsgi_app\n'), ((587, 623), 'twimonial.models.User.get_by_screen_name', 'User.get_by_screen_name', (['screen_name'], {}), '(screen_name)\n', (610, 623), False, 'from twimonial.models import User, Twimonial\n'), ((939, 1006), 'twimonial.ui.render_write', 'render_write', (['tmpl_values', '"""user.html"""', 'self.request', 'self.response'], {}), "(tmpl_values, 'user.html', self.request, self.response)\n", (951, 1006), False, 'from twimonial.ui import render_write\n'), ((1685, 1721), 'twimonial.models.User.get_by_screen_name', 'User.get_by_screen_name', (['screen_name'], {}), '(screen_name)\n', (1708, 1721), False, 'from twimonial.models import User, Twimonial\n'), ((2354, 2425), 'twimonial.ui.render_write', 'render_write', (['tmpl_values', '"""userlist.html"""', 'self.request', 'self.response'], {}), "(tmpl_values, 'userlist.html', self.request, self.response)\n", (2366, 2425), False, 'from twimonial.ui import render_write\n'), ((455, 492), 'google.appengine.api.memcache.get', 'memcache.get', (['screen_name', '"""userpage"""'], {}), "(screen_name, 'userpage')\n", (467, 492), False, 'from google.appengine.api import memcache\n'), ((685, 777), 'twimonial.ui.render_write', 'render_write', (["{'screen_name': screen_name}", '"""user_404.html"""', 'self.request', 'self.response'], {}), "({'screen_name': screen_name}, 'user_404.html', self.request,\n self.response)\n", (697, 777), False, 'from twimonial.ui import render_write\n'), ((1042, 1136), 'google.appengine.api.memcache.set', 'memcache.set', (['screen_name', 'rendered_page', 'config.CACHE_TIME_USERPAGE'], {'namespace': '"""userpage"""'}), "(screen_name, rendered_page, config.CACHE_TIME_USERPAGE,\n namespace='userpage')\n", (1054, 1136), False, 'from google.appengine.api import memcache\n'), ((1528, 1590), 'google.appengine.api.memcache.get', 'memcache.get', (['screen_names_string', "('userlist_%s' % screen_name)"], {}), "(screen_names_string, 'userlist_%s' % screen_name)\n", (1540, 1590), False, 'from google.appengine.api import memcache\n'), ((1783, 1875), 'twimonial.ui.render_write', 'render_write', (["{'screen_name': screen_name}", '"""user_404.html"""', 'self.request', 'self.response'], {}), "({'screen_name': screen_name}, 'user_404.html', self.request,\n self.response)\n", (1795, 1875), False, 'from twimonial.ui import render_write\n'), ((2461, 2585), 'google.appengine.api.memcache.set', 'memcache.set', (['screen_names_string', 'rendered_page', 'config.CACHE_TIME_USERLISTPAGE'], {'namespace': "('userlist_%s' % screen_name)"}), "(screen_names_string, rendered_page, config.\n CACHE_TIME_USERLISTPAGE, namespace='userlist_%s' % screen_name)\n", (2473, 2585), False, 'from google.appengine.api import memcache\n'), ((1934, 1976), 'twimonial.models.Twimonial.get_tos_from', 'Twimonial.get_tos_from', (['screen_names', 'user'], {}), '(screen_names, user)\n', (1956, 1976), False, 'from twimonial.models import User, Twimonial\n')]
|
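Both handlers above follow the same cache-aside shape: try the cache, fall back to rendering, then populate the cache for the next request. A minimal framework-free sketch of that shape (the cache dict stands in for App Engine's memcache, and render is a hypothetical callable):
cache = {}  # stand-in for memcache; real code would also want a TTL and namespaces

def get_page(key, render):
    page = cache.get(key)      # 1. check the cache first
    if page is None:
        page = render(key)     # 2. miss: do the expensive work
        cache[key] = page      # 3. populate for the next request
    return page

print(get_page("alice", lambda k: f"<html>{k}</html>"))  # rendered
print(get_page("alice", lambda k: f"<html>{k}</html>"))  # served from cache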
import queue
import networkx
from math import radians, sin, cos, sqrt, asin
from util import hav
def shortest_path(g, source, dest):
"""Return single source single destination shortest path using A* search.
Haversine distance is used as heuristic.
Arguments:
g -- networkx graph loaded from shapefile
source -- source intersection's index in g
dest -- destination intersection's index in g
"""
def heuristic(a,b):
"""Return heuristic distance between two nodes in g.
Haversine distance guranteed to be shorter than actual distance,
since it's the shortest distance between points on a sphere
(which the earth approximates).
Arguments:
a -- one node index in g
b -- another node index in g
"""
# lat and lon internally stored in degrees, convert and call function
# lona, lonb, lata, latb = [g.node[a]['lon'], g.node[b]['lon'], g.node[a]['lat'], g.node[b]['lat']]
return 0 # hav((lona, lata), (lonb, latb))
# frontier of nodes to explore
exploring = queue.PriorityQueue()
# property maps which will be built and returned outside
# actual cost to node
cost = {}
# which immediate node was the shortest path from
parent = {}
    # queue.PriorityQueue expects put((priority, data)) tuples; we store the node index as data
exploring.put((0,source))
parent[source] = None
cost[source] = 0
while not exploring.empty():
u_cost, u = exploring.get()
if u == dest:
break
for v in g[u]:
new_cost = cost[u] + g[u][v]['length']
if v not in cost or new_cost < cost[v]:
# relax edge with new_cost
cost[v] = new_cost
parent[v] = u
                # A* priority: actual cost so far plus heuristic distance to the destination
                heuristic_cost = new_cost + heuristic(v, dest)
# doesn't matter if v's already in exploring queue with higher cost
# we'll have duplicate nodes, but those won't affect correctness
# since they'll be explored after the cheaper ones are explored,
# they won't yield any new shorter paths
exploring.put((heuristic_cost,v))
return cost, parent
|
[
"queue.PriorityQueue"
] |
[((994, 1015), 'queue.PriorityQueue', 'queue.PriorityQueue', ([], {}), '()\n', (1013, 1015), False, 'import queue\n')]
|
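A minimal usage sketch for the function above, assuming it is importable; it builds a toy graph whose edges carry the expected 'length' attribute and reconstructs the route from the returned parent map. While the heuristic is stubbed to 0 the search behaves like Dijkstra (which keeps it correct), and no lat/lon node attributes are needed:
import networkx as nx

from astar import shortest_path  # hypothetical module name for the code above

g = nx.Graph()
g.add_edge(0, 1, length=1.0)
g.add_edge(1, 2, length=1.0)
g.add_edge(0, 2, length=5.0)  # direct edge, but longer

cost, parent = shortest_path(g, 0, 2)

# walk the parent map back from the destination to recover the route
path, node = [], 2
while node is not None:
    path.append(node)
    node = parent[node]
print(path[::-1], cost[2])  # [0, 1, 2] 2.0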
from typing import Any
from datetime import datetime
# Defaults
def noop(*args, **kw):
"""No operation. Returns nothing"""
pass
def identity(x: Any) -> Any:
"""Returns argument x"""
return x
def default_now() -> datetime:
return datetime.utcnow()
def default_comparer(x: Any, y: Any) -> bool:
return x == y
def default_sub_comparer(x, y):
return x - y
def default_key_serializer(x: Any) -> str:
return str(x)
def default_error(err) -> Exception:
if isinstance(err, BaseException):
raise err
else:
raise Exception(err)
|
[
"datetime.datetime.utcnow"
] |
[((255, 272), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (270, 272), False, 'from datetime import datetime\n')]
|
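These callables are meant to be injected as overridable defaults rather than called directly; a minimal sketch of that pattern (the dedupe function and the import path are assumptions):
from typing import Any, Callable, Iterable, List

from defaults import default_comparer  # hypothetical module name for the code above

def dedupe(items: Iterable[Any],
           comparer: Callable[[Any, Any], bool] = default_comparer) -> List[Any]:
    out: List[Any] = []
    for item in items:
        # keep the item only if the injected comparer matches nothing seen so far
        if not any(comparer(item, seen) for seen in out):
            out.append(item)
    return out

assert dedupe([1, 1, 2]) == [1, 2]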
# Using the logger.exception() method
import logging
logger = logging.getLogger()
try:
print('starting')
x = 1 / 0
print(x)
except Exception:
logger.exception('an exception message')
print('Done')
|
[
"logging.getLogger"
] |
[((64, 83), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (81, 83), False, 'import logging\n')]
|
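For contrast, logger.error() inside the same handler would record only the message; logger.exception() logs at ERROR level and appends the active traceback. A minimal side-by-side sketch:
import logging

logging.basicConfig(level=logging.ERROR)
log = logging.getLogger(__name__)

try:
    1 / 0
except ZeroDivisionError:
    log.error("error(): message only, no traceback")
    log.exception("exception(): message plus the current traceback")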
# import curses and GPIO
import curses
import serial
import time
from picamera.array import PiRGBArray
from picamera import PiCamera
import cv2
import numpy as np
ser = serial.Serial("/dev/ttyUSB0", "9600")
serLidar = serial.Serial("/dev/ttyACM0", "115200")
cap = cv2.VideoCapture(0)
piCam = False
#check if picamera exists
try:
camera = PiCamera()
camera.resolution = (224,224)
camera.framerate = 20
rawCapture = PiRGBArray(camera, size=(224,224))
piCam = True
except Exception:
print("Pi camera does not exist, using USB camera")
# Get the curses window, turn off echoing of keyboard to screen, turn on
# instant (no waiting) key response, and use special values for cursor keys
screen = curses.initscr()
curses.noecho()
curses.cbreak()
screen.keypad(True)
keyRec = open('key_strokes.txt','w+')
train_data = []
try:
  running = True
  while running:
distString = serLidar.readline()
dist = 1000
try:
dist = int(distString.decode("utf-8"))
    except (UnicodeDecodeError, ValueError):
print("can't convert dist")
if piCam == True:
for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
image_np = np.array(frame.array)
rawCapture.truncate(0)
char = screen.getch()
key = [0,0,0,0,1]
if char == ord('x'):
np.save("train_data.npy", train_data)
ser.write(b'5')
          # curses/file cleanup happens once in the finally block below
          running = False
          break
elif char == ord('w') and dist > 100:
ser.write(b'1')
key = [1,0,0,0,0]
elif char == ord('s') and dist > 100:
ser.write(b'2')
key = [0,1,0,0,0]
elif char == ord('a') and dist > 100:
ser.write(b'3')
key = [0,0,1,0,0]
elif char == ord('d') and dist > 100:
ser.write(b'4')
key = [0,0,0,1,0]
elif char == ord(' '):
ser.write(b'5')
key = [0,0,0,0,1]
val_dict = {"input":key, "image":image_np}
train_data.append(val_dict)
keyRec.write(str(key)+"\n")
if len(train_data) % 100 == 0:
np.save("train_data.npy", train_data)
#no pi camera, using USB
else:
ret, image_np = cap.read()
char = screen.getch()
key = [0,0,0,0,1]
if char == ord('x'):
np.save("train_data.npy", train_data)
ser.write(b'5')
        # curses/file cleanup happens once in the finally block below
        running = False
        break
elif char == ord('w') and dist > 100:
ser.write(b'1')
key = [1,0,0,0,0]
elif char == ord('s') and dist > 100:
ser.write(b'2')
key = [0,1,0,0,0]
elif char == ord('a') and dist > 100:
ser.write(b'3')
key = [0,0,1,0,0]
elif char == ord('d') and dist > 100:
ser.write(b'4')
key = [0,0,0,1,0]
elif char == ord(' '):
ser.write(b'5')
key = [0,0,0,0,1]
val_dict = {"input":key, "image":image_np}
train_data.append(val_dict)
keyRec.write(str(key)+"\n")
if len(train_data) % 100 == 0:
np.save("train_data.npy", train_data)
finally:
#Close down curses properly, inc turn echo back on!
keyRec.close()
curses.nocbreak(); screen.keypad(0); curses.echo()
curses.endwin()
|
[
"serial.Serial",
"numpy.save",
"curses.noecho",
"curses.initscr",
"curses.endwin",
"cv2.VideoCapture",
"curses.cbreak",
"numpy.array",
"picamera.array.PiRGBArray",
"curses.nocbreak",
"curses.echo",
"picamera.PiCamera"
] |
[((170, 207), 'serial.Serial', 'serial.Serial', (['"""/dev/ttyUSB0"""', '"""9600"""'], {}), "('/dev/ttyUSB0', '9600')\n", (183, 207), False, 'import serial\n'), ((219, 258), 'serial.Serial', 'serial.Serial', (['"""/dev/ttyACM0"""', '"""115200"""'], {}), "('/dev/ttyACM0', '115200')\n", (232, 258), False, 'import serial\n'), ((265, 284), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (281, 284), False, 'import cv2\n'), ((705, 721), 'curses.initscr', 'curses.initscr', ([], {}), '()\n', (719, 721), False, 'import curses\n'), ((722, 737), 'curses.noecho', 'curses.noecho', ([], {}), '()\n', (735, 737), False, 'import curses\n'), ((739, 754), 'curses.cbreak', 'curses.cbreak', ([], {}), '()\n', (752, 754), False, 'import curses\n'), ((343, 353), 'picamera.PiCamera', 'PiCamera', ([], {}), '()\n', (351, 353), False, 'from picamera import PiCamera\n'), ((431, 466), 'picamera.array.PiRGBArray', 'PiRGBArray', (['camera'], {'size': '(224, 224)'}), '(camera, size=(224, 224))\n', (441, 466), False, 'from picamera.array import PiRGBArray\n'), ((4006, 4023), 'curses.nocbreak', 'curses.nocbreak', ([], {}), '()\n', (4021, 4023), False, 'import curses\n'), ((4043, 4056), 'curses.echo', 'curses.echo', ([], {}), '()\n', (4054, 4056), False, 'import curses\n'), ((4061, 4076), 'curses.endwin', 'curses.endwin', ([], {}), '()\n', (4074, 4076), False, 'import curses\n'), ((1186, 1207), 'numpy.array', 'np.array', (['frame.array'], {}), '(frame.array)\n', (1194, 1207), True, 'import numpy as np\n'), ((2822, 2859), 'numpy.save', 'np.save', (['"""train_data.npy"""', 'train_data'], {}), "('train_data.npy', train_data)\n", (2829, 2859), True, 'import numpy as np\n'), ((2939, 2956), 'curses.nocbreak', 'curses.nocbreak', ([], {}), '()\n', (2954, 2956), False, 'import curses\n'), ((2976, 2989), 'curses.echo', 'curses.echo', ([], {}), '()\n', (2987, 2989), False, 'import curses\n'), ((3006, 3021), 'curses.endwin', 'curses.endwin', ([], {}), '()\n', (3019, 3021), False, 'import curses\n'), ((3880, 3917), 'numpy.save', 'np.save', (['"""train_data.npy"""', 'train_data'], {}), "('train_data.npy', train_data)\n", (3887, 3917), True, 'import numpy as np\n'), ((1410, 1447), 'numpy.save', 'np.save', (['"""train_data.npy"""', 'train_data'], {}), "('train_data.npy', train_data)\n", (1417, 1447), True, 'import numpy as np\n'), ((1539, 1556), 'curses.nocbreak', 'curses.nocbreak', ([], {}), '()\n', (1554, 1556), False, 'import curses\n'), ((1576, 1589), 'curses.echo', 'curses.echo', ([], {}), '()\n', (1587, 1589), False, 'import curses\n'), ((1610, 1625), 'curses.endwin', 'curses.endwin', ([], {}), '()\n', (1623, 1625), False, 'import curses\n'), ((2568, 2605), 'numpy.save', 'np.save', (['"""train_data.npy"""', 'train_data'], {}), "('train_data.npy', train_data)\n", (2575, 2605), True, 'import numpy as np\n')]
|
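Because train_data is a Python list of dicts, np.save stores it as a pickled object array, and reading it back for training requires allow_pickle=True on load. A minimal sketch:
import numpy as np

train_data = np.load("train_data.npy", allow_pickle=True)
for sample in train_data[:3]:
    # each sample pairs a one-hot key vector with the captured frame
    print(sample["input"], sample["image"].shape)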
# -*- coding: utf-8 -*-
############################################################################
#
# Copyright © 2013, 2015 OnlineGroups.net and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
############################################################################
from __future__ import absolute_import, unicode_literals
from zope.cachedescriptors.property import Lazy
from zope.component import createObject
from gs.group.base import GroupViewlet
class LatestPost(GroupViewlet):
def __init__(self, messages, request, view, manager):
super(LatestPost, self).__init__(messages, request, view, manager)
@Lazy
def topic(self):
retval = [post for post in self.view.topic if not(post['hidden'])]
return retval
@Lazy
def relativeUrl(self):
retval = ''
if self.topic:
lastPost = self.topic[-1]
url = '{groupUrl}/messages/topic/{lastPostId}/'\
'#post-{lastPostId}'
retval = url.format(groupUrl=self.groupInfo.relativeURL,
lastPostId=lastPost['post_id'])
return retval
@Lazy
def authorInfo(self):
if self.topic:
lastPost = self.topic[-1]
authorId = lastPost['author_id']
else:
authorId = ''
retval = createObject('groupserver.UserFromId', self.context,
authorId)
return retval
@Lazy
def lastPostDate(self):
retval = None
if self.topic:
retval = self.topic[-1]['date']
return retval
@Lazy
def show(self):
retval = len(self.topic) > 1
return retval
|
[
"zope.component.createObject"
] |
[((1715, 1777), 'zope.component.createObject', 'createObject', (['"""groupserver.UserFromId"""', 'self.context', 'authorId'], {}), "('groupserver.UserFromId', self.context, authorId)\n", (1727, 1777), False, 'from zope.component import createObject\n')]
|
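zope's Lazy computes the property body once per instance and caches the result on the instance, so self.topic above is filtered only on first access. The standard-library functools.cached_property gives the same behavior; a minimal sketch of the idea:
from functools import cached_property

class Topic:
    def _fetch(self):
        return [{"hidden": False, "post_id": 1}, {"hidden": True, "post_id": 2}]

    @cached_property
    def posts(self):
        print("computed once")
        return [p for p in self._fetch() if not p["hidden"]]

t = Topic()
assert t.posts == t.posts  # "computed once" is printed a single time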
# Copyright (c) 2020 Graphcore Ltd. All rights reserved.
import numpy as np
import popart
import torch
from op_tester import op_tester
def test_asinh(op_tester):
# create test data
# Notice: as asinh(x) = ln(x + sqrt(x^2 + 1)), absolute precision
# deteriorates for larger negative numbers as you will have ln(0.0001).
d1 = np.array([
-30.0, -20.12, -2.2, -1.5, -0.2, 0.0, 0.234, 1.0, 1.2, 2.0, 3.0, 10.0,
100.0, 2001.0
],
dtype=np.float32)
def init_builder(builder):
i1 = builder.addInputTensor(d1)
o = builder.aiOnnx.asinh([i1])
builder.addOutputTensor(o)
return [o]
def reference(ref_data):
out = np.arcsinh(d1)
return [out]
op_tester.setPatterns(['DecomposeBinaryConstScalar'],
enableRuntimeAsserts=False)
op_tester.run(init_builder, reference, 'infer')
def test_asinh_inplace(op_tester):
# create test data
d1 = np.array([
-30.0, -20.12, -2.2, -1.5, -0.2, 0.0, 0.234, 1.0, 1.2, 2.0, 3.0, 10.0,
100.0, 2001.0
],
dtype=np.float32)
def init_builder(builder):
i1 = builder.addInputTensor(d1)
o = builder.aiOnnx.asinh([i1])
builder.addOutputTensor(o)
return [o]
def reference(ref_data):
out = np.arcsinh(d1)
return [out]
op_tester.setPatterns(['InPlace', 'DecomposeBinaryConstScalar'],
enableRuntimeAsserts=False)
op_tester.run(init_builder, reference, 'infer')
def test_asinh_grad(op_tester):
# create test data
d1 = np.array([
-20.12, -2.2, -1.5, -0.2, 0.0, 0.234, 1.0, 1.2, 2.0, 3.0, 10.0, 100.0,
2001.0
],
dtype=np.float32)
def derivative_asinh(x):
return 1 / (np.sqrt(np.power(x, 2) + 1))
def init_builder(builder):
i1 = builder.addInputTensor(d1)
o = builder.aiOnnx.asinh([i1])
builder.addOutputTensor(o)
return [
o,
popart.reservedGradientPrefix() + i1,
popart.reservedGradientPrefix() + o,
]
def reference(ref_data):
out = np.arcsinh(d1)
d__o = derivative_asinh(d1) * ref_data.getOutputTensorGrad(0)
return [out, d__o, None]
op_tester.setPatterns([
'SubtractArg1GradOp', 'LogGradOp', 'SqrtGradOp', 'PowArg0GradOp',
'DecomposeBinaryConstScalar'
],
enableRuntimeAsserts=False)
op_tester.run(init_builder, reference, 'train')
|
[
"op_tester.op_tester.setPatterns",
"numpy.power",
"op_tester.op_tester.run",
"popart.reservedGradientPrefix",
"numpy.array",
"numpy.arcsinh"
] |
[((342, 461), 'numpy.array', 'np.array', (['[-30.0, -20.12, -2.2, -1.5, -0.2, 0.0, 0.234, 1.0, 1.2, 2.0, 3.0, 10.0, \n 100.0, 2001.0]'], {'dtype': 'np.float32'}), '([-30.0, -20.12, -2.2, -1.5, -0.2, 0.0, 0.234, 1.0, 1.2, 2.0, 3.0, \n 10.0, 100.0, 2001.0], dtype=np.float32)\n', (350, 461), True, 'import numpy as np\n'), ((747, 833), 'op_tester.op_tester.setPatterns', 'op_tester.setPatterns', (["['DecomposeBinaryConstScalar']"], {'enableRuntimeAsserts': '(False)'}), "(['DecomposeBinaryConstScalar'], enableRuntimeAsserts=\n False)\n", (768, 833), False, 'from op_tester import op_tester\n'), ((859, 906), 'op_tester.op_tester.run', 'op_tester.run', (['init_builder', 'reference', '"""infer"""'], {}), "(init_builder, reference, 'infer')\n", (872, 906), False, 'from op_tester import op_tester\n'), ((976, 1095), 'numpy.array', 'np.array', (['[-30.0, -20.12, -2.2, -1.5, -0.2, 0.0, 0.234, 1.0, 1.2, 2.0, 3.0, 10.0, \n 100.0, 2001.0]'], {'dtype': 'np.float32'}), '([-30.0, -20.12, -2.2, -1.5, -0.2, 0.0, 0.234, 1.0, 1.2, 2.0, 3.0, \n 10.0, 100.0, 2001.0], dtype=np.float32)\n', (984, 1095), True, 'import numpy as np\n'), ((1381, 1477), 'op_tester.op_tester.setPatterns', 'op_tester.setPatterns', (["['InPlace', 'DecomposeBinaryConstScalar']"], {'enableRuntimeAsserts': '(False)'}), "(['InPlace', 'DecomposeBinaryConstScalar'],\n enableRuntimeAsserts=False)\n", (1402, 1477), False, 'from op_tester import op_tester\n'), ((1504, 1551), 'op_tester.op_tester.run', 'op_tester.run', (['init_builder', 'reference', '"""infer"""'], {}), "(init_builder, reference, 'infer')\n", (1517, 1551), False, 'from op_tester import op_tester\n'), ((1618, 1730), 'numpy.array', 'np.array', (['[-20.12, -2.2, -1.5, -0.2, 0.0, 0.234, 1.0, 1.2, 2.0, 3.0, 10.0, 100.0, 2001.0]'], {'dtype': 'np.float32'}), '([-20.12, -2.2, -1.5, -0.2, 0.0, 0.234, 1.0, 1.2, 2.0, 3.0, 10.0, \n 100.0, 2001.0], dtype=np.float32)\n', (1626, 1730), True, 'import numpy as np\n'), ((2299, 2450), 'op_tester.op_tester.setPatterns', 'op_tester.setPatterns', (["['SubtractArg1GradOp', 'LogGradOp', 'SqrtGradOp', 'PowArg0GradOp',\n 'DecomposeBinaryConstScalar']"], {'enableRuntimeAsserts': '(False)'}), "(['SubtractArg1GradOp', 'LogGradOp', 'SqrtGradOp',\n 'PowArg0GradOp', 'DecomposeBinaryConstScalar'], enableRuntimeAsserts=False)\n", (2320, 2450), False, 'from op_tester import op_tester\n'), ((2499, 2546), 'op_tester.op_tester.run', 'op_tester.run', (['init_builder', 'reference', '"""train"""'], {}), "(init_builder, reference, 'train')\n", (2512, 2546), False, 'from op_tester import op_tester\n'), ((706, 720), 'numpy.arcsinh', 'np.arcsinh', (['d1'], {}), '(d1)\n', (716, 720), True, 'import numpy as np\n'), ((1340, 1354), 'numpy.arcsinh', 'np.arcsinh', (['d1'], {}), '(d1)\n', (1350, 1354), True, 'import numpy as np\n'), ((2176, 2190), 'numpy.arcsinh', 'np.arcsinh', (['d1'], {}), '(d1)\n', (2186, 2190), True, 'import numpy as np\n'), ((2035, 2066), 'popart.reservedGradientPrefix', 'popart.reservedGradientPrefix', ([], {}), '()\n', (2064, 2066), False, 'import popart\n'), ((2085, 2116), 'popart.reservedGradientPrefix', 'popart.reservedGradientPrefix', ([], {}), '()\n', (2114, 2116), False, 'import popart\n'), ((1824, 1838), 'numpy.power', 'np.power', (['x', '(2)'], {}), '(x, 2)\n', (1832, 1838), True, 'import numpy as np\n')]
|
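The analytic derivative used above, d/dx asinh(x) = 1/sqrt(x^2 + 1), is easy to sanity-check against a central finite difference with plain numpy:
import numpy as np

def derivative_asinh(x):
    return 1 / np.sqrt(np.power(x, 2) + 1)

x = np.array([-2.2, -0.2, 0.0, 1.2, 10.0])
eps = 1e-4
numeric = (np.arcsinh(x + eps) - np.arcsinh(x - eps)) / (2 * eps)
assert np.allclose(numeric, derivative_asinh(x), atol=1e-6)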
import argparse
def add_one(num: int) -> int:
if not num:
return 0
return num + 1
def add_with_args(num: int) -> int:
added = add_one(num)
return added
def parse_command_line_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description='My python template.')
parser.add_argument('--foo', type=int, help='foo help')
args = parser.parse_args()
return args
def main() -> None:
args = parse_command_line_args()
added = add_with_args(args.foo)
print(f'added: {added}')
if __name__ == '__main__':
main()
|
[
"argparse.ArgumentParser"
] |
[((248, 306), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""My python template."""'}), "(description='My python template.')\n", (271, 306), False, 'import argparse\n')]
|
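parse_args also accepts an explicit argv list, which makes a template like this testable without spawning a process or patching sys.argv; a minimal sketch with made-up values:
import argparse

parser = argparse.ArgumentParser(description='My python template.')
parser.add_argument('--foo', type=int, help='foo help')

args = parser.parse_args(['--foo', '41'])  # no real command line needed
assert args.foo == 41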
from datetime import (
datetime,
timezone,
)
def current_datetime() -> datetime:
"""Get current datetime in `UTC`.
:return: A ``datetime`` instance.
"""
return datetime.now(tz=timezone.utc)
NULL_DATETIME = datetime.max.replace(tzinfo=timezone.utc)
|
[
"datetime.datetime.now",
"datetime.datetime.max.replace"
] |
[((235, 276), 'datetime.datetime.max.replace', 'datetime.max.replace', ([], {'tzinfo': 'timezone.utc'}), '(tzinfo=timezone.utc)\n', (255, 276), False, 'from datetime import datetime, timezone\n'), ((187, 216), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'timezone.utc'}), '(tz=timezone.utc)\n', (199, 216), False, 'from datetime import datetime, timezone\n')]
|
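Because both helpers return timezone-aware values, they can be compared and subtracted safely, and NULL_DATETIME works as an "infinitely far future" sentinel. A minimal sketch, assuming the module above is importable as dt_utils (an assumed name):
from dt_utils import NULL_DATETIME, current_datetime  # hypothetical module name

now = current_datetime()
assert now.tzinfo is not None  # aware, so the comparison below is valid
assert now < NULL_DATETIME     # the sentinel sorts after any real timestamp
print((NULL_DATETIME - now).days)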
# painting.py
import os
import cv2 as cv
import numpy as np
from matplotlib import pyplot as plt
from preprocess import Kmeans
from monitor import Monitor
import tkinter as tk
import csv
from utils import BlankImg
class Painting():
def __init__(self, K, shape):
#self.radius = 3
self.K = K
self.size = shape
self.count = 0
self.fixcount = 0
self.brush_size = 3
        # start from an all-white canvas instead of filling it pixel by pixel
        self.img = np.full(self.size, 255.0)
def Painting(self):
img = BlankImg(self.size)
self.color_list = []
for i in range(0, self.K):
filename = "./points/" + str(i) + "_point.csv"
with open(filename, newline='') as csvfile:
rows = csv.reader(csvfile)
for row in rows:
print(row)
if (len(row) != 2):
r, g, b = int(row[3]), int(row[4]), int(row[5])
self.color_list.append((r, g, b))
else:
x = int(row[0])
y = int(row[1])
for a in range(x-self.brush_size, x+self.brush_size):
for b in range(y-self.brush_size, y+self.brush_size):
if (a >= 0 and a <= self.size[0]-1):
if (b >= 0 and b <= self.size[1]-1):
img[a, b, 0] = r
                                        img[a, b, 1] = g
                                        img[a, b, 2] = b
save_name = "./painting/" + str(i) + ".png"
cv.imwrite(save_name, img)
words = "finished " + str(i)
print (words)
return (self.color_list)
def DectectImg(self, targetname, comparename):
target_img = cv.imread(targetname)
compare_img = cv.imread(comparename)
different_img = BlankImg(self.size)
for x in range(0, self.size[0]):
for y in range(0, self.size[1]):
if (int(target_img[x, y, 0]) != int(compare_img[x, y, 0])):
different_img[x, y, 0] = target_img[x, y, 0]
different_img[x, y, 1] = target_img[x, y, 1]
different_img[x, y, 2] = target_img[x, y, 2]
else:
if (int(target_img[x, y, 1]) != int(compare_img[x, y, 1])):
different_img[x, y, 0] = target_img[x, y, 0]
different_img[x, y, 1] = target_img[x, y, 1]
different_img[x, y, 2] = target_img[x, y, 2]
else:
if (int(target_img[x, y, 2]) != int(compare_img[x, y, 2])):
different_img[x, y, 0] = target_img[x, y, 0]
different_img[x, y, 1] = target_img[x, y, 1]
different_img[x, y, 2] = target_img[x, y, 2]
save_name = "./difference/" + str(self.count) + ".png"
cv.imwrite(save_name, different_img)
self.count += 1
"""
def DectectImg(self, targetname, comparedname):
targetimg = cv.imread(targetname)
comparedimg = cv.imread(comparedname)
print (type(targetimg))
print (type(comparedimg))
fiximg = np.zeros((self.size))
for x in range(0, self.size[0]):
for y in range(0, self.size[1]):
if (targetimg[x, y, 0] == comparedimg[x, y, 0] and \
targetimg[x, y, 1] == comparedimg[x, y, 1] and \
targetimg[x, y, 2] == comparedimg[x, y, 2]):
fiximg[x, y, 0] = fiximg[x, y, 1] = fiximg[x, y, 2] = 255
else:
fiximg[x, y, 0] = targetimg[x, y, 0]
fiximg[x, y, 1] = targetimg[x, y, 1]
fiximg[x, y, 2] = targetimg[x, y, 2]
save_name = "./fixpoint/" + str(self.fixcount) + "_fix.png"
cv.imwrite(save_name, fiximg)
print ("save name: ", save_name)
self.fixcount += 1
return (save_name)
"""
if __name__ == "__main__":
K = 298
filename = "K_298_1_2.png"
img = cv.imread(filename)
size = img.shape
new = Painting(K, size)
#filename = "./points/0_line.csv"
color_list = new.Painting()
comparename = "./painting/297.png"
new.DectectImg(filename, comparename)
print ("finished.")
|
[
"csv.reader",
"cv2.imwrite",
"utils.BlankImg",
"numpy.zeros",
"cv2.imread"
] |
[((4358, 4377), 'cv2.imread', 'cv.imread', (['filename'], {}), '(filename)\n', (4367, 4377), True, 'import cv2 as cv\n'), ((436, 455), 'numpy.zeros', 'np.zeros', (['self.size'], {}), '(self.size)\n', (444, 455), True, 'import numpy as np\n'), ((663, 682), 'utils.BlankImg', 'BlankImg', (['self.size'], {}), '(self.size)\n', (671, 682), False, 'from utils import BlankImg\n'), ((2002, 2023), 'cv2.imread', 'cv.imread', (['targetname'], {}), '(targetname)\n', (2011, 2023), True, 'import cv2 as cv\n'), ((2046, 2068), 'cv2.imread', 'cv.imread', (['comparename'], {}), '(comparename)\n', (2055, 2068), True, 'import cv2 as cv\n'), ((2093, 2112), 'utils.BlankImg', 'BlankImg', (['self.size'], {}), '(self.size)\n', (2101, 2112), False, 'from utils import BlankImg\n'), ((3179, 3215), 'cv2.imwrite', 'cv.imwrite', (['save_name', 'different_img'], {}), '(save_name, different_img)\n', (3189, 3215), True, 'import cv2 as cv\n'), ((885, 904), 'csv.reader', 'csv.reader', (['csvfile'], {}), '(csvfile)\n', (895, 904), False, 'import csv\n'), ((1794, 1820), 'cv2.imwrite', 'cv.imwrite', (['save_name', 'img'], {}), '(save_name, img)\n', (1804, 1820), True, 'import cv2 as cv\n')]
|
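Painting.Painting infers the CSV layout from row length: two-column rows are (x, y) brush points, and any other row is read as a color row with R, G, B in columns 3-5. A minimal sketch that writes one cluster file in that layout (the values are made up, and the ./points directory is assumed to exist):
import csv

with open("./points/0_point.csv", "w", newline="") as f:
    w = csv.writer(f)
    w.writerow([0, 0, 0, 255, 0, 0])  # color row: columns 3-5 hold R, G, B
    w.writerow([10, 20])              # point rows: x, y
    w.writerow([11, 20])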
import re
import requests
from accounts.models import Company, CustomUser
from django.contrib.auth.decorators import login_required
from django.shortcuts import redirect, render
from django.urls import reverse
from django.views import View
from django.views.generic import DetailView, ListView, TemplateView
from django.views.generic.edit import CreateView, UpdateView
from pages.models import Auction, Bid, DataSource
class HomePageView(TemplateView):
template_name = 'pages/home.html'
def get_context_data(self, *args, **kwargs):
context = super(HomePageView, self).get_context_data(*args, **kwargs)
context['users'] = CustomUser.objects.all()
return context
class AboutPageView(TemplateView):
template_name = 'pages/about.html'
# @login_required
class DashboardPageView(TemplateView):
template_name = 'pages/dashboard.html'
def get_context_data(self, *args, **kwargs):
context = super(DashboardPageView, self).get_context_data(
*args, **kwargs)
return context
class AuctionListView(ListView):
model = Auction
def get_context_data(self, *args, **kwargs):
context = super(AuctionListView, self).get_context_data(
*args, **kwargs)
# company=Company.objects.get(user=self.request.user)
# context['company'] = company
return context
class MyAuctionDetailView(View):
def get(self, request, *args, **kwargs):
my_auction = Auction.objects.filter(user=request.user).first()
if my_auction:
return render(request, 'pages/my-auction-detail.html', {"my_auction": my_auction})
else:
return redirect('auction-create')
class AuctionCreateView(CreateView):
model = Auction
fields = ['description', 'starting_price', 'category', 'active']
def form_valid(self, form):
company = Company.objects.filter(user=self.request.user).first()
if company:
form.instance.user = self.request.user
form.instance.company = company
return super().form_valid(form)
else:
return redirect('company-create')
class AuctionUpdateView(UpdateView):
model = Auction
fields = ['description', 'starting_price', 'category', 'active']
template_name_suffix = '_update_form'
class AuctionDetailView(DetailView):
context_object_name = 'auction'
queryset = Auction.objects.all()
def get_context_data(self, *args, **kwargs):
context = super(AuctionDetailView, self).get_context_data(
*args, **kwargs)
company = Company.objects.get(user=self.request.user)
context['company'] = company
return context
class StripeConnectionView(View):
# stripe.api_key = "<KEY>"
# stripe.client_id = "ca_L3q4MuEPR0JHtn2AlFe5bbf8TqrZDAcq"
def get(self, request, *args, **kwargs):
# get the company of the user
company, created = Company.objects.get_or_create(user=request.user)
# at this time, the Company should be created at Codat
baseUrl = "https://api-uat.codat.io"
authHeaderValue = "Basic bDRlbDRiWDhwdGdhbzVYR1c2d2dxV0s2NHpEa3NOYTlIQk9wOVFEZQ=="
# Add your authorization header
headers = {"Authorization": authHeaderValue}
# TODO first create the company
data = {"name": "Recipe test company"}
response = requests.post(
'https://api.codat.io/companies', json=data, headers=headers)
data = response.json()
data_source, created = DataSource.objects.get_or_create(
company=company)
data_source.codat_id = data['id']
data_source.save()
redirect_url = data['redirect']
# url = stripe.OAuth.authorize_url(scope='read_write')
# company=Company.objects.first()
# data, created= DataSource.objects.update_or_create(company=company, url=url)
# print("this is the url ........................", url)
# return render(request, "pages/stripe-connection.html")
return redirect(redirect_url)
class DatasourceView(View):
def get(self, request, *args, **kwargs):
# get the company of the user
company, created = Company.objects.get_or_create(user=request.user)
# at this time, the Company should be created at Codat
baseUrl = "https://api-uat.codat.io"
authHeaderValue = "Basic bDRlbDRiWDhwdGdhbzVYR1c2d2dxV0s2NHpEa3NOYTlIQk9wOVFEZQ=="
# Add your authorization header
headers = {"Authorization": authHeaderValue}
data_source, created = DataSource.objects.get_or_create(
company=company)
codat_id = data_source.codat_id
print('codat id ....................', codat_id)
response = requests.get(
'https://api.codat.io/companies/'+codat_id, headers=headers)
data = response.json()
print('data, ........', data)
print('hey .................',
data['dataConnections'][0]['status'] == 'Linked')
if data['dataConnections'][0]['status'] == 'Linked':
data_source.codac_id = data['id']
data_source.platform = data['platform']
data_source.redirect = data['redirect']
data_source.last_sync = data['lastSync']
data_source.status = data['dataConnections'][0]['status']
data_source.save()
return render(request, 'pages/data-source.html', {'data_source': data_source})
else:
return redirect('company-create')
def get_context_data(self, *args, **kwargs):
context = super(DatasourceView, self).get_context_data(
*args, **kwargs)
company = Company.objects.get(user=self.request.user)
context['company'] = company
return context
class BidCreateView(CreateView):
model = Bid
fields = ['user', 'auction', 'bid_price']
def get_absolute_url(self):
return reverse('bid-detail', kwargs={'pk': self.pk})
def get_context_data(self, *args, **kwargs):
context = super(BidCreateView, self).get_context_data(*args, **kwargs)
company = Company.objects.get(user=self.request.user)
context['company'] = company
return context
class BidDetailView(DetailView):
context_object_name = 'bid'
queryset = Bid.objects.all()
def get_context_data(self, *args, **kwargs):
context = super(BidDetailView, self).get_context_data(*args, **kwargs)
company = Company.objects.get(user=self.request.user)
context['company'] = company
return context
|
[
"pages.models.DataSource.objects.get_or_create",
"django.shortcuts.redirect",
"accounts.models.Company.objects.get",
"pages.models.Auction.objects.all",
"django.urls.reverse",
"pages.models.Auction.objects.filter",
"accounts.models.Company.objects.filter",
"requests.get",
"accounts.models.CustomUser.objects.all",
"django.shortcuts.render",
"pages.models.Bid.objects.all",
"requests.post",
"accounts.models.Company.objects.get_or_create"
] |
[((2416, 2437), 'pages.models.Auction.objects.all', 'Auction.objects.all', ([], {}), '()\n', (2435, 2437), False, 'from pages.models import Auction, Bid, DataSource\n'), ((6371, 6388), 'pages.models.Bid.objects.all', 'Bid.objects.all', ([], {}), '()\n', (6386, 6388), False, 'from pages.models import Auction, Bid, DataSource\n'), ((651, 675), 'accounts.models.CustomUser.objects.all', 'CustomUser.objects.all', ([], {}), '()\n', (673, 675), False, 'from accounts.models import Company, CustomUser\n'), ((2603, 2646), 'accounts.models.Company.objects.get', 'Company.objects.get', ([], {'user': 'self.request.user'}), '(user=self.request.user)\n', (2622, 2646), False, 'from accounts.models import Company, CustomUser\n'), ((2951, 2999), 'accounts.models.Company.objects.get_or_create', 'Company.objects.get_or_create', ([], {'user': 'request.user'}), '(user=request.user)\n', (2980, 2999), False, 'from accounts.models import Company, CustomUser\n'), ((3400, 3475), 'requests.post', 'requests.post', (['"""https://api.codat.io/companies"""'], {'json': 'data', 'headers': 'headers'}), "('https://api.codat.io/companies', json=data, headers=headers)\n", (3413, 3475), False, 'import requests\n'), ((3551, 3600), 'pages.models.DataSource.objects.get_or_create', 'DataSource.objects.get_or_create', ([], {'company': 'company'}), '(company=company)\n', (3583, 3600), False, 'from pages.models import Auction, Bid, DataSource\n'), ((4065, 4087), 'django.shortcuts.redirect', 'redirect', (['redirect_url'], {}), '(redirect_url)\n', (4073, 4087), False, 'from django.shortcuts import redirect, render\n'), ((4230, 4278), 'accounts.models.Company.objects.get_or_create', 'Company.objects.get_or_create', ([], {'user': 'request.user'}), '(user=request.user)\n', (4259, 4278), False, 'from accounts.models import Company, CustomUser\n'), ((4604, 4653), 'pages.models.DataSource.objects.get_or_create', 'DataSource.objects.get_or_create', ([], {'company': 'company'}), '(company=company)\n', (4636, 4653), False, 'from pages.models import Auction, Bid, DataSource\n'), ((4786, 4861), 'requests.get', 'requests.get', (["('https://api.codat.io/companies/' + codat_id)"], {'headers': 'headers'}), "('https://api.codat.io/companies/' + codat_id, headers=headers)\n", (4798, 4861), False, 'import requests\n'), ((5739, 5782), 'accounts.models.Company.objects.get', 'Company.objects.get', ([], {'user': 'self.request.user'}), '(user=self.request.user)\n', (5758, 5782), False, 'from accounts.models import Company, CustomUser\n'), ((5990, 6035), 'django.urls.reverse', 'reverse', (['"""bid-detail"""'], {'kwargs': "{'pk': self.pk}"}), "('bid-detail', kwargs={'pk': self.pk})\n", (5997, 6035), False, 'from django.urls import reverse\n'), ((6184, 6227), 'accounts.models.Company.objects.get', 'Company.objects.get', ([], {'user': 'self.request.user'}), '(user=self.request.user)\n', (6203, 6227), False, 'from accounts.models import Company, CustomUser\n'), ((6537, 6580), 'accounts.models.Company.objects.get', 'Company.objects.get', ([], {'user': 'self.request.user'}), '(user=self.request.user)\n', (6556, 6580), False, 'from accounts.models import Company, CustomUser\n'), ((1566, 1641), 'django.shortcuts.render', 'render', (['request', '"""pages/my-auction-detail.html"""', "{'my_auction': my_auction}"], {}), "(request, 'pages/my-auction-detail.html', {'my_auction': my_auction})\n", (1572, 1641), False, 'from django.shortcuts import redirect, render\n'), ((1676, 1702), 'django.shortcuts.redirect', 'redirect', (['"""auction-create"""'], {}), "('auction-create')\n", (1684, 1702), False, 'from django.shortcuts import redirect, render\n'), ((2130, 2156), 'django.shortcuts.redirect', 'redirect', (['"""company-create"""'], {}), "('company-create')\n", (2138, 2156), False, 'from django.shortcuts import redirect, render\n'), ((5432, 5503), 'django.shortcuts.render', 'render', (['request', '"""pages/data-source.html"""', "{'data_source': data_source}"], {}), "(request, 'pages/data-source.html', {'data_source': data_source})\n", (5438, 5503), False, 'from django.shortcuts import redirect, render\n'), ((5538, 5564), 'django.shortcuts.redirect', 'redirect', (['"""company-create"""'], {}), "('company-create')\n", (5546, 5564), False, 'from django.shortcuts import redirect, render\n'), ((1474, 1515), 'pages.models.Auction.objects.filter', 'Auction.objects.filter', ([], {'user': 'request.user'}), '(user=request.user)\n', (1496, 1515), False, 'from pages.models import Auction, Bid, DataSource\n'), ((1883, 1929), 'accounts.models.Company.objects.filter', 'Company.objects.filter', ([], {'user': 'self.request.user'}), '(user=self.request.user)\n', (1905, 1929), False, 'from accounts.models import Company, CustomUser\n')]
|
# -*- coding: utf-8 -*-
"""
examples.bromelia_hss
~~~~~~~~~~~~~~~~~~~~~
This module contains an example on how to setup a dummy HSS
by using the Bromelia class features of bromelia library.
:copyright: (c) 2020 <NAME>.
:license: MIT, see LICENSE for more details.
"""
import os
import sys
basedir = os.path.dirname(os.path.abspath(__file__))
examples_dir = os.path.dirname(basedir)
bromelia_dir = os.path.dirname(examples_dir)
sys.path.insert(0, bromelia_dir)
from bromelia import Bromelia
from bromelia.avps import *
from bromelia.constants import *
from bromelia.etsi_3gpp_s6a_s6d.avps import *
from bromelia.etsi_3gpp_s6a_s6d.messages import CancelLocationAnswer as CLA
from bromelia.etsi_3gpp_s6a_s6d.messages import CancelLocationRequest as CLR
#: Application initialization
config_file = os.path.join(basedir, "bromelia_hss_config.yaml")
app = Bromelia(config_file=config_file)
app.load_messages_into_application_id([CLA, CLR], DIAMETER_APPLICATION_S6a_S6d)
CLR = app.s6a_s6d.CLR #: Creating CLR alias
if __name__ == "__main__":
app.run() #: It will be blocked until connection has been established
clr = CLR(user_name="123456789012345",
clr_flags=2,
destination_host=app.configs[0]["PEER_NODE_HOSTNAME"],
supported_features=[
VendorIdAVP(VENDOR_ID_3GPP),
FeatureListIdAVP(1),
FeatureListAVP(134217728)])
cla = app.send_message(clr)
|
[
"os.path.abspath",
"os.path.dirname",
"sys.path.insert",
"bromelia.Bromelia",
"os.path.join"
] |
[((386, 410), 'os.path.dirname', 'os.path.dirname', (['basedir'], {}), '(basedir)\n', (401, 410), False, 'import os\n'), ((426, 455), 'os.path.dirname', 'os.path.dirname', (['examples_dir'], {}), '(examples_dir)\n', (441, 455), False, 'import os\n'), ((457, 489), 'sys.path.insert', 'sys.path.insert', (['(0)', 'bromelia_dir'], {}), '(0, bromelia_dir)\n', (472, 489), False, 'import sys\n'), ((827, 876), 'os.path.join', 'os.path.join', (['basedir', '"""bromelia_hss_config.yaml"""'], {}), "(basedir, 'bromelia_hss_config.yaml')\n", (839, 876), False, 'import os\n'), ((884, 917), 'bromelia.Bromelia', 'Bromelia', ([], {'config_file': 'config_file'}), '(config_file=config_file)\n', (892, 917), False, 'from bromelia import Bromelia\n'), ((344, 369), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (359, 369), False, 'import os\n')]
|
import setuptools
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setuptools.setup(
name="fast_sql_manager",
version="0.1.5",
author="<NAME>",
author_email="<EMAIL>",
description="Um pacote simples para realizar operações no banco",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/OscarSilvaOfficial/easy_sql",
packages=setuptools.find_packages(),
install_requires=[
'six>=1.15.0',
'mysqlclient>=2.0.3',
'mysql-connector-python>=8.0.22',
'mysql>=0.0.2'
],
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
)
|
[
"setuptools.find_packages"
] |
[((454, 480), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (478, 480), False, 'import setuptools\n')]
|
from django.db import transaction
from rest_framework import viewsets
from .serializers import TaskSerializer, CreateTaskSerializer
from .models import Task
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
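    # create() sets the `creating` flag so get_serializer() can swap in the
    # write-only CreateTaskSerializer for POST requests.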
def get_serializer(self, data=None, *args, **kwargs):
if getattr(self, 'creating', False):
return CreateTaskSerializer(data=data)
return super().get_serializer(data, *args, **kwargs)
def create(self, request, *args, **kwargs):
self.creating = True
with transaction.atomic():
return super().create(request, *args, **kwargs)
|
[
"django.db.transaction.atomic"
] |
[((581, 601), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (599, 601), False, 'from django.db import transaction\n')]
|
import os
import pickle
import re
import string
from flask import Flask, request, jsonify
CUR_DIR = os.path.dirname(__file__)
STOP_WORDS = pickle.load(open(
os.path.join(CUR_DIR,
'pkl_objects',
'stopwords.pkl'), 'rb'))
VECTORIZER = pickle.load(open(
os.path.join(CUR_DIR,
'pkl_objects',
'vectorizer.pkl'), 'rb'))
CLF = pickle.load(open(
os.path.join(CUR_DIR,
'pkl_objects',
'classifier.pkl'), 'rb'))
LABEL_DICT = {0: 'The tweet contains hate speech',
1: 'The tweet is not offensive',
2: 'The tweet uses offensive language but not hate speech'}
app = Flask(__name__)
def preprocess_tweet(tweet):
tweet = tweet.lower()
# Remove urls
    tweet = re.sub(r'((www\.[^\s]+)|(https?://[^\s]+))', '', tweet)
    # Remove usernames
    tweet = re.sub(r'@[^\s]+', '', tweet)
# Remove white space
tweet = tweet.strip()
# Remove hashtags
tweet = re.sub(r'#([^\s]+)', '', tweet)
# Remove stopwords
tweet = " ".join([word for word in tweet.split(' ') if word not in STOP_WORDS])
# Remove punctuation
tweet = "".join(l for l in tweet if l not in string.punctuation)
return tweet
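# The endpoint expects a JSON body such as {"sentence": "..."}; e.g. (assuming
# the default host/port configured below):
#   curl -X POST http://localhost:5000/analyse/sentiment \
#        -H 'Content-Type: application/json' -d '{"sentence": "some tweet"}'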
@app.route("/analyse/sentiment", methods=['POST'])
def classify_tweet():
sentence = request.get_json()['sentence']
sentence_to_clf = preprocess_tweet(sentence)
sentence_to_clf = VECTORIZER.transform([sentence_to_clf])
label = CLF.predict(sentence_to_clf)[0]
confidence = max(CLF.predict_proba(sentence_to_clf)[0]) * 100
return jsonify(
sentence=LABEL_DICT[label],
polarity=confidence
)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5000)
|
[
"os.path.join",
"os.path.dirname",
"flask.Flask",
"flask.jsonify",
"re.sub",
"flask.request.get_json"
] |
[((102, 127), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (117, 127), False, 'import os\n'), ((697, 712), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (702, 712), False, 'from flask import Flask, request, jsonify\n'), ((800, 857), 're.sub', 're.sub', (['"""((www\\\\.[^\\\\s]+)|(https?://[^\\\\s]+))"""', '""""""', 'tweet'], {}), "('((www\\\\.[^\\\\s]+)|(https?://[^\\\\s]+))', '', tweet)\n", (806, 857), False, 'import re\n'), ((890, 919), 're.sub', 're.sub', (['"""@[^\\\\s]+"""', '""""""', 'tweet'], {}), "('@[^\\\\s]+', '', tweet)\n", (896, 919), False, 'import re\n'), ((1004, 1035), 're.sub', 're.sub', (['"""#([^\\\\s]+)"""', '""""""', 'tweet'], {}), "('#([^\\\\s]+)', '', tweet)\n", (1010, 1035), False, 'import re\n'), ((1609, 1665), 'flask.jsonify', 'jsonify', ([], {'sentence': 'LABEL_DICT[label]', 'polarity': 'confidence'}), '(sentence=LABEL_DICT[label], polarity=confidence)\n', (1616, 1665), False, 'from flask import Flask, request, jsonify\n'), ((164, 217), 'os.path.join', 'os.path.join', (['CUR_DIR', '"""pkl_objects"""', '"""stopwords.pkl"""'], {}), "(CUR_DIR, 'pkl_objects', 'stopwords.pkl')\n", (176, 217), False, 'import os\n'), ((295, 349), 'os.path.join', 'os.path.join', (['CUR_DIR', '"""pkl_objects"""', '"""vectorizer.pkl"""'], {}), "(CUR_DIR, 'pkl_objects', 'vectorizer.pkl')\n", (307, 349), False, 'import os\n'), ((421, 475), 'os.path.join', 'os.path.join', (['CUR_DIR', '"""pkl_objects"""', '"""classifier.pkl"""'], {}), "(CUR_DIR, 'pkl_objects', 'classifier.pkl')\n", (433, 475), False, 'import os\n'), ((1345, 1363), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1361, 1363), False, 'from flask import Flask, request, jsonify\n')]
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
REQUIRES = ['numpy', 'pytest', 'flask']
config = {
    'description': 'A webservice for fibonacci numbers.',
    'author': '<NAME>',
    'url': 'https://github.com/soerendip/fibo',
    'download_url': 'https://github.com/soerendip/fibo',
    'author_email': '<EMAIL>',
    'version': '0.1',
    'install_requires': REQUIRES,  # a flat list of requirement strings
    'name': 'fibo',
    'packages': ['fibo'],
    'platforms': ['Linux']
}
setup(**config)
|
[
"distutils.core.setup"
] |
[((562, 577), 'distutils.core.setup', 'setup', ([], {}), '(**config)\n', (567, 577), False, 'from distutils.core import setup\n')]
|
from itertools import chain, combinations
import numpy as np
from numpy.testing import assert_allclose
from wpca.tests.tools import assert_allclose_upto_sign
from wpca.utils import orthonormalize, random_orthonormal, weighted_mean
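# Nose-style generator tests: each `yield` below emits one parametrised case.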
def test_orthonormalize():
rand = np.random.RandomState(42)
X = rand.randn(3, 4)
X2 = orthonormalize(X)
assert_allclose_upto_sign(X[0] / np.linalg.norm(X[0]), X2[0])
assert_allclose(np.dot(X2, X2.T), np.eye(X2.shape[0]), atol=1E-15)
def test_random_orthonormal():
def check_random_orthonormal(N, M, rows):
X = random_orthonormal(N, M, rows=rows, random_state=42)
assert X.shape == (N, M)
if rows:
C = np.dot(X, X.T)
else:
C = np.dot(X.T, X)
assert_allclose(C, np.eye(C.shape[0]), atol=1E-15)
for M in [5]:
for N in range(1, M + 1):
yield check_random_orthonormal, N, M, True
yield check_random_orthonormal, M, N, False
def test_weighted_mean():
def check_weighted_mean(shape, axis):
rand = np.random.RandomState(0)
x = rand.rand(*shape)
w = rand.rand(*shape)
wm = weighted_mean(x, w, axis)
assert_allclose(wm, np.average(x, axis, w))
assert_allclose(wm, (w * x).sum(axis) / w.sum(axis))
for ndim in range(1, 5):
shape = tuple(range(3, 3 + ndim))
axis_tuples = chain(*(combinations(range(ndim), nax)
for nax in range(ndim + 1)))
for axis in chain([None], range(ndim), axis_tuples):
yield check_weighted_mean, shape, axis
|
[
"numpy.average",
"numpy.eye",
"wpca.utils.orthonormalize",
"numpy.random.RandomState",
"numpy.linalg.norm",
"numpy.dot",
"wpca.utils.random_orthonormal",
"wpca.utils.weighted_mean"
] |
[((273, 298), 'numpy.random.RandomState', 'np.random.RandomState', (['(42)'], {}), '(42)\n', (294, 298), True, 'import numpy as np\n'), ((333, 350), 'wpca.utils.orthonormalize', 'orthonormalize', (['X'], {}), '(X)\n', (347, 350), False, 'from wpca.utils import orthonormalize, random_orthonormal, weighted_mean\n'), ((437, 453), 'numpy.dot', 'np.dot', (['X2', 'X2.T'], {}), '(X2, X2.T)\n', (443, 453), True, 'import numpy as np\n'), ((455, 474), 'numpy.eye', 'np.eye', (['X2.shape[0]'], {}), '(X2.shape[0])\n', (461, 474), True, 'import numpy as np\n'), ((579, 631), 'wpca.utils.random_orthonormal', 'random_orthonormal', (['N', 'M'], {'rows': 'rows', 'random_state': '(42)'}), '(N, M, rows=rows, random_state=42)\n', (597, 631), False, 'from wpca.utils import orthonormalize, random_orthonormal, weighted_mean\n'), ((1065, 1089), 'numpy.random.RandomState', 'np.random.RandomState', (['(0)'], {}), '(0)\n', (1086, 1089), True, 'import numpy as np\n'), ((1163, 1188), 'wpca.utils.weighted_mean', 'weighted_mean', (['x', 'w', 'axis'], {}), '(x, w, axis)\n', (1176, 1188), False, 'from wpca.utils import orthonormalize, random_orthonormal, weighted_mean\n'), ((388, 408), 'numpy.linalg.norm', 'np.linalg.norm', (['X[0]'], {}), '(X[0])\n', (402, 408), True, 'import numpy as np\n'), ((698, 712), 'numpy.dot', 'np.dot', (['X', 'X.T'], {}), '(X, X.T)\n', (704, 712), True, 'import numpy as np\n'), ((743, 757), 'numpy.dot', 'np.dot', (['X.T', 'X'], {}), '(X.T, X)\n', (749, 757), True, 'import numpy as np\n'), ((785, 803), 'numpy.eye', 'np.eye', (['C.shape[0]'], {}), '(C.shape[0])\n', (791, 803), True, 'import numpy as np\n'), ((1217, 1239), 'numpy.average', 'np.average', (['x', 'axis', 'w'], {}), '(x, axis, w)\n', (1227, 1239), True, 'import numpy as np\n')]
|
import numpy as np
from acoustics.turbulence import Gaussian2DTemp, VonKarman2DTemp, Comparison, Field2D
def main():
mu_0 = np.sqrt(10.0**(-6))
correlation_length = 1.0 # Typical correlation length for Gaussian spectrum.
x = 20.0
y = 0.0
z = 40.0
plane = (1,0,1)
#f_resolution = wavenumber_resolution / (2.0*np.pi)
spatial_resolution = 0.05
N = 100
min_wavenumber = 0.01
max_wavenumber = 10.0
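    # Discretise the wavenumber range [min_wavenumber, max_wavenumber] into N modes.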
wavenumber_resolution = (max_wavenumber - min_wavenumber) / N
"""Create an object to describe an Gaussian turbulence spectrum."""
g = Gaussian2DTemp(plane=plane, a=correlation_length, mu_0=mu_0, wavenumber_resolution=wavenumber_resolution, max_mode_order=N)
"""Create an object to describe a VonKarman turbulence spectrum."""
s = VonKarman2DTemp(plane=plane, a=correlation_length, mu_0=mu_0, wavenumber_resolution=wavenumber_resolution, max_mode_order=N)
g.plot_mode_amplitudes('Gaussian2DTemp_mode_amplitudes.png')
s.plot_mode_amplitudes('VonKarman2DTemp_mode_amplitudes.png')
c = Comparison([g, s])
c.plot_mode_amplitudes('Gaussian2DTemp_and_VonKarman2DTemp_mode_amplitudes.png')
field_g = Field2D(x=x, y=y, z=z, spatial_resolution=spatial_resolution, spectrum=g)
field_s = Field2D(x=x, y=y, z=z, spatial_resolution=spatial_resolution, spectrum=s)
field_g.generate().plot('Gaussian2DTemp_field.png')
field_s.generate().plot('VonKarman2DTemp_field.png')
if __name__ == '__main__':
main()
|
[
"acoustics.turbulence.Comparison",
"acoustics.turbulence.Field2D",
"acoustics.turbulence.Gaussian2DTemp",
"numpy.sqrt",
"acoustics.turbulence.VonKarman2DTemp"
] |
[((134, 153), 'numpy.sqrt', 'np.sqrt', (['(10.0 ** -6)'], {}), '(10.0 ** -6)\n', (141, 153), True, 'import numpy as np\n'), ((647, 774), 'acoustics.turbulence.Gaussian2DTemp', 'Gaussian2DTemp', ([], {'plane': 'plane', 'a': 'correlation_length', 'mu_0': 'mu_0', 'wavenumber_resolution': 'wavenumber_resolution', 'max_mode_order': 'N'}), '(plane=plane, a=correlation_length, mu_0=mu_0,\n wavenumber_resolution=wavenumber_resolution, max_mode_order=N)\n', (661, 774), False, 'from acoustics.turbulence import Gaussian2DTemp, VonKarman2DTemp, Comparison, Field2D\n'), ((856, 984), 'acoustics.turbulence.VonKarman2DTemp', 'VonKarman2DTemp', ([], {'plane': 'plane', 'a': 'correlation_length', 'mu_0': 'mu_0', 'wavenumber_resolution': 'wavenumber_resolution', 'max_mode_order': 'N'}), '(plane=plane, a=correlation_length, mu_0=mu_0,\n wavenumber_resolution=wavenumber_resolution, max_mode_order=N)\n', (871, 984), False, 'from acoustics.turbulence import Gaussian2DTemp, VonKarman2DTemp, Comparison, Field2D\n'), ((1126, 1144), 'acoustics.turbulence.Comparison', 'Comparison', (['[g, s]'], {}), '([g, s])\n', (1136, 1144), False, 'from acoustics.turbulence import Gaussian2DTemp, VonKarman2DTemp, Comparison, Field2D\n'), ((1259, 1332), 'acoustics.turbulence.Field2D', 'Field2D', ([], {'x': 'x', 'y': 'y', 'z': 'z', 'spatial_resolution': 'spatial_resolution', 'spectrum': 'g'}), '(x=x, y=y, z=z, spatial_resolution=spatial_resolution, spectrum=g)\n', (1266, 1332), False, 'from acoustics.turbulence import Gaussian2DTemp, VonKarman2DTemp, Comparison, Field2D\n'), ((1347, 1420), 'acoustics.turbulence.Field2D', 'Field2D', ([], {'x': 'x', 'y': 'y', 'z': 'z', 'spatial_resolution': 'spatial_resolution', 'spectrum': 's'}), '(x=x, y=y, z=z, spatial_resolution=spatial_resolution, spectrum=s)\n', (1354, 1420), False, 'from acoustics.turbulence import Gaussian2DTemp, VonKarman2DTemp, Comparison, Field2D\n')]
|
import ctypes
import pytest
c_lib = ctypes.CDLL('../solutions/0434-segment-string/segment-string.so')
@pytest.mark.parametrize('s, ans',
                         [(b'Hello, my name is John', 5),
                          (b'Hello', 1),
                          (b"love live! mu'sic forever", 4),
                          (b"", 0)])
def test_segment_string(s, ans):
    out = c_lib.countSegments(s)
    assert out == ans
|
[
"pytest.mark.parametrize",
"ctypes.CDLL"
] |
[((37, 102), 'ctypes.CDLL', 'ctypes.CDLL', (['"""../solutions/0434-segment-string/segment-string.so"""'], {}), "('../solutions/0434-segment-string/segment-string.so')\n", (48, 102), False, 'import ctypes\n'), ((105, 239), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""str, ans"""', '[(b\'Hello, my name is John\', 5), (b\'Hello\', 1), (\n b"love live! mu\'sic forever", 4), (b\'\', 0)]'], {}), '(\'str, ans\', [(b\'Hello, my name is John\', 5), (\n b\'Hello\', 1), (b"love live! mu\'sic forever", 4), (b\'\', 0)])\n', (128, 239), False, 'import pytest\n')]
|
import torch as th
from . import BaseFlow, register_flow
from ..models import build_model
from ..models.GATNE import NSLoss
import torch
from tqdm.auto import tqdm
from numpy import random
import dgl
from ..sampler.GATNE_sampler import NeighborSampler, generate_pairs
@register_flow("GATNE_trainer")
class GATNE(BaseFlow):
def __init__(self, args):
super(GATNE, self).__init__(args)
self.model = build_model(self.model_name).build_model_from_args(self.args, self.hg).to(self.device)
self.train_pairs = None
self.train_dataloader = None
self.nsloss = None
self.neighbor_sampler = None
self.orig_val_hg = self.task.val_hg
self.orig_test_hg = self.task.test_hg
self.preprocess()
self.train()
def preprocess(self):
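        # GATNE works on a graph with a single node type and multiple edge types.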
assert len(self.hg.ntypes) == 1
bidirected_hg = dgl.to_bidirected(dgl.to_simple(self.hg.to('cpu')))
all_walks = []
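        # Random walks are run separately per edge type; the traces become the
        # skip-gram-style training pairs below.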
for etype in self.hg.etypes:
nodes = torch.unique(bidirected_hg.edges(etype=etype)[0]).repeat(self.args.rw_walks)
traces, types = dgl.sampling.random_walk(
bidirected_hg, nodes, metapath=[etype] * (self.args.rw_length - 1)
)
all_walks.append(traces)
self.train_pairs = generate_pairs(all_walks, self.args.window_size, self.args.num_workers)
self.neighbor_sampler = NeighborSampler(bidirected_hg, [self.args.neighbor_samples])
self.train_dataloader = torch.utils.data.DataLoader(
self.train_pairs,
batch_size=self.args.batch_size,
collate_fn=self.neighbor_sampler.sample,
shuffle=True,
num_workers=self.args.num_workers,
pin_memory=True,
)
self.nsloss = NSLoss(self.hg.num_nodes(), self.args.neg_size, self.args.dim).to(self.device)
self.optimizer = torch.optim.Adam(
[{"params": self.model.parameters()}, {"params": self.nsloss.parameters()}], lr=self.args.learning_rate
)
return
def train(self):
best_score = 0
patience = 0
for self.epoch in range(self.args.max_epoch):
self._full_train_step()
cur_score = self._full_test_step()
if cur_score > best_score:
best_score = cur_score
patience = 0
else:
patience += 1
if patience > self.args.patience:
self.logger.train_info(f'Early Stop!\tEpoch:{self.epoch:03d}.')
break
def _full_train_step(self):
self.model.train()
random.shuffle(self.train_pairs)
data_iter = tqdm(
self.train_dataloader,
desc="epoch %d" % self.epoch,
total=(len(self.train_pairs) + (self.args.batch_size - 1)) // self.args.batch_size,
)
avg_loss = 0.0
for i, (block, head_invmap, tails, block_types) in enumerate(data_iter):
self.optimizer.zero_grad()
# embs: [batch_size, edge_type_count, embedding_size]
block_types = block_types.to(self.device)
embs = self.model(block[0].to(self.device))[head_invmap]
embs = embs.gather(
1, block_types.view(-1, 1, 1).expand(embs.shape[0], 1, embs.shape[2])
)[:, 0]
loss = self.nsloss(
block[0].dstdata[dgl.NID][head_invmap].to(self.device),
embs,
tails.to(self.device),
)
loss.backward()
self.optimizer.step()
avg_loss += loss.item()
post_fix = {
"epoch": self.epoch,
"iter": i,
"avg_loss": avg_loss / (i + 1),
"loss": loss.item(),
}
data_iter.set_postfix(post_fix)
def _full_test_step(self):
self.model.eval()
# {'1': {}, '2': {}}
final_model = dict(
zip(self.hg.etypes, [th.empty(self.hg.num_nodes(), self.args.dim) for _ in range(len(self.hg.etypes))]))
for i in tqdm(range(self.hg.num_nodes()), desc='Evaluating...'):
train_inputs = (
torch.tensor([i for _ in range(len(self.hg.etypes))])
.unsqueeze(1)
.to(self.device)
) # [i, i]
train_types = (
torch.tensor(list(range(len(self.hg.etypes)))).unsqueeze(1).to(self.device)
) # [0, 1]
pairs = torch.cat(
(train_inputs, train_inputs, train_types), dim=1
) # (2, 3)
(
train_blocks,
train_invmap,
fake_tails,
train_types,
) = self.neighbor_sampler.sample(pairs)
node_emb = self.model(train_blocks[0].to(self.device))[train_invmap]
node_emb = node_emb.gather(
1,
train_types.to(self.device)
.view(-1, 1, 1)
.expand(node_emb.shape[0], 1, node_emb.shape[2]),
)[:, 0]
for j in range(len(self.hg.etypes)):
final_model[self.hg.etypes[j]][i] = node_emb[j].detach()
metric = {}
score = []
for etype in self.hg.etypes:
self.task.val_hg = dgl.edge_type_subgraph(self.orig_val_hg, [etype])
self.task.test_hg = dgl.edge_type_subgraph(self.orig_test_hg, [etype])
for split in ['test', 'valid']:
n_embedding = {self.hg.ntypes[0]: final_model[etype].to(self.device)}
res = self.task.evaluate(n_embedding=n_embedding, mode=split)
metric[split] = res
if split == 'valid':
score.append(res.get('roc_auc'))
self.logger.train_info(etype + self.logger.metric2str(metric))
avg_score = sum(score) / len(score)
return avg_score
|
[
"torch.utils.data.DataLoader",
"torch.cat",
"dgl.sampling.random_walk",
"dgl.edge_type_subgraph",
"numpy.random.shuffle"
] |
[((1512, 1706), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['self.train_pairs'], {'batch_size': 'self.args.batch_size', 'collate_fn': 'self.neighbor_sampler.sample', 'shuffle': '(True)', 'num_workers': 'self.args.num_workers', 'pin_memory': '(True)'}), '(self.train_pairs, batch_size=self.args.\n batch_size, collate_fn=self.neighbor_sampler.sample, shuffle=True,\n num_workers=self.args.num_workers, pin_memory=True)\n', (1539, 1706), False, 'import torch\n'), ((2652, 2684), 'numpy.random.shuffle', 'random.shuffle', (['self.train_pairs'], {}), '(self.train_pairs)\n', (2666, 2684), False, 'from numpy import random\n'), ((1128, 1225), 'dgl.sampling.random_walk', 'dgl.sampling.random_walk', (['bidirected_hg', 'nodes'], {'metapath': '([etype] * (self.args.rw_length - 1))'}), '(bidirected_hg, nodes, metapath=[etype] * (self.\n args.rw_length - 1))\n', (1152, 1225), False, 'import dgl\n'), ((4539, 4598), 'torch.cat', 'torch.cat', (['(train_inputs, train_inputs, train_types)'], {'dim': '(1)'}), '((train_inputs, train_inputs, train_types), dim=1)\n', (4548, 4598), False, 'import torch\n'), ((5363, 5412), 'dgl.edge_type_subgraph', 'dgl.edge_type_subgraph', (['self.orig_val_hg', '[etype]'], {}), '(self.orig_val_hg, [etype])\n', (5385, 5412), False, 'import dgl\n'), ((5445, 5495), 'dgl.edge_type_subgraph', 'dgl.edge_type_subgraph', (['self.orig_test_hg', '[etype]'], {}), '(self.orig_test_hg, [etype])\n', (5467, 5495), False, 'import dgl\n')]
|
# import logging
from pathlib import Path
from enum import Enum
import uvicorn
from fastapi import FastAPI
from custom_logger import CustomizeLogger
import schemas
from mappings.daft_listings import get_daft_search_result
from mappings.listing_details import get_listing_details
# logger = logging.getLogger(__name__)
config_path = Path(__file__).with_name("custom_logger.json")
def create_app() -> FastAPI:
app = FastAPI(title='CustomLogger', debug=False)
logger = CustomizeLogger.make_logger(config_path)
app.logger = logger
return app
app = create_app()
# app = FastAPI()
@app.get("/search_result/", response_model=schemas.SearchResultList)
async def search_result():
result = await get_daft_search_result()
return result
class DaftMethodListing(str, Enum):
json_details = "json_details"
selenium = "selenium"
@app.get("/listing_details/", response_model=schemas.DaftListing)
async def daft_listing(url, method: DaftMethodListing):
result = await get_listing_details(url)
return result
if __name__ == "__main__":
uvicorn.run(app, host="0.0.0.0", port=8000)
|
[
"mappings.listing_details.get_listing_details",
"mappings.daft_listings.get_daft_search_result",
"pathlib.Path",
"uvicorn.run",
"fastapi.FastAPI",
"custom_logger.CustomizeLogger.make_logger"
] |
[((422, 464), 'fastapi.FastAPI', 'FastAPI', ([], {'title': '"""CustomLogger"""', 'debug': '(False)'}), "(title='CustomLogger', debug=False)\n", (429, 464), False, 'from fastapi import FastAPI\n'), ((478, 518), 'custom_logger.CustomizeLogger.make_logger', 'CustomizeLogger.make_logger', (['config_path'], {}), '(config_path)\n', (505, 518), False, 'from custom_logger import CustomizeLogger\n'), ((1071, 1114), 'uvicorn.run', 'uvicorn.run', (['app'], {'host': '"""0.0.0.0"""', 'port': '(8000)'}), "(app, host='0.0.0.0', port=8000)\n", (1082, 1114), False, 'import uvicorn\n'), ((336, 350), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (340, 350), False, 'from pathlib import Path\n'), ((714, 738), 'mappings.daft_listings.get_daft_search_result', 'get_daft_search_result', ([], {}), '()\n', (736, 738), False, 'from mappings.daft_listings import get_daft_search_result\n'), ((996, 1020), 'mappings.listing_details.get_listing_details', 'get_listing_details', (['url'], {}), '(url)\n', (1015, 1020), False, 'from mappings.listing_details import get_listing_details\n')]
|
import asyncio
import discord
import datetime
import pytz
import random
import colorsys
import os
from discord.ext import commands
from cogs.utils.embed import passembed
from cogs.utils.embed import errorembed
class Mod(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.case = {}
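        # Per-guild counter used to number the cases posted to #mod-logs.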
    def check_me(ctx):
        # OWNER_ID is read from the environment as a string; cast it to int so
        # the comparison with the author's integer ID can succeed.
        return ctx.message.author.id == int(os.getenv("OWNER_ID"))
# Purge message
@commands.command()
@commands.has_any_role('Server Moderator')
async def purge(self, ctx, amount=100):
amount = int(amount)
await ctx.channel.purge(limit=amount+1)
pembed = passembed(description='{0} messages have been deleted.'.format(amount))
await ctx.send(embed=pembed, delete_after=25)
@purge.error
async def purge_error(self, ctx, error):
if isinstance(error, commands.CheckFailure):
return
# Ban command
@commands.command()
@commands.has_any_role('Server Moderator')
async def ban(self, ctx, user: discord.Member, *, reason: str=None):
if reason is None:
reason = 'no reason'
await user.ban(reason=reason)
pembed = passembed(description='{0} has been banned by {1} due to {2}.'.format(user, ctx.message.author, reason))
await ctx.send(embed=pembed)
# Logging
for channel in ctx.guild.channels:
if channel.name == 'mod-logs':
guild_id = ctx.message.guild.id
if guild_id in self.case:
self.case[guild_id]+=1
embed=discord.Embed(color=discord.Color.red())
embed.timestamp=datetime.datetime.now(tz=pytz.timezone('Asia/Singapore'))
embed.set_author(name='Case #{0} | Ban | {1}'.format(int(self.case.get(guild_id)), user), icon_url=user.avatar_url)
embed.add_field(name='User',value='{0}'.format(user.mention), inline=True)
embed.add_field(name='Moderator',value='{0}'.format(ctx.message.author.mention), inline=True)
embed.add_field(name='Reason', value='{0}'.format(reason), inline=True)
embed.set_footer(text='ID: {0}'.format(user.id))
await channel.send(embed=embed)
else:
self.case[guild_id]=0
self.case[guild_id]+=1
print(self.case)
embed=discord.Embed(color=discord.Color.red())
embed.timestamp=datetime.datetime.now(tz=pytz.timezone('Asia/Singapore'))
embed.set_author(name='Case #{0} | Ban | {1}'.format(int(self.case.get(guild_id)), user), icon_url=user.avatar_url)
embed.add_field(name='User',value='{0}'.format(user.mention), inline=True)
embed.add_field(name='Moderator',value='{0}'.format(ctx.message.author.mention), inline=True)
embed.add_field(name='Reason', value='{0}'.format(reason), inline=True)
embed.set_footer(text='ID: {0}'.format(user.id))
await channel.send(embed=embed)
@ban.error
async def ban_error(self, ctx, error):
if isinstance(error, commands.MissingRequiredArgument):
eembed = errorembed(description='Please indicate the User you wish to ban.')
return await ctx.send(embed=eembed)
elif isinstance(error, commands.BadArgument):
eembed = errorembed(description='Invalid User. Please tag the User you wish to ban.')
return await ctx.send(embed=eembed)
elif isinstance(error, commands.CheckFailure):
return
# Force ban command
@commands.command()
@commands.has_any_role('Server Moderator')
async def forceban(self, ctx, id: int, *, reason: str=None):
if reason is None:
reason = 'no reason'
        try:
            limitedUser = await self.bot.fetch_user(id)
            # Ban by ID so users who are not guild members can still be banned.
            await ctx.guild.ban(limitedUser, reason=reason)
            pembed = passembed(description='{0} has been banned by {1} due to {2}.'.format(limitedUser, ctx.message.author, reason))
            await ctx.send(embed=pembed)
except Exception as e:
if 'Unknown User' in str(e):
eembed = errorembed(description='User ID could not be found. Please input a valid User ID.')
await ctx.send(embed=eembed)
@forceban.error
async def forceban_error(self, ctx, error):
if isinstance(error, commands.MissingRequiredArgument):
eembed = errorembed(description='Please indicate the User you wish to force ban.')
return await ctx.send(embed=eembed)
elif isinstance(error, commands.BadArgument):
eembed = errorembed(description='User ID is invalid. Please input a valid User ID.')
return await ctx.send(embed=eembed)
elif isinstance(error, commands.CheckFailure):
return
# Unban command
@commands.command()
@commands.has_any_role('Server Moderator')
async def unban(self, ctx, id: int):
try:
banuser = await self.bot.fetch_user(id)
await ctx.guild.unban(banuser)
            pembed = passembed(description='{0} has been unbanned by {1}.'.format(banuser, ctx.message.author))
await ctx.send(embed=pembed)
except Exception as e:
if 'Unknown Ban' in str(e):
eembed = errorembed(description='{0} {1} is not banned in the server. Please check again.'.format(ctx.message.author.mention, banuser))
await ctx.send(embed=eembed)
elif 'Unknown User' in str(e):
eembed = errorembed(description='User ID could not be found. Please input a valid User ID.')
await ctx.send(embed=eembed)
# Logging
for channel in ctx.guild.channels:
if channel.name == 'mod-logs':
guild_id = ctx.message.guild.id
if guild_id in self.case:
self.case[guild_id]+=1
print(self.case)
embed=discord.Embed(color=discord.Color.red())
embed.timestamp=datetime.datetime.now(tz=pytz.timezone('Asia/Singapore'))
embed.set_author(name='Case #{0} | Unban | {1}'.format(int(self.case.get(guild_id)), banuser), icon_url=banuser.avatar_url)
embed.add_field(name='User',value='{0}'.format(banuser.mention), inline=True)
embed.add_field(name='Moderator',value='{0}'.format(ctx.message.author.mention), inline=True)
embed.set_footer(text='ID: {0}'.format(banuser.id))
await channel.send(embed=embed)
else:
self.case[guild_id]=0
self.case[guild_id]+=1
print(self.case)
embed=discord.Embed(color=discord.Color.red())
embed.timestamp=datetime.datetime.now(tz=pytz.timezone('Asia/Singapore'))
embed.set_author(name='Case #{0} | Unban | {1}'.format(int(self.case.get(guild_id)), banuser), icon_url=banuser.avatar_url)
embed.add_field(name='User',value='{0}'.format(banuser.mention), inline=True)
embed.add_field(name='Moderator',value='{0}'.format(ctx.message.author.mention), inline=True)
embed.set_footer(text='ID: {0}'.format(banuser.id))
await channel.send(embed=embed)
@unban.error
async def unban_error(self, ctx, error):
if isinstance(error, commands.MissingRequiredArgument):
eembed = errorembed(description='Please indicate the User ID you wish to unban.')
return await ctx.send(embed=eembed)
elif isinstance(error, commands.BadArgument):
eembed = errorembed(description='User ID is either not banned or invalid/not found. Please input a valid User ID.')
return await ctx.send(embed=eembed)
elif isinstance(error, commands.CheckFailure):
return
# Mute command
@commands.command()
@commands.has_any_role('Server Moderator')
async def mute(self, ctx, user: discord.Member, reason: str=None, time: int=5):
        # If not specified, the mute length defaults to 5 minutes.
secs = time * 60
if reason is None:
reason = 'no reason'
        for channel in ctx.guild.channels:
            if isinstance(channel, discord.TextChannel):
                await channel.set_permissions(user, overwrite=discord.PermissionOverwrite(send_messages=False))
            elif isinstance(channel, discord.VoiceChannel):
                await channel.set_permissions(user, overwrite=discord.PermissionOverwrite(connect=False))
pembed = passembed(description='{0} has been muted for {1} minutes due to {2}.'.format(user, time, reason))
await ctx.send(embed=pembed)
# Logging
for channel in ctx.guild.channels:
if channel.name == 'mod-logs':
guild_id = ctx.message.guild.id
if guild_id in self.case:
self.case[guild_id]+=1
print(self.case)
embed=discord.Embed(color=discord.Color.red())
embed.timestamp=datetime.datetime.now(tz=pytz.timezone('Asia/Singapore'))
embed.set_author(name='Case #{0} | Mute | {1}'.format(int(self.case.get(guild_id)), user.name), icon_url=user.avatar_url)
embed.add_field(name='User',value='{0}'.format(user.mention), inline=True)
embed.add_field(name='Moderator',value='{0}'.format(ctx.message.author.mention), inline=True)
embed.add_field(name='Length', value='{0} mins'.format(time), inline=True)
embed.add_field(name='Reason', value='{0}'.format(reason), inline=True)
embed.set_footer(text='ID: {0}'.format(user.id))
await channel.send(embed=embed)
else:
self.case[guild_id]=0
self.case[guild_id]+=1
print(self.case)
embed=discord.Embed(color=discord.Color.red())
embed.timestamp=datetime.datetime.now(tz=pytz.timezone('Asia/Singapore'))
case = self.case.get(guild_id)
embed.set_author(name='Case #{0} | Mute | {1}'.format(int(self.case.get(guild_id)), user.name), icon_url=user.avatar_url)
embed.add_field(name='User',value='{0}'.format(user.mention), inline=True)
embed.add_field(name='Moderator',value='{0}'.format(ctx.message.author.mention), inline=True)
embed.add_field(name='Length', value='{0} mins'.format(time), inline=True)
embed.add_field(name='Reason', value='{0}'.format(reason), inline=True)
embed.set_footer(text='ID: {0}'.format(user.id))
await channel.send(embed=embed)
await asyncio.sleep(secs)
        for channel in ctx.guild.channels:
            if isinstance(channel, discord.TextChannel):
                await channel.set_permissions(user, overwrite=None)
            elif isinstance(channel, discord.VoiceChannel):
                await channel.set_permissions(user, overwrite=None)
pembed = passembed(description='{0} has been unmuted in the server.'.format(user))
await ctx.send(embed=pembed)
# Logging
for channel in ctx.guild.channels:
if channel.name == 'mod-logs':
guild_id = ctx.message.guild.id
if guild_id in self.case:
self.case[guild_id]+=1
print(self.case)
embed=discord.Embed(color=discord.Color.red())
embed.timestamp=datetime.datetime.now(tz=pytz.timezone('Asia/Singapore'))
embed.set_author(name='Case #{0} | Unmute | {1}'.format(int(self.case.get(guild_id)), user.name), icon_url=user.avatar_url)
embed.add_field(name='User',value='{0}'.format(user.mention), inline=True)
embed.add_field(name='Moderator',value='{0}'.format(self.bot.user.mention), inline=True)
embed.add_field(name='Reason', value='timeout', inline=True)
embed.set_footer(text='ID: {0}'.format(user.id))
await channel.send(embed=embed)
else:
self.case[guild_id]=0
self.case[guild_id]+=1
print(self.case)
embed=discord.Embed(color=discord.Color.red())
embed.timestamp=datetime.datetime.now(tz=pytz.timezone('Asia/Singapore'))
case = self.case.get(guild_id)
embed.set_author(name='Case #{0} | Unmute | {1}'.format(int(self.case.get(guild_id)), user.name), icon_url=user.avatar_url)
embed.add_field(name='User',value='{0}'.format(user.mention), inline=True)
embed.add_field(name='Moderator',value='{0}'.format(self.bot.user.mention), inline=True)
embed.add_field(name='Reason', value='timeout', inline=True)
embed.set_footer(text='ID: {0}'.format(user.id))
await channel.send(embed=embed)
@mute.error
async def mute_error(self, ctx, error):
if isinstance(error, commands.MissingRequiredArgument):
eembed = errorembed(description='Please indicate the user you wish to mute.')
return await ctx.send(embed=eembed)
elif isinstance(error, commands.BadArgument):
eembed = errorembed(description='User could not be found. Please tag a valid User.')
return await ctx.send(embed=eembed)
elif isinstance(error, commands.CheckFailure):
return
# Unmute command
@commands.command()
@commands.has_any_role('Server Moderator')
async def unmute(self, ctx, user: discord.Member, reason: str=None):
if reason is None:
reason = 'no reason'
        for channel in ctx.guild.channels:
            if isinstance(channel, discord.TextChannel):
                await channel.set_permissions(user, send_messages=None)
            elif isinstance(channel, discord.VoiceChannel):
                await channel.set_permissions(user, connect=None)
pembed = passembed(description='{0} has been unmuted in the server.'.format(user))
await ctx.send(embed=pembed)
# Logging
for channel in ctx.guild.channels:
if channel.name == 'mod-logs':
guild_id = ctx.message.guild.id
if guild_id in self.case:
self.case[guild_id]+=1
print(self.case)
embed=discord.Embed(color=discord.Color.red())
embed.timestamp=datetime.datetime.now(tz=pytz.timezone('Asia/Singapore'))
embed.set_author(name='Case #{0} | Unmute | {1}'.format(int(self.case.get(guild_id)), user.name), icon_url=user.avatar_url)
embed.add_field(name='User',value='{0}'.format(user.mention), inline=True)
embed.add_field(name='Moderator',value='{0}'.format(ctx.message.author.mention), inline=True)
embed.add_field(name='Reason', value='{0}'.format(reason), inline=True)
embed.set_footer(text='ID: {0}'.format(user.id))
await channel.send(embed=embed)
else:
self.case[guild_id]=0
self.case[guild_id]+=1
print(self.case)
embed=discord.Embed(color=discord.Color.red())
embed.timestamp=datetime.datetime.now(tz=pytz.timezone('Asia/Singapore'))
case = self.case.get(guild_id)
embed.set_author(name='Case #{0} | Unmute | {1}'.format(int(self.case.get(guild_id)), user.name), icon_url=user.avatar_url)
embed.add_field(name='User',value='{0}'.format(user.mention), inline=True)
embed.add_field(name='Moderator',value='{0}'.format(ctx.message.author.mention), inline=True)
embed.add_field(name='Reason', value='{0}'.format(reason), inline=True)
embed.set_footer(text='ID: {0}'.format(user.id))
await channel.send(embed=embed)
@unmute.error
async def unmute_error(self, ctx, error):
if isinstance(error, commands.MissingRequiredArgument):
eembed = errorembed(description='Please indicate the user you wish to unmute.')
return await ctx.send(embed=eembed)
elif isinstance(error, commands.BadArgument):
eembed = errorembed(description='User could not be found. Please tag a valid User.')
return await ctx.send(embed=eembed)
elif isinstance(error, commands.CheckFailure):
return
# Announce command
@commands.command()
@commands.has_any_role('Server Moderator')
async def announce(self, ctx, channel: discord.TextChannel, *,message: str):
await channel.send(message)
@announce.error
async def announce_error(self, ctx, error):
if isinstance(error, commands.MissingRequiredArgument):
eembed = errorembed(description='Please specify text channel in the command.')
return await ctx.send(embed=eembed)
elif isinstance(error, commands.BadArgument):
eembed = errorembed(description='Channel could not be found in the server. Please specify the correct text channel.')
return await ctx.send(embed=eembed)
elif 'message is a required argument' in str(error):
eembed = errorembed(description='Please indicate your message in the command.')
return await ctx.send(embed=eembed)
elif isinstance(error, commands.CheckFailure):
return
# Embed announce command
@commands.command()
@commands.has_any_role('Server Moderator')
async def emannounce(self, ctx, channel: discord.TextChannel, *, message: str):
r, g, b = tuple(int(x * 255) for x in colorsys.hsv_to_rgb(random.random(), 1, 1))
embed=discord.Embed(description="{0}".format(message), color=discord.Color((r << 16) + (g << 8) + b), icon_url=self.bot.user.avatar_url)
#embed.timestamp=datetime.datetime.now(tz=pytz.timezone('Asia/Singapore'))
#embed.set_footer(text='Announced by {0}'.format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
await channel.send(embed=embed)
@emannounce.error
async def emannounce_error(self, ctx, error):
if isinstance(error, commands.MissingRequiredArgument):
eembed = errorembed(description='Please specify text channel in the command.')
return await ctx.send(embed=eembed)
elif isinstance(error, commands.BadArgument):
eembed = errorembed(description='Channel could not be found. Please specify the correct text channel.')
return await ctx.send(embed=eembed)
elif 'message is a required argument' in str(error):
eembed = errorembed(description='Please indicate your message in the command.')
return await ctx.send(embed=eembed)
elif isinstance(error, commands.CheckFailure):
return
# Set watching status
@commands.command()
@commands.check(check_me)
async def watching(self, ctx, *name: str):
type = discord.ActivityType.watching
activity = discord.Activity(name=name, type=type)
await self.bot.change_presence(activity=activity)
pembed = passembed(description='Status has been updated.')
await ctx.send(embed=pembed, delete_after=5)
#@watching.error
#async def watching_error(self, ctx, error):
# if isinstance(error, commands.CheckFailure):
# return
# Resets status of bot
@commands.command()
@commands.check(check_me)
async def reset(self, ctx):
        await self.bot.change_presence(activity=discord.Activity(name=f'{len(self.bot.users)} users in FortniteAsia', type=2))
        pembed = passembed(description='Status has been reset.')
await ctx.send(embed=pembed, delete_after=5)
@reset.error
async def reset_error(self, ctx, error):
if isinstance(error, commands.CheckFailure):
return
# Lock command
@commands.command()
@commands.has_any_role('Server Moderator')
async def lock(self, ctx, channelname: discord.TextChannel=None):
overwrite = discord.PermissionOverwrite(send_messages=False)
# Can be used without specifying channel name
if channelname is None:
await ctx.message.channel.set_permissions(ctx.guild.default_role, overwrite=overwrite)
pembed = passembed(description='{0} has been locked by {1}.'.format(ctx.channel.mention, ctx.message.author))
await ctx.send(embed=pembed)
else:
await channelname.set_permissions(ctx.guild.default_role, overwrite=overwrite)
pembed = passembed(description='{0} has been locked by {1}.'.format(channelname.mention, ctx.message.author))
await ctx.send(embed=pembed)
@lock.error
async def lock_error(self, ctx, error):
if isinstance(error, commands.CheckFailure):
return
# Unlock command
@commands.command()
@commands.has_any_role('Server Moderator')
async def unlock(self, ctx, channelname: discord.TextChannel=None):
overwrite = discord.PermissionOverwrite(send_messages=True)
# Can be used without specifying channel name
if channelname is None:
await ctx.message.channel.set_permissions(ctx.guild.default_role, overwrite=overwrite)
pembed = passembed(description='{0} has been unlocked by {1}.'.format(ctx.channel.mention, ctx.message.author))
await ctx.send(embed=pembed)
else:
await channelname.set_permissions(ctx.guild.default_role, overwrite=overwrite)
pembed = passembed(description='{0} has been unlocked by {1}.'.format(channelname.mention, ctx.message.author))
await ctx.send(embed=pembed)
@unlock.error
async def unlock_error(self, ctx, error):
if isinstance(error, commands.CheckFailure):
return
# Adding the cog to main script
def setup(bot):
bot.add_cog(Mod(bot))
|
[
"discord.Activity",
"discord.ext.commands.command",
"asyncio.sleep",
"discord.ext.commands.check",
"cogs.utils.embed.errorembed",
"discord.Color.red",
"discord.ext.commands.has_any_role",
"discord.PermissionOverwrite",
"discord.Color",
"random.random",
"pytz.timezone",
"cogs.utils.embed.passembed",
"os.getenv"
] |
[((433, 451), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (449, 451), False, 'from discord.ext import commands\n'), ((457, 498), 'discord.ext.commands.has_any_role', 'commands.has_any_role', (['"""Server Moderator"""'], {}), "('Server Moderator')\n", (478, 498), False, 'from discord.ext import commands\n'), ((938, 956), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (954, 956), False, 'from discord.ext import commands\n'), ((962, 1003), 'discord.ext.commands.has_any_role', 'commands.has_any_role', (['"""Server Moderator"""'], {}), "('Server Moderator')\n", (983, 1003), False, 'from discord.ext import commands\n'), ((3737, 3755), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (3753, 3755), False, 'from discord.ext import commands\n'), ((3761, 3802), 'discord.ext.commands.has_any_role', 'commands.has_any_role', (['"""Server Moderator"""'], {}), "('Server Moderator')\n", (3782, 3802), False, 'from discord.ext import commands\n'), ((4987, 5005), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (5003, 5005), False, 'from discord.ext import commands\n'), ((5011, 5052), 'discord.ext.commands.has_any_role', 'commands.has_any_role', (['"""Server Moderator"""'], {}), "('Server Moderator')\n", (5032, 5052), False, 'from discord.ext import commands\n'), ((8164, 8182), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (8180, 8182), False, 'from discord.ext import commands\n'), ((8188, 8229), 'discord.ext.commands.has_any_role', 'commands.has_any_role', (['"""Server Moderator"""'], {}), "('Server Moderator')\n", (8209, 8229), False, 'from discord.ext import commands\n'), ((14063, 14081), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (14079, 14081), False, 'from discord.ext import commands\n'), ((14087, 14128), 'discord.ext.commands.has_any_role', 'commands.has_any_role', (['"""Server Moderator"""'], {}), "('Server Moderator')\n", (14108, 14128), False, 'from discord.ext import commands\n'), ((17214, 17232), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (17230, 17232), False, 'from discord.ext import commands\n'), ((17238, 17279), 'discord.ext.commands.has_any_role', 'commands.has_any_role', (['"""Server Moderator"""'], {}), "('Server Moderator')\n", (17259, 17279), False, 'from discord.ext import commands\n'), ((18219, 18237), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (18235, 18237), False, 'from discord.ext import commands\n'), ((18243, 18284), 'discord.ext.commands.has_any_role', 'commands.has_any_role', (['"""Server Moderator"""'], {}), "('Server Moderator')\n", (18264, 18284), False, 'from discord.ext import commands\n'), ((19658, 19676), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (19674, 19676), False, 'from discord.ext import commands\n'), ((19682, 19706), 'discord.ext.commands.check', 'commands.check', (['check_me'], {}), '(check_me)\n', (19696, 19706), False, 'from discord.ext import commands\n'), ((20219, 20237), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (20235, 20237), False, 'from discord.ext import commands\n'), ((20243, 20267), 'discord.ext.commands.check', 'commands.check', (['check_me'], {}), '(check_me)\n', (20257, 20267), False, 'from discord.ext import commands\n'), ((20721, 20739), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (20737, 20739), False, 'from discord.ext import commands\n'), ((20745, 20786), 
'discord.ext.commands.has_any_role', 'commands.has_any_role', (['"""Server Moderator"""'], {}), "('Server Moderator')\n", (20766, 20786), False, 'from discord.ext import commands\n'), ((21711, 21729), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (21727, 21729), False, 'from discord.ext import commands\n'), ((21735, 21776), 'discord.ext.commands.has_any_role', 'commands.has_any_role', (['"""Server Moderator"""'], {}), "('Server Moderator')\n", (21756, 21776), False, 'from discord.ext import commands\n'), ((19818, 19856), 'discord.Activity', 'discord.Activity', ([], {'name': 'name', 'type': 'type'}), '(name=name, type=type)\n', (19834, 19856), False, 'import discord\n'), ((19932, 19981), 'cogs.utils.embed.passembed', 'passembed', ([], {'description': '"""Status has been updated."""'}), "(description='Status has been updated.')\n", (19941, 19981), False, 'from cogs.utils.embed import passembed\n'), ((20444, 20493), 'cogs.utils.embed.passembed', 'passembed', ([], {'description': '"""Status has been reseted."""'}), "(description='Status has been reseted.')\n", (20453, 20493), False, 'from cogs.utils.embed import passembed\n'), ((20877, 20925), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'send_messages': '(False)'}), '(send_messages=False)\n', (20904, 20925), False, 'import discord\n'), ((21869, 21916), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'send_messages': '(True)'}), '(send_messages=True)\n', (21896, 21916), False, 'import discord\n'), ((381, 402), 'os.getenv', 'os.getenv', (['"""OWNER_ID"""'], {}), "('OWNER_ID')\n", (390, 402), False, 'import os\n'), ((3317, 3384), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""Please indicate the User you wish to ban."""'}), "(description='Please indicate the User you wish to ban.')\n", (3327, 3384), False, 'from cogs.utils.embed import errorembed\n'), ((4557, 4630), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""Please indicate the User you wish to force ban."""'}), "(description='Please indicate the User you wish to force ban.')\n", (4567, 4630), False, 'from cogs.utils.embed import errorembed\n'), ((7695, 7767), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""Please indicate the User ID you wish to unban."""'}), "(description='Please indicate the User ID you wish to unban.')\n", (7705, 7767), False, 'from cogs.utils.embed import errorembed\n'), ((11133, 11152), 'asyncio.sleep', 'asyncio.sleep', (['secs'], {}), '(secs)\n', (11146, 11152), False, 'import asyncio\n'), ((13634, 13702), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""Please indicate the user you wish to mute."""'}), "(description='Please indicate the user you wish to mute.')\n", (13644, 13702), False, 'from cogs.utils.embed import errorembed\n'), ((16781, 16851), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""Please indicate the user you wish to unmute."""'}), "(description='Please indicate the user you wish to unmute.')\n", (16791, 16851), False, 'from cogs.utils.embed import errorembed\n'), ((17559, 17628), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""Please specify text channel in the command."""'}), "(description='Please specify text channel in the command.')\n", (17569, 17628), False, 'from cogs.utils.embed import errorembed\n'), ((19007, 19076), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""Please specify text channel in the command."""'}), 
"(description='Please specify text channel in the command.')\n", (19017, 19076), False, 'from cogs.utils.embed import errorembed\n'), ((3508, 3584), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""Invalid User. Please tag the User you wish to ban."""'}), "(description='Invalid User. Please tag the User you wish to ban.')\n", (3518, 3584), False, 'from cogs.utils.embed import errorembed\n'), ((4754, 4829), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""User ID is invalid. Please input a valid User ID."""'}), "(description='User ID is invalid. Please input a valid User ID.')\n", (4764, 4829), False, 'from cogs.utils.embed import errorembed\n'), ((7891, 8007), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""User ID is either not banned or invalid/not found. Please input a valid User ID."""'}), "(description=\n 'User ID is either not banned or invalid/not found. Please input a valid User ID.'\n )\n", (7901, 8007), False, 'from cogs.utils.embed import errorembed\n'), ((13826, 13901), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""User could not be found. Please tag a valid User."""'}), "(description='User could not be found. Please tag a valid User.')\n", (13836, 13901), False, 'from cogs.utils.embed import errorembed\n'), ((16975, 17050), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""User could not be found. Please tag a valid User."""'}), "(description='User could not be found. Please tag a valid User.')\n", (16985, 17050), False, 'from cogs.utils.embed import errorembed\n'), ((17752, 17870), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""Channel could not be found in the server. Please specify the correct text channel."""'}), "(description=\n 'Channel could not be found in the server. Please specify the correct text channel.'\n )\n", (17762, 17870), False, 'from cogs.utils.embed import errorembed\n'), ((18528, 18567), 'discord.Color', 'discord.Color', (['((r << 16) + (g << 8) + b)'], {}), '((r << 16) + (g << 8) + b)\n', (18541, 18567), False, 'import discord\n'), ((19200, 19299), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""Channel could not be found. Please specify the correct text channel."""'}), "(description=\n 'Channel could not be found. Please specify the correct text channel.')\n", (19210, 19299), False, 'from cogs.utils.embed import errorembed\n'), ((4274, 4362), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""User ID could not be found. Please input a valid User ID."""'}), "(description=\n 'User ID could not be found. Please input a valid User ID.')\n", (4284, 4362), False, 'from cogs.utils.embed import errorembed\n'), ((17991, 18061), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""Please indicate your message in the command."""'}), "(description='Please indicate your message in the command.')\n", (18001, 18061), False, 'from cogs.utils.embed import errorembed\n'), ((19425, 19495), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""Please indicate your message in the command."""'}), "(description='Please indicate your message in the command.')\n", (19435, 19495), False, 'from cogs.utils.embed import errorembed\n'), ((5710, 5798), 'cogs.utils.embed.errorembed', 'errorembed', ([], {'description': '"""User ID could not be found. Please input a valid User ID."""'}), "(description=\n 'User ID could not be found. 
Please input a valid User ID.')\n", (5720, 5798), False, 'from cogs.utils.embed import errorembed\n'), ((18435, 18450), 'random.random', 'random.random', ([], {}), '()\n', (18448, 18450), False, 'import random\n'), ((1617, 1636), 'discord.Color.red', 'discord.Color.red', ([], {}), '()\n', (1634, 1636), False, 'import discord\n'), ((1699, 1730), 'pytz.timezone', 'pytz.timezone', (['"""Asia/Singapore"""'], {}), "('Asia/Singapore')\n", (1712, 1730), False, 'import pytz\n'), ((2480, 2499), 'discord.Color.red', 'discord.Color.red', ([], {}), '()\n', (2497, 2499), False, 'import discord\n'), ((2562, 2593), 'pytz.timezone', 'pytz.timezone', (['"""Asia/Singapore"""'], {}), "('Asia/Singapore')\n", (2575, 2593), False, 'import pytz\n'), ((6159, 6178), 'discord.Color.red', 'discord.Color.red', ([], {}), '()\n', (6176, 6178), False, 'import discord\n'), ((6241, 6272), 'pytz.timezone', 'pytz.timezone', (['"""Asia/Singapore"""'], {}), "('Asia/Singapore')\n", (6254, 6272), False, 'import pytz\n'), ((6944, 6963), 'discord.Color.red', 'discord.Color.red', ([], {}), '()\n', (6961, 6963), False, 'import discord\n'), ((7026, 7057), 'pytz.timezone', 'pytz.timezone', (['"""Asia/Singapore"""'], {}), "('Asia/Singapore')\n", (7039, 7057), False, 'import pytz\n'), ((8617, 8665), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'send_messages': '(False)'}), '(send_messages=False)\n', (8644, 8665), False, 'import discord\n'), ((9310, 9329), 'discord.Color.red', 'discord.Color.red', ([], {}), '()\n', (9327, 9329), False, 'import discord\n'), ((9392, 9423), 'pytz.timezone', 'pytz.timezone', (['"""Asia/Singapore"""'], {}), "('Asia/Singapore')\n", (9405, 9423), False, 'import pytz\n'), ((10294, 10313), 'discord.Color.red', 'discord.Color.red', ([], {}), '()\n', (10311, 10313), False, 'import discord\n'), ((10376, 10407), 'pytz.timezone', 'pytz.timezone', (['"""Asia/Singapore"""'], {}), "('Asia/Singapore')\n", (10389, 10407), False, 'import pytz\n'), ((11905, 11924), 'discord.Color.red', 'discord.Color.red', ([], {}), '()\n', (11922, 11924), False, 'import discord\n'), ((11987, 12018), 'pytz.timezone', 'pytz.timezone', (['"""Asia/Singapore"""'], {}), "('Asia/Singapore')\n", (12000, 12018), False, 'import pytz\n'), ((12760, 12779), 'discord.Color.red', 'discord.Color.red', ([], {}), '()\n', (12777, 12779), False, 'import discord\n'), ((12842, 12873), 'pytz.timezone', 'pytz.timezone', (['"""Asia/Singapore"""'], {}), "('Asia/Singapore')\n", (12855, 12873), False, 'import pytz\n'), ((15016, 15035), 'discord.Color.red', 'discord.Color.red', ([], {}), '()\n', (15033, 15035), False, 'import discord\n'), ((15098, 15129), 'pytz.timezone', 'pytz.timezone', (['"""Asia/Singapore"""'], {}), "('Asia/Singapore')\n", (15111, 15129), False, 'import pytz\n'), ((15887, 15906), 'discord.Color.red', 'discord.Color.red', ([], {}), '()\n', (15904, 15906), False, 'import discord\n'), ((15969, 16000), 'pytz.timezone', 'pytz.timezone', (['"""Asia/Singapore"""'], {}), "('Asia/Singapore')\n", (15982, 16000), False, 'import pytz\n'), ((8793, 8835), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'connect': '(False)'}), '(connect=False)\n', (8820, 8835), False, 'import discord\n')]
|
from django.db import models
from django.db.models.signals import pre_save
from django.dispatch import receiver
from django.contrib.auth.models import User
import datetime
class File(models.Model):
"""The actual DLL file itself"""
STATUS_UNKNOWN = 'unknown'
STATUS_VALID = 'valid'
STATUS_MALWARE = 'malware'
STATUS_LIKELY_VALID = 'likely_valid'
STATUS_LIKELY_MALWARE = 'likely_malware'
STATUS_CHOICES = (
(STATUS_UNKNOWN, 'Unknown'),
(STATUS_VALID, 'Valid'),
(STATUS_LIKELY_VALID, 'Likely Valid'),
(STATUS_LIKELY_MALWARE, 'Likely Malware'),
(STATUS_MALWARE, 'Malware'),
)
PLATFORM_WINDOWS = 'Windows'
PLATFORM_LINUX = 'Linux'
PLATFORM_MAC = 'Mac OS X'
PLATFORM_CHOICES = (
(PLATFORM_WINDOWS, 'Windows'),
(PLATFORM_LINUX, 'Linux'),
(PLATFORM_MAC, 'Mac OS X')
)
date_created = models.DateTimeField(default=datetime.datetime.utcnow)
    date_modified = models.DateTimeField(auto_now=True)
created_by = models.ForeignKey(User, related_name="created_by")
modified_by = models.ForeignKey(User, related_name="modified_by")
file_name = models.CharField(max_length=200)
common_name = models.CharField(max_length=200, blank=True, null=True)
version = models.CharField(max_length=100, blank=True, null=True)
platform = models.CharField(max_length=10, choices=PLATFORM_CHOICES,
blank=True)
vendor = models.CharField(max_length=200, blank=True, null=True)
distributors = models.CharField(max_length=200, blank=True, null=True)
md5_hash = models.CharField(max_length=32, blank=True, null=True)
debug = models.CharField(max_length=60, blank=True, null=True)
debug_filename = models.CharField(max_length=60, blank=True, null=True)
status = models.CharField(max_length=10, choices=STATUS_CHOICES)
released = models.DateField(blank=True, null=True)
obsolete = models.BooleanField(default=False)
replaced_by = models.CharField(max_length=200, blank=True, null=True)
details = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.file_name
class Meta:
unique_together = ('file_name', 'debug_filename', 'debug')
class Comment(models.Model):
"""Comments users have made on given DLL files"""
user = models.ForeignKey(User)
dll = models.ForeignKey(File)
date = models.DateTimeField(default=datetime.datetime.utcnow,
auto_now=True)
comment = models.TextField()
class FileHistory(models.Model):
"""A historical record of the DLL file and the changes made to it over
time"""
dll = models.ForeignKey(File)
user = models.ForeignKey(User)
date_changed = models.DateTimeField(auto_now=True)
field = models.CharField(max_length=40)
original_state = models.CharField(max_length=200, blank=True, null=True)
changed_state = models.CharField(max_length=200, blank=True, null=True)
@receiver(pre_save, sender=File)
def compare_history(sender, instance, **kwargs):
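    # Before saving an existing File, diff the tracked fields against the
    # stored row and record one FileHistory entry per changed field.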
if not File.objects.filter(pk=instance.pk).exists():
return sender
EVALUATE = ('file_name', 'common_name', 'vendor', 'distributors',
'md5_hash', 'debug', 'status', 'released', 'obsolete',
'replaced_by', 'details', )
existing = File.objects.get(pk=instance.id)
for key in EVALUATE:
if (getattr(existing, key) != getattr(instance, key) and
any([getattr(existing, key), getattr(instance, key)])):
user = User.objects.get(pk=instance.modified_by_id)
FileHistory.objects.create(user=user,
dll=existing,
field=key,
original_state=getattr(existing, key),
changed_state=getattr(instance, key))
return sender
|
[
"django.db.models.TextField",
"django.contrib.auth.models.User.objects.get",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.dispatch.receiver",
"django.db.models.BooleanField",
"django.db.models.DateField",
"django.db.models.DateTimeField"
] |
[((3108, 3139), 'django.dispatch.receiver', 'receiver', (['pre_save'], {'sender': 'File'}), '(pre_save, sender=File)\n', (3116, 3139), False, 'from django.dispatch import receiver\n'), ((924, 978), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'datetime.datetime.utcnow'}), '(default=datetime.datetime.utcnow)\n', (944, 978), False, 'from django.db import models\n'), ((999, 1068), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'datetime.datetime.utcnow', 'auto_now': '(True)'}), '(default=datetime.datetime.utcnow, auto_now=True)\n', (1019, 1068), False, 'from django.db import models\n'), ((1127, 1177), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'related_name': '"""created_by"""'}), "(User, related_name='created_by')\n", (1144, 1177), False, 'from django.db import models\n'), ((1196, 1247), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'related_name': '"""modified_by"""'}), "(User, related_name='modified_by')\n", (1213, 1247), False, 'from django.db import models\n'), ((1264, 1296), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1280, 1296), False, 'from django.db import models\n'), ((1315, 1370), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'blank': '(True)', 'null': '(True)'}), '(max_length=200, blank=True, null=True)\n', (1331, 1370), False, 'from django.db import models\n'), ((1385, 1440), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)', 'null': '(True)'}), '(max_length=100, blank=True, null=True)\n', (1401, 1440), False, 'from django.db import models\n'), ((1456, 1525), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'choices': 'PLATFORM_CHOICES', 'blank': '(True)'}), '(max_length=10, choices=PLATFORM_CHOICES, blank=True)\n', (1472, 1525), False, 'from django.db import models\n'), ((1571, 1626), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'blank': '(True)', 'null': '(True)'}), '(max_length=200, blank=True, null=True)\n', (1587, 1626), False, 'from django.db import models\n'), ((1646, 1701), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'blank': '(True)', 'null': '(True)'}), '(max_length=200, blank=True, null=True)\n', (1662, 1701), False, 'from django.db import models\n'), ((1717, 1771), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(32)', 'blank': '(True)', 'null': '(True)'}), '(max_length=32, blank=True, null=True)\n', (1733, 1771), False, 'from django.db import models\n'), ((1784, 1838), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(60)', 'blank': '(True)', 'null': '(True)'}), '(max_length=60, blank=True, null=True)\n', (1800, 1838), False, 'from django.db import models\n'), ((1860, 1914), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(60)', 'blank': '(True)', 'null': '(True)'}), '(max_length=60, blank=True, null=True)\n', (1876, 1914), False, 'from django.db import models\n'), ((1928, 1983), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'choices': 'STATUS_CHOICES'}), '(max_length=10, choices=STATUS_CHOICES)\n', (1944, 1983), False, 'from django.db import models\n'), ((1999, 2038), 'django.db.models.DateField', 'models.DateField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (2015, 2038), False, 'from django.db import models\n'), ((2054, 2088), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (2073, 2088), False, 'from django.db import models\n'), ((2107, 2162), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'blank': '(True)', 'null': '(True)'}), '(max_length=200, blank=True, null=True)\n', (2123, 2162), False, 'from django.db import models\n'), ((2177, 2216), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (2193, 2216), False, 'from django.db import models\n'), ((2455, 2478), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {}), '(User)\n', (2472, 2478), False, 'from django.db import models\n'), ((2489, 2512), 'django.db.models.ForeignKey', 'models.ForeignKey', (['File'], {}), '(File)\n', (2506, 2512), False, 'from django.db import models\n'), ((2524, 2593), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'datetime.datetime.utcnow', 'auto_now': '(True)'}), '(default=datetime.datetime.utcnow, auto_now=True)\n', (2544, 2593), False, 'from django.db import models\n'), ((2640, 2658), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2656, 2658), False, 'from django.db import models\n'), ((2794, 2817), 'django.db.models.ForeignKey', 'models.ForeignKey', (['File'], {}), '(File)\n', (2811, 2817), False, 'from django.db import models\n'), ((2829, 2852), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {}), '(User)\n', (2846, 2852), False, 'from django.db import models\n'), ((2872, 2907), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (2892, 2907), False, 'from django.db import models\n'), ((2920, 2951), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)'}), '(max_length=40)\n', (2936, 2951), False, 'from django.db import models\n'), ((2973, 3028), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'blank': '(True)', 'null': '(True)'}), '(max_length=200, blank=True, null=True)\n', (2989, 3028), False, 'from django.db import models\n'), ((3049, 3104), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'blank': '(True)', 'null': '(True)'}), '(max_length=200, blank=True, null=True)\n', (3065, 3104), False, 'from django.db import models\n'), ((3683, 3727), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'pk': 'instance.modified_by_id'}), '(pk=instance.modified_by_id)\n', (3699, 3727), False, 'from django.contrib.auth.models import User\n')]
|
# -*- coding: utf-8 -*-
# from __future__ import annotations
from typing import Optional, TypeVar
from abc import ABC, abstractmethod
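# Forward declaration so annotations can reference UnitOfWork before the
# class below is defined (the class definition then shadows this TypeVar).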
UnitOfWork = TypeVar('UnitOfWork', bound='UnitOfWork')
class UnitOfWork(ABC):
"""
Unit of work
"""
@abstractmethod
def commit(self):
"""
Commit transaction
"""
raise NotImplementedError("Not implemented yet")
@abstractmethod
def add(self, entity) -> None:
"""
Add entity to transactions
@param entity:
@type entity:
@return: None
"""
raise NotImplementedError("Not implemented yet")
@abstractmethod
def flush(self) -> None:
"""
Remove all transactions
@return: None
"""
raise NotImplementedError("Not implemented yet")
|
[
"typing.TypeVar"
] |
[((150, 191), 'typing.TypeVar', 'TypeVar', (['"""UnitOfWork"""'], {'bound': '"""UnitOfWork"""'}), "('UnitOfWork', bound='UnitOfWork')\n", (157, 191), False, 'from typing import Optional, TypeVar\n')]
|
#!/usr/bin/env python3
"""
Creates the header file for the OSERDES test with the correct configuration
of the DATA_WIDTH and DATA_RATE
"""
import argparse
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--input', required=True, help="Input top file to be generated"
)
parser.add_argument(
'--output', required=True, help="Output top file to be generated"
)
parser.add_argument(
'--data_width', required=True, help="Data width of the OSERDES"
)
parser.add_argument(
'--data_rate', required=True, help="Data rate of the OSERDES"
)
args = parser.parse_args()
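    # Write the two `define macros first, then copy the input template verbatim.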
with open(args.input, "r") as f:
lines = f.read().splitlines()
with open(args.output, 'w') as f:
print('`define DATA_WIDTH_DEFINE {}'.format(args.data_width), file=f)
print('`define DATA_RATE_DEFINE \"{}\"'.format(args.data_rate), file=f)
for line in lines:
print(line, file=f)
if __name__ == "__main__":
main()
|
[
"argparse.ArgumentParser"
] |
[((183, 227), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (206, 227), False, 'import argparse\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import sys
from dcos_migrate.cmd import run
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
run()
|
[
"dcos_migrate.cmd.run",
"re.sub"
] |
[((145, 196), 're.sub', 're.sub', (['"""(-script\\\\.pyw|\\\\.exe)?$"""', '""""""', 'sys.argv[0]'], {}), "('(-script\\\\.pyw|\\\\.exe)?$', '', sys.argv[0])\n", (151, 196), False, 'import re\n'), ((200, 205), 'dcos_migrate.cmd.run', 'run', ([], {}), '()\n', (203, 205), False, 'from dcos_migrate.cmd import run\n')]
|
"""
Modification History:
Date: 3/16/2021
Time: 6:00PM
Description:
Will extract the phonemes from a TextGrid file and change them into
syllables using the ARPABET dictionary. Two functions will be used
to do this process. First function, get_phoneme, will get all of the
phonemes and their timings from the TextGrid file into arrays. The second
function, phoneme_to_syllables, will convert the set of phonemes into
syllables. The syllabifier package will be used for the conversion.
The syllable codes and syllable timings are saved.
Current inputs:
TextGrid File
Current output:
Size of the phoneme interval
Output all of the phonemes from the phoneme intervals.
NOTES:
    The code only works if all TextGrid files are formatted the same as the current input
Currently using Anaconda interpreter in base/root environment
Packages Downloaded:
Download the packages via cmd or anaconda cmd
ARPABET dictionary: https://github.com/vgautam/arpabet-syllabifier/blob/master/tests/tmp.ipynb
Textgrid Tool: https://github.com/kylebgorman/textgrid
CMD download: pip install TextGrid
"""
# importing pyglet module
import pyglet
def play_video(video_file):
# width / height of window
width = 720
height = 720
# creating a window
title = "demo"
window = pyglet.window.Window(width, height, title)
    # video path (use the argument, falling back to a bundled demo clip)
    vidPath = video_file if video_file else "facetest.mp4"
# creating a media player object
player = pyglet.media.Player()
source = pyglet.media.StreamingSource()
MediaLoad = pyglet.media.load(vidPath)
# add this media in the queue
player.queue(MediaLoad)
# play the video
player.play()
# on draw event
@window.event
def on_draw():
        # clear the window
        window.clear()
        # if the player has a source
        # and that source has a video format
if player.source and player.source.video_format:
            # get the video's current texture and
            # blit it onto the window surface
player.get_texture().blit(0, 0)
# # key press event
# @window.event
# def on_key_press(symbol, modifier):
# # key "p" get press
# if symbol == pyglet.window.key.P:
# # pause the video
# player.pause()
# # printing message
# print("Video is paused")
# # key "r" get press
# if symbol == pyglet.window.key.R:
# # resume the video
# player.play()
# # printing message
# print("Video is resumed")
# run the pyglet application
pyglet.app.run()
|
[
"pyglet.media.Player",
"pyglet.app.run",
"pyglet.media.StreamingSource",
"pyglet.media.load",
"pyglet.window.Window"
] |
[((1462, 1504), 'pyglet.window.Window', 'pyglet.window.Window', (['width', 'height', 'title'], {}), '(width, height, title)\n', (1482, 1504), False, 'import pyglet\n'), ((1602, 1623), 'pyglet.media.Player', 'pyglet.media.Player', ([], {}), '()\n', (1621, 1623), False, 'import pyglet\n'), ((1637, 1667), 'pyglet.media.StreamingSource', 'pyglet.media.StreamingSource', ([], {}), '()\n', (1665, 1667), False, 'import pyglet\n'), ((1684, 1710), 'pyglet.media.load', 'pyglet.media.load', (['vidPath'], {}), '(vidPath)\n', (1701, 1710), False, 'import pyglet\n'), ((2825, 2841), 'pyglet.app.run', 'pyglet.app.run', ([], {}), '()\n', (2839, 2841), False, 'import pyglet\n')]
|
# imports
# re and datetime live on the base OS image, but twilio, dropbox, and cv2 must be installed every time a new docker container is started
# this may take a minute or two the first time you run this program after restarting your computer
import sh
import re
import datetime
from robot_command.rpl import *
# Setup required: Enter the account SID, auth token, and 'from' phone number from your Twilio account here:
account_sid = ""
auth_token = ""
from_phone_number = "+"
# Enter your 'to' (destination) phone number here:
to_phone_number = '+16088498381'
# Enter your Dropbox API Key here:
dropbox_api_key = ""
try:
from twilio.rest import Client
except:
notify("The first time this program is run it must import three packages (Twilio, Dropbox, OpenCV). "
"This may take a minute or two depending on your internet connection. Subsequent program runs will not experience this delay")
sh.sudo.pip3.install("twilio")
from twilio.rest import Client
try:
import dropbox
except:
sh.sudo.pip3.install("dropbox")
import dropbox
try:
import cv2
except:
sh.sudo.pip3.install("opencv-python")
import cv2
def take_and_send_image(filename=None):
    if filename is None:
filename = "robot_photo.jpg"
take_snapshot(filename)
url = upload_file_to_dropbox(filename)
send_mms(url)
def take_snapshot(filename):
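    # Capture a single frame from the default camera (device 0) and save it.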
videoCaptureObject = cv2.VideoCapture(0)
result = True
while (result):
ret, frame = videoCaptureObject.read()
cv2.imwrite(filename, frame)
result = False
videoCaptureObject.release()
cv2.destroyAllWindows()
def upload_file_to_dropbox(filename):
# FIXME - look for an intelligent path, not CWD
# add timestamp
newfilename = "/robot_images/" + str(datetime.datetime.today().strftime('%d-%m-%Y-%H-%M-%S')) + "_" + filename
file_to_upload = newfilename
print("Begin uploading " + file_to_upload + " to DropBox")
# Create a dropbox object using an API v2 key
d = dropbox.Dropbox(dropbox_api_key)
# open the file and upload it
with open(filename, "rb") as f:
d.files_upload(f.read(), file_to_upload)
# create a shared link
link = d.sharing_create_shared_link(file_to_upload)
# url which can be shared
url = link.url
    # direct-download link, made by replacing ?dl=0 with ?raw=1
dl_url = re.sub(r"\?dl\=0", "?raw=1", url)
print (dl_url)
return dl_url
def send_mms(url):
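    # Send the uploaded image's URL as an MMS through Twilio's REST API.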
client = Client(account_sid, auth_token)
message = client.messages \
.create(
body='Here is a message from your ZA6 robot',
from_= from_phone_number,
media_url=[url],
to= to_phone_number
)
print(message.sid)
|
[
"datetime.datetime.today",
"dropbox.Dropbox",
"cv2.destroyAllWindows",
"cv2.imwrite",
"sh.sudo.pip3.install",
"cv2.VideoCapture",
"twilio.rest.Client",
"re.sub"
] |
[((1401, 1420), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (1417, 1420), False, 'import cv2\n'), ((1603, 1626), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1624, 1626), False, 'import cv2\n'), ((2010, 2042), 'dropbox.Dropbox', 'dropbox.Dropbox', (['dropbox_api_key'], {}), '(dropbox_api_key)\n', (2025, 2042), False, 'import dropbox\n'), ((2377, 2411), 're.sub', 're.sub', (['"""\\\\?dl\\\\=0"""', '"""?raw=1"""', 'url'], {}), "('\\\\?dl\\\\=0', '?raw=1', url)\n", (2383, 2411), False, 'import re\n'), ((2483, 2514), 'twilio.rest.Client', 'Client', (['account_sid', 'auth_token'], {}), '(account_sid, auth_token)\n', (2489, 2514), False, 'from twilio.rest import Client\n'), ((914, 944), 'sh.sudo.pip3.install', 'sh.sudo.pip3.install', (['"""twilio"""'], {}), "('twilio')\n", (934, 944), False, 'import sh\n'), ((1017, 1048), 'sh.sudo.pip3.install', 'sh.sudo.pip3.install', (['"""dropbox"""'], {}), "('dropbox')\n", (1037, 1048), False, 'import sh\n'), ((1101, 1138), 'sh.sudo.pip3.install', 'sh.sudo.pip3.install', (['"""opencv-python"""'], {}), "('opencv-python')\n", (1121, 1138), False, 'import sh\n'), ((1514, 1542), 'cv2.imwrite', 'cv2.imwrite', (['filename', 'frame'], {}), '(filename, frame)\n', (1525, 1542), False, 'import cv2\n'), ((1780, 1805), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (1803, 1805), False, 'import datetime\n')]
|
#
# SPDX-License-Identifier: Apache-2.0
#
# Copyright 2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
from unittest import TestCase
from unittest.mock import patch, call
from kios import config
from kios.data import EXECUTABLE_NAME, NETWORK_PROTOCOL, TRANSPORT_PROTOCOL, PORT_NUMBER, NetworkProtocol, \
TransportProtocol
from kios.exception import DoRollback, UnexpectedLineError
from kios.operation import find_and_save_ports, save_ports_from_data_file
from .helper import get_test_data, TestOperationControl, get_test_file_path, patch_config_app_platform
_data_se = [get_test_data('win_netstat.txt'), get_test_data('win_netstat2.txt')]
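# Reusable patch: point kios.config.data_file at a bundled Windows netstat capture.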
path_config_data_file = patch('kios.config.data_file', new=get_test_file_path('win_netstat.txt'))
part1 = [{EXECUTABLE_NAME: 'app1.exe', NETWORK_PROTOCOL: NetworkProtocol.IPv4,
TRANSPORT_PROTOCOL: TransportProtocol.TCP, PORT_NUMBER: 1},
{EXECUTABLE_NAME: None, NETWORK_PROTOCOL: NetworkProtocol.IPv4,
TRANSPORT_PROTOCOL: TransportProtocol.TCP, PORT_NUMBER: 2},
{EXECUTABLE_NAME: 'app1.exe', NETWORK_PROTOCOL: NetworkProtocol.IPv6,
TRANSPORT_PROTOCOL: TransportProtocol.TCP, PORT_NUMBER: 1}]
part2 = [{EXECUTABLE_NAME: None, NETWORK_PROTOCOL: NetworkProtocol.IPv6,
TRANSPORT_PROTOCOL: TransportProtocol.TCP, PORT_NUMBER: 2},
{EXECUTABLE_NAME: 'app2.exe', NETWORK_PROTOCOL: NetworkProtocol.IPv4,
TRANSPORT_PROTOCOL: TransportProtocol.UDP, PORT_NUMBER: 1},
{EXECUTABLE_NAME: None, NETWORK_PROTOCOL: NetworkProtocol.IPv4,
TRANSPORT_PROTOCOL: TransportProtocol.UDP, PORT_NUMBER: 2}]
part3 = [{EXECUTABLE_NAME: 'app1.exe', NETWORK_PROTOCOL: NetworkProtocol.IPv6,
TRANSPORT_PROTOCOL: TransportProtocol.UDP, PORT_NUMBER: 1},
{EXECUTABLE_NAME: None, NETWORK_PROTOCOL: NetworkProtocol.IPv6,
TRANSPORT_PROTOCOL: TransportProtocol.UDP, PORT_NUMBER: 2}]
port_data1 = [*part1, *part2, *part3]
port_data2 = [{EXECUTABLE_NAME: 'app3.exe', NETWORK_PROTOCOL: NetworkProtocol.IPv6,
TRANSPORT_PROTOCOL: TransportProtocol.UDP, PORT_NUMBER: 3}]
@patch_config_app_platform
@patch('kios.factory.persistence_manager', autospec=True)
class OperationsTestCase(TestCase):
@patch('kios.operation.time', autospec=True, side_effect=[0, 1, 6, 10, 0, 1, 6, 10])
@patch('kios.operation.sleep', autospec=True)
@patch('kios.factory.data_source', autospec=True, **{'return_value.get_port_data.side_effect': _data_se})
def test_find_and_save_ports_behavior(self, data_source, sleep, time, persistence_manager):
find_and_save_ports(TestOperationControl())
sleep.assert_called_once_with(config.port_scan_wake_up_interval)
self.assertEqual(persistence_manager.return_value.save_port_data.call_count, 2)
persistence_manager.return_value.save_port_data.assert_has_calls([call(port_data1, commit=True),
call(port_data2, commit=True)])
self.assertEqual(data_source.return_value.get_port_data.call_count, 2)
@path_config_data_file
def test_save_ports_from_data_file_behavior(self, persistence_manager):
save_ports_from_data_file(TestOperationControl())
self.assertEqual(persistence_manager.return_value.save_port_data.call_count, 1)
persistence_manager.return_value.save_port_data.assert_has_calls([call(port_data1)])
@path_config_data_file
@patch('kios.config.import_batch_size', new=3)
def test_save_ports_from_data_file_behavior2(self, persistence_manager):
save_ports_from_data_file(TestOperationControl(3))
self.assertEqual(persistence_manager.return_value.save_port_data.call_count, 3)
persistence_manager.return_value.save_port_data.assert_has_calls([call(part1), call(part2), call(part3)])
@path_config_data_file
@patch('kios.config.import_batch_size', new=3)
def test_save_ports_from_data_file_behavior3(self, persistence_manager):
with self.assertRaises(DoRollback):
save_ports_from_data_file(TestOperationControl(2))
self.assertEqual(persistence_manager.return_value.save_port_data.call_count, 2)
persistence_manager.return_value.save_port_data.assert_has_calls([call(part1), call(part2)])
@patch('kios.config.data_file', new=None)
def test_save_ports_from_data_file_behaviour4(self, persistence_manager):
with self.assertRaises(RuntimeError):
save_ports_from_data_file(TestOperationControl())
persistence_manager.return_value.save_port_data.assert_not_called()
@patch('kios.config.data_file', new=get_test_file_path('win_netstat3.txt'))
def test_save_ports_from_data_file_behaviour5(self, persistence_manager):
with self.assertRaises(UnexpectedLineError) as c:
save_ports_from_data_file(TestOperationControl())
persistence_manager.return_value.save_port_data.assert_not_called()
self.assertEqual(c.exception.line_no, 6)
self.assertEqual(c.exception.line, ' [app3.exe]')
|
[
"unittest.mock.patch",
"unittest.mock.call"
] |
[((2689, 2745), 'unittest.mock.patch', 'patch', (['"""kios.factory.persistence_manager"""'], {'autospec': '(True)'}), "('kios.factory.persistence_manager', autospec=True)\n", (2694, 2745), False, 'from unittest.mock import patch, call\n'), ((2788, 2875), 'unittest.mock.patch', 'patch', (['"""kios.operation.time"""'], {'autospec': '(True)', 'side_effect': '[0, 1, 6, 10, 0, 1, 6, 10]'}), "('kios.operation.time', autospec=True, side_effect=[0, 1, 6, 10, 0, 1,\n 6, 10])\n", (2793, 2875), False, 'from unittest.mock import patch, call\n'), ((2877, 2921), 'unittest.mock.patch', 'patch', (['"""kios.operation.sleep"""'], {'autospec': '(True)'}), "('kios.operation.sleep', autospec=True)\n", (2882, 2921), False, 'from unittest.mock import patch, call\n'), ((2927, 3036), 'unittest.mock.patch', 'patch', (['"""kios.factory.data_source"""'], {'autospec': '(True)'}), "('kios.factory.data_source', autospec=True, **{\n 'return_value.get_port_data.side_effect': _data_se})\n", (2932, 3036), False, 'from unittest.mock import patch, call\n'), ((4007, 4052), 'unittest.mock.patch', 'patch', (['"""kios.config.import_batch_size"""'], {'new': '(3)'}), "('kios.config.import_batch_size', new=3)\n", (4012, 4052), False, 'from unittest.mock import patch, call\n'), ((4424, 4469), 'unittest.mock.patch', 'patch', (['"""kios.config.import_batch_size"""'], {'new': '(3)'}), "('kios.config.import_batch_size', new=3)\n", (4429, 4469), False, 'from unittest.mock import patch, call\n'), ((4849, 4889), 'unittest.mock.patch', 'patch', (['"""kios.config.data_file"""'], {'new': 'None'}), "('kios.config.data_file', new=None)\n", (4854, 4889), False, 'from unittest.mock import patch, call\n'), ((3415, 3444), 'unittest.mock.call', 'call', (['port_data1'], {'commit': '(True)'}), '(port_data1, commit=True)\n', (3419, 3444), False, 'from unittest.mock import patch, call\n'), ((3520, 3549), 'unittest.mock.call', 'call', (['port_data2'], {'commit': '(True)'}), '(port_data2, commit=True)\n', (3524, 3549), False, 'from unittest.mock import patch, call\n'), ((3955, 3971), 'unittest.mock.call', 'call', (['port_data1'], {}), '(port_data1)\n', (3959, 3971), False, 'from unittest.mock import patch, call\n'), ((4351, 4362), 'unittest.mock.call', 'call', (['part1'], {}), '(part1)\n', (4355, 4362), False, 'from unittest.mock import patch, call\n'), ((4364, 4375), 'unittest.mock.call', 'call', (['part2'], {}), '(part2)\n', (4368, 4375), False, 'from unittest.mock import patch, call\n'), ((4377, 4388), 'unittest.mock.call', 'call', (['part3'], {}), '(part3)\n', (4381, 4388), False, 'from unittest.mock import patch, call\n'), ((4816, 4827), 'unittest.mock.call', 'call', (['part1'], {}), '(part1)\n', (4820, 4827), False, 'from unittest.mock import patch, call\n'), ((4829, 4840), 'unittest.mock.call', 'call', (['part2'], {}), '(part2)\n', (4833, 4840), False, 'from unittest.mock import patch, call\n')]
|
#-*- coding: utf-8 -*-
# site: http://clickagain.sakura.ne.jp/cgi-bin/sort11/data.cgi?level10=1&mix=1
from bs4 import BeautifulSoup
import urllib
import re
def getGroup(arr, g):
for ele in arr:
if (ele[0] == g):
return ele
# if not, add group
new_group = (g, [])
arr.append( new_group )
return new_group
def processTitle(input):
    # clickagain's titles are inconsistent, so normalize them:
    # double every parenthesis, then strip the doubled spans (dropping all
    # parenthesized text) and trim the result.
    # CF: timepiece phase (CN ver) ... -> SOLVED
return re.sub(r"\(\(.*?\)\)", '', input.replace(u"(", '((').replace(u")", '))')).strip()
#
# ==================================================================
#
def parse8():
return parse("8AC", "http://clickagain.sakura.ne.jp/cgi-bin/sort11/data.cgi?level8=1&mix=1")
def parse9():
return parse("9AC", "http://clickagain.sakura.ne.jp/cgi-bin/sort11/data.cgi?level9=1&mix=1")
def parse10():
return parse("10AC", "http://clickagain.sakura.ne.jp/cgi-bin/sort11/data.cgi?level10=1&mix=1")
def parse11():
return parse("11AC", "http://clickagain.sakura.ne.jp/cgi-bin/sort11/data.cgi?level11=1&mix=1")
def parse12():
return parse("12AC", "http://clickagain.sakura.ne.jp/cgi-bin/sort11/data.cgi?level12=1&mix=1")
def parse8N():
return parseN("8AC", "http://clickagain.sakura.ne.jp/cgi-bin/sort11/data.cgi?level8=1&mix=1")
def parse9N():
return parseN("9AC", "http://clickagain.sakura.ne.jp/cgi-bin/sort11/data.cgi?level9=1&mix=1")
def parse10N():
return parseN("10AC", "http://clickagain.sakura.ne.jp/cgi-bin/sort11/data.cgi?level10=1&mix=1")
def parse11N():
return parseN("11AC", "http://clickagain.sakura.ne.jp/cgi-bin/sort11/data.cgi?level11=1&mix=1")
def parse12N():
return parseN("12AC", "http://clickagain.sakura.ne.jp/cgi-bin/sort11/data.cgi?level12=1&mix=1")
def parse12_7():
# common
data = urllib.urlopen("http://clickagain.sakura.ne.jp/cgi-bin/sort11/data.cgi?level12=1")
soup = BeautifulSoup(data)
res = [] # [(group, [song name, ..]), ..]
table = soup.find_all('table')[5]
trs = table.find_all('tr')
group_name = 1
group_idx = 1
idx = 0
for tr in trs:
idx += 1
if (idx <= 4):
continue
# 0:ver, 1:title, 5:normal, 6:hard, 7:op1P, 8:op2P, 9:desc
tds = tr.find_all('td')
if (len(tds) < 9):
# group
            # stop once group idx reaches 8 (the 8th generation, "8기")
if (group_idx >= 8):
break
group_name = str(group_idx) + u'기'
group_idx += 1
continue
title = processTitle(tds[1].get_text())
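        # the cell's inline CSS colour encodes the chart difficulty:
        # red -> SPA, orange -> SPH, #0066FF -> SPN (default SPA)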
diff = tds[1]['style']
if (diff.find("red") >= 0):
diff = "SPA"
elif (diff.find("orange") >= 0):
diff = "SPH"
elif (diff.find("#0066FF") >= 0):
diff = "SPN"
else:
diff = "SPA"
group = getGroup(res, group_name)
group[1].append( (title, diff) )
return res
def parse(tableID, uri):
# common
data = urllib.urlopen(uri)
soup = BeautifulSoup(data)
res = [] # [(group, [song name, ..]), ..]
table = soup.find('table', id=tableID)
trs = table.find_all('tr')
for tr in trs:
if (('class' in tr) and tr['class'][0] == u'top'):
continue
# 0:ver, 1:title, 5:normal, 6:hard, 7:op1P, 8:op2P, 9:desc
tds = tr.find_all('td')
if (len(tds) < 9):
break
title = processTitle(tds[1].get_text())
if (title == "title"):
continue
diff = tds[1]['style']
if (diff.find("red") >= 0):
diff = "SPA"
elif (diff.find("orange") >= 0):
diff = "SPH"
elif (diff.find("#0066FF") >= 0):
diff = "SPN"
else:
diff = "SPA"
lv = tds[6].get_text()
group = getGroup(res, lv)
group[1].append( (title, diff) )
return res
def parseN(tableID, uri):
# common
data = urllib.urlopen(uri)
soup = BeautifulSoup(data)
res = [] # [(group, [song name, ..]), ..]
table = soup.find('table', id=tableID)
trs = table.find_all('tr')
for tr in trs:
if (('class' in tr) and tr['class'][0] == u'top'):
continue
# 0:ver, 1:title, 5:normal, 6:hard, 7:op1P, 8:op2P, 9:desc
tds = tr.find_all('td')
if (len(tds) < 9):
break
title = processTitle(tds[1].get_text())
if (title == "title"):
continue
diff = tds[1]['style']
if (diff.find("red") >= 0):
diff = "SPA"
elif (diff.find("orange") >= 0):
diff = "SPH"
elif (diff.find("#0066FF") >= 0):
diff = "SPN"
else:
diff = "SPA"
lv = tds[5].get_text()
group = getGroup(res, lv)
group[1].append( (title, diff) )
return res
#print parse12_7()
|
[
"bs4.BeautifulSoup",
"urllib.urlopen"
] |
[((1794, 1881), 'urllib.urlopen', 'urllib.urlopen', (['"""http://clickagain.sakura.ne.jp/cgi-bin/sort11/data.cgi?level12=1"""'], {}), "(\n 'http://clickagain.sakura.ne.jp/cgi-bin/sort11/data.cgi?level12=1')\n", (1808, 1881), False, 'import urllib\n'), ((1885, 1904), 'bs4.BeautifulSoup', 'BeautifulSoup', (['data'], {}), '(data)\n', (1898, 1904), False, 'from bs4 import BeautifulSoup\n'), ((2751, 2770), 'urllib.urlopen', 'urllib.urlopen', (['uri'], {}), '(uri)\n', (2765, 2770), False, 'import urllib\n'), ((2779, 2798), 'bs4.BeautifulSoup', 'BeautifulSoup', (['data'], {}), '(data)\n', (2792, 2798), False, 'from bs4 import BeautifulSoup\n'), ((3533, 3552), 'urllib.urlopen', 'urllib.urlopen', (['uri'], {}), '(uri)\n', (3547, 3552), False, 'import urllib\n'), ((3561, 3580), 'bs4.BeautifulSoup', 'BeautifulSoup', (['data'], {}), '(data)\n', (3574, 3580), False, 'from bs4 import BeautifulSoup\n')]
|
import argparse
import torch
from torch import nn
from torch import optim
from torchvision import transforms, datasets, models
from collections import OrderedDict
from PIL import Image
import numpy as np
import json
#Take inputs from user
parser = argparse.ArgumentParser()
parser.add_argument('path_to_image', type=str, help='Set path to image', default='./flowers/test/1/image_06743.jpg')
parser.add_argument('checkpoint', type=str, help='Load checkpoint', default='./checkpoint.pth')
parser.add_argument('--top_k', type=int, help='Return top k most likely classes', default=5)
parser.add_argument('--category_names', type=str, help='Use a mapping of categories to real names', default='cat_to_name.json')
parser.add_argument('--gpu', type=str, help='Use GPU for inference', default='cpu')
args = parser.parse_args()
def load_checkpoint(filepath):
checkpoint = torch.load(filepath)
    if checkpoint['model'] == "vgg16":
        model = models.vgg16(pretrained=True)
    elif checkpoint['model'] == "densenet121":
        model = models.densenet121(pretrained=True)
    else:
        raise ValueError("Unsupported model architecture: {}".format(checkpoint['model']))
model.eval()
model.classifier = checkpoint['classifier']
model.load_state_dict(checkpoint['state_dict'])
model.class_to_idx = checkpoint['class_to_idx']
epoch = checkpoint['epoch']
return model
def process_image(image):
''' Scales, crops, and normalizes a PIL image for a PyTorch model,
returns an Numpy array
'''
#Perform transformations, convert to tensor and normalize
transform = transforms.Compose([transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])])
#Open image and apply transformation
pil_image = Image.open(image)
pil_image = transform(pil_image)
#Convert to numpy array
np_image = np.array(pil_image)
return np_image
def predict(image_path, model, topk, device):
''' Predict the class (or classes) of an image using a trained deep learning model.
'''
model = load_checkpoint(model)
model.eval()
model.to(device)
np_image = process_image(image_path) #numpy array returned
torch_image = torch.from_numpy(np_image).to(device) #convert to tensor
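    # add a leading batch dimension so the model sees shape (1, C, H, W)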
torch_image = torch_image.unsqueeze_(0)
torch_image = torch_image.float() #returns float tensor of single dimension (1 column)
with torch.no_grad():
output = model.forward(torch_image)
ps = torch.exp(output)
    # take the top-k probabilities and their indices (top-1 when k is not given)
if topk is None:
probs, indices = torch.topk(ps, 1)
else:
probs, indices = torch.topk(ps, topk)
#invert class_to_idx
inv_class_to_idx = {index: cls for cls, index in model.class_to_idx.items()}
classes = []
for index in indices.cpu().numpy()[0]: #iterating through indices
classes.append(inv_class_to_idx[index])
return probs.cpu().numpy()[0], classes
# Print the most likely image class and it's associated probability
# map with json
if args.gpu == "gpu":
device = "cuda:0"
elif args.gpu == "cpu":
device = "cpu"
probs, classes = predict(args.path_to_image, args.checkpoint, args.top_k, device)
if args.category_names is not None:
with open(args.category_names, 'r') as f:
cat_to_name = json.load(f)
classes = [cat_to_name[c] for c in classes]
print("Most probable class:", classes[0])
print("Probability :", probs[0])
if args.top_k is not None:
print("\nTop",args.top_k,"probable classes and their probabilities are")
for index in range(len(classes)):
print(classes[index],":",probs[index])
|
[
"torch.from_numpy",
"json.load",
"torch.topk",
"argparse.ArgumentParser",
"torch.load",
"torchvision.models.densenet121",
"torchvision.transforms.Normalize",
"PIL.Image.open",
"torchvision.transforms.ToTensor",
"torch.exp",
"numpy.array",
"torchvision.transforms.CenterCrop",
"torchvision.models.vgg16",
"torch.no_grad",
"torchvision.transforms.Resize"
] |
[((250, 275), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (273, 275), False, 'import argparse\n'), ((870, 890), 'torch.load', 'torch.load', (['filepath'], {}), '(filepath)\n', (880, 890), False, 'import torch\n'), ((1899, 1916), 'PIL.Image.open', 'Image.open', (['image'], {}), '(image)\n', (1909, 1916), False, 'from PIL import Image\n'), ((2002, 2021), 'numpy.array', 'np.array', (['pil_image'], {}), '(pil_image)\n', (2010, 2021), True, 'import numpy as np\n'), ((946, 975), 'torchvision.models.vgg16', 'models.vgg16', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (958, 975), False, 'from torchvision import transforms, datasets, models\n'), ((2568, 2583), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2581, 2583), False, 'import torch\n'), ((2644, 2661), 'torch.exp', 'torch.exp', (['output'], {}), '(output)\n', (2653, 2661), False, 'import torch\n'), ((2765, 2782), 'torch.topk', 'torch.topk', (['ps', '(1)'], {}), '(ps, 1)\n', (2775, 2782), False, 'import torch\n'), ((2818, 2838), 'torch.topk', 'torch.topk', (['ps', 'topk'], {}), '(ps, topk)\n', (2828, 2838), False, 'import torch\n'), ((3507, 3519), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3516, 3519), False, 'import json\n'), ((1039, 1074), 'torchvision.models.densenet121', 'models.densenet121', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (1057, 1074), False, 'from torchvision import transforms, datasets, models\n'), ((1530, 1552), 'torchvision.transforms.Resize', 'transforms.Resize', (['(256)'], {}), '(256)\n', (1547, 1552), False, 'from torchvision import transforms, datasets, models\n'), ((1591, 1617), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['(224)'], {}), '(224)\n', (1612, 1617), False, 'from torchvision import transforms, datasets, models\n'), ((1656, 1677), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1675, 1677), False, 'from torchvision import transforms, datasets, models\n'), ((1715, 1781), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['[0.485, 0.456, 0.406]', '[0.229, 0.224, 0.225]'], {}), '([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n', (1735, 1781), False, 'from torchvision import transforms, datasets, models\n'), ((2361, 2387), 'torch.from_numpy', 'torch.from_numpy', (['np_image'], {}), '(np_image)\n', (2377, 2387), False, 'import torch\n')]
|
from bottle import run, get, view, post, request
import json
import jwt
import requests
##############################
@get("/company")
@view("index_company.html")
def do():
return dict(company_name="SUPER")
@get("/company-token")
@view("index_company_token.html")
def do():
return dict(company_name="Token stuff")
@post("/get-name-by-cpr")
def do():
    # Look up the name in a flat file keyed by CPR (no database is used here)
data_from_client = json.load(request.body)
print("cpr", data_from_client)
cpr = data_from_client['cpr']
file_name = "./data/" + cpr + ".txt" # In python you go from the root
opened_file = open(file_name, "r")
return opened_file.read()
@post("/process-jwt-token")
def do():
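    # Decode the posted JWT; any failure (bad JSON, bad signature, missing
    # email claim) is reported out-of-band via SMS.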
result = ""
try:
token = json.load(request.body)["jwt"]
try:
result = jwt.decode(
token, "jwt-secret-key", algorithms=["HS256"])
except Exception as jwt_error:
send_sms(jwt_error)
try:
email = result["email"]
except Exception as emailException:
send_sms("Email missing")
except Exception as json_error:
send_sms(json_error)
return str(result)
def send_sms(message):
endpoint = "https://fatsms.com/api-send-sms"
phone = "42659183"
my_api_key = "7893f0d6872d606467a9e0e3a998d8db"
data_dict = {"to_phone": phone, "api_key": my_api_key, "message": message}
requests.post(endpoint, data = data_dict)
print(str(data_dict))
##############################
run(host="127.0.0.1", port=4444, debug=True, reloader=True, server="paste")
|
[
"bottle.view",
"json.load",
"bottle.get",
"bottle.run",
"requests.post",
"bottle.post",
"jwt.decode"
] |
[((123, 138), 'bottle.get', 'get', (['"""/company"""'], {}), "('/company')\n", (126, 138), False, 'from bottle import run, get, view, post, request\n'), ((140, 166), 'bottle.view', 'view', (['"""index_company.html"""'], {}), "('index_company.html')\n", (144, 166), False, 'from bottle import run, get, view, post, request\n'), ((218, 239), 'bottle.get', 'get', (['"""/company-token"""'], {}), "('/company-token')\n", (221, 239), False, 'from bottle import run, get, view, post, request\n'), ((241, 273), 'bottle.view', 'view', (['"""index_company_token.html"""'], {}), "('index_company_token.html')\n", (245, 273), False, 'from bottle import run, get, view, post, request\n'), ((331, 355), 'bottle.post', 'post', (['"""/get-name-by-cpr"""'], {}), "('/get-name-by-cpr')\n", (335, 355), False, 'from bottle import run, get, view, post, request\n'), ((684, 710), 'bottle.post', 'post', (['"""/process-jwt-token"""'], {}), "('/process-jwt-token')\n", (688, 710), False, 'from bottle import run, get, view, post, request\n'), ((1538, 1613), 'bottle.run', 'run', ([], {'host': '"""127.0.0.1"""', 'port': '(4444)', 'debug': '(True)', 'reloader': '(True)', 'server': '"""paste"""'}), "(host='127.0.0.1', port=4444, debug=True, reloader=True, server='paste')\n", (1541, 1613), False, 'from bottle import run, get, view, post, request\n'), ((444, 467), 'json.load', 'json.load', (['request.body'], {}), '(request.body)\n', (453, 467), False, 'import json\n'), ((1428, 1467), 'requests.post', 'requests.post', (['endpoint'], {'data': 'data_dict'}), '(endpoint, data=data_dict)\n', (1441, 1467), False, 'import requests\n'), ((762, 785), 'json.load', 'json.load', (['request.body'], {}), '(request.body)\n', (771, 785), False, 'import json\n'), ((828, 885), 'jwt.decode', 'jwt.decode', (['token', '"""jwt-secret-key"""'], {'algorithms': "['HS256']"}), "(token, 'jwt-secret-key', algorithms=['HS256'])\n", (838, 885), False, 'import jwt\n')]
|
from redis.exceptions import RedisError
from rq.exceptions import NoSuchJobError
from rq.job import Job
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy_utils import ChoiceType
from busy_beaver.extensions import db, rq
class BaseModel(db.Model):
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
date_created = db.Column(db.DateTime, default=db.func.current_timestamp())
date_modified = db.Column(
db.DateTime,
onupdate=db.func.current_timestamp(),
default=db.func.current_timestamp(),
)
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
def patch(self, data: dict):
for key, value in data.items():
setattr(self, key, value)
class Task(BaseModel):
"""Task Base Table"""
__tablename__ = "task"
class TaskState:
COMPLETED = "completed"
SCHEDULED = "scheduled"
FAILED = "failed"
CANCELLED = "cancelled"
STATES = [(COMPLETED,) * 2, (SCHEDULED,) * 2, (FAILED,) * 2, (CANCELLED,) * 2]
INITIAL_STATE = SCHEDULED
# Attributes
job_id = db.Column(db.String(36), index=True)
name = db.Column(db.String(128), index=True)
task_state = db.Column(
ChoiceType(TaskState.STATES), default=TaskState.INITIAL_STATE, index=True
)
data = db.Column(db.JSON)
time_to_post = db.Column(db.String(20), nullable=True)
def get_rq_job(self):
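        # Fetch the live RQ job by its stored id; returns None when Redis is
        # unreachable or the job no longer exists.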
try:
rq_job = Job.fetch(self.job_id, rq.connection)
except (RedisError, NoSuchJobError):
return None
return rq_job
def get_progress(self):
job = self.get_rq_job()
return job.meta.get("progress", 0) if job is not None else 100
def __repr__(self): # pragma: no cover
return f"<Task: {self.job_id}-{self.name}>"
|
[
"busy_beaver.extensions.db.String",
"sqlalchemy_utils.ChoiceType",
"busy_beaver.extensions.db.Column",
"busy_beaver.extensions.db.func.current_timestamp",
"rq.job.Job.fetch"
] |
[((303, 342), 'busy_beaver.extensions.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (312, 342), False, 'from busy_beaver.extensions import db, rq\n'), ((1356, 1374), 'busy_beaver.extensions.db.Column', 'db.Column', (['db.JSON'], {}), '(db.JSON)\n', (1365, 1374), False, 'from busy_beaver.extensions import db, rq\n'), ((1153, 1166), 'busy_beaver.extensions.db.String', 'db.String', (['(36)'], {}), '(36)\n', (1162, 1166), False, 'from busy_beaver.extensions import db, rq\n'), ((1201, 1215), 'busy_beaver.extensions.db.String', 'db.String', (['(128)'], {}), '(128)\n', (1210, 1215), False, 'from busy_beaver.extensions import db, rq\n'), ((1265, 1293), 'sqlalchemy_utils.ChoiceType', 'ChoiceType', (['TaskState.STATES'], {}), '(TaskState.STATES)\n', (1275, 1293), False, 'from sqlalchemy_utils import ChoiceType\n'), ((1404, 1417), 'busy_beaver.extensions.db.String', 'db.String', (['(20)'], {}), '(20)\n', (1413, 1417), False, 'from busy_beaver.extensions import db, rq\n'), ((393, 420), 'busy_beaver.extensions.db.func.current_timestamp', 'db.func.current_timestamp', ([], {}), '()\n', (418, 420), False, 'from busy_beaver.extensions import db, rq\n'), ((491, 518), 'busy_beaver.extensions.db.func.current_timestamp', 'db.func.current_timestamp', ([], {}), '()\n', (516, 518), False, 'from busy_beaver.extensions import db, rq\n'), ((536, 563), 'busy_beaver.extensions.db.func.current_timestamp', 'db.func.current_timestamp', ([], {}), '()\n', (561, 563), False, 'from busy_beaver.extensions import db, rq\n'), ((1495, 1532), 'rq.job.Job.fetch', 'Job.fetch', (['self.job_id', 'rq.connection'], {}), '(self.job_id, rq.connection)\n', (1504, 1532), False, 'from rq.job import Job\n')]
|
import unittest, sys
import timidi.tests
def test():
return unittest.main(timidi.tests)
if __name__ == "__main__":
sys.exit(0 if test() else 1)
|
[
"unittest.main"
] |
[((67, 94), 'unittest.main', 'unittest.main', (['timidi.tests'], {}), '(timidi.tests)\n', (80, 94), False, 'import unittest, sys\n')]
|
from django.core.management.base import BaseCommand, CommandError
import time
from atlas.prodtask.mcevgen import sync_cvmfs_db
class Command(BaseCommand):
args = ''
help = 'Sync cvmfs JOs'
def handle(self, *args, **options):
self.stdout.write('Start sync cvmfs for JOs at %s'%time.ctime())
try:
sync_cvmfs_db()
except Exception as e:
raise CommandError('Some problem during syncing: %s'%str(e))
self.stdout.write('Successfully finished cvmfs sync: %s'%time.ctime())
|
[
"atlas.prodtask.mcevgen.sync_cvmfs_db",
"time.ctime"
] |
[((339, 354), 'atlas.prodtask.mcevgen.sync_cvmfs_db', 'sync_cvmfs_db', ([], {}), '()\n', (352, 354), False, 'from atlas.prodtask.mcevgen import sync_cvmfs_db\n'), ((300, 312), 'time.ctime', 'time.ctime', ([], {}), '()\n', (310, 312), False, 'import time\n'), ((524, 536), 'time.ctime', 'time.ctime', ([], {}), '()\n', (534, 536), False, 'import time\n')]
|
from django.http import Http404
from django.utils.translation import ugettext as _
from django.views.generic import DetailView, ListView
from django.views.generic.detail import SingleObjectMixin
from ..core.views import PaginatedListView
from .models import Account, Bookmark, BookmarkTag
class SingleAccountMixin(SingleObjectMixin):
"For views which list bookmarks and also need an Account object."
slug_field = "username"
slug_url_kwarg = "username"
def get(self, request, *args, **kwargs):
self.object = self.get_object(queryset=Account.objects.all())
return super().get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["account"] = self.object
return context
class HomeView(PaginatedListView):
"List all recent Bookmarks and all Accounts"
template_name = "pinboard/home.html"
queryset = Bookmark.public_objects.all().prefetch_related("account")
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["account_list"] = Account.objects.all()
return context
class ToreadListView(PaginatedListView):
template_name = "pinboard/toread_list.html"
queryset = Bookmark.public_toread_objects.all().prefetch_related("account")
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["account_list"] = Account.objects.all()
return context
class AccountDetailView(SingleAccountMixin, PaginatedListView):
"A single Pinboard Account and its Bookmarks."
template_name = "pinboard/account_detail.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["bookmark_list"] = context["object_list"]
return context
def get_queryset(self):
"Show all the public Bookmarks associated with this account."
return Bookmark.public_objects.filter(account=self.object).prefetch_related(
"account"
)
class AccountToreadView(SingleAccountMixin, PaginatedListView):
"A single Pinboard Account and its 'to read' Bookmarks."
template_name = "pinboard/account_toread.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["bookmark_list"] = context["object_list"]
return context
def get_queryset(self):
"Show all the public Bookmarks associated with this account."
return Bookmark.public_toread_objects.filter(
account=self.object
).prefetch_related("account")
class BookmarkDetailView(DetailView):
"A single Bookmark, from one Account"
model = Bookmark
# Only display public bookmarks; private ones will 404.
queryset = Bookmark.public_objects.all()
slug_field = "url_hash"
slug_url_kwarg = "hash"
class TagListView(ListView):
template_name = "pinboard/tag_list.html"
context_object_name = "tag_list"
def get_queryset(self):
return Bookmark.tags.most_common()[:100]
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["account_list"] = Account.objects.all()
return context
class TagDetailView(SingleObjectMixin, PaginatedListView):
"All Bookmarks with a certain tag from all Accounts"
template_name = "pinboard/tag_detail.html"
allow_empty = False
def get(self, request, *args, **kwargs):
self.object = self.get_object(queryset=BookmarkTag.objects.all())
return super().get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["tag"] = self.object
context["account_list"] = Account.objects.all()
context["bookmark_list"] = context["object_list"]
return context
def get_queryset(self):
"Show all the public Bookmarks associated with this tag."
return Bookmark.public_objects.filter(
tags__slug__in=[self.object.slug]
).prefetch_related("account")
class AccountTagDetailView(SingleAccountMixin, PaginatedListView):
"All Bookmarks with a certain Tag from one Account"
template_name = "pinboard/account_tag_detail.html"
allow_empty = False
def get(self, request, *args, **kwargs):
self.tag_object = self.get_tag_object()
return super().get(request, *args, **kwargs)
def get_tag_object(self):
"""Custom method for fetching the Tag."""
try:
obj = BookmarkTag.objects.get(slug=self.kwargs["tag_slug"])
except BookmarkTag.DoesNotExist:
raise Http404(_("No Tags found matching the query"))
return obj
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["tag"] = self.tag_object
context["bookmark_list"] = context["object_list"]
return context
def get_queryset(self):
"""Show all the public Bookmarks associated with this account."""
return Bookmark.public_objects.filter(
account=self.object, tags__slug__in=[self.kwargs["tag_slug"]]
)
|
[
"django.utils.translation.ugettext"
] |
[((4728, 4765), 'django.utils.translation.ugettext', '_', (['"""No Tags found matching the query"""'], {}), "('No Tags found matching the query')\n", (4729, 4765), True, 'from django.utils.translation import ugettext as _\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake, tools
import os
class ConanSqlite3(ConanFile):
name = "sqlite3"
version = "3.21.0"
year = "2017"
sha1 = "ebe33c20d37a715db95288010c1009cd560f2452"
description = "Self-contained, serverless, in-process SQL database engine."
url = "http://github.com/bincrafters/conan-sqlite3"
homepage = "https://www.sqlite.org"
license = "Public Domain"
generators = "cmake"
settings = "os", "compiler", "arch", "build_type"
exports = ["LICENSE.md"]
exports_sources = ["CMakeLists.txt", "FindSQLite3.cmake"]
options = {
"shared": [True, False],
"enable_json1": [True, False],
}
default_options = "shared=False", "enable_json1=False"
def configure(self):
del self.settings.compiler.libcxx
def source(self):
base_url = "https://www.sqlite.org/" + self.year
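        # sqlite encodes version 3.21.0 as 3210000 in its amalgamation archive names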
archive_name = "sqlite-amalgamation-" + self.version.replace(".","") + "000"
archive_ext = "zip"
download_url = "{0}/{1}.{2}".format(base_url, archive_name, archive_ext)
self.output.info("Attempting download of sources from: " + download_url)
tools.get(download_url, sha1=self.sha1)
os.rename(archive_name, "sources")
def build(self):
cmake = CMake(self)
cmake.definitions["CMAKE_POSITION_INDEPENDENT_CODE"] = "ON"
if self.options.shared:
cmake.definitions["BUILD_SHARED_LIBS"] = "ON"
cmake.definitions["ENABLE_JSON1"] = self.options.enable_json1
cmake.verbose = True
cmake.configure()
cmake.build()
def package(self):
self.copy("FindSQLite3.cmake", ".", ".")
self.copy("*.h", dst="include", src="sources")
self.copy(pattern="*.lib", dst="lib", keep_path=False)
self.copy(pattern="*.dll", dst="bin", keep_path=False)
self.copy(pattern="*.a", dst="lib", keep_path=False)
self.copy(pattern="*.pdb", dst="lib", keep_path=False)
self.copy(pattern="*.so", dst="lib", keep_path=False)
self.copy(pattern="*.dylib", dst="lib", keep_path=False)
def package_info(self):
self.cpp_info.libs = ['sqlite3']
if self.settings.os == "Linux":
self.cpp_info.libs.append("pthread")
self.cpp_info.libs.append("dl")
|
[
"conans.tools.get",
"os.rename",
"conans.CMake"
] |
[((1209, 1248), 'conans.tools.get', 'tools.get', (['download_url'], {'sha1': 'self.sha1'}), '(download_url, sha1=self.sha1)\n', (1218, 1248), False, 'from conans import ConanFile, CMake, tools\n'), ((1257, 1291), 'os.rename', 'os.rename', (['archive_name', '"""sources"""'], {}), "(archive_name, 'sources')\n", (1266, 1291), False, 'import os\n'), ((1330, 1341), 'conans.CMake', 'CMake', (['self'], {}), '(self)\n', (1335, 1341), False, 'from conans import ConanFile, CMake, tools\n')]
|
from s_vae.data.mnist import create_MNIST, vis_mnist
from s_vae.data.synthetic_hypersphere import create_synthetic_hypersphere
def create_dataset(config: dict, seed=0):
data_config = config['data']
name = data_config['name']
path = data_config['path']
train_ratio = data_config['train_ratio']
if name == 'MNIST':
return create_MNIST(config)
elif name == 'synth':
latent_dim = data_config['latent_dim']
observed_dim = data_config['observed_dim']
n_dev_samples = data_config['n_dev_samples']
n_test_samples = data_config['n_test_samples']
return create_synthetic_hypersphere(path, latent_dim, observed_dim, n_dev_samples, n_test_samples, train_ratio,
seed=seed)
else:
raise ValueError(f'{name} is not in datasets')
def dataset_vis_factory(name):
if name == 'MNIST':
return vis_mnist
elif name == 'synth':
return None
else:
raise ValueError(f'{name} is not in datasets')
|
[
"s_vae.data.synthetic_hypersphere.create_synthetic_hypersphere",
"s_vae.data.mnist.create_MNIST"
] |
[((351, 371), 's_vae.data.mnist.create_MNIST', 'create_MNIST', (['config'], {}), '(config)\n', (363, 371), False, 'from s_vae.data.mnist import create_MNIST, vis_mnist\n'), ((619, 738), 's_vae.data.synthetic_hypersphere.create_synthetic_hypersphere', 'create_synthetic_hypersphere', (['path', 'latent_dim', 'observed_dim', 'n_dev_samples', 'n_test_samples', 'train_ratio'], {'seed': 'seed'}), '(path, latent_dim, observed_dim, n_dev_samples,\n n_test_samples, train_ratio, seed=seed)\n', (647, 738), False, 'from s_vae.data.synthetic_hypersphere import create_synthetic_hypersphere\n')]
|
import numpy as np
import fvcore.nn.weight_init as weight_init
import torch
from torch import nn
from torch.nn import functional as F
from typing import Dict
from detectron2.layers import Conv2d, Linear, ShapeSpec, get_norm
from detectron2.modeling.roi_heads import ROI_BOX_HEAD_REGISTRY
from ..attention import SelfAttentionBlock, CrossAttentionBlock
from mvdnet.layers import Conv3d
@ROI_BOX_HEAD_REGISTRY.register()
class MVDNetBoxHead(nn.Module):
def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):
super().__init__()
num_fc = cfg.MODEL.ROI_BOX_HEAD.NUM_FC
fc_dim = cfg.MODEL.ROI_BOX_HEAD.FC_DIM
norm = cfg.MODEL.ROI_BOX_HEAD.NORM
self.history_on = cfg.INPUT.HISTORY_ON
self.num_history = cfg.INPUT.NUM_HISTORY+1
self.pooler_size = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION
assert num_fc > 0
for f in input_shape.keys():
if f.startswith("radar"):
self.radar_key = f
self.radar_output_size = input_shape[f].channels * input_shape[f].height * input_shape[f].width
self.radar_input_channels = input_shape[f].channels
elif f.startswith("lidar"):
self.lidar_key = f
self.lidar_output_size = input_shape[f].channels * input_shape[f].height * input_shape[f].width
self.lidar_input_channels = input_shape[f].channels
assert(self.lidar_output_size >= self.radar_output_size)
if self.lidar_output_size != self.radar_output_size:
self.match_conv = Conv2d(
in_channels = self.lidar_input_channels,
out_channels = self.radar_input_channels,
kernel_size = 3,
padding = 1,
bias = False,
norm = nn.BatchNorm2d(self.radar_input_channels),
activation = F.leaky_relu_
)
else:
self.match_conv = None
self.radar_self_attention = SelfAttentionBlock(self.radar_output_size)
self.lidar_self_attention = SelfAttentionBlock(self.radar_output_size)
self.radar_cross_attention = CrossAttentionBlock(self.radar_output_size)
self.lidar_cross_attention = CrossAttentionBlock(self.radar_output_size)
if self.history_on:
self.tnn1 = Conv3d(
in_channels = self.radar_input_channels*2,
out_channels = self.radar_input_channels,
kernel_size = [3, 3, 3],
padding = [1, 1, 1],
bias=False,
norm=nn.BatchNorm3d(self.radar_input_channels),
activation=F.leaky_relu_
)
self.tnn2 = Conv3d(
in_channels = self.radar_input_channels,
out_channels = self.radar_input_channels,
kernel_size = [3, 3, 3],
padding = [1, 1, 1],
bias=False,
norm=nn.BatchNorm3d(self.radar_input_channels),
activation=F.leaky_relu_
)
self.tnn3 = Conv3d(
in_channels = self.radar_input_channels,
out_channels = self.radar_input_channels,
kernel_size = [self.num_history, 3, 3],
padding = [0, 1, 1],
bias=False,
norm=nn.BatchNorm3d(self.radar_input_channels),
activation=F.leaky_relu_
)
self.tnns = [self.tnn1, self.tnn2, self.tnn3]
else:
self.tnn = Conv2d(
in_channels = self.radar_input_channels*2,
out_channels = self.radar_input_channels,
kernel_size = 3,
padding = 1,
bias=False,
norm=nn.BatchNorm2d(self.radar_input_channels),
activation=F.leaky_relu_
)
self._output_size = self.radar_output_size
self.fcs = []
for k in range(num_fc):
fc = Linear(self._output_size, fc_dim)
self.add_module("fc{}".format(k + 1), fc)
self.fcs.append(fc)
self._output_size = fc_dim
for layer in self.fcs:
weight_init.c2_xavier_fill(layer)
if self.match_conv is not None:
weight_init.c2_msra_fill(self.match_conv)
if self.history_on:
for layer in self.tnns:
weight_init.c2_msra_fill(layer)
else:
weight_init.c2_msra_fill(self.tnn)
def forward(self, x):
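        # Fusion pipeline: flatten each modality's ROI features, apply
        # self-attention within a modality and cross-attention between
        # modalities, then concatenate radar/lidar maps channel-wise before
        # the temporal (3D) or single-frame (2D) fusion convs.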
radar_features = x[self.radar_key]
lidar_features = x[self.lidar_key]
if self.history_on:
fusion_feature = []
for radar_x, lidar_x in zip(radar_features, lidar_features):
if self.match_conv is not None:
lidar_x = self.match_conv(lidar_x)
radar_x = torch.flatten(radar_x, start_dim=1)
lidar_x = torch.flatten(lidar_x, start_dim=1)
radar_x = self.radar_self_attention(radar_x)
lidar_x = self.lidar_self_attention(lidar_x)
radar_y = self.radar_cross_attention([radar_x, lidar_x])
lidar_y = self.lidar_cross_attention([lidar_x, radar_x])
radar_y = radar_y.reshape(-1, self.radar_input_channels,
self.pooler_size, self.pooler_size)
lidar_y = lidar_y.reshape(-1, self.radar_input_channels,
self.pooler_size, self.pooler_size)
feature_x = torch.cat([radar_y, lidar_y], dim=1)
fusion_feature.append(feature_x)
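            # Stack per-frame maps: (T, N, 2C, H, W) -> (N, 2C, T, H, W) for Conv3d.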
            fusion_feature = torch.stack(fusion_feature).permute(1, 2, 0, 3, 4).contiguous()
for layer in self.tnns:
fusion_feature = layer(fusion_feature)
fusion_feature = torch.flatten(fusion_feature, start_dim=1)
else:
if self.match_conv is not None:
lidar_features = self.match_conv(lidar_features)
radar_x = torch.flatten(radar_features, start_dim=1)
lidar_x = torch.flatten(lidar_features, start_dim=1)
radar_x = self.radar_self_attention(radar_x)
lidar_x = self.lidar_self_attention(lidar_x)
radar_y = self.radar_cross_attention([radar_x, lidar_x])
lidar_y = self.lidar_cross_attention([lidar_x, radar_x])
radar_y = radar_y.reshape(-1, self.radar_input_channels,
self.pooler_size, self.pooler_size)
lidar_y = lidar_y.reshape(-1, self.radar_input_channels,
self.pooler_size, self.pooler_size)
feature_x = torch.cat([radar_y, lidar_y], dim=1)
feature_x = self.tnn(feature_x)
fusion_feature = torch.flatten(feature_x, start_dim=1)
for layer in self.fcs:
fusion_feature = F.leaky_relu_(layer(fusion_feature))
return fusion_feature
@property
def output_size(self):
return self._output_size
|
[
"torch.flatten",
"torch.nn.BatchNorm3d",
"torch.stack",
"fvcore.nn.weight_init.c2_xavier_fill",
"detectron2.modeling.roi_heads.ROI_BOX_HEAD_REGISTRY.register",
"torch.cat",
"detectron2.layers.Linear",
"torch.nn.BatchNorm2d",
"fvcore.nn.weight_init.c2_msra_fill"
] |
[((388, 420), 'detectron2.modeling.roi_heads.ROI_BOX_HEAD_REGISTRY.register', 'ROI_BOX_HEAD_REGISTRY.register', ([], {}), '()\n', (418, 420), False, 'from detectron2.modeling.roi_heads import ROI_BOX_HEAD_REGISTRY\n'), ((4020, 4053), 'detectron2.layers.Linear', 'Linear', (['self._output_size', 'fc_dim'], {}), '(self._output_size, fc_dim)\n', (4026, 4053), False, 'from detectron2.layers import Conv2d, Linear, ShapeSpec, get_norm\n'), ((4223, 4256), 'fvcore.nn.weight_init.c2_xavier_fill', 'weight_init.c2_xavier_fill', (['layer'], {}), '(layer)\n', (4249, 4256), True, 'import fvcore.nn.weight_init as weight_init\n'), ((4309, 4350), 'fvcore.nn.weight_init.c2_msra_fill', 'weight_init.c2_msra_fill', (['self.match_conv'], {}), '(self.match_conv)\n', (4333, 4350), True, 'import fvcore.nn.weight_init as weight_init\n'), ((4489, 4523), 'fvcore.nn.weight_init.c2_msra_fill', 'weight_init.c2_msra_fill', (['self.tnn'], {}), '(self.tnn)\n', (4513, 4523), True, 'import fvcore.nn.weight_init as weight_init\n'), ((5847, 5889), 'torch.flatten', 'torch.flatten', (['fusion_feature'], {'start_dim': '(1)'}), '(fusion_feature, start_dim=1)\n', (5860, 5889), False, 'import torch\n'), ((6035, 6077), 'torch.flatten', 'torch.flatten', (['radar_features'], {'start_dim': '(1)'}), '(radar_features, start_dim=1)\n', (6048, 6077), False, 'import torch\n'), ((6100, 6142), 'torch.flatten', 'torch.flatten', (['lidar_features'], {'start_dim': '(1)'}), '(lidar_features, start_dim=1)\n', (6113, 6142), False, 'import torch\n'), ((6661, 6697), 'torch.cat', 'torch.cat', (['[radar_y, lidar_y]'], {'dim': '(1)'}), '([radar_y, lidar_y], dim=1)\n', (6670, 6697), False, 'import torch\n'), ((6771, 6808), 'torch.flatten', 'torch.flatten', (['feature_x'], {'start_dim': '(1)'}), '(feature_x, start_dim=1)\n', (6784, 6808), False, 'import torch\n'), ((4431, 4462), 'fvcore.nn.weight_init.c2_msra_fill', 'weight_init.c2_msra_fill', (['layer'], {}), '(layer)\n', (4455, 4462), True, 'import fvcore.nn.weight_init as weight_init\n'), ((4900, 4935), 'torch.flatten', 'torch.flatten', (['radar_x'], {'start_dim': '(1)'}), '(radar_x, start_dim=1)\n', (4913, 4935), False, 'import torch\n'), ((4962, 4997), 'torch.flatten', 'torch.flatten', (['lidar_x'], {'start_dim': '(1)'}), '(lidar_x, start_dim=1)\n', (4975, 4997), False, 'import torch\n'), ((5552, 5588), 'torch.cat', 'torch.cat', (['[radar_y, lidar_y]'], {'dim': '(1)'}), '([radar_y, lidar_y], dim=1)\n', (5561, 5588), False, 'import torch\n'), ((1829, 1870), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['self.radar_input_channels'], {}), '(self.radar_input_channels)\n', (1843, 1870), False, 'from torch import nn\n'), ((2603, 2644), 'torch.nn.BatchNorm3d', 'nn.BatchNorm3d', (['self.radar_input_channels'], {}), '(self.radar_input_channels)\n', (2617, 2644), False, 'from torch import nn\n'), ((2975, 3016), 'torch.nn.BatchNorm3d', 'nn.BatchNorm3d', (['self.radar_input_channels'], {}), '(self.radar_input_channels)\n', (2989, 3016), False, 'from torch import nn\n'), ((3362, 3403), 'torch.nn.BatchNorm3d', 'nn.BatchNorm3d', (['self.radar_input_channels'], {}), '(self.radar_input_channels)\n', (3376, 3403), False, 'from torch import nn\n'), ((3791, 3832), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['self.radar_input_channels'], {}), '(self.radar_input_channels)\n', (3805, 3832), False, 'from torch import nn\n'), ((5667, 5694), 'torch.stack', 'torch.stack', (['fusion_feature'], {}), '(fusion_feature)\n', (5678, 5694), False, 'import torch\n')]
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: mlagents/envs/communicator_objects/brain_parameters_proto.proto
import sys
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from mlagents.envs.communicator_objects import (
resolution_proto_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_resolution__proto__pb2,
)
from mlagents.envs.communicator_objects import (
space_type_proto_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_space__type__proto__pb2,
)
DESCRIPTOR = _descriptor.FileDescriptor(
name="mlagents/envs/communicator_objects/brain_parameters_proto.proto",
package="communicator_objects",
syntax="proto3",
serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"),
serialized_pb=_b(
'\n?mlagents/envs/communicator_objects/brain_parameters_proto.proto\x12\x14\x63ommunicator_objects\x1a\x39mlagents/envs/communicator_objects/resolution_proto.proto\x1a\x39mlagents/envs/communicator_objects/space_type_proto.proto"\xd4\x02\n\x14\x42rainParametersProto\x12\x1f\n\x17vector_observation_size\x18\x01 \x01(\x05\x12\'\n\x1fnum_stacked_vector_observations\x18\x02 \x01(\x05\x12\x1a\n\x12vector_action_size\x18\x03 \x03(\x05\x12\x41\n\x12\x63\x61mera_resolutions\x18\x04 \x03(\x0b\x32%.communicator_objects.ResolutionProto\x12"\n\x1avector_action_descriptions\x18\x05 \x03(\t\x12\x46\n\x18vector_action_space_type\x18\x06 \x01(\x0e\x32$.communicator_objects.SpaceTypeProto\x12\x12\n\nbrain_name\x18\x07 \x01(\t\x12\x13\n\x0bis_training\x18\x08 \x01(\x08\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3'
),
dependencies=[
mlagents_dot_envs_dot_communicator__objects_dot_resolution__proto__pb2.DESCRIPTOR,
mlagents_dot_envs_dot_communicator__objects_dot_space__type__proto__pb2.DESCRIPTOR,
],
)
_BRAINPARAMETERSPROTO = _descriptor.Descriptor(
name="BrainParametersProto",
full_name="communicator_objects.BrainParametersProto",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="vector_observation_size",
full_name="communicator_objects.BrainParametersProto.vector_observation_size",
index=0,
number=1,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="num_stacked_vector_observations",
full_name="communicator_objects.BrainParametersProto.num_stacked_vector_observations",
index=1,
number=2,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="vector_action_size",
full_name="communicator_objects.BrainParametersProto.vector_action_size",
index=2,
number=3,
type=5,
cpp_type=1,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="camera_resolutions",
full_name="communicator_objects.BrainParametersProto.camera_resolutions",
index=3,
number=4,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="vector_action_descriptions",
full_name="communicator_objects.BrainParametersProto.vector_action_descriptions",
index=4,
number=5,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="vector_action_space_type",
full_name="communicator_objects.BrainParametersProto.vector_action_space_type",
index=5,
number=6,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="brain_name",
full_name="communicator_objects.BrainParametersProto.brain_name",
index=6,
number=7,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="is_training",
full_name="communicator_objects.BrainParametersProto.is_training",
index=7,
number=8,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=208,
serialized_end=548,
)
_BRAINPARAMETERSPROTO.fields_by_name[
"camera_resolutions"
].message_type = (
mlagents_dot_envs_dot_communicator__objects_dot_resolution__proto__pb2._RESOLUTIONPROTO
)
_BRAINPARAMETERSPROTO.fields_by_name[
"vector_action_space_type"
].enum_type = (
mlagents_dot_envs_dot_communicator__objects_dot_space__type__proto__pb2._SPACETYPEPROTO
)
DESCRIPTOR.message_types_by_name["BrainParametersProto"] = _BRAINPARAMETERSPROTO
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
BrainParametersProto = _reflection.GeneratedProtocolMessageType(
"BrainParametersProto",
(_message.Message,),
dict(
DESCRIPTOR=_BRAINPARAMETERSPROTO,
__module__="mlagents.envs.communicator_objects.brain_parameters_proto_pb2"
# @@protoc_insertion_point(class_scope:communicator_objects.BrainParametersProto)
),
)
_sym_db.RegisterMessage(BrainParametersProto)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
|
[
"google.protobuf.symbol_database.Default",
"google.protobuf.descriptor.FieldDescriptor"
] |
[((519, 545), 'google.protobuf.symbol_database.Default', '_symbol_database.Default', ([], {}), '()\n', (543, 545), True, 'from google.protobuf import symbol_database as _symbol_database\n'), ((2397, 2788), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""vector_observation_size"""', 'full_name': '"""communicator_objects.BrainParametersProto.vector_observation_size"""', 'index': '(0)', 'number': '(1)', 'type': '(5)', 'cpp_type': '(1)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='vector_observation_size', full_name=\n 'communicator_objects.BrainParametersProto.vector_observation_size',\n index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False,\n default_value=0, message_type=None, enum_type=None, containing_type=\n None, is_extension=False, extension_scope=None, serialized_options=None,\n file=DESCRIPTOR)\n", (2424, 2788), True, 'from google.protobuf import descriptor as _descriptor\n'), ((2979, 3391), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""num_stacked_vector_observations"""', 'full_name': '"""communicator_objects.BrainParametersProto.num_stacked_vector_observations"""', 'index': '(1)', 'number': '(2)', 'type': '(5)', 'cpp_type': '(1)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='num_stacked_vector_observations',\n full_name=\n 'communicator_objects.BrainParametersProto.num_stacked_vector_observations'\n , index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=\n False, default_value=0, message_type=None, enum_type=None,\n containing_type=None, is_extension=False, extension_scope=None,\n serialized_options=None, file=DESCRIPTOR)\n", (3006, 3391), True, 'from google.protobuf import descriptor as _descriptor\n'), ((3577, 3959), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""vector_action_size"""', 'full_name': '"""communicator_objects.BrainParametersProto.vector_action_size"""', 'index': '(2)', 'number': '(3)', 'type': '(5)', 'cpp_type': '(1)', 'label': '(3)', 'has_default_value': '(False)', 'default_value': '[]', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='vector_action_size', full_name=\n 'communicator_objects.BrainParametersProto.vector_action_size', index=2,\n number=3, type=5, cpp_type=1, label=3, has_default_value=False,\n default_value=[], message_type=None, enum_type=None, containing_type=\n None, is_extension=False, extension_scope=None, serialized_options=None,\n file=DESCRIPTOR)\n", (3604, 3959), True, 'from google.protobuf import descriptor as _descriptor\n'), ((4150, 4534), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""camera_resolutions"""', 'full_name': '"""communicator_objects.BrainParametersProto.camera_resolutions"""', 'index': '(3)', 'number': '(4)', 'type': '(11)', 'cpp_type': '(10)', 'label': '(3)', 'has_default_value': '(False)', 'default_value': '[]', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='camera_resolutions', full_name=\n 'communicator_objects.BrainParametersProto.camera_resolutions', index=3,\n number=4, type=11, cpp_type=10, label=3, has_default_value=False,\n default_value=[], message_type=None, enum_type=None, containing_type=\n None, is_extension=False, extension_scope=None, serialized_options=None,\n file=DESCRIPTOR)\n", (4177, 4534), True, 'from google.protobuf import descriptor as _descriptor\n'), ((4725, 5123), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""vector_action_descriptions"""', 'full_name': '"""communicator_objects.BrainParametersProto.vector_action_descriptions"""', 'index': '(4)', 'number': '(5)', 'type': '(9)', 'cpp_type': '(9)', 'label': '(3)', 'has_default_value': '(False)', 'default_value': '[]', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='vector_action_descriptions', full_name=\n 'communicator_objects.BrainParametersProto.vector_action_descriptions',\n index=4, number=5, type=9, cpp_type=9, label=3, has_default_value=False,\n default_value=[], message_type=None, enum_type=None, containing_type=\n None, is_extension=False, extension_scope=None, serialized_options=None,\n file=DESCRIPTOR)\n", (4752, 5123), True, 'from google.protobuf import descriptor as _descriptor\n'), ((5314, 5708), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""vector_action_space_type"""', 'full_name': '"""communicator_objects.BrainParametersProto.vector_action_space_type"""', 'index': '(5)', 'number': '(6)', 'type': '(14)', 'cpp_type': '(8)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='vector_action_space_type', full_name=\n 'communicator_objects.BrainParametersProto.vector_action_space_type',\n index=5, number=6, type=14, cpp_type=8, label=1, has_default_value=\n False, default_value=0, message_type=None, enum_type=None,\n containing_type=None, is_extension=False, extension_scope=None,\n serialized_options=None, file=DESCRIPTOR)\n", (5341, 5708), True, 'from google.protobuf import descriptor as _descriptor\n'), ((6476, 6848), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""is_training"""', 'full_name': '"""communicator_objects.BrainParametersProto.is_training"""', 'index': '(7)', 'number': '(8)', 'type': '(8)', 'cpp_type': '(7)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(False)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='is_training', full_name=\n 'communicator_objects.BrainParametersProto.is_training', index=7,\n number=8, type=8, cpp_type=7, label=1, has_default_value=False,\n default_value=False, message_type=None, enum_type=None, containing_type\n =None, is_extension=False, extension_scope=None, serialized_options=\n None, file=DESCRIPTOR)\n", (6503, 6848), True, 'from google.protobuf import descriptor as _descriptor\n')]
|
from setuptools import Extension, setup
from Cython.Build import cythonize
import numpy as np
import os.path as osp
__version__ = '1.1.4'
url = 'https://github.com/jannessm/quadric-mesh-simplification'
files = [
'simplify.c',
'array.c',
'clean_mesh.c',
'contract_pair.c',
'edges.c',
'maths.c',
'mesh_inversion.c',
'pair_heap.c',
'pair.c',
'preserve_bounds.c',
'q.c',
'targets.c',
'upper_tri.c',
'valid_pairs.c',
'test_utils.c'
]
src_path = osp.join(osp.dirname(osp.abspath(__file__)), 'quad_mesh_simplify')
ext_modules = [
Extension(
'simplify',
        [osp.join(src_path, 'c', f) for f in files] + [osp.join(src_path, 'simplify.pyx')],
# extra_compile_args=['-fopenmp'],
# extra_link_args=['-fopenmp'],
include_dirs=[np.get_include()],
define_macros=[("NPY_NO_DEPRECATED_API", "NPY_1_17_API_VERSION")],
),
]
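# Compile the .pyx entry point together with the C sources into one extension.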
ext_modules = cythonize(ext_modules)
with open("README.md", "r") as fh:
long_description = fh.read()
def parse_requirements(filename):
"""Load requirements from a pip requirements file."""
lineiter = (line.strip() for line in open(filename))
return [line for line in lineiter if line and not line.startswith("#")]
setup(
name='quad_mesh_simplify',
version=__version__,
author='<NAME>',
url=url,
description="Simplify meshes including vertex features.",
long_description=long_description,
long_description_content_type="text/markdown",
install_requires=parse_requirements("requirements.txt"),
python_requires=">=3.6.3",
ext_modules=ext_modules,
zip_safe=False,
)
|
[
"os.path.abspath",
"Cython.Build.cythonize",
"os.path.join",
"numpy.get_include"
] |
[((850, 872), 'Cython.Build.cythonize', 'cythonize', (['ext_modules'], {}), '(ext_modules)\n', (859, 872), False, 'from Cython.Build import cythonize\n'), ((480, 501), 'os.path.abspath', 'osp.abspath', (['__file__'], {}), '(__file__)\n', (491, 501), True, 'import os.path as osp\n'), ((572, 598), 'os.path.join', 'osp.join', (['src_path', '"""c"""', 'f'], {}), "(src_path, 'c', f)\n", (580, 598), True, 'import os.path as osp\n'), ((618, 652), 'os.path.join', 'osp.join', (['src_path', '"""simplify.pyx"""'], {}), "(src_path, 'simplify.pyx')\n", (626, 652), True, 'import os.path as osp\n'), ((741, 757), 'numpy.get_include', 'np.get_include', ([], {}), '()\n', (755, 757), True, 'import numpy as np\n')]
|
import tensorflow as tf
from tensorflow.python.keras import activations
class CoralOrdinal(tf.keras.layers.Layer):
# We skip input_dim/input_shape here and put in the build() method as recommended in the tutorial,
# in case the user doesn't know the input dimensions when defining the model.
  def __init__(self, num_classes, activation=None, **kwargs):
""" Ordinal output layer, which produces ordinal logits by default.
Args:
num_classes: how many ranks (aka labels or values) are in the ordinal variable.
activation: (Optional) Activation function to use. The default of None produces
ordinal logits, but passing "ordinal_softmax" will cause the layer to output
a probability prediction for each label.
"""
# Via Dense Layer code:
# https://github.com/tensorflow/tensorflow/blob/v2.2.0/tensorflow/python/keras/layers/core.py#L1128
if 'input_shape' not in kwargs and 'input_dim' in kwargs:
kwargs['input_shape'] = (kwargs.pop('input_dim'),)
# Pass any additional keyword arguments to Layer() (i.e. name, dtype)
super(CoralOrdinal, self).__init__(**kwargs)
self.num_classes = num_classes
self.activation = activations.get(activation)
# Following https://www.tensorflow.org/guide/keras/custom_layers_and_models#best_practice_deferring_weight_creation_until_the_shape_of_the_inputs_is_known
def build(self, input_shape):
# Single fully-connected neuron - this is the latent variable.
num_units = 1
# I believe glorot_uniform (aka Xavier uniform) is pytorch's default initializer, per
# https://pytorch.org/docs/master/generated/torch.nn.Linear.html
# and https://www.tensorflow.org/api_docs/python/tf/keras/initializers/GlorotUniform
    self.fc = self.add_weight(
        shape=(input_shape[-1], num_units),
        # Need a unique name if there are multiple coral_ordinal layers.
        name=self.name + "_latent",
        initializer='glorot_uniform',
        # Not sure if this is necessary:
        dtype=tf.float32,
        trainable=True,
    )
    # num_classes - 1 bias terms, defaulting to 0.
    self.linear_1_bias = self.add_weight(
        shape=(self.num_classes - 1,),
        # Need a unique name if there are multiple coral_ordinal layers.
        name=self.name + "_bias",
        initializer='zeros',
        # Not sure if this is necessary:
        dtype=tf.float32,
        trainable=True,
    )
# This defines the forward pass.
def call(self, inputs):
fc_inputs = tf.matmul(inputs, self.fc)
logits = fc_inputs + self.linear_1_bias
if self.activation is None:
outputs = logits
else:
# Not yet tested:
outputs = self.activation(logits)
return outputs
  # Enables serialization of the layer (the config round-trips through get_config).
# https://www.tensorflow.org/guide/keras/custom_layers_and_models#you_can_optionally_enable_serialization_on_your_layers
def get_config(self):
config = super(CoralOrdinal, self).get_config()
config.update({'num_classes': self.num_classes})
return config
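# A minimal smoke-test sketch (hypothetical model and shapes, not part of the
# original layer module): CoralOrdinal emits num_classes - 1 ordinal logits.
if __name__ == "__main__":
  model = tf.keras.Sequential([
    tf.keras.layers.Dense(32, activation="relu", input_shape=(10,)),
    CoralOrdinal(num_classes=5),
  ])
  print(model(tf.zeros((2, 10))).shape)  # expected: (2, 4)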
|
[
"tensorflow.matmul",
"tensorflow.python.keras.activations.get"
] |
[((1200, 1227), 'tensorflow.python.keras.activations.get', 'activations.get', (['activation'], {}), '(activation)\n', (1215, 1227), False, 'from tensorflow.python.keras import activations\n'), ((2808, 2834), 'tensorflow.matmul', 'tf.matmul', (['inputs', 'self.fc'], {}), '(inputs, self.fc)\n', (2817, 2834), True, 'import tensorflow as tf\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 9 17:53:39 2019
@author: alankar
"""
import numpy as np
import matplotlib.pyplot as plt
from scipy.special import p_roots #Legendre Polynomial roots (public import path; scipy.special.orthogonal is private)
from scipy import constants
def gauss_quad(func, a, b, n, *args):  # Gauss-Legendre quadrature
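    # x are the roots of the Legendre polynomial P_{n+1}, w the quadrature
    # weights; the affine map rescales [-1, 1] to [a, b].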
    x, w = p_roots(n + 1)
    I_G = 0.5*(b - a)*np.sum(w*func(0.5*(b - a)*x + 0.5*(b + a), *args))
    return I_G
V = 1000*1e-6  # m^3
rho = 6.022e28  # m^-3
thetaD = 428  # K
def CV(T):
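    # Debye model: C_V = 9*V*rho*k_B*(T/theta_D)^3 * integral_0^{theta_D/T} x^4*e^x/(e^x - 1)^2 dx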
N = 50
    f = lambda x: (x**4*np.exp(x))/(np.exp(x) - 1)**2
return 9*V*rho*constants.k*(T/thetaD)**3*gauss_quad(f,0,thetaD/T,N)
Temperature = np.linspace(5, 500, 1000)
Heat_cap = np.array([CV(T) for T in Temperature])
plt.figure(figsize=(13, 10))
plt.plot(Temperature, Heat_cap)
plt.grid()
plt.title(r'Debye Heat capacity $C_V(T)$ in a solid', size=25, y=1.02)
plt.xlabel(r'$T$', size=22)
plt.ylabel(r'$C_V(T)$', size=22)
plt.tick_params(axis='both', which='major', labelsize=15)
plt.tick_params(axis='both', which='minor', labelsize=12)
plt.savefig('9.png')
plt.show()
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"scipy.special.orthogonal.p_roots",
"matplotlib.pyplot.figure",
"numpy.exp",
"numpy.linspace",
"matplotlib.pyplot.tick_params",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.savefig"
] |
[((634, 659), 'numpy.linspace', 'np.linspace', (['(5)', '(500)', '(1000)'], {}), '(5, 500, 1000)\n', (645, 659), True, 'import numpy as np\n'), ((712, 740), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(13, 10)'}), '(figsize=(13, 10))\n', (722, 740), True, 'import matplotlib.pyplot as plt\n'), ((740, 771), 'matplotlib.pyplot.plot', 'plt.plot', (['Temperature', 'Heat_cap'], {}), '(Temperature, Heat_cap)\n', (748, 771), True, 'import matplotlib.pyplot as plt\n'), ((771, 781), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (779, 781), True, 'import matplotlib.pyplot as plt\n'), ((782, 851), 'matplotlib.pyplot.title', 'plt.title', (['"""Debye Heat capacity $C_V(T)$ in a solid"""'], {'size': '(25)', 'y': '(1.02)'}), "('Debye Heat capacity $C_V(T)$ in a solid', size=25, y=1.02)\n", (791, 851), True, 'import matplotlib.pyplot as plt\n'), ((851, 877), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$T$"""'], {'size': '(22)'}), "('$T$', size=22)\n", (861, 877), True, 'import matplotlib.pyplot as plt\n'), ((878, 909), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$C_V(T)$"""'], {'size': '(22)'}), "('$C_V(T)$', size=22)\n", (888, 909), True, 'import matplotlib.pyplot as plt\n'), ((910, 967), 'matplotlib.pyplot.tick_params', 'plt.tick_params', ([], {'axis': '"""both"""', 'which': '"""major"""', 'labelsize': '(15)'}), "(axis='both', which='major', labelsize=15)\n", (925, 967), True, 'import matplotlib.pyplot as plt\n'), ((968, 1025), 'matplotlib.pyplot.tick_params', 'plt.tick_params', ([], {'axis': '"""both"""', 'which': '"""minor"""', 'labelsize': '(12)'}), "(axis='both', which='minor', labelsize=12)\n", (983, 1025), True, 'import matplotlib.pyplot as plt\n'), ((1026, 1046), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""9.png"""'], {}), "('9.png')\n", (1037, 1046), True, 'import matplotlib.pyplot as plt\n'), ((1047, 1057), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1055, 1057), True, 'import matplotlib.pyplot as plt\n'), ((317, 331), 'scipy.special.orthogonal.p_roots', 'p_roots', (['(n + 1)'], {}), '(n + 1)\n', (324, 331), False, 'from scipy.special.orthogonal import p_roots\n'), ((513, 522), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (519, 522), True, 'import numpy as np\n'), ((525, 534), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (531, 534), True, 'import numpy as np\n')]
|
from django.contrib import admin
from django import forms
from django.db.models import Sum
from django.shortcuts import redirect
from django.http import HttpResponseRedirect
from .models import *
from .forms import *
# Register your models here.
class RelationalProductInline(admin.TabularInline):
search_fields = ['product']
model = SalesOrder.product.through
verbose_name = '商品名稱'
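    # verbose_name '商品名稱' = "Product name"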
form = RelationalProductForm
extra = 2
fields = ('product', 'retail_price', 'number', 'discount','total')
readonly_fields = ('retail_price', 'total',)
suit_classes = 'suit-tab suit-tab-general'
autocomplete_fields = ['product']
@admin.register(Product)
class ProductAdmin(admin.ModelAdmin):
def get_sales_volume(self, obj):
return sum(obj.salesorder_set.through.objects.filter(product=obj).values_list('number', flat=True))
def get_inventory_volume(self, obj):
return obj.purchase_volume - sum(obj.salesorder_set.through.objects.filter(product=obj).values_list('number', flat=True))
# list_display = ('name', 'retail_price', 'special_price', 'purchase_volume', 'sales_volume', 'inventory_volume')
search_fields = ['name']
list_display = ('name', 'retail_price', 'purchase_volume', 'get_sales_volume', 'get_inventory_volume')
get_sales_volume.short_description = '銷售量'
get_inventory_volume.short_description = '庫存量'
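    # short_description strings: '銷售量' = "sales volume", '庫存量' = "inventory volume"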
@admin.register(SalesOrder)
class SalesOrderAdmin(admin.ModelAdmin):
def get_product(self, obj):
return "、".join([p.name for p in obj.product.all()])
list_display = ('order_id', 'customer', 'get_product', 'date')
form = SalesOrderForm
inlines = [RelationalProductInline,]
change_form_template = "admin/product/export_changeform.html"
autocomplete_fields = ['customer']
    # def response_add(self, request, obj, post_url_continue=None):
    #     return redirect(f'/product/export/{obj.order_id}')
def response_change(self, request, obj):
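        # "_export" is an extra submit button (presumably rendered by the
        # custom change_form_template); on press, save and jump to the export view.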
if "_export" in request.POST:
obj.save()
return redirect(f'/product/export/?order_id={obj.order_id}')
return super().response_change(request, obj)
@admin.register(Customer)
class CustomerAdmin(admin.ModelAdmin):
search_fields = ['name']
list_display = ('name', 'tax_id', 'phone', 'address')
|
[
"django.shortcuts.redirect",
"django.contrib.admin.register"
] |
[((657, 680), 'django.contrib.admin.register', 'admin.register', (['Product'], {}), '(Product)\n', (671, 680), False, 'from django.contrib import admin\n'), ((1403, 1429), 'django.contrib.admin.register', 'admin.register', (['SalesOrder'], {}), '(SalesOrder)\n', (1417, 1429), False, 'from django.contrib import admin\n'), ((2193, 2217), 'django.contrib.admin.register', 'admin.register', (['Customer'], {}), '(Customer)\n', (2207, 2217), False, 'from django.contrib import admin\n'), ((2071, 2124), 'django.shortcuts.redirect', 'redirect', (['f"""/product/export/?order_id={obj.order_id}"""'], {}), "(f'/product/export/?order_id={obj.order_id}')\n", (2079, 2124), False, 'from django.shortcuts import redirect\n')]
|
import types
import sys
import sympy
from .equation import *
from .pick import *
from .calculus import *
from functools import wraps
this_module = sys.modules[__name__]
def _get_imported_names(module):
names = module.__all__ if hasattr(module, '__all__') else dir(module)
return [name for name in names if not name.startswith('_')]
def _wrap_function(func):
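    # If the first positional argument is an Eq, delegate to Eq.apply so the
    # wrapped sympy function acts on both sides; otherwise call it directly.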
@wraps(func)
def f(*args, **kwargs):
if isinstance(args[0], Eq):
if len(args) > 1:
other_args = tuple(args[1:])
else:
other_args = ()
return args[0].apply('both', func, *other_args, **kwargs)
else:
return func(*args, **kwargs)
return f
_names_from_sympy = _get_imported_names(sympy)
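# Re-export every public sympy callable from this module, wrapped so that it
# also works when handed an Eq (the function is applied to both sides).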
for name in _names_from_sympy:
obj = getattr(sympy, name)
if isinstance(obj, types.FunctionType) or isinstance(obj, sympy.FunctionClass):
setattr(this_module, name, _wrap_function(obj))
|
[
"functools.wraps"
] |
[((365, 376), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (370, 376), False, 'from functools import wraps\n')]
|
import os
import sys
from synchronizers.new_base.SyncInstanceUsingAnsible import SyncInstanceUsingAnsible
from synchronizers.new_base.modelaccessor import *
from xos.logger import Logger, logging
parentdir = os.path.join(os.path.dirname(__file__), "..")
sys.path.insert(0, parentdir)
logger = Logger(level=logging.INFO)
class SyncExampleTenant(SyncInstanceUsingAnsible):
provides = [ExampleTenant]
observes = ExampleTenant
requested_interval = 0
template_name = "exampletenant_playbook.yaml"
service_key_name = "/opt/xos/synchronizers/exampleservice/exampleservice_private_key"
    watches = [ModelLink(CoarseTenant, via='coarsetenant'), ModelLink(ServiceMonitoringAgentInfo, via='monitoringagentinfo')]
def __init__(self, *args, **kwargs):
super(SyncExampleTenant, self).__init__(*args, **kwargs)
def get_exampleservice(self, o):
if not o.provider_service:
return None
exampleservice = ExampleService.objects.filter(id=o.provider_service.id)
if not exampleservice:
return None
return exampleservice[0]
# Gets the attributes that are used by the Ansible template but are not
# part of the set of default attributes.
def get_extra_attributes(self, o):
fields = {}
fields['tenant_message'] = o.tenant_message
exampleservice = self.get_exampleservice(o)
fields['service_message'] = exampleservice.service_message
return fields
def delete_record(self, port):
# Nothing needs to be done to delete an exampleservice; it goes away
# when the instance holding the exampleservice is deleted.
pass
def handle_service_monitoringagentinfo_watch_notification(self, monitoring_agent_info):
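        # Watch callback: validate the notification, find the ExampleTenant
        # whose provider service sent it, and re-run the monitoring-agent
        # playbook on that tenant's instance with the new target_uri.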
if not monitoring_agent_info.service:
logger.info("handle watch notifications for service monitoring agent info...ignoring because service attribute in monitoring agent info:%s is null" % (monitoring_agent_info))
return
if not monitoring_agent_info.target_uri:
logger.info("handle watch notifications for service monitoring agent info...ignoring because target_uri attribute in monitoring agent info:%s is null" % (monitoring_agent_info))
return
objs = ExampleTenant.objects.all()
for obj in objs:
if obj.provider_service.id != monitoring_agent_info.service.id:
logger.info("handle watch notifications for service monitoring agent info...ignoring because service attribute in monitoring agent info:%s is not matching" % (monitoring_agent_info))
return
instance = self.get_instance(obj)
if not instance:
logger.warn("handle watch notifications for service monitoring agent info...: No valid instance found for object %s" % (str(obj)))
return
logger.info("handling watch notification for monitoring agent info:%s for ExampleTenant object:%s" % (monitoring_agent_info, obj))
#Run ansible playbook to update the routing table entries in the instance
fields = self.get_ansible_fields(instance)
fields["ansible_tag"] = obj.__class__.__name__ + "_" + str(obj.id) + "_monitoring"
fields["target_uri"] = monitoring_agent_info.target_uri
template_name = "monitoring_agent.yaml"
super(SyncExampleTenant, self).run_playbook(obj, fields, template_name)
|
[
"os.path.dirname",
"sys.path.insert",
"xos.logger.Logger"
] |
[((255, 284), 'sys.path.insert', 'sys.path.insert', (['(0)', 'parentdir'], {}), '(0, parentdir)\n', (270, 284), False, 'import sys\n'), ((295, 321), 'xos.logger.Logger', 'Logger', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (301, 321), False, 'from xos.logger import Logger, logging\n'), ((222, 247), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (237, 247), False, 'import os\n')]
|
"""
Component Model
"""
# Third Party Library
from django.db import models
class ComponentGroup(models.Model):
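    # A named group of components; `uptime` records whether uptime tracking
    # is ON or OFF for the group.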
UPTIME_CHOICES = (
('on', 'ON'),
('off', 'OFF')
)
name = models.CharField(max_length=100, verbose_name="Name")
description = models.CharField(max_length=200, verbose_name="Description")
uptime = models.CharField(max_length=50, choices=UPTIME_CHOICES, default="off", verbose_name="Uptime")
created_at = models.DateTimeField(auto_now_add=True, verbose_name="Created at")
updated_at = models.DateTimeField(auto_now=True, verbose_name="Updated at")
class Meta:
db_table = "app_component_group"
|
[
"django.db.models.CharField",
"django.db.models.DateTimeField"
] |
[((201, 254), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': '"""Name"""'}), "(max_length=100, verbose_name='Name')\n", (217, 254), False, 'from django.db import models\n'), ((273, 333), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'verbose_name': '"""Description"""'}), "(max_length=200, verbose_name='Description')\n", (289, 333), False, 'from django.db import models\n'), ((347, 444), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'choices': 'UPTIME_CHOICES', 'default': '"""off"""', 'verbose_name': '"""Uptime"""'}), "(max_length=50, choices=UPTIME_CHOICES, default='off',\n verbose_name='Uptime')\n", (363, 444), False, 'from django.db import models\n'), ((458, 524), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""Created at"""'}), "(auto_now_add=True, verbose_name='Created at')\n", (478, 524), False, 'from django.db import models\n'), ((542, 604), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Updated at"""'}), "(auto_now=True, verbose_name='Updated at')\n", (562, 604), False, 'from django.db import models\n')]
|