repo_name | repo_path | repo_head_hexsha | content | apis
---|---|---|---|---|
antopen/alipay-sdk-python-all | alipay/aop/api/domain/AlipayMerchantAuthDeleteModel.py | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayMerchantAuthDeleteModel(object):
def __init__(self):
self._channel_code = None
self._operator_id = None
self._role = None
self._scene_code = None
self._user_id_list = None
@property
def channel_code(self):
return self._channel_code
@channel_code.setter
def channel_code(self, value):
self._channel_code = value
@property
def operator_id(self):
return self._operator_id
@operator_id.setter
def operator_id(self, value):
self._operator_id = value
@property
def role(self):
return self._role
@role.setter
def role(self, value):
self._role = value
@property
def scene_code(self):
return self._scene_code
@scene_code.setter
def scene_code(self, value):
self._scene_code = value
@property
def user_id_list(self):
return self._user_id_list
@user_id_list.setter
def user_id_list(self, value):
if isinstance(value, list):
self._user_id_list = list()
for i in value:
self._user_id_list.append(i)
def to_alipay_dict(self):
params = dict()
if self.channel_code:
if hasattr(self.channel_code, 'to_alipay_dict'):
params['channel_code'] = self.channel_code.to_alipay_dict()
else:
params['channel_code'] = self.channel_code
if self.operator_id:
if hasattr(self.operator_id, 'to_alipay_dict'):
params['operator_id'] = self.operator_id.to_alipay_dict()
else:
params['operator_id'] = self.operator_id
if self.role:
if hasattr(self.role, 'to_alipay_dict'):
params['role'] = self.role.to_alipay_dict()
else:
params['role'] = self.role
if self.scene_code:
if hasattr(self.scene_code, 'to_alipay_dict'):
params['scene_code'] = self.scene_code.to_alipay_dict()
else:
params['scene_code'] = self.scene_code
if self.user_id_list:
if isinstance(self.user_id_list, list):
for i in range(0, len(self.user_id_list)):
element = self.user_id_list[i]
if hasattr(element, 'to_alipay_dict'):
self.user_id_list[i] = element.to_alipay_dict()
if hasattr(self.user_id_list, 'to_alipay_dict'):
params['user_id_list'] = self.user_id_list.to_alipay_dict()
else:
params['user_id_list'] = self.user_id_list
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayMerchantAuthDeleteModel()
if 'channel_code' in d:
o.channel_code = d['channel_code']
if 'operator_id' in d:
o.operator_id = d['operator_id']
if 'role' in d:
o.role = d['role']
if 'scene_code' in d:
o.scene_code = d['scene_code']
if 'user_id_list' in d:
o.user_id_list = d['user_id_list']
return o
| [] |
LaudateCorpus1/audio | test/torchaudio_unittest/models/emformer/emformer_cpu_test.py | a007e922d34028270197c0549bf452b79499d039 | import torch
from torchaudio_unittest.common_utils import PytorchTestCase
from torchaudio_unittest.models.emformer.emformer_test_impl import EmformerTestImpl
class EmformerFloat32CPUTest(EmformerTestImpl, PytorchTestCase):
dtype = torch.float32
device = torch.device("cpu")
class EmformerFloat64CPUTest(EmformerTestImpl, PytorchTestCase):
dtype = torch.float64
device = torch.device("cpu")
| [((264, 283), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (276, 283), False, 'import torch\n'), ((390, 409), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (402, 409), False, 'import torch\n')] |
stanton119/nba-analysis | src/nba_analysis/pipelines/data_processing/pipeline.py | 79343150edaaa97472939c47b3ce521e038871b0 | """
Two pipelines:
* full history
* update latest season
* Only updates latest season year
"""
from functools import partial
import itertools
from kedro.pipeline import Pipeline, node
from nba_analysis.pipelines.data_processing import basketball_reference
from . import nodes
def create_pipeline(**kwargs):
season_range = range(2018, 2021)
download_nodes = [
node(
func=partial(nodes.download_season_data, season=season),
inputs=[],
outputs=f"season_data_{season}",
name=f"download_season_data_{season}_node",
)
for season in season_range
]
# month_range = ['october','november','december','january','february','march','april','may','june','july','august','september']
# download_game_log_nodes = [
# node(
# func=partial(nodes.download_game_log_data, season=season, month=month),
# inputs=[],
# outputs=f"game_log_data_{season}_{month}",
# name=f"download_game_log_data_{season}_{month}_node",
# )
# for season, month in itertools.product(season_range,month_range)
# ]
download_game_log_nodes = [
node(
func=partial(
basketball_reference.get_full_season_game_log, season=season
),
inputs=[],
outputs=f"game_log_data_{season}",
name=f"download_game_log_data_{season}_node",
)
for season in season_range
]
process_game_log_nodes = [
node(
func=basketball_reference.process_df_game_log,
inputs=f"game_log_data_{season}",
outputs=f"game_log_data_{season}_int",
name=f"process_game_log_data_{season}_node",
)
for season in season_range
]
return Pipeline(
[
*download_nodes,
node(
func=nodes.process_season_data,
inputs=[f"season_data_{season}" for season in season_range],
outputs="cleaned_season_data",
name="process_season_data_node",
),
*download_game_log_nodes,
*process_game_log_nodes,
]
)
| [((1530, 1709), 'kedro.pipeline.node', 'node', ([], {'func': 'basketball_reference.process_df_game_log', 'inputs': 'f"""game_log_data_{season}"""', 'outputs': 'f"""game_log_data_{season}_int"""', 'name': 'f"""process_game_log_data_{season}_node"""'}), "(func=basketball_reference.process_df_game_log, inputs=\n f'game_log_data_{season}', outputs=f'game_log_data_{season}_int', name=\n f'process_game_log_data_{season}_node')\n", (1534, 1709), False, 'from kedro.pipeline import Pipeline, node\n'), ((1873, 2043), 'kedro.pipeline.node', 'node', ([], {'func': 'nodes.process_season_data', 'inputs': "[f'season_data_{season}' for season in season_range]", 'outputs': '"""cleaned_season_data"""', 'name': '"""process_season_data_node"""'}), "(func=nodes.process_season_data, inputs=[f'season_data_{season}' for\n season in season_range], outputs='cleaned_season_data', name=\n 'process_season_data_node')\n", (1877, 2043), False, 'from kedro.pipeline import Pipeline, node\n'), ((408, 458), 'functools.partial', 'partial', (['nodes.download_season_data'], {'season': 'season'}), '(nodes.download_season_data, season=season)\n', (415, 458), False, 'from functools import partial\n'), ((1211, 1280), 'functools.partial', 'partial', (['basketball_reference.get_full_season_game_log'], {'season': 'season'}), '(basketball_reference.get_full_season_game_log, season=season)\n', (1218, 1280), False, 'from functools import partial\n')] |
baijifeilong/rawsteelp | IceSpringMusicPlayer/plugins/IceSpringHelloWorldPlugin/helloWorldPlugin.py | 425547e6e2395bf4acb62435b18b5b3a4b7ebef4 | # Created by [email protected] at 2022/1/21 17:13
import typing
from IceSpringRealOptional.typingUtils import gg
from PySide2 import QtWidgets, QtCore
from IceSpringMusicPlayer import tt
from IceSpringMusicPlayer.common.pluginMixin import PluginMixin
from IceSpringMusicPlayer.common.pluginWidgetMixin import PluginWidgetMixin
from IceSpringMusicPlayer.tt import Text
class HelloWorldPlugin(QtWidgets.QWidget, PluginMixin, PluginWidgetMixin):
@classmethod
def getPluginName(cls) -> Text:
return tt.HelloWorldPlugin_Name
@classmethod
def getPluginReplacers(cls) -> typing.Dict[Text, typing.Callable[[], PluginWidgetMixin]]:
return {tt.HelloWorldWidget_Name: lambda: cls()}
def __init__(self):
super().__init__()
label = QtWidgets.QLabel("Hello World")
label.setAlignment(gg(QtCore.Qt.AlignmentFlag.AlignCenter))
self.setLayout(QtWidgets.QGridLayout())
self.layout().addWidget(label)
| [((783, 814), 'PySide2.QtWidgets.QLabel', 'QtWidgets.QLabel', (['"""Hello World"""'], {}), "('Hello World')\n", (799, 814), False, 'from PySide2 import QtWidgets, QtCore\n'), ((842, 881), 'IceSpringRealOptional.typingUtils.gg', 'gg', (['QtCore.Qt.AlignmentFlag.AlignCenter'], {}), '(QtCore.Qt.AlignmentFlag.AlignCenter)\n', (844, 881), False, 'from IceSpringRealOptional.typingUtils import gg\n'), ((906, 929), 'PySide2.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', ([], {}), '()\n', (927, 929), False, 'from PySide2 import QtWidgets, QtCore\n')] |
2baOrNot2ba/SWHT | SWHT/Ylm.py | 738718e90d615e624dacf7746f8a2dfa973ec9fe | """
An implementation of spherical harmonics in Python, because scipy.special.sph_harm in scipy<=0.13 is very slow
Originally written by Jozef Vesely
https://github.com/scipy/scipy/issues/1280
"""
import numpy as np
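# Usage sketch (the grid values below are illustrative, not from the original source):
#   phi, theta = np.ogrid[0:2*np.pi:64j, 0:np.pi:32j]
#   Y21 = Ylm(2, 1, phi, theta)    # complex harmonic on the (phi, theta) grid
#   R21 = Ylmr(2, 1, phi, theta)   # real-valued counterpart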
def xfact(m):
# computes (2m-1)!!/sqrt((2m)!)
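    # e.g. m=1: the loop yields 1/sqrt(1*2) = 1/sqrt(2), matching (2*1-1)!!/sqrt((2*1)!)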
res = 1.
for i in xrange(1, 2*m+1):
if i % 2: res *= i # (2m-1)!!
res /= np.sqrt(i) # sqrt((2m)!)
return res
def lplm_n(l, m, x):
# associated legendre polynomials normalized as in Ylm, from Numerical Recipes 6.7
l,m = int(l),int(m)
assert 0<=m<=l and np.all(np.abs(x)<=1.)
norm = np.sqrt(2. * l + 1.) / np.sqrt(4. * np.pi)
if m == 0:
pmm = norm * np.ones_like(x)
else:
pmm = (-1.)**m * norm * xfact(m) * (1.-x**2.)**(m/2.)
if l == m:
return pmm
pmmp1 = x * pmm * np.sqrt(2.*m+1.)
if l == m+1:
return pmmp1
for ll in xrange(m+2, l+1):
pll = (x*(2.*ll-1.)*pmmp1 - np.sqrt( (ll-1.)**2. - m**2.)*pmm)/np.sqrt(ll**2.-m**2.)
pmm = pmmp1
pmmp1 = pll
return pll
def Ylm(l, m, phi, theta):
# spherical harmonics
# theta is from 0 to pi with pi/2 on equator
l,m = int(l),int(m)
assert 0 <= np.abs(m) <=l
if m > 0:
return lplm_n(l, m, np.cos(theta)) * np.exp(1J * m * phi)
elif m < 0:
return (-1.)**m * lplm_n(l, -m, np.cos(theta)) * np.exp(1J * m * phi)
return lplm_n(l, m, np.cos(theta)) * np.ones_like(phi)
def Ylmr(l, m, phi, theta):
# real spherical harmonics
# theta is from 0 to pi with pi/2 on equator
l,m = int(l),int(m)
assert 0 <= np.abs(m) <=l
if m > 0:
return lplm_n(l, m, np.cos(theta)) * np.cos(m * phi) * np.sqrt(2.)
elif m < 0:
return (-1.)**m * lplm_n(l, -m, np.cos(theta)) * np.sin(-m * phi) * np.sqrt(2.)
return lplm_n(l, m, np.cos(theta)) * np.ones_like(phi)
if __name__ == "__main__":
from scipy.special import sph_harm
from scipy.misc import factorial2, factorial
from timeit import Timer
def ref_xfact(m):
return factorial2(2*m-1)/np.sqrt(factorial(2*m))
print "Time: xfact(10)", Timer("xfact(10)",
"from __main__ import xfact, ref_xfact").timeit(100)
print "Time: ref_xfact(10)", Timer("ref_xfact(10)",
"from __main__ import xfact, ref_xfact").timeit(100)
print "Time: xfact(80)", Timer("xfact(80)",
"from __main__ import xfact, ref_xfact").timeit(100)
print "Time: ref_xfact(80)", Timer("ref_xfact(80)",
"from __main__ import xfact, ref_xfact").timeit(100)
print "m", "xfact", "ref_xfact"
for m in range(10) + range(80,90):
a = xfact(m)
b = ref_xfact(m)
print m, a, b
phi, theta = np.ogrid[0:2*np.pi:10j,-np.pi/2:np.pi/2:10j]
print "Time: Ylm(1,1,phi,theta)", Timer("Ylm(1,1,phi,theta)",
"from __main__ import Ylm, sph_harm, phi, theta").timeit(10)
print "Time: sph_harm(1,1,phi,theta)", Timer("sph_harm(1,1,phi,theta)",
"from __main__ import Ylm, sph_harm, phi, theta").timeit(10)
print "l", "m", "max|Ylm-sph_harm|"
for l in xrange(0,10):
for m in xrange(-l,l+1):
a = Ylm(l,m,phi,theta)
b = sph_harm(m,l,phi,theta)
print l,m, np.amax(np.abs(a-b))
| [] |
alphacastio/connectors-gcba | 0673.GCBA-HOTEL_STAFF.py | d1b97fb851463694ea844b3b81402c3ea747863b | #!/usr/bin/env python
# coding: utf-8
# In[9]:
import requests
import pandas as pd
from lxml import etree
from bs4 import BeautifulSoup
import datetime
import io
import numpy as np
from alphacast import Alphacast
from dotenv import dotenv_values
API_KEY = dotenv_values(".env").get("API_KEY")
alphacast = Alphacast(API_KEY)
# In[10]:
url1 = "https://www.estadisticaciudad.gob.ar/eyc/wp-content/uploads/2020/11/Eoh_PnoA_0811.xlsx"
df1 = pd.read_excel(url1)
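# The source sheet uses merged header rows: the lines below forward-fill them
# across columns, join two header rows into descriptive column names, and then
# drop the header rows so only data remains.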
df1[:2] = df1[:2].ffill(1)
df1.columns = "Personal No Asalariado - " + df1.iloc[1] + " - " + df1.iloc[2]
df1 = df1.drop(df1.columns[[1]], axis = 1)
df1 = df1.drop(index=1)
df1 = df1.drop(index=0)
df1 = df1.drop(index=2)
df1 = df1.dropna(subset = [df1.columns[3]])
#df1 = df1.iloc[2: , 3:-2]
#df1 = df1[~df1.iloc[:, 0].astype(str).str.isdigit()]
df1 = df1[df1.columns.dropna()]
df1.index = pd.date_range(start='1/1/2008', periods=len(df1), freq = "QS")
df1.index.name = "Date"
#df1 = df1[df1.columns.drop(list(df1.filter(regex='Participación')))]
df1
# In[11]:
url2 = "https://www.estadisticaciudad.gob.ar/eyc/wp-content/uploads/2018/05/Eoh_PA_0811.xlsx"
df2 = pd.read_excel(url2)
df2[:2] = df2[:2].ffill(1)
df2.columns = "Personal Asalariado - " + df2.iloc[1] + " - " + df2.iloc[2]
df2 = df2.drop(df2.columns[[1]], axis = 1)
df2 = df2.drop(index=1)
df2 = df2.drop(index=0)
df2 = df2.drop(index=2)
df2 = df2.dropna(subset = [df2.columns[3]])
#df2 = df2.iloc[2: , 3:-2]
#df2 = df2[~df2.iloc[:, 0].astype(str).str.isdigit()]
df2 = df2[df2.columns.dropna()]
df2.index = pd.date_range(start='1/1/2008', periods=len(df2), freq = "QS")
df2.index.name = "Date"
df3 = df1.merge(df2, right_index=True, left_index=True)
alphacast.datasets.dataset(7432).upload_data_from_df(df3,
deleteMissingFromDB = True, onConflictUpdateDB = True, uploadIndex=True)
| [((309, 327), 'alphacast.Alphacast', 'Alphacast', (['API_KEY'], {}), '(API_KEY)\n', (318, 327), False, 'from alphacast import Alphacast\n'), ((443, 462), 'pandas.read_excel', 'pd.read_excel', (['url1'], {}), '(url1)\n', (456, 462), True, 'import pandas as pd\n'), ((1129, 1148), 'pandas.read_excel', 'pd.read_excel', (['url2'], {}), '(url2)\n', (1142, 1148), True, 'import pandas as pd\n'), ((260, 281), 'dotenv.dotenv_values', 'dotenv_values', (['""".env"""'], {}), "('.env')\n", (273, 281), False, 'from dotenv import dotenv_values\n')] |
aravi11/approxGed | simpleGmatch4py.py | 6c0a2ed4fd1bcc86c22169e3c96fcf4de717bf8c |
# import the GED using the munkres algorithm
import gmatch4py as gm
import networkx as nx
import collections
import csv
import pickle
from collections import OrderedDict
import json
import concurrent.futures as cf
import time
iter = 0
def getFinishedStatus():
    global iter  # module-level progress counter; without this the increment raises UnboundLocalError
    iter += 1
    print('*******\t' + str(iter) + '\t*******')
def getGraphDiff(files):
dotFile_data_path = './DotFiles/'
file1 = files.split(',')[0]
file2 = files.split(',')[1]
g1_name = file1.split('.')[0] # gets the name of first dotFile without its extension
g2_name = file2.split('.')[0] # gets the name of second dotFile without its extension
#print("\n Started pair: "+ str(g1_name) + ', ' + str(g2_name))
graph_1 = nx.drawing.nx_pydot.read_dot(str(dotFile_data_path) + str(file1))
graph_2 = nx.drawing.nx_pydot.read_dot(str(dotFile_data_path) + str(file2))
jsonData = getJsonData(graph_1, graph_2)
dumpJson(jsonData, g1_name, g2_name)
#print("\n >>>Finished pair: "+ str(g1_name) + ', ' + str(g2_name))
#getFinishedStatus()
#print('Total time : '+str(totalTime)+ '\n')
'''
def runParallelCode(pairList):
with cf.ProcessPoolExecutor(max_workers =2) as executor:
try:
for future in cf.as_completed((executor.map(getGraphDiff, pairList, timeout=5000000)), timeout=5000000):
print(str(type(future.result())))
if str(type(future.result())) == "<class 'NoneType'>":
pass
else:
print(future.result(timeout=5000000))
except cf._base.TimeoutError:
print("Time limit exceeded")
pass
'''
def runParallelCode(pairList):
with cf.ProcessPoolExecutor(max_workers =2) as executor:
try:
result = executor.map(getGraphDiff, pairList, timeout=5000000)
for r in result:
if str(type(r)) == "<class 'NoneType'>":
pass
else:
print(r)
except cf._base.TimeoutError:
print("Time limit exceeded")
pass
def getJsonData(graph_1,graph_2):
g1_edgeList = []
g2_edgeList = []
# convert the node labels which are strings to sorted integers without affecting the node attributes.
sortedIntGraph_1 = nx.relabel.convert_node_labels_to_integers(graph_1, first_label=0, ordering='sorted', label_attribute=None)
sortedIntGraph_2 = nx.relabel.convert_node_labels_to_integers(graph_2, first_label=0, ordering='sorted', label_attribute=None)
g1_edgeTuple = list(sortedIntGraph_1.edges(data=False))
g2_edgeTuple = list(sortedIntGraph_2.edges(data=False))
# get graph edge lists
for i in g1_edgeTuple:
g1_edgeList.append(list(i))
for i in g2_edgeTuple:
g2_edgeList.append(list(i))
# get graph attributes in the ascending order as the node labels
nodeLabelList_g1 = []
nodeLabelList_g2 = []
nodeList_g1 = list(sortedIntGraph_1.nodes(data=True))
nodeList_g2 = list(sortedIntGraph_2.nodes(data=True))
for i in range(len(nodeList_g1)):
if nodeList_g1[i][0] == i:
nodeLabelList_g1.insert(i, nodeList_g1[i][1].get('label').replace('"', ''))
for i in range(len(nodeList_g2)):
if nodeList_g2[i][0] == i:
nodeLabelList_g2.insert(i, nodeList_g2[i][1].get('label').replace('"', ''))
# get graph edit distance
    #ged = nx.graph_edit_distance(sortedIntGraph_1, sortedIntGraph_2, node_match=return_eq) Commented out since it's too time-expensive
#Gmatch4py code for calculating ged
#abs_ged = gm.BP_2(1,1,1,1)
ged=gm.GraphEditDistance(1,1,1,1) # all edit costs are equal to 1
#hed = gm.HED(1,1,1,1)
result = ged.compare([sortedIntGraph_1, sortedIntGraph_2], None)
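    # compare() yields a pairwise matrix over the supplied graphs; the [0][1]
    # entry read below is taken as the edit distance from graph_1 to graph_2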
# generate the json files
jsonDict = {}
jsonDict["graph_1"] = g1_edgeList
jsonDict["graph_2"] = g2_edgeList
jsonDict["labels_1"] = nodeLabelList_g1
jsonDict["labels_2"] = nodeLabelList_g2
jsonDict["ged"] = int(result[0][1])
#print(jsonDict)
return jsonDict
def return_eq(node1, node2): #function to compare the node labels
return node1['label']==node2['label']
def dumpJson(jsonFile, g1, g2): #function to dump the Json files
outPath = './outFiles/'
with open(str(outPath)+ str(g1) + '::::'+ str(g2) + '.json', 'w') as fp:
json.dump(jsonFile, fp)
def main(): #main function from where the program starts
dotFileList= []
#dotFile_data_path = './DotFiles/test'
with open('./filenames.txt', 'r') as csvFile:
reader = csv.reader(csvFile)
for row in reader:
dotName = str(row).replace('[', '').replace(']','').replace("'","").strip()
dotFileList.append(dotName)
print("Total number of graph files: " + str(len(dotFileList)))
counter = 0
len_dotFileList = len(dotFileList)
totalGraphJsons = len_dotFileList * len_dotFileList #total number of graph similarity json samples
print("Total Graph Similarity json samples: " + str(int(totalGraphJsons)))
pairList = []
#Code for generating graph Similarity json. Takes a non-symmetric pair of graphs from a list and returns their json data
for dotFile_i in dotFileList:
for dotFile_j in dotFileList:
pairList.append(str(dotFile_i + ','+ str(dotFile_j)))
print("<<<<<<<<<<<<<<<<<<<<<< " + str(len(pairList)))
runParallelCode(pairList)
if __name__ == '__main__':
start_time = time.time()
main()
print("--- %s seconds ---" % (time.time() - start_time))
| [((2308, 2420), 'networkx.relabel.convert_node_labels_to_integers', 'nx.relabel.convert_node_labels_to_integers', (['graph_1'], {'first_label': '(0)', 'ordering': '"""sorted"""', 'label_attribute': 'None'}), "(graph_1, first_label=0, ordering\n ='sorted', label_attribute=None)\n", (2350, 2420), True, 'import networkx as nx\n'), ((2439, 2551), 'networkx.relabel.convert_node_labels_to_integers', 'nx.relabel.convert_node_labels_to_integers', (['graph_2'], {'first_label': '(0)', 'ordering': '"""sorted"""', 'label_attribute': 'None'}), "(graph_2, first_label=0, ordering\n ='sorted', label_attribute=None)\n", (2481, 2551), True, 'import networkx as nx\n'), ((3629, 3661), 'gmatch4py.GraphEditDistance', 'gm.GraphEditDistance', (['(1)', '(1)', '(1)', '(1)'], {}), '(1, 1, 1, 1)\n', (3649, 3661), True, 'import gmatch4py as gm\n'), ((5487, 5498), 'time.time', 'time.time', ([], {}), '()\n', (5496, 5498), False, 'import time\n'), ((1700, 1737), 'concurrent.futures.ProcessPoolExecutor', 'cf.ProcessPoolExecutor', ([], {'max_workers': '(2)'}), '(max_workers=2)\n', (1722, 1737), True, 'import concurrent.futures as cf\n'), ((4375, 4398), 'json.dump', 'json.dump', (['jsonFile', 'fp'], {}), '(jsonFile, fp)\n', (4384, 4398), False, 'import json\n'), ((4589, 4608), 'csv.reader', 'csv.reader', (['csvFile'], {}), '(csvFile)\n', (4599, 4608), False, 'import csv\n'), ((5544, 5555), 'time.time', 'time.time', ([], {}), '()\n', (5553, 5555), False, 'import time\n')] |
Dridi/blockdiag | src/blockdiag/utils/rst/nodes.py | bbb16f8a731cdf79a675a63c1ff847e70fdc4a5b | # -*- coding: utf-8 -*-
# Copyright 2011 Takeshi KOMIYA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from hashlib import sha1
from docutils import nodes
import blockdiag.parser
import blockdiag.builder
import blockdiag.drawer
class blockdiag(nodes.General, nodes.Element):
name = 'blockdiag'
processor = blockdiag
def to_diagram(self):
try:
tree = self.processor.parser.parse_string(self['code'])
except:
code = '%s { %s }' % (self.name, self['code'])
tree = self.processor.parser.parse_string(code)
self['code'] = code # replace if succeeded
return self.processor.builder.ScreenNodeBuilder.build(tree)
def to_drawer(self, image_format, filename, fontmap, **kwargs):
diagram = self.to_diagram()
return self.processor.drawer.DiagramDraw(image_format, diagram,
filename, fontmap=fontmap,
**kwargs)
def get_path(self, **options):
options.update(self['options'])
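        # hash the diagram source together with the options so identical inputs
        # always map to the same generated file name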
hashseed = (self['code'] + str(options)).encode('utf-8')
hashed = sha1(hashseed).hexdigest()
filename = "%s-%s.%s" % (self.name, hashed, options['format'].lower())
outputdir = options.get('outputdir')
if outputdir:
filename = os.path.join(outputdir, filename)
return filename
| [((1881, 1914), 'os.path.join', 'os.path.join', (['outputdir', 'filename'], {}), '(outputdir, filename)\n', (1893, 1914), False, 'import os\n'), ((1684, 1698), 'hashlib.sha1', 'sha1', (['hashseed'], {}), '(hashseed)\n', (1688, 1698), False, 'from hashlib import sha1\n')] |
emiliachojak/bio-projects | python-advanced/chp1/main.py | d2e5290b48613ef6721e303b3490a98cf4cbf6c0 | # -*- coding: utf-8 -*-
"""
Created on Thu Dec 19 20:00:00 2019
@author: Emilia Chojak
@e-mail: [email protected]
"""
tax_dict = {
'Pan troglodytes' : 'Hominoidea', 'Pongo abelii' : 'Hominoidea',
'Hominoidea' : 'Simiiformes', 'Simiiformes' : 'Haplorrhini',
'Tarsius tarsier' : 'Tarsiiformes', 'Haplorrhini' : 'Primates',
'Tarsiiformes' : 'Haplorrhini', 'Loris tardigradus' :
'Lorisidae',
'Lorisidae' : 'Strepsirrhini', 'Strepsirrhini' : 'Primates',
'Allocebus trichotis' : 'Lemuriformes', 'Lemuriformes' :
'Strepsirrhini',
'Galago alleni' : 'Lorisiformes', 'Lorisiformes' :
'Strepsirrhini',
'Galago moholi' : 'Lorisiformes'
}
def find_ancestors(taxon):
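    # recursively walk up tax_dict from the given taxon until the root
    # ('Primates') is reached, returning the full lineage as a list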
if taxon == 'Primates':
return [taxon]
parent = tax_dict[taxon]
parent_ancestors = find_ancestors(parent)
return [taxon] + parent_ancestors
def find_ancestors_for_many(taxon_list):
many_parents = []
for taxon in taxon_list:
many_parents.append(find_ancestors(taxon))
return many_parents
def last_common_ancestor(many_parents):
for parent in many_parents[0]:
is_ok = True
for parent_list in many_parents:
if parent not in parent_list:
is_ok = False
if is_ok == True:
return parent
print(last_common_ancestor(find_ancestors_for_many(["Galago alleni", "Galago moholi"]))) | [] |
LeishenKOBE/good-good-study | Python/csv/1.py | ac6b859f53b8b95f0746f35c5278009a5cad40a8 | import csv
# with open('./1.csv', newline='', encoding='utf-8') as f:
# reader = csv.reader(f)
# for row in reader:
# print(row)
with open('./1.csv', 'a', encoding='utf-8') as f:
writer = csv.writer(f)
writer.writerow(['4', '猫砂', '25', '1022', '886'])
writer.writerow(['5', '猫罐头', '18', '2234', '3121'])
| [((210, 223), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (220, 223), False, 'import csv\n')] |
broper2/solana-py | src/solana/rpc/responses.py | 146390d959f017e137238335ee6fa362ad1a1ab4 | """This module contains code for parsing RPC responses."""
from dataclasses import dataclass, field
from typing import Union, Tuple, Any, Dict, List, Optional, Literal
from apischema import alias
from apischema.conversions import as_str
from solana.publickey import PublicKey
from solana.transaction import TransactionSignature
as_str(PublicKey)
TransactionErrorResult = Optional[dict]
@dataclass
class TransactionErr:
"""Container for possible transaction errors."""
err: TransactionErrorResult
@dataclass
class Context:
"""RPC result context."""
slot: int
@dataclass
class WithContext:
"""Base class for RPC result including context."""
context: Context
@dataclass
class AccountInfo:
"""Account information."""
lamports: int
owner: PublicKey
data: Union[Literal[""], Tuple[str, str], Dict[str, Any]]
executable: bool
rent_epoch: int = field(metadata=alias("rentEpoch"))
@dataclass
class AccountInfoAndContext(WithContext):
"""Account info and RPC result context."""
value: AccountInfo
@dataclass
class SubscriptionNotificationBase:
"""Base class for RPC subscription notifications."""
subscription: int
result: Any
@dataclass
class AccountNotification(SubscriptionNotificationBase):
"""Account subscription notification."""
result: AccountInfoAndContext
@dataclass
class LogItem(TransactionErr):
"""Container for logs from logSubscribe."""
signature: TransactionSignature
logs: Optional[List[str]]
@dataclass
class LogItemAndContext(WithContext):
"""Log item with RPC result context."""
value: LogItem
@dataclass
class LogsNotification(SubscriptionNotificationBase):
"""Logs subscription notification."""
result: LogItemAndContext
@dataclass
class ProgramAccount:
"""Program account pubkey and account info."""
pubkey: PublicKey
account: AccountInfo
@dataclass
class ProgramAccountAndContext(WithContext):
"""Program subscription data with RPC result context."""
value: ProgramAccount
@dataclass
class ProgramNotification(SubscriptionNotificationBase):
"""Program subscription notification."""
result: ProgramAccountAndContext
@dataclass
class SignatureErrAndContext(WithContext):
"""Signature subscription error info with RPC result context."""
value: TransactionErr
@dataclass
class SignatureNotification(SubscriptionNotificationBase):
"""Signature subscription notification."""
result: SignatureErrAndContext
@dataclass
class SlotBase:
"""Base class for slot container."""
slot: int
@dataclass
class SlotInfo(SlotBase):
"""Slot info."""
parent: int
root: int
@dataclass
class SlotNotification(SubscriptionNotificationBase):
"""Slot subscription notification."""
result: SlotInfo
@dataclass
class RootNotification(SubscriptionNotificationBase):
"""Root subscription notification."""
result: int
@dataclass
class SlotAndTimestampBase(SlotBase):
"""Base class for a slot with timestamp."""
timestamp: int
@dataclass
class FirstShredReceived(SlotAndTimestampBase):
"""First shread received update."""
type: Literal["firstShredReceived"]
@dataclass
class Completed(SlotAndTimestampBase):
"""Slot completed update."""
type: Literal["completed"]
@dataclass
class CreatedBank(SlotAndTimestampBase):
"""Created bank update."""
parent: int
type: Literal["createdBank"]
@dataclass
class SlotTransactionStats:
"""Slot transaction stats."""
num_transaction_entries: int = field(metadata=alias("numTransactionEntries"))
num_successful_transactions: int = field(metadata=alias("numSuccessfulTransactions"))
num_failed_transactions: int = field(metadata=alias("numFailedTransactions"))
max_transactions_per_entry: int = field(metadata=alias("maxTransactionsPerEntry"))
@dataclass
class Frozen(SlotAndTimestampBase):
"""Slot frozen update."""
stats: SlotTransactionStats
type: Literal["frozen"]
@dataclass
class Dead(SlotAndTimestampBase):
"""Dead slot update."""
err: str
type: Literal["dead"]
@dataclass
class OptimisticConfirmation(SlotAndTimestampBase):
"""Optimistic confirmation update."""
type: Literal["optimisticConfirmation"]
@dataclass
class Root(SlotAndTimestampBase):
"""Root update."""
type: Literal["root"]
SlotsUpdatesItem = Union[FirstShredReceived, Completed, CreatedBank, Frozen, Dead, OptimisticConfirmation, Root]
@dataclass
class SlotsUpdatesNotification(SubscriptionNotificationBase):
"""Slots updates notification."""
result: SlotsUpdatesItem
@dataclass
class VoteItem:
"""Vote data."""
hash: str
slots: List[int]
timestamp: Optional[int]
@dataclass
class VoteNotification(SubscriptionNotificationBase):
"""Vote update notification."""
result: VoteItem
SubscriptionNotification = Union[
AccountNotification,
LogsNotification,
ProgramNotification,
SignatureNotification,
SlotNotification,
RootNotification,
SlotsUpdatesNotification,
VoteNotification,
]
| [((332, 349), 'apischema.conversions.as_str', 'as_str', (['PublicKey'], {}), '(PublicKey)\n', (338, 349), False, 'from apischema.conversions import as_str\n'), ((917, 935), 'apischema.alias', 'alias', (['"""rentEpoch"""'], {}), "('rentEpoch')\n", (922, 935), False, 'from apischema import alias\n'), ((3584, 3614), 'apischema.alias', 'alias', (['"""numTransactionEntries"""'], {}), "('numTransactionEntries')\n", (3589, 3614), False, 'from apischema import alias\n'), ((3670, 3704), 'apischema.alias', 'alias', (['"""numSuccessfulTransactions"""'], {}), "('numSuccessfulTransactions')\n", (3675, 3704), False, 'from apischema import alias\n'), ((3756, 3786), 'apischema.alias', 'alias', (['"""numFailedTransactions"""'], {}), "('numFailedTransactions')\n", (3761, 3786), False, 'from apischema import alias\n'), ((3841, 3873), 'apischema.alias', 'alias', (['"""maxTransactionsPerEntry"""'], {}), "('maxTransactionsPerEntry')\n", (3846, 3873), False, 'from apischema import alias\n')] |
adriennekarnoski/data-structures | python/data_structures/binheap.py | 86ccf988ac02884749226236ad4ac37762873efa | """Build a binary min heap object."""
from math import floor
class BinaryHeap(object):
"""Create a Binary Heap object as a Min Heap."""
def __init__(self):
"""Initialize the heap list to be used by Binary Heap."""
self._heap_list = []
def push(self, val):
"""Add new value to heap list and run check heap method."""
self._heap_list.append(val)
if len(self._heap_list) == 2:
self._small_heap()
self._check_heap()
def _small_heap(self):
heap = self._heap_list
if heap[0] > heap[1]:
heap[0], heap[1] = heap[1], heap[0]
return heap
def _check_heap(self):
"""Check all the children are less than their parents."""
heap = self._heap_list
index = floor((len(heap) - 1) / 2)
i = 0
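        # children of node i live at 2*i + 1 and 2*i + 2; swap whenever the
        # parent is larger than a child to restore the min-heap property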
while i < index:
l = (2 * i) + 1
if heap[i] > heap[l]:
heap[i], heap[l] = heap[l], heap[i]
try:
r = (2 * i) + 2
if heap[i] > heap[r]:
heap[i], heap[r] = heap[r], heap[i]
except IndexError: # pragma: no cover
pass
i += 1
return heap
def pop(self):
"""Remove top value of heap and run check heap method."""
try:
heap = self._heap_list
index = len(heap) - 1
heap[0], heap[index] = heap[index], heap[0]
self._heap_list.pop()
if len(self._heap_list) == 2:
self._small_heap()
self._check_heap()
return heap
except IndexError:
raise IndexError('Nothing available to pop')
def _display(self): # pragma: no cover
"""Make it easier during testing."""
for item in self._heap_list:
print(item)
| [] |
RichardLitt/Vesper | vesper/archive_settings.py | 5360844f42a06942e7684121c650b08cf8616285 | """
Vesper archive settings.
The Vesper server serves the Vesper archive that is in the directory
in which the server starts. The archive settings are the composition
of a set of default settings (hard-coded in this module) and settings
(optionally) specified in the file "Archive Settings.yaml" in the
archive directory.
"""
from pathlib import Path
import os
import sys
from vesper.util.settings import Settings
from vesper.util.settings_type import SettingsType
import vesper.archive_paths as archive_paths
_DEFAULT_SETTINGS = Settings.create_from_yaml('''
database:
engine: SQLite
''')
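# A hypothetical "Archive Settings.yaml" in the archive directory mirrors the
# structure above, e.g.:
#
#   database:
#       engine: SQLite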
_SETTINGS_TYPE = SettingsType('Archive Settings', _DEFAULT_SETTINGS)
_SETTINGS_FILE_NAME = 'Archive Settings.yaml'
def _create_settings():
archive_dir_path = Path(os.getcwd())
settings = _load_settings_file(archive_dir_path)
archive_paths.initialize(archive_dir_path, settings)
return settings
def _load_settings_file(archive_dir_path):
file_path = archive_dir_path / _SETTINGS_FILE_NAME
if not file_path.exists():
        # settings file does not exist
return _SETTINGS_TYPE.defaults
else:
# settings file exists
try:
return _SETTINGS_TYPE.create_settings_from_yaml_file(file_path)
except Exception as e:
print((
'Load failed for settings file "{}". Error message '
'was: {}').format(file_path, str(e)))
sys.exit(1)
archive_settings = _create_settings()
| [((536, 599), 'vesper.util.settings.Settings.create_from_yaml', 'Settings.create_from_yaml', (['"""\ndatabase:\n engine: SQLite\n"""'], {}), '("""\ndatabase:\n engine: SQLite\n""")\n', (561, 599), False, 'from vesper.util.settings import Settings\n'), ((619, 670), 'vesper.util.settings_type.SettingsType', 'SettingsType', (['"""Archive Settings"""', '_DEFAULT_SETTINGS'], {}), "('Archive Settings', _DEFAULT_SETTINGS)\n", (631, 670), False, 'from vesper.util.settings_type import SettingsType\n'), ((843, 895), 'vesper.archive_paths.initialize', 'archive_paths.initialize', (['archive_dir_path', 'settings'], {}), '(archive_dir_path, settings)\n', (867, 895), True, 'import vesper.archive_paths as archive_paths\n'), ((773, 784), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (782, 784), False, 'import os\n'), ((1487, 1498), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1495, 1498), False, 'import sys\n')] |
DAIM-ML/autotf | autotf/model/vgg16.py | 3f82d858f49c27d5ecb624cee555fb8fd47bf067 | #-*- coding=utf-8 -*-
from __future__ import division, print_function, absolute_import
from base_model import BaseModel
from helper import *
import tensorflow as tf
import pickle
import numpy as np
import time
class Vgg16(BaseModel):
default_param = {
"loss" : "square_loss",
"metrics" : ["loss"],
"optimizer" : "sgd",
"learning_rate" : 1e-2,
"batch_size" : 100,
"num_epochs" : 25,
"keep_prob":0.75
}
def __init__(self,classnum):
self.class_num = classnum
self.model = None
self.sess = tf.Session()
self.scope = {}
self.summary = []
def conv2d(self,layer_name,inputs, out_channels, kernel_size, strides=1, padding='SAME'):
in_channels = inputs.get_shape()[-1]
with tf.variable_scope(layer_name) as scope:
self.scope[layer_name] = scope
w = tf.get_variable(name='weights',
trainable=True,
shape=[kernel_size, kernel_size, in_channels, out_channels],
initializer=tf.contrib.layers.xavier_initializer())
b = tf.get_variable(name='biases',
trainable=True,
shape=[out_channels],
initializer=tf.constant_initializer(0.0))
inputs = tf.nn.conv2d(inputs, w, [1, strides, strides, 1], padding=padding, name='conv')
inputs = tf.nn.bias_add(inputs, b, name='bias_add')
inputs = tf.nn.relu(inputs, name='relu')
return inputs
def max_pool(self, layer_name, inputs, pool_size, strides, padding='SAME'):
with tf.name_scope(layer_name):
return tf.nn.max_pool(inputs, [1, pool_size, pool_size, 1], [1, strides, strides, 1], padding=padding,
name=layer_name)
def avg_pool(self, layer_name, inputs, pool_size, strides, padding='SAME'):
with tf.name_scope(layer_name):
return tf.nn.avg_pool(inputs, [1, pool_size, pool_size, 1], [1, strides, strides, 1], padding=padding,
name=layer_name)
def lrn(self, layer_name, inputs, depth_radius=5, alpha=0.0001, beta=0.75):
with tf.name_scope(layer_name):
return tf.nn.local_response_normalization(name='pool1_norm1', input=inputs, depth_radius=depth_radius,
alpha=alpha, beta=beta)
def concat(self, layer_name, inputs):
with tf.name_scope(layer_name):
one_by_one = inputs[0]
three_by_three = inputs[1]
five_by_five = inputs[2]
pooling = inputs[3]
return tf.concat([one_by_one, three_by_three, five_by_five, pooling], axis=3)
def dropout(self, layer_name, inputs, keep_prob):
# dropout_rate = 1 - keep_prob
with tf.name_scope(layer_name):
return tf.nn.dropout(name=layer_name, x=inputs, keep_prob=keep_prob)
def bn(self, layer_name, inputs, epsilon=1e-3):
with tf.name_scope(layer_name):
batch_mean, batch_var = tf.nn.moments(inputs, [0])
inputs = tf.nn.batch_normalization(inputs, mean=batch_mean, variance=batch_var, offset=None,
scale=None, variance_epsilon=epsilon)
return inputs
def fc(self, layer_name, inputs, out_nodes):
shape = inputs.get_shape()
if len(shape) == 4: # x is 4D tensor
size = shape[1].value * shape[2].value * shape[3].value
else: # x has already flattened
size = shape[-1].value
with tf.variable_scope(layer_name) as scope:
self.scope[layer_name] = scope
w = tf.get_variable('weights',
shape=[size, out_nodes],
initializer=tf.contrib.layers.xavier_initializer())
b = tf.get_variable('biases',
shape=[out_nodes],
initializer=tf.constant_initializer(0.0))
flat_x = tf.reshape(inputs, [-1, size])
inputs = tf.nn.bias_add(tf.matmul(flat_x, w), b)
inputs = tf.nn.relu(inputs)
return inputs
def build_model(self):
        # training data: input image placeholder (224x224x3)
self.inputs = tf.placeholder(tf.float32, shape=[None, 224, 224, 3])
        # training labels (one-hot, width = class_num)
self.labels = tf.placeholder(tf.float32, shape=[None, self.class_num])
# dropout
self.keep_prob = tf.placeholder(tf.float32)
self.conv1_1 = self.conv2d("conv1_1",self.inputs,64,3)
self.conv1_2 = self.conv2d("conv1_2",self.conv1_1, 64,3)
self.pool1 = self.max_pool('pool1',self.conv1_2,pool_size=2,strides=2)
#112*112*64
self.conv2_1 = self.conv2d("conv2_1",self.pool1, 128,3)
self.conv2_2 = self.conv2d( "conv2_2",self.conv2_1, 128,3)
self.pool2 = self.max_pool("pool2",self.conv2_2,pool_size=2,strides=2)
#56*56*128
self.conv3_1 = self.conv2d("conv3_1",self.pool2, 256,3)
self.conv3_2 = self.conv2d("conv3_2",self.conv3_1, 256,3)
self.conv3_3 = self.conv2d("conv3_3",self.conv3_2, 256, 3)
self.pool3 = self.max_pool("pool3",self.conv3_3,pool_size=2,strides=2)
#28*28*256
self.conv4_1 = self.conv2d("conv4_1",self.pool3, 512, 3)
self.conv4_2 = self.conv2d("conv4_2",self.conv4_1, 512, 3)
self.conv4_3 = self.conv2d("conv4_3",self.conv4_2, 512, 3)
self.pool4 = self.max_pool("pool4",self.conv4_3, pool_size=2,strides=2)
#14*14*512
self.conv5_1 = self.conv2d("conv5_1",self.pool4, 512, 3)
self.conv5_2 = self.conv2d("conv5_2",self.conv5_1, 512, 3)
self.conv5_3 = self.conv2d("conv5_3",self.conv5_2, 512, 3)
self.pool5 = self.max_pool( 'pool5',self.conv5_3,pool_size=2,strides=2)
#7*7*512
self.fc6 = self.fc("fc6",self.pool5,4096) # 25088 = 7*7*512
self.relu6 = tf.nn.dropout(self.fc6, self.keep_prob)
self.fc7 = self.fc("fc7",self.relu6,4096)
self.relu7 = tf.nn.dropout(self.fc7, self.keep_prob)
self.pred = self.fc("fc8",self.relu7, self.class_num)
def set_parameter(self, param):
for name in self.default_param:
if name not in param:
param[name] = self.default_param[name]
self.build_model()
        # define the cross-entropy loss function
self.keep_prob_value = param["keep_prob"]
loss_fun = param["loss"]
self.loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=self.pred, labels=self.labels))
optimizer = param["optimizer"]
self.learning_rate = param["learning_rate"]
self.optimizer = tf.train.RMSPropOptimizer(self.learning_rate).minimize(self.loss)
self.correct_prediction = tf.equal(tf.argmax(self.pred, 1), tf.argmax(self.labels, 1))
self.accuracy = tf.reduce_mean(tf.cast(self.correct_prediction, tf.float32))
self.batch_size = param["batch_size"]
self.num_epochs = param["num_epochs"]
def get_batch(self, feed_data):
X = feed_data["inputs"]
Y = feed_data["labels"]
totalbatch = int(len(X)/self.batch_size)+1
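        # the +1 allows for a trailing partial batch; the check below drops it
        # again when the data length divides the batch size exactly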
if (totalbatch * self.batch_size == len(X)):
totalbatch = totalbatch - 1
for i in range(0,totalbatch):
startindex = i*self.batch_size
endindex = (i+1)*self.batch_size
batch_xs = X[startindex:endindex]
batch_ys = Y[startindex:endindex]
yield { "batch_xs" : batch_xs, "batch_ys" : batch_ys }
def train(self, feed_data):
self.sess.run(tf.global_variables_initializer())
trainstep = 0
for epoch in range(self.num_epochs):
avg_cost = 0.0
totalaccuracy = 0.0
for batch in self.get_batch(feed_data):
feed_dict = {
self.inputs : batch["batch_xs"],
self.labels : batch["batch_ys"],
self.keep_prob: self.keep_prob_value,
}
_, loss, acc = self.sess.run([self.optimizer, self.loss,self.accuracy], feed_dict=feed_dict)
totalaccuracy += acc*len(batch["batch_xs"])
avg_cost += loss
trainstep = trainstep + 1
totalaccuracy /= len(feed_data['inputs'])
print("train_step"+"\t"+str(trainstep)+"\t"+"epoch:"+"\t"+str(epoch+1)+"\t"+"accuracy:"+"\t"+str(totalaccuracy)+"\t"+"loss:"+"\t"+str(avg_cost))
def model_load(self,path):
saver = tf.train.Saver()
saver.restore(self.sess, path)
return
def model_save(self,path):
saver = tf.train.Saver()
saver.save(self.sess, path)
return
def evaluate(self, feed_data):
avg_loss = 0.0
totalaccuracy = 0.0
totallen = len(feed_data["inputs"])
for batch in self.get_batch(feed_data):
feed_dict = {
self.inputs: batch["batch_xs"],
self.labels: batch["batch_ys"],
self.keep_prob:self.keep_prob_value
}
loss, acc = self.sess.run([self.loss, self.accuracy], feed_dict=feed_dict)
totalaccuracy += acc * len(batch["batch_xs"])
avg_loss += loss
avg_loss /= totallen
totalaccuracy /= len(feed_data['inputs'])
res = {"accuracy":totalaccuracy,"loss":avg_loss}
return res
def predict(self, feed_data):
res = []
for batch in self.get_batch(feed_data):
feed_dict = {
self.inputs: batch["batch_xs"]
}
pred = self.sess.run(self.pred, feed_dict=feed_dict)
res.extend(pred.tolist())
return res
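# Usage sketch (hypothetical variable names and data, not from the original source):
#   model = Vgg16(classnum=10)
#   model.set_parameter({"num_epochs": 5, "batch_size": 32})
#   model.train({"inputs": train_images, "labels": train_labels_onehot})
#   print(model.evaluate({"inputs": test_images, "labels": test_labels_onehot}))
# inputs are 224x224x3 image arrays and labels are one-hot vectors of length
# classnum, matching the placeholders created in build_model().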
| [((581, 593), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (591, 593), True, 'import tensorflow as tf\n'), ((4377, 4430), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '[None, 224, 224, 3]'}), '(tf.float32, shape=[None, 224, 224, 3])\n', (4391, 4430), True, 'import tensorflow as tf\n'), ((4471, 4527), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '[None, self.class_num]'}), '(tf.float32, shape=[None, self.class_num])\n', (4485, 4527), True, 'import tensorflow as tf\n'), ((4572, 4598), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {}), '(tf.float32)\n', (4586, 4598), True, 'import tensorflow as tf\n'), ((6041, 6080), 'tensorflow.nn.dropout', 'tf.nn.dropout', (['self.fc6', 'self.keep_prob'], {}), '(self.fc6, self.keep_prob)\n', (6054, 6080), True, 'import tensorflow as tf\n'), ((6154, 6193), 'tensorflow.nn.dropout', 'tf.nn.dropout', (['self.fc7', 'self.keep_prob'], {}), '(self.fc7, self.keep_prob)\n', (6167, 6193), True, 'import tensorflow as tf\n'), ((8645, 8661), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (8659, 8661), True, 'import tensorflow as tf\n'), ((8764, 8780), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (8778, 8780), True, 'import tensorflow as tf\n'), ((796, 825), 'tensorflow.variable_scope', 'tf.variable_scope', (['layer_name'], {}), '(layer_name)\n', (813, 825), True, 'import tensorflow as tf\n'), ((1396, 1475), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['inputs', 'w', '[1, strides, strides, 1]'], {'padding': 'padding', 'name': '"""conv"""'}), "(inputs, w, [1, strides, strides, 1], padding=padding, name='conv')\n", (1408, 1475), True, 'import tensorflow as tf\n'), ((1497, 1539), 'tensorflow.nn.bias_add', 'tf.nn.bias_add', (['inputs', 'b'], {'name': '"""bias_add"""'}), "(inputs, b, name='bias_add')\n", (1511, 1539), True, 'import tensorflow as tf\n'), ((1561, 1592), 'tensorflow.nn.relu', 'tf.nn.relu', (['inputs'], {'name': '"""relu"""'}), "(inputs, name='relu')\n", (1571, 1592), True, 'import tensorflow as tf\n'), ((1713, 1738), 'tensorflow.name_scope', 'tf.name_scope', (['layer_name'], {}), '(layer_name)\n', (1726, 1738), True, 'import tensorflow as tf\n'), ((1759, 1876), 'tensorflow.nn.max_pool', 'tf.nn.max_pool', (['inputs', '[1, pool_size, pool_size, 1]', '[1, strides, strides, 1]'], {'padding': 'padding', 'name': 'layer_name'}), '(inputs, [1, pool_size, pool_size, 1], [1, strides, strides, \n 1], padding=padding, name=layer_name)\n', (1773, 1876), True, 'import tensorflow as tf\n'), ((2000, 2025), 'tensorflow.name_scope', 'tf.name_scope', (['layer_name'], {}), '(layer_name)\n', (2013, 2025), True, 'import tensorflow as tf\n'), ((2046, 2163), 'tensorflow.nn.avg_pool', 'tf.nn.avg_pool', (['inputs', '[1, pool_size, pool_size, 1]', '[1, strides, strides, 1]'], {'padding': 'padding', 'name': 'layer_name'}), '(inputs, [1, pool_size, pool_size, 1], [1, strides, strides, \n 1], padding=padding, name=layer_name)\n', (2060, 2163), True, 'import tensorflow as tf\n'), ((2287, 2312), 'tensorflow.name_scope', 'tf.name_scope', (['layer_name'], {}), '(layer_name)\n', (2300, 2312), True, 'import tensorflow as tf\n'), ((2333, 2456), 'tensorflow.nn.local_response_normalization', 'tf.nn.local_response_normalization', ([], {'name': '"""pool1_norm1"""', 'input': 'inputs', 'depth_radius': 'depth_radius', 'alpha': 'alpha', 'beta': 'beta'}), "(name='pool1_norm1', input=inputs,\n depth_radius=depth_radius, alpha=alpha, beta=beta)\n", (2367, 2456), True, 'import tensorflow as tf\n'), 
((2563, 2588), 'tensorflow.name_scope', 'tf.name_scope', (['layer_name'], {}), '(layer_name)\n', (2576, 2588), True, 'import tensorflow as tf\n'), ((2752, 2822), 'tensorflow.concat', 'tf.concat', (['[one_by_one, three_by_three, five_by_five, pooling]'], {'axis': '(3)'}), '([one_by_one, three_by_three, five_by_five, pooling], axis=3)\n', (2761, 2822), True, 'import tensorflow as tf\n'), ((2930, 2955), 'tensorflow.name_scope', 'tf.name_scope', (['layer_name'], {}), '(layer_name)\n', (2943, 2955), True, 'import tensorflow as tf\n'), ((2976, 3037), 'tensorflow.nn.dropout', 'tf.nn.dropout', ([], {'name': 'layer_name', 'x': 'inputs', 'keep_prob': 'keep_prob'}), '(name=layer_name, x=inputs, keep_prob=keep_prob)\n', (2989, 3037), True, 'import tensorflow as tf\n'), ((3104, 3129), 'tensorflow.name_scope', 'tf.name_scope', (['layer_name'], {}), '(layer_name)\n', (3117, 3129), True, 'import tensorflow as tf\n'), ((3167, 3193), 'tensorflow.nn.moments', 'tf.nn.moments', (['inputs', '[0]'], {}), '(inputs, [0])\n', (3180, 3193), True, 'import tensorflow as tf\n'), ((3215, 3340), 'tensorflow.nn.batch_normalization', 'tf.nn.batch_normalization', (['inputs'], {'mean': 'batch_mean', 'variance': 'batch_var', 'offset': 'None', 'scale': 'None', 'variance_epsilon': 'epsilon'}), '(inputs, mean=batch_mean, variance=batch_var,\n offset=None, scale=None, variance_epsilon=epsilon)\n', (3240, 3340), True, 'import tensorflow as tf\n'), ((3698, 3727), 'tensorflow.variable_scope', 'tf.variable_scope', (['layer_name'], {}), '(layer_name)\n', (3715, 3727), True, 'import tensorflow as tf\n'), ((4153, 4183), 'tensorflow.reshape', 'tf.reshape', (['inputs', '[-1, size]'], {}), '(inputs, [-1, size])\n', (4163, 4183), True, 'import tensorflow as tf\n'), ((4266, 4284), 'tensorflow.nn.relu', 'tf.nn.relu', (['inputs'], {}), '(inputs)\n', (4276, 4284), True, 'import tensorflow as tf\n'), ((6591, 6668), 'tensorflow.nn.softmax_cross_entropy_with_logits', 'tf.nn.softmax_cross_entropy_with_logits', ([], {'logits': 'self.pred', 'labels': 'self.labels'}), '(logits=self.pred, labels=self.labels)\n', (6630, 6668), True, 'import tensorflow as tf\n'), ((6899, 6922), 'tensorflow.argmax', 'tf.argmax', (['self.pred', '(1)'], {}), '(self.pred, 1)\n', (6908, 6922), True, 'import tensorflow as tf\n'), ((6924, 6949), 'tensorflow.argmax', 'tf.argmax', (['self.labels', '(1)'], {}), '(self.labels, 1)\n', (6933, 6949), True, 'import tensorflow as tf\n'), ((6990, 7034), 'tensorflow.cast', 'tf.cast', (['self.correct_prediction', 'tf.float32'], {}), '(self.correct_prediction, tf.float32)\n', (6997, 7034), True, 'import tensorflow as tf\n'), ((7715, 7748), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (7746, 7748), True, 'import tensorflow as tf\n'), ((4220, 4240), 'tensorflow.matmul', 'tf.matmul', (['flat_x', 'w'], {}), '(flat_x, w)\n', (4229, 4240), True, 'import tensorflow as tf\n'), ((6789, 6834), 'tensorflow.train.RMSPropOptimizer', 'tf.train.RMSPropOptimizer', (['self.learning_rate'], {}), '(self.learning_rate)\n', (6814, 6834), True, 'import tensorflow as tf\n'), ((1112, 1150), 'tensorflow.contrib.layers.xavier_initializer', 'tf.contrib.layers.xavier_initializer', ([], {}), '()\n', (1148, 1150), True, 'import tensorflow as tf\n'), ((1345, 1373), 'tensorflow.constant_initializer', 'tf.constant_initializer', (['(0.0)'], {}), '(0.0)\n', (1368, 1373), True, 'import tensorflow as tf\n'), ((3925, 3963), 'tensorflow.contrib.layers.xavier_initializer', 'tf.contrib.layers.xavier_initializer', ([], {}), '()\n', 
(3961, 3963), True, 'import tensorflow as tf\n'), ((4102, 4130), 'tensorflow.constant_initializer', 'tf.constant_initializer', (['(0.0)'], {}), '(0.0)\n', (4125, 4130), True, 'import tensorflow as tf\n')] |
RAJESHSAINI2113/LEGENDX | LEGEND/modules/_exec.py | 82c3c61062e804c3bf8b6e4ee31d1e603ab8bfd0 | import subprocess
from LEGEND import tbot as bot
from LEGEND import tbot as borg
from LEGEND.events import register
from LEGEND import OWNER_ID, SUDO_USERS
import asyncio
import traceback
import io
import os
import sys
import time
from telethon.tl import functions
from telethon.tl import types
from telethon.tl.types import *
from telethon.errors import *
@register(pattern="^/bash (.*)")
async def msg(event):
if event.sender_id == OWNER_ID:
pass
else:
return
PROCESS_RUN_TIME = 100
cmd = event.pattern_match.group(1)
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
e = stderr.decode()
if not e:
e = "No Error"
o = stdout.decode()
if not o:
o = "**Tip**: \n`If you want to see the results of your code, I suggest printing them to stdout.`"
else:
_o = o.split("\n")
o = "`\n".join(_o)
await event.reply(f"**QUERY:**\n__Command:__\n`{cmd}` \n__PID:__\n`{process.pid}`\n\n**stderr:** \n`{e}`\n**Output:**\n{o}"
)
@register(pattern="^/eval")
async def _(event):
if event.sender_id == OWNER_ID:
pass
elif event.sender_id in SUDO_USERS:
pass
else:
return
cmd = event.text.split(" ", maxsplit=1)[1]
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
old_stderr = sys.stderr
old_stdout = sys.stdout
redirected_output = sys.stdout = io.StringIO()
redirected_error = sys.stderr = io.StringIO()
stdout, stderr, exc = None, None, None
try:
await aexec(cmd, event)
except Exception:
exc = traceback.format_exc()
stdout = redirected_output.getvalue()
stderr = redirected_error.getvalue()
sys.stdout = old_stdout
sys.stderr = old_stderr
evaluation = ""
if exc:
evaluation = exc
elif stderr:
evaluation = stderr
elif stdout:
evaluation = stdout
else:
evaluation = "Success"
final_output = "**EVAL**: `{}` \n\n **OUTPUT**: \n`{}` \n".format(cmd, evaluation)
MAX_MESSAGE_SIZE_LIMIT = 4095
if len(final_output) > MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(final_output)) as out_file:
out_file.name = "eval.text"
await bot.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=cmd,
reply_to=reply_to_id,
)
else:
await event.reply(final_output)
async def aexec(code, smessatatus):
message = event = smessatatus
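    # build an async wrapper around the user-supplied code so `await` works
    # inside /eval; the wrapper is then fetched from locals() and awaited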
def p(_x):
return print(slitu.yaml_format(_x))
reply = await event.get_reply_message()
exec(
"async def __aexec(message, reply, client, p): "
+ "\n event = smessatatus = message"
+ "".join(f"\n {l}" for l in code.split("\n"))
)
return await locals()["__aexec"](message, reply, bot, p)
| [((360, 391), 'LEGEND.events.register', 'register', ([], {'pattern': '"""^/bash (.*)"""'}), "(pattern='^/bash (.*)')\n", (368, 391), False, 'from LEGEND.events import register\n'), ((1284, 1310), 'LEGEND.events.register', 'register', ([], {'pattern': '"""^/eval"""'}), "(pattern='^/eval')\n", (1292, 1310), False, 'from LEGEND.events import register\n'), ((1708, 1721), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1719, 1721), False, 'import io\n'), ((1758, 1771), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1769, 1771), False, 'import io\n'), ((667, 678), 'time.time', 'time.time', ([], {}), '()\n', (676, 678), False, 'import time\n'), ((718, 823), 'asyncio.create_subprocess_shell', 'asyncio.create_subprocess_shell', (['cmd'], {'stdout': 'asyncio.subprocess.PIPE', 'stderr': 'asyncio.subprocess.PIPE'}), '(cmd, stdout=asyncio.subprocess.PIPE, stderr\n =asyncio.subprocess.PIPE)\n', (749, 823), False, 'import asyncio\n'), ((1893, 1915), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1913, 1915), False, 'import traceback\n'), ((2539, 2657), 'LEGEND.tbot.send_file', 'bot.send_file', (['event.chat_id', 'out_file'], {'force_document': '(True)', 'allow_cache': '(False)', 'caption': 'cmd', 'reply_to': 'reply_to_id'}), '(event.chat_id, out_file, force_document=True, allow_cache=\n False, caption=cmd, reply_to=reply_to_id)\n', (2552, 2657), True, 'from LEGEND import tbot as bot\n')] |
MaxSac/build | src/tools/pch.py | 482c25f3a26171073c7e6c59f0427f2259a63fec | # Status: Being ported by Steven Watanabe
# Base revision: 47077
#
# Copyright (c) 2005 Reece H. Dunn.
# Copyright 2006 Ilya Sokolov
# Copyright (c) 2008 Steven Watanabe
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
##### Using Precompiled Headers (Quick Guide) #####
#
# Make precompiled mypch.hpp:
#
# import pch ;
#
# cpp-pch mypch
# : # sources
# mypch.hpp
# : # requirements
# <toolset>msvc:<source>mypch.cpp
# ;
#
# Add cpp-pch to sources:
#
# exe hello
# : main.cpp hello.cpp mypch
# ;
from b2.build import type, feature, generators
from b2.tools import builtin
type.register('PCH', ['pch'])
type.register('C_PCH', [], 'PCH')
type.register('CPP_PCH', [], 'PCH')
# Control precompiled header (PCH) generation.
feature.feature('pch',
['on', 'off'],
['propagated'])
feature.feature('pch-header', [], ['free', 'dependency'])
feature.feature('pch-file', [], ['free', 'dependency'])
class PchGenerator(generators.Generator):
"""
Base PCH generator. The 'run' method has the logic to prevent this generator
from being run unless it's being used for a top-level PCH target.
"""
def action_class(self):
return builtin.CompileAction
def run(self, project, name, prop_set, sources):
if not name:
# Unless this generator is invoked as the top-most generator for a
# main target, fail. This allows using 'H' type as input type for
            # this generator, while preventing Boost.Build from trying this generator
# when not explicitly asked for.
#
# One bad example is msvc, where pch generator produces both PCH
# target and OBJ target, so if there's any header generated (like by
# bison, or by msidl), we'd try to use pch generator to get OBJ from
# that H, which is completely wrong. By restricting this generator
# only to pch main target, such problem is solved.
pass
else:
r = self.run_pch(project, name,
prop_set.add_raw(['<define>BOOST_BUILD_PCH_ENABLED']),
sources)
return generators.add_usage_requirements(
r, ['<define>BOOST_BUILD_PCH_ENABLED'])
# This rule must be overridden by the derived classes.
def run_pch(self, project, name, prop_set, sources):
pass
# NOTE: requirements are empty, default pch generator can be applied when
# pch=off.
generators.register(builtin.DummyGenerator(
"pch.default-c-pch-generator", False, [], ['C_PCH'], []))
generators.register(builtin.DummyGenerator(
"pch.default-cpp-pch-generator", False, [], ['CPP_PCH'], []))
| [((750, 779), 'b2.build.type.register', 'type.register', (['"""PCH"""', "['pch']"], {}), "('PCH', ['pch'])\n", (763, 779), False, 'from b2.build import type, feature, generators\n'), ((780, 813), 'b2.build.type.register', 'type.register', (['"""C_PCH"""', '[]', '"""PCH"""'], {}), "('C_PCH', [], 'PCH')\n", (793, 813), False, 'from b2.build import type, feature, generators\n'), ((814, 849), 'b2.build.type.register', 'type.register', (['"""CPP_PCH"""', '[]', '"""PCH"""'], {}), "('CPP_PCH', [], 'PCH')\n", (827, 849), False, 'from b2.build import type, feature, generators\n'), ((898, 951), 'b2.build.feature.feature', 'feature.feature', (['"""pch"""', "['on', 'off']", "['propagated']"], {}), "('pch', ['on', 'off'], ['propagated'])\n", (913, 951), False, 'from b2.build import type, feature, generators\n'), ((985, 1042), 'b2.build.feature.feature', 'feature.feature', (['"""pch-header"""', '[]', "['free', 'dependency']"], {}), "('pch-header', [], ['free', 'dependency'])\n", (1000, 1042), False, 'from b2.build import type, feature, generators\n'), ((1043, 1098), 'b2.build.feature.feature', 'feature.feature', (['"""pch-file"""', '[]', "['free', 'dependency']"], {}), "('pch-file', [], ['free', 'dependency'])\n", (1058, 1098), False, 'from b2.build import type, feature, generators\n'), ((2654, 2733), 'b2.tools.builtin.DummyGenerator', 'builtin.DummyGenerator', (['"""pch.default-c-pch-generator"""', '(False)', '[]', "['C_PCH']", '[]'], {}), "('pch.default-c-pch-generator', False, [], ['C_PCH'], [])\n", (2676, 2733), False, 'from b2.tools import builtin\n'), ((2760, 2848), 'b2.tools.builtin.DummyGenerator', 'builtin.DummyGenerator', (['"""pch.default-cpp-pch-generator"""', '(False)', '[]', "['CPP_PCH']", '[]'], {}), "('pch.default-cpp-pch-generator', False, [], [\n 'CPP_PCH'], [])\n", (2782, 2848), False, 'from b2.tools import builtin\n'), ((2327, 2400), 'b2.build.generators.add_usage_requirements', 'generators.add_usage_requirements', (['r', "['<define>BOOST_BUILD_PCH_ENABLED']"], {}), "(r, ['<define>BOOST_BUILD_PCH_ENABLED'])\n", (2360, 2400), False, 'from b2.build import type, feature, generators\n')] |
GitHK/osparc-simcore-forked | packages/pytest-simcore/src/pytest_simcore/helpers/utils_login.py | 5b01a28d1b8028afcf9a735e1d46a73daa13686e | import re
from typing import Dict
from aiohttp import web
from yarl import URL
from simcore_service_webserver.db_models import UserRole, UserStatus
from simcore_service_webserver.login.cfg import cfg, get_storage
from simcore_service_webserver.login.registration import create_invitation
from simcore_service_webserver.login.utils import encrypt_password, get_random_string
from .utils_assert import assert_status
TEST_MARKS = re.compile(r"TEST (\w+):(.*)")
def parse_test_marks(text):
"""Checs for marks as
TEST name:123123
TEST link:some-value
"""
marks = {}
for m in TEST_MARKS.finditer(text):
key, value = m.groups()
marks[key] = value.strip()
return marks
def parse_link(text):
link = parse_test_marks(text)["link"]
return URL(link).path
async def create_user(data=None) -> Dict:
data = data or {}
password = get_random_string(10)
params = {
"name": get_random_string(10),
"email": "{}@gmail.com".format(get_random_string(10)),
"password_hash": encrypt_password(password),
}
params.update(data)
params.setdefault("status", UserStatus.ACTIVE.name)
params.setdefault("role", UserRole.USER.name)
params.setdefault("created_ip", "127.0.0.1")
user = await cfg.STORAGE.create_user(params)
user["raw_password"] = password
return user
async def log_client_in(client, user_data=None, *, enable_check=True) -> Dict:
# creates user directly in db
user = await create_user(user_data)
# login
url = client.app.router["auth_login"].url_for()
r = await client.post(
url,
json={
"email": user["email"],
"password": user["raw_password"],
},
)
if enable_check:
await assert_status(r, web.HTTPOk, cfg.MSG_LOGGED_IN)
return user
class NewUser:
def __init__(self, params=None, app: web.Application = None):
self.params = params
self.user = None
self.db = get_storage(app) if app else cfg.STORAGE # FIXME:
async def __aenter__(self):
self.user = await create_user(self.params)
return self.user
async def __aexit__(self, *args):
await self.db.delete_user(self.user)
class LoggedUser(NewUser):
def __init__(self, client, params=None, *, check_if_succeeds=True):
super().__init__(params, client.app)
self.client = client
self.enable_check = check_if_succeeds
async def __aenter__(self):
self.user = await log_client_in(
self.client, self.params, enable_check=self.enable_check
)
return self.user
class NewInvitation(NewUser):
def __init__(self, client, guest="", host=None):
super().__init__(host, client.app)
self.client = client
self.guest = guest or get_random_string(10)
self.confirmation = None
async def __aenter__(self):
# creates host user
self.user = await create_user(self.params)
self.confirmation = await create_invitation(self.user, self.guest, self.db)
return self.confirmation
async def __aexit__(self, *args):
if await self.db.get_confirmation(self.confirmation):
await self.db.delete_confirmation(self.confirmation)
| [((431, 461), 're.compile', 're.compile', (['"""TEST (\\\\w+):(.*)"""'], {}), "('TEST (\\\\w+):(.*)')\n", (441, 461), False, 'import re\n'), ((885, 906), 'simcore_service_webserver.login.utils.get_random_string', 'get_random_string', (['(10)'], {}), '(10)\n', (902, 906), False, 'from simcore_service_webserver.login.utils import encrypt_password, get_random_string\n'), ((789, 798), 'yarl.URL', 'URL', (['link'], {}), '(link)\n', (792, 798), False, 'from yarl import URL\n'), ((938, 959), 'simcore_service_webserver.login.utils.get_random_string', 'get_random_string', (['(10)'], {}), '(10)\n', (955, 959), False, 'from simcore_service_webserver.login.utils import encrypt_password, get_random_string\n'), ((1049, 1075), 'simcore_service_webserver.login.utils.encrypt_password', 'encrypt_password', (['password'], {}), '(password)\n', (1065, 1075), False, 'from simcore_service_webserver.login.utils import encrypt_password, get_random_string\n'), ((1279, 1310), 'simcore_service_webserver.login.cfg.cfg.STORAGE.create_user', 'cfg.STORAGE.create_user', (['params'], {}), '(params)\n', (1302, 1310), False, 'from simcore_service_webserver.login.cfg import cfg, get_storage\n'), ((1000, 1021), 'simcore_service_webserver.login.utils.get_random_string', 'get_random_string', (['(10)'], {}), '(10)\n', (1017, 1021), False, 'from simcore_service_webserver.login.utils import encrypt_password, get_random_string\n'), ((1993, 2009), 'simcore_service_webserver.login.cfg.get_storage', 'get_storage', (['app'], {}), '(app)\n', (2004, 2009), False, 'from simcore_service_webserver.login.cfg import cfg, get_storage\n'), ((2823, 2844), 'simcore_service_webserver.login.utils.get_random_string', 'get_random_string', (['(10)'], {}), '(10)\n', (2840, 2844), False, 'from simcore_service_webserver.login.utils import encrypt_password, get_random_string\n'), ((3025, 3074), 'simcore_service_webserver.login.registration.create_invitation', 'create_invitation', (['self.user', 'self.guest', 'self.db'], {}), '(self.user, self.guest, self.db)\n', (3042, 3074), False, 'from simcore_service_webserver.login.registration import create_invitation\n')] |
jmuhlich/indra | indra/tests/test_sparser.py | feab2c08541ea73f328579faa6a21b08082cb026 | from indra import sparser
xml_str1 = '''
<article pmid="54321">
<interpretation>
<sentence-text>MEK1 phosphorylates ERK1</sentence-text>
<sem>
<ref category="phosphorylate">
<var name="agent">
<ref category="protein">
<var name="name">MP2K1_HUMAN</var>
<var name="uid">UP:MP2K1_HUMAN</var>
</ref>
</var>
<var name="substrate">
<ref category="protein">
<var name="name">MK03_HUMAN</var>
<var name="uid">UP:MK03_HUMAN</var>
</ref>
</var>
<var name="present"><ref category="present"></ref></var>
</ref>
</sem>
</interpretation>
</article>
'''
xml_str2 = '''
<article pmid="12345">
<interpretation>
<sentence-text>Hence ASPP2 can be phosphorylated at serine 827 by MAPK1 in vitro</sentence-text>
<sem>
<ref category="phosphorylate">
<var name="subordinate-conjunction">
<ref category="subordinate-conjunction"><var name="word">hence</var></ref></var>
<var name="substrate">
<ref category="protein">
<var name="name">ASPP2_HUMAN</var>
<var name="uid">UP:ASPP2_HUMAN</var>
</ref>
</var>
<var name="agent">
<ref category="protein">
<var name="context">
<ref category="in-vitro"></ref>
</var>
<var name="uid">UP:MK01_HUMAN</var>
<var name="name">MK01_HUMAN</var>
</ref>
</var>
<var name="site">
<ref category="residue-on-protein">
<var name="amino-acid">
<ref category="amino-acid"><var name="name">serine</var></ref>
</var>
<var name="position"> 827</var>
</ref>
</var>
<var name="modal"><ref category="can"></ref></var>
</ref>
</sem>
</interpretation>
</article>
'''
def test_invalid_xml():
sp = sparser.process_xml('xyz')
assert(sp is None)
def test_phosphorylation():
sp = sparser.process_xml(xml_str1)
assert(len(sp.statements) == 1)
assert(sp.statements[0].enz.name == 'MAP2K1')
assert(sp.statements[0].sub.name == 'MAPK3')
assert(len(sp.statements[0].evidence) == 1)
ev = sp.statements[0].evidence[0]
assert(ev.pmid == '54321')
assert(ev.text)
assert(ev.source_api == 'sparser')
def test_phosphorylation2():
sp = sparser.process_xml(xml_str2)
assert(len(sp.statements) == 1)
assert(sp.statements[0].enz.name == 'MAPK1')
assert(sp.statements[0].sub.name == 'TP53BP2')
assert(sp.statements[0].residue == 'S')
assert(sp.statements[0].position == '827')
assert (len(sp.statements[0].evidence) == 1)
ev = sp.statements[0].evidence[0]
assert (ev.pmid == '12345')
assert (ev.text)
assert (ev.source_api == 'sparser')
| [((1899, 1925), 'indra.sparser.process_xml', 'sparser.process_xml', (['"""xyz"""'], {}), "('xyz')\n", (1918, 1925), False, 'from indra import sparser\n'), ((1988, 2017), 'indra.sparser.process_xml', 'sparser.process_xml', (['xml_str1'], {}), '(xml_str1)\n', (2007, 2017), False, 'from indra import sparser\n'), ((2369, 2398), 'indra.sparser.process_xml', 'sparser.process_xml', (['xml_str2'], {}), '(xml_str2)\n', (2388, 2398), False, 'from indra import sparser\n')] |
siforrer/coreali | examples/quickstart/run_example.py | 261e321b546192e608edf87c47719d2173ab4645 | """ Simple Example using coreali to access a register model. Needs no h^ardware"""
# Import dependencies and compile register model with systemrdl-compiler
from systemrdl import RDLCompiler
import coreali
import numpy as np
import os
from coreali import RegisterModel
rdlc = RDLCompiler()
rdlc.compile_file(os.path.dirname(__file__)+"/../systemrdl/logger.rdl")
root = rdlc.elaborate()
# Generate hierarchical register model
rio = coreali.registerio.RegIoNoHW(np.zeros([256], np.uint8()))
logger = RegisterModel(root, rio)
# Use the generated register model
logger.Ctrl.read()
logger.LogMem.write(0,[1,2,3])
logger.LogMem.read()
logger.LogMem[1].write(0,[11,12,13])
print(logger)
| [((278, 291), 'systemrdl.RDLCompiler', 'RDLCompiler', ([], {}), '()\n', (289, 291), False, 'from systemrdl import RDLCompiler\n'), ((502, 526), 'coreali.RegisterModel', 'RegisterModel', (['root', 'rio'], {}), '(root, rio)\n', (515, 526), False, 'from coreali import RegisterModel\n'), ((310, 335), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (325, 335), False, 'import os\n'), ((480, 490), 'numpy.uint8', 'np.uint8', ([], {}), '()\n', (488, 490), True, 'import numpy as np\n')] |
mcguigan/pants | src/python/pants/base/specs.py | e085d45669b72d0c51ab8a54602306fc76e07256 | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
import re
from abc import ABC, ABCMeta, abstractmethod
from dataclasses import dataclass
from typing import (
TYPE_CHECKING,
Dict,
Iterable,
Iterator,
List,
Optional,
Sequence,
Tuple,
Union,
cast,
)
from pants.engine.fs import PathGlobs
from pants.engine.objects import Collection
from pants.option.custom_types import GlobExpansionConjunction
from pants.option.global_options import GlobMatchErrorBehavior
from pants.util.collections import assert_single_element
from pants.util.dirutil import fast_relpath_optional, recursive_dirname
from pants.util.filtering import create_filters, wrap_filters
from pants.util.memo import memoized_property
from pants.util.meta import frozen_after_init
if TYPE_CHECKING:
from pants.engine.mapper import AddressFamily, AddressMapper
class Spec(ABC):
"""A specification for what Pants should operate on."""
@abstractmethod
def to_spec_string(self) -> str:
"""Return the normalized string representation of this spec."""
class AddressSpec(Spec, metaclass=ABCMeta):
"""Represents address selectors as passed from the command line.
Supports `Single` target addresses as well as `Sibling` (:) and `Descendant` (::) selector forms.
Note: In general, 'spec' should not be a user visible term, it is usually appropriate to
substitute 'address' for a spec resolved to an address, or 'address selector' if you are
referring to an unresolved spec string.
"""
class AddressFamilyResolutionError(Exception):
pass
@abstractmethod
def matching_address_families(
self, address_families_dict: Dict[str, "AddressFamily"],
) -> List["AddressFamily"]:
"""Given a dict of (namespace path) -> AddressFamily, return the values matching this address
spec.
:raises: :class:`AddressSpec.AddressFamilyResolutionError` if no address families matched this spec.
"""
@classmethod
def address_families_for_dir(
cls, address_families_dict: Dict[str, "AddressFamily"], spec_dir_path: str
) -> List["AddressFamily"]:
"""Implementation of `matching_address_families()` for address specs matching at most
one directory."""
maybe_af = address_families_dict.get(spec_dir_path, None)
if maybe_af is None:
raise cls.AddressFamilyResolutionError(
'Path "{}" does not contain any BUILD files.'
.format(spec_dir_path))
return [maybe_af]
class AddressResolutionError(Exception):
pass
@abstractmethod
def address_target_pairs_from_address_families(self, address_families: List["AddressFamily"]):
"""Given a list of AddressFamily, return (address, target) pairs matching this address spec.
:raises: :class:`SingleAddress._SingleAddressResolutionError` for resolution errors with a
:class:`SingleAddress` instance.
:raises: :class:`AddressSpec.AddressResolutionError` if no targets could be found otherwise, if
the address spec type requires a non-empty set of targets.
:return: list of (Address, Target) pairs.
"""
@classmethod
def all_address_target_pairs(cls, address_families):
"""Implementation of `address_target_pairs_from_address_families()` which does no filtering."""
addr_tgt_pairs = []
for af in address_families:
addr_tgt_pairs.extend(af.addressables.items())
return addr_tgt_pairs
@abstractmethod
def make_glob_patterns(self, address_mapper: "AddressMapper") -> List[str]:
"""Generate glob patterns matching exactly all the BUILD files this address spec covers."""
@classmethod
def globs_in_single_dir(cls, spec_dir_path: str, address_mapper: "AddressMapper") -> List[str]:
"""Implementation of `make_glob_patterns()` which only allows a single base directory."""
return [os.path.join(spec_dir_path, pat) for pat in address_mapper.build_patterns]
@dataclass(frozen=True)
class SingleAddress(AddressSpec):
"""An AddressSpec for a single address."""
directory: str
name: str
def __post_init__(self) -> None:
if self.directory is None:
raise ValueError(f'A SingleAddress must have a directory. Got: {self}')
if self.name is None:
raise ValueError(f'A SingleAddress must have a name. Got: {self}')
def to_spec_string(self) -> str:
return '{}:{}'.format(self.directory, self.name)
def matching_address_families(
self, address_families_dict: Dict[str, "AddressFamily"]
) -> List["AddressFamily"]:
return self.address_families_for_dir(address_families_dict, self.directory)
class _SingleAddressResolutionError(Exception):
def __init__(self, single_address_family: "AddressFamily", name: str) -> None:
super().__init__()
self.single_address_family = single_address_family
self.name = name
def address_target_pairs_from_address_families(self, address_families: Sequence["AddressFamily"]):
"""Return the pair for the single target matching the single AddressFamily, or error.
:raises: :class:`SingleAddress._SingleAddressResolutionError` if no targets could be found for a
:class:`SingleAddress` instance.
:return: list of (Address, Target) pairs with exactly one element.
"""
single_af = assert_single_element(address_families)
addr_tgt_pairs = [
(addr, tgt) for addr, tgt in single_af.addressables.items()
if addr.target_name == self.name
]
if len(addr_tgt_pairs) == 0:
raise self._SingleAddressResolutionError(single_af, self.name)
# There will be at most one target with a given name in a single AddressFamily.
assert(len(addr_tgt_pairs) == 1)
return addr_tgt_pairs
def make_glob_patterns(self, address_mapper: "AddressMapper") -> List[str]:
return self.globs_in_single_dir(self.directory, address_mapper)
@dataclass(frozen=True)
class SiblingAddresses(AddressSpec):
"""An AddressSpec representing all addresses located directly within the given directory."""
directory: str
def to_spec_string(self) -> str:
return f'{self.directory}:'
def matching_address_families(
self, address_families_dict: Dict[str, "AddressFamily"],
) -> List["AddressFamily"]:
return self.address_families_for_dir(address_families_dict, self.directory)
def address_target_pairs_from_address_families(self, address_families: Sequence["AddressFamily"]):
return self.all_address_target_pairs(address_families)
def make_glob_patterns(self, address_mapper: "AddressMapper") -> List[str]:
return self.globs_in_single_dir(self.directory, address_mapper)
@dataclass(frozen=True)
class DescendantAddresses(AddressSpec):
"""An AddressSpec representing all addresses located recursively under the given directory."""
directory: str
def to_spec_string(self) -> str:
return f'{self.directory}::'
def matching_address_families(
self, address_families_dict: Dict[str, "AddressFamily"],
) -> List["AddressFamily"]:
return [
af for ns, af in address_families_dict.items()
if fast_relpath_optional(ns, self.directory) is not None
]
def address_target_pairs_from_address_families(self, address_families: Sequence["AddressFamily"]):
addr_tgt_pairs = self.all_address_target_pairs(address_families)
if len(addr_tgt_pairs) == 0:
raise self.AddressResolutionError('AddressSpec {} does not match any targets.'.format(self))
return addr_tgt_pairs
def make_glob_patterns(self, address_mapper: "AddressMapper") -> List[str]:
return [os.path.join(self.directory, '**', pat) for pat in address_mapper.build_patterns]
@dataclass(frozen=True)
class AscendantAddresses(AddressSpec):
"""An AddressSpec representing all addresses located recursively _above_ the given directory."""
directory: str
def to_spec_string(self) -> str:
return f'{self.directory}^'
def matching_address_families(
self, address_families_dict: Dict[str, "AddressFamily"],
) -> List["AddressFamily"]:
return [
af for ns, af in address_families_dict.items()
if fast_relpath_optional(self.directory, ns) is not None
]
def address_target_pairs_from_address_families(self, address_families):
return self.all_address_target_pairs(address_families)
def make_glob_patterns(self, address_mapper: "AddressMapper") -> List[str]:
return [
os.path.join(f, pattern)
for pattern in address_mapper.build_patterns
for f in recursive_dirname(self.directory)
]
_specificity = {
SingleAddress: 0,
SiblingAddresses: 1,
AscendantAddresses: 2,
DescendantAddresses: 3,
type(None): 99
}
def more_specific(
address_spec1: Optional[AddressSpec], address_spec2: Optional[AddressSpec]
) -> AddressSpec:
"""Returns which of the two specs is more specific.
This is useful when a target matches multiple specs, and we want to associate it with
the "most specific" one, which will make the most intuitive sense to the user.
"""
# Note that if either of spec1 or spec2 is None, the other will be returned.
if address_spec1 is None and address_spec2 is None:
raise ValueError('internal error: both specs provided to more_specific() were None')
return cast(
AddressSpec,
address_spec1 if _specificity[type(address_spec1)] < _specificity[type(address_spec2)] else address_spec2
)
@frozen_after_init
@dataclass(unsafe_hash=True)
class AddressSpecsMatcher:
"""Contains filters for the output of a AddressSpecs match.
This class is separated out from `AddressSpecs` to allow for both stuctural equality of the `tags` and
`exclude_patterns`, and for caching of their compiled forms using `@memoized_property` (which uses
the hash of the class instance in its key, and results in a very large key when used with
`AddressSpecs` directly).
"""
tags: Tuple[str, ...]
exclude_patterns: Tuple[str, ...]
def __init__(
self, tags: Optional[Iterable[str]] = None, exclude_patterns: Optional[Iterable[str]] = None,
) -> None:
self.tags = tuple(tags or [])
self.exclude_patterns = tuple(exclude_patterns or [])
@memoized_property
def _exclude_compiled_regexps(self):
return [re.compile(pattern) for pattern in set(self.exclude_patterns or [])]
def _excluded_by_pattern(self, address):
return any(p.search(address.spec) is not None for p in self._exclude_compiled_regexps)
@memoized_property
def _target_tag_matches(self):
def filter_for_tag(tag):
return lambda t: tag in [str(t_tag) for t_tag in t.kwargs().get("tags", [])]
return wrap_filters(create_filters(self.tags, filter_for_tag))
def matches_target_address_pair(self, address, target):
"""
:param Address address: An Address to match
:param HydratedTarget target: The Target for the address.
:return: True if the given Address/HydratedTarget are included by this matcher.
"""
return self._target_tag_matches(target) and not self._excluded_by_pattern(address)
@frozen_after_init
@dataclass(unsafe_hash=True)
class AddressSpecs:
"""A collection of `AddressSpec`s representing AddressSpec subclasses, and a AddressSpecsMatcher
to filter results."""
dependencies: Tuple[AddressSpec, ...]
matcher: AddressSpecsMatcher
def __init__(
self,
dependencies: Iterable[AddressSpec],
tags: Optional[Iterable[str]] = None,
exclude_patterns: Optional[Iterable[str]] = None,
) -> None:
self.dependencies = tuple(dependencies)
self.matcher = AddressSpecsMatcher(tags=tags, exclude_patterns=exclude_patterns)
def __iter__(self) -> Iterator[AddressSpec]:
return iter(self.dependencies)
class FilesystemSpec(Spec, metaclass=ABCMeta):
pass
@dataclass(frozen=True)
class FilesystemLiteralSpec(FilesystemSpec):
"""A literal file name, e.g. `foo.py`."""
file: str
def to_spec_string(self) -> str:
return self.file
@dataclass(frozen=True)
class FilesystemGlobSpec(FilesystemSpec):
"""A spec with a glob or globs, e.g. `*.py` and `**/*.java`."""
glob: str
def to_spec_string(self) -> str:
return self.glob
@dataclass(frozen=True)
class FilesystemIgnoreSpec(FilesystemSpec):
"""A spec to ignore certain files or globs."""
glob: str
def __post_init__(self) -> None:
if self.glob.startswith("!"):
raise ValueError(f"The `glob` for {self} should not start with `!`.")
def to_spec_string(self) -> str:
return f"!{self.glob}"
class FilesystemSpecs(Collection[FilesystemSpec]):
@memoized_property
def includes(self) -> Tuple[Union[FilesystemLiteralSpec, FilesystemGlobSpec], ...]:
return tuple(
spec for spec in self.dependencies
if isinstance(spec, (FilesystemGlobSpec, FilesystemLiteralSpec))
)
@memoized_property
def ignores(self) -> Tuple[FilesystemIgnoreSpec, ...]:
return tuple(spec for spec in self.dependencies if isinstance(spec, FilesystemIgnoreSpec))
@staticmethod
def _generate_path_globs(specs: Iterable[FilesystemSpec]) -> PathGlobs:
return PathGlobs(
globs=(s.to_spec_string() for s in specs),
# We error on unmatched globs for consistency with unmatched address specs. This also
# ensures that scripts don't silently do the wrong thing.
glob_match_error_behavior=GlobMatchErrorBehavior.error,
# We validate that _every_ glob is valid.
conjunction=GlobExpansionConjunction.all_match,
description_of_origin="file arguments",
)
def path_globs_for_spec(
self, spec: Union[FilesystemLiteralSpec, FilesystemGlobSpec]
) -> PathGlobs:
"""Generate PathGlobs for the specific spec, automatically including the instance's
FilesystemIgnoreSpecs.
"""
return self._generate_path_globs(specs=(spec, *self.ignores))
def to_path_globs(self) -> PathGlobs:
"""Generate a single PathGlobs for the instance."""
return self._generate_path_globs(specs=(*self.includes, *self.ignores))
class AmbiguousSpecs(Exception):
pass
@dataclass(frozen=True)
class Specs:
address_specs: AddressSpecs
filesystem_specs: FilesystemSpecs
def __post_init__(self) -> None:
if self.address_specs.dependencies and self.filesystem_specs.dependencies:
raise AmbiguousSpecs(
"Both address specs and filesystem specs given. Please use only one type of spec.\n\n"
f"Address specs: {', '.join(spec.to_spec_string() for spec in self.address_specs)}\n"
f"Filesystem specs: {', '.join(spec.to_spec_string() for spec in self.filesystem_specs)}"
)
@property
def provided_specs(self) -> Union[AddressSpecs, FilesystemSpecs]:
"""Return whichever types of specs was provided by the user.
It is guaranteed that there will only ever be AddressSpecs or FilesystemSpecs, but not both,
through validation in the constructor."""
return (
self.filesystem_specs
if self.filesystem_specs.dependencies
else self.address_specs
)
| [((3945, 3967), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (3954, 3967), False, 'from dataclasses import dataclass\n'), ((5860, 5882), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (5869, 5882), False, 'from dataclasses import dataclass\n'), ((6616, 6638), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (6625, 6638), False, 'from dataclasses import dataclass\n'), ((7627, 7649), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (7636, 7649), False, 'from dataclasses import dataclass\n'), ((9364, 9391), 'dataclasses.dataclass', 'dataclass', ([], {'unsafe_hash': '(True)'}), '(unsafe_hash=True)\n', (9373, 9391), False, 'from dataclasses import dataclass\n'), ((10984, 11011), 'dataclasses.dataclass', 'dataclass', ([], {'unsafe_hash': '(True)'}), '(unsafe_hash=True)\n', (10993, 11011), False, 'from dataclasses import dataclass\n'), ((11674, 11696), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (11683, 11696), False, 'from dataclasses import dataclass\n'), ((11858, 11880), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (11867, 11880), False, 'from dataclasses import dataclass\n'), ((12061, 12083), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (12070, 12083), False, 'from dataclasses import dataclass\n'), ((13921, 13943), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (13930, 13943), False, 'from dataclasses import dataclass\n'), ((5287, 5326), 'pants.util.collections.assert_single_element', 'assert_single_element', (['address_families'], {}), '(address_families)\n', (5308, 5326), False, 'from pants.util.collections import assert_single_element\n'), ((3867, 3899), 'os.path.join', 'os.path.join', (['spec_dir_path', 'pat'], {}), '(spec_dir_path, pat)\n', (3879, 3899), False, 'import os\n'), ((7542, 7581), 'os.path.join', 'os.path.join', (['self.directory', '"""**"""', 'pat'], {}), "(self.directory, '**', pat)\n", (7554, 7581), False, 'import os\n'), ((8365, 8389), 'os.path.join', 'os.path.join', (['f', 'pattern'], {}), '(f, pattern)\n', (8377, 8389), False, 'import os\n'), ((10167, 10186), 're.compile', 're.compile', (['pattern'], {}), '(pattern)\n', (10177, 10186), False, 'import re\n'), ((10562, 10603), 'pants.util.filtering.create_filters', 'create_filters', (['self.tags', 'filter_for_tag'], {}), '(self.tags, filter_for_tag)\n', (10576, 10603), False, 'from pants.util.filtering import create_filters, wrap_filters\n'), ((8456, 8489), 'pants.util.dirutil.recursive_dirname', 'recursive_dirname', (['self.directory'], {}), '(self.directory)\n', (8473, 8489), False, 'from pants.util.dirutil import fast_relpath_optional, recursive_dirname\n'), ((7062, 7103), 'pants.util.dirutil.fast_relpath_optional', 'fast_relpath_optional', (['ns', 'self.directory'], {}), '(ns, self.directory)\n', (7083, 7103), False, 'from pants.util.dirutil import fast_relpath_optional, recursive_dirname\n'), ((8073, 8114), 'pants.util.dirutil.fast_relpath_optional', 'fast_relpath_optional', (['self.directory', 'ns'], {}), '(self.directory, ns)\n', (8094, 8114), False, 'from pants.util.dirutil import fast_relpath_optional, recursive_dirname\n')] |
GordiigPinny/ApiRequesters | Mock/MockRequesterMixin.py | aeb36c7b7b5237c3a74dae6ced7c6141df729ab5 | import json
import requests
from enum import Enum
from typing import Dict
from ..exceptions import JsonDecodeError, UnexpectedResponse, RequestError, BaseApiRequestError
class MockRequesterMixin:
"""
Набор методов для моков реквестеров
"""
class ERRORS(Enum):
ERROR_TOKEN = 'error'
BAD_CODE_400_TOKEN = 'badcode400'
BAD_CODE_401_TOKEN = 'badcode401'
BAD_CODE_403_TOKEN = 'badcode403'
BAD_CODE_404_TOKEN = 'badcode404'
class ERRORS_KEYS(Enum):
AUTH = 'auth_error'
APP_AUTH = 'app_auth_error'
USERS = 'users_error'
AWARDS = 'awards_error'
PLACES = 'places_error'
STATS = 'stats_error'
MEDIA = 'media_error'
class ROLES(Enum):
ANON = 'anon'
USER = 'user'
MODERATOR = 'moderator'
SUPERUSER = 'superuser'
@classmethod
def get_all_roles_tuple(cls):
return tuple([x.value for x in cls.ROLES])
@classmethod
def get_all_registered_roles_tuple(cls):
all_roles = list(cls.get_all_roles_tuple())
all_roles.remove(cls.ROLES.ANON.value)
return tuple(all_roles)
@classmethod
def get_all_errors_tuple(cls):
return tuple([x.value for x in cls.ERRORS])
def get_token_dict(self, token: str) -> Dict[str, str]:
return json.loads(token)
def get_role_part(self, token: str) -> str:
return self.get_token_dict(token)['role']
def get_auth_error_part(self, token: str) -> str:
return self.get_token_dict(token)[self.ERRORS_KEYS.AUTH.value]
def get_app_auth_error_part(self, token: str) -> str:
return self.get_token_dict(token)[self.ERRORS_KEYS.APP_AUTH.value]
def get_awards_error_part(self, token: str) -> str:
return self.get_token_dict(token)[self.ERRORS_KEYS.AWARDS.value]
def get_places_error_part(self, token: str) -> str:
return self.get_token_dict(token)[self.ERRORS_KEYS.PLACES.value]
def get_users_error_part(self, token: str) -> str:
return self.get_token_dict(token)[self.ERRORS_KEYS.USERS.value]
def get_stats_error_part(self, token: str) -> str:
return self.get_token_dict(token)[self.ERRORS_KEYS.STATS.value]
def get_media_error_part(self, token: str) -> str:
return self.get_token_dict(token)[self.ERRORS_KEYS.MEDIA.value]
# Этот метод оверрайдить во всех классах-моках для выборки нужной ошибки из токена
def get_mine_error_part(self, token):
raise NotImplementedError
# Этот метод оверрайдить во всех классах-моках для отправки джосн-ответа
def get_object_on_success(self, token=None):
raise NotImplementedError
# Этот оверрайдить, если дсоны на GET/POST отличаются
def get_list_object_on_success(self, token=None):
return self.get_object_on_success(token)
def get_coded_response(self, code: int) -> requests.Response:
resp = requests.Response()
resp.status_code = code
return resp
def raise_coded_error(self, code: int):
resp = self.get_coded_response(code)
raise UnexpectedResponse(resp)
def _handle_errors(self, token):
"""
Обработка ошибок, переданных в с токеном
"""
token = self.get_mine_error_part(token)
if token == self.ERRORS.ERROR_TOKEN.value:
raise BaseApiRequestError()
elif token == self.ERRORS.BAD_CODE_400_TOKEN.value:
self.raise_coded_error(400)
elif token == self.ERRORS.BAD_CODE_401_TOKEN.value:
self.raise_coded_error(401)
elif token == self.ERRORS.BAD_CODE_403_TOKEN.value:
self.raise_coded_error(403)
elif token == self.ERRORS.BAD_CODE_404_TOKEN.value:
self.raise_coded_error(404)
def _mock_token_handler(self, token: str, list_object=False):
"""
Базовый метод обработки моковых токенов
"""
self._handle_errors(token)
if list_object:
return requests.Response(), self.get_list_object_on_success(token)
else:
return requests.Response(), self.get_object_on_success(token)
| [((1334, 1351), 'json.loads', 'json.loads', (['token'], {}), '(token)\n', (1344, 1351), False, 'import json\n'), ((2924, 2943), 'requests.Response', 'requests.Response', ([], {}), '()\n', (2941, 2943), False, 'import requests\n'), ((3992, 4011), 'requests.Response', 'requests.Response', ([], {}), '()\n', (4009, 4011), False, 'import requests\n'), ((4085, 4104), 'requests.Response', 'requests.Response', ([], {}), '()\n', (4102, 4104), False, 'import requests\n')] |
vkleen/skidl | tests/test_parse.py | f09200c978a39c127e292ef71b8ff89c1a3c0f5a | # -*- coding: utf-8 -*-
# The MIT License (MIT) - Copyright (c) 2016-2021 Dave Vandenbout.
import pytest
from skidl import netlist_to_skidl
from .setup_teardown import get_filename, setup_function, teardown_function
def test_parser_1():
netlist_to_skidl(get_filename("Arduino_Uno_R3_From_Scratch.net"))
| [] |
pkbullock/RaspberryPi | Projects/envirohat-monitor/clear-screen.py | 1c8e83566e97f65fe530d8d43293f4b26c015d0d | #!/usr/bin/env python3
import ST7735
import sys
st7735 = ST7735.ST7735(
port=0,
cs=1,
dc=9,
backlight=12,
rotation=270,
spi_speed_hz=10000000
)
# Reset the display
st7735.begin()
st7735.reset()
st7735.set_backlight(0)
print "\nDone."
# Exit cleanly
sys.exit(0) | [] |
carlosdenner/business_atlas | Scripts/nominatintest.py | 8f95bbd07384baa6c5e51776690103e418b3875e |
from geopy.geocoders import Nominatim
from requests.models import LocationParseError
geolocator = Nominatim(user_agent="geoapiExercises")
Latitude = 25.594095
Longitude = 85.137566
def location(Latitude, Longitude):
lat = str(Latitude)
long = str(Longitude)
print(lat + long)
local = lat + "," + long
print(local)
if(len(local) > 3):
location = geolocator.reverse(local)
locStr = str(location)
print(locStr)
splitted = locStr.split(',')
country = splitted[len(splitted) - 1]
print(country)
print("==============país==============")
return country
else:
return ""
location(Latitude, Longitude)
# Display
| [((100, 139), 'geopy.geocoders.Nominatim', 'Nominatim', ([], {'user_agent': '"""geoapiExercises"""'}), "(user_agent='geoapiExercises')\n", (109, 139), False, 'from geopy.geocoders import Nominatim\n')] |
cristilianojr/JOKENPOH | gamesystem.py | 604970d4f3cfbcc5f851e993af72d3bc86926ae5 | import random
from tkinter import PhotoImage
"""
Esse arquivo define os estados do game
"""
def ia_chocer():
"""IA faz a escolha de um numero aleatório"""
posibility = ['rock', 'paper', 'scissor']
value = posibility[random.randint(0, 2)]
return value
def battle_verification(player_choice, ia_choice):
state_victoryorlose = ''
if player_choice == 'rock':
if ia_choice == 'rock':
state_victoryorlose = 'draw'
elif ia_choice == 'scissor':
state_victoryorlose = 'victory'
elif ia_choice == 'paper':
state_victoryorlose = 'defeat'
elif player_choice == 'scissor':
if ia_choice == 'rock':
state_victoryorlose = 'defeat'
elif ia_choice == 'scissor':
state_victoryorlose = 'draw'
elif ia_choice == 'paper':
state_victoryorlose = 'victory'
elif player_choice == 'paper':
if ia_choice == 'rock':
state_victoryorlose = 'victory'
elif ia_choice == 'scissor':
state_victoryorlose = 'defeat'
elif ia_choice == 'paper':
state_victoryorlose = 'draw'
return state_victoryorlose
| [((233, 253), 'random.randint', 'random.randint', (['(0)', '(2)'], {}), '(0, 2)\n', (247, 253), False, 'import random\n')] |
mister-bailey/MagNET | train/filelocks.py | 4f75a6e2fe34eabf455d13338f318e3dc4bf0295 | from filelock import FileLock, Timeout
import os
import time
class ProcessFileLock(FileLock):
"""
FileLock that is unique per path in each process (for, eg., reentrance)
"""
locks = {}
def __new__(cls, path, *args, **kwargs):
if path in ProcessFileLock.locks:
return ProcessFileLock.locks[path]
else:
lock = super().__new__(cls, path, *args, **kwargs)
lock.__new_init__(path, *args, **kwargs)
ProcessFileLock.locks[path] = lock
return lock
def __new_init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __init__(self, *args, **kwargs):
pass
class ExplosiveFileLock(ProcessFileLock):
def acquire(self, *args, **kwargs):
r = super().acquire(*args, **kwargs)
if self._lock_counter > 1:
raise BlockingIOError(f"Process attempted to reacquire lock for {self._lock_file}")
return r
class HistoriesLock(FileLock):
def __init__(self, dir, ensemble=None):
super().__init__(os.path.join(dir, "histories.lock"))
self.ensemble = ensemble
def release(self, **kwargs):
super().release()
if self.ensemble and self._lock_counter == 0:
self.ensemble.close_histories()
class SamplesLock(FileLock):
def __init__(self, dir, ensemble=None):
super().__init__(os.path.join(dir, "samples.lock"))
self.ensemble = ensemble
def release(self, **kwargs):
if self.ensemble and self._lock_counter == 1:
self.ensemble._test_samples.close()
self.ensemble._test_samples = None
super().release()
def __enter__(self):
print("Acquiring samples lock... ", end='')
super().__enter__()
if self.ensemble._test_samples is None:
from sample_hyperparameters import TrainableSampleGenerator
self.ensemble._test_samples = TrainableSampleGenerator(self.ensemble.config.exploration.sample_file, configs=self.ensemble.config_files, stub=self.ensemble.stub)
print("Done.")
return self.ensemble._test_samples
class ExistLock:
"""
Locks on the existence of the given file.
No guarantees of atomicity!
Unique per process, for reentry
"""
locks={}
def __new__(cls, path, *args, **kwargs):
if path in ExistLock.locks:
lock = ExistLock.locks[path]
#print(f"Reloading ExistLock('{path}')")
#print(f" Lock counter = {lock._lock_counter}")
return lock
else:
#print(f"Creating new ExistLock('{path}')")
lock = super().__new__(cls)
lock.__new_init__(path, *args, **kwargs)
ExistLock.locks[path] = lock
return lock
def __new_init__(self, path, block=True, timeout=None, polling_interval=.05):
self.path = path
if not block:
timeout == 0.0
else:
self.timeout=timeout
self.polling_interval=polling_interval
self._lock_counter = 0
def acquire(self, block=None, timeout=None):
"""
Not atomic. Should probably happen within the context of an
atomic lock.
"""
if block == False:
timeout = 0.0
if timeout is None:
timeout = self.timeout
#print(f"Trying to acquire ExistLock('{self.path}')...")
#print(f" Lock counter = {self._lock_counter}")
start_time = time.time()
while os.path.isfile(self.path):
if self._lock_counter > 0:
self._lock_counter += 1
#print(f"Acquired, lock counter = {self._lock_counter}")
return True
if timeout is None or time.time() - start_time < timeout:
time.sleep(self.polling_interval)
else:
return False
with open(self.path, 'w'):
self._lock_counter = 1
#print(f"Acquired, lock counter = {self._lock_counter}")
return True
def release(self):
self._lock_counter = min(0, self._lock_counter - 1)
if self._lock_counter == 0 and os.path.isfile(self.path):
os.remove(self.path)
def __enter__(self):
if self.acquire():
return self
else:
raise Timeout(f"Failed to acquire ExistLock for file {self.path}")
def __exit__(self, type, value, traceback):
self.release()
| [((3654, 3665), 'time.time', 'time.time', ([], {}), '()\n', (3663, 3665), False, 'import time\n'), ((3681, 3706), 'os.path.isfile', 'os.path.isfile', (['self.path'], {}), '(self.path)\n', (3695, 3706), False, 'import os\n'), ((1113, 1148), 'os.path.join', 'os.path.join', (['dir', '"""histories.lock"""'], {}), "(dir, 'histories.lock')\n", (1125, 1148), False, 'import os\n'), ((1454, 1487), 'os.path.join', 'os.path.join', (['dir', '"""samples.lock"""'], {}), "(dir, 'samples.lock')\n", (1466, 1487), False, 'import os\n'), ((2025, 2160), 'sample_hyperparameters.TrainableSampleGenerator', 'TrainableSampleGenerator', (['self.ensemble.config.exploration.sample_file'], {'configs': 'self.ensemble.config_files', 'stub': 'self.ensemble.stub'}), '(self.ensemble.config.exploration.sample_file,\n configs=self.ensemble.config_files, stub=self.ensemble.stub)\n', (2049, 2160), False, 'from sample_hyperparameters import TrainableSampleGenerator\n'), ((4371, 4396), 'os.path.isfile', 'os.path.isfile', (['self.path'], {}), '(self.path)\n', (4385, 4396), False, 'import os\n'), ((4411, 4431), 'os.remove', 'os.remove', (['self.path'], {}), '(self.path)\n', (4420, 4431), False, 'import os\n'), ((4559, 4619), 'filelock.Timeout', 'Timeout', (['f"""Failed to acquire ExistLock for file {self.path}"""'], {}), "(f'Failed to acquire ExistLock for file {self.path}')\n", (4566, 4619), False, 'from filelock import FileLock, Timeout\n'), ((3980, 4013), 'time.sleep', 'time.sleep', (['self.polling_interval'], {}), '(self.polling_interval)\n', (3990, 4013), False, 'import time\n'), ((3927, 3938), 'time.time', 'time.time', ([], {}), '()\n', (3936, 3938), False, 'import time\n')] |
jnthn/intellij-community | python/testData/quickFixes/PyRenameElementQuickFixTest/renameAwaitClassInPy36_after.py | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | class A_NEW_NAME(object):
pass | [] |
emissible/emissilbe | speedcom/tests/__init__.py | 5537e787ccb883a101d2d40b38d480e257ac9755 | #from . import context
#from . import test_NNModels
#from . import test_data_extract
#from . import test_speedcom
#from . import test_utilities
| [] |
Sanguet/todo-challenge | todo/management/serializers/tasks.py | 8eabc02081e7ce6b33408558d4a4a39edee3944c | # Django REST Framework
from rest_framework import serializers
# Model
from todo.management.models import Task
# Utils
from todo.utils.tasks import TaskMetrics
from todo.utils.serializer_fields import CompleteNameUser
class TaskModelSerializer(serializers.ModelSerializer):
"""Modelo serializer del circulo"""
user = CompleteNameUser(many=False)
class Meta:
"""Meta class"""
model = Task
fields = (
'id', 'user', 'title',
'date_to_finish', 'is_finalize',
'description', 'created',
'priority', 'color'
)
read_only_fields = (
'id', 'user',
'created',
)
def create(self, data):
"""Creacion de la tarea"""
# Sacamos los datos que ya tenemos en el context
user = self.context['request'].user
data['is_finalize'] = False
# Creamos la tarea
task = Task.objects.create(
user=user,
**data
)
# Puntos al perfil
TaskMetrics(action='Create', user=user)
return task
def update(self, instance, data):
"""Actualizacion de la tarea"""
# Extraemos el user del contexto y mandamos la funcion update
user = self.context['request'].user
new_is_finalize = data.get('is_finalize', instance.is_finalize)
if new_is_finalize != instance.is_finalize:
TaskMetrics(action='Update', user=user, is_finalize=new_is_finalize)
# Actualizamos los datos normales
super(TaskModelSerializer, self).update(instance, data)
return instance
| [((330, 358), 'todo.utils.serializer_fields.CompleteNameUser', 'CompleteNameUser', ([], {'many': '(False)'}), '(many=False)\n', (346, 358), False, 'from todo.utils.serializer_fields import CompleteNameUser\n'), ((936, 974), 'todo.management.models.Task.objects.create', 'Task.objects.create', ([], {'user': 'user'}), '(user=user, **data)\n', (955, 974), False, 'from todo.management.models import Task\n'), ((1045, 1084), 'todo.utils.tasks.TaskMetrics', 'TaskMetrics', ([], {'action': '"""Create"""', 'user': 'user'}), "(action='Create', user=user)\n", (1056, 1084), False, 'from todo.utils.tasks import TaskMetrics\n'), ((1436, 1504), 'todo.utils.tasks.TaskMetrics', 'TaskMetrics', ([], {'action': '"""Update"""', 'user': 'user', 'is_finalize': 'new_is_finalize'}), "(action='Update', user=user, is_finalize=new_is_finalize)\n", (1447, 1504), False, 'from todo.utils.tasks import TaskMetrics\n')] |
Sean-Ker/data_homework | outlier_detector.py | 5f289c692690724ee5973683c53e83299958b270 | import numpy as np
import pandas as pd
from sklearn.decomposition import PCA
'''
A function that detects outliers, where k is a tandard deviation threshold hyperparameter preferablly (2, 2.5, 3).
The algo could handle multivariable data frames with any number of features d.
For that manner, it first reduces the dimensionality to 2 using PCA, makes sure that the matrix is positive definite and calculates the Mahalanobis Distance with a threshold value.
Returns a series of n rows back.
'''
def outlier_detector(data, k=2.5):
# Calculate Principal Component Analysis
pca = PCA(n_components=data.shape[1], svd_solver='full')
df = pd.DataFrame(pca.fit_transform(
data), index=data.index, columns=data.columns)
# Calculate covariance and its inverse matrices
cov_matrix = np.cov(df.values, rowvar=False)
inv_cov = np.linalg.inv(cov_matrix)
mean = df.values.mean(axis=0)
# Check matrices are positive definite: https://en.wikipedia.org/wiki/Definiteness_of_a_matrix
assert is_pos_def(cov_matrix) and is_pos_def(inv_cov)
# Calculate Mahalanobis Distance https://en.wikipedia.org/wiki/Mahalanobis_distance
md = mahalanobis_dist(inv_cov, mean, df.values, verbose=False)
threshold = np.mean(md) * k
# res = pd.DataFrame(index=data.index,columns=data.columns)
return data[md > threshold]
# https://www.youtube.com/watch?v=spNpfmWZBmg&t=0s
def mahalanobis_dist(inv_cov_matrix, mean_distr, data, verbose=False):
diff = data - mean_distr
md = []
for i in range(len(diff)):
md.append(np.sqrt(diff[i].dot(inv_cov_matrix).dot(diff[i])))
return np.array(md)
# Check that matrix is positive definite
def is_pos_def(A):
if np.allclose(A, A.T):
try:
np.linalg.cholesky(A)
return True
except np.linalg.LinAlgError:
return False
else:
return False
| [((584, 634), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': 'data.shape[1]', 'svd_solver': '"""full"""'}), "(n_components=data.shape[1], svd_solver='full')\n", (587, 634), False, 'from sklearn.decomposition import PCA\n'), ((801, 832), 'numpy.cov', 'np.cov', (['df.values'], {'rowvar': '(False)'}), '(df.values, rowvar=False)\n', (807, 832), True, 'import numpy as np\n'), ((847, 872), 'numpy.linalg.inv', 'np.linalg.inv', (['cov_matrix'], {}), '(cov_matrix)\n', (860, 872), True, 'import numpy as np\n'), ((1627, 1639), 'numpy.array', 'np.array', (['md'], {}), '(md)\n', (1635, 1639), True, 'import numpy as np\n'), ((1708, 1727), 'numpy.allclose', 'np.allclose', (['A', 'A.T'], {}), '(A, A.T)\n', (1719, 1727), True, 'import numpy as np\n'), ((1237, 1248), 'numpy.mean', 'np.mean', (['md'], {}), '(md)\n', (1244, 1248), True, 'import numpy as np\n'), ((1754, 1775), 'numpy.linalg.cholesky', 'np.linalg.cholesky', (['A'], {}), '(A)\n', (1772, 1775), True, 'import numpy as np\n')] |
nishio/atcoder | arc113/b.py | 8db36537b5d8580745d5f98312162506ad7d7ab4 | # included from snippets/main.py
def debug(*x, msg=""):
import sys
print(msg, *x, file=sys.stderr)
def solve(SOLVE_PARAMS):
pass
def main():
A, B, C = map(int, input().split())
doubling = [B % 20]
for i in range(32):
doubling.append(
(doubling[-1] ** 2) % 20
)
BC = 1
for i in range(32):
if C % 2:
BC *= doubling[i]
BC %= 20
C //= 2
if BC == 0:
BC = 20
ret = (A % 10) ** BC
ret %= 10
print(ret)
# tests
T1 = """
4 3 2
"""
TEST_T1 = """
>>> as_input(T1)
>>> main()
4
"""
T2 = """
1 2 3
"""
TEST_T2 = """
>>> as_input(T2)
>>> main()
1
"""
T3 = """
3141592 6535897 9323846
"""
TEST_T3 = """
>>> as_input(T3)
>>> main()
2
"""
T4 = """
2 10 1
"""
TEST_T4 = """
>>> as_input(T4)
>>> main()
4
"""
T5 = """
2 20 1
"""
TEST_T5 = """
>>> as_input(T5)
>>> main()
6
"""
def _test():
import doctest
doctest.testmod()
g = globals()
for k in sorted(g):
if k.startswith("TEST_"):
print(k)
doctest.run_docstring_examples(g[k], g, name=k)
def as_input(s):
"use in test, use given string as input file"
import io
f = io.StringIO(s.strip())
g = globals()
g["input"] = lambda: bytes(f.readline(), "ascii")
g["read"] = lambda: bytes(f.read(), "ascii")
if __name__ == "__main__":
import sys
input = sys.stdin.buffer.readline
read = sys.stdin.buffer.read
sys.setrecursionlimit(10 ** 6)
if sys.argv[-1] == "-t":
print("testing")
_test()
sys.exit()
main()
sys.exit()
# end of snippets/main.py
| [((929, 946), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (944, 946), False, 'import doctest\n'), ((1458, 1488), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(10 ** 6)'], {}), '(10 ** 6)\n', (1479, 1488), False, 'import sys\n'), ((1593, 1603), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1601, 1603), False, 'import sys\n'), ((1567, 1577), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1575, 1577), False, 'import sys\n'), ((1056, 1103), 'doctest.run_docstring_examples', 'doctest.run_docstring_examples', (['g[k]', 'g'], {'name': 'k'}), '(g[k], g, name=k)\n', (1086, 1103), False, 'import doctest\n')] |
ezan2000/Cssi_2018 | pythonG/objects.py | 2385e9f4557c1a2aa642e21d42dcc935e24c88c3 | ezan = {
'name': 'ezan',
'age': 18,
'hair': 'brown',
'cool': True ,
}
print(ezan)
class Person(object): #use class to make object
def __init__(
self, name, age ,hair, color, hungry) : #initialize
#first object inside of a class is self
self.name = 'ezan'
self.age = 18
self.hair = 'brown'
self.cool = True
def eat(self,food):
print("EAT {f}".format(f = food))
self.hungry = food
def play(self, game):
print("Play {p}".format(p = game))
self.play = game
def birth(self,person):
kids = Person(name = " lail", age = 18, hair = 'black', color = 'blue', hungry = True)
ezan = Person( name = "ezan", age = 18, hair = "black", cool = True, hungry = False)
print(ezan.name)
print('I am hungry')
Austin = Person(name = 'austin', age = 18, hair = "Shrek", cool = False, hungry = True)
| [] |
pauvrepetit/leetcode | 62/main.py | 6ad093cf543addc4dfa52d72a8e3c0d05a23b771 | # 62. 不同路径
# 组合数,杨辉三角
yanghui = [[0 for i in range(202)] for j in range(202)]
def comb(n, k):
if yanghui[n][k] == 0:
yanghui[n][k] = (comb(n-1, k-1) + comb(n-1, k))
return yanghui[n][k]
class Solution:
def uniquePaths(self, m: int, n: int) -> int:
for i in range(202):
yanghui[i][0] = 1
yanghui[i][i] = 1
return comb(m+n-2, min(m, n)-1) | [] |
romainledru/GermanOK | GermanOK/run.py | 77bc86de0eabbd3d7413382a288fea286d608540 | from Pages import *
app = App()
app.mainloop()
| [] |
JohnnyPeng18/cauldron | cauldron/cli/server/routes/ui_statuses.py | 09120c2a4cef65df46f8c0c94f5d79395b3298cd | import flask
from cauldron.cli.server import run as server_runner
from cauldron.ui import arguments
from cauldron.ui import statuses
@server_runner.APPLICATION.route('/ui-status', methods=['POST'])
def ui_status():
args = arguments.from_request()
last_timestamp = args.get('last_timestamp', 0)
force = args.get('force', False)
results = statuses.get_status(last_timestamp, force)
return flask.jsonify(results)
| [((137, 200), 'cauldron.cli.server.run.APPLICATION.route', 'server_runner.APPLICATION.route', (['"""/ui-status"""'], {'methods': "['POST']"}), "('/ui-status', methods=['POST'])\n", (168, 200), True, 'from cauldron.cli.server import run as server_runner\n'), ((229, 253), 'cauldron.ui.arguments.from_request', 'arguments.from_request', ([], {}), '()\n', (251, 253), False, 'from cauldron.ui import arguments\n'), ((356, 398), 'cauldron.ui.statuses.get_status', 'statuses.get_status', (['last_timestamp', 'force'], {}), '(last_timestamp, force)\n', (375, 398), False, 'from cauldron.ui import statuses\n'), ((410, 432), 'flask.jsonify', 'flask.jsonify', (['results'], {}), '(results)\n', (423, 432), False, 'import flask\n')] |
Jaram2019/minwoo | google_search.py | d98ce8a84675281e237368cbe97d8a2120ce5840 | import requests
from bs4 import BeautifulSoup
import re
rq = requests.get("https://play.google.com/store/apps/category/GAME_MUSIC?hl=ko")
rqctnt = rq.content
soup = BeautifulSoup(rqctnt,"html.parser")
soup = soup.find_all(attrs={'class':'title'})
blacklsit = ["앱","영화/TV","음악","도서","기기","엔터테인먼트","음악"]
for link in soup:
if link.text.strip() in blacklsit:
pass
else:
print(link.text.strip())
| [((62, 138), 'requests.get', 'requests.get', (['"""https://play.google.com/store/apps/category/GAME_MUSIC?hl=ko"""'], {}), "('https://play.google.com/store/apps/category/GAME_MUSIC?hl=ko')\n", (74, 138), False, 'import requests\n'), ((166, 202), 'bs4.BeautifulSoup', 'BeautifulSoup', (['rqctnt', '"""html.parser"""'], {}), "(rqctnt, 'html.parser')\n", (179, 202), False, 'from bs4 import BeautifulSoup\n')] |
bbinet/PyGall | pygall/tests/test_photos.py | 4d83165e50ca927d664aa6b7b716eb8f484c3cd6 | from unittest import TestCase
from pyramid import testing
class PhotosTests(TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
| [((134, 149), 'pyramid.testing.setUp', 'testing.setUp', ([], {}), '()\n', (147, 149), False, 'from pyramid import testing\n'), ((183, 201), 'pyramid.testing.tearDown', 'testing.tearDown', ([], {}), '()\n', (199, 201), False, 'from pyramid import testing\n')] |
alenasf/AutomateTheBoringStuff | Chapter_4/lists_data_type.py | 041e56221eb98d9893c24d22497034e6344c0490 | #Negative Indexes
spam = ['cat', 'bat', 'rat', 'elephant']
spam[-1] # elepant
spam[-3] # bat
# Getting a List from another List with Slices
spam = ['cat', 'bat', 'rat', 'elephant']
spam[0:4] # ['cat', 'bat', 'rat', 'elephant']
spam[1:3] # ['bat', 'rat']
spam[0:-1] # ['cat', 'bat', 'rat']
spam[:2] # ['cat', 'bat']
spam[1:] # ['bat', 'rat', 'elephant']
spam[:] # ['cat', 'bat', 'rat', 'elephant']
# Getting a List's length with the len() Function
spam = ['cat', 'dog', 'moose']
len(spam) # 3
# Changing Values in a List with Indexes
spam = ['cat', 'bat', 'rat', 'elephant']
spam[1] = 'aardvark'
spam # ['cat', 'aardvark', 'rat', 'elephant']
spam[2]=spam[1]
spam # ['cat', 'aardvark', 'aardvark', 'elephant']
spam[-1] = 12345
spam # ['cat', 'aardvark', 'aardvark', 12345]
# List Concatenation and List Replication
[1, 2, 3] + ['A', 'B', 'C']
# [1, 2, 3, 'A', 'B', 'C']
['X', 'Y', 'Z'] * 3
#['X', 'Y', 'Z', 'X', 'Y', 'Z', 'X', 'Y', 'Z']
spam = [1, 2, 3]
spam = spam + ['A', 'B', 'C']
# [1, 2, 3, 'A', 'B', 'C']
# Removing Values From Lists with del Statements
spam = ['cat', 'bat', 'rat', 'elephant']
del spam[2]
spam # ['cat', 'bat', 'elephant']
del spam[2]
spam # ['cat', 'bat']
# Using for Loops with Lists
for i in range(4):
print(i)
supplies = ['pens', 'staplers', 'flamethrowers', 'binders']
for i in range(len(supplies)):
print('Index ' + str(i) + ' in supplies is: ' + supplies[i])
# The in and not in Operators
'howdy' in ['hello', 'hi', 'howdy', 'heyas'] # True
spam = ['hello', 'hi', 'howdy', 'heyas']
'cat' in spam # False
'howdy' not in spam # False
# Type in a pet name and then check wether the name is in a list of pets
myPets = ['Zophie', 'Pooka', 'Fat-tail']
print('Enter a pet name:')
name = input()
if name not in myPets:
print('I do not have a pet named ' + name)
else:
print(name + ' is my pet.')
# The Multiple Assignment Trick
cat = ['fat', 'gray', 'loud']
size = cat[0]
color = cat[1]
disposition = cat[2]
# type this line
cat = ['fat', 'gray', 'loud']
size, color, disposition = cat
# Using the enumerate() Function with Lists
# enumerate() Function is useful when you need both the item and item's index in loop's block
supplies = ['pens', 'staplers', 'flamethrowers', 'binders']
for index, item in enumerate(supplies):
print('Index ' + str(index) + ' in supplies is: ' + item)
# Using the random.choice() and random.shuffle() Function with Lists
import random
pets = ['Dog', 'Cat', 'Moose']
random.choice(pets)
random.choice(pets)
random.choice(pets)
# random.choice(someList) to be a shorter form of someList[random.randint(0, len(someList)-1)]
import random
people = ['Alice', 'Bob', 'Carol', 'David']
random.shuffle(people)
people # ['Bob', 'Carol', 'David', 'Alice']
random.shuffle(people)
people # random list of people
#Augmented Assignment Operators
spam += 1 # spam = spam + 1
spam -= 1 # spam = spam - 1
spam *= 1 # spam = spam * 1
spam /= 1 #spam = spam / 1
spam %= 1 #spam = spam % 1
| [((2482, 2501), 'random.choice', 'random.choice', (['pets'], {}), '(pets)\n', (2495, 2501), False, 'import random\n'), ((2502, 2521), 'random.choice', 'random.choice', (['pets'], {}), '(pets)\n', (2515, 2521), False, 'import random\n'), ((2522, 2541), 'random.choice', 'random.choice', (['pets'], {}), '(pets)\n', (2535, 2541), False, 'import random\n'), ((2697, 2719), 'random.shuffle', 'random.shuffle', (['people'], {}), '(people)\n', (2711, 2719), False, 'import random\n'), ((2764, 2786), 'random.shuffle', 'random.shuffle', (['people'], {}), '(people)\n', (2778, 2786), False, 'import random\n')] |
chuhaovince/Web-Design-Challenge | WebVisualizations/data.py | 1826a0e2dfbe4e11feb78f0ecce02e0f8a0a7eb5 | import pandas as pd
path = "Resources/cities.csv"
data = pd.read_csv(path)
data_html = data.to_html("data.html", bold_rows = True) | [((57, 74), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\n', (68, 74), True, 'import pandas as pd\n')] |
hyunjoy/scripts | qemu/scripts/codeconverter/codeconverter/test_patching.py | 01114d3627730d695b5ebe61093c719744432ffa | # Copyright (C) 2020 Red Hat Inc.
#
# Authors:
# Eduardo Habkost <[email protected]>
#
# This work is licensed under the terms of the GNU GPL, version 2. See
# the COPYING file in the top-level directory.
from tempfile import NamedTemporaryFile
from .patching import FileInfo, FileMatch, Patch, FileList
from .regexps import *
class BasicPattern(FileMatch):
regexp = '[abc]{3}'
@property
def name(self):
return self.group(0)
def replacement(self) -> str:
# replace match with the middle character repeated 5 times
return self.group(0)[1].upper()*5
def test_pattern_patching():
of = NamedTemporaryFile('wt')
of.writelines(['one line\n',
'this pattern will be patched: defbbahij\n',
'third line\n',
'another pattern: jihaabfed'])
of.flush()
files = FileList()
f = FileInfo(files, of.name)
f.load()
matches = f.matches_of_type(BasicPattern)
assert len(matches) == 2
p2 = matches[1]
# manually add patch, to see if .append() works:
f.patches.append(p2.append('XXX'))
# apply all patches:
f.gen_patches(matches)
patched = f.get_patched_content()
assert patched == ('one line\n'+
'this pattern will be patched: defBBBBBhij\n'+
'third line\n'+
'another pattern: jihAAAAAXXXfed')
class Function(FileMatch):
regexp = S(r'BEGIN\s+', NAMED('name', RE_IDENTIFIER), r'\n',
r'(.*\n)*?END\n')
class Statement(FileMatch):
regexp = S(r'^\s*', NAMED('name', RE_IDENTIFIER), r'\(\)\n')
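# Descriptive note on the two match types above: Function matches a whole
# "BEGIN <name> ... END" block, while Statement matches a single "<name>()"
# call line.  test_container_match() below checks which Statement matches
# fall inside which Function block.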
def test_container_match():
of = NamedTemporaryFile('wt')
of.writelines(['statement1()\n',
'statement2()\n',
'BEGIN function1\n',
' statement3()\n',
' statement4()\n',
'END\n',
'BEGIN function2\n',
' statement5()\n',
' statement6()\n',
'END\n',
'statement7()\n'])
of.flush()
files = FileList()
f = FileInfo(files, of.name)
f.load()
assert len(f.matches_of_type(Function)) == 2
print(' '.join(m.name for m in f.matches_of_type(Statement)))
assert len(f.matches_of_type(Statement)) == 7
f1 = f.find_match(Function, 'function1')
f2 = f.find_match(Function, 'function2')
st1 = f.find_match(Statement, 'statement1')
st2 = f.find_match(Statement, 'statement2')
st3 = f.find_match(Statement, 'statement3')
st4 = f.find_match(Statement, 'statement4')
st5 = f.find_match(Statement, 'statement5')
st6 = f.find_match(Statement, 'statement6')
st7 = f.find_match(Statement, 'statement7')
assert not f1.contains(st1)
assert not f1.contains(st2)
assert not f1.contains(st2)
assert f1.contains(st3)
assert f1.contains(st4)
assert not f1.contains(st5)
assert not f1.contains(st6)
assert not f1.contains(st7)
assert not f2.contains(st1)
assert not f2.contains(st2)
assert not f2.contains(st2)
assert not f2.contains(st3)
assert not f2.contains(st4)
assert f2.contains(st5)
assert f2.contains(st6)
assert not f2.contains(st7)
| [((634, 658), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', (['"""wt"""'], {}), "('wt')\n", (652, 658), False, 'from tempfile import NamedTemporaryFile\n'), ((1664, 1688), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', (['"""wt"""'], {}), "('wt')\n", (1682, 1688), False, 'from tempfile import NamedTemporaryFile\n')] |
Anim-101/CourseHub | Traversy Media/Python Django Dev to Deployment/Python Fundamentals/Tuples and Sets.py | 570ddc2bca794c14921991d24fdf1b4a7d0beb68 | # # Simple Tuple
# fruits = ('Apple', 'Orange', 'Mango')
# # Using Constructor
# fruits = tuple(('Apple', 'Orange', 'Mango'))
# # Getting a Single Value
# print(fruits[1])
# Trying to change based on position
# fruits[1] = 'Grape'
# Tuples with one value should have trailing comma
# fruits = ('Apple')
# fruits = ('Apple',)
# # Getting length of a tupel
# print(len(fruits))
# ## Set
fruits = {'Apple', 'Orange', 'Mango', 'Apple'}
# Checking if in Set
print('Apple' in fruits)
# Add to Set
fruits.add('Grape')
# Removing from Set
fruits.remove('Grape')
# Clearing Set
fruits.clear()
# Delete set
del fruits
print(fruits)
| [] |
flxst/nerblackbox | nerblackbox/modules/ner_training/metrics/ner_metrics.py | 7612b95850e637be258f6bfb01274453b7372f99 | from dataclasses import dataclass
from dataclasses import asdict
from typing import List, Tuple, Callable
import numpy as np
from sklearn.metrics import accuracy_score as accuracy_sklearn
from sklearn.metrics import precision_score as precision_sklearn
from sklearn.metrics import recall_score as recall_sklearn
from sklearn.metrics import precision_recall_fscore_support as prf_sklearn
from sklearn.exceptions import UndefinedMetricWarning
import warnings
from seqeval.metrics import precision_score as precision_seqeval
from seqeval.metrics import recall_score as recall_seqeval
from seqeval.metrics import f1_score as f1_seqeval
from seqeval.scheme import IOB2, BILOU
from nerblackbox.modules.ner_training.annotation_tags.tags import Tags
class NerMetrics:
"""
On the token level, the tags are evaluated in the given annotation scheme (e.g. plain, BIO)
On the entity level, the tags are evaluated in the BIO scheme (after converting if needed)
"""
def __init__(
self,
true_flat,
pred_flat,
level,
scheme,
classes=None,
class_index=None,
verbose=False,
):
"""
:param true_flat: [np array] of shape [batch_size * seq_length]
:param pred_flat: [np array] of shape [batch_size * seq_length]
:param level: [str] 'token' or 'entity'
:param scheme: [str] e.g. 'plain', 'bio'
:param classes: [optional, list] of [str] labels to take into account for metrics -> if level = 'token'
:param class_index: [optional, int] index to take into account for metrics -> if level = 'entity'
:param verbose: [optional, bool] if True, show verbose output
"""
self.true_flat = true_flat # token -> plain. entity -> plain, bio, bilou
self.pred_flat = pred_flat # token -> plain. entity -> plain, bio, bilou
self.scheme = scheme # token -> plain. entity -> plain, bio, bilou
self.classes = classes
self.class_index = class_index
self.level = level
self.verbose = verbose
if self.scheme == "bilou":
self.scheme_entity = "bilou"
self.scheme_entity_seqeval = BILOU
else: # plain, bio
self.scheme_entity = "bio"
self.scheme_entity_seqeval = IOB2
self.results = Results()
self.failure_value = -1
assert self.level in [
"token",
"entity",
], f"ERROR! level = {self.level} unknown."
if self.level == "entity":
self.true_flat_bio: List[str] = Tags(self.true_flat,).convert_scheme(
source_scheme=self.scheme, target_scheme=self.scheme_entity
) # entity -> bio, bilou
self.pred_flat_bio: List[str] = Tags(self.pred_flat).convert_scheme(
source_scheme=self.scheme, target_scheme=self.scheme_entity
) # entity -> bio, bilou
# ASR
self.pred_flat_bio_corrected: List[str]
self.pred_flat_bio_corrected, self.results.asr_abidance = Tags(
self.pred_flat_bio
).restore_annotation_scheme_consistency(
scheme=self.scheme_entity
) # entity -> bio, bilou
def results_as_dict(self):
return asdict(self.results)
def compute(self, _metrics):
"""
computes selected metrics
----------------------------------------------------------
:param _metrics: [list] of [str], e.g. ['acc, 'precision']
:return: -
"""
warnings.filterwarnings("error")
if "acc" in _metrics:
self.accuracy()
if "precision" in _metrics or "recall" in _metrics or "f1" in _metrics:
self._compute_well_defined_classes()
if "precision" in _metrics or "f1" in _metrics:
self.precision()
if "recall" in _metrics or "f1" in _metrics:
self.recall()
if "f1" in _metrics:
self.f1_score()
if (
"asr_abidance" in _metrics
or "asr_precision" in _metrics
or "asr_recall" in _metrics
or "asr_f1" in _metrics
):
self.compute_asr_results()
warnings.resetwarnings()
def accuracy(self):
"""
computes accuracy of predictions (_np_logits) w.r.t. ground truth (_np_label_ids)
---------------------------------------------------------------------------------
:return: acc [np float]
"""
self.results.acc = accuracy_sklearn(
self.true_flat, self.pred_flat, normalize=True
)
def precision(self):
"""
computes precision (macro/micro) of predictions (_pred_flat) w.r.t. ground truth (_true_flat)
Returns:
precision_micro [np array] for all examples
precision_macro [np array] for each class, then averaged
"""
if self.level == "token":
self.results.precision_micro = self._token_evaluation(
evaluation_function=precision_sklearn, average="micro"
)
self.results.precision_macro = self._token_evaluation(
evaluation_function=precision_sklearn, average="macro"
)
elif self.level == "entity":
self.results.precision_micro = self._entity_evaluation_micro(
evaluation_function=precision_seqeval
)
self.results.precision_macro = self._entity_evaluation_macro(
evaluation_function=precision_seqeval,
)
def recall(self):
"""
computes recall (macro/micro) of predictions (_pred_flat) w.r.t. ground truth (_true_flat)
Returns:
recall_micro [np array] for all examples
recall_macro [np array] for each class, then averaged
"""
if self.level == "token":
self.results.recall_micro = self._token_evaluation(
evaluation_function=recall_sklearn, average="micro"
)
self.results.recall_macro = self._token_evaluation(
evaluation_function=recall_sklearn, average="macro"
)
elif self.level == "entity":
self.results.recall_micro = self._entity_evaluation_micro(
evaluation_function=recall_seqeval
)
self.results.recall_macro = self._entity_evaluation_macro(
evaluation_function=recall_seqeval
)
def f1_score(self):
"""
computes f1 score (macro/micro) of predictions (_pred_flat) w.r.t. ground truth (_true_flat)
Returns:
f1_score_micro [np array] for all examples
f1_score_macro [np array] for each class, then averaged
"""
if self.level == "token":
self.results.f1_micro = self._token_evaluation(
evaluation_function=prf_sklearn, average="micro"
)
self.results.f1_macro = self._token_evaluation(
evaluation_function=prf_sklearn, average="macro"
)
elif self.level == "entity":
self.results.f1_micro, self.results.f1_macro = self._entity_evaluation_f1(
evaluation_function=f1_seqeval,
)
def compute_asr_results(self):
"""
computes
- self.results.asr_precision_micro
- self.results.asr_recall_micro
- self.results.asr_f1_micro
"""
def _entity_evaluation_micro_asr(evaluation_function: Callable) -> float:
"""helper function"""
try:
metric = evaluation_function(
[self.true_flat_bio],
[self.pred_flat_bio_corrected], # corrected !!!
average="micro",
mode="strict",
scheme=self.scheme_entity_seqeval,
)
except UndefinedMetricWarning as e:
if self.verbose:
print(e)
metric = self.failure_value
return metric
self.results.asr_precision_micro = _entity_evaluation_micro_asr(
evaluation_function=precision_seqeval
)
self.results.asr_recall_micro = _entity_evaluation_micro_asr(
evaluation_function=recall_seqeval
)
self.results.asr_f1_micro = _entity_evaluation_micro_asr(
evaluation_function=f1_seqeval
)
def _token_evaluation(self, evaluation_function: Callable, average: str) -> float:
"""
compute precision/recall/f1 on token level
Args:
evaluation_function: precision_sklearn, recall_sklearn, prf_sklearn
average: 'micro' or 'macro'
Returns:
metric: precision/recall on token level, 'micro' or 'macro' averaged
"""
assert evaluation_function in [
precision_sklearn,
recall_sklearn,
prf_sklearn,
], f"evaluation function = {evaluation_function} unknown / not allowed."
assert average in ["micro", "macro"], f"average = {average} unknown."
if self.classes is None or len(self.classes) > 1: # "all" / "fil"
if evaluation_function != prf_sklearn:
metric = evaluation_function(
self.true_flat,
self.pred_flat,
labels=self.classes,
average=average,
zero_division=0,
)
else:
_, _, metric, _ = prf_sklearn(
self.true_flat,
self.pred_flat,
labels=self.classes,
average=average,
zero_division=0,
)
else:
try:
if evaluation_function != prf_sklearn:
metric = evaluation_function(
self.true_flat,
self.pred_flat,
labels=self.classes,
average=average,
zero_division="warn",
)
else:
_, _, metric, _ = prf_sklearn(
self.true_flat,
self.pred_flat,
labels=self.classes,
average=average,
warn_for=("precision", "recall", "f-score"),
zero_division="warn",
)
except UndefinedMetricWarning as e:
if self.verbose:
print(e)
metric = self.failure_value
return metric
def _entity_evaluation_micro(self, evaluation_function: Callable) -> float:
"""
compute precision/recall micro average on entity level
Args:
evaluation_function: precision_seqeval, recall_seqeval
Returns:
metric: precision/recall on entity level, 'macro' averaged
"""
assert evaluation_function in [
precision_seqeval,
recall_seqeval,
], f"evaluation function = {evaluation_function} unknown / not allowed."
if self.class_index is None: # "fil"
try:
metric = evaluation_function(
[self.true_flat_bio],
[self.pred_flat_bio],
average="micro",
mode="strict",
scheme=self.scheme_entity_seqeval,
)
except UndefinedMetricWarning as e:
if self.verbose:
print(e)
metric = self.failure_value
else: # "ind"
try:
metric = evaluation_function(
[self.true_flat_bio],
[self.pred_flat_bio],
mode="strict",
scheme=self.scheme_entity_seqeval,
average=None,
zero_division="warn",
)[self.class_index]
except UndefinedMetricWarning:
try:
metric = evaluation_function(
[self.true_flat_bio],
[self.pred_flat_bio],
mode="strict",
scheme=self.scheme_entity_seqeval,
average=None,
zero_division=0,
)[self.class_index]
except IndexError:
metric = self.failure_value
if metric == 0:
metric = evaluation_function(
[self.true_flat_bio],
[self.pred_flat_bio],
mode="strict",
scheme=self.scheme_entity_seqeval,
average=None,
zero_division=1,
)[self.class_index]
if metric == 1:
metric = self.failure_value
except IndexError:
metric = self.failure_value
return metric
def _compute_well_defined_classes(self) -> None:
"""
Created Attributes:
results.classindices_macro: list of indices of well-defined classes in terms of precision, recall, f1
results.numberofclasses_macro: number of well-defined classes in terms of precision, recall, f1
"""
def _get_index_list(
evaluation_function: Callable, true_array, pred_array, scheme_seqeval=None
):
kwargs = (
{"mode": "strict", "scheme": scheme_seqeval}
if scheme_seqeval is not None
else {}
)
try:
metric_list = evaluation_function(
true_array,
pred_array,
average=None,
zero_division="warn",
**kwargs,
)
index_list = [i for i in range(len(metric_list))]
except UndefinedMetricWarning:
metric_list_all = evaluation_function(
true_array,
pred_array,
average=None,
zero_division=0,
**kwargs,
)
index_list = list()
for index, metric_elem in enumerate(metric_list_all):
if metric_elem != 0:
index_list.append(index)
else:
metric_elem_alt = evaluation_function(
true_array,
pred_array,
average=None,
zero_division=1,
**kwargs,
)[index]
if metric_elem_alt != 1:
index_list.append(index)
return index_list
if self.level == "token":
index_list_precision = _get_index_list(
evaluation_function=precision_sklearn,
true_array=self.true_flat,
pred_array=self.pred_flat,
)
index_list_recall = _get_index_list(
evaluation_function=recall_sklearn,
true_array=self.true_flat,
pred_array=self.pred_flat,
)
else:
index_list_precision = _get_index_list(
evaluation_function=precision_seqeval,
true_array=[self.true_flat_bio],
pred_array=[self.pred_flat_bio],
scheme_seqeval=self.scheme_entity_seqeval,
)
index_list_recall = _get_index_list(
evaluation_function=recall_seqeval,
true_array=[self.true_flat_bio],
pred_array=[self.pred_flat_bio],
scheme_seqeval=self.scheme_entity_seqeval,
)
self.results.classindices_macro = tuple(
[index for index in index_list_precision if index in index_list_recall]
)
if self.level == "token":
self.results.numberofclasses_macro = (
len(self.results.classindices_macro) - 1
) # disregard "O" label
else:
self.results.numberofclasses_macro = len(self.results.classindices_macro)
def _entity_evaluation_macro(
self,
evaluation_function: Callable,
) -> float:
"""
compute precision/recall macro average on entity level
Args:
evaluation_function: precision_seqeval, recall_seqeval
Returns:
metric: precision/recall on entity level, 'macro' averaged on well-defined classes
"""
assert evaluation_function in [
precision_seqeval,
recall_seqeval,
], f"evaluation function = {evaluation_function} unknown / not allowed."
metric = evaluation_function(
[self.true_flat_bio],
[self.pred_flat_bio],
mode="strict",
scheme=self.scheme_entity_seqeval,
average="macro",
zero_division=0,
)
return metric
def _entity_evaluation_f1(
self, evaluation_function: Callable
) -> Tuple[float, float]:
"""
compute f1 micro or macro average on entity level
Args:
evaluation_function: f1_seqeval
Returns:
f1_micro: f1 on entity level, 'micro' averaged
f1_macro: f1 on entity level, 'macro' averaged on well-defined classes
"""
assert evaluation_function in [
f1_seqeval
], f"evaluation function = {evaluation_function} unknown / not allowed."
# ensure that precision and recall have been called:
# self.precision()
# self.recall()
# f1_micro
if (
self.results.precision_micro == self.failure_value
or self.results.recall_micro == self.failure_value
):
f1_micro = self.failure_value
else:
if self.class_index is None: # "fil"
f1_micro = evaluation_function(
[self.true_flat_bio],
[self.pred_flat_bio],
average="micro",
mode="strict",
scheme=self.scheme_entity_seqeval,
)
else: # "ind"
f1_micro = evaluation_function(
[self.true_flat_bio],
[self.pred_flat_bio],
mode="strict",
scheme=self.scheme_entity_seqeval,
average=None,
zero_division="warn",
)[self.class_index]
# f1_macro
if (
self.results.precision_macro == self.failure_value
or self.results.recall_macro == self.failure_value
):
f1_macro = self.failure_value
else:
if self.class_index is None: # "fil"
metric_list = evaluation_function(
[self.true_flat_bio],
[self.pred_flat_bio],
mode="strict",
scheme=self.scheme_entity_seqeval,
average=None,
)
f1_macro = np.average(metric_list)
else: # "ind"
f1_macro = self.failure_value
return f1_micro, f1_macro
@dataclass
class Results:
acc: float = -1
precision_micro: float = -1
precision_macro: float = -1
recall_micro: float = -1
recall_macro: float = -1
f1_micro: float = -1
f1_macro: float = -1
classindices_macro: Tuple[float, ...] = ()
numberofclasses_macro: float = -1
asr_abidance: float = -1
asr_precision_micro: float = -1
asr_recall_micro: float = -1
asr_f1_micro: float = -1
| [((3327, 3347), 'dataclasses.asdict', 'asdict', (['self.results'], {}), '(self.results)\n', (3333, 3347), False, 'from dataclasses import asdict\n'), ((3601, 3633), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""error"""'], {}), "('error')\n", (3624, 3633), False, 'import warnings\n'), ((4299, 4323), 'warnings.resetwarnings', 'warnings.resetwarnings', ([], {}), '()\n', (4321, 4323), False, 'import warnings\n'), ((4612, 4676), 'sklearn.metrics.accuracy_score', 'accuracy_sklearn', (['self.true_flat', 'self.pred_flat'], {'normalize': '(True)'}), '(self.true_flat, self.pred_flat, normalize=True)\n', (4628, 4676), True, 'from sklearn.metrics import accuracy_score as accuracy_sklearn\n'), ((9664, 9767), 'sklearn.metrics.precision_recall_fscore_support', 'prf_sklearn', (['self.true_flat', 'self.pred_flat'], {'labels': 'self.classes', 'average': 'average', 'zero_division': '(0)'}), '(self.true_flat, self.pred_flat, labels=self.classes, average=\n average, zero_division=0)\n', (9675, 9767), True, 'from sklearn.metrics import precision_recall_fscore_support as prf_sklearn\n'), ((19582, 19605), 'numpy.average', 'np.average', (['metric_list'], {}), '(metric_list)\n', (19592, 19605), True, 'import numpy as np\n'), ((2617, 2637), 'nerblackbox.modules.ner_training.annotation_tags.tags.Tags', 'Tags', (['self.true_flat'], {}), '(self.true_flat)\n', (2621, 2637), False, 'from nerblackbox.modules.ner_training.annotation_tags.tags import Tags\n'), ((2814, 2834), 'nerblackbox.modules.ner_training.annotation_tags.tags.Tags', 'Tags', (['self.pred_flat'], {}), '(self.pred_flat)\n', (2818, 2834), False, 'from nerblackbox.modules.ner_training.annotation_tags.tags import Tags\n'), ((3106, 3130), 'nerblackbox.modules.ner_training.annotation_tags.tags.Tags', 'Tags', (['self.pred_flat_bio'], {}), '(self.pred_flat_bio)\n', (3110, 3130), False, 'from nerblackbox.modules.ner_training.annotation_tags.tags import Tags\n'), ((10312, 10465), 'sklearn.metrics.precision_recall_fscore_support', 'prf_sklearn', (['self.true_flat', 'self.pred_flat'], {'labels': 'self.classes', 'average': 'average', 'warn_for': "('precision', 'recall', 'f-score')", 'zero_division': '"""warn"""'}), "(self.true_flat, self.pred_flat, labels=self.classes, average=\n average, warn_for=('precision', 'recall', 'f-score'), zero_division='warn')\n", (10323, 10465), True, 'from sklearn.metrics import precision_recall_fscore_support as prf_sklearn\n')] |
spacemanidol/CLMS572 | Assignments/hw4/rank_feat_by_chi_square.py | f0380de9912c984ec21607cdb3b1f190853c5ca8 | import sys
def readInput():
labels, features, all_features, labelCount = [], [], [], {}
l = sys.stdin.readline().strip().split(' ')
while len(l)> 1:
label = l[0]
if label not in labelCount:
labelCount[label] = 0
labelCount[label] += 1
labels.append(label)
currFeat = set()
for key in l[1:]:
feature, _ = key.split(':')
all_features.append(feature)
currFeat.add(feature)
features.append(currFeat)
l = sys.stdin.readline().strip().split(' ')
return [labels, features] , set(all_features), labelCount
def rankByChiSquared(data, features, labelCount):
labels = labelCount.keys()
dataLength = len(data[0])
n = sum(labelCount.values())
results, featureOccourences, featureNonOccourences = [], {}, {}
for feature in features:
for label in labels:
featureOccourences[label] = 0 #Initialize
for i in range(dataLength):
if feature in data[1][i]:
                featureOccourences[data[0][i]] += 1 # Count how many times the feature occurs in the data for each label
for label in labels:
            featureNonOccourences[label] = labelCount[label] - featureOccourences[label] # count of the times it doesn't appear for each label
totalFeatureOccourences = sum(featureOccourences.values())
totalFeatureNonOccourences = sum(featureNonOccourences.values())
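        # Chi-squared statistic: for each label, compare the observed counts against the
        # expected counts label_total * feature_total / n for both the "feature present"
        # and "feature absent" cells, and sum (observed - expected)^2 / expected over labels.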
chi = sum([((featureOccourences[label]-(labelCount[label]*totalFeatureOccourences/n))**2/(labelCount[label]*totalFeatureOccourences/n) +(featureNonOccourences[label] - (labelCount[label] * totalFeatureNonOccourences/n))**2/(labelCount[label] * totalFeatureNonOccourences/n)) for label in labels]) #Chi squared calc
        results.append([feature, chi, totalFeatureOccourences]) # save the result for this feature
[print('{} {:.5f} {}'.format(*score)) for score in sorted(results, key = lambda x:(-x[1], -x[2], x[0]), reverse=False)] #print features sorted by chi^2 value, count in text, alphabetically
if __name__ == "__main__":
data, all_features, labelCount= readInput()
results = rankByChiSquared(data, all_features, labelCount) | [((100, 120), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (118, 120), False, 'import sys\n'), ((525, 545), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (543, 545), False, 'import sys\n')] |
LeoCruzG/4chan-thread-downloader | Files/joinfiles.py | d449e50fc7f2a6273a11da3d8ff2f46aad4951d2 | # Import the library for reading json files
import json
# Open the master file in read mode ('r') containing the ids of all the downloaded files
with open('master.json', 'r') as f:
    # Store the contents of master in the variable lista
lista = json.load(f)
# This example shows how specific files would be assigned to the list
#lista = ['2095303', '2169202']
# Open the file tryall.json in write mode ('w'); if it has not been created yet
# it is created at this point, and the name of this file can be changed
with open('tryall.json', 'w') as outfile:
    # Start a counter to keep track of how many files have been merged so far
contador = 0
    # This variable keeps the name of the previous file so we can
    # check that it is not being repeated with the previous one
helper = 0
    # This variable indicates that we have to write into the output document
    # what is inside the current file
update = True
    # Iterate over the whole list of downloaded files
for names in lista:
        # Open each file
with open(f'{names}.json') as infile:
            # Read the first 3 lines
infile.readline()
infile.readline()
infile.readline()
            # Store the contents of the 4th line, which holds the thread number,
            # in a temporary variable
temp = infile.readline()
            # Check whether helper has the same content as temp
if helper != temp:
                # If it is different, the update can be done since there will be
                # no repeated threads
update = True
                # assign the new content to the persistent variable
helper = temp
            # If they have the same content then the update is not done
else:
update = False
        # Open the file again
with open(f'{names}.json') as infile:
            # If the post is not repeated, enter this branch
if update == True:
                # Write the full contents of the thread to the output file
outfile.write(infile.read())
                # Increase the counter since a new document was written
contador+=1
                # Print the counter together with the name of the file that was read
print(contador, names)
        # Add a line break before writing the contents of the next file
outfile.write("\n") | [((265, 277), 'json.load', 'json.load', (['f'], {}), '(f)\n', (274, 277), False, 'import json\n')] |
SvenSerneels/pycopula | pycopula/archimedean_generators.py | 27c703ab0d25356f6e78b7cc16c8ece1ed80f871 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This file contains the generators and their inverses for common archimedean copulas.
"""
import numpy as np
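# Each generator pairs with an inverse such that generatorInvert(generator(x, theta), theta)
# recovers x; e.g. claytonGeneratorInvert(claytonGenerator(0.5, 2.0), 2.0) gives 0.5 back.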
def boundsConditions(x):
if x < 0 or x > 1:
raise ValueError("Unable to compute generator for x equals to {}".format(x))
def claytonGenerator(x, theta):
boundsConditions(x)
if theta == 0:
raise ValueError("The parameter of a Clayton copula must not be equal to 0.")
if theta < -1:
raise ValueError("The parameter of a Clayton copula must be greater than -1 and different from 0.")
return (1. / theta) * (x**(-theta) - 1.)
def claytonGeneratorInvert(x, theta):
if theta == 0:
raise ValueError("The parameter of a Clayton copula must not be equal to 0.")
if theta < -1:
raise ValueError("The parameter of a Clayton copula must be greater than -1 and different from 0.")
return (1. + theta * x)**(-1. / max(theta,1e-6))
def gumbelGenerator(x, theta):
boundsConditions(x)
if theta < 1:
raise ValueError("The parameter of a Gumbel copula must be greater than 1.")
return (-np.log(x))**theta
def gumbelGeneratorInvert(x, theta):
if len(theta) > 1:
theta = theta[0]
if theta < 1:
raise ValueError("The parameter of a Gumbel copula must be greater than 1.")
if (x < 1 and theta != 1):
raise(ValueError("The inverse Gumbel generator cannot be evaluated for negative input and theta > 1"))
return np.exp(-np.power(x,np.divide(1, theta)))
def frankGenerator(x, theta):
boundsConditions(x)
if theta == 0:
raise ValueError("The parameter of a Frank copula must not be equal to 0.")
return -np.log((np.exp(-theta[0] * x) - 1) / (np.exp(-theta[0]) - 1))
def frankGeneratorInvert(x, theta):
if theta == 0:
raise ValueError("The parameter of a Frank copula must not be equal to 0.")
return -1. / theta * np.log(1. + np.exp(-x) * (np.exp(-theta) - 1.))
def joeGenerator(x, theta):
boundsConditions(x)
if theta < 1:
raise ValueError("The parameter of a Joe copula must be greater than 1.")
return -np.log(1. - (1. - x)**theta)
def joeGeneratorInvert(x, theta):
if theta < 1:
raise ValueError("The parameter of a Joe copula must be greater than 1.")
return 1. - (1. - np.exp(-x))**(1. / max(theta,1e-6))
def aliMikhailHaqGenerator(x, theta):
boundsConditions(x)
if theta < -1 or theta >= 1:
raise ValueError("The parameter of an Ali-Mikhail-Haq copula must be between -1 included and 1 excluded.")
return np.log((1. - theta * (1. - x)) / x)
def aliMikhailHaqGeneratorInvert(x, theta):
if theta < -1 or theta >= 1:
raise ValueError("The parameter of an Ali-Mikhail-Haq copula must be between -1 included and 1 excluded.")
return (1. - theta) / (np.exp(x) - theta)
| [((2585, 2622), 'numpy.log', 'np.log', (['((1.0 - theta * (1.0 - x)) / x)'], {}), '((1.0 - theta * (1.0 - x)) / x)\n', (2591, 2622), True, 'import numpy as np\n'), ((2141, 2173), 'numpy.log', 'np.log', (['(1.0 - (1.0 - x) ** theta)'], {}), '(1.0 - (1.0 - x) ** theta)\n', (2147, 2173), True, 'import numpy as np\n'), ((1128, 1137), 'numpy.log', 'np.log', (['x'], {}), '(x)\n', (1134, 1137), True, 'import numpy as np\n'), ((2841, 2850), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (2847, 2850), True, 'import numpy as np\n'), ((1508, 1527), 'numpy.divide', 'np.divide', (['(1)', 'theta'], {}), '(1, theta)\n', (1517, 1527), True, 'import numpy as np\n'), ((2327, 2337), 'numpy.exp', 'np.exp', (['(-x)'], {}), '(-x)\n', (2333, 2337), True, 'import numpy as np\n'), ((1709, 1730), 'numpy.exp', 'np.exp', (['(-theta[0] * x)'], {}), '(-theta[0] * x)\n', (1715, 1730), True, 'import numpy as np\n'), ((1739, 1756), 'numpy.exp', 'np.exp', (['(-theta[0])'], {}), '(-theta[0])\n', (1745, 1756), True, 'import numpy as np\n'), ((1940, 1950), 'numpy.exp', 'np.exp', (['(-x)'], {}), '(-x)\n', (1946, 1950), True, 'import numpy as np\n'), ((1954, 1968), 'numpy.exp', 'np.exp', (['(-theta)'], {}), '(-theta)\n', (1960, 1968), True, 'import numpy as np\n')] |
blackboard/BBDN-Base-Python-Flask | app/admin/__init__.py | 710b82bfb45217798d9e9edda13d5e0f632e2284 | """
"""
from admin import routes
def init_app(app):
"""
:param app:
:return:
"""
routes.init_app(app)
| [((105, 125), 'admin.routes.init_app', 'routes.init_app', (['app'], {}), '(app)\n', (120, 125), False, 'from admin import routes\n')] |
tefra/xsdata-w3c-tests | output/models/nist_data/list_pkg/decimal/schema_instance/nistschema_sv_iv_list_decimal_pattern_2_xsd/__init__.py | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | from output.models.nist_data.list_pkg.decimal.schema_instance.nistschema_sv_iv_list_decimal_pattern_2_xsd.nistschema_sv_iv_list_decimal_pattern_2 import NistschemaSvIvListDecimalPattern2
__all__ = [
"NistschemaSvIvListDecimalPattern2",
]
| [] |
Pengeace/DGP-PDE-FEM | fem/fem.py | 64b7f42ca7083b05f05c42baa6cad21084068d8c | import numpy as np
import pyamg
from scipy import sparse
from scipy.spatial import Delaunay
from linsolver import sparse_solver
from triangulation.delaunay import delaunay
class Element:
def __init__(self, points, global_indexes, fem):
self.points = np.array(points)
self.global_indexes = global_indexes
self.fem = fem
self.reference_triangle = np.array([[0, 0], [1., 0], [0, 1.]])
self.reference_grad = np.array([[-1., -1], [1., 0], [0, 1.]])
def perform_calculation(self):
self._calculate_transform()
self._calculate_stiffness_matrix()
self._calulate_load_vector()
def _calculate_transform(self):
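        # Build the affine map taking the reference triangle onto this element; the
        # absolute determinant of its linear part equals twice the element area.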
reference_coord = np.array([self.reference_triangle[:, 0], self.reference_triangle[:, 1], [1] * 3])
transformed_coord = np.array([self.points[:, 0], self.points[:, 1], [1] * 3])
trans = np.dot(transformed_coord, np.linalg.inv(reference_coord))
self.transform_matrix = trans[0:-1, 0:-1]
self.area = abs(np.linalg.det(self.transform_matrix)) / 2
def _calculate_stiffness_matrix(self):
transform_matrix_inv = np.linalg.inv(self.transform_matrix)
self.element_stiffness_matrix = np.zeros((3, 3))
for row in range(3):
for col in range(3):
part_u_left_grad = np.dot(np.dot(self.fem.A, transform_matrix_inv.T), self.reference_grad[row])
part_u_right_grad = np.dot(transform_matrix_inv.T, self.reference_grad[col])
part_u_grad = self.area * np.dot(part_u_left_grad, part_u_right_grad)
part_u = (self.area / 6.0) if row == col else (self.area / 12.0)
self.element_stiffness_matrix[row, col] = part_u_grad + self.fem.q * part_u
def _calulate_load_vector(self):
mean_f = np.mean([self.fem.get_func_value(x) for x in self.points])
self.element_load_vector = np.array([mean_f * self.area / 3] * 3)
class FiniteElement:
"""
    Finite Element Method to solve a 2D elliptic partial differential equation of the form:
div(A grad(u)) + q u = func
"""
def __init__(self, points, boundaries, A, q, func, slow_solver=True):
self.points = np.array(points)
self.dirichlet_boundaries = np.array(boundaries)
self.A = A
self.q = q
self.f = func
self.slow_solver = slow_solver
self.triangles = []
self.point_num = len(points)
def solve(self):
if len(self.triangles) == 0:
self._get_mesh()
self._process_each_element()
self._calculate_global_stiffness_matrix()
self._calulate_global_load_vector()
self._deal_with_dirichlet_bound()
self._solve_linear_equations()
def update_border_and_func(self, boundaries, func):
self.dirichlet_boundaries = np.array(boundaries)
self.f = func
def get_func_value(self, x):
if isinstance(self.f, dict):
return self.f[tuple(x)]
else:
return self.f(x)
def _get_mesh(self):
if self.slow_solver:
self.triangles = delaunay(self.points)
else:
triangulation = Delaunay(self.points)
self.triangles = triangulation.simplices
def _process_each_element(self):
self.elements = []
for tri in self.triangles:
ele = Element(points=[self.points[v] for v in tri], global_indexes=tri, fem=self)
ele.perform_calculation()
self.elements.append(ele)
def _calculate_global_stiffness_matrix(self):
self.global_stiffness_matrix_row = []
self.global_stiffness_matrix_col = []
self.global_stiffness_matrix_data = []
boundary_indexes = set(self.dirichlet_boundaries[:, 0].astype('int'))
for ele in self.elements:
for row in range(3):
if ele.global_indexes[row] not in boundary_indexes:
for col in range(3):
self.global_stiffness_matrix_row.append(ele.global_indexes[row])
self.global_stiffness_matrix_col.append(ele.global_indexes[col])
self.global_stiffness_matrix_data.append(ele.element_stiffness_matrix[row, col])
def _calulate_global_load_vector(self):
self.global_load_vector = np.zeros(self.point_num)
for ele in self.elements:
for v in range(3):
self.global_load_vector[ele.global_indexes[v]] += ele.element_load_vector[v]
def _deal_with_dirichlet_bound(self):
for index, val in self.dirichlet_boundaries:
index = int(index)
self.global_stiffness_matrix_row.append(index)
self.global_stiffness_matrix_col.append(index)
self.global_stiffness_matrix_data.append(1)
self.global_load_vector[index] = val
def _solve_linear_equations(self):
if not self.slow_solver:
self.global_stiffness_matrix_csr = sparse.coo_matrix((self.global_stiffness_matrix_data, (
self.global_stiffness_matrix_row, self.global_stiffness_matrix_col))).tocsr()
self.solution = pyamg.solve(self.global_stiffness_matrix_csr, self.global_load_vector, verb=False,
tol=1e-10)
else:
global_stiffness_sparse = [np.array(self.global_stiffness_matrix_row),
np.array(self.global_stiffness_matrix_col),
np.array(self.global_stiffness_matrix_data)]
self.solution = sparse_solver.sparse_gauss_seidel(global_stiffness_sparse, self.global_load_vector,
sparse_input=True)
## these solver methods are for test
# self.global_stiffness = sparse.coo_matrix((self.global_stiffness_matrix_data, (
# self.global_stiffness_matrix_row, self.global_stiffness_matrix_col))).tocsr()
# self.solution = linsolver.jacobi(self.global_stiffness.toarray(), self.global_load_vector)
# self.solution = linsolver.gauss_seidel(self.global_stiffness.toarray(), self.global_load_vector)
# self.solution = sparse_solver.sparse_jacobi(self.global_stiffness.toarray(), self.global_load_vector, sparse_input=False)
# self.solution = sparse_solver.sparse_gauss_seidel(self.global_stiffness.toarray(), self.global_load_vector, sparse_input=False)
if isinstance(self.solution, str):
print("The inputs for linear solver have problems.")
| [((265, 281), 'numpy.array', 'np.array', (['points'], {}), '(points)\n', (273, 281), True, 'import numpy as np\n'), ((385, 423), 'numpy.array', 'np.array', (['[[0, 0], [1.0, 0], [0, 1.0]]'], {}), '([[0, 0], [1.0, 0], [0, 1.0]])\n', (393, 423), True, 'import numpy as np\n'), ((452, 494), 'numpy.array', 'np.array', (['[[-1.0, -1], [1.0, 0], [0, 1.0]]'], {}), '([[-1.0, -1], [1.0, 0], [0, 1.0]])\n', (460, 494), True, 'import numpy as np\n'), ((707, 796), 'numpy.array', 'np.array', (['[self.reference_triangle[:, (0)], self.reference_triangle[:, (1)], [1] * 3]'], {}), '([self.reference_triangle[:, (0)], self.reference_triangle[:, (1)],\n [1] * 3])\n', (715, 796), True, 'import numpy as np\n'), ((817, 878), 'numpy.array', 'np.array', (['[self.points[:, (0)], self.points[:, (1)], [1] * 3]'], {}), '([self.points[:, (0)], self.points[:, (1)], [1] * 3])\n', (825, 878), True, 'import numpy as np\n'), ((1144, 1180), 'numpy.linalg.inv', 'np.linalg.inv', (['self.transform_matrix'], {}), '(self.transform_matrix)\n', (1157, 1180), True, 'import numpy as np\n'), ((1221, 1237), 'numpy.zeros', 'np.zeros', (['(3, 3)'], {}), '((3, 3))\n', (1229, 1237), True, 'import numpy as np\n'), ((1918, 1956), 'numpy.array', 'np.array', (['([mean_f * self.area / 3] * 3)'], {}), '([mean_f * self.area / 3] * 3)\n', (1926, 1956), True, 'import numpy as np\n'), ((2245, 2261), 'numpy.array', 'np.array', (['points'], {}), '(points)\n', (2253, 2261), True, 'import numpy as np\n'), ((2298, 2318), 'numpy.array', 'np.array', (['boundaries'], {}), '(boundaries)\n', (2306, 2318), True, 'import numpy as np\n'), ((2877, 2897), 'numpy.array', 'np.array', (['boundaries'], {}), '(boundaries)\n', (2885, 2897), True, 'import numpy as np\n'), ((4374, 4398), 'numpy.zeros', 'np.zeros', (['self.point_num'], {}), '(self.point_num)\n', (4382, 4398), True, 'import numpy as np\n'), ((918, 948), 'numpy.linalg.inv', 'np.linalg.inv', (['reference_coord'], {}), '(reference_coord)\n', (931, 948), True, 'import numpy as np\n'), ((3155, 3176), 'triangulation.delaunay.delaunay', 'delaunay', (['self.points'], {}), '(self.points)\n', (3163, 3176), False, 'from triangulation.delaunay import delaunay\n'), ((3219, 3240), 'scipy.spatial.Delaunay', 'Delaunay', (['self.points'], {}), '(self.points)\n', (3227, 3240), False, 'from scipy.spatial import Delaunay\n'), ((5208, 5306), 'pyamg.solve', 'pyamg.solve', (['self.global_stiffness_matrix_csr', 'self.global_load_vector'], {'verb': '(False)', 'tol': '(1e-10)'}), '(self.global_stiffness_matrix_csr, self.global_load_vector, verb\n =False, tol=1e-10)\n', (5219, 5306), False, 'import pyamg\n'), ((5634, 5741), 'linsolver.sparse_solver.sparse_gauss_seidel', 'sparse_solver.sparse_gauss_seidel', (['global_stiffness_sparse', 'self.global_load_vector'], {'sparse_input': '(True)'}), '(global_stiffness_sparse, self.\n global_load_vector, sparse_input=True)\n', (5667, 5741), False, 'from linsolver import sparse_solver\n'), ((1026, 1062), 'numpy.linalg.det', 'np.linalg.det', (['self.transform_matrix'], {}), '(self.transform_matrix)\n', (1039, 1062), True, 'import numpy as np\n'), ((1449, 1505), 'numpy.dot', 'np.dot', (['transform_matrix_inv.T', 'self.reference_grad[col]'], {}), '(transform_matrix_inv.T, self.reference_grad[col])\n', (1455, 1505), True, 'import numpy as np\n'), ((5395, 5437), 'numpy.array', 'np.array', (['self.global_stiffness_matrix_row'], {}), '(self.global_stiffness_matrix_row)\n', (5403, 5437), True, 'import numpy as np\n'), ((5478, 5520), 'numpy.array', 'np.array', (['self.global_stiffness_matrix_col'], {}), 
'(self.global_stiffness_matrix_col)\n', (5486, 5520), True, 'import numpy as np\n'), ((5561, 5604), 'numpy.array', 'np.array', (['self.global_stiffness_matrix_data'], {}), '(self.global_stiffness_matrix_data)\n', (5569, 5604), True, 'import numpy as np\n'), ((1343, 1385), 'numpy.dot', 'np.dot', (['self.fem.A', 'transform_matrix_inv.T'], {}), '(self.fem.A, transform_matrix_inv.T)\n', (1349, 1385), True, 'import numpy as np\n'), ((1549, 1592), 'numpy.dot', 'np.dot', (['part_u_left_grad', 'part_u_right_grad'], {}), '(part_u_left_grad, part_u_right_grad)\n', (1555, 1592), True, 'import numpy as np\n'), ((5030, 5159), 'scipy.sparse.coo_matrix', 'sparse.coo_matrix', (['(self.global_stiffness_matrix_data, (self.global_stiffness_matrix_row, self\n .global_stiffness_matrix_col))'], {}), '((self.global_stiffness_matrix_data, (self.\n global_stiffness_matrix_row, self.global_stiffness_matrix_col)))\n', (5047, 5159), False, 'from scipy import sparse\n')] |
MatthewFlamm/ha-tahoma | custom_components/tahoma/climate_devices/dimmer_exterior_heating.py | 794e8e4a54a8e5f55622b88bb1ab5ffc3ecb0d1b | """Support for Atlantic Electrical Heater IO controller."""
import logging
from typing import List
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from ..coordinator import TahomaDataUpdateCoordinator
from ..tahoma_entity import TahomaEntity
_LOGGER = logging.getLogger(__name__)
COMMAND_GET_LEVEL = "getLevel"
COMMAND_SET_LEVEL = "setLevel"
CORE_LEVEL_STATE = "core:LevelState"
class DimmerExteriorHeating(TahomaEntity, ClimateEntity):
"""Representation of TaHoma IO Atlantic Electrical Heater."""
def __init__(self, device_url: str, coordinator: TahomaDataUpdateCoordinator):
"""Init method."""
super().__init__(device_url, coordinator)
self._saved_level = 100 - self.select_state(CORE_LEVEL_STATE)
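        # The controller exposes a 0-100 dimmer level; this entity presents the inverted
        # value (100 - level) as the target temperature, so a level of 100 maps to HVAC off.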
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
return SUPPORT_TARGET_TEMPERATURE
@property
def temperature_unit(self) -> str:
"""Return the unit of measurement used by the platform."""
return TEMP_CELSIUS
@property
def min_temp(self) -> float:
"""Return minimum percentage."""
return 0
@property
def max_temp(self) -> float:
"""Return maximum percentage."""
return 100
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return 100 - self.select_state(CORE_LEVEL_STATE)
async def async_set_temperature(self, **kwargs) -> None:
"""Set new target temperature."""
level = kwargs.get(ATTR_TEMPERATURE)
if level is None:
return
await self.async_execute_command(COMMAND_SET_LEVEL, 100 - int(level))
await self.async_execute_command(COMMAND_GET_LEVEL)
@property
def hvac_mode(self) -> str:
"""Return hvac operation ie. heat, cool mode."""
if self.select_state(CORE_LEVEL_STATE) == 100:
return HVAC_MODE_OFF
return HVAC_MODE_HEAT
@property
def hvac_modes(self) -> List[str]:
"""Return the list of available hvac operation modes."""
return [HVAC_MODE_OFF, HVAC_MODE_HEAT]
async def async_set_hvac_mode(self, hvac_mode: str) -> None:
"""Set new target hvac mode."""
level = 0
if hvac_mode == HVAC_MODE_HEAT:
level = self._saved_level
else:
self._saved_level = self.target_temperature
await self.async_execute_command(COMMAND_SET_LEVEL, 100 - int(level))
await self.async_execute_command(COMMAND_GET_LEVEL)
| [((455, 482), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (472, 482), False, 'import logging\n')] |
emorynlp/stem-cell-hypothesis | elit/components/mtl/attn/joint_encoder.py | 48a628093d93d653865fbac6409d179cddd99293 | # -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2021-03-02 13:32
from typing import Optional, Union, Dict, Any
import torch
from torch import nn
from transformers import PreTrainedTokenizer
from elit.components.mtl.attn.attn import TaskAttention
from elit.components.mtl.attn.transformer import JointEncoder
from elit.layers.embeddings.contextual_word_embedding import ContextualWordEmbeddingModule, ContextualWordEmbedding
from elit.layers.scalar_mix import ScalarMixWithDropoutBuilder
from elit.layers.transformers.utils import pick_tensor_for_each_token
class JointContextualWordEmbeddingModule(ContextualWordEmbeddingModule):
def __init__(self, field: str, transformer: str, transformer_tokenizer: PreTrainedTokenizer, average_subwords=False,
scalar_mix: Union[ScalarMixWithDropoutBuilder, int] = None, word_dropout=None,
max_sequence_length=None, ret_raw_hidden_states=False, transformer_args: Dict[str, Any] = None,
trainable=True, training=True) -> None:
super().__init__(field, transformer, transformer_tokenizer, average_subwords, scalar_mix, word_dropout,
max_sequence_length, ret_raw_hidden_states, transformer_args, trainable, training)
self.adapter: TaskAttention = None
def forward(self, batch: dict, mask=None, **kwargs):
input_ids: torch.LongTensor = batch[f'{self.field}_input_ids']
if self.max_sequence_length and input_ids.size(-1) > self.max_sequence_length:
raise NotImplementedError('Sentence length exceeded and sliding window has not been implemented yet')
token_span: torch.LongTensor = batch.get(f'{self.field}_token_span', None)
token_type_ids: torch.LongTensor = batch.get(f'{self.field}_token_type_ids', None)
attention_mask = input_ids.ne(0)
if self.word_dropout:
input_ids = self.word_dropout(input_ids)
# noinspection PyTypeChecker
transformer: JointEncoder = self.transformer
encoder_outputs = transformer(input_ids, attention_mask, token_type_ids)
outputs = dict()
for task_name, encoder_output in encoder_outputs.items():
encoder_output = encoder_output[0]
outputs[task_name] = pick_tensor_for_each_token(encoder_output, token_span, self.average_subwords)
return outputs
class JointContextualWordEmbedding(ContextualWordEmbedding):
def module(self, training=True, **kwargs) -> Optional[nn.Module]:
return JointContextualWordEmbeddingModule(self.field,
self.transformer,
self._transformer_tokenizer,
self.average_subwords,
self.scalar_mix,
self.word_dropout,
self.max_sequence_length,
self.ret_raw_hidden_states,
self.transformer_args,
self.trainable,
training=training)
| [((2256, 2333), 'elit.layers.transformers.utils.pick_tensor_for_each_token', 'pick_tensor_for_each_token', (['encoder_output', 'token_span', 'self.average_subwords'], {}), '(encoder_output, token_span, self.average_subwords)\n', (2282, 2333), False, 'from elit.layers.transformers.utils import pick_tensor_for_each_token\n')] |
salinsiim/petssa-simulation | simulation/sensors/__init__.py | 8f0f128d462831f86664bb8d246f2c7b659a0b8d | from sensors.sensors import sense_characteristics, sense_pedestrians | [] |
anuragajay/jaxrl | jaxrl/agents/sac_v1/sac_v1_learner.py | a37414aea9e281f19719ccfc09702b32e1ef4e44 | """Implementations of algorithms for continuous control."""
import functools
from typing import Optional, Sequence, Tuple
import jax
import jax.numpy as jnp
import numpy as np
import optax
from jaxrl.agents.sac import temperature
from jaxrl.agents.sac.actor import update as update_actor
from jaxrl.agents.sac.critic import target_update
from jaxrl.agents.sac_v1.critic import update_q, update_v
from jaxrl.datasets import Batch
from jaxrl.networks import critic_net, policies
from jaxrl.networks.common import InfoDict, Model, PRNGKey
@functools.partial(jax.jit, static_argnames=('update_target'))
def _update_jit(
rng: PRNGKey, actor: Model, critic: Model, value: Model,
target_value: Model, temp: Model, batch: Batch, discount: float,
tau: float, target_entropy: float, update_target: bool
) -> Tuple[PRNGKey, Model, Model, Model, Model, Model, InfoDict]:
new_critic, critic_info = update_q(critic, target_value, batch, discount)
rng, key = jax.random.split(rng)
new_actor, actor_info = update_actor(key, actor, new_critic, temp, batch)
rng, key = jax.random.split(rng)
new_value, value_info = update_v(key, new_actor, new_critic, value, temp,
batch, True)
if update_target:
new_target_value = target_update(new_value, target_value, tau)
else:
new_target_value = target_value
new_temp, alpha_info = temperature.update(temp, actor_info['entropy'],
target_entropy)
return rng, new_actor, new_critic, new_value, new_target_value, new_temp, {
**critic_info,
**value_info,
**actor_info,
**alpha_info
}
class SACV1Learner(object):
def __init__(self,
seed: int,
observations: jnp.ndarray,
actions: jnp.ndarray,
actor_lr: float = 3e-4,
value_lr: float = 3e-4,
critic_lr: float = 3e-4,
temp_lr: float = 3e-4,
hidden_dims: Sequence[int] = (256, 256),
discount: float = 0.99,
tau: float = 0.005,
target_update_period: int = 1,
target_entropy: Optional[float] = None,
init_temperature: float = 1.0):
"""
An implementation of the version of Soft-Actor-Critic described in https://arxiv.org/abs/1801.01290
"""
action_dim = actions.shape[-1]
if target_entropy is None:
self.target_entropy = -action_dim / 2
else:
self.target_entropy = target_entropy
self.tau = tau
self.target_update_period = target_update_period
self.discount = discount
rng = jax.random.PRNGKey(seed)
rng, actor_key, critic_key, temp_key = jax.random.split(rng, 4)
actor_def = policies.NormalTanhPolicy(hidden_dims, action_dim)
actor = Model.create(actor_def,
inputs=[actor_key, observations],
tx=optax.adam(learning_rate=actor_lr))
critic_def = critic_net.DoubleCritic(hidden_dims)
critic = Model.create(critic_def,
inputs=[critic_key, observations, actions],
tx=optax.adam(learning_rate=critic_lr))
value_def = critic_net.ValueCritic(hidden_dims)
value = Model.create(value_def,
inputs=[critic_key, observations],
tx=optax.adam(learning_rate=value_lr))
target_value = Model.create(value_def,
inputs=[critic_key, observations])
temp = Model.create(temperature.Temperature(init_temperature),
inputs=[temp_key],
tx=optax.adam(learning_rate=temp_lr))
self.actor = actor
self.critic = critic
self.value = value
self.target_value = target_value
self.temp = temp
self.rng = rng
self.step = 1
def sample_actions(self,
observations: np.ndarray,
temperature: float = 1.0) -> jnp.ndarray:
rng, actions = policies.sample_actions(self.rng, self.actor.apply_fn,
self.actor.params, observations,
temperature)
self.rng = rng
actions = np.asarray(actions)
return np.clip(actions, -1, 1)
def update(self, batch: Batch) -> InfoDict:
self.step += 1
new_rng, new_actor, new_critic, new_value, new_target_value, new_temp, info = _update_jit(
self.rng, self.actor, self.critic, self.value, self.target_value,
self.temp, batch, self.discount, self.tau, self.target_entropy,
self.step % self.target_update_period == 0)
self.rng = new_rng
self.actor = new_actor
self.critic = new_critic
self.value = new_value
self.target_value = new_target_value
self.temp = new_temp
return info
| [((542, 601), 'functools.partial', 'functools.partial', (['jax.jit'], {'static_argnames': '"""update_target"""'}), "(jax.jit, static_argnames='update_target')\n", (559, 601), False, 'import functools\n'), ((907, 954), 'jaxrl.agents.sac_v1.critic.update_q', 'update_q', (['critic', 'target_value', 'batch', 'discount'], {}), '(critic, target_value, batch, discount)\n', (915, 954), False, 'from jaxrl.agents.sac_v1.critic import update_q, update_v\n'), ((971, 992), 'jax.random.split', 'jax.random.split', (['rng'], {}), '(rng)\n', (987, 992), False, 'import jax\n'), ((1021, 1070), 'jaxrl.agents.sac.actor.update', 'update_actor', (['key', 'actor', 'new_critic', 'temp', 'batch'], {}), '(key, actor, new_critic, temp, batch)\n', (1033, 1070), True, 'from jaxrl.agents.sac.actor import update as update_actor\n'), ((1087, 1108), 'jax.random.split', 'jax.random.split', (['rng'], {}), '(rng)\n', (1103, 1108), False, 'import jax\n'), ((1137, 1199), 'jaxrl.agents.sac_v1.critic.update_v', 'update_v', (['key', 'new_actor', 'new_critic', 'value', 'temp', 'batch', '(True)'], {}), '(key, new_actor, new_critic, value, temp, batch, True)\n', (1145, 1199), False, 'from jaxrl.agents.sac_v1.critic import update_q, update_v\n'), ((1409, 1472), 'jaxrl.agents.sac.temperature.update', 'temperature.update', (['temp', "actor_info['entropy']", 'target_entropy'], {}), "(temp, actor_info['entropy'], target_entropy)\n", (1427, 1472), False, 'from jaxrl.agents.sac import temperature\n'), ((1287, 1330), 'jaxrl.agents.sac.critic.target_update', 'target_update', (['new_value', 'target_value', 'tau'], {}), '(new_value, target_value, tau)\n', (1300, 1330), False, 'from jaxrl.agents.sac.critic import target_update\n'), ((2763, 2787), 'jax.random.PRNGKey', 'jax.random.PRNGKey', (['seed'], {}), '(seed)\n', (2781, 2787), False, 'import jax\n'), ((2835, 2859), 'jax.random.split', 'jax.random.split', (['rng', '(4)'], {}), '(rng, 4)\n', (2851, 2859), False, 'import jax\n'), ((2881, 2931), 'jaxrl.networks.policies.NormalTanhPolicy', 'policies.NormalTanhPolicy', (['hidden_dims', 'action_dim'], {}), '(hidden_dims, action_dim)\n', (2906, 2931), False, 'from jaxrl.networks import critic_net, policies\n'), ((3125, 3161), 'jaxrl.networks.critic_net.DoubleCritic', 'critic_net.DoubleCritic', (['hidden_dims'], {}), '(hidden_dims)\n', (3148, 3161), False, 'from jaxrl.networks import critic_net, policies\n'), ((3369, 3404), 'jaxrl.networks.critic_net.ValueCritic', 'critic_net.ValueCritic', (['hidden_dims'], {}), '(hidden_dims)\n', (3391, 3404), False, 'from jaxrl.networks import critic_net, policies\n'), ((3601, 3659), 'jaxrl.networks.common.Model.create', 'Model.create', (['value_def'], {'inputs': '[critic_key, observations]'}), '(value_def, inputs=[critic_key, observations])\n', (3613, 3659), False, 'from jaxrl.networks.common import InfoDict, Model, PRNGKey\n'), ((4244, 4348), 'jaxrl.networks.policies.sample_actions', 'policies.sample_actions', (['self.rng', 'self.actor.apply_fn', 'self.actor.params', 'observations', 'temperature'], {}), '(self.rng, self.actor.apply_fn, self.actor.params,\n observations, temperature)\n', (4267, 4348), False, 'from jaxrl.networks import critic_net, policies\n'), ((4481, 4500), 'numpy.asarray', 'np.asarray', (['actions'], {}), '(actions)\n', (4491, 4500), True, 'import numpy as np\n'), ((4516, 4539), 'numpy.clip', 'np.clip', (['actions', '(-1)', '(1)'], {}), '(actions, -1, 1)\n', (4523, 4539), True, 'import numpy as np\n'), ((3725, 3766), 'jaxrl.agents.sac.temperature.Temperature', 'temperature.Temperature', 
(['init_temperature'], {}), '(init_temperature)\n', (3748, 3766), False, 'from jaxrl.agents.sac import temperature\n'), ((3067, 3101), 'optax.adam', 'optax.adam', ([], {'learning_rate': 'actor_lr'}), '(learning_rate=actor_lr)\n', (3077, 3101), False, 'import optax\n'), ((3311, 3346), 'optax.adam', 'optax.adam', ([], {'learning_rate': 'critic_lr'}), '(learning_rate=critic_lr)\n', (3321, 3346), False, 'import optax\n'), ((3541, 3575), 'optax.adam', 'optax.adam', ([], {'learning_rate': 'value_lr'}), '(learning_rate=value_lr)\n', (3551, 3575), False, 'import optax\n'), ((3846, 3879), 'optax.adam', 'optax.adam', ([], {'learning_rate': 'temp_lr'}), '(learning_rate=temp_lr)\n', (3856, 3879), False, 'import optax\n')] |
plures/rbc | rbc/libfuncs.py | 57c170c148000e7b56f0cda2f0dbea7bdcfa0e1b | """Collections of library function names.
"""
class Library:
"""Base class for a collection of library function names.
"""
@staticmethod
def get(libname, _cache={}):
if libname in _cache:
return _cache[libname]
if libname == 'stdlib':
r = Stdlib()
elif libname == 'stdio':
r = Stdio()
elif libname == 'm':
r = Mlib()
elif libname == 'libdevice':
r = Libdevice()
elif libname == 'nvvm':
r = NVVMIntrinsics()
elif libname == 'llvm':
r = LLVMIntrinsics()
elif libname == 'heavydb':
r = HeavyDB()
else:
raise ValueError(f'Unknown library {libname}')
_cache[libname] = r
return r
def __contains__(self, fname):
return self.check(fname)
def check(self, fname):
"""
Return True if library contains a function with given name.
"""
if fname in self._function_names:
return True
for func in self._function_names:
if func.endswith('.*') and fname.startswith(func[:-2]):
return True
return False
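# Illustrative usage: Library.get('m').check('sqrt') and 'sqrt' in Library.get('m')
# both return True for any function listed in the math library collection.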
class HeavyDB(Library):
name = 'heavydb'
_function_names = list('''
allocate_varlen_buffer set_output_row_size
TableFunctionManager_error_message TableFunctionManager_set_output_row_size
table_function_error
'''.strip().split())
class Stdlib(Library):
"""
Reference: http://www.cplusplus.com/reference/cstdlib/
"""
name = 'stdlib'
_function_names = list(''' atof atoi atol atoll strtod strtof strtol strtold strtoll strtoul
strtoull rand srand calloc free malloc realloc abort atexit
at_quick_exit exit getenv quick_exit system bsearch qsort abs div
labs ldiv llabs lldiv mblen mbtowc wctomb mbstowcs wcstombs '''.strip().split())
class Stdio(Library):
"""
Reference: http://www.cplusplus.com/reference/cstdio/
"""
name = 'stdio'
_function_names = list(''' remove rename tmpfile tmpnam fclose fflush fopen freopen setbuf
setvbuf fprintf fscanf printf scanf snprintf sprintf sscanf
vfprintf vfscanf vprintf vscanf vsnprintf vsprintf vsscanf fgetc
fgets fputc fputs getc getchar gets putc putchar puts ungetc fread
fwrite fgetpos fseek fsetpos ftell rewind clearerr feof ferror
perror '''.strip().split())
class Mlib(Library):
"""
References:
https://www.gnu.org/software/libc/manual/html_node/Mathematics.html
https://en.cppreference.com/w/cpp/header/cmath
"""
name = 'm'
_function_names = list('''sin sinf sinl cos cosf cosl tan tanf tanl sincos sincosf sincosl
csin csinf csinl ccos ccosf ccosl ctan ctanf ctanl asin asinf
asinl acos acosf acosl atan atanf atanl atan2 atan2f atan2l casin
casinf casinl cacos cacosf cacosl catan catanf catanl exp expf
expl exp2 exp2f exp2l exp10 exp10f exp10l log logf logl log2 log2f
log2l log10 log10f log10l logb logbf logbl ilogb ilogbf ilogbl pow
powf powl sqrt sqrtf sqrtl cbrt cbrtf cbrtl hypot hypotf hypotl
expm1 expm1f expm1l log1p log1pf log1pl clog clogf clogl clog10
clog10f clog10l csqrt csqrtf csqrtl cpow cpowf cpowl sinh sinhf
sinhl cosh coshf coshl tanh tanhf tanhl csinh csinhf csinhl ccosh
ccoshf ccoshl ctanh ctanhf ctanhl asinh asinhf asinhl acosh acoshf
acoshl atanh atanhf atanhl casinh casinhf casinhl cacosh cacoshf
cacoshl catanh catanhf catanhl erf erff erfl erfc erfcf erfcl
lgamma lgammaf lgammal tgamma tgammaf tgammal lgamma_r lgammaf_r
lgammal_r gamma gammaf gammal j0 j0f j0l j1 j1f j1l jn jnf jnl y0
y0f y0l y1 y1f y1l yn ynf ynl rand srand rand_r random srandom
initstate setstate random_r srandom_r initstate_r setstate_r
drand48 erand48 lrand48 nrand48 mrand48 jrand48 srand48 seed48
lcong48 drand48_r erand48_r lrand48_r nrand48_r mrand48_r
jrand48_r srand48_r seed48_r lcong48_r abs labs llabs fabs fabsf
fabsl cabs cabsf cabsl frexp frexpf frexpl ldexp ldexpf ldexpl
scalb scalbf scalbl scalbn scalbnf scalbnl significand
significandf significandl ceil ceilf ceill floor floorf floorl
trunc truncf truncl rint rintf rintl nearbyint nearbyintf
nearbyintl round roundf roundl roundeven roundevenf roundevenl
lrint lrintf lrintl lround lroundf lroundl llround llroundf
llroundl fromfp fromfpf fromfpl ufromfp ufromfpf ufromfpl fromfpx
fromfpxf fromfpxl ufromfpx ufromfpxf ufromfpxl modf modff modfl
fmod fmodf fmodl remainder remainderf remainderl drem dremf dreml
copysign copysignf copysignl signbit signbitf signbitl nextafter
nextafterf nextafterl nexttoward nexttowardf nexttowardl nextup
nextupf nextupl nextdown nextdownf nextdownl nan nanf nanl
canonicalize canonicalizef canonicalizel getpayload getpayloadf
getpayloadl setpayload setpayloadf setpayloadl setpayloadsig
setpayloadsigf setpayloadsigl isgreater isgreaterequal isless
islessequal islessgreater isunordered iseqsig totalorder
totalorderf totalorderl totalordermag totalorderf totalorderl fmin
fminf fminl fmax fmaxf fmaxl fminmag fminmagf fminmagl fmaxmag
fmaxmagf fmaxmagl fdim fdimf fdiml fma fmaf fmal fadd faddf faddl
fsub fsubf fsubl fmul fmulf fmull fdiv fdivf fdivl llrint llrintf
llrintl'''.strip().split())
def drop_suffix(f):
s = f.rsplit('.', 1)[-1]
if s in ['p0i8', 'f64', 'f32', 'i1', 'i8', 'i16', 'i32', 'i64', 'i128']:
f = f[:-len(s)-1]
return drop_suffix(f)
return f
def get_llvm_name(f, prefix='llvm.'):
"""Return normalized name of a llvm intrinsic name.
"""
if f.startswith(prefix):
return drop_suffix(f[len(prefix):])
return f
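# e.g. get_llvm_name('llvm.memcpy.p0i8.p0i8.i64') returns 'memcpy'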
class LLVMIntrinsics(Library):
"""LLVM intrinsic function names with prefix `llvm.` removed.
Reference: https://llvm.org/docs/LangRef.html#intrinsic-functions
"""
name = 'llvm'
def check(self, fname):
if fname.startswith('llvm.'):
return Library.check(self, get_llvm_name(fname))
return False
_function_names = list(''' va_start va_end va_copy gcroot gcread gcwrite returnaddress
addressofreturnaddress sponentry frameaddress stacksave
stackrestore get.dynamic.area.offset prefetch pcmarker
readcyclecounter clear_cache instrprof.increment
instrprof.increment.step instrprof.value.profile thread.pointer
call.preallocated.setup call.preallocated.arg
call.preallocated.teardown abs smax smin umax umin memcpy
memcpy.inline memmove sqrt powi sin cos pow exp exp2 log log10
log2 fma fabs minnum maxnum minimum maximum copysign floor ceil
trunc rint nearbyint round roundeven lround llround lrint llrint
ctpop ctlz cttz fshl fshr sadd.with.overflow uadd.with.overflow
ssub.with.overflow usub.with.overflow smul.with.overflow
umul.with.overflow sadd.sat uadd.sat ssub.sat usub.sat sshl.sat
ushl.sat smul.fix umul.fix smul.fix.sat umul.fix.sat sdiv.fix
udiv.fix sdiv.fix.sat udiv.fix.sat canonicalize fmuladd
set.loop.iterations test.set.loop.iterations loop.decrement.reg
loop.decrement vector.reduce.add vector.reduce.fadd
vector.reduce.mul vector.reduce.fmul vector.reduce.and
vector.reduce.or vector.reduce.xor vector.reduce.smax
vector.reduce.smin vector.reduce.umax vector.reduce.umin
vector.reduce.fmax vector.reduce.fmin matrix.transpose
matrix.multiply matrix.column.major.load matrix.column.major.store
convert.to.fp16 convert.from.fp16 init.trampoline
adjust.trampoline lifetime.start lifetime.end invariant.start
invariant.end launder.invariant.group strip.invariant.group
experimental.constrained.fadd experimental.constrained.fsub
experimental.constrained.fmul experimental.constrained.fdiv
experimental.constrained.frem experimental.constrained.fma
experimental.constrained.fptoui experimental.constrained.fptosi
experimental.constrained.uitofp experimental.constrained.sitofp
experimental.constrained.fptrunc experimental.constrained.fpext
experimental.constrained.fmuladd experimental.constrained.sqrt
experimental.constrained.pow experimental.constrained.powi
experimental.constrained.sin experimental.constrained.cos
experimental.constrained.exp experimental.constrained.exp2
experimental.constrained.log experimental.constrained.log10
experimental.constrained.log2 experimental.constrained.rint
experimental.constrained.lrint experimental.constrained.llrint
experimental.constrained.nearbyint experimental.constrained.maxnum
experimental.constrained.minnum experimental.constrained.maximum
experimental.constrained.minimum experimental.constrained.ceil
experimental.constrained.floor experimental.constrained.round
experimental.constrained.roundeven experimental.constrained.lround
experimental.constrained.llround experimental.constrained.trunc
experimental.gc.statepoint experimental.gc.result experimental.gc.relocate
experimental.gc.get.pointer.base experimental.gc.get.pointer.offset
experimental.vector.reduce.add.* experimental.vector.reduce.fadd.*
experimental.vector.reduce.mul.* experimental.vector.reduce.fmul.*
experimental.vector.reduce.and.* experimental.vector.reduce.or.*
experimental.vector.reduce.xor.* experimental.vector.reduce.smax.*
experimental.vector.reduce.smin.* experimental.vector.reduce.umax.*
experimental.vector.reduce.umin.* experimental.vector.reduce.fmax.*
experimental.vector.reduce.fmin.*
flt.rounds var.annotation ptr.annotation annotation
codeview.annotation trap debugtrap stackprotector stackguard
objectsize expect expect.with.probability assume ssa_copy
type.test type.checked.load donothing experimental.deoptimize
experimental.guard experimental.widenable.condition load.relative
sideeffect is.constant ptrmask vscale
memcpy.element.unordered.atomic memmove.element.unordered.atomic
memset.element.unordered.atomic objc.autorelease
objc.autoreleasePoolPop objc.autoreleasePoolPush
objc.autoreleaseReturnValue objc.copyWeak objc.destroyWeak
objc.initWeak objc.loadWeak objc.loadWeakRetained objc.moveWeak
objc.release objc.retain objc.retainAutorelease
objc.retainAutoreleaseReturnValue
objc.retainAutoreleasedReturnValue objc.retainBlock
objc.storeStrong objc.storeWeak preserve.array.access.index
preserve.union.access.index preserve.struct.access.index
masked.store.* memset'''.strip().split())
class NVVMIntrinsics(Library):
"""NVVM intrinsic function names with prefix `llvm.` removed.
Reference: https://docs.nvidia.com/cuda/nvvm-ir-spec/index.html#intrinsic-functions
"""
name = 'nvvm'
def check(self, fname):
if fname.startswith('llvm.'):
return Library.check(self, get_llvm_name(fname))
return False
_function_names = list(''' memcpy memmove memset sqrt fma bswap ctpop ctlz cttz fmuladd
convert.to.fp16.f32 convert.from.fp16.f32 convert.to.fp16
convert.from.fp16 lifetime.start lifetime.end invariant.start
invariant.end var.annotation ptr.annotation annotation expect
donothing '''.strip().split())
class Libdevice(Library):
"""NVIDIA libdevice function names with prefix `__nv_` removed.
Reference: https://docs.nvidia.com/cuda/libdevice-users-guide/function-desc.html#function-desc
"""
name = 'libdevice'
def check(self, fname):
if fname.startswith('__nv_'):
return Library.check(self, get_llvm_name(fname, prefix='__nv_'))
return False
_function_names = list(''' abs acos acosf acosh acoshf asin asinf asinh asinhf atan atan2
atan2f atanf atanh atanhf brev brevll byte_perm cbrt cbrtf ceil
ceilf clz clzll copysign copysignf cos cosf cosh coshf cospi
cospif dadd_rd dadd_rn dadd_ru dadd_rz ddiv_rd ddiv_rn ddiv_ru
ddiv_rz dmul_rd dmul_rn dmul_ru dmul_rz double2float_rd
double2float_rn double2float_ru double2float_rz double2hiint
double2int_rd double2int_rn double2int_ru double2int_rz
double2ll_rd double2ll_rn double2ll_ru double2ll_rz double2loint
double2uint_rd double2uint_rn double2uint_ru double2uint_rz
double2ull_rd double2ull_rn double2ull_ru double2ull_rz
double_as_longlong drcp_rd drcp_rn drcp_ru drcp_rz dsqrt_rd
dsqrt_rn dsqrt_ru dsqrt_rz erf erfc erfcf erfcinv erfcinvf erfcx
erfcxf erff erfinv erfinvf exp exp10 exp10f exp2 exp2f expf expm1
expm1f fabs fabsf fadd_rd fadd_rn fadd_ru fadd_rz fast_cosf
fast_exp10f fast_expf fast_fdividef fast_log10f fast_log2f
fast_logf fast_powf fast_sincosf fast_sinf fast_tanf fdim fdimf
fdiv_rd fdiv_rn fdiv_ru fdiv_rz ffs ffsll finitef float2half_rn
float2int_rd float2int_rn float2int_ru float2int_rz float2ll_rd
float2ll_rn float2ll_ru float2ll_rz float2uint_rd float2uint_rn
float2uint_ru float2uint_rz float2ull_rd float2ull_rn float2ull_ru
float2ull_rz float_as_int floor floorf fma fma_rd fma_rn fma_ru
fma_rz fmaf fmaf_rd fmaf_rn fmaf_ru fmaf_rz fmax fmaxf fmin fminf
fmod fmodf fmul_rd fmul_rn fmul_ru fmul_rz frcp_rd frcp_rn frcp_ru
frcp_rz frexp frexpf frsqrt_rn fsqrt_rd fsqrt_rn fsqrt_ru fsqrt_rz
fsub_rd fsub_rn fsub_ru fsub_rz hadd half2float hiloint2double
hypot hypotf ilogb ilogbf int2double_rn int2float_rd int2float_rn
int2float_ru int2float_rz int_as_float isfinited isinfd isinff
isnand isnanf j0 j0f j1 j1f jn jnf ldexp ldexpf lgamma lgammaf
ll2double_rd ll2double_rn ll2double_ru ll2double_rz ll2float_rd
ll2float_rn ll2float_ru ll2float_rz llabs llmax llmin llrint
llrintf llround llroundf log log10 log10f log1p log1pf log2 log2f
logb logbf logf longlong_as_double max min modf modff mul24
mul64hi mulhi nan nanf nearbyint nearbyintf nextafter nextafterf
normcdf normcdff normcdfinv normcdfinvf popc popcll pow powf powi
powif rcbrt rcbrtf remainder remainderf remquo remquof rhadd rint
rintf round roundf rsqrt rsqrtf sad saturatef scalbn scalbnf
signbitd signbitf sin sincos sincosf sincospi sincospif sinf sinh
sinhf sinpi sinpif sqrt sqrtf tan tanf tanh tanhf tgamma tgammaf
trunc truncf uhadd uint2double_rn uint2float_rd uint2float_rn
uint2float_ru uint2float_rz ull2double_rd ull2double_rn
ull2double_ru ull2double_rz ull2float_rd ull2float_rn ull2float_ru
ull2float_rz ullmax ullmin umax umin umul24 umul64hi umulhi urhadd
usad y0 y0f y1 y1f yn ynf '''.strip().split())
| [] |
chenmich/google-ml-crash-course-exercises | quick_pandas.py | d610f890d53b1537a3ce80531ce1ff2df1f5dc84 | import pandas as pd
print(pd.__version__)
city_names = pd.Series(['San Francisco', 'San Jose', 'Sacramento'])
population = pd.Series([852469, 1015785, 485199])
#city_population_table = pd.DataFrame(({'City name': city_names, 'Population': population}))
california_houseing_dataframe = pd.read_csv("https://storage.googleapis.com/mledu-datasets/california_housing_train.csv", sep=",")
california_houseing_dataframe.describe()
california_houseing_dataframe.head()
#some error
#california_houseing_dataframe.hist('housing_median_age')
cities = pd.DataFrame({'City name': city_names, 'Population': population})
#print(type(cities['City name']))
#print(cities['City name'])
#print(type(cities['City name'][1]))
#print(cities['City name'][1])
#print(type(cities[0:2]))
#print(cities[0:2])
#print(population / 1000)
import numpy as np
np.log(population)
#print(population.apply(lambda val: val > 10000))
cities['Area square miles'] = pd.Series([46.87, 176.53, 97.92])
#print(cities)
cities['Population density'] = cities['Population'] / cities['Area square miles']
#print(cities)
print(city_names.index)
print(cities.reindex([2, 0, 1]))
print(cities) | [((55, 109), 'pandas.Series', 'pd.Series', (["['San Francisco', 'San Jose', 'Sacramento']"], {}), "(['San Francisco', 'San Jose', 'Sacramento'])\n", (64, 109), True, 'import pandas as pd\n'), ((123, 159), 'pandas.Series', 'pd.Series', (['[852469, 1015785, 485199]'], {}), '([852469, 1015785, 485199])\n', (132, 159), True, 'import pandas as pd\n'), ((285, 393), 'pandas.read_csv', 'pd.read_csv', (['"""https://storage.googleapis.com/mledu-datasets/california_housing_train.csv"""'], {'sep': '""","""'}), "(\n 'https://storage.googleapis.com/mledu-datasets/california_housing_train.csv'\n , sep=',')\n", (296, 393), True, 'import pandas as pd\n'), ((541, 606), 'pandas.DataFrame', 'pd.DataFrame', (["{'City name': city_names, 'Population': population}"], {}), "({'City name': city_names, 'Population': population})\n", (553, 606), True, 'import pandas as pd\n'), ((828, 846), 'numpy.log', 'np.log', (['population'], {}), '(population)\n', (834, 846), True, 'import numpy as np\n'), ((927, 960), 'pandas.Series', 'pd.Series', (['[46.87, 176.53, 97.92]'], {}), '([46.87, 176.53, 97.92])\n', (936, 960), True, 'import pandas as pd\n')] |
paulproteus/briefcase-toga-button-app-with-hacks | src/helloworld/__main__.py | 61ec41b154204bb4a7a59f55374193dd4f9ca377 | from helloworld.app import main
if True or __name__ == '__main__':
main().main_loop()
| [((72, 78), 'helloworld.app.main', 'main', ([], {}), '()\n', (76, 78), False, 'from helloworld.app import main\n')] |
ianahart/blog | backend/app/main.py | fc52e15a8b56bd4c6482065de7e21f8b31f5d765 | from fastapi import FastAPI
from dotenv import load_dotenv
from fastapi.middleware.cors import CORSMiddleware
from app.api.api_v1.api import api_router
from app.core.config import settings
app = FastAPI()
load_dotenv()
app.include_router(api_router, prefix=settings.API_V1_STR)
# Set all CORS enabled origins
if settings.BACKEND_CORS_ORIGINS:
app.add_middleware(
CORSMiddleware,
allow_origins=[str(origin)
for origin in settings.BACKEND_CORS_ORIGINS],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
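# Illustration (hypothetical value): with BACKEND_CORS_ORIGINS = ["http://localhost:3000"]
# in the settings, a frontend served from that origin could call this API across origins.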
if __name__ == "__main__":
# Use this for debugging purposes only
# pyright: reportGeneralTypeIssues=false
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8001, log_level="debug")
| [((197, 206), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (204, 206), False, 'from fastapi import FastAPI\n'), ((207, 220), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (218, 220), False, 'from dotenv import load_dotenv\n'), ((734, 796), 'uvicorn.run', 'uvicorn.run', (['app'], {'host': '"""0.0.0.0"""', 'port': '(8001)', 'log_level': '"""debug"""'}), "(app, host='0.0.0.0', port=8001, log_level='debug')\n", (745, 796), False, 'import uvicorn\n')] |
goldstar611/ssort | test_data/samples/alembic_template_output.py | 05c35ec89dd9ff391ae824c17ed974340e2f5597 | """Example revision
Revision ID: fdf0cf6487a3
Revises:
Create Date: 2021-08-09 17:55:19.491713
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "fdf0cf6487a3"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"example",
sa.Column("example_id", sa.Integer(), nullable=False),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("measurements")
# ### end Alembic commands ###
| [((593, 622), 'alembic.op.drop_table', 'op.drop_table', (['"""measurements"""'], {}), "('measurements')\n", (606, 622), False, 'from alembic import op\n'), ((432, 444), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (442, 444), True, 'import sqlalchemy as sa\n')] |
gearbird/calgo | .archived/snakecode/0173.py | ab48357100de2a5ea47fda2d9f01ced6dc73fa79 | from __future__ import annotations
from typing import Optional
# Definition for a binary tree node.
class TreeNode:
def __init__(self, val: int = 0, left: Optional[TreeNode] = None, right: Optional[TreeNode] = None):
self.val = val
self.left = left
self.right = right
class BSTIterator:
def __init__(self, root: Optional[TreeNode]):
self.stack: list[TreeNode] = []
self.cur = root
    def next(self) -> int:
        if not self.hasNext():
            raise StopIteration()
        # The top of the stack holds the next in-order node; its right subtree is visited next.
        self.cur = self.stack[-1].right
        return self.stack.pop().val
    def hasNext(self) -> bool:
        # Push the pending chain of left children starting from the current node.
        while self.cur:
            self.stack.append(self.cur)
            self.cur = self.cur.left
        if self.stack: return True
        return False
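if __name__ == '__main__':
    # Minimal usage sketch (added for illustration, not part of the original solution):
    # in-order iteration over the BST 7 -> (3, 15 -> (9, 20)) yields ascending values.
    root = TreeNode(7, TreeNode(3), TreeNode(15, TreeNode(9), TreeNode(20)))
    iterator = BSTIterator(root)
    values = []
    while iterator.hasNext():
        values.append(iterator.next())
    print(values)  # expected: [3, 7, 9, 15, 20]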
| [] |
KuiyuanFu/PythonLeetCode | .leetcode/506.relative-ranks.py | 8962df2fa838eb7ae48fa59de272ba55a89756d8 | # @lc app=leetcode id=506 lang=python3
#
# [506] Relative Ranks
#
# https://leetcode.com/problems/relative-ranks/description/
#
# algorithms
# Easy (53.46%)
# Likes: 188
# Dislikes: 9
# Total Accepted: 71.1K
# Total Submissions: 132.4K
# Testcase Example: '[5,4,3,2,1]'
#
# You are given an integer array score of size n, where score[i] is the score
# of the i^th athlete in a competition. All the scores are guaranteed to be
# unique.
#
# The athletes are placed based on their scores, where the 1^st place athlete
# has the highest score, the 2^nd place athlete has the 2^nd highest score, and
# so on. The placement of each athlete determines their rank:
#
#
# The 1^st place athlete's rank is "Gold Medal".
# The 2^nd place athlete's rank is "Silver Medal".
# The 3^rd place athlete's rank is "Bronze Medal".
# For the 4^th place to the n^th place athlete, their rank is their placement
# number (i.e., the x^th place athlete's rank is "x").
#
#
# Return an array answer of size n where answer[i] is the rank of the i^th
# athlete.
#
#
# Example 1:
#
#
# Input: score = [5,4,3,2,1]
# Output: ["Gold Medal","Silver Medal","Bronze Medal","4","5"]
# Explanation: The placements are [1^st, 2^nd, 3^rd, 4^th, 5^th].
#
# Example 2:
#
#
# Input: score = [10,3,8,9,4]
# Output: ["Gold Medal","5","Bronze Medal","Silver Medal","4"]
# Explanation: The placements are [1^st, 5^th, 3^rd, 2^nd, 4^th].
#
#
#
#
# Constraints:
#
#
# n == score.length
# 1 <= n <= 10^4
# 0 <= score[i] <= 10^6
# All the values in score are unique.
#
#
#
# @lc tags=Unknown
# @lc imports=start
from imports import *
# @lc imports=end
# @lc idea=start
#
# Sort the (score, index) pairs in descending order of score, then map each original index to its rank.
#
# @lc idea=end
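# Worked illustration (using Example 2 above): score = [10,3,8,9,4] gives pairs
# [(-10,0), (-9,3), (-8,2), (-4,4), (-3,1)] after sorting, so index 0 -> "Gold Medal",
# 3 -> "Silver Medal", 2 -> "Bronze Medal", 4 -> "4", 1 -> "5".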
# @lc group=
# @lc rank=
# @lc code=start
class Solution:
def findRelativeRanks(self, score: List[int]) -> List[str]:
s = [(-s, i) for i, s in enumerate(score)]
s.sort()
ss = ['Gold Medal', 'Silver Medal', 'Bronze Medal']
def toS(idx):
if idx >= 3:
return str(idx + 1)
return ss[idx]
res = [''] * len(score)
for idx, (_, i) in enumerate(s):
res[i] = toS(idx)
return res
# @lc code=end
# @lc main=start
if __name__ == '__main__':
print('Example 1:')
print('Input : ')
print('score = [5,4,3,2,1]')
print('Exception :')
print('["Gold Medal","Silver Medal","Bronze Medal","4","5"]')
print('Output :')
print(str(Solution().findRelativeRanks([5, 4, 3, 2, 1])))
print()
print('Example 2:')
print('Input : ')
print('score = [10,3,8,9,4]')
print('Exception :')
print('["Gold Medal","5","Bronze Medal","Silver Medal","4"]')
print('Output :')
print(str(Solution().findRelativeRanks([10, 3, 8, 9, 4])))
print()
pass
# @lc main=end | [] |
QuarkTheAwesome/compiler-rt-be-aeabi | test/msan/lit.cfg.py | 79e7d2bd981b0f38d60d90f8382c6cd5389b95d0 | # -*- Python -*-
import os
# Setup config name.
config.name = 'MemorySanitizer' + getattr(config, 'name_suffix', 'default')
# Setup source root.
config.test_source_root = os.path.dirname(__file__)
# Setup default compiler flags used with -fsanitize=memory option.
clang_msan_cflags = (["-fsanitize=memory",
"-mno-omit-leaf-frame-pointer",
"-fno-omit-frame-pointer",
"-fno-optimize-sibling-calls"] +
[config.target_cflags] +
config.debug_info_flags)
# Some Msan tests leverage backtrace() which requires libexecinfo on FreeBSD.
if config.host_os == 'FreeBSD':
clang_msan_cflags += ["-lexecinfo", "-fPIC"]
clang_msan_cxxflags = config.cxx_mode_flags + clang_msan_cflags
# Flags for KMSAN invocation. This is C-only, we're not interested in C++.
clang_kmsan_cflags = (["-fsanitize=kernel-memory"] +
[config.target_cflags] +
config.debug_info_flags)
def build_invocation(compile_flags):
return " " + " ".join([config.clang] + compile_flags) + " "
config.substitutions.append( ("%clang_msan ", build_invocation(clang_msan_cflags)) )
config.substitutions.append( ("%clangxx_msan ", build_invocation(clang_msan_cxxflags)) )
config.substitutions.append( ("%clang_kmsan ", build_invocation(clang_kmsan_cflags)) )
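# Illustration (actual values depend on the local lit config): with config.clang set to
# "/usr/bin/clang", "%clang_msan %s" expands to roughly
# " /usr/bin/clang -fsanitize=memory -mno-omit-leaf-frame-pointer ... %s".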
# Default test suffixes.
config.suffixes = ['.c', '.cc', '.cpp']
if config.host_os not in ['Linux', 'NetBSD', 'FreeBSD']:
config.unsupported = True
# For mips64, mips64el we have forced store_context_size to 1 because these
# archs use slow unwinder which is not async signal safe. Therefore we only
# check the first frame since store_context size is 1.
if config.host_arch in ['mips64', 'mips64el']:
config.substitutions.append( ('CHECK-%short-stack', 'CHECK-SHORT-STACK'))
else:
config.substitutions.append( ('CHECK-%short-stack', 'CHECK-FULL-STACK'))
| [((174, 199), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (189, 199), False, 'import os\n')] |
victor-freitas/ProjetoNCS | application/core/migrations/0001_initial.py | 7c80fad11e49f4ed00eefb90638730d340d78e1f | # Generated by Django 2.0.6 on 2018-06-17 04:47
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Cliente',
fields=[
('id', models.SmallIntegerField(db_column='ID', primary_key=True, serialize=False)),
('cpf_cnpj', models.IntegerField(db_column='CPF_CNPJ', unique=True)),
('razao', models.CharField(blank=True, db_column='RAZAO', max_length=100, null=True)),
('endereco', models.CharField(db_column='ENDERECO', max_length=80)),
('cep', models.CharField(db_column='CEP', max_length=20)),
('email', models.CharField(db_column='EMAIL', max_length=200)),
('telefone', models.CharField(db_column='TELEFONE', max_length=11)),
('celular', models.CharField(blank=True, db_column='CELULAR', max_length=11, null=True)),
],
options={
'db_table': 'Cliente',
'managed': False,
},
),
migrations.CreateModel(
name='Fornecedor',
fields=[
('id', models.SmallIntegerField(db_column='ID', primary_key=True, serialize=False)),
('cpf_cnpj', models.IntegerField(db_column='CPF_CNPJ', unique=True)),
('razao', models.CharField(blank=True, db_column='RAZAO', max_length=100, null=True)),
('endereco', models.CharField(db_column='ENDERECO', max_length=80)),
('cep', models.CharField(db_column='CEP', max_length=20)),
('email', models.CharField(db_column='EMAIL', max_length=200)),
('telefone', models.CharField(db_column='TELEFONE', max_length=11)),
('celular', models.CharField(blank=True, db_column='CELULAR', max_length=11, null=True)),
('pessoa_contato', models.CharField(blank=True, db_column='PESSOA_CONTATO', max_length=100, null=True)),
],
options={
'db_table': 'Fornecedor',
'managed': False,
},
),
migrations.CreateModel(
name='Funcionario',
fields=[
('id', models.SmallIntegerField(db_column='ID', primary_key=True, serialize=False)),
('nome', models.CharField(db_column='NOME', max_length=100)),
('cpf', models.IntegerField(db_column='CPF')),
('cargo', models.SmallIntegerField(db_column='CARGO')),
('login', models.CharField(db_column='LOGIN', max_length=100)),
('senha', models.CharField(db_column='SENHA', max_length=50)),
],
options={
'db_table': 'Funcionario',
'managed': False,
},
),
migrations.CreateModel(
name='Materiaprima',
fields=[
('id', models.SmallIntegerField(db_column='ID', primary_key=True, serialize=False)),
('nome', models.CharField(db_column='NOME', max_length=60)),
('forma_emb', models.CharField(db_column='FORMA_EMB', max_length=60)),
('peso', models.CharField(db_column='PESO', max_length=20)),
('unid_medida', models.CharField(db_column='UNID_MEDIDA', max_length=50)),
('quantidade', models.IntegerField(db_column='QUANTIDADE')),
('quantidade_min', models.IntegerField(db_column='QUANTIDADE_MIN')),
('descricao', models.CharField(db_column='DESCRICAO', max_length=500)),
('data_recebimento', models.DateField(db_column='DATA_RECEBIMENTO')),
],
options={
'db_table': 'MateriaPrima',
'managed': False,
},
),
migrations.CreateModel(
name='Ordemdeproducao',
fields=[
('id', models.SmallIntegerField(db_column='ID', primary_key=True, serialize=False)),
('descricao', models.CharField(db_column='Descricao', max_length=500)),
],
options={
'db_table': 'OrdemDeProducao',
'managed': False,
},
),
migrations.CreateModel(
name='Pedido',
fields=[
('id', models.SmallIntegerField(db_column='ID', primary_key=True, serialize=False)),
('data_pedido', models.DateField(db_column='DATA_PEDIDO')),
('valor', models.CharField(blank=True, db_column='VALOR', max_length=20, null=True)),
],
options={
'db_table': 'Pedido',
'managed': False,
},
),
migrations.CreateModel(
name='Pedidomp',
fields=[
('id', models.SmallIntegerField(db_column='ID', primary_key=True, serialize=False)),
('data_pedido', models.DateField(db_column='DATA_PEDIDO')),
('data_prevista', models.DateField(db_column='DATA_PREVISTA')),
('descricao', models.CharField(blank=True, db_column='DESCRICAO', max_length=500, null=True)),
('valor', models.CharField(blank=True, db_column='VALOR', max_length=20, null=True)),
],
options={
'db_table': 'PedidoMP',
'managed': False,
},
),
migrations.CreateModel(
name='Produto',
fields=[
('id', models.AutoField(db_column='ID', primary_key=True, serialize=False)),
('nome', models.CharField(db_column='NOME', max_length=60)),
('forma_emb', models.CharField(db_column='FORMA_EMB', max_length=60)),
('peso', models.CharField(db_column='PESO', max_length=20)),
('unid_medida', models.CharField(db_column='UNID_MEDIDA', max_length=50)),
('preco', models.CharField(blank=True, db_column='PRECO', max_length=10, null=True)),
('quantidade', models.IntegerField(blank=True, db_column='QUANTIDADE', null=True)),
('desc_produto', models.CharField(db_column='DESC_PRODUTO', max_length=500)),
],
options={
'db_table': 'Produto',
'managed': False,
},
),
migrations.CreateModel(
name='Setor',
fields=[
('id', models.SmallIntegerField(db_column='ID', primary_key=True, serialize=False)),
('nome', models.CharField(db_column='NOME', max_length=100)),
],
options={
'db_table': 'Setor',
'managed': False,
},
),
migrations.CreateModel(
name='Statusordemproducao',
fields=[
('id', models.SmallIntegerField(db_column='ID', primary_key=True, serialize=False)),
('status_nome', models.CharField(db_column='STATUS_NOME', max_length=30)),
],
options={
'db_table': 'StatusOrdemProducao',
'managed': False,
},
),
migrations.CreateModel(
name='Tipoproduto',
fields=[
('id', models.SmallIntegerField(db_column='ID', primary_key=True, serialize=False)),
('nome', models.CharField(db_column='NOME', max_length=100)),
],
options={
'db_table': 'TipoProduto',
'managed': False,
},
),
migrations.CreateModel(
name='Tiposeguimento',
fields=[
('id', models.SmallIntegerField(db_column='ID', primary_key=True, serialize=False)),
('nome', models.CharField(db_column='NOME', max_length=100)),
],
options={
'db_table': 'TipoSeguimento',
'managed': False,
},
),
]
| [((303, 378), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'db_column': '"""ID"""', 'primary_key': '(True)', 'serialize': '(False)'}), "(db_column='ID', primary_key=True, serialize=False)\n", (327, 378), False, 'from django.db import migrations, models\n'), ((410, 464), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'db_column': '"""CPF_CNPJ"""', 'unique': '(True)'}), "(db_column='CPF_CNPJ', unique=True)\n", (429, 464), False, 'from django.db import migrations, models\n'), ((493, 567), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'db_column': '"""RAZAO"""', 'max_length': '(100)', 'null': '(True)'}), "(blank=True, db_column='RAZAO', max_length=100, null=True)\n", (509, 567), False, 'from django.db import migrations, models\n'), ((599, 652), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""ENDERECO"""', 'max_length': '(80)'}), "(db_column='ENDERECO', max_length=80)\n", (615, 652), False, 'from django.db import migrations, models\n'), ((679, 727), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""CEP"""', 'max_length': '(20)'}), "(db_column='CEP', max_length=20)\n", (695, 727), False, 'from django.db import migrations, models\n'), ((756, 807), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""EMAIL"""', 'max_length': '(200)'}), "(db_column='EMAIL', max_length=200)\n", (772, 807), False, 'from django.db import migrations, models\n'), ((839, 892), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""TELEFONE"""', 'max_length': '(11)'}), "(db_column='TELEFONE', max_length=11)\n", (855, 892), False, 'from django.db import migrations, models\n'), ((923, 998), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'db_column': '"""CELULAR"""', 'max_length': '(11)', 'null': '(True)'}), "(blank=True, db_column='CELULAR', max_length=11, null=True)\n", (939, 998), False, 'from django.db import migrations, models\n'), ((1244, 1319), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'db_column': '"""ID"""', 'primary_key': '(True)', 'serialize': '(False)'}), "(db_column='ID', primary_key=True, serialize=False)\n", (1268, 1319), False, 'from django.db import migrations, models\n'), ((1351, 1405), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'db_column': '"""CPF_CNPJ"""', 'unique': '(True)'}), "(db_column='CPF_CNPJ', unique=True)\n", (1370, 1405), False, 'from django.db import migrations, models\n'), ((1434, 1508), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'db_column': '"""RAZAO"""', 'max_length': '(100)', 'null': '(True)'}), "(blank=True, db_column='RAZAO', max_length=100, null=True)\n", (1450, 1508), False, 'from django.db import migrations, models\n'), ((1540, 1593), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""ENDERECO"""', 'max_length': '(80)'}), "(db_column='ENDERECO', max_length=80)\n", (1556, 1593), False, 'from django.db import migrations, models\n'), ((1620, 1668), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""CEP"""', 'max_length': '(20)'}), "(db_column='CEP', max_length=20)\n", (1636, 1668), False, 'from django.db import migrations, models\n'), ((1697, 1748), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""EMAIL"""', 'max_length': '(200)'}), "(db_column='EMAIL', max_length=200)\n", (1713, 1748), False, 'from django.db import migrations, models\n'), ((1780, 1833), 
'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""TELEFONE"""', 'max_length': '(11)'}), "(db_column='TELEFONE', max_length=11)\n", (1796, 1833), False, 'from django.db import migrations, models\n'), ((1864, 1939), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'db_column': '"""CELULAR"""', 'max_length': '(11)', 'null': '(True)'}), "(blank=True, db_column='CELULAR', max_length=11, null=True)\n", (1880, 1939), False, 'from django.db import migrations, models\n'), ((1977, 2064), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'db_column': '"""PESSOA_CONTATO"""', 'max_length': '(100)', 'null': '(True)'}), "(blank=True, db_column='PESSOA_CONTATO', max_length=100,\n null=True)\n", (1993, 2064), False, 'from django.db import migrations, models\n'), ((2310, 2385), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'db_column': '"""ID"""', 'primary_key': '(True)', 'serialize': '(False)'}), "(db_column='ID', primary_key=True, serialize=False)\n", (2334, 2385), False, 'from django.db import migrations, models\n'), ((2413, 2463), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""NOME"""', 'max_length': '(100)'}), "(db_column='NOME', max_length=100)\n", (2429, 2463), False, 'from django.db import migrations, models\n'), ((2490, 2526), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'db_column': '"""CPF"""'}), "(db_column='CPF')\n", (2509, 2526), False, 'from django.db import migrations, models\n'), ((2555, 2598), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'db_column': '"""CARGO"""'}), "(db_column='CARGO')\n", (2579, 2598), False, 'from django.db import migrations, models\n'), ((2627, 2678), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""LOGIN"""', 'max_length': '(100)'}), "(db_column='LOGIN', max_length=100)\n", (2643, 2678), False, 'from django.db import migrations, models\n'), ((2707, 2757), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""SENHA"""', 'max_length': '(50)'}), "(db_column='SENHA', max_length=50)\n", (2723, 2757), False, 'from django.db import migrations, models\n'), ((3009, 3084), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'db_column': '"""ID"""', 'primary_key': '(True)', 'serialize': '(False)'}), "(db_column='ID', primary_key=True, serialize=False)\n", (3033, 3084), False, 'from django.db import migrations, models\n'), ((3112, 3161), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""NOME"""', 'max_length': '(60)'}), "(db_column='NOME', max_length=60)\n", (3128, 3161), False, 'from django.db import migrations, models\n'), ((3194, 3248), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""FORMA_EMB"""', 'max_length': '(60)'}), "(db_column='FORMA_EMB', max_length=60)\n", (3210, 3248), False, 'from django.db import migrations, models\n'), ((3276, 3325), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""PESO"""', 'max_length': '(20)'}), "(db_column='PESO', max_length=20)\n", (3292, 3325), False, 'from django.db import migrations, models\n'), ((3360, 3416), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""UNID_MEDIDA"""', 'max_length': '(50)'}), "(db_column='UNID_MEDIDA', max_length=50)\n", (3376, 3416), False, 'from django.db import migrations, models\n'), ((3450, 3493), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'db_column': 
'"""QUANTIDADE"""'}), "(db_column='QUANTIDADE')\n", (3469, 3493), False, 'from django.db import migrations, models\n'), ((3531, 3578), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'db_column': '"""QUANTIDADE_MIN"""'}), "(db_column='QUANTIDADE_MIN')\n", (3550, 3578), False, 'from django.db import migrations, models\n'), ((3611, 3666), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""DESCRICAO"""', 'max_length': '(500)'}), "(db_column='DESCRICAO', max_length=500)\n", (3627, 3666), False, 'from django.db import migrations, models\n'), ((3706, 3752), 'django.db.models.DateField', 'models.DateField', ([], {'db_column': '"""DATA_RECEBIMENTO"""'}), "(db_column='DATA_RECEBIMENTO')\n", (3722, 3752), False, 'from django.db import migrations, models\n'), ((4008, 4083), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'db_column': '"""ID"""', 'primary_key': '(True)', 'serialize': '(False)'}), "(db_column='ID', primary_key=True, serialize=False)\n", (4032, 4083), False, 'from django.db import migrations, models\n'), ((4116, 4171), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""Descricao"""', 'max_length': '(500)'}), "(db_column='Descricao', max_length=500)\n", (4132, 4171), False, 'from django.db import migrations, models\n'), ((4421, 4496), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'db_column': '"""ID"""', 'primary_key': '(True)', 'serialize': '(False)'}), "(db_column='ID', primary_key=True, serialize=False)\n", (4445, 4496), False, 'from django.db import migrations, models\n'), ((4531, 4572), 'django.db.models.DateField', 'models.DateField', ([], {'db_column': '"""DATA_PEDIDO"""'}), "(db_column='DATA_PEDIDO')\n", (4547, 4572), False, 'from django.db import migrations, models\n'), ((4601, 4674), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'db_column': '"""VALOR"""', 'max_length': '(20)', 'null': '(True)'}), "(blank=True, db_column='VALOR', max_length=20, null=True)\n", (4617, 4674), False, 'from django.db import migrations, models\n'), ((4917, 4992), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'db_column': '"""ID"""', 'primary_key': '(True)', 'serialize': '(False)'}), "(db_column='ID', primary_key=True, serialize=False)\n", (4941, 4992), False, 'from django.db import migrations, models\n'), ((5027, 5068), 'django.db.models.DateField', 'models.DateField', ([], {'db_column': '"""DATA_PEDIDO"""'}), "(db_column='DATA_PEDIDO')\n", (5043, 5068), False, 'from django.db import migrations, models\n'), ((5105, 5148), 'django.db.models.DateField', 'models.DateField', ([], {'db_column': '"""DATA_PREVISTA"""'}), "(db_column='DATA_PREVISTA')\n", (5121, 5148), False, 'from django.db import migrations, models\n'), ((5181, 5259), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'db_column': '"""DESCRICAO"""', 'max_length': '(500)', 'null': '(True)'}), "(blank=True, db_column='DESCRICAO', max_length=500, null=True)\n", (5197, 5259), False, 'from django.db import migrations, models\n'), ((5288, 5361), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'db_column': '"""VALOR"""', 'max_length': '(20)', 'null': '(True)'}), "(blank=True, db_column='VALOR', max_length=20, null=True)\n", (5304, 5361), False, 'from django.db import migrations, models\n'), ((5605, 5672), 'django.db.models.AutoField', 'models.AutoField', ([], {'db_column': '"""ID"""', 'primary_key': '(True)', 'serialize': '(False)'}), 
"(db_column='ID', primary_key=True, serialize=False)\n", (5621, 5672), False, 'from django.db import migrations, models\n'), ((5700, 5749), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""NOME"""', 'max_length': '(60)'}), "(db_column='NOME', max_length=60)\n", (5716, 5749), False, 'from django.db import migrations, models\n'), ((5782, 5836), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""FORMA_EMB"""', 'max_length': '(60)'}), "(db_column='FORMA_EMB', max_length=60)\n", (5798, 5836), False, 'from django.db import migrations, models\n'), ((5864, 5913), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""PESO"""', 'max_length': '(20)'}), "(db_column='PESO', max_length=20)\n", (5880, 5913), False, 'from django.db import migrations, models\n'), ((5948, 6004), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""UNID_MEDIDA"""', 'max_length': '(50)'}), "(db_column='UNID_MEDIDA', max_length=50)\n", (5964, 6004), False, 'from django.db import migrations, models\n'), ((6033, 6106), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'db_column': '"""PRECO"""', 'max_length': '(10)', 'null': '(True)'}), "(blank=True, db_column='PRECO', max_length=10, null=True)\n", (6049, 6106), False, 'from django.db import migrations, models\n'), ((6140, 6206), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'db_column': '"""QUANTIDADE"""', 'null': '(True)'}), "(blank=True, db_column='QUANTIDADE', null=True)\n", (6159, 6206), False, 'from django.db import migrations, models\n'), ((6242, 6300), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""DESC_PRODUTO"""', 'max_length': '(500)'}), "(db_column='DESC_PRODUTO', max_length=500)\n", (6258, 6300), False, 'from django.db import migrations, models\n'), ((6541, 6616), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'db_column': '"""ID"""', 'primary_key': '(True)', 'serialize': '(False)'}), "(db_column='ID', primary_key=True, serialize=False)\n", (6565, 6616), False, 'from django.db import migrations, models\n'), ((6644, 6694), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""NOME"""', 'max_length': '(100)'}), "(db_column='NOME', max_length=100)\n", (6660, 6694), False, 'from django.db import migrations, models\n'), ((6947, 7022), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'db_column': '"""ID"""', 'primary_key': '(True)', 'serialize': '(False)'}), "(db_column='ID', primary_key=True, serialize=False)\n", (6971, 7022), False, 'from django.db import migrations, models\n'), ((7057, 7113), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""STATUS_NOME"""', 'max_length': '(30)'}), "(db_column='STATUS_NOME', max_length=30)\n", (7073, 7113), False, 'from django.db import migrations, models\n'), ((7372, 7447), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'db_column': '"""ID"""', 'primary_key': '(True)', 'serialize': '(False)'}), "(db_column='ID', primary_key=True, serialize=False)\n", (7396, 7447), False, 'from django.db import migrations, models\n'), ((7475, 7525), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""NOME"""', 'max_length': '(100)'}), "(db_column='NOME', max_length=100)\n", (7491, 7525), False, 'from django.db import migrations, models\n'), ((7779, 7854), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'db_column': '"""ID"""', 
'primary_key': '(True)', 'serialize': '(False)'}), "(db_column='ID', primary_key=True, serialize=False)\n", (7803, 7854), False, 'from django.db import migrations, models\n'), ((7882, 7932), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""NOME"""', 'max_length': '(100)'}), "(db_column='NOME', max_length=100)\n", (7898, 7932), False, 'from django.db import migrations, models\n')] |
vingkan/sql_tools | dataschema/entity.py | 5d6ab6a0ae31dc51e51ac1629f83f7bbf91396c1 | #
# nuna_sql_tools: Copyright 2022 Nuna Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Utilityes for checking and."""
import dataclasses
import datetime
import decimal
from types import ModuleType
from typing import NewType, Union
# In your data declaration python modules define a JAVA_PACKAGE
# variable at top level to specify the corresponding Java package of generated
# classes.
JAVA_PACKAGE = 'JAVA_PACKAGE'
def GetJavaPackage(module: ModuleType) -> str:
if hasattr(module, JAVA_PACKAGE):
return getattr(module, JAVA_PACKAGE)
else:
return module.__name__
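# Illustrative usage (hypothetical module): a schema module that declares
#   JAVA_PACKAGE = 'com.example.schema'
# at its top level makes GetJavaPackage(module) return 'com.example.schema';
# modules without it fall back to their __name__.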
_SCHEMA_ANNOTATIONS = '__schema_annotations__'
_EXPECTED_DICT_KEYS = set([
'__module__', '__annotations__', '__doc__', '__dict__', '__weakref__',
'__dataclass_params__', '__dataclass_fields__', _SCHEMA_ANNOTATIONS
])
_EXPECTED_FUNCTIONS = ['__init__', '__repr__', '__eq__', '__hash__']
_BASE_TYPES = set([
int, bytes, str, float, bool, datetime.date, datetime.datetime,
decimal.Decimal
])
_SCHEMA_ANNOTATIONS = '__schema_annotations__'
_CLASS_ID = 0
def _Annotate(cls=None, annotation=None):
"""Annotates a class or a type. `annotation` should from annotation.py"""
def Wrap(cls):
schema_annotations = []
if hasattr(cls, _SCHEMA_ANNOTATIONS):
schema_annotations.extend(getattr(cls, _SCHEMA_ANNOTATIONS))
if isinstance(annotation, list):
schema_annotations.extend(annotation)
else:
schema_annotations.append(annotation)
global _CLASS_ID
_CLASS_ID += 1
supertype = cls
if hasattr(cls, '__supertype__'):
supertype = cls.__supertype__
annotated_type = NewType(f'Annotated_{_CLASS_ID}', supertype)
setattr(annotated_type, _SCHEMA_ANNOTATIONS, schema_annotations)
return annotated_type
if cls is None:
return Wrap
return Wrap(cls)
def Annotate(cls, annotation):
"""Annotates a field type with the provided annotation."""
return _Annotate(cls, annotation=annotation)
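# Sketch (assumes an annotation object from annotation.py, here called `some_annotation`):
#   ColumnId = Annotate(int, some_annotation)
# returns a NewType carrying `some_annotation` in __schema_annotations__, while
# GetAnnotatedType(ColumnId) below still resolves back to int.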
def IsAnnotatedType(field_cls: type):
"""If provided field_cls is an annotated type."""
return hasattr(field_cls, _SCHEMA_ANNOTATIONS)
def GetAnnotatedType(field_cls: type):
"""Returns the original type behind the annotation (if any)."""
if IsAnnotatedType(field_cls) and hasattr(field_cls, '__supertype__'):
return field_cls.__supertype__
return field_cls
def IsOptionalType(field_cls: type):
"""If the field_cls looks like an Optional[...] type."""
return (hasattr(field_cls, '__origin__')
# pylint: disable=comparison-with-callable
and field_cls.__origin__ == Union and len(field_cls.__args__) == 2
and field_cls.__args__[1] == type(None))
def GetOptionalType(field_cls: type):
"""Returns the type of optional & annotation or None if not optional."""
field_cls = GetAnnotatedType(field_cls)
if IsOptionalType(field_cls):
return field_cls.__args__[0]
return None
def GetOriginalType(field_cls: type):
"""Returns the type of field_cls, behind annotations and Optional."""
field_cls = GetAnnotatedType(field_cls)
if IsOptionalType(field_cls):
return field_cls.__args__[0]
return field_cls
def GetStructuredTypeName(field_cls: type):
"""Returns the structure type name for a type, behind annotation."""
field_cls = GetAnnotatedType(field_cls)
if not hasattr(field_cls, '__origin__'):
return None
if field_cls.__origin__ is dict:
return 'dict'
elif field_cls.__origin__ is list:
return 'list'
elif field_cls.__origin__ is set:
return 'set'
return None
def IsBasicType(field_cls: type):
"""If the type field_cls looks like one of the basic field types."""
    return GetAnnotatedType(field_cls) in _BASE_TYPES
_MAX_DEPTH = 30
class FieldTypeChecker:
"""Checks the type of a fields in a dataclass."""
def __init__(self, field_name, field_cls):
self.field_name = field_name
self.field_cls = field_cls
self.checked = set()
def _check(self, field_cls, depth):
"""Check if the type of a field is acceptable."""
if field_cls in self.checked:
return True
if depth > _MAX_DEPTH:
raise ValueError(f'Recursive field type found at {field_cls} '
f'for field `{self.field_name}`')
field_cls = GetAnnotatedType(field_cls)
if IsBasicType(field_cls):
return True
if hasattr(field_cls, '__origin__'):
if field_cls.__origin__ is dict:
self._check(field_cls.__args__[0], depth)
self._check(field_cls.__args__[1], depth)
elif field_cls.__origin__ is list:
self._check(field_cls.__args__[0], depth)
elif field_cls.__origin__ is set:
self._check(field_cls.__args__[0], depth)
elif ( # pylint: disable=comparison-with-callable
field_cls.__origin__ == Union and
len(field_cls.__args__) == 2 and
field_cls.__args__[1] == type(None)):
if GetStructuredTypeName(field_cls) is not None:
raise ValueError('Cannot have Optional structured fields.'
'(e.g. Optional[List or Set or Dict])')
# Optional[...]
self._check(field_cls.__args__[0], depth)
else:
raise ValueError(f'Invalid origin class for {field_cls}: '
f'`{field_cls.__origin__}`')
else:
checker = DataclassChecker(field_cls)
if checker.check_is_dataclass() is not None:
raise ValueError(
f'Invalid type surfaced for field `{self.field_name}`: '
f'`{self.field_cls}` - {field_cls} is not acceptable')
err = checker.check()
if err:
errors = '; '.join(err)
raise ValueError(
f'Subfield entity class of field `{self.field_name}` '
f'({field_cls}) has type errors: {errors}')
self.checked.add(field_cls)
return True
def check(self):
return self._check(self.field_cls, 0)
class DataclassChecker:
"""Checks if a python type and its structure conforms to Dataclass specs."""
def __init__(self, cls: type):
self.cls = cls
self.nested = []
def _err_class(self):
return f'dataclass class `{self.cls}` in module `{self.cls.__module__}`'
def _err_field(self, field: str):
return (f'field `{field}` of dataclass class `{self.cls.__name__}` '
f'in module `{self.cls.__module__}`')
def check_is_dataclass(self):
if not dataclasses.is_dataclass(self.cls):
return f'{self._err_class()} is not a dataclass'
return None
def _check_type(self, field_name, field_cls):
try:
FieldTypeChecker(field_name, field_cls).check()
return None
except ValueError as e:
return f'{e.args[0]} for {self._err_field(field_name)}'
def _check_field_type(self, field_name, field_cls):
        return self._check_type(field_name, GetOriginalType(field_cls))
def _check_dataclass_members(self):
err = []
for key in self.cls.__dict__:
# pylint: disable=comparison-with-callable,unidiomatic-typecheck
if type(self.cls.__dict__[key]) == type:
self.nested.append(
(key, DataclassChecker(self.cls.__dict__[key])))
elif callable(
self.cls.__dict__[key]) and key not in _EXPECTED_FUNCTIONS:
err.append(f'{self._err_class()} has unexpected function '
f'member `{key}`')
elif (key not in _EXPECTED_DICT_KEYS and
key not in _EXPECTED_FUNCTIONS and
key not in self.cls.__annotations__):
err.append(f'{self._err_class()} has unexpected / non annotated'
f' member `{key}`: {self.cls.__dict__[key]}')
for field in dataclasses.fields(self.cls):
field_err = self._check_field_type(field.name, field.type)
if field_err is not None:
err.append(field_err)
for nested in self.nested:
for nested_err in nested[1].check():
err.append(f'{nested_err}; for nested sub-class '
f'{nested[0]} of {self._err_class()}')
return err
def check(self):
err_dataclass = self.check_is_dataclass()
if err_dataclass is not None:
return [err_dataclass]
return self._check_dataclass_members()
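# Minimal usage sketch (hypothetical dataclass, added for illustration):
#
#   @dataclasses.dataclass
#   class Example:
#       example_id: int
#       name: str = ''
#
#   errors = DataclassChecker(Example).check()  # list of error strings; empty when the check passes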
def SchemaAnnotations(cls: type):
"""Returns the schema annotations of a type."""
annotations = []
if hasattr(cls, _SCHEMA_ANNOTATIONS):
annotations.extend(cls.__schema_annotations__)
return annotations
| [((2196, 2240), 'typing.NewType', 'NewType', (['f"""Annotated_{_CLASS_ID}"""', 'supertype'], {}), "(f'Annotated_{_CLASS_ID}', supertype)\n", (2203, 2240), False, 'from typing import NewType, Union\n'), ((8773, 8801), 'dataclasses.fields', 'dataclasses.fields', (['self.cls'], {}), '(self.cls)\n', (8791, 8801), False, 'import dataclasses\n'), ((7383, 7417), 'dataclasses.is_dataclass', 'dataclasses.is_dataclass', (['self.cls'], {}), '(self.cls)\n', (7407, 7417), False, 'import dataclasses\n')] |
melkisedeath/Harmonic_Analysis_and_Trajectory | Data_and_Dicts.py | a5a2819c053ddd287dcb668fac2f1be7e44f6c59 | """Here are the base points for all valid Tonnetz systems.
One period of all 12 notes, with coordinates taken mod 3 and mod 4 (always stable).
"""
# x = 4, y = 3
NotePointsT345 = {
0: (0, 0),
1: (1, 3),
2: (2, 2),
3: (0, 1),
4: (1, 0),
5: (2, 3),
6: (0, 2),
7: (1, 1),
8: (2, 0),
9: (0, 3),
10: (1, 2),
11: (2, 1)
}
# x = 8, y = 3
NotePointsT138 = {
0: (0, 0),
1: (2, 3),
2: (1, 2),
3: (0, 1),
4: (2, 0),
5: (1, 3),
6: (0, 2),
7: (2, 1),
8: (1, 0),
9: (0, 3),
10: (2, 2),
11: (1, 1)
}
# x = 2, y = 9
NotePointsT129 = {
0: (0, 0),
1: (2, 1),
2: (1, 0),
3: (0, 3),
4: (2, 0),
5: (1, 3),
6: (0, 2),
7: (2, 3),
8: (1, 2),
9: (0, 1),
10: (2, 2),
11: (1, 1)
}
# x = 4, y = 1
NotePointsT147 = {
0: (0, 0),
1: (0, 1),
2: (0, 2),
3: (0, 3),
4: (1, 0),
5: (1, 1),
6: (1, 2),
7: (1, 3),
8: (2, 0),
9: (2, 1),
10: (2, 2),
11: (2, 3)
}
# x = 2, y = 3
NotePointsT237 = {
0: (0, 0),
1: (2, 3),
2: (1, 0),
3: (0, 1),
4: (2, 0),
5: (1, 1),
6: (0, 2),
7: (2, 1),
8: (1, 2),
9: (0, 3),
10: (2, 2),
11: (1, 3)
}
dictOfTonnetz = {
'T345': NotePointsT345,
'T147': NotePointsT147,
'T138': NotePointsT138,
'T237': NotePointsT237,
'T129': NotePointsT129
}
dictOfTonnetze = {
'T129': [1, 2, 9],
'T138': [1, 3, 8],
'T147': [1, 4, 7],
'T156': [1, 5, 6],
'T237': [2, 3, 7],
'T345': [3, 4, 5]
}
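if __name__ == '__main__':
    # Consistency check (added for illustration): for each system, the generator
    # intervals (x, y) noted in the comments above satisfy x*a + y*b == n (mod 12)
    # for every note n placed at point (a, b).
    generators = {'T345': (4, 3), 'T138': (8, 3), 'T129': (2, 9),
                  'T147': (4, 1), 'T237': (2, 3)}
    for name, points in dictOfTonnetz.items():
        x, y = generators[name]
        assert all((x * a + y * b) % 12 == n for n, (a, b) in points.items()), name
    print("All note-point tables match their generator intervals.")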
| [] |
alanjjenkins/awacs | awacs/proton.py | 0065e1833eae6a6070edb4ab4f180fd10b26c19a | # Copyright (c) 2012-2021, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from .aws import Action as BaseAction
from .aws import BaseARN
service_name = "AWS Proton"
prefix = "proton"
class Action(BaseAction):
def __init__(self, action: str = None) -> None:
super().__init__(prefix, action)
class ARN(BaseARN):
def __init__(self, resource: str = "", region: str = "", account: str = "") -> None:
super().__init__(
service=prefix, resource=resource, region=region, account=account
)
CreateEnvironment = Action("CreateEnvironment")
CreateEnvironmentTemplate = Action("CreateEnvironmentTemplate")
CreateEnvironmentTemplateMajorVersion = Action("CreateEnvironmentTemplateMajorVersion")
CreateEnvironmentTemplateMinorVersion = Action("CreateEnvironmentTemplateMinorVersion")
CreateService = Action("CreateService")
CreateServiceTemplate = Action("CreateServiceTemplate")
CreateServiceTemplateMajorVersion = Action("CreateServiceTemplateMajorVersion")
CreateServiceTemplateMinorVersion = Action("CreateServiceTemplateMinorVersion")
DeleteAccountRoles = Action("DeleteAccountRoles")
DeleteEnvironment = Action("DeleteEnvironment")
DeleteEnvironmentTemplate = Action("DeleteEnvironmentTemplate")
DeleteEnvironmentTemplateMajorVersion = Action("DeleteEnvironmentTemplateMajorVersion")
DeleteEnvironmentTemplateMinorVersion = Action("DeleteEnvironmentTemplateMinorVersion")
DeleteService = Action("DeleteService")
DeleteServiceTemplate = Action("DeleteServiceTemplate")
DeleteServiceTemplateMajorVersion = Action("DeleteServiceTemplateMajorVersion")
DeleteServiceTemplateMinorVersion = Action("DeleteServiceTemplateMinorVersion")
GetAccountRoles = Action("GetAccountRoles")
GetEnvironment = Action("GetEnvironment")
GetEnvironmentTemplate = Action("GetEnvironmentTemplate")
GetEnvironmentTemplateMajorVersion = Action("GetEnvironmentTemplateMajorVersion")
GetEnvironmentTemplateMinorVersion = Action("GetEnvironmentTemplateMinorVersion")
GetService = Action("GetService")
GetServiceInstance = Action("GetServiceInstance")
GetServiceTemplate = Action("GetServiceTemplate")
GetServiceTemplateMajorVersion = Action("GetServiceTemplateMajorVersion")
GetServiceTemplateMinorVersion = Action("GetServiceTemplateMinorVersion")
ListEnvironmentTemplateMajorVersions = Action("ListEnvironmentTemplateMajorVersions")
ListEnvironmentTemplateMinorVersions = Action("ListEnvironmentTemplateMinorVersions")
ListEnvironmentTemplates = Action("ListEnvironmentTemplates")
ListEnvironments = Action("ListEnvironments")
ListServiceInstances = Action("ListServiceInstances")
ListServiceTemplateMajorVersions = Action("ListServiceTemplateMajorVersions")
ListServiceTemplateMinorVersions = Action("ListServiceTemplateMinorVersions")
ListServiceTemplates = Action("ListServiceTemplates")
ListServices = Action("ListServices")
ListTagsForResource = Action("ListTagsForResource")
TagResource = Action("TagResource")
UntagResource = Action("UntagResource")
UpdateAccountRoles = Action("UpdateAccountRoles")
UpdateEnvironment = Action("UpdateEnvironment")
UpdateEnvironmentTemplate = Action("UpdateEnvironmentTemplate")
UpdateEnvironmentTemplateMajorVersion = Action("UpdateEnvironmentTemplateMajorVersion")
UpdateEnvironmentTemplateMinorVersion = Action("UpdateEnvironmentTemplateMinorVersion")
UpdateService = Action("UpdateService")
UpdateServiceInstance = Action("UpdateServiceInstance")
UpdateServicePipeline = Action("UpdateServicePipeline")
UpdateServiceTemplate = Action("UpdateServiceTemplate")
UpdateServiceTemplateMajorVersion = Action("UpdateServiceTemplateMajorVersion")
UpdateServiceTemplateMinorVersion = Action("UpdateServiceTemplateMinorVersion")
| [] |
LydiaMelles/relativum | src/error.py | c7c0fc2b5c8d01842f6a02b67e7145cbd3a1ff65 | class RequirementsNotMetError(Exception):
"""For SQL INSERT, missing table attributes."""
def __init__(self, message):
super().__init__(message)
class AuthenticationError(Exception):
"""Generic authentication error."""
def __init__(self, message):
super().__init__(message)
| [] |
lorenrose1013/jaxline | jaxline/utils_test.py | 29fca9944651d42139d4103fe12ef29b24812eb6 | # Copyright 2020 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for jaxline's utils."""
import functools
import itertools as it
import time
from unittest import mock
from absl.testing import absltest
from absl.testing import flagsaver
import jax
import jax.numpy as jnp
from jaxline import utils
import numpy as np
class PyPrefetchTest(absltest.TestCase):
def testEmpty(self):
self.assertEqual(list(utils.py_prefetch(lambda: ())), [])
def testBaseCase(self):
self.assertEqual(list(utils.py_prefetch(lambda: range(100))),
list(range(100)))
def testBadFunction(self):
def _bad_function():
raise ValueError
iterable = utils.py_prefetch(_bad_function)
with self.assertRaises(ValueError):
next(iterable)
def testBadFunctionIteration(self):
def _bad_iterable():
yield 1
raise ValueError
iterable = utils.py_prefetch(_bad_iterable)
self.assertEqual(next(iterable), 1)
with self.assertRaises(ValueError):
next(iterable)
class TreePsumTest(absltest.TestCase):
def testBaseCase(self):
# pick leaf objects with leading dimension one as these tests will
# be run on a single device.
data = {"a": jnp.array([1]), "b": jnp.array([2])}
data_summed = jax.pmap(
lambda x: utils.tree_psum(x, axis_name="i"), axis_name="i")(data)
self.assertEqual(data_summed, data)
def testEmpty(self):
data = {"a": jnp.array([]), "b": jnp.array([])}
with self.assertRaises(ZeroDivisionError):
jax.pmap(lambda x: utils.tree_psum(x, axis_name="i"), axis_name="i")(data)
def testSingleLeafTree(self):
data = jnp.array([1])
data_summed = jax.pmap(
lambda x: utils.tree_psum(x, axis_name="i"), axis_name="i")(data)
self.assertEqual(data_summed, data)
def testNotNumpy(self):
data = [1]
with self.assertRaises(ValueError):
jax.pmap(lambda x: utils.tree_psum(x, axis_name="i"), axis_name="i")(data)
def testNumDevicesMismatch(self):
data = jnp.array([1, 2]) # assumes 2 devices but we only have 1
with self.assertRaises(ValueError):
jax.pmap(lambda x: utils.tree_psum(x, axis_name="i"), axis_name="i")(data)
def testNoPmapWrapper(self):
with self.assertRaises(NameError): # axis_name will be undefined
utils.tree_psum(jnp.array([1]), axis_name="i")
def testAxisNameMismatch(self):
data = jnp.array([1])
with self.assertRaises(NameError):
jax.pmap(lambda x: utils.tree_psum(x, axis_name="i"), axis_name="j")(data)
class MakeAsyncTest(absltest.TestCase):
def testBaseCase(self):
"""Tests correct execution for single call."""
r = []
async_fn = utils.make_async()(lambda: r.append("a"))
async_fn()
time.sleep(1)
self.assertListEqual(r, ["a"])
def testNonBlocking(self):
"""Tests async function doesn't block the main thread."""
r = []
async_fn = utils.make_async()(lambda: r.append((time.sleep(5), "a")))
r.append((None, "b"))
async_fn().result()
self.assertListEqual(r, [(None, "b"), (None, "a")])
def testSerialExecution(self):
"""Tests multiple calls to async function execute serially."""
r = []
a = lambda: r.append((time.sleep(5), "a"))
b = lambda: r.append((None, "b"))
async_fn = utils.make_async()(lambda f: f())
async_fn(a)
async_fn(b).result()
self.assertListEqual(r, [(None, "a"), (None, "b")])
def testErrorOnNextCall(self):
"""Tests background thread error raised in main thread on next call."""
@utils.make_async()
def async_fn():
raise ValueError()
# First call will trigger an error in the background thread.
async_fn()
with self.assertRaises(ValueError):
# Background thread error will be raised in the main thread on next call
async_fn()
def testSubsequentCallsDontRun(self):
"""Tests that subsequent calls don't run after an error has occurred."""
runs = []
@utils.make_async()
def async_fn():
runs.append(None)
raise ValueError()
# First call will trigger an error in the background thread.
async_fn()
for _ in range(2):
with self.assertRaises(ValueError):
# Background thread error will be raised in the main thread on
# subsequent calls and _bad_function will not be run.
async_fn()
self.assertListEqual(runs, [None])
def testErrorInBackgroundThread(self):
"""Tests background thread raises the error."""
@utils.make_async()
def async_fn():
raise ValueError()
future = async_fn() # pylint: disable=assignment-from-no-return
self.assertIsNotNone(future.exception())
class TestBroadcast(absltest.TestCase):
def test_bcast_local_devices(self):
self.assertEqual(utils.bcast_local_devices(jnp.zeros([])),
jnp.zeros([jax.local_device_count()]))
self.assertEqual(utils.bcast_local_devices(jnp.ones([])),
jnp.ones([jax.local_device_count()]))
def test_bcast_local_devices_empty_tree(self):
self.assertIsNone(utils.bcast_local_devices(None))
self.assertEqual(utils.bcast_local_devices({}), {})
def test_bcast_local_devices_tree(self):
num_devices = jax.local_device_count()
tree = utils.bcast_local_devices({"ones": jnp.ones([]),
"zeros": jnp.zeros([])})
self.assertEqual(tree, {"ones": jnp.ones([num_devices]),
"zeros": jnp.zeros([num_devices])})
class TestLogActivity(absltest.TestCase):
@mock.patch("jaxline.utils.logging.info")
def test_log_success(self, mock_info):
"""Tests that logging an activity is successful."""
with utils.log_activity("for test"):
pass
mock_info.assert_any_call("[jaxline] %s starting...", "for test")
mock_info.assert_any_call("[jaxline] %s finished.", "for test")
@mock.patch("absl.logging.exception")
@mock.patch("absl.logging.info")
def test_log_failure(self, mock_info, mock_exc):
"""Tests that an error thrown by an activity is correctly caught."""
with self.assertRaisesRegex(ValueError, "Intentional"):
with utils.log_activity("for test"):
raise ValueError("Intentional")
mock_info.assert_any_call("[jaxline] %s starting...", "for test")
mock_exc.assert_any_call("[jaxline] %s failed with error.", "for test")
class TestSpecializeRngHostDevice(absltest.TestCase):
@classmethod
def setUpClass(cls):
super(TestSpecializeRngHostDevice, cls).setUpClass()
rng = jax.random.PRNGKey(0)
cls.rng = jnp.broadcast_to(
rng, (jax.local_device_count(),) + rng.shape)
def test_unique_device(self):
"""Tests that rngs are unique across devices."""
mode = "unique_host_unique_device"
host_id_devices = utils.host_id_devices_for_rng(mode)
specialize_func = jax.pmap(functools.partial(
utils.specialize_rng_host_device, axis_name="i",
mode=mode), axis_name="i")
rng = specialize_func(self.rng, host_id_devices)
self.assertEqual(
np.unique(rng, axis=0).shape[0], jax.local_device_count())
def test_same_device(self):
"""Tests rngs are same across devices."""
mode = "unique_host_same_device"
host_id_devices = utils.host_id_devices_for_rng(mode)
specialize_func = jax.pmap(functools.partial(
utils.specialize_rng_host_device, axis_name="i",
mode=mode), axis_name="i")
rng = specialize_func(self.rng, host_id_devices)
self.assertEqual(
np.unique(rng, axis=0).shape[0], 1)
def test_unique_host(self):
"""Tests rngs unique between hosts."""
mode = "unique_host_same_device"
with mock.patch.object(utils.jax, "host_id", return_value=0):
host_id_devices = utils.host_id_devices_for_rng(mode)
specialize_func = jax.pmap(functools.partial(
utils.specialize_rng_host_device, axis_name="i",
mode=mode), axis_name="i")
rng0 = specialize_func(self.rng, host_id_devices)
with mock.patch.object(utils.jax, "host_id", return_value=1):
host_id_devices = utils.host_id_devices_for_rng(mode)
specialize_func = jax.pmap(functools.partial(
utils.specialize_rng_host_device, axis_name="i",
mode=mode), axis_name="i")
rng1 = specialize_func(self.rng, host_id_devices)
self.assertEqual(
np.unique(np.concatenate([rng0, rng1], axis=0), axis=0).shape[0], 2)
class TestRendezvous(absltest.TestCase):
def test_rendezvous(self):
"""Test that rendezvous doesn't fail."""
utils.rendezvous()
class TestJaxlineDisablePmapJit(absltest.TestCase):
@mock.patch.object(utils.chex, "fake_pmap_and_jit", autospec=True)
def test_pmap_jit_disabled(self, mock_fake_pmap_and_jit):
"""Tests pmap/jit are disabled if --jaxline_disable_pmap_jit is set."""
with self.subTest("PmapJitNotDisabled"):
with flagsaver.flagsaver(jaxline_disable_pmap_jit=False):
utils.disable_pmap_jit(lambda: None)()
mock_fake_pmap_and_jit.assert_not_called()
with self.subTest("PmapJitDisabled"):
with flagsaver.flagsaver(jaxline_disable_pmap_jit=True):
utils.disable_pmap_jit(lambda: None)()
mock_fake_pmap_and_jit.assert_called_once()
class DoubleBufferTest(absltest.TestCase):
def test_double_buffer(self):
if jax.default_backend() != "gpu":
self.skipTest("Only necessary on GPU.")
n = jax.local_device_count()
dataset = it.repeat(np.ones([n]))
iterator = iter(utils.double_buffer(dataset))
batch_ptrs = []
while len(batch_ptrs) < 4:
batch = next(iterator)
ptrs = [b.unsafe_buffer_pointer() for b in batch.device_buffers]
batch_ptrs.append(ptrs)
del batch
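    # utils.double_buffer keeps two device buffers alive and alternates between
    # them, so buffer pointers repeat with period two and consecutive batches
    # never share a buffer — exactly what the assertions below check.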
self.assertEqual(batch_ptrs[0], batch_ptrs[2])
self.assertEqual(batch_ptrs[1], batch_ptrs[3])
self.assertNotEqual(batch_ptrs[0], batch_ptrs[1])
self.assertNotEqual(batch_ptrs[2], batch_ptrs[3])
if __name__ == "__main__":
absltest.main()
| [((6154, 6194), 'unittest.mock.patch', 'mock.patch', (['"""jaxline.utils.logging.info"""'], {}), "('jaxline.utils.logging.info')\n", (6164, 6194), False, 'from unittest import mock\n'), ((6488, 6524), 'unittest.mock.patch', 'mock.patch', (['"""absl.logging.exception"""'], {}), "('absl.logging.exception')\n", (6498, 6524), False, 'from unittest import mock\n'), ((6528, 6559), 'unittest.mock.patch', 'mock.patch', (['"""absl.logging.info"""'], {}), "('absl.logging.info')\n", (6538, 6559), False, 'from unittest import mock\n'), ((9222, 9287), 'unittest.mock.patch.object', 'mock.patch.object', (['utils.chex', '"""fake_pmap_and_jit"""'], {'autospec': '(True)'}), "(utils.chex, 'fake_pmap_and_jit', autospec=True)\n", (9239, 9287), False, 'from unittest import mock\n'), ((10561, 10576), 'absl.testing.absltest.main', 'absltest.main', ([], {}), '()\n', (10574, 10576), False, 'from absl.testing import absltest\n'), ((1314, 1346), 'jaxline.utils.py_prefetch', 'utils.py_prefetch', (['_bad_function'], {}), '(_bad_function)\n', (1331, 1346), False, 'from jaxline import utils\n'), ((1526, 1558), 'jaxline.utils.py_prefetch', 'utils.py_prefetch', (['_bad_iterable'], {}), '(_bad_iterable)\n', (1543, 1558), False, 'from jaxline import utils\n'), ((2276, 2290), 'jax.numpy.array', 'jnp.array', (['[1]'], {}), '([1])\n', (2285, 2290), True, 'import jax.numpy as jnp\n'), ((2644, 2661), 'jax.numpy.array', 'jnp.array', (['[1, 2]'], {}), '([1, 2])\n', (2653, 2661), True, 'import jax.numpy as jnp\n'), ((3024, 3038), 'jax.numpy.array', 'jnp.array', (['[1]'], {}), '([1])\n', (3033, 3038), True, 'import jax.numpy as jnp\n'), ((3366, 3379), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3376, 3379), False, 'import time\n'), ((4157, 4175), 'jaxline.utils.make_async', 'utils.make_async', ([], {}), '()\n', (4173, 4175), False, 'from jaxline import utils\n'), ((4578, 4596), 'jaxline.utils.make_async', 'utils.make_async', ([], {}), '()\n', (4594, 4596), False, 'from jaxline import utils\n'), ((5105, 5123), 'jaxline.utils.make_async', 'utils.make_async', ([], {}), '()\n', (5121, 5123), False, 'from jaxline import utils\n'), ((5833, 5857), 'jax.local_device_count', 'jax.local_device_count', ([], {}), '()\n', (5855, 5857), False, 'import jax\n'), ((7137, 7158), 'jax.random.PRNGKey', 'jax.random.PRNGKey', (['(0)'], {}), '(0)\n', (7155, 7158), False, 'import jax\n'), ((7393, 7428), 'jaxline.utils.host_id_devices_for_rng', 'utils.host_id_devices_for_rng', (['mode'], {}), '(mode)\n', (7422, 7428), False, 'from jaxline import utils\n'), ((7852, 7887), 'jaxline.utils.host_id_devices_for_rng', 'utils.host_id_devices_for_rng', (['mode'], {}), '(mode)\n', (7881, 7887), False, 'from jaxline import utils\n'), ((9145, 9163), 'jaxline.utils.rendezvous', 'utils.rendezvous', ([], {}), '()\n', (9161, 9163), False, 'from jaxline import utils\n'), ((10009, 10033), 'jax.local_device_count', 'jax.local_device_count', ([], {}), '()\n', (10031, 10033), False, 'import jax\n'), ((1849, 1863), 'jax.numpy.array', 'jnp.array', (['[1]'], {}), '([1])\n', (1858, 1863), True, 'import jax.numpy as jnp\n'), ((1870, 1884), 'jax.numpy.array', 'jnp.array', (['[2]'], {}), '([2])\n', (1879, 1884), True, 'import jax.numpy as jnp\n'), ((2069, 2082), 'jax.numpy.array', 'jnp.array', (['[]'], {}), '([])\n', (2078, 2082), True, 'import jax.numpy as jnp\n'), ((2089, 2102), 'jax.numpy.array', 'jnp.array', (['[]'], {}), '([])\n', (2098, 2102), True, 'import jax.numpy as jnp\n'), ((3305, 3323), 'jaxline.utils.make_async', 'utils.make_async', ([], {}), '()\n', (3321, 
3323), False, 'from jaxline import utils\n'), ((3533, 3551), 'jaxline.utils.make_async', 'utils.make_async', ([], {}), '()\n', (3549, 3551), False, 'from jaxline import utils\n'), ((3910, 3928), 'jaxline.utils.make_async', 'utils.make_async', ([], {}), '()\n', (3926, 3928), False, 'from jaxline import utils\n'), ((5682, 5713), 'jaxline.utils.bcast_local_devices', 'utils.bcast_local_devices', (['None'], {}), '(None)\n', (5707, 5713), False, 'from jaxline import utils\n'), ((5736, 5765), 'jaxline.utils.bcast_local_devices', 'utils.bcast_local_devices', (['{}'], {}), '({})\n', (5761, 5765), False, 'from jaxline import utils\n'), ((6302, 6332), 'jaxline.utils.log_activity', 'utils.log_activity', (['"""for test"""'], {}), "('for test')\n", (6320, 6332), False, 'from jaxline import utils\n'), ((7460, 7537), 'functools.partial', 'functools.partial', (['utils.specialize_rng_host_device'], {'axis_name': '"""i"""', 'mode': 'mode'}), "(utils.specialize_rng_host_device, axis_name='i', mode=mode)\n", (7477, 7537), False, 'import functools\n'), ((7689, 7713), 'jax.local_device_count', 'jax.local_device_count', ([], {}), '()\n', (7711, 7713), False, 'import jax\n'), ((7919, 7996), 'functools.partial', 'functools.partial', (['utils.specialize_rng_host_device'], {'axis_name': '"""i"""', 'mode': 'mode'}), "(utils.specialize_rng_host_device, axis_name='i', mode=mode)\n", (7936, 7996), False, 'import functools\n'), ((8271, 8326), 'unittest.mock.patch.object', 'mock.patch.object', (['utils.jax', '"""host_id"""'], {'return_value': '(0)'}), "(utils.jax, 'host_id', return_value=0)\n", (8288, 8326), False, 'from unittest import mock\n'), ((8352, 8387), 'jaxline.utils.host_id_devices_for_rng', 'utils.host_id_devices_for_rng', (['mode'], {}), '(mode)\n', (8381, 8387), False, 'from jaxline import utils\n'), ((8601, 8656), 'unittest.mock.patch.object', 'mock.patch.object', (['utils.jax', '"""host_id"""'], {'return_value': '(1)'}), "(utils.jax, 'host_id', return_value=1)\n", (8618, 8656), False, 'from unittest import mock\n'), ((8682, 8717), 'jaxline.utils.host_id_devices_for_rng', 'utils.host_id_devices_for_rng', (['mode'], {}), '(mode)\n', (8711, 8717), False, 'from jaxline import utils\n'), ((9922, 9943), 'jax.default_backend', 'jax.default_backend', ([], {}), '()\n', (9941, 9943), False, 'import jax\n'), ((10058, 10070), 'numpy.ones', 'np.ones', (['[n]'], {}), '([n])\n', (10065, 10070), True, 'import numpy as np\n'), ((10092, 10120), 'jaxline.utils.double_buffer', 'utils.double_buffer', (['dataset'], {}), '(dataset)\n', (10111, 10120), False, 'from jaxline import utils\n'), ((1051, 1081), 'jaxline.utils.py_prefetch', 'utils.py_prefetch', (['(lambda : ())'], {}), '(lambda : ())\n', (1068, 1081), False, 'from jaxline import utils\n'), ((2947, 2961), 'jax.numpy.array', 'jnp.array', (['[1]'], {}), '([1])\n', (2956, 2961), True, 'import jax.numpy as jnp\n'), ((5412, 5425), 'jax.numpy.zeros', 'jnp.zeros', (['[]'], {}), '([])\n', (5421, 5425), True, 'import jax.numpy as jnp\n'), ((5536, 5548), 'jax.numpy.ones', 'jnp.ones', (['[]'], {}), '([])\n', (5544, 5548), True, 'import jax.numpy as jnp\n'), ((5904, 5916), 'jax.numpy.ones', 'jnp.ones', (['[]'], {}), '([])\n', (5912, 5916), True, 'import jax.numpy as jnp\n'), ((5965, 5978), 'jax.numpy.zeros', 'jnp.zeros', (['[]'], {}), '([])\n', (5974, 5978), True, 'import jax.numpy as jnp\n'), ((6017, 6040), 'jax.numpy.ones', 'jnp.ones', (['[num_devices]'], {}), '([num_devices])\n', (6025, 6040), True, 'import jax.numpy as jnp\n'), ((6079, 6103), 'jax.numpy.zeros', 'jnp.zeros', 
(['[num_devices]'], {}), '([num_devices])\n', (6088, 6103), True, 'import jax.numpy as jnp\n'), ((6756, 6786), 'jaxline.utils.log_activity', 'utils.log_activity', (['"""for test"""'], {}), "('for test')\n", (6774, 6786), False, 'from jaxline import utils\n'), ((8421, 8498), 'functools.partial', 'functools.partial', (['utils.specialize_rng_host_device'], {'axis_name': '"""i"""', 'mode': 'mode'}), "(utils.specialize_rng_host_device, axis_name='i', mode=mode)\n", (8438, 8498), False, 'import functools\n'), ((8751, 8828), 'functools.partial', 'functools.partial', (['utils.specialize_rng_host_device'], {'axis_name': '"""i"""', 'mode': 'mode'}), "(utils.specialize_rng_host_device, axis_name='i', mode=mode)\n", (8768, 8828), False, 'import functools\n'), ((9481, 9532), 'absl.testing.flagsaver.flagsaver', 'flagsaver.flagsaver', ([], {'jaxline_disable_pmap_jit': '(False)'}), '(jaxline_disable_pmap_jit=False)\n', (9500, 9532), False, 'from absl.testing import flagsaver\n'), ((9686, 9736), 'absl.testing.flagsaver.flagsaver', 'flagsaver.flagsaver', ([], {'jaxline_disable_pmap_jit': '(True)'}), '(jaxline_disable_pmap_jit=True)\n', (9705, 9736), False, 'from absl.testing import flagsaver\n'), ((1932, 1965), 'jaxline.utils.tree_psum', 'utils.tree_psum', (['x'], {'axis_name': '"""i"""'}), "(x, axis_name='i')\n", (1947, 1965), False, 'from jaxline import utils\n'), ((2337, 2370), 'jaxline.utils.tree_psum', 'utils.tree_psum', (['x'], {'axis_name': '"""i"""'}), "(x, axis_name='i')\n", (2352, 2370), False, 'from jaxline import utils\n'), ((3836, 3849), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (3846, 3849), False, 'import time\n'), ((5460, 5484), 'jax.local_device_count', 'jax.local_device_count', ([], {}), '()\n', (5482, 5484), False, 'import jax\n'), ((5582, 5606), 'jax.local_device_count', 'jax.local_device_count', ([], {}), '()\n', (5604, 5606), False, 'import jax\n'), ((7205, 7229), 'jax.local_device_count', 'jax.local_device_count', ([], {}), '()\n', (7227, 7229), False, 'import jax\n'), ((7656, 7678), 'numpy.unique', 'np.unique', (['rng'], {'axis': '(0)'}), '(rng, axis=0)\n', (7665, 7678), True, 'import numpy as np\n'), ((8114, 8136), 'numpy.unique', 'np.unique', (['rng'], {'axis': '(0)'}), '(rng, axis=0)\n', (8123, 8136), True, 'import numpy as np\n'), ((9542, 9579), 'jaxline.utils.disable_pmap_jit', 'utils.disable_pmap_jit', (['(lambda : None)'], {}), '(lambda : None)\n', (9564, 9579), False, 'from jaxline import utils\n'), ((9746, 9783), 'jaxline.utils.disable_pmap_jit', 'utils.disable_pmap_jit', (['(lambda : None)'], {}), '(lambda : None)\n', (9768, 9783), False, 'from jaxline import utils\n'), ((2176, 2209), 'jaxline.utils.tree_psum', 'utils.tree_psum', (['x'], {'axis_name': '"""i"""'}), "(x, axis_name='i')\n", (2191, 2209), False, 'from jaxline import utils\n'), ((2540, 2573), 'jaxline.utils.tree_psum', 'utils.tree_psum', (['x'], {'axis_name': '"""i"""'}), "(x, axis_name='i')\n", (2555, 2573), False, 'from jaxline import utils\n'), ((2767, 2800), 'jaxline.utils.tree_psum', 'utils.tree_psum', (['x'], {'axis_name': '"""i"""'}), "(x, axis_name='i')\n", (2782, 2800), False, 'from jaxline import utils\n'), ((3103, 3136), 'jaxline.utils.tree_psum', 'utils.tree_psum', (['x'], {'axis_name': '"""i"""'}), "(x, axis_name='i')\n", (3118, 3136), False, 'from jaxline import utils\n'), ((3570, 3583), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (3580, 3583), False, 'import time\n'), ((8963, 8999), 'numpy.concatenate', 'np.concatenate', (['[rng0, rng1]'], {'axis': '(0)'}), '([rng0, rng1], 
axis=0)\n', (8977, 8999), True, 'import numpy as np\n')] |
deepcoder42/mysql-lib | test/unit/mysql_class/slaverep_isslverror.py | d3d2459e0476fdbc4465e1d9389612e58d36fb25 | #!/usr/bin/python
# Classification (U)
"""Program: slaverep_isslverror.py
Description: Unit testing of SlaveRep.is_slv_error in mysql_class.py.
Usage:
test/unit/mysql_class/slaverep_isslverror.py
Arguments:
"""
# Libraries and Global Variables
# Standard
import sys
import os
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
# Third-party
# Local
sys.path.append(os.getcwd())
import mysql_class
import lib.machine as machine
import version
__version__ = version.__version__
class UnitTest(unittest.TestCase):
"""Class: UnitTest
Description: Class which is a representation of a unit testing.
Methods:
setUp -> Initialize testing environment.
test_slv_both_true -> Test with all attrs set to True.
test_sql_err_true -> Test with sql_err set to True.
test_io_err_true -> Test with io_err set to True.
test_default -> Test show_slv_state method.
"""
def setUp(self):
"""Function: setUp
Description: Initialization for unit testing.
Arguments:
"""
self.name = "Mysql_Server"
self.server_id = 10
self.sql_user = "mysql_user"
self.sql_pass = "my_japd"
self.machine = getattr(machine, "Linux")()
self.host = "host_server"
self.port = 3307
self.defaults_file = "def_cfg_file"
self.extra_def_file = "extra_cfg_file"
def test_slv_both_true(self):
"""Function: test_slv_both_true
Description: Test with all attrs set to True.
Arguments:
"""
mysqlrep = mysql_class.SlaveRep(self.name, self.server_id,
self.sql_user, self.sql_pass,
self.machine,
defaults_file=self.defaults_file)
mysqlrep.sql_err = "Yes"
mysqlrep.io_err = "Yes"
self.assertTrue(mysqlrep.is_slv_error())
def test_sql_err_true(self):
"""Function: test_sql_err_true
Description: Test with sql_err set to True.
Arguments:
"""
mysqlrep = mysql_class.SlaveRep(self.name, self.server_id,
self.sql_user, self.sql_pass,
self.machine,
defaults_file=self.defaults_file)
mysqlrep.sql_err = "Yes"
mysqlrep.io_err = None
self.assertTrue(mysqlrep.is_slv_error())
def test_io_err_true(self):
"""Function: test_io_err_true
Description: Test with io_err set to True.
Arguments:
"""
mysqlrep = mysql_class.SlaveRep(self.name, self.server_id,
self.sql_user, self.sql_pass,
self.machine,
defaults_file=self.defaults_file)
mysqlrep.sql_err = None
mysqlrep.io_err = "Yes"
self.assertTrue(mysqlrep.is_slv_error())
def test_default(self):
"""Function: test_default
Description: Test is_slv_error method.
Arguments:
"""
mysqlrep = mysql_class.SlaveRep(self.name, self.server_id,
self.sql_user, self.sql_pass,
self.machine,
defaults_file=self.defaults_file)
mysqlrep.sql_err = None
mysqlrep.io_err = None
self.assertFalse(mysqlrep.is_slv_error())
if __name__ == "__main__":
unittest.main()
| [((435, 446), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (444, 446), False, 'import os\n'), ((3648, 3663), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3661, 3663), False, 'import unittest\n'), ((1646, 1776), 'mysql_class.SlaveRep', 'mysql_class.SlaveRep', (['self.name', 'self.server_id', 'self.sql_user', 'self.sql_pass', 'self.machine'], {'defaults_file': 'self.defaults_file'}), '(self.name, self.server_id, self.sql_user, self.\n sql_pass, self.machine, defaults_file=self.defaults_file)\n', (1666, 1776), False, 'import mysql_class\n'), ((2189, 2319), 'mysql_class.SlaveRep', 'mysql_class.SlaveRep', (['self.name', 'self.server_id', 'self.sql_user', 'self.sql_pass', 'self.machine'], {'defaults_file': 'self.defaults_file'}), '(self.name, self.server_id, self.sql_user, self.\n sql_pass, self.machine, defaults_file=self.defaults_file)\n', (2209, 2319), False, 'import mysql_class\n'), ((2728, 2858), 'mysql_class.SlaveRep', 'mysql_class.SlaveRep', (['self.name', 'self.server_id', 'self.sql_user', 'self.sql_pass', 'self.machine'], {'defaults_file': 'self.defaults_file'}), '(self.name, self.server_id, self.sql_user, self.\n sql_pass, self.machine, defaults_file=self.defaults_file)\n', (2748, 2858), False, 'import mysql_class\n'), ((3255, 3385), 'mysql_class.SlaveRep', 'mysql_class.SlaveRep', (['self.name', 'self.server_id', 'self.sql_user', 'self.sql_pass', 'self.machine'], {'defaults_file': 'self.defaults_file'}), '(self.name, self.server_id, self.sql_user, self.\n sql_pass, self.machine, defaults_file=self.defaults_file)\n', (3275, 3385), False, 'import mysql_class\n')] |
mengshun/Leetcode | problems/108.py | 8bb676f2fff093e1417a4bed13d9ad708149be78 | """
108. Convert Sorted Array to Binary Search Tree
"""
from TreeNode import TreeNode
class Solution:
def sortedArrayToBST(self, nums: [int]) -> TreeNode:
def dfs(left, right):
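            # Recursively take the middle element of nums[left..right] as the
            # root so both subtrees receive (nearly) half of the values, which
            # keeps the resulting BST height-balanced.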
if left > right:
return None
mid = left + (right - left) // 2
root = TreeNode(nums[mid])
root.left = dfs(left, mid-1)
root.right = dfs(mid+1, right)
return root
return dfs(0, len(nums)-1)
t = [-10,-3,0,5,9]
obj = Solution()
node = obj.sortedArrayToBST(t)
node.preorderTraversal()
| [((285, 304), 'TreeNode.TreeNode', 'TreeNode', (['nums[mid]'], {}), '(nums[mid])\n', (293, 304), False, 'from TreeNode import TreeNode\n')] |
hsm207/sage | src/sage/tests/books/computational-mathematics-with-sagemath/domaines_doctest.py | 020bd59ec28717bfab9af44d2231c53da1ff99f1 | ## -*- encoding: utf-8 -*-
"""
This file (./domaines_doctest.sage) was *autogenerated* from ./domaines.tex,
with sagetex.sty version 2011/05/27 v2.3.1.
It contains the contents of all the sageexample environments from this file.
You should be able to doctest this file with:
sage -t ./domaines_doctest.sage
It is always safe to delete this file; it is not used in typesetting your
document.
Sage example in ./domaines.tex, line 10::
sage: x = var('x')
Sage example in ./domaines.tex, line 69::
sage: o = 12/35
sage: type(o)
<... 'sage.rings.rational.Rational'>
Sage example in ./domaines.tex, line 82::
sage: type(12/35)
<... 'sage.rings.rational.Rational'>
Sage example in ./domaines.tex, line 131::
sage: o = 720
sage: o.factor()
2^4 * 3^2 * 5
Sage example in ./domaines.tex, line 142::
sage: type(o).factor(o)
2^4 * 3^2 * 5
Sage example in ./domaines.tex, line 157::
sage: 720.factor()
2^4 * 3^2 * 5
Sage example in ./domaines.tex, line 166::
sage: o = 720 / 133
sage: o.numerator().factor()
2^4 * 3^2 * 5
Sage example in ./domaines.tex, line 253::
sage: 3 * 7
21
Sage example in ./domaines.tex, line 261::
sage: (2/3) * (6/5)
4/5
Sage example in ./domaines.tex, line 267::
sage: (1 + I) * (1 - I)
2
Sage example in ./domaines.tex, line 274::
sage: (x + 2) * (x + 1)
(x + 2)*(x + 1)
sage: (x + 1) * (x + 2)
(x + 2)*(x + 1)
Sage example in ./domaines.tex, line 308::
sage: def fourth_power(a):
....: a = a * a
....: a = a * a
....: return a
Sage example in ./domaines.tex, line 330::
sage: fourth_power(2)
16
sage: fourth_power(3/2)
81/16
sage: fourth_power(I)
1
sage: fourth_power(x+1)
(x + 1)^4
sage: M = matrix([[0,-1],[1,0]]); M
[ 0 -1]
[ 1 0]
sage: fourth_power(M)
[1 0]
[0 1]
Sage example in ./domaines.tex, line 375::
sage: t = type(5/1); t
<... 'sage.rings.rational.Rational'>
sage: t == type(5)
False
Sage example in ./domaines.tex, line 476::
sage: a = 5; a
5
sage: a.is_unit()
False
Sage example in ./domaines.tex, line 484::
sage: a = 5/1; a
5
sage: a.is_unit()
True
Sage example in ./domaines.tex, line 507::
sage: parent(5)
Integer Ring
sage: parent(5/1)
Rational Field
Sage example in ./domaines.tex, line 515::
sage: ZZ
Integer Ring
sage: QQ
Rational Field
Sage example in ./domaines.tex, line 525::
sage: QQ(5).parent()
Rational Field
sage: ZZ(5/1).parent()
Integer Ring
sage: ZZ(1/5)
Traceback (most recent call last):
...
TypeError: no conversion of this rational to integer
Sage example in ./domaines.tex, line 543::
sage: ZZ(1), QQ(1), RR(1), CC(1)
(1, 1, 1.00000000000000, 1.00000000000000)
Sage example in ./domaines.tex, line 568::
sage: cartesian_product([QQ, QQ])
The Cartesian product of (Rational Field, Rational Field)
Sage example in ./domaines.tex, line 574::
sage: ZZ.fraction_field()
Rational Field
Sage example in ./domaines.tex, line 580::
sage: ZZ['x']
Univariate Polynomial Ring in x over Integer Ring
Sage example in ./domaines.tex, line 591::
sage: Z5 = GF(5); Z5
Finite Field of size 5
sage: P = Z5['x']; P
Univariate Polynomial Ring in x over Finite Field of size 5
sage: M = MatrixSpace(P, 3, 3); M
Full MatrixSpace of 3 by 3 dense matrices over
Univariate Polynomial Ring in x over Finite Field of size 5
Sage example in ./domaines.tex, line 602::
sage: M.random_element() # random
[2*x^2 + 3*x + 4 4*x^2 + 2*x + 2 4*x^2 + 2*x]
[ 3*x 2*x^2 + x + 3 3*x^2 + 4*x]
[ 4*x^2 + 3 3*x^2 + 2*x + 4 2*x + 4]
Sage example in ./domaines.tex, line 697::
sage: QQ.category()
Join of Category of number fields and Category of quotient fields and Category of metric spaces
Sage example in ./domaines.tex, line 704::
sage: QQ in Fields()
True
Sage example in ./domaines.tex, line 712::
sage: QQ in CommutativeAdditiveGroups()
True
Sage example in ./domaines.tex, line 718::
sage: QQ['x'] in EuclideanDomains()
True
Sage example in ./domaines.tex, line 859::
sage: 5.parent()
Integer Ring
Sage example in ./domaines.tex, line 872::
sage: type(factor(4))
<class 'sage.structure.factorization_integer.IntegerFactorization'>
Sage example in ./domaines.tex, line 895::
sage: int(5)
5
sage: type(int(5))
<... 'int'>
Sage example in ./domaines.tex, line 909::
sage: Integer(5)
5
sage: type(Integer(5))
<... 'sage.rings.integer.Integer'>
Sage example in ./domaines.tex, line 926::
sage: factorial(99) / factorial(100) - 1 / 50
-1/100
Sage example in ./domaines.tex, line 974::
sage: 72/53 - 5/3 * 2.7
-3.14150943396227
Sage example in ./domaines.tex, line 982::
sage: cos(1), cos(1.)
(cos(1), 0.540302305868140)
Sage example in ./domaines.tex, line 1000::
sage: pi.n(digits=50) # variant: n(pi,digits=50)
3.1415926535897932384626433832795028841971693993751
Sage example in ./domaines.tex, line 1020::
sage: z = CC(1,2); z.arg()
1.10714871779409
Sage example in ./domaines.tex, line 1036::
sage: I.parent()
Number Field in I with defining polynomial x^2 + 1 with I = 1*I
Sage example in ./domaines.tex, line 1043::
sage: (1.+2.*I).parent()
Complex Field with 53 bits of precision
sage: (1.+2.*SR(I)).parent()
Symbolic Ring
Sage example in ./domaines.tex, line 1064::
sage: z = 3 * exp(I*pi/4)
sage: z.real(), z.imag(), z.abs().canonicalize_radical()
(3/2*sqrt(2), 3/2*sqrt(2), 3)
Sage example in ./domaines.tex, line 1094::
sage: a, b, c = 0, 2, 3
sage: a == 1 or (b == 2 and c == 3)
True
Sage example in ./domaines.tex, line 1147::
sage: x, y = var('x, y')
sage: bool( (x-y)*(x+y) == x^2-y^2 )
True
Sage example in ./domaines.tex, line 1171::
sage: Z4 = IntegerModRing(4); Z4
Ring of integers modulo 4
sage: m = Z4(7); m
3
Sage example in ./domaines.tex, line 1184::
sage: 3 * m + 1
2
Sage example in ./domaines.tex, line 1191::
sage: Z3 = GF(3); Z3
Finite Field of size 3
Sage example in ./domaines.tex, line 1243::
sage: a = matrix(QQ, [[1,2,3],[2,4,8],[3,9,27]])
sage: (a^2 + 1) * a^(-1)
[ -5 13/2 7/3]
[ 7 1 25/3]
[ 2 19/2 27]
Sage example in ./domaines.tex, line 1259::
sage: M = MatrixSpace(QQ,3,3); M
Full MatrixSpace of 3 by 3 dense matrices over Rational Field
sage: a = M([[1,2,3],[2,4,8],[3,9,27]])
sage: (a^2 + 1) * a^(-1)
[ -5 13/2 7/3]
[ 7 1 25/3]
[ 2 19/2 27]
Sage example in ./domaines.tex, line 1283::
sage: P = ZZ['x']; P
Univariate Polynomial Ring in x over Integer Ring
sage: F = P.fraction_field(); F
Fraction Field of Univariate Polynomial Ring in x over Integer Ring
sage: p = P(x+1) * P(x); p
x^2 + x
sage: p + 1/p
(x^4 + 2*x^3 + x^2 + 1)/(x^2 + x)
sage: parent(p + 1/p)
Fraction Field of Univariate Polynomial Ring in x over Integer Ring
Sage example in ./domaines.tex, line 1382::
sage: k.<a> = NumberField(x^3 + x + 1); a^3; a^4+3*a
-a - 1
-a^2 + 2*a
Sage example in ./domaines.tex, line 1416::
sage: parent(sin(x))
Symbolic Ring
Sage example in ./domaines.tex, line 1422::
sage: SR
Symbolic Ring
Sage example in ./domaines.tex, line 1428::
sage: SR.category()
Category of fields
Sage example in ./domaines.tex, line 1482::
sage: R = QQ['x1,x2,x3,x4']; R
Multivariate Polynomial Ring in x1, x2, x3, x4 over Rational Field
sage: x1, x2, x3, x4 = R.gens()
Sage example in ./domaines.tex, line 1489::
sage: x1 * (x2 - x3)
x1*x2 - x1*x3
Sage example in ./domaines.tex, line 1496::
sage: (x1+x2)*(x1-x2) - (x1^2 - x2^2)
0
Sage example in ./domaines.tex, line 1509::
sage: P = prod( (a-b) for (a,b) in Subsets([x1,x2,x3,x4],2) ); P * P.lc()
x1^3*x2^2*x3 - x1^2*x2^3*x3 - x1^3*x2*x3^2 + x1*x2^3*x3^2
+ x1^2*x2*x3^3 - x1*x2^2*x3^3 - x1^3*x2^2*x4 + x1^2*x2^3*x4
+ x1^3*x3^2*x4 - x2^3*x3^2*x4 - x1^2*x3^3*x4 + x2^2*x3^3*x4
+ x1^3*x2*x4^2 - x1*x2^3*x4^2 - x1^3*x3*x4^2 + x2^3*x3*x4^2
+ x1*x3^3*x4^2 - x2*x3^3*x4^2 - x1^2*x2*x4^3 + x1*x2^2*x4^3
+ x1^2*x3*x4^3 - x2^2*x3*x4^3 - x1*x3^2*x4^3 + x2*x3^2*x4^3
Sage example in ./domaines.tex, line 1531::
sage: x1, x2, x3, x4 = SR.var('x1, x2, x3, x4')
sage: got = prod( (a-b) for (a,b) in Subsets([x1,x2,x3,x4],2) )
sage: expected1 = -(x1 - x2)*(x1 - x3)*(x1 - x4)*(x2 - x3)*(x2 - x4)*(x3 - x4)
sage: expected2 = (x1 - x2)*(x1 - x3)*(x1 - x4)*(x2 - x3)*(x2 - x4)*(x3 - x4)
sage: bool(got == expected1 or got == expected2)
True
Sage example in ./domaines.tex, line 1581::
sage: x = var('x')
sage: p = 54*x^4+36*x^3-102*x^2-72*x-12
sage: factor(p)
6*(x^2 - 2)*(3*x + 1)^2
Sage example in ./domaines.tex, line 1616::
sage: R = ZZ['x']; R
Univariate Polynomial Ring in x over Integer Ring
Sage example in ./domaines.tex, line 1622::
sage: q = R(p); q
54*x^4 + 36*x^3 - 102*x^2 - 72*x - 12
Sage example in ./domaines.tex, line 1629::
sage: parent(q)
Univariate Polynomial Ring in x over Integer Ring
Sage example in ./domaines.tex, line 1635::
sage: factor(q)
2 * 3 * (3*x + 1)^2 * (x^2 - 2)
Sage example in ./domaines.tex, line 1642::
sage: R = QQ['x']; R
Univariate Polynomial Ring in x over Rational Field
sage: q = R(p); q
54*x^4 + 36*x^3 - 102*x^2 - 72*x - 12
sage: factor(q)
(54) * (x + 1/3)^2 * (x^2 - 2)
Sage example in ./domaines.tex, line 1665::
sage: R = ComplexField(16)['x']; R
Univariate Polynomial Ring in x over Complex Field
with 16 bits of precision
sage: q = R(p); q
54.00*x^4 + 36.00*x^3 - 102.0*x^2 - 72.00*x - 12.00
sage: factor(q)
(54.00) * (x - 1.414) * (x + 0.3333)^2 * (x + 1.414)
Sage example in ./domaines.tex, line 1685::
sage: R = QQ[sqrt(2)]['x']; R
Univariate Polynomial Ring in x over Number Field in sqrt2 with defining polynomial x^2 - 2 with sqrt2 = 1.414213562373095?
sage: q = R(p); q
54*x^4 + 36*x^3 - 102*x^2 - 72*x - 12
sage: factor(q)
(54) * (x - sqrt2) * (x + sqrt2) * (x + 1/3)^2
Sage example in ./domaines.tex, line 1698::
sage: R = GF(5)['x']; R
Univariate Polynomial Ring in x over Finite Field of size 5
sage: q = R(p); q
4*x^4 + x^3 + 3*x^2 + 3*x + 3
sage: factor(q)
(4) * (x + 2)^2 * (x^2 + 3)
"""
| [] |
TheBoringBakery/Riot-Watcher | src/riotwatcher/riotwatcher.py | 6e05fffe127530a75fd63e67da37ba81489fd4fe | from .Deserializer import Deserializer
from .RateLimiter import RateLimiter
from .Handlers import (
DeprecationHandler,
DeserializerAdapter,
DictionaryDeserializer,
RateLimiterAdapter,
ThrowOnErrorHandler,
TypeCorrectorHandler,
)
from .Handlers.RateLimit import BasicRateLimiter
from ._apis import BaseApi
from ._apis.riot import AccountApi
class RiotWatcher:
"""
RiotWatcher class is intended to be the main interaction point with the generic Riot APIs.
"""
def __init__(
self,
api_key: str,
timeout: int = None,
rate_limiter: RateLimiter = BasicRateLimiter(),
deserializer: Deserializer = DictionaryDeserializer(),
):
"""
Initialize a new instance of the RiotWatcher class.
:param string api_key: the API key to use for this instance
:param int timeout: Time to wait for a response before timing out a connection to
the Riot API
:param RateLimiter rate_limiter: Instance to be used for rate limiting.
This defaults to Handlers.RateLimit.BasicRateLimiter.
:param Deserializer deserializer: Instance to be used to deserialize responses
from the Riot Api. Default is Handlers.DictionaryDeserializer.
"""
if not api_key:
raise ValueError("api_key must be set!")
handler_chain = [
DeserializerAdapter(deserializer),
ThrowOnErrorHandler(),
TypeCorrectorHandler(),
RateLimiterAdapter(rate_limiter),
DeprecationHandler(),
]
self._base_api = BaseApi(api_key, handler_chain, timeout=timeout)
self._account = AccountApi(self._base_api)
@property
def account(self) -> AccountApi:
"""
Interface to the Account Endpoint
:rtype: riot.AccountApi
"""
return self._account
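# A minimal usage sketch (illustrative only; the API key below is a placeholder
# and the concrete query methods live on _apis.riot.AccountApi, not in this file):
#   watcher = RiotWatcher("RGAPI-xxxxxxxx")
#   account_api = watcher.account
# Custom rate limiting or deserialization can be supplied through the
# rate_limiter and deserializer keyword arguments documented in __init__ above.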
| [] |
TaoYibo1866/webots_ros2 | webots_ros2_core/webots_ros2_core/devices/gps_device.py | a72c164825663cebbfd27e0649ea51d3abf9bbed | # Copyright 1996-2021 Cyberbotics Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Webots GPS device wrapper for ROS2."""
from rclpy.qos import QoSReliabilityPolicy, qos_profile_sensor_data
from std_msgs.msg import Float32
from sensor_msgs.msg import NavSatFix, NavSatStatus
from geometry_msgs.msg import PointStamped
from .sensor_device import SensorDevice
from controller import GPS
class GpsDevice(SensorDevice):
"""
ROS2 wrapper for Webots GPS node.
    Creates a suitable ROS2 interface based on a Webots
    [GPS](https://cyberbotics.com/doc/reference/gps) node instance.
    It allows the following functionalities:
- Publishes position measurements of type `sensor_msgs::NavSatFix` if WGS84
- Publishes position measurements of type `geometry_msgs::PointStamped` if LOCAL
Args:
----
node (WebotsNode): The ROS2 node.
device_key (str): Unique identifier of the device used for configuration.
wb_device (Gps): Webots node of type GPS.
Kwargs:
params (dict): Inherited from `SensorDevice` + the following::
dict: {
'timestep': int, # Publish period in ms (default 128ms)
}
"""
def __init__(self, node, device_key, wb_device, params=None):
super().__init__(node, device_key, wb_device, params)
self.__speed_publisher = None
self.__gps_publisher = None
self.__coordinate_system = self._wb_device.getCoordinateSystem()
# Exit if disabled
if self._disable:
return
# Change default timestep
self._timestep = 128
qos_sensor_reliable = qos_profile_sensor_data
qos_sensor_reliable.reliability = QoSReliabilityPolicy.RELIABLE
# Create topics
self.__speed_publisher = node.create_publisher(
Float32, self._topic_name + '/speed', qos_sensor_reliable)
if self.__coordinate_system == GPS.WGS84:
self.__gps_publisher = node.create_publisher(
NavSatFix, self._topic_name + '/gps', qos_sensor_reliable)
else:
self.__gps_publisher = node.create_publisher(
PointStamped, self._topic_name + '/gps', qos_sensor_reliable)
def step(self):
stamp = super().step()
if not stamp:
return
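        # Enable the underlying Webots GPS only while there are subscribers on
        # the topics (or always_publish is set); otherwise the else branch below
        # disables it again.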
if self.__gps_publisher.get_subscription_count() > 0 or \
self.__speed_publisher.get_subscription_count() > 0 or \
self._always_publish:
self._wb_device.enable(self._timestep)
msg = Float32()
msg.data = self._wb_device.getSpeed()
self.__speed_publisher.publish(msg)
if self.__coordinate_system == GPS.WGS84:
msg = NavSatFix()
msg.header.stamp = stamp
msg.header.frame_id = self._frame_id
msg.latitude = self._wb_device.getValues()[0]
msg.longitude = self._wb_device.getValues()[1]
msg.altitude = self._wb_device.getValues()[2]
msg.position_covariance_type = NavSatFix.COVARIANCE_TYPE_UNKNOWN
msg.status.service = NavSatStatus.SERVICE_GPS
self.__gps_publisher.publish(msg)
else:
msg = PointStamped()
msg.header.stamp = stamp
msg.header.frame_id = self._frame_id
msg.point.x = self._wb_device.getValues()[0]
msg.point.y = self._wb_device.getValues()[1]
msg.point.z = self._wb_device.getValues()[2]
self.__gps_publisher.publish(msg)
else:
self._wb_device.disable()
| [((3071, 3080), 'std_msgs.msg.Float32', 'Float32', ([], {}), '()\n', (3078, 3080), False, 'from std_msgs.msg import Float32\n'), ((3255, 3266), 'sensor_msgs.msg.NavSatFix', 'NavSatFix', ([], {}), '()\n', (3264, 3266), False, 'from sensor_msgs.msg import NavSatFix, NavSatStatus\n'), ((3781, 3795), 'geometry_msgs.msg.PointStamped', 'PointStamped', ([], {}), '()\n', (3793, 3795), False, 'from geometry_msgs.msg import PointStamped\n')] |
saisankargochhayat/doot | ML/complete_model/setlist.py | 00bd74463a065f23886e829aae677267b7619e13 | setlist = [['a','m','n','s','t','g','q','o','x'],['b','e','c'],['h','k','u','v'],
['d','r','p'],['f'],['l'],['i'],['w'],['y']]
| [] |
jtreim/cant-stop | players/jeff.py | 0ef1a2da67e4232a4ad2be150e950e8f1914a851 | from .player import Player
class JeffPlayer(Player):
"""
JeffPlayer focuses on the odds for continuing turns.
    To pick a move, it calculates a move value based on the odds of continuing
    turns, preferring to advance less likely columns when possible and to win
    columns ahead of opponents.
"""
ODDS = 'odds'
ROLLS = 'rolls'
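    # The lookup tables below are keyed by the active column(s) ('2'-'12').
    # For each combination, ODDS is (roughly) the probability that a roll of the
    # four dice can advance at least one of those columns, and ROLLS is the
    # number of additional rolls considered worth taking before stopping.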
ONE_COLUMN_ODDS = {
'2': { ODDS: .13, ROLLS: 0 },
'3': { ODDS: .23, ROLLS: 0 },
'4': { ODDS: .36, ROLLS: 0 },
'5': { ODDS: .45, ROLLS: 1 },
'6': { ODDS: .56, ROLLS: 1 },
'7': { ODDS: .64, ROLLS: 2 },
'8': { ODDS: .56, ROLLS: 1 },
'9': { ODDS: .45, ROLLS: 1 },
'10': { ODDS: .36, ROLLS: 0 },
'11': { ODDS: .23, ROLLS: 0 },
'12': { ODDS: .13, ROLLS: 0 },
}
TWO_COLUMN_ODDS = {
'2': {
'3': { ODDS: .32, ROLLS: 0 },
'4': { ODDS: .44, ROLLS: 1 },
'5': { ODDS: .53, ROLLS: 1 },
'6': { ODDS: .63, ROLLS: 2 },
'7': { ODDS: .71, ROLLS: 2 },
'8': { ODDS: .67, ROLLS: 2 },
'9': { ODDS: .56, ROLLS: 1 },
'10': { ODDS: .47, ROLLS: 1 },
'11': { ODDS: .36, ROLLS: 1 },
'12': { ODDS: .26, ROLLS: 0 },
},
'3': {
'4': { ODDS: .47, ROLLS: 1 },
'5': { ODDS: .53, ROLLS: 1 },
'6': { ODDS: .64, ROLLS: 2 },
'7': { ODDS: .71, ROLLS: 2 },
'8': { ODDS: .68, ROLLS: 2 },
'9': { ODDS: .64, ROLLS: 2 },
'10': { ODDS: .56, ROLLS: 1 },
'11': { ODDS: .45, ROLLS: 1 },
'12': { ODDS: .36, ROLLS: 1 },
},
'4': {
'5': { ODDS: .61, ROLLS: 2 },
'6': { ODDS: .72, ROLLS: 3 },
'7': { ODDS: .77, ROLLS: 3 },
'8': { ODDS: .75, ROLLS: 3 },
'9': { ODDS: .68, ROLLS: 3 },
'10': { ODDS: .67, ROLLS: 2 },
'11': { ODDS: .56, ROLLS: 1 },
'12': { ODDS: .47, ROLLS: 1 },
},
'5': {
'6': { ODDS: .73, ROLLS: 3 },
'7': { ODDS: .78, ROLLS: 4 },
'8': { ODDS: .77, ROLLS: 3 },
'9': { ODDS: .75, ROLLS: 2 },
'10': { ODDS: .69, ROLLS: 2 },
'11': { ODDS: .68, ROLLS: 2 },
'12': { ODDS: .64, ROLLS: 1 },
},
'6': {
'7': { ODDS: .84, ROLLS: 5 },
'8': { ODDS: .82, ROLLS: 5 },
'9': { ODDS: .77, ROLLS: 3 },
'10': { ODDS: .75, ROLLS: 3 },
'11': { ODDS: .68, ROLLS: 2 },
'12': { ODDS: .67, ROLLS: 2 },
},
'7': {
'8': { ODDS: .84, ROLLS: 5 },
'9': { ODDS: .78, ROLLS: 4 },
'10': { ODDS: .77, ROLLS: 3 },
'11': { ODDS: .71, ROLLS: 2 },
'12': { ODDS: .71, ROLLS: 2 },
},
'8': {
'9': { ODDS: .73, ROLLS: 3 },
'10': { ODDS: .72, ROLLS: 3 },
'11': { ODDS: .64, ROLLS: 2 },
'12': { ODDS: .63, ROLLS: 2 },
},
'9': {
'10': { ODDS: .61, ROLLS: 2 },
'11': { ODDS: .53, ROLLS: 1 },
'12': { ODDS: .53, ROLLS: 1 },
},
'10': {
'11': { ODDS: .47, ROLLS: 1 },
'12': { ODDS: .44, ROLLS: 1 },
},
'11': {
'12': { ODDS: .32, ROLLS: 0 }
},
}
THREE_COLUMN_ODDS = {
'2': {
'3': {
'4': { ODDS: .52, ROLLS: 1 },
'5': { ODDS: .58, ROLLS: 1 },
'6': { ODDS: .68, ROLLS: 2 },
'7': { ODDS: .75, ROLLS: 3 },
'8': { ODDS: .76, ROLLS: 3 },
'9': { ODDS: .71, ROLLS: 2 },
'10': { ODDS: .63, ROLLS: 2 },
'11': { ODDS: .53, ROLLS: 1 },
'12': { ODDS: .44, ROLLS: 1 },
},
'4': {
'5': { ODDS: .66, ROLLS: 2 },
'6': { ODDS: .76, ROLLS: 3 },
'7': { ODDS: .81, ROLLS: 4 },
'8': { ODDS: .82, ROLLS: 5 },
'9': { ODDS: .76, ROLLS: 3 },
'10': { ODDS: .74, ROLLS: 3 },
'11': { ODDS: .63, ROLLS: 2 },
'12': { ODDS: .55, ROLLS: 1 },
},
'5': {
'6': { ODDS: .77, ROLLS: 3 },
'7': { ODDS: .81, ROLLS: 4 },
'8': { ODDS: .83, ROLLS: 5 },
'9': { ODDS: .76, ROLLS: 3 },
'10': { ODDS: .76, ROLLS: 3 },
'11': { ODDS: .71, ROLLS: 2 },
'12': { ODDS: .63, ROLLS: 2 },
},
'6': {
'7': { ODDS: .86, ROLLS: 6 },
'8': { ODDS: .88, ROLLS: 7 },
'9': { ODDS: .83, ROLLS: 5 },
'10': { ODDS: .81, ROLLS: 4 },
'11': { ODDS: .76, ROLLS: 3 },
'12': { ODDS: .74, ROLLS: 3 },
},
'7': {
'8': { ODDS: .89, ROLLS: 8 },
'9': { ODDS: .84, ROLLS: 5 },
'10': { ODDS: .83, ROLLS: 5 },
'11': { ODDS: .78, ROLLS: 4 },
'12': { ODDS: .78, ROLLS: 4 },
},
'8': {
'9': { ODDS: .71, ROLLS: 2 },
'10': { ODDS: .63, ROLLS: 2 },
'11': { ODDS: .53, ROLLS: 1 },
'12': { ODDS: .44, ROLLS: 1 },
},
'9': {
'10': { ODDS: .71, ROLLS: 2 },
'11': { ODDS: .64, ROLLS: 2 },
'12': { ODDS: .63, ROLLS: 2 },
},
'10': {
'11': { ODDS: .58, ROLLS: 1 },
'12': { ODDS: .55, ROLLS: 1 },
},
'11': {
'12': { ODDS: .44, ROLLS: 1 },
},
},
'3': {
'4': {
'5': { ODDS: .67, ROLLS: 2 },
'6': { ODDS: .74, ROLLS: 3 },
'7': { ODDS: .79, ROLLS: 4 },
'8': { ODDS: .80, ROLLS: 4 },
'9': { ODDS: .78, ROLLS: 4 },
'10': { ODDS: .76, ROLLS: 3 },
'11': { ODDS: .66, ROLLS: 2 },
'12': { ODDS: .58, ROLLS: 1 },
},
'5': {
'6': { ODDS: .77, ROLLS: 3 },
'7': { ODDS: .79, ROLLS: 4 },
'8': { ODDS: .81, ROLLS: 4 },
'9': { ODDS: .78, ROLLS: 4 },
'10': { ODDS: .76, ROLLS: 3 },
'11': { ODDS: .71, ROLLS: 2 },
'12': { ODDS: .64, ROLLS: 2 },
},
'6': {
'7': { ODDS: .86, ROLLS: 6 },
'8': { ODDS: .85, ROLLS: 6 },
'9': { ODDS: .83, ROLLS: 5 },
'10': { ODDS: .82, ROLLS: 5 },
'11': { ODDS: .76, ROLLS: 3 },
'12': { ODDS: .74, ROLLS: 3 },
},
'7': {
'8': { ODDS: .89, ROLLS: 8 },
'9': { ODDS: .84, ROLLS: 5 },
'10': { ODDS: .84, ROLLS: 5 },
'11': { ODDS: .78, ROLLS: 4 },
'12': { ODDS: .78, ROLLS: 4 },
},
'8': {
'9': { ODDS: .84, ROLLS: 5 },
'10': { ODDS: .83, ROLLS: 5 },
'11': { ODDS: .76, ROLLS: 3 },
'12': { ODDS: .76, ROLLS: 3 },
},
'9': {
'10': { ODDS: .78, ROLLS: 4 },
'11': { ODDS: .71, ROLLS: 2 },
'12': { ODDS: .71, ROLLS: 2 },
},
'10': {
'11': { ODDS: .66, ROLLS: 2 },
'12': { ODDS: .63, ROLLS: 2 },
},
'11': {
'12': { ODDS: .53, ROLLS: 1 },
},
},
'4': {
'5': {
'6': { ODDS: .80, ROLLS: 4 },
'7': { ODDS: .85, ROLLS: 6 },
'8': { ODDS: .85, ROLLS: 6 },
'9': { ODDS: .80, ROLLS: 4 },
'10': { ODDS: .82, ROLLS: 5 },
'11': { ODDS: .78, ROLLS: 4 },
'12': { ODDS: .71, ROLLS: 2 },
},
'6': {
'7': { ODDS: .89, ROLLS: 8 },
'8': { ODDS: .91, ROLLS: 10 },
'9': { ODDS: .86, ROLLS: 6 },
'10': { ODDS: .88, ROLLS: 7 },
'11': { ODDS: .83, ROLLS: 5 },
'12': { ODDS: .82, ROLLS: 5 },
},
'7': {
'8': { ODDS: .90, ROLLS: 9 },
'9': { ODDS: .89, ROLLS: 8 },
'10': { ODDS: .88, ROLLS: 7 },
'11': { ODDS: .84, ROLLS: 5 },
'12': { ODDS: .83, ROLLS: 5 },
},
'8': {
'9': { ODDS: .86, ROLLS: 6 },
'10': { ODDS: .88, ROLLS: 7 },
'11': { ODDS: .82, ROLLS: 5 },
'12': { ODDS: .81, ROLLS: 4 },
},
'9': {
'10': { ODDS: .82, ROLLS: 5 },
'11': { ODDS: .76, ROLLS: 3 },
'12': { ODDS: .76, ROLLS: 3 },
},
'10': {
'11': { ODDS: .76, ROLLS: 3 },
'12': { ODDS: .74, ROLLS: 3 },
},
'11': {
'12': { ODDS: .63, ROLLS: 2 },
},
},
'5': {
'6': {
'7': { ODDS: .89, ROLLS: 8 },
'8': { ODDS: .90, ROLLS: 9 },
'9': { ODDS: .87, ROLLS: 7 },
'10': { ODDS: .86, ROLLS: 6 },
'11': { ODDS: .84, ROLLS: 5 },
'12': { ODDS: .82, ROLLS: 5 },
},
'7': {
'8': { ODDS: .91, ROLLS: 10 },
'9': { ODDS: .85, ROLLS: 6 },
'10': { ODDS: .89, ROLLS: 8 },
'11': { ODDS: .84, ROLLS: 5 },
'12': { ODDS: .84, ROLLS: 5 },
},
'8': {
'9': { ODDS: .87, ROLLS: 7 },
'10': { ODDS: .86, ROLLS: 6 },
'11': { ODDS: .83, ROLLS: 5 },
'12': { ODDS: .83, ROLLS: 5 },
},
'9': {
'10': { ODDS: .80, ROLLS: 4 },
'11': { ODDS: .78, ROLLS: 4 },
'12': { ODDS: .76, ROLLS: 3 },
},
'10': {
'11': { ODDS: .78, ROLLS: 4 },
'12': { ODDS: .76, ROLLS: 3 },
},
'11': {
'12': { ODDS: .71, ROLLS: 2 },
},
},
'6': {
'7': {
'8': { ODDS: .92, ROLLS: 12 },
'9': { ODDS: .91, ROLLS: 10 },
'10': { ODDS: .90, ROLLS: 9 },
'11': { ODDS: .89, ROLLS: 8 },
'12': { ODDS: .89, ROLLS: 8 },
},
'8': {
'9': { ODDS: .90, ROLLS: 9 },
'10': { ODDS: .91, ROLLS: 10 },
'11': { ODDS: .85, ROLLS: 6 },
'12': { ODDS: .88, ROLLS: 7 },
},
'9': {
'10': { ODDS: .85, ROLLS: 6 },
'11': { ODDS: .81, ROLLS: 4 },
'12': { ODDS: .83, ROLLS: 5 },
},
'10': {
'11': { ODDS: .80, ROLLS: 4 },
'12': { ODDS: .82, ROLLS: 5 },
},
'11': {
'12': { ODDS: .76, ROLLS: 3 },
},
},
'7': {
'8': {
'9': { ODDS: .89, ROLLS: 8 },
'10': { ODDS: .89, ROLLS: 8 },
'11': { ODDS: .86, ROLLS: 6 },
'12': { ODDS: .86, ROLLS: 6 },
},
'9': {
'10': { ODDS: .85, ROLLS: 6 },
'11': { ODDS: .79, ROLLS: 4 },
'12': { ODDS: .81, ROLLS: 4 },
},
'10': {
'11': { ODDS: .79, ROLLS: 4 },
'12': { ODDS: .81, ROLLS: 4 },
},
'11': {
'12': { ODDS: .75, ROLLS: 3 },
},
},
'8': {
'9': {
'10': { ODDS: .80, ROLLS: 4 },
'11': { ODDS: .77, ROLLS: 3 },
'12': { ODDS: .77, ROLLS: 3 },
},
'10': {
'11': { ODDS: .74, ROLLS: 3 },
'12': { ODDS: .76, ROLLS: 3 },
},
'11': {
'12': { ODDS: .68, ROLLS: 2 },
},
},
'9': {
'10': {
'11': { ODDS: .67, ROLLS: 2 },
'12': { ODDS: .66, ROLLS: 2 },
},
'11': {
'12': { ODDS: .58, ROLLS: 1 },
},
},
'10': {
'11': {
'12': { ODDS: .52, ROLLS: 1 },
},
},
}
NEW_COLUMN_PENALTY = 1
FINISH_COLUMN_REWARD = 1
FAVORITE_COLUMN_THRESHOLD = 2/3
CONTESTED_COLUMN = 1
MY_PROGRESS_MODIFIER = .5
OPPONENT_PROGRESS_MODIFIER = .5
STEP_DIVISOR = .08
ROUGH_ODDS_THRESHOLD = .2
DESPERATION_TURNS = 2
def get_progress(self, board, changes):
"""
Returns progress percentages for leader's & player's progress
Leaders are opponents farthest for each given column
"""
leader_progress = {}
my_progress = {}
for key in board.keys():
leader_progress[key] = {}
leader = board[key]['players'][0][0]
lead = board[key]['players'][0][1] / board[key]['steps']
if leader == self.name:
leader = board[key]['players'][1][0]
                lead = board[key]['players'][1][1] / board[key]['steps']
for player in board[key]['players']:
progress = player[1] / board[key]['steps']
if lead < progress and player[0] != self.name:
leader = player[0]
lead = progress
if player[0] == self.name:
my_progress[key] = player[1] + changes[key]
my_progress[key] /= board[key]['steps']
leader_progress[key]['leader'] = leader
leader_progress[key]['progress'] = lead
return leader_progress, my_progress
def get_started_columns(self, changes):
"""
Return list of columns that I've started according to changes
"""
started = []
for col in changes.keys():
if col == 'turn':
continue
if changes[col] > 0:
started.append(col)
return sorted(started, key=lambda column: int(column))
def get_finished_columns(self, board, my_progress):
"""
Return a list of all columns finished, including those finished with
my current progress.
"""
finished = []
for key in board.keys():
for player in board[key]['players']:
if player[1] == board[key]['steps']:
finished.append(key)
if key not in finished and my_progress[key] == 1:
finished.append(key)
return sorted(finished, key=lambda column: int(column))
def continue_based_on_odds(self, started, turns):
"""
Determine whether to continue simply based on optimal number of
turns to take.
"""
if len(started) == 3:
col1, col2, col3 = started[0], started[1], started[2]
return self.THREE_COLUMN_ODDS[col1][col2][col3][self.ROLLS] > turns
if len(started) == 2:
col1, col2 = started[0], started[1]
return self.TWO_COLUMN_ODDS[col1][col2][self.ROLLS] > turns
return self.ONE_COLUMN_ODDS[started[0]][self.ROLLS] > turns
def continue_based_on_new_column(self, board, started, finished, turns):
"""
Continue based on chances of getting a new valid column.
Rough estimation for converting 2 column odds to 3 columns.
"""
base_odds = self.TWO_COLUMN_ODDS[started[0]][started[1]][self.ODDS]
base_rolls = self.TWO_COLUMN_ODDS[started[0]][started[1]][self.ROLLS]
available = [col for col in board.keys() if col not in started and col not in finished]
odds = 0
for col in available:
odds += (base_odds * self.ONE_COLUMN_ODDS[col][self.ODDS])
# Quick and dirty estimation
new_rolls = (odds - self.ROUGH_ODDS_THRESHOLD) / self.STEP_DIVISOR
return base_rolls + new_rolls > turns
def continue_based_on_new_columns(self, board, started, finished, turns):
"""
Continue based on chances of getting 2 new valid columns.
Rough estimation for converting 1 column odds to 3 columns.
"""
base_odds = self.ONE_COLUMN_ODDS[started[0]][self.ODDS]
base_rolls = self.ONE_COLUMN_ODDS[started[0]][self.ROLLS]
available = [col for col in board.keys() if col not in started and col not in finished]
odds = 0
for i in range(len(available)):
for j in range(i+1, len(available)):
col1, col2 = available[i], available[j]
odds += (base_odds * self.TWO_COLUMN_ODDS[col1][col2][self.ODDS])
# Quick and dirty estimation
new_rolls = (odds - self.ROUGH_ODDS_THRESHOLD) / self.STEP_DIVISOR
return base_rolls + new_rolls > turns
def opponent_might_win(self, leader_progress):
"""
Check to see if opponent might win in the next turn.
"""
opponents = {}
for col in leader_progress.keys():
leader = leader_progress[col]['leader']
if leader == self.name:
continue
if leader not in opponents.keys():
opponents[leader] = 0
if leader_progress[col]['progress'] == 1.0:
opponents[leader] += 1
if opponents[leader] >= 2:
return True
return False
def started_columns_are_contested(
self, board, changes, my_progress, started):
"""
Check to see if any of my columns I've started are currently contested.
"""
for col in started:
players = board[col]['players']
step_size = 1 / board[col]['steps']
for player in players:
if player[0] == self.name:
continue
# Opponent is within 1/3 of my progress, and it's not finished
if abs(my_progress[col] - player[1] * step_size) <= 1/3 and \
my_progress[col] != 1:
return True
def did_finish_column(self, started, my_progress):
"""
Did I finish a column this turn?
"""
for col in started:
if my_progress[col] == 1.0:
return True
def is_continuing_turn(self, board, changes):
"""
Decide to continue rolling. Based on if I just won the game,
optimal rolling turns, I finished a column, and
number of columns already finished in the game.
"""
leader_progress, my_progress = self.get_progress(board, changes)
started_columns = self.get_started_columns(changes)
finished_columns = self.get_finished_columns(board, my_progress)
# No reason to stop before starting 3 columns and none are finished.
if len(started_columns) < 3 and len(finished_columns) == 0:
return True
# Stop if I won
if len(self.get_my_finished(my_progress)) >= 3:
return False
# If I finished a column, let's just end there.
if self.did_finish_column(started_columns, my_progress):
return False
# If I started 3 columns, and I'm not finishing a column,
# just roll optimal number of times.
if len(started_columns) == 3:
return self.continue_based_on_odds(
started_columns, changes['turn'])
# Columns are finished, but fewer than 3 columns started
if len(started_columns) == 2:
return self.continue_based_on_new_column(
board, started_columns, finished_columns, changes['turn'])
elif len(started_columns) == 1:
return self.continue_based_on_new_columns(
board, started_columns, finished_columns, changes['turn'])
# Shouldn't ever get here...continuing without starting a column...
return True
def determine_move_value(self, move, leader_progress, my_progress, board, started):
"""
Assign a move value primarily based on odds of continuing turns, with
bias towards not starting new columns and finishing columns.
"""
value = 0
if len(move) == 2 and move[0] != move[1]:
col1, col2 = str(move[0]), str(move[1])
value = self.TWO_COLUMN_ODDS[col1][col2][self.ODDS]
elif len(move) == 2:
col = str(move[0])
value = 2 * (self.ONE_COLUMN_ODDS[col][self.ODDS])
else:
col = str(move[0])
value = self.ONE_COLUMN_ODDS[col][self.ODDS]
unique_columns = set(move)
for c in unique_columns:
col = str(c)
step_size = 1 / board[col]['steps']
# Reward for finishing a column
if my_progress[col] + step_size == 1:
value += self.FINISH_COLUMN_REWARD
# Penalize for starting new columns
if str(c) not in started:
value -= self.NEW_COLUMN_PENALTY
# Less likely columns are desirable when 3 columns have started
if len(started) == 3:
value += (1 - self.ONE_COLUMN_ODDS[col][self.ODDS])
return value
def get_my_finished(self, my_progress):
finished_columns = []
for col in my_progress.keys():
if my_progress[col] == 1:
finished_columns.append(col)
return finished_columns
def look_for_the_win(self, board, my_progress, moves):
winning_move = None
finished = self.get_my_finished(my_progress)
for move in moves:
columns_finished = 0
# Consider moving twice on same column
if len(move) == 2 and move[0] == move[1]:
col = str(move[0])
step_size = 2 / board[col]['steps']
if step_size + my_progress[col] == 1:
columns_finished += 1
else:
# Otherwise, maybe I can finish two at a time
for m in move:
col = str(m)
step_size = 1 / board[col]['steps']
if step_size + my_progress[col] == 1:
columns_finished += 1
# If finishing these columns wins me the game, let's do it
if len(finished) + columns_finished >= 3:
winning_move = move
break
return winning_move
def compare_with_leader(self, leader_progress, my_progress, board, col):
step_size = 1 / board[col]['steps']
return (my_progress[col] - leader_progress[col]['progress']) / step_size
def choose_move(self, moves, board, changes, invalid_move=False):
leader_progress, my_progress = self.get_progress(board, changes)
started = self.get_started_columns(changes)
# Look for moves that let me win
best_move = self.look_for_the_win(board, my_progress, moves)
if best_move is not None:
return best_move
# Choose move based on best move value
best_move = moves[0]
best_move_value = self.determine_move_value(
best_move, leader_progress, my_progress, board, started)
for i in range(1, len(moves)):
move = moves[i]
move_value = self.determine_move_value(
move, leader_progress, my_progress, board, started)
if move_value > best_move_value:
best_move = move
best_move_value = move_value
return best_move
| [] |
nataddrho/digicueblue | Python2/src/main.py | 246c87129e6a70d384b1553688672bb3d5c6643e | #!/usr/bin/env python
# Nathan Rhoades 10/13/2017
import serial
import serialport
import bgapi
import gui
import digicueblue
import traceback
import time
import threading
import sys
if sys.version_info[0] < 3:
import Tkinter as Tk
else:
import tkinter as Tk
class App(threading.Thread): # thread GUI so that BGAPI can run in the background
def __init__(self, dcb):
self.dcb = dcb
threading.Thread.__init__(self)
self.start()
def callback(self):
self.root.quit()
def run(self):
self.root = Tk.Tk()
self.gui = gui.GUI(self.root, self.dcb)
self.root.mainloop()
def main():
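    # comport.cfg is expected to name the serial port of the BLED112 dongle on
    # its first line (e.g. "COM3" on Windows or "/dev/ttyACM0" on Linux; the
    # exact name is system-dependent).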
try:
f = open("comport.cfg", "r")
        comport = f.readline().strip()
f.close()
except BaseException:
# open comport selection gui
serialport.launch_selection()
return
try:
# open serial port and launch application
print "Opening %s" % comport
ser = serial.Serial(comport, 115200, timeout=1, writeTimeout=1)
dcb = digicueblue.DigicueBlue(filename="data.csv", debugprint=False)
app = App(dcb)
bg = bgapi.Bluegiga(dcb, ser, debugprint=True)
except BaseException:
print traceback.format_exc()
try:
ser.close()
except BaseException:
pass
text = """Please make sure the BLED112 dongle is plugged into the COM port
specified in comport.cfg, and that no other programs are using the port.
Use the serialport GUI to help select the correct port."""
text = text.replace('\n', ' ')
text = text.replace('\t', '')
print text
serialport.launch_selection()
if __name__ == '__main__':
main()
| [] |
plasticruler/newshound | messager.py | c97ef09165eabb27ac65682e4893cf72dae7f3fb | import requests
# newsapi key c2d941c74c144421945618d97a458144
class Article:
link:str
headline:str
summary:str
body:str
| [] |
ronin-gw/PyMaSC | PyMaSC/handler/mappability.py | 70c32b647017e162e0b004cadcf4f59a2d4012b6 | import logging
import os
import json
from multiprocessing import Process, Queue, Lock
import numpy as np
from PyMaSC.core.mappability import MappableLengthCalculator
from PyMaSC.utils.progress import ProgressHook, MultiLineProgressManager
from PyMaSC.utils.compatible import tostr, xrange
from PyMaSC.utils.output import prepare_outdir
from PyMaSC.utils.calc import exec_worker_pool
logger = logging.getLogger(__name__)
class BWIOError(IOError):
pass
class JSONIOError(IOError):
pass
class NeedUpdate(Exception):
pass
class NumpyEncoder(json.JSONEncoder):
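    # json.dump cannot serialize numpy scalar types directly, so coerce them to
    # builtin float/int before encoding.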
def default(self, obj):
if isinstance(obj, (np.long, np.float, np.float_)):
return float(obj)
elif isinstance(obj, (np.uint, np.int32, np.int64)):
return int(obj)
else:
            return super(NumpyEncoder, self).default(obj)
class MappabilityHandler(MappableLengthCalculator):
@staticmethod
def calc_mappable_len_required_shift_size(readlen, max_shift):
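        # Mappable lengths only need to be computed out to shift readlen unless
        # max_shift exceeds 2*readlen - 1, in which case they are needed up to
        # max_shift - readlen + 1.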
return max_shift - readlen + 1 if max_shift > 2*readlen - 1 else readlen
def __init__(self, path, max_shift=0, readlen=0, map_path=None, nworker=1):
max_shift = self.calc_mappable_len_required_shift_size(readlen, max_shift)
self.nworker = nworker
if not os.access(path, os.R_OK):
reason = "file is unreadable." if os.path.isfile(path) else "no such file."
logger.critical("Failed to open '{}': {}".format(path, reason))
raise BWIOError
super(MappabilityHandler, self).__init__(path, max_shift)
self.close()
self._progress.disable_bar()
self.need_save_stats = True
if map_path:
self.map_path = map_path
else:
self.map_path = os.path.splitext(path)[0] + "_mappability.json"
if not os.path.exists(self.map_path):
self._check_saving_directory_is_writable()
logger.info("Calcurate mappable length with max shift size {}.".format(max_shift))
elif not os.path.isfile(self.map_path):
logger.critical("Specified path is not file: '{}'".format(self.map_path))
raise JSONIOError
elif not os.access(self.map_path, os.R_OK):
logger.error("Failed to read '{}'".format(self.map_path))
else:
self._try_load_mappability_stats()
if self.need_save_stats:
self._check_stats_is_overwritable()
logger.info("Calcurate mappable length with max shift size {}.".format(max_shift))
else:
logger.info("Use mappability stats read from '{}'".format(self.map_path))
def _check_saving_directory_is_writable(self):
dirname = os.path.dirname(self.map_path)
dirname = dirname if dirname else '.'
if not prepare_outdir(dirname, logger):
raise JSONIOError
def _try_load_mappability_stats(self):
try:
stats = self._read_mappability_stats()
except IOError as e:
logger.error("Failed to read '{}'".format(self.map_path))
logger.error("[Errno {}] {}".format(e.errno, e.message))
except (TypeError, OverflowError, ValueError, KeyError, IndexError) as e:
logger.error("Failed to load json file: '{}'".format(self.map_path))
except NeedUpdate:
pass
else:
self._load_mappability_stats(stats)
def _read_mappability_stats(self):
with open(self.map_path) as f:
stats = json.load(f)
for k in ("max_shift", "__whole__", "references"):
if k not in stats:
logger.error("Mandatory key '{}' not found.".format(k))
raise KeyError(k)
if stats["max_shift"] < self.max_shift:
logger.info("Specified shift length longer than former analysis. The stats will be updated.")
raise NeedUpdate
if stats["max_shift"] != len(stats["__whole__"]) - 1:
logger.error("Max shift length for whole genome unmatched.")
raise IndexError
for ref in self.chromsizes:
if ref not in stats["references"]:
logger.error("Reference '{}' not found.".format(ref))
raise KeyError(ref)
if stats["max_shift"] != len(stats["references"][ref]) - 1:
logger.error("Max shift length for 'ref' unmatched.".format(ref))
raise IndexError
return stats
def _load_mappability_stats(self, stats):
self.mappable_len = stats["__whole__"][:self.max_shift + 1]
self.chrom2mappable_len = {ref: b[:self.max_shift + 1] for ref, b in stats["references"].items()}
self.chrom2is_called = {ref: True for ref in self.chromsizes}
self.is_called = True
self.need_save_stats = False
def _check_stats_is_overwritable(self):
if not os.access(self.map_path, os.W_OK):
logger.critical("Failed to overwrite '{}'".format(self.map_path))
raise JSONIOError
else:
logger.warning("Existing file '{}' will be overwritten.".format(self.map_path))
def save_mappability_stats(self):
if not self.need_save_stats:
return logger.info("Mappability stats updating is not required.")
logger.info("Save mappable length to '{}'".format(self.map_path))
try:
with open(self.map_path, 'w') as f:
json.dump({
"max_shift": self.max_shift,
"__whole__": self.mappable_len,
"references": self.chrom2mappable_len
}, f, indent=4, sort_keys=True, cls=NumpyEncoder)
except IOError as e:
logger.error("Faild to output: {}\n[Errno {}] {}".format(
e.filename, e.errno, e.message))
self.need_save_stats = False
def calc_mappability(self):
target_chroms = [tostr(c) for c, b in self.chrom2is_called.items() if b is False]
if not target_chroms:
return self._sumup_mappability()
order_queue = Queue()
report_queue = Queue()
logger_lock = Lock()
progress = MultiLineProgressManager()
workers = [MappabilityCalcWorker(self.path, self.max_shift, order_queue, report_queue, logger_lock)
for _ in range(min(self.nworker, len(target_chroms)))]
with exec_worker_pool(workers, target_chroms, order_queue):
while not self.is_called:
chrom, obj = report_queue.get()
if chrom is None: # update progress
chrom, body = obj
with logger_lock:
progress.update(chrom, body)
else:
length = obj
self.chrom2mappable_len[chrom] = tuple(length)
self.chrom2is_called[chrom] = True
if all(self.chrom2is_called.values()):
self.is_called = True
with logger_lock:
progress.erase(chrom)
progress.clean()
self._sumup_mappability()
def _sumup_mappability(self):
for length in self.chrom2mappable_len.values():
for i in xrange(self.max_shift + 1):
self.mappable_len[i] += length[i]
class MappabilityCalcWorker(Process):
def __init__(self, path, max_shift, order_queue, report_queue, logger_lock):
super(MappabilityCalcWorker, self).__init__()
self.calculator = MappableLengthCalculator(path, max_shift, logger_lock)
self.calculator._progress.disable_bar()
self.order_queue = order_queue
self.report_queue = report_queue
self.logger_lock = logger_lock
self.calculator._progress = ProgressHook(report_queue)
def run(self):
with self.logger_lock:
logger.debug("{}: Hello. My pid is {}.".format(self.name, os.getpid()))
while True:
chrom = self.order_queue.get()
if chrom is None:
break
with self.logger_lock:
logger.debug("{}: Process {}...".format(self.name, chrom))
self.calculator.calc_mappability(chrom)
self.report_queue.put((chrom, self.calculator.chrom2mappable_len[chrom]))
with self.logger_lock:
logger.debug("{}: Goodbye.".format(self.name))
self.calculator.close()
| [((395, 422), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (412, 422), False, 'import logging\n'), ((2727, 2757), 'os.path.dirname', 'os.path.dirname', (['self.map_path'], {}), '(self.map_path)\n', (2742, 2757), False, 'import os\n'), ((6111, 6118), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (6116, 6118), False, 'from multiprocessing import Process, Queue, Lock\n'), ((6142, 6149), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (6147, 6149), False, 'from multiprocessing import Process, Queue, Lock\n'), ((6172, 6178), 'multiprocessing.Lock', 'Lock', ([], {}), '()\n', (6176, 6178), False, 'from multiprocessing import Process, Queue, Lock\n'), ((6198, 6224), 'PyMaSC.utils.progress.MultiLineProgressManager', 'MultiLineProgressManager', ([], {}), '()\n', (6222, 6224), False, 'from PyMaSC.utils.progress import ProgressHook, MultiLineProgressManager\n'), ((7563, 7617), 'PyMaSC.core.mappability.MappableLengthCalculator', 'MappableLengthCalculator', (['path', 'max_shift', 'logger_lock'], {}), '(path, max_shift, logger_lock)\n', (7587, 7617), False, 'from PyMaSC.core.mappability import MappableLengthCalculator\n'), ((7821, 7847), 'PyMaSC.utils.progress.ProgressHook', 'ProgressHook', (['report_queue'], {}), '(report_queue)\n', (7833, 7847), False, 'from PyMaSC.utils.progress import ProgressHook, MultiLineProgressManager\n'), ((1289, 1313), 'os.access', 'os.access', (['path', 'os.R_OK'], {}), '(path, os.R_OK)\n', (1298, 1313), False, 'import os\n'), ((1833, 1862), 'os.path.exists', 'os.path.exists', (['self.map_path'], {}), '(self.map_path)\n', (1847, 1862), False, 'import os\n'), ((2819, 2850), 'PyMaSC.utils.output.prepare_outdir', 'prepare_outdir', (['dirname', 'logger'], {}), '(dirname, logger)\n', (2833, 2850), False, 'from PyMaSC.utils.output import prepare_outdir\n'), ((3526, 3538), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3535, 3538), False, 'import json\n'), ((4903, 4936), 'os.access', 'os.access', (['self.map_path', 'os.W_OK'], {}), '(self.map_path, os.W_OK)\n', (4912, 4936), False, 'import os\n'), ((5948, 5956), 'PyMaSC.utils.compatible.tostr', 'tostr', (['c'], {}), '(c)\n', (5953, 5956), False, 'from PyMaSC.utils.compatible import tostr, xrange\n'), ((6422, 6475), 'PyMaSC.utils.calc.exec_worker_pool', 'exec_worker_pool', (['workers', 'target_chroms', 'order_queue'], {}), '(workers, target_chroms, order_queue)\n', (6438, 6475), False, 'from PyMaSC.utils.calc import exec_worker_pool\n'), ((7283, 7309), 'PyMaSC.utils.compatible.xrange', 'xrange', (['(self.max_shift + 1)'], {}), '(self.max_shift + 1)\n', (7289, 7309), False, 'from PyMaSC.utils.compatible import tostr, xrange\n'), ((1361, 1381), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (1375, 1381), False, 'import os\n'), ((2031, 2060), 'os.path.isfile', 'os.path.isfile', (['self.map_path'], {}), '(self.map_path)\n', (2045, 2060), False, 'import os\n'), ((5458, 5624), 'json.dump', 'json.dump', (["{'max_shift': self.max_shift, '__whole__': self.mappable_len, 'references':\n self.chrom2mappable_len}", 'f'], {'indent': '(4)', 'sort_keys': '(True)', 'cls': 'NumpyEncoder'}), "({'max_shift': self.max_shift, '__whole__': self.mappable_len,\n 'references': self.chrom2mappable_len}, f, indent=4, sort_keys=True,\n cls=NumpyEncoder)\n", (5467, 5624), False, 'import json\n'), ((1769, 1791), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (1785, 1791), False, 'import os\n'), ((2195, 2228), 'os.access', 'os.access', (['self.map_path', 'os.R_OK'], {}), 
'(self.map_path, os.R_OK)\n', (2204, 2228), False, 'import os\n'), ((7969, 7980), 'os.getpid', 'os.getpid', ([], {}), '()\n', (7978, 7980), False, 'import os\n')] |
Nahid-Hassan/fullstack-software-development | courses/backend/django-for-everybody/Web Application Technologies and Django/resources/dj4e-samples/tmpl/views.py | 892ffb33e46795061ea63378279a6469de317b1a | from django.shortcuts import render
from django.views import View
# Create your views here.
def simple(request):
return render(request, 'tmpl/simple.html')
def guess(request) :
context = {'zap' : '42' }
return render(request, 'tmpl/guess.html', context)
def special(request) :
context = {'txt' : '<b>bold</b>',
'zap' : '42' }
return render(request, 'tmpl/special.html', context)
def loop(request) :
f = ['Apple', 'Orange', 'Banana', 'Lychee']
n = ['peanut', 'cashew']
x = {'fruits' : f, 'nuts' : n, 'zap' : '42' }
return render(request, 'tmpl/loop.html', x)
def cond(request) :
x = {'guess' : '42' }
return render(request, 'tmpl/cond.html', x)
def nested(request) :
x = {'outer' : { 'inner' : '42' } }
return render(request, 'tmpl/nested.html', x)
# Call this with a parameter number
class GameView(View) :
def get(self, request, guess) :
x = {'guess' : int(guess) }
return render(request, 'tmpl/cond.html', x)
# Using inheritance (extend)
class Game2View(View) :
def get(self, request, guess) :
x = {'guess' : int(guess) }
return render(request, 'tmpl/cond2.html', x)
| [((126, 161), 'django.shortcuts.render', 'render', (['request', '"""tmpl/simple.html"""'], {}), "(request, 'tmpl/simple.html')\n", (132, 161), False, 'from django.shortcuts import render\n'), ((225, 268), 'django.shortcuts.render', 'render', (['request', '"""tmpl/guess.html"""', 'context'], {}), "(request, 'tmpl/guess.html', context)\n", (231, 268), False, 'from django.shortcuts import render\n'), ((372, 417), 'django.shortcuts.render', 'render', (['request', '"""tmpl/special.html"""', 'context'], {}), "(request, 'tmpl/special.html', context)\n", (378, 417), False, 'from django.shortcuts import render\n'), ((577, 613), 'django.shortcuts.render', 'render', (['request', '"""tmpl/loop.html"""', 'x'], {}), "(request, 'tmpl/loop.html', x)\n", (583, 613), False, 'from django.shortcuts import render\n'), ((672, 708), 'django.shortcuts.render', 'render', (['request', '"""tmpl/cond.html"""', 'x'], {}), "(request, 'tmpl/cond.html', x)\n", (678, 708), False, 'from django.shortcuts import render\n'), ((783, 821), 'django.shortcuts.render', 'render', (['request', '"""tmpl/nested.html"""', 'x'], {}), "(request, 'tmpl/nested.html', x)\n", (789, 821), False, 'from django.shortcuts import render\n'), ((973, 1009), 'django.shortcuts.render', 'render', (['request', '"""tmpl/cond.html"""', 'x'], {}), "(request, 'tmpl/cond.html', x)\n", (979, 1009), False, 'from django.shortcuts import render\n'), ((1151, 1188), 'django.shortcuts.render', 'render', (['request', '"""tmpl/cond2.html"""', 'x'], {}), "(request, 'tmpl/cond2.html', x)\n", (1157, 1188), False, 'from django.shortcuts import render\n')] |
jykim-rust/python | webapp/ex.py | 50efe51733976d9f8ae3be47d628601ad002d836 | from flask import escape
'''with open('ex') as full:
for line in full:
print(line,end='**')
'''
'''
a=[]
with open('ex') as full:
for line in full:
a.append(line.split('|'))
print(a)
'''
'''
with open('ex') as full:
for line in full.readline():
print(line)
'''
contents=[]
with open('ex') as log:
for line in log:
#contents.append([])
for item in line.split('|'):
contents.append(item)
print(contents)
| [] |
Vman45/ask-alexa-twitter | lib/twitter_utils.py | 1711005e51db1f66beb2e41e762c39ee003273aa | import requests
import jsonpickle
from requests_oauthlib import OAuth1
from urllib.parse import parse_qs, urlencode
import cherrypy
from collections import defaultdict
import json
import os
import re
# For readable serializations
jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
class LocalCache(object):
""" Generic class for encapsulating twitter credential caching """
server_data_template = "{}.server"
user_data_template = "{0}.user.{1}"
def __init__(self, backup = "tmp/twitter.cache"):
self.backup = backup #Unique identifier for the backup of this cache
self.memcache = {
"users" : defaultdict(lambda : {}),
"server": defaultdict(lambda : {})
}
self.deserialize()
def users(self):
return self.memcache['users']
def set_user_state(self, user_id, state):
self.memcache['users'][user_id] = state
def update_user_state(self, user_id, state = {}):
self.memcache['users'][user_id].update(state)
def get_user_state(self, user_id):
return self.memcache['users'][user_id]
def clear_user_state(self, user_id):
return self.memcache['users'][user_id].clear()
def update_server_state(self, state_dict):
self.memcache['server'].update(state_dict)
def get_server_state(self):
return self.memcache['server']
def clear_server_state(self):
return self.memcache['server'].clear()
def initialize_user_queue(self, user_id, queue):
self.memcache['users'][user_id]['user_queue'] = ReadableQueue(queue)
def user_queue(self, user_id):
if 'user_queue' in self.memcache['users'][user_id]:
return self.memcache['users'][user_id]['user_queue']
def server_fname(self):
return self.server_data_template.format(self.backup)
def user_fname(self, user):
return self.user_data_template.format(self.backup, user)
def deserialize(self):
cache_loaded = False
if os.path.exists(self.server_fname()) and not os.path.isdir(self.backup):
try:
self.memcache = { "server" : {},
"users" : {} }
with open(self.server_fname()) as backupfile:
print ("Attempting to reload cache")
self.memcache['server'] = jsonpickle.decode(backupfile.read())
print ("Server cache loaded", json.dumps(self.memcache, indent=4))
for user in self.memcache['server']['user_list']:
# Try to load as much user data as possible
if os.path.exists(self.user_fname(user)):
print ("found path for user", user)
with open(self.user_fname(user)) as userfile:
user_data = jsonpickle.decode(userfile.read())
self.memcache['users'][user] = user_data
cache_loaded = True
except Exception as e:
print ("Cache file corrupted...")
raise e
if not cache_loaded:
print ("Cache could not be loaded")
pass
else:
print ("CACHE LOADED SUCCESSFULLY!")
def serialize(self):
json_to_serialize = self.memcache['server']
user_list = list(self.users().keys())
json_to_serialize.update({"user_list" : user_list})
with open(self.server_fname(), 'w') as backup_server:
# Serialize Server:
json_encoded = jsonpickle.encode(json_to_serialize)
backup_server.write(json_encoded)
for user in user_list:
user_data = self.get_user_state(user)
json_encoded = jsonpickle.encode(user_data)
with open(self.user_fname(user), 'w') as userfile:
userfile.write(json_encoded)
class ReadableQueue(object):
def __init__(self, queue=[], pos=0):
self.hashmap = { "queue" : [(i, e) for i,e in enumerate(queue)],
"pos" : pos }
return
def queue(self):
return self.hashmap['queue']
def is_empty(self):
return len(self.queue()) == 0
def is_finished(self):
return self.pos() == len(self.queue())
def pos(self):
return self.hashmap['pos']
def set_pos(self, val):
self.hashmap['pos'] = val
def get_next(self, offset=1):
if self.pos() < len(self.queue()):
temp_queue = self.queue()[self.pos(): self.pos() + offset]
self.set_pos(self.pos() + offset)
if self.pos() > len(self.queue()): self.set_pos(len(self.queue()))
return temp_queue
def read_out_next(self, offset=1):
return " ".join([readable.read_out(index) for index,readable in self.get_next(offset)])
def has_prev(self):
return self.pos() > 0
def get_prev(self, offset=1):
if self.pos() > 0:
self.set_pos(self.pos() - offset)
if self.pos() < 0:
offset = offset + self.pos()
# [1, current(2), 3] get_prev(offeset=3)
# pos :=> -2, offset :=> 3-2 = 1, pos :=> 0, then read 0 to 1
self.set_pos(0)
return self.queue()[self.pos() : offset]
return None
    def read_out_prev(self, offset=1):
        return " ".join([readable.read_out(index) for index, readable in self.get_prev(offset)])
#Local cache caches tokens for different users
local_cache = LocalCache()
def strip_html(text):
""" Get rid of ugly twitter html """
def reply_to(text):
        replying_to = []
        split_text = text.split()
        message = []  # fallback in case every token is an @mention
        for index, token in enumerate(split_text):
if token.startswith('@'): replying_to.append(token[1:])
else:
message = split_text[index:]
break
rply_msg = ""
if len(replying_to) > 0:
rply_msg = "Replying to "
for token in replying_to[:-1]: rply_msg += token+","
if len(replying_to)>1: rply_msg += 'and '
rply_msg += replying_to[-1]+". "
return rply_msg + " ".join(message)
text = reply_to(text)
text = text.replace('@', ' ')
return " ".join([token for token in text.split()
if ('http:' not in token) and ('https:' not in token)])
class Tweet(object):
def __init__(self, json_obj):
self.tweet = json_obj
def get_id(self):
return self.tweet['id']
def get_raw_text(self):
return self.tweet['text']
def _process_text(self):
text = strip_html(self.tweet['text'])
user_mentions = self.tweet['entities']['user_mentions']
text = text.replace('@', 'at ')
for user in user_mentions:
text = text.replace(user['screen_name'], user['name'])
return text
def get_screen_name(self):
return self.tweet['user']['screen_name']
def get_user_name(self):
return self.tweet['user']['name']
def read_out(self, index):
text = self._process_text()
return "tweet number {num} by {user} : {text} ,".format(num=index+1,
user=self.get_user_name(),
text = text)
def detailed_description(self):
response_builder = ["This tweet was posted by {user_name} whose twitter handle is {screen_name} the account description reads: {description}."
.format(screen_name=self.tweet['user']['screen_name'],
user_name=self.tweet['user']['name'],
description=self.tweet['user']['description'])]
if self.tweet['retweeted']:
response_builder += ["It's been retweeted {} times.".format(self.tweet['retweet_count'])]
if self.tweet['favorited']:
response_builder += ["{} people have favorited it.".format(self.tweet['favorites_count'])]
if self.tweet["in_reply_to_screen_name"]:
response_builder += ["it was posted in response to user {}.".format(self.tweet['in_reply_to_screen_name'])]
response_builder += ["the text of the tweet is, {}.".format(self._process_text())]
return " ".join(response_builder)
    def user_mentions(self):
        return self.tweet['entities']['user_mentions']
def get_cached_access_pair(uid):
if uid in local_cache.users():
access_token = local_cache.get_user_state(uid)['access_token']
access_secret = local_cache.get_user_state(uid)['access_secret']
return access_token, access_secret
else:
raise ValueError
def get_request_token(callback_url=None):
url = "https://api.twitter.com/oauth/request_token"
consumer_key, consumer_secret = local_cache.get_server_state()['twitter_keys']
auth = OAuth1(consumer_key, consumer_secret)
params = { "oauth_callback" : callback_url }
r = requests.post(url, auth=auth, params=params)
response_obj = parse_qs(r.text)
local_cache.update_server_state({ "request_token" : response_obj['oauth_token'][0],
"request_secret": response_obj['oauth_token_secret'][0] })
return response_obj['oauth_token_secret'], response_obj['oauth_token']
def authenticate_user_page(callback_url="", metadata=None):
url = "https://api.twitter.com/oauth/authenticate"
oauth_secret, oauth_token = get_request_token(callback_url)
local_cache.update_server_state({'metadata' : metadata })
params = { "force_login" : True,
"oauth_token": oauth_token }
r = requests.get(url, params=params)
return r.text
def post_tweet(user_id, message, additional_params={}):
"""
Helper function to post a tweet
"""
url = "https://api.twitter.com/1.1/statuses/update.json"
params = { "status" : message }
params.update(additional_params)
r = make_twitter_request(url, user_id, params, request_type='POST')
print (r.text)
return "Successfully posted a tweet {}".format(message)
def get_access_token(oauth_token, oauth_verifier):
url = "https://api.twitter.com/oauth/access_token"
params = {"oauth_verifier" : oauth_verifier}
server_state = local_cache.get_server_state()
request_token = server_state['request_token']
request_secret = server_state['request_secret']
consumer_key, consumer_secret = server_state['twitter_keys']
auth = OAuth1(consumer_key, consumer_secret, request_token, request_secret)
r = requests.post(url, params = params, auth=auth)
response_obj = parse_qs(r.text)
uid = response_obj['oauth_token'][0]
print ("Access token", uid)
local_cache.set_user_state(user_id = uid,
state = { "access_token" : response_obj['oauth_token'][0],
"access_secret" : response_obj['oauth_token_secret'][0],
'twitter_user_id': response_obj['user_id'][0],
'screen_name' : response_obj ['screen_name'][0]
})
local_cache.serialize()
fragments = {
"state" : local_cache.get_server_state()['metadata']['state'],
"access_token" : uid,
"token_type" : "Bearer"
}
return urlencode(fragments)
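# Rough sketch of how the OAuth helpers above chain together in Twitter's
# three-legged flow (illustrative; the callback URL is an assumption and the
# oauth_token/oauth_verifier values arrive via Twitter's redirect back to it):
#
#   page_html = authenticate_user_page(callback_url="https://example.com/callback")
#   # ...user signs in on Twitter, which redirects to the callback with
#   # oauth_token and oauth_verifier query parameters...
#   fragment = get_access_token(oauth_token, oauth_verifier)
#   # `fragment` encodes the cached access-token id used by the other helpers.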
def get_twitter_auth(user_id):
consumer_key, consumer_secret = local_cache.get_server_state()['twitter_keys']
access_token, access_secret = get_cached_access_pair(user_id)
return OAuth1(consumer_key, consumer_secret, access_token, access_secret)
def process_tweets(tweet_list):
""" Clean tweets and enumerate, preserving only things that we are interested in """
return [Tweet(tweet) for tweet in tweet_list]
def make_twitter_request(url, user_id, params={}, request_type='GET'):
""" Generically make a request to twitter API using a particular user's authorization """
if request_type == "GET":
return requests.get(url, auth=get_twitter_auth(user_id), params=params)
elif request_type == "POST":
return requests.post(url, auth=get_twitter_auth(user_id), params=params)
def get_user_twitter_details(user_id, params={}):
url = "https://api.twitter.com/1.1/users/lookup.json"
user_cache = local_cache.get_user_state(user_id)
params.update({"user_id": user_cache['twitter_user_id'] })
response = make_twitter_request(url, user_id, params)
return response.json()
def geo_search(user_id, search_location):
"""
Search for a location - free form
"""
url = "https://api.twitter.com/1.1/geo/search.json"
params = {"query" : search_location }
response = make_twitter_request(url, user_id, params).json()
return response
def closest_trend_search(user_id, params={}):
#url = "https://api.twitter.com/1.1/trends/place.json"
url = "https://api.twitter.com/1.1/trends/closest.json"
response = make_twitter_request(url, user_id, params).json()
return response
def list_trends(user_id, woe_id):
url = "https://api.twitter.com/1.1/trends/place.json"
params = { "id" : woe_id }
response = make_twitter_request(url, user_id, params).json()
return response
def read_out_tweets(processed_tweets, speech_convertor=None):
"""
Input - list of processed 'Tweets'
output - list of spoken responses
"""
return ["tweet number {num} by {user}. {text}.".format(num=index+1, user=user, text=text)
for index, (user, text) in enumerate(processed_tweets)]
def request_tweet_list(url, user_id, params={}):
return process_tweets(make_twitter_request(url, user_id).json())
def get_home_tweets(user_id, input_params={}):
url = "https://api.twitter.com/1.1/statuses/home_timeline.json"
print ("Trying to get home tweets")
response = request_tweet_list(url, user_id)
return response
def get_retweets_of_me(user_id, input_params={}):
""" returns recently retweeted tweets """
url = "https://api.twitter.com/1.1/statuses/retweets_of_me.json"
print ("trying to get retweets")
return request_tweet_list(url, user_id)
def get_my_favourite_tweets(user_id, input_params = {}):
""" Returns a user's favourite tweets """
url = "https://api.twitter.com/1.1/favorites/list.json"
return request_tweet_list(url, user_id)
def get_user_latest_tweets(user_id, params={}):
url = "https://api.twitter.com/1.1/statuses/user_timeline.json?"
return request_tweet_list(url, user_id, params)
def get_latest_twitter_mentions(user_id):
url = "https://api.twitter.com/1.1/statuses/mentions_timeline.json"
return request_tweet_list(url, user_id)
def search_for_tweets_about(user_id, params):
""" Search twitter API """
url = "https://api.twitter.com/1.1/search/tweets.json"
response = make_twitter_request(url, user_id, params)
return process_tweets(response.json()["statuses"])
| [((269, 333), 'jsonpickle.set_encoder_options', 'jsonpickle.set_encoder_options', (['"""json"""'], {'sort_keys': '(True)', 'indent': '(4)'}), "('json', sort_keys=True, indent=4)\n", (299, 333), False, 'import jsonpickle\n'), ((9171, 9208), 'requests_oauthlib.OAuth1', 'OAuth1', (['consumer_key', 'consumer_secret'], {}), '(consumer_key, consumer_secret)\n', (9177, 9208), False, 'from requests_oauthlib import OAuth1\n'), ((9267, 9311), 'requests.post', 'requests.post', (['url'], {'auth': 'auth', 'params': 'params'}), '(url, auth=auth, params=params)\n', (9280, 9311), False, 'import requests\n'), ((9331, 9347), 'urllib.parse.parse_qs', 'parse_qs', (['r.text'], {}), '(r.text)\n', (9339, 9347), False, 'from urllib.parse import parse_qs, urlencode\n'), ((9949, 9981), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (9961, 9981), False, 'import requests\n'), ((10792, 10860), 'requests_oauthlib.OAuth1', 'OAuth1', (['consumer_key', 'consumer_secret', 'request_token', 'request_secret'], {}), '(consumer_key, consumer_secret, request_token, request_secret)\n', (10798, 10860), False, 'from requests_oauthlib import OAuth1\n'), ((10870, 10914), 'requests.post', 'requests.post', (['url'], {'params': 'params', 'auth': 'auth'}), '(url, params=params, auth=auth)\n', (10883, 10914), False, 'import requests\n'), ((10936, 10952), 'urllib.parse.parse_qs', 'parse_qs', (['r.text'], {}), '(r.text)\n', (10944, 10952), False, 'from urllib.parse import parse_qs, urlencode\n'), ((11676, 11696), 'urllib.parse.urlencode', 'urlencode', (['fragments'], {}), '(fragments)\n', (11685, 11696), False, 'from urllib.parse import parse_qs, urlencode\n'), ((11896, 11962), 'requests_oauthlib.OAuth1', 'OAuth1', (['consumer_key', 'consumer_secret', 'access_token', 'access_secret'], {}), '(consumer_key, consumer_secret, access_token, access_secret)\n', (11902, 11962), False, 'from requests_oauthlib import OAuth1\n'), ((692, 716), 'collections.defaultdict', 'defaultdict', (['(lambda : {})'], {}), '(lambda : {})\n', (703, 716), False, 'from collections import defaultdict\n'), ((740, 764), 'collections.defaultdict', 'defaultdict', (['(lambda : {})'], {}), '(lambda : {})\n', (751, 764), False, 'from collections import defaultdict\n'), ((3667, 3703), 'jsonpickle.encode', 'jsonpickle.encode', (['json_to_serialize'], {}), '(json_to_serialize)\n', (3684, 3703), False, 'import jsonpickle\n'), ((3871, 3899), 'jsonpickle.encode', 'jsonpickle.encode', (['user_data'], {}), '(user_data)\n', (3888, 3899), False, 'import jsonpickle\n'), ((2151, 2177), 'os.path.isdir', 'os.path.isdir', (['self.backup'], {}), '(self.backup)\n', (2164, 2177), False, 'import os\n'), ((2560, 2595), 'json.dumps', 'json.dumps', (['self.memcache'], {'indent': '(4)'}), '(self.memcache, indent=4)\n', (2570, 2595), False, 'import json\n')] |
jamesabel/sundry | sundry/serializable.py | 4f63bfa0624c88a3cd05adf2784e9e3e66e094f4 | import json
from enum import Enum
from decimal import Decimal
def convert_serializable_special_cases(o):
"""
Convert an object to a type that is fairly generally serializable (e.g. json serializable).
This only handles the cases that need converting. The json module handles all the rest.
    For JSON, use json.dump or json.dumps with the argument
    default=convert_serializable_special_cases.
    Example:
    json.dumps(my_animal, indent=4, default=convert_serializable_special_cases)
:param o: object to be converted to a type that is serializable
:return: a serializable representation
"""
if isinstance(o, Enum):
serializable_representation = o.value
elif isinstance(o, Decimal):
# decimal.Decimal (e.g. in AWS DynamoDB), both integer and floating point
if o % 1 == 0:
# if representable with an integer, use an integer
serializable_representation = int(o)
else:
# not representable with an integer so use a float
serializable_representation = float(o)
else:
raise NotImplementedError(f"can not serialize {o} since type={type(o)}")
return serializable_representation
def make_serializable(o):
# Convert an object to a type that is fairly generally serializable (e.g. json serializable).
return json.loads(json.dumps(o, default=convert_serializable_special_cases, sort_keys=True))
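
# Minimal usage sketch (the Color enum is a made-up example, not part of this module):
#
#   from enum import Enum
#   from decimal import Decimal
#
#   class Color(Enum):
#       RED = 1
#
#   make_serializable({"color": Color.RED, "price": Decimal("9.99"), "qty": Decimal(3)})
#   # -> {'color': 1, 'price': 9.99, 'qty': 3}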
| [((1324, 1397), 'json.dumps', 'json.dumps', (['o'], {'default': 'convert_serializable_special_cases', 'sort_keys': '(True)'}), '(o, default=convert_serializable_special_cases, sort_keys=True)\n', (1334, 1397), False, 'import json\n')] |
FrancisLiang/models-1 | legacy/neural_qa/train.py | e14d5bc1ab36d0dd11977f27cff54605bf99c945 | import sys
import os
import argparse
import numpy as np
import paddle.v2 as paddle
import reader
import utils
import network
import config
from utils import logger
def save_model(trainer, model_save_dir, parameters, pass_id):
f = os.path.join(model_save_dir, "params_pass_%05d.tar.gz" % pass_id)
logger.info("model saved to %s" % f)
with utils.open_file(f, "w") as f:
trainer.save_parameter_to_tar(f)
def show_parameter_init_info(parameters):
"""
Print the information of initialization mean and standard deviation of
parameters
:param parameters: the parameters created in a model
"""
logger.info("Parameter init info:")
for p in parameters:
p_val = parameters.get(p)
logger.info(("%-25s : initial_mean=%-7.4f initial_std=%-7.4f "
"actual_mean=%-7.4f actual_std=%-7.4f dims=%s") %
(p, parameters.__param_conf__[p].initial_mean,
parameters.__param_conf__[p].initial_std, p_val.mean(),
p_val.std(), parameters.__param_conf__[p].dims))
logger.info("\n")
def show_parameter_status(parameters):
"""
Print some statistical information of parameters in a network
:param parameters: the parameters created in a model
"""
for p in parameters:
abs_val = np.abs(parameters.get(p))
abs_grad = np.abs(parameters.get_grad(p))
logger.info(
("%-25s avg_abs_val=%-10.6f max_val=%-10.6f avg_abs_grad=%-10.6f "
"max_grad=%-10.6f min_val=%-10.6f min_grad=%-10.6f") %
(p, abs_val.mean(), abs_val.max(), abs_grad.mean(), abs_grad.max(),
abs_val.min(), abs_grad.min()))
def train(conf):
if not os.path.exists(conf.model_save_dir):
os.makedirs(conf.model_save_dir, mode=0755)
settings = reader.Settings(
vocab=conf.vocab,
is_training=True,
label_schema=conf.label_schema,
negative_sample_ratio=conf.negative_sample_ratio,
hit_ans_negative_sample_ratio=conf.hit_ans_negative_sample_ratio,
keep_first_b=conf.keep_first_b,
seed=conf.seed)
samples_per_pass = conf.batch_size * conf.batches_per_pass
train_reader = paddle.batch(
paddle.reader.buffered(
reader.create_reader(conf.train_data_path, settings,
samples_per_pass),
size=samples_per_pass),
batch_size=conf.batch_size)
# TODO(lipeng17) v2 API does not support parallel_nn yet. Therefore, we can
# only use CPU currently
paddle.init(
use_gpu=conf.use_gpu,
trainer_count=conf.trainer_count,
seed=conf.paddle_seed)
# network config
cost = network.training_net(conf)
# create parameters
# NOTE: parameter values are not initilized here, therefore, we need to
# print parameter initialization info in the beginning of the first batch
parameters = paddle.parameters.create(cost)
# create optimizer
rmsprop_optimizer = paddle.optimizer.RMSProp(
learning_rate=conf.learning_rate,
rho=conf.rho,
epsilon=conf.epsilon,
model_average=paddle.optimizer.ModelAverage(
average_window=conf.average_window,
max_average_window=conf.max_average_window))
# create trainer
trainer = paddle.trainer.SGD(cost=cost,
parameters=parameters,
update_equation=rmsprop_optimizer)
# begin training network
def _event_handler(event):
"""
Define end batch and end pass event handler
"""
if isinstance(event, paddle.event.EndIteration):
sys.stderr.write(".")
batch_num = event.batch_id + 1
total_batch = conf.batches_per_pass * event.pass_id + batch_num
if batch_num % conf.log_period == 0:
sys.stderr.write("\n")
logger.info("Total batch=%d Batch=%d CurrentCost=%f Eval: %s" \
% (total_batch, batch_num, event.cost, event.metrics))
if batch_num % conf.show_parameter_status_period == 0:
show_parameter_status(parameters)
elif isinstance(event, paddle.event.EndPass):
save_model(trainer, conf.model_save_dir, parameters, event.pass_id)
elif isinstance(event, paddle.event.BeginIteration):
if event.batch_id == 0 and event.pass_id == 0:
show_parameter_init_info(parameters)
## for debugging purpose
#with utils.open_file("config", "w") as config:
# print >> config, paddle.layer.parse_network(cost)
trainer.train(
reader=train_reader,
event_handler=_event_handler,
feeding=network.feeding,
num_passes=conf.num_passes)
logger.info("Training has finished.")
def main():
conf = config.TrainingConfig()
logger.info("loading word embeddings...")
conf.vocab, conf.wordvecs = utils.load_wordvecs(conf.word_dict_path,
conf.wordvecs_path)
logger.info("loaded")
logger.info("length of word dictionary is : %d." % len(conf.vocab))
train(conf)
if __name__ == "__main__":
main()
| [] |
astro-friedel/yggdrasil | yggdrasil/drivers/MatlabModelDriver.py | 5ecbfd083240965c20c502b4795b6dc93d94b020 | import subprocess
import uuid as uuid_gen
import logging
from datetime import datetime
import os
import psutil
import warnings
import weakref
from yggdrasil import backwards, tools, platform, serialize
from yggdrasil.languages import get_language_dir
from yggdrasil.config import ygg_cfg
from yggdrasil.drivers.InterpretedModelDriver import InterpretedModelDriver
from yggdrasil.tools import TimeOut, sleep
logger = logging.getLogger(__name__)
try: # pragma: matlab
disable_engine = ygg_cfg.get('matlab', 'disable_engine', 'False').lower()
if platform._is_win or (disable_engine == 'true'):
_matlab_engine_installed = False
if not tools.is_subprocess():
logger.debug("matlab.engine disabled")
else:
import matlab.engine
_matlab_engine_installed = True
except ImportError: # pragma: no matlab
logger.debug("Could not import matlab.engine. "
+ "Matlab support for using a sharedEngine will be disabled.")
_matlab_engine_installed = False
_top_lang_dir = get_language_dir('matlab')
_compat_map = {
'R2015b': ['2.7', '3.3', '3.4'],
'R2017a': ['2.7', '3.3', '3.4', '3.5'],
'R2017b': ['2.7', '3.3', '3.4', '3.5', '3.6'],
'R2018b': ['2.7', '3.3', '3.4', '3.5', '3.6']}
def kill_all():
r"""Kill all Matlab shared engines."""
if platform._is_win: # pragma: windows
os.system(('taskkill /F /IM matlab.engine.shareEngine /T'))
else:
os.system(('pkill -f matlab.engine.shareEngine'))
def locate_matlab_engine_processes(): # pragma: matlab
r"""Get all of the active matlab sharedEngine processes.
Returns:
list: Active matlab sharedEngine processes.
"""
out = []
for p in psutil.process_iter():
p.info = p.as_dict(attrs=['name', 'pid', 'cmdline'])
if (((p.info['name'] == 'MATLAB')
and ('matlab.engine.shareEngine' in p.info['cmdline']))):
out.append(p) # p.info['pid'])
return out
def is_matlab_running():
r"""Determine if there is a Matlab engine running.
Returns:
bool: True if there is a Matlab engine running, False otherwise.
"""
if not _matlab_engine_installed: # pragma: no matlab
out = False
else: # pragma: matlab
out = (len(matlab.engine.find_matlab()) != 0)
return out
def locate_matlabroot(): # pragma: matlab
r"""Find directory that servers as matlab root.
Returns:
str: Full path to matlabroot directory.
"""
return MatlabModelDriver.get_matlab_info()[0]
def install_matlab_engine(): # pragma: matlab
r"""Install the MATLAB engine API for Python."""
if not _matlab_engine_installed:
mtl_root = locate_matlabroot()
mtl_setup = os.path.join(mtl_root, 'extern', 'engines', 'python')
cmd = 'python setup.py install'
result = subprocess.check_output(cmd, cwd=mtl_setup)
print(result)
def start_matlab_engine(skip_connect=False, timeout=None): # pragma: matlab
r"""Start a Matlab shared engine session inside a detached screen
session.
Args:
skip_connect (bool, optional): If True, the engine is not connected.
Defaults to False.
timeout (int, optional): Time (in seconds) that should be waited for
Matlab to start up. Defaults to None and is set from the config
option ('matlab', 'startup_waittime_s').
Returns:
tuple: Information on the started session including the name of the
screen session running matlab, the created engine object, the name
of the matlab session, and the matlab engine process.
Raises:
RuntimeError: If Matlab is not installed.
"""
if not _matlab_engine_installed: # pragma: no matlab
raise RuntimeError("Matlab engine is not installed.")
if timeout is None:
timeout = float(ygg_cfg.get('matlab', 'startup_waittime_s', 10))
old_process = set(locate_matlab_engine_processes())
old_matlab = set(matlab.engine.find_matlab())
screen_session = str('ygg_matlab' + datetime.today().strftime("%Y%j%H%M%S")
+ '_%d' % len(old_matlab))
try:
args = ['screen', '-dmS', screen_session, '-c',
os.path.join(_top_lang_dir, 'matlab_screenrc'),
'matlab', '-nodisplay', '-nosplash', '-nodesktop', '-nojvm',
'-r', '"matlab.engine.shareEngine"']
subprocess.call(' '.join(args), shell=True)
T = TimeOut(timeout)
while ((len(set(matlab.engine.find_matlab()) - old_matlab) == 0)
and not T.is_out):
logger.debug('Waiting for matlab engine to start')
sleep(1) # Usually 3 seconds
except KeyboardInterrupt: # pragma: debug
args = ['screen', '-X', '-S', screen_session, 'quit']
subprocess.call(' '.join(args), shell=True)
raise
if (len(set(matlab.engine.find_matlab()) - old_matlab) == 0): # pragma: debug
raise Exception("start_matlab timed out at %f s" % T.elapsed)
new_matlab = list(set(matlab.engine.find_matlab()) - old_matlab)[0]
new_process = list(set(locate_matlab_engine_processes()) - old_process)[0]
# Connect to the engine
matlab_engine = None
if not skip_connect:
matlab_engine = connect_matlab_engine(new_matlab, first_connect=True)
return screen_session, matlab_engine, new_matlab, new_process
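# Illustrative use of the engine helpers in this module (assumes MATLAB and its
# Python engine API are installed; the variable names are placeholders):
#
#   screen, engine, session, proc = start_matlab_engine(timeout=30)
#   engine.eval("disp('hello from a shared MATLAB engine')", nargout=0)
#   stop_matlab_engine(screen, engine, session, proc)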
def connect_matlab_engine(matlab_session, first_connect=False): # pragma: matlab
r"""Connect to Matlab engine.
Args:
matlab_session (str): Name of the Matlab session that should be
connected to.
first_connect (bool, optional): If True, this is the first time
Python is connecting to the Matlab shared engine and certain
environment variables should be set. Defaults to False.
Returns:
MatlabEngine: Matlab engine that was connected.
"""
matlab_engine = matlab.engine.connect_matlab(matlab_session)
matlab_engine.eval('clear classes;', nargout=0)
err = backwards.StringIO()
try:
matlab_engine.eval("YggInterface('YGG_MSG_MAX');", nargout=0,
stderr=err)
except BaseException:
for x in MatlabModelDriver.paths_to_add:
matlab_engine.addpath(x, nargout=0)
matlab_engine.eval("os = py.importlib.import_module('os');", nargout=0)
if not first_connect:
if backwards.PY2:
matlab_engine.eval("py.reload(os);", nargout=0)
else:
# matlab_engine.eval("py.importlib.reload(os);", nargout=0)
pass
return matlab_engine
def stop_matlab_engine(screen_session, matlab_engine, matlab_session,
matlab_process, keep_engine=False): # pragma: matlab
r"""Stop a Matlab shared engine session running inside a detached screen
session.
Args:
screen_session (str): Name of the screen session that the shared
Matlab session was started in.
matlab_engine (MatlabEngine): Matlab engine that should be stopped.
matlab_session (str): Name of Matlab session that the Matlab engine is
connected to.
matlab_process (psutil.Process): Process running the Matlab shared engine.
keep_engine (bool, optional): If True, the references to the engine will be
removed so it is not deleted. Defaults to False.
Raises:
RuntimeError: If Matlab is not installed.
"""
if not _matlab_engine_installed: # pragma: no matlab
raise RuntimeError("Matlab engine is not installed.")
if keep_engine and (matlab_engine is not None):
if '_matlab' in matlab_engine.__dict__:
matlab_engine.quit()
return
# Remove weakrefs to engine to prevent stopping engine more than once
if matlab_engine is not None:
# Remove weak references so engine not deleted on exit
eng_ref = weakref.getweakrefs(matlab_engine)
for x in eng_ref:
if x in matlab.engine._engines:
matlab.engine._engines.remove(x)
# Either exit the engine or remove its reference
if matlab_session in matlab.engine.find_matlab():
try:
matlab_engine.eval('exit', nargout=0)
except BaseException:
pass
else: # pragma: no cover
matlab_engine.__dict__.pop('_matlab', None)
# Stop the screen session containing the Matlab shared session
if screen_session is not None:
if matlab_session in matlab.engine.find_matlab():
os.system(('screen -X -S %s quit') % screen_session)
T = TimeOut(5)
while ((matlab_session in matlab.engine.find_matlab())
and not T.is_out):
logger.debug("Waiting for matlab engine to exit")
sleep(1)
if (matlab_session in matlab.engine.find_matlab()): # pragma: debug
if matlab_process is not None:
matlab_process.terminate()
logger.error("stop_matlab_engine timed out at %f s. " % T.elapsed
+ "Killed Matlab sharedEngine process.")
class MatlabProcess(tools.YggClass): # pragma: matlab
r"""Add features to mimic subprocess.Popen while running Matlab function
asynchronously.
Args:
target (func): Matlab function that should be called.
args (list, tuple): Arguments that should be passed to target.
kwargs (dict, optional): Keyword arguments that should be passed to
target. Defaults to empty dict.
name (str, optional): A name for the process. Generated if not provided.
matlab_engine (MatlabEngine, optional): MatlabEngine that should be used
to get errors. Defaults to None and errors will not be recovered
unless passed through stdout and stderr before shutdown.
Attributes:
stdout (StringIO): File like string buffer that stdout from target will
be written to.
stderr (StringIO): File like string buffer that stderr from target will
be written to.
target (func): Matlab function that should be called.
args (list, tuple): Arguments that should be passed to target.
kwargs (dict): Keyword arguments that should be passed to target.
future (MatlabFutureResult): Future result from async function. This
will be None until start is called.
matlab_engine (MatlabEngine): MatlabEngine that should be used to get
errors.
Raises:
RuntimeError: If Matlab is not installed.
"""
def __init__(self, target, args, kwargs=None, name=None, matlab_engine=None):
if not _matlab_engine_installed: # pragma: no matlab
raise RuntimeError("Matlab engine is not installed.")
if kwargs is None:
kwargs = {}
self.stdout = backwards.sio.StringIO()
self.stderr = backwards.sio.StringIO()
self._stdout_line = None
self._stderr_line = None
self.target = target
self.args = args
self.kwargs = kwargs
self.kwargs.update(nargout=0, stdout=self.stdout, stderr=self.stderr)
self.kwargs['async'] = True # For python 3.7 where async is reserved
self.future = None
self.matlab_engine = matlab_engine
self._returncode = None
super(MatlabProcess, self).__init__(name)
def poll(self, *args, **kwargs):
r"""Fake poll."""
return self.returncode
@property
def stdout_line(self):
r"""str: Output to stdout from function call."""
if self._stdout_line is None:
if self.stdout is not None:
line = self.stdout.getvalue()
if line:
self._stdout_line = line
return self._stdout_line
@property
def stderr_line(self):
r"""str: Output to stderr from function call."""
if self._stderr_line is None:
if self.stderr is not None:
line = self.stderr.getvalue()
if line:
self._stderr_line = line
return self._stderr_line
def print_output(self):
r"""Print output from stdout and stderr."""
if self.stdout_line:
self.print_encoded(self.stdout_line, end="")
if self.stderr_line:
self.print_encoded(self.stderr_line, end="")
def start(self):
r"""Start asychronous call."""
self.future = self.target(*self.args, **self.kwargs)
def is_started(self):
r"""bool: Has start been called."""
return (self.future is not None)
def is_cancelled(self):
r"""bool: Was the async call cancelled or not."""
if self.is_started():
try:
return self.future.cancelled()
except matlab.engine.EngineError:
self.on_matlab_error()
return True
except BaseException:
return True
return False
def is_done(self):
r"""bool: Is the async call still running."""
if self.is_started():
try:
return self.future.done() or self.is_cancelled()
except matlab.engine.EngineError:
self.on_matlab_error()
return True
except BaseException:
return True
return False
def is_alive(self):
r"""bool: Is the async call funning."""
if self.is_started():
return (not self.is_done())
return False
@property
def returncode(self):
r"""int: Return code."""
if self.is_done():
if self.stderr_line: # or self.is_cancelled():
return -1
else:
return 0
else:
return self._returncode
def kill(self, *args, **kwargs):
r"""Cancel the async call."""
if self.is_alive():
try:
out = self.future.cancel()
self.debug("Result of cancelling Matlab call?: %s", out)
except matlab.engine.EngineError as e:
self.debug('Matlab Engine Error: %s' % e)
self.on_matlab_error()
except BaseException as e:
self.debug('Other error on kill: %s' % e)
self.print_output()
if self.is_alive():
self.info('Error killing Matlab script.')
self.matlab_engine.quit()
self.future = None
self._returncode = -1
assert(not self.is_alive())
def on_matlab_error(self):
r"""Actions performed on error in Matlab engine."""
# self.print_output()
self.debug('')
if self.matlab_engine is not None:
try:
self.matlab_engine.eval('exception = MException.last;', nargout=0)
self.matlab_engine.eval('getReport(exception)')
except matlab.engine.EngineError:
pass
class MatlabModelDriver(InterpretedModelDriver): # pragma: matlab
r"""Base class for running Matlab models.
Args:
name (str): Driver name.
args (str or list): Argument(s) for running the model in matlab.
Generally, this should be the full path to a Matlab script.
**kwargs: Additional keyword arguments are passed to parent class's
__init__ method.
Attributes:
started_matlab (bool): True if the driver had to start a new matlab
engine. False otherwise.
screen_session (str): Screen session that Matlab was started in.
mlengine (object): Matlab engine used to run script.
mlsession (str): Name of the Matlab session that was started.
Raises:
RuntimeError: If Matlab is not installed.
.. note:: Matlab models that call exit will shut down the shared engine.
"""
_schema_subtype_description = ('Model is written in Matlab.')
language = 'matlab'
language_ext = '.m'
base_languages = ['python']
default_interpreter_flags = ['-nodisplay', '-nosplash', '-nodesktop',
'-nojvm', '-batch']
version_flags = ["fprintf('R%s', version('-release')); exit();"]
path_env_variable = 'MATLABPATH'
comm_linger = (os.environ.get('YGG_MATLAB_ENGINE', '').lower() == 'true')
send_converters = {'pandas': serialize.consolidate_array,
'table': serialize.consolidate_array}
recv_converters = {'pandas': 'array'}
type_map = {
'int': 'intX',
'float': 'single, double',
'string': 'char',
'array': 'cell',
'object': 'containers.Map',
'boolean': 'logical',
'null': 'NaN',
'uint': 'uintX',
'complex': 'complex',
'bytes': 'char (utf-8)',
'unicode': 'char',
'1darray': 'mat',
'ndarray': 'mat',
'ply': 'containers.Map',
'obj': 'containers.Map',
'schema': 'containers.Map'}
function_param = {
'input': '{channel} = YggInterface(\'YggInput\', \'{channel_name}\');',
'output': '{channel} = YggInterface(\'YggOutput\', \'{channel_name}\');',
'recv': '[{flag_var}, {recv_var}] = {channel}.recv();',
'send': '{flag_var} = {channel}.send({send_var});',
'function_call': '{output_var} = {function_name}({input_var});',
'define': '{variable} = {value};',
'comment': '%',
'true': 'true',
'not': 'not',
'indent': 2 * ' ',
'quote': '\'',
'print': 'disp(\'{message}\');',
'fprintf': 'fprintf(\'{message}\', {variables});',
'error': 'error(\'{error_msg}\');',
'block_end': 'end;',
'if_begin': 'if ({cond})',
'for_begin': 'for {iter_var} = {iter_begin}:{iter_end}',
'while_begin': 'while ({cond})',
'break': 'break;',
'try_begin': 'try',
'try_except': 'catch {error_var}',
'assign': '{name} = {value};'}
def __init__(self, name, args, **kwargs):
self.using_matlab_engine = _matlab_engine_installed
if self.using_matlab_engine:
kwargs['skip_interpreter'] = True
self.model_wrapper = None
super(MatlabModelDriver, self).__init__(name, args, **kwargs)
self.started_matlab = False
self.screen_session = None
self.mlengine = None
self.mlsession = None
self.mlprocess = None
def parse_arguments(self, args):
r"""Sort model arguments to determine which one is the executable
and which ones are arguments.
Args:
args (list): List of arguments provided.
"""
super(MatlabModelDriver, self).parse_arguments(args)
model_base, model_ext = os.path.splitext(os.path.basename(self.model_file))
wrap_base = 'wrapped_%s_%s' % (model_base, self.uuid.replace('-', '_'))
# Matlab has a variable name limit of 62
wrap_base = wrap_base[:min(len(wrap_base), 60)]
self.model_wrapper = os.path.join(self.model_dir, wrap_base + model_ext)
self.wrapper_products.append(self.model_wrapper)
@classmethod
def write_error_wrapper(cls, fname, try_lines, matlab_engine=None):
r"""Write a wrapper for the model that encloses it in a try except so
that the error can be propagated appropriately.
Args:
fname (str): File where the wrapper should be written.
try_lines (list): List of lines to go in the try block.
            matlab_engine (MatlabEngine, optional): Matlab engine that will be
                used to call the wrapper. If not provided, it is assumed the
                wrapper will be run by the Matlab interpreter on the command
                line. Defaults to None.
        """
# Create lines based on use of engine or not
if matlab_engine is not None:
catch_block = ["error(e.message);"]
else:
catch_block = ["rethrow(e);"]
# catch_block = ["fprintf('MATLAB ERROR:\\n%s\\n', e.message);",
# "disp(e.identifier);",
# "disp(e.stack);",
# "exit(0);"]
lines = cls.write_try_except(try_lines, catch_block)
if matlab_engine is None:
lines.append("exit(0);")
# Write lines
logger.debug('Wrapper:\n\t%s', '\n\t'.join(lines))
if fname is None:
return lines
else:
if os.path.isfile(fname): # pragma: debug
os.remove(fname)
with open(fname, 'w') as fd:
fd.write('\n'.join(lines))
logger.debug("Wrote wrapper to: %s" % fname)
@classmethod
def run_executable(cls, args, dont_wrap_error=False, fname_wrapper=None,
matlab_engine=None, **kwargs):
r"""Run a program using the executable for this language and the
provided arguments.
Args:
args (list): The program that should be run and any arguments
that should be provided to it.
dont_wrap_error (bool, optional): If False, the executable will be
wrapped in a try/catch block to prevent errors from stopping
Matlab shutdown. If True, the command will be executed as is
with the Matlab interpreter. Defaults to False.
fname_wrapper (str, optional): File where wrapper should be saved.
If not provided, one is created. Defaults to None.
matlab_engine (MatlabEngine, optional): Matlab engine that should be
used to run the command. If not provided, the Matlab interpreter
is used instead. Defaults to None.
**kwargs: Additional keyword arguments are passed to
cls.executable_command and tools.popen_nobuffer.
Returns:
str: Output to stdout from the run command.
Raises:
RuntimeError: If the language is not installed.
RuntimeError: If there is an error when running the command.
"""
# Strip file if first argument is a file
if os.path.isfile(args[0]):
kwargs.setdefault('working_dir', os.path.dirname(args[0]))
args = [os.path.splitext(os.path.basename(args[0]))[0]] + args[1:]
# Write wrapper
if (not dont_wrap_error) and (len(args) > 0):
if len(args) == 1:
# TODO: Will this work if there is a function defined in the
# script?
try_block = [args[0]]
if not try_block[0].endswith(';'):
try_block[0] += ';'
else:
# Put quotes around arguments since they would be strings when
# passed from the command line
func_call = "%s('%s'" % (args[0], args[1])
for a in args[2:]:
func_call += (", '%s'" % a)
func_call += ');'
try_block = [func_call]
if fname_wrapper is None:
fname_wrapper = 'wrapper_%s%s' % (str(uuid_gen.uuid4()),
cls.language_ext[0])
fname_wrapper = fname_wrapper.replace('-', '_')
working_dir = kwargs.get('working_dir', kwargs.get('cwd', None))
if working_dir is not None:
fname_wrapper = os.path.join(working_dir, fname_wrapper)
cls.write_error_wrapper(fname_wrapper, try_block,
matlab_engine=matlab_engine)
assert(os.path.isfile(fname_wrapper))
args = [os.path.splitext(os.path.basename(fname_wrapper))[0]]
# Call base, catching error to remove temp wrapper
try:
if matlab_engine is None:
kwargs['for_matlab'] = True
out = super(MatlabModelDriver, cls).run_executable(args, **kwargs)
else:
if kwargs.get('debug_flags', None): # pragma: debug
logger.warn("Debugging via valgrind, strace, etc. disabled "
"for Matlab when using a Matlab shared engine.")
assert(kwargs.get('return_process', False))
# Add environment variables
env = kwargs.get('env', {})
old_env = {}
new_env_str = ''
for k, v in env.items():
old_env[k] = matlab_engine.getenv(k)
matlab_engine.setenv(k, v, nargout=0)
new_env_str += "'%s', %s, " % (k, repr(v))
matlab_engine.eval('new_env = py.dict(pyargs(%s));'
% new_env_str[:-2], nargout=0)
matlab_engine.eval('os.environ.update(new_env);', nargout=0)
# Create matlab process using Matlab engine
out = MatlabProcess(name=args[0] + '.MatlabProcess',
target=getattr(matlab_engine, args[0]),
args=args[1:], matlab_engine=matlab_engine)
out.start()
finally:
if (((not kwargs.get('return_process', False))
and (fname_wrapper is not None))):
os.remove(fname_wrapper)
return out
@classmethod
def language_version(cls):
r"""Determine the version of this language.
Returns:
str: Version of compiler/interpreter for this language.
"""
return cls.get_matlab_info()[1]
@classmethod
def executable_command(cls, args, **kwargs):
r"""Compose a command for running a program in this language with the
provied arguments. If not already present, the interpreter command and
interpreter flags are prepended to the provided arguments.
Args:
args (list): The program that returned command should run and any
arguments that should be provided to it.
**kwargs: Additional keyword arguments are ignored.
Returns:
list: Arguments composing the command required to run the program
from the command line using the interpreter for this language.
"""
# if kwargs.get('exec_type', 'interpreter') == 'interpreter':
# args = ["\"%s\"" % (' '.join(args))]
return super(MatlabModelDriver, cls).executable_command(args, **kwargs)
@classmethod
def configure(cls, cfg):
r"""Add configuration options for this language. This includes locating
any required external libraries and setting option defaults.
Args:
cfg (YggConfigParser): Config class that options should be set for.
Returns:
list: Section, option, description tuples for options that could not
be set.
"""
out = InterpretedModelDriver.configure.__func__(cls, cfg)
opts = {
'startup_waittime_s': [('The time allowed for a Matlab engine to start'
'before timing out and reporting an error.'),
'10'],
'version': ['The version (release number) of installed Matlab.', ''],
'matlabroot': ['The path to the default installation of matlab.', '']}
if cfg.get(cls.language, 'disable', 'False').lower() != 'true':
try:
opts['matlabroot'][1], opts['version'][1] = cls.get_matlab_info()
except RuntimeError: # pragma: no matlab
pass
for k in opts.keys():
if not cfg.has_option(cls.language, k):
if opts[k][1]: # pragma: matlab
cfg.set(cls.language, k, opts[k][1])
else:
out.append((cls.language, k, opts[k][0]))
return out
@classmethod
def get_matlab_info(cls): # pragma: matlab
r"""Determine the root directory where Matlab is installed and the version
that is installed (if Matlab is installed at all). This will fail if Matlab
is not installed, cannot be started, or does not operate as expected.
Returns:
tuple: Matlab root directory and Matlab version string.
Raises:
RuntimeError: If Matlab cannot be started or the root directory or
                release cannot be determined.
"""
mtl_id = '=MATLABROOT='
cmd = ("fprintf('" + mtl_id + "%s" + mtl_id + "R%s" + mtl_id + "'"
+ ",matlabroot,version('-release'));")
mtl_proc = cls.run_executable([cmd])
mtl_id = backwards.match_stype(mtl_proc, mtl_id)
if mtl_id not in mtl_proc: # pragma: debug
raise RuntimeError(("Could not locate ID string (%s) in "
"output (%s).") % (mtl_id, mtl_proc))
parts = mtl_proc.split(mtl_id)
if len(parts) < 3: # pragma: debug
raise RuntimeError(("Could not get matlabroot/version from "
"output (%s).") % (mtl_proc))
matlabroot = backwards.as_str(parts[-3])
release = backwards.as_str(parts[-2])
return matlabroot, release
def start_matlab_engine(self):
r"""Start matlab session and connect to it."""
ml_attr = ['screen_session', 'mlengine', 'mlsession', 'mlprocess']
attempt_connect = (len(matlab.engine.find_matlab()) != 0)
# Connect to matlab if a session exists
if attempt_connect:
for mlsession in matlab.engine.find_matlab():
try:
self.debug("Trying to connect to session %s", mlsession)
self.mlengine = connect_matlab_engine(mlsession)
self.mlsession = mlsession
self.debug("Connected to existing shared engine: %s",
self.mlsession)
break
except matlab.engine.EngineError:
pass
# Start if not running or connect failed
if self.mlengine is None:
if attempt_connect:
self.debug("Starting a matlab shared engine (connect failed)")
else:
self.debug("Starting a matlab shared engine (none existing)")
out = start_matlab_engine()
for i, attr in enumerate(ml_attr):
setattr(self, attr, out[i])
self.started_matlab = True
# Add things to Matlab environment
self.mlengine.addpath(self.model_dir, nargout=0)
self.debug("Connected to matlab session '%s'" % self.mlsession)
def before_start(self):
r"""Actions to perform before the run loop."""
kwargs = dict(fname_wrapper=self.model_wrapper)
if self.using_matlab_engine:
self.start_matlab_engine()
kwargs.update(matlab_engine=self.mlengine,
no_queue_thread=True)
else:
kwargs.update(working_dir=self.model_dir)
with self.lock:
if self.using_matlab_engine and (self.mlengine is None): # pragma: debug
self.debug('Matlab engine not set. Stopping')
return
super(MatlabModelDriver, self).before_start(**kwargs)
def run_loop(self):
r"""Loop to check if model is still running and forward output."""
if self.using_matlab_engine:
self.model_process.print_output()
self.periodic_debug('matlab loop', period=100)('Looping')
if self.model_process.is_done():
self.model_process.print_output()
self.set_break_flag()
try:
self.model_process.future.result()
self.model_process.print_output()
except matlab.engine.EngineError:
self.model_process.print_output()
except BaseException:
self.model_process.print_output()
self.exception("Error running model.")
else:
self.sleep()
else:
super(MatlabModelDriver, self).run_loop()
def after_loop(self):
r"""Actions to perform after run_loop has finished. Mainly checking
if there was an error and then handling it."""
if self.using_matlab_engine:
if (self.model_process is not None) and self.model_process.is_alive():
self.info("Model process thread still alive")
self.kill_process()
return
super(MatlabModelDriver, self).after_loop()
if self.using_matlab_engine:
with self.lock:
self.cleanup()
def cleanup(self):
r"""Close the Matlab session and engine."""
if self.using_matlab_engine:
try:
stop_matlab_engine(self.screen_session, self.mlengine,
self.mlsession, self.mlprocess,
keep_engine=(not self.started_matlab))
except (SystemError, Exception) as e: # pragma: debug
self.error('Failed to exit matlab engine')
self.raise_error(e)
self.debug('Stopped Matlab')
self.screen_session = None
self.mlsession = None
self.started_matlab = False
self.mlengine = None
self.mlprocess = None
super(MatlabModelDriver, self).cleanup()
    def check_exits(self):
        r"""Check to make sure the program doesn't contain any exits, as exits
will shut down the Matlab engine as well as the program.
Raises:
RuntimeError: If there are any exit calls in the file.
"""
has_exit = False
with open(self.raw_model_file, 'r') as fd:
for i, line in enumerate(fd):
if line.strip().startswith('exit'):
has_exit = True
break
if self.using_matlab_engine and has_exit:
warnings.warn(
"Line %d in '%s' contains an " % (
i, self.raw_model_file)
+ "'exit' call which will exit the MATLAB engine "
+ "such that it cannot be reused. Please replace 'exit' "
+ "with a return or error.")
def set_env(self):
r"""Get environment variables that should be set for the model process.
Returns:
dict: Environment variables for the model process.
"""
out = super(MatlabModelDriver, self).set_env()
if self.using_matlab_engine:
out['YGG_MATLAB_ENGINE'] = 'True'
# TODO: Move the following to InterpretedModelDriver once another
# language sets path_env_variable
path_list = []
prev_path = out.pop(self.path_env_variable, '')
if prev_path:
path_list.append(prev_path)
if isinstance(self.paths_to_add, list):
for x in self.paths_to_add:
if x not in prev_path:
path_list.append(x)
path_list.append(self.model_dir)
if path_list:
out[self.path_env_variable] = os.pathsep.join(path_list)
return out
@classmethod
def comm_atexit(cls, comm):
r"""Operations performed on comm at exit including draining receive.
Args:
comm (CommBase): Communication object.
"""
if comm.direction == 'recv':
while comm.recv(timeout=0)[0]:
comm.sleep()
else:
comm.send_eof()
comm.linger_close()
@classmethod
def decode_format(cls, format_str):
r"""Method for decoding format strings created in this language.
Args:
format_str (str): Encoded format string.
Returns:
str: Decoded format string.
"""
return backwards.decode_escape(format_str)
@classmethod
def prepare_output_variables(cls, vars_list):
r"""Concatenate a set of output variables such that it can be passed as
a single string to the function_call parameter.
Args:
vars_list (list): List of variable names to concatenate as output
from a function call.
Returns:
            str: Concatenated variables list.
"""
out = super(MatlabModelDriver, cls).prepare_output_variables(vars_list)
if isinstance(vars_list, list) and (len(vars_list) > 1):
out = '[%s]' % out
return out
| [((416, 443), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (433, 443), False, 'import logging\n'), ((1037, 1063), 'yggdrasil.languages.get_language_dir', 'get_language_dir', (['"""matlab"""'], {}), "('matlab')\n", (1053, 1063), False, 'from yggdrasil.languages import get_language_dir\n'), ((1724, 1745), 'psutil.process_iter', 'psutil.process_iter', ([], {}), '()\n', (1743, 1745), False, 'import psutil\n'), ((6078, 6098), 'yggdrasil.backwards.StringIO', 'backwards.StringIO', ([], {}), '()\n', (6096, 6098), False, 'from yggdrasil import backwards, tools, platform, serialize\n'), ((1376, 1433), 'os.system', 'os.system', (['"""taskkill /F /IM matlab.engine.shareEngine /T"""'], {}), "('taskkill /F /IM matlab.engine.shareEngine /T')\n", (1385, 1433), False, 'import os\n'), ((1454, 1501), 'os.system', 'os.system', (['"""pkill -f matlab.engine.shareEngine"""'], {}), "('pkill -f matlab.engine.shareEngine')\n", (1463, 1501), False, 'import os\n'), ((2749, 2802), 'os.path.join', 'os.path.join', (['mtl_root', '"""extern"""', '"""engines"""', '"""python"""'], {}), "(mtl_root, 'extern', 'engines', 'python')\n", (2761, 2802), False, 'import os\n'), ((2860, 2903), 'subprocess.check_output', 'subprocess.check_output', (['cmd'], {'cwd': 'mtl_setup'}), '(cmd, cwd=mtl_setup)\n', (2883, 2903), False, 'import subprocess\n'), ((4502, 4518), 'yggdrasil.tools.TimeOut', 'TimeOut', (['timeout'], {}), '(timeout)\n', (4509, 4518), False, 'from yggdrasil.tools import TimeOut, sleep\n'), ((7960, 7994), 'weakref.getweakrefs', 'weakref.getweakrefs', (['matlab_engine'], {}), '(matlab_engine)\n', (7979, 7994), False, 'import weakref\n'), ((8682, 8692), 'yggdrasil.tools.TimeOut', 'TimeOut', (['(5)'], {}), '(5)\n', (8689, 8692), False, 'from yggdrasil.tools import TimeOut, sleep\n'), ((10931, 10955), 'yggdrasil.backwards.sio.StringIO', 'backwards.sio.StringIO', ([], {}), '()\n', (10953, 10955), False, 'from yggdrasil import backwards, tools, platform, serialize\n'), ((10978, 11002), 'yggdrasil.backwards.sio.StringIO', 'backwards.sio.StringIO', ([], {}), '()\n', (11000, 11002), False, 'from yggdrasil import backwards, tools, platform, serialize\n'), ((19103, 19154), 'os.path.join', 'os.path.join', (['self.model_dir', '(wrap_base + model_ext)'], {}), '(self.model_dir, wrap_base + model_ext)\n', (19115, 19154), False, 'import os\n'), ((22362, 22385), 'os.path.isfile', 'os.path.isfile', (['args[0]'], {}), '(args[0])\n', (22376, 22385), False, 'import os\n'), ((27159, 27210), 'yggdrasil.drivers.InterpretedModelDriver.InterpretedModelDriver.configure.__func__', 'InterpretedModelDriver.configure.__func__', (['cls', 'cfg'], {}), '(cls, cfg)\n', (27200, 27210), False, 'from yggdrasil.drivers.InterpretedModelDriver import InterpretedModelDriver\n'), ((28913, 28952), 'yggdrasil.backwards.match_stype', 'backwards.match_stype', (['mtl_proc', 'mtl_id'], {}), '(mtl_proc, mtl_id)\n', (28934, 28952), False, 'from yggdrasil import backwards, tools, platform, serialize\n'), ((29384, 29411), 'yggdrasil.backwards.as_str', 'backwards.as_str', (['parts[-3]'], {}), '(parts[-3])\n', (29400, 29411), False, 'from yggdrasil import backwards, tools, platform, serialize\n'), ((29430, 29457), 'yggdrasil.backwards.as_str', 'backwards.as_str', (['parts[-2]'], {}), '(parts[-2])\n', (29446, 29457), False, 'from yggdrasil import backwards, tools, platform, serialize\n'), ((36236, 36271), 'yggdrasil.backwards.decode_escape', 'backwards.decode_escape', (['format_str'], {}), '(format_str)\n', (36259, 36271), False, 'from 
yggdrasil import backwards, tools, platform, serialize\n'), ((488, 536), 'yggdrasil.config.ygg_cfg.get', 'ygg_cfg.get', (['"""matlab"""', '"""disable_engine"""', '"""False"""'], {}), "('matlab', 'disable_engine', 'False')\n", (499, 536), False, 'from yggdrasil.config import ygg_cfg\n'), ((656, 677), 'yggdrasil.tools.is_subprocess', 'tools.is_subprocess', ([], {}), '()\n', (675, 677), False, 'from yggdrasil import backwards, tools, platform, serialize\n'), ((3892, 3939), 'yggdrasil.config.ygg_cfg.get', 'ygg_cfg.get', (['"""matlab"""', '"""startup_waittime_s"""', '(10)'], {}), "('matlab', 'startup_waittime_s', 10)\n", (3903, 3939), False, 'from yggdrasil.config import ygg_cfg\n'), ((4260, 4306), 'os.path.join', 'os.path.join', (['_top_lang_dir', '"""matlab_screenrc"""'], {}), "(_top_lang_dir, 'matlab_screenrc')\n", (4272, 4306), False, 'import os\n'), ((4701, 4709), 'yggdrasil.tools.sleep', 'sleep', (['(1)'], {}), '(1)\n', (4706, 4709), False, 'from yggdrasil.tools import TimeOut, sleep\n'), ((8617, 8667), 'os.system', 'os.system', (["('screen -X -S %s quit' % screen_session)"], {}), "('screen -X -S %s quit' % screen_session)\n", (8626, 8667), False, 'import os\n'), ((8864, 8872), 'yggdrasil.tools.sleep', 'sleep', (['(1)'], {}), '(1)\n', (8869, 8872), False, 'from yggdrasil.tools import TimeOut, sleep\n'), ((18854, 18887), 'os.path.basename', 'os.path.basename', (['self.model_file'], {}), '(self.model_file)\n', (18870, 18887), False, 'import os\n'), ((20671, 20692), 'os.path.isfile', 'os.path.isfile', (['fname'], {}), '(fname)\n', (20685, 20692), False, 'import os\n'), ((23832, 23861), 'os.path.isfile', 'os.path.isfile', (['fname_wrapper'], {}), '(fname_wrapper)\n', (23846, 23861), False, 'import os\n'), ((34343, 34564), 'warnings.warn', 'warnings.warn', (['("Line %d in \'%s\' contains an " % (i, self.raw_model_file) +\n "\'exit\' call which will exit the MATLAB engine " +\n "such that it cannot be reused. Please replace \'exit\' " +\n \'with a return or error.\')'], {}), '("Line %d in \'%s\' contains an " % (i, self.raw_model_file) +\n "\'exit\' call which will exit the MATLAB engine " +\n "such that it cannot be reused. 
Please replace \'exit\' " +\n \'with a return or error.\')\n', (34356, 34564), False, 'import warnings\n'), ((35504, 35530), 'os.pathsep.join', 'os.pathsep.join', (['path_list'], {}), '(path_list)\n', (35519, 35530), False, 'import os\n'), ((16355, 16394), 'os.environ.get', 'os.environ.get', (['"""YGG_MATLAB_ENGINE"""', '""""""'], {}), "('YGG_MATLAB_ENGINE', '')\n", (16369, 16394), False, 'import os\n'), ((20727, 20743), 'os.remove', 'os.remove', (['fname'], {}), '(fname)\n', (20736, 20743), False, 'import os\n'), ((22432, 22456), 'os.path.dirname', 'os.path.dirname', (['args[0]'], {}), '(args[0])\n', (22447, 22456), False, 'import os\n'), ((25520, 25544), 'os.remove', 'os.remove', (['fname_wrapper'], {}), '(fname_wrapper)\n', (25529, 25544), False, 'import os\n'), ((23645, 23685), 'os.path.join', 'os.path.join', (['working_dir', 'fname_wrapper'], {}), '(working_dir, fname_wrapper)\n', (23657, 23685), False, 'import os\n'), ((4087, 4103), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (4101, 4103), False, 'from datetime import datetime\n'), ((23900, 23931), 'os.path.basename', 'os.path.basename', (['fname_wrapper'], {}), '(fname_wrapper)\n', (23916, 23931), False, 'import os\n'), ((22495, 22520), 'os.path.basename', 'os.path.basename', (['args[0]'], {}), '(args[0])\n', (22511, 22520), False, 'import os\n'), ((23330, 23346), 'uuid.uuid4', 'uuid_gen.uuid4', ([], {}), '()\n', (23344, 23346), True, 'import uuid as uuid_gen\n')] |
SACGF/variantgrid | analysis/migrations/0032_auto_20210409_1333.py | 515195e2f03a0da3a3e5f2919d8e0431babfd9c9 | # Generated by Django 3.1.3 on 2021-04-09 04:03
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('snpdb', '0030_one_off_fix_cohort_sample_order'),
('analysis', '0031_auto_20210331_1826'),
]
operations = [
migrations.AddField(
model_name='varianttag',
name='genome_build',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='snpdb.genomebuild'),
),
migrations.AddField(
model_name='varianttag',
name='location',
field=models.CharField(choices=[('A', 'Analysis'), ('E', 'External Import'), ('G', 'Gene Page'), ('V', 'Variant Details')], default='A', max_length=1),
),
migrations.AlterField(
model_name='varianttag',
name='analysis',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analysis.analysis'),
),
]
| [((437, 538), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""snpdb.genomebuild"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='snpdb.genomebuild')\n", (454, 538), False, 'from django.db import migrations, models\n'), ((660, 808), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('A', 'Analysis'), ('E', 'External Import'), ('G', 'Gene Page'), ('V',\n 'Variant Details')]", 'default': '"""A"""', 'max_length': '(1)'}), "(choices=[('A', 'Analysis'), ('E', 'External Import'), ('G',\n 'Gene Page'), ('V', 'Variant Details')], default='A', max_length=1)\n", (676, 808), False, 'from django.db import migrations, models\n'), ((932, 1034), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""analysis.analysis"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to='analysis.analysis')\n", (949, 1034), False, 'from django.db import migrations, models\n')] |
meaningfy-ws/ted-sws | ted_sws/mapping_suite_processor/services/conceptual_mapping_generate_sparql_queries.py | d1e351eacb2900f84ec7edc457e49d8202fbaff5 | import pathlib
from typing import Iterator
import pandas as pd
from ted_sws.resources.prefixes import PREFIXES_DEFINITIONS
import re
CONCEPTUAL_MAPPINGS_RULES_SHEET_NAME = "Rules"
RULES_SF_FIELD_ID = 'Standard Form Field ID (M)'
RULES_SF_FIELD_NAME = 'Standard Form Field Name (M)'
RULES_E_FORM_BT_ID = 'eForm BT-ID (O)'
RULES_E_FORM_BT_NAME = 'eForm BT Name (O)'
RULES_BASE_XPATH = 'Base XPath (for anchoring) (M)'
RULES_FIELD_XPATH = 'Field XPath (M)'
RULES_CLASS_PATH = 'Class path (M)'
RULES_PROPERTY_PATH = 'Property path (M)'
DEFAULT_RQ_NAME = 'sparql_query_'
SPARQL_PREFIX_PATTERN = re.compile('(?:\\s+|^)(\\w+)?:')
SPARQL_PREFIX_LINE = 'PREFIX {prefix}: <{value}>'
def get_sparql_prefixes(sparql_q: str) -> set:
finds: list = re.findall(SPARQL_PREFIX_PATTERN, sparql_q)
return set(finds)
def sparql_validation_generator(data: pd.DataFrame) -> Iterator[str]:
"""
This function generates SPARQL queries based on data in the dataframe.
:param data:
:return:
"""
for index, row in data.iterrows():
sf_field_id = row[RULES_SF_FIELD_ID]
sf_field_name = row[RULES_SF_FIELD_NAME]
e_form_bt_id = row[RULES_E_FORM_BT_ID]
e_form_bt_name = row[RULES_E_FORM_BT_NAME]
base_xpath = row[RULES_BASE_XPATH]
field_xpath = row[RULES_FIELD_XPATH]
class_path = row[RULES_CLASS_PATH]
property_path = row[RULES_PROPERTY_PATH]
prefixes = [SPARQL_PREFIX_LINE.format(
prefix=prefix, value=PREFIXES_DEFINITIONS.get(prefix)
) for prefix in get_sparql_prefixes(property_path)]
yield f"#title: {sf_field_id} - {sf_field_name}\n" \
f"#description: “{sf_field_id} - {sf_field_name}” in SF corresponds to “{e_form_bt_id} {e_form_bt_name}” in eForms. The corresponding XML element is {base_xpath}{field_xpath}. The expected ontology instances are epo: {class_path} .\n" \
"\n" + "\n".join(prefixes) + "\n\n" \
f"ASK WHERE {{ {property_path} }}"
def mapping_suite_processor_generate_sparql_queries(conceptual_mappings_file_path: pathlib.Path,
output_sparql_queries_folder_path: pathlib.Path,
rq_name: str = DEFAULT_RQ_NAME):
"""
        This function reads data from conceptual_mappings.xlsx and generates SPARQL validation queries in the provided package.
:param conceptual_mappings_file_path:
:param output_sparql_queries_folder_path:
:param rq_name:
:return:
"""
with open(conceptual_mappings_file_path, 'rb') as excel_file:
conceptual_mappings_rules_df = pd.read_excel(excel_file, sheet_name=CONCEPTUAL_MAPPINGS_RULES_SHEET_NAME)
conceptual_mappings_rules_df.columns = conceptual_mappings_rules_df.iloc[0]
conceptual_mappings_rules_df = conceptual_mappings_rules_df[1:]
conceptual_mappings_rules_df = conceptual_mappings_rules_df[
conceptual_mappings_rules_df[RULES_PROPERTY_PATH].notnull()]
sparql_queries = sparql_validation_generator(conceptual_mappings_rules_df)
output_sparql_queries_folder_path.mkdir(parents=True, exist_ok=True)
for index, sparql_query in enumerate(sparql_queries):
output_file_path = output_sparql_queries_folder_path / f"{rq_name}{index}.rq"
with open(output_file_path, "w") as output_file:
output_file.write(sparql_query)
| [((593, 625), 're.compile', 're.compile', (['"""(?:\\\\s+|^)(\\\\w+)?:"""'], {}), "('(?:\\\\s+|^)(\\\\w+)?:')\n", (603, 625), False, 'import re\n'), ((743, 786), 're.findall', 're.findall', (['SPARQL_PREFIX_PATTERN', 'sparql_q'], {}), '(SPARQL_PREFIX_PATTERN, sparql_q)\n', (753, 786), False, 'import re\n'), ((2683, 2757), 'pandas.read_excel', 'pd.read_excel', (['excel_file'], {'sheet_name': 'CONCEPTUAL_MAPPINGS_RULES_SHEET_NAME'}), '(excel_file, sheet_name=CONCEPTUAL_MAPPINGS_RULES_SHEET_NAME)\n', (2696, 2757), True, 'import pandas as pd\n'), ((1497, 1529), 'ted_sws.resources.prefixes.PREFIXES_DEFINITIONS.get', 'PREFIXES_DEFINITIONS.get', (['prefix'], {}), '(prefix)\n', (1521, 1529), False, 'from ted_sws.resources.prefixes import PREFIXES_DEFINITIONS\n')] |
codeKgu/BiLevel-Graph-Neural-Network | src/__init__.py | ed89c7d39baca757411cf333c595ac464e991a8e | import sys
from os.path import dirname, abspath, join
cur_folder = dirname(abspath(__file__))
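# Add the repository root and its src/ directory to sys.path so project-level imports resolve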
sys.path.insert(0, join(dirname(cur_folder), 'src'))
sys.path.insert(0, dirname(cur_folder))
print(cur_folder) | [((76, 93), 'os.path.abspath', 'abspath', (['__file__'], {}), '(__file__)\n', (83, 93), False, 'from os.path import dirname, abspath, join\n'), ((167, 186), 'os.path.dirname', 'dirname', (['cur_folder'], {}), '(cur_folder)\n', (174, 186), False, 'from os.path import dirname, abspath, join\n'), ((119, 138), 'os.path.dirname', 'dirname', (['cur_folder'], {}), '(cur_folder)\n', (126, 138), False, 'from os.path import dirname, abspath, join\n')] |
igormotta92/gta-desafio-python-flask-api | src/controllers/serie.py | 7c048239359e8a21d777109bdb0d58b6c2c18450 | # https://stackoverflow.com/questions/3300464/how-can-i-get-dict-from-sqlite-query
# from flask import Flask
from flask_restful import Resource, reqparse
from src.model.serie import SerieModel
from src.server.instance import server
from db import db
# books_db = [{"id": 0, "title": "War and Peace"}, {"id": 1, "title": "Clean Code"}]
api = server.api
class SeriesController(Resource):
@classmethod
def routes(self):
api.add_resource(Series, "/series/<int:id>")
api.add_resource(SeriesList, "/series")
class Series(Resource):
def get(self, id):
SerieModel.setConnectDataBase(db)
serie = SerieModel.find_by_id(id)
if not serie:
            return {}, 204
return serie
def put(self, id):
SerieModel.setConnectDataBase(db)
serie = SerieModel.find_by_id_build(id)
if not serie:
return None, 204
# __columns__ = ("title" str, "resume" str, "genre" str, "rating" int, "season" int)
parser = reqparse.RequestParser()
parser.add_argument(
"title", type=str, required=True, help="Title cannot be blank"
)
parser.add_argument(
"resume", type=str, required=True, help="Resume cannot be blank"
)
parser.add_argument(
"rating",
type=int,
choices=range(1, 6),
required=True,
            help="rating cannot be blank or out of range",
)
parser.add_argument(
"genre", type=str, required=True, help="Genre cannot be blank"
)
parser.add_argument(
"season", type=int, required=True, help="Season cannot be blank"
)
data = parser.parse_args()
# update
serie.title = data.title
serie.resume = data.resume
serie.genre = data.genre
serie.rating = data.rating
serie.season = data.season
try:
serie.update()
except Exception as error:
return {"Error": str(error)}, 400
return None, 200, {"Location": f"http://127.0.0.1:5000/series/{id}"}
def delete(self, id):
SerieModel.setConnectDataBase(db)
serie = SerieModel.find_by_id_build(id)
if not serie:
return {}, 204
serie.delete()
return serie.to_dict(), 200
class SeriesList(Resource):
def get(self):
SerieModel.setConnectDataBase(db)
try:
series = SerieModel.find_all()
except Exception as error:
return {"Error": str(error)}, 400
return series
def post(self):
SerieModel.setConnectDataBase(db)
###
# __columns__ = ("title" str, "resume" str, "genre" str, "rating" int, "season" int)
# request
parser = reqparse.RequestParser()
parser.add_argument(
"title", type=str, required=True, help="Title cannot be blank"
)
parser.add_argument(
"resume", type=str, required=True, help="Resume cannot be blank"
)
parser.add_argument(
"genre", type=str, required=True, help="Genre cannot be blank"
)
parser.add_argument(
"rating",
type=int,
required=True,
choices=range(1, 6),
            help="rating cannot be blank or out of range",
)
parser.add_argument(
"season", type=str, required=True, help="Season cannot be blank"
)
data = parser.parse_args()
###
serie = SerieModel().build(
data.title, data.resume, data.genre, data.rating, data.season
)
try:
lastid = serie.insert().lastrowid
except Exception as error:
return {"Error": str(error)}, 400
return None, 201, {"Location": f"http://127.0.0.1:5000/series/{lastid}"}
| [((588, 621), 'src.model.serie.SerieModel.setConnectDataBase', 'SerieModel.setConnectDataBase', (['db'], {}), '(db)\n', (617, 621), False, 'from src.model.serie import SerieModel\n'), ((638, 663), 'src.model.serie.SerieModel.find_by_id', 'SerieModel.find_by_id', (['id'], {}), '(id)\n', (659, 663), False, 'from src.model.serie import SerieModel\n'), ((772, 805), 'src.model.serie.SerieModel.setConnectDataBase', 'SerieModel.setConnectDataBase', (['db'], {}), '(db)\n', (801, 805), False, 'from src.model.serie import SerieModel\n'), ((822, 853), 'src.model.serie.SerieModel.find_by_id_build', 'SerieModel.find_by_id_build', (['id'], {}), '(id)\n', (849, 853), False, 'from src.model.serie import SerieModel\n'), ((1017, 1041), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {}), '()\n', (1039, 1041), False, 'from flask_restful import Resource, reqparse\n'), ((2165, 2198), 'src.model.serie.SerieModel.setConnectDataBase', 'SerieModel.setConnectDataBase', (['db'], {}), '(db)\n', (2194, 2198), False, 'from src.model.serie import SerieModel\n'), ((2215, 2246), 'src.model.serie.SerieModel.find_by_id_build', 'SerieModel.find_by_id_build', (['id'], {}), '(id)\n', (2242, 2246), False, 'from src.model.serie import SerieModel\n'), ((2413, 2446), 'src.model.serie.SerieModel.setConnectDataBase', 'SerieModel.setConnectDataBase', (['db'], {}), '(db)\n', (2442, 2446), False, 'from src.model.serie import SerieModel\n'), ((2635, 2668), 'src.model.serie.SerieModel.setConnectDataBase', 'SerieModel.setConnectDataBase', (['db'], {}), '(db)\n', (2664, 2668), False, 'from src.model.serie import SerieModel\n'), ((2811, 2835), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {}), '()\n', (2833, 2835), False, 'from flask_restful import Resource, reqparse\n'), ((2481, 2502), 'src.model.serie.SerieModel.find_all', 'SerieModel.find_all', ([], {}), '()\n', (2500, 2502), False, 'from src.model.serie import SerieModel\n'), ((3565, 3577), 'src.model.serie.SerieModel', 'SerieModel', ([], {}), '()\n', (3575, 3577), False, 'from src.model.serie import SerieModel\n')] |
hirnimeshrampuresoftware/python-tcod | tests/test_random.py | c82d60eaaf12e50b405d55df1026c1d00dd283b6 | import copy
import pickle
import tcod
def test_tcod_random() -> None:
rand = tcod.random.Random(tcod.random.COMPLEMENTARY_MULTIPLY_WITH_CARRY)
assert 0 <= rand.randint(0, 100) <= 100
assert 0 <= rand.uniform(0, 100) <= 100
rand.guass(0, 1)
rand.inverse_guass(0, 1)
def test_tcod_random_copy() -> None:
rand = tcod.random.Random(tcod.random.MERSENNE_TWISTER)
rand2 = copy.copy(rand)
assert rand.uniform(0, 1) == rand2.uniform(0, 1)
assert rand.uniform(0, 1) == rand2.uniform(0, 1)
assert rand.uniform(0, 1) == rand2.uniform(0, 1)
def test_tcod_random_pickle() -> None:
rand = tcod.random.Random(tcod.random.MERSENNE_TWISTER)
rand2 = pickle.loads(pickle.dumps(rand))
assert rand.uniform(0, 1) == rand2.uniform(0, 1)
assert rand.uniform(0, 1) == rand2.uniform(0, 1)
assert rand.uniform(0, 1) == rand2.uniform(0, 1)
| [((84, 149), 'tcod.random.Random', 'tcod.random.Random', (['tcod.random.COMPLEMENTARY_MULTIPLY_WITH_CARRY'], {}), '(tcod.random.COMPLEMENTARY_MULTIPLY_WITH_CARRY)\n', (102, 149), False, 'import tcod\n'), ((338, 386), 'tcod.random.Random', 'tcod.random.Random', (['tcod.random.MERSENNE_TWISTER'], {}), '(tcod.random.MERSENNE_TWISTER)\n', (356, 386), False, 'import tcod\n'), ((399, 414), 'copy.copy', 'copy.copy', (['rand'], {}), '(rand)\n', (408, 414), False, 'import copy\n'), ((626, 674), 'tcod.random.Random', 'tcod.random.Random', (['tcod.random.MERSENNE_TWISTER'], {}), '(tcod.random.MERSENNE_TWISTER)\n', (644, 674), False, 'import tcod\n'), ((700, 718), 'pickle.dumps', 'pickle.dumps', (['rand'], {}), '(rand)\n', (712, 718), False, 'import pickle\n')] |
rbanffy/Zope | src/Products/Five/viewlet/viewlet.py | ecf6770219052e7c7f8c9634ddf187a1e6280742 | ##############################################################################
#
# Copyright (c) 2006 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Viewlet.
"""
import os
import zope.viewlet.viewlet
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
class ViewletBase(zope.viewlet.viewlet.ViewletBase):
pass
class SimpleAttributeViewlet(zope.viewlet.viewlet.SimpleAttributeViewlet):
pass
class simple(zope.viewlet.viewlet.simple):
# We need to ensure that the proper __init__ is called.
__init__ = ViewletBase.__init__
def SimpleViewletClass(template, bases=(), attributes=None, name=''):
"""A function that can be used to generate a viewlet from a set of
information.
"""
# Create the base class hierarchy
bases += (simple, ViewletBase)
attrs = {'index': ViewPageTemplateFile(template),
'__name__': name}
if attributes:
attrs.update(attributes)
# Generate a derived view class.
class_ = type("SimpleViewletClass from %s" % template, bases, attrs)
return class_
class ResourceViewletBase(zope.viewlet.viewlet.ResourceViewletBase):
pass
def JavaScriptViewlet(path):
"""Create a viewlet that can simply insert a javascript link."""
src = os.path.join(os.path.dirname(__file__), 'javascript_viewlet.pt')
klass = type('JavaScriptViewlet',
(ResourceViewletBase, ViewletBase),
{'index': ViewPageTemplateFile(src), '_path': path})
return klass
class CSSResourceViewletBase(zope.viewlet.viewlet.CSSResourceViewletBase):
pass
def CSSViewlet(path, media="all", rel="stylesheet"):
"""Create a viewlet that can simply insert a javascript link."""
src = os.path.join(os.path.dirname(__file__), 'css_viewlet.pt')
klass = type('CSSViewlet',
(CSSResourceViewletBase, ViewletBase),
{'index': ViewPageTemplateFile(src),
'_path': path,
'_media': media,
'_rel': rel})
return klass
| [((1320, 1350), 'Products.Five.browser.pagetemplatefile.ViewPageTemplateFile', 'ViewPageTemplateFile', (['template'], {}), '(template)\n', (1340, 1350), False, 'from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile\n'), ((1768, 1793), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1783, 1793), False, 'import os\n'), ((2233, 2258), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2248, 2258), False, 'import os\n'), ((1939, 1964), 'Products.Five.browser.pagetemplatefile.ViewPageTemplateFile', 'ViewPageTemplateFile', (['src'], {}), '(src)\n', (1959, 1964), False, 'from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile\n'), ((2393, 2418), 'Products.Five.browser.pagetemplatefile.ViewPageTemplateFile', 'ViewPageTemplateFile', (['src'], {}), '(src)\n', (2413, 2418), False, 'from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile\n')] |
bptfreitas/Project-Euler | problema21.py | 02b3ef8f8e3754b886b266fcd5eee7fd00d97dde | #Let d(n) be defined as the sum of proper divisors of n (numbers less than n which divide evenly into n).
#If d(a) = b and d(b) = a, where a ≠ b, then a and b are an amicable pair and each of a and b are called amicable numbers.
#For example, the proper divisors of 220 are 1, 2, 4, 5, 10, 11, 20, 22, 44, 55 and 110; therefore d(220) = 284. The proper divisors of 284 are 1, 2, 4, 71 and 142; so d(284) = 220.
#Evaluate the sum of all the amicable numbers under 10000.
import euler
def d(n):
return sum(euler.get_divisors(n))
print euler.get_divisors(284)
print sum(euler.get_divisors(284))
limit=10000
perc=5
step=perc*limit/100
cp=0
a=1
amics=[]
print "Starting..."
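#Check each a below the limit for an amicable partner and record both members of the pair once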
for a in range(1,limit+1):
b=d(a)
if a==d(b) and a!=b:
print "Pair:" + str(a) + " and " + str(b)
if (a not in amics):
amics.append(a)
if (b not in amics):
amics.append(b)
print "Sum of amicables:"
print sum(amics)
| [] |
emacslisp/python | python-3.6.0/Doc/includes/email-unpack.py | 5b89ddcc504108f0dfa1081e338e6475cf6ccd2f | #!/usr/bin/env python3
"""Unpack a MIME message into a directory of files."""
import os
import email
import mimetypes
from email.policy import default
from argparse import ArgumentParser
def main():
parser = ArgumentParser(description="""\
Unpack a MIME message into a directory of files.
""")
parser.add_argument('-d', '--directory', required=True,
help="""Unpack the MIME message into the named
directory, which will be created if it doesn't already
exist.""")
parser.add_argument('msgfile')
args = parser.parse_args()
with open(args.msgfile, 'rb') as fp:
msg = email.message_from_binary_file(fp, policy=default)
try:
os.mkdir(args.directory)
except FileExistsError:
pass
counter = 1
for part in msg.walk():
# multipart/* are just containers
if part.get_content_maintype() == 'multipart':
continue
# Applications should really sanitize the given filename so that an
# email message can't be used to overwrite important files
filename = part.get_filename()
if not filename:
ext = mimetypes.guess_extension(part.get_content_type())
if not ext:
# Use a generic bag-of-bits extension
ext = '.bin'
filename = 'part-%03d%s' % (counter, ext)
counter += 1
with open(os.path.join(args.directory, filename), 'wb') as fp:
fp.write(part.get_payload(decode=True))
if __name__ == '__main__':
main()
| [((218, 303), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Unpack a MIME message into a directory of files.\n"""'}), "(description='Unpack a MIME message into a directory of files.\\n'\n )\n", (232, 303), False, 'from argparse import ArgumentParser\n'), ((671, 721), 'email.message_from_binary_file', 'email.message_from_binary_file', (['fp'], {'policy': 'default'}), '(fp, policy=default)\n', (701, 721), False, 'import email\n'), ((740, 764), 'os.mkdir', 'os.mkdir', (['args.directory'], {}), '(args.directory)\n', (748, 764), False, 'import os\n'), ((1445, 1483), 'os.path.join', 'os.path.join', (['args.directory', 'filename'], {}), '(args.directory, filename)\n', (1457, 1483), False, 'import os\n')] |
juliantrue/Streetview-Segmenting | src/streetview/logging_facility.py | 337740e6ebd2284c880ace09a11032c5914b39a4 | import sys, os
import logging
import datetime
module_name = 'Streetview_Module'
debug_mode = True
class LoggingWrapper(object):
def __init__(self, log_folder_path=None):
self.debug_mode = debug_mode
# Create logger with module name
logger = logging.getLogger(module_name)
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
now = datetime.datetime.now()
log_file = '{}{}{}{}{}{}.log'.format(now.year, now.month, now.day,
now.hour, now.minute,
now.second)
# If no folder provided, output to stderr
if log_folder_path == None:
fh = logging.StreamHandler(sys.stderr)
else:
log_file = os.path.join(log_folder_path, log_file)
fh = logging.FileHandler(log_file)
fh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
| [((273, 303), 'logging.getLogger', 'logging.getLogger', (['module_name'], {}), '(module_name)\n', (290, 303), False, 'import logging\n'), ((419, 442), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (440, 442), False, 'import datetime\n'), ((1020, 1043), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (1041, 1043), False, 'import logging\n'), ((1154, 1227), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (1171, 1227), False, 'import logging\n'), ((755, 788), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stderr'], {}), '(sys.stderr)\n', (776, 788), False, 'import logging\n'), ((826, 865), 'os.path.join', 'os.path.join', (['log_folder_path', 'log_file'], {}), '(log_folder_path, log_file)\n', (838, 865), False, 'import sys, os\n'), ((883, 912), 'logging.FileHandler', 'logging.FileHandler', (['log_file'], {}), '(log_file)\n', (902, 912), False, 'import logging\n')] |
DazEB2/SimplePyScripts | tkinter_examples/draw_chess_board.py | 1dde0a42ba93fe89609855d6db8af1c63b1ab7cc | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
from tkinter import *
root = Tk()
root.title('Chess board')
canvas = Canvas(root, width=700, height=700, bg='#fff')
canvas.pack()
fill = '#fff'
outline = '#000'
size = 88
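# Colours swap after every square; the extra swap after each column offsets the next column to form the checker pattern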
for i in range(8):
for j in range(8):
x1, y1, x2, y2 = i * size, j * size, i * size + size, j * size + size
canvas.create_rectangle(x1, y1, x2, y2, fill=fill, outline=outline)
fill, outline = outline, fill
fill, outline = outline, fill
root.mainloop()
| [] |
PremierLangage/sandbox-api | sandbox_api/asandbox.py | 7150ddcb92ac2304ff1d7b23571ec5e20459747b | # asandbox.py
#
# Authors:
# - Coumes Quentin <[email protected]>
"""An asynchronous implementation of the Sandbox API."""
import io
import json
import os
from contextlib import AbstractAsyncContextManager
from typing import BinaryIO, Optional, Union
import aiohttp
from .exceptions import status_exceptions
from .utils import ENDPOINTS
class ASandbox(AbstractAsyncContextManager):
"""Interface a Sandbox server asynchronously."""
def __init__(self, url: str, total: Optional[float] = 60, connect: Optional[float] = None,
sock_connect: Optional[float] = None, sock_read: Optional[float] = None):
"""Initialize a sandbox with the given URL.
Default timeout for the whole operation is one minute, use the following
argument to override :
* total : The whole operation time including connection
establishment, request sending and response reading.
        * connect : The time allowed for establishing a new
connection or waiting for a free connection from a pool if
pool connection limits are exceeded.
* sock_connect : A timeout for connecting to a peer for a new
connection, not given from a pool.
* sock_read : The maximum allowed timeout for period between reading
a new data portion from a peer.
"""
self.url = url
self.session = aiohttp.ClientSession(
timeout=aiohttp.ClientTimeout(total, connect, sock_connect, sock_read)
)
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.close()
async def close(self):
"""Close the aiohttp ClientSession."""
await self.session.close()
async def _build_url(self, endpoint: str, *args: str):
"""Build the url corresponding to <endpoint> with the given <args>."""
return os.path.join(self.url, ENDPOINTS[endpoint] % tuple(args))
async def libraries(self) -> dict:
"""Asynchronously retrieve libraries installed in the containers of the
sandbox."""
async with self.session.get(await self._build_url("libraries")) as response:
if response.status != 200:
raise status_exceptions(response)
return await response.json()
async def specifications(self) -> dict:
"""Asynchronously retrieve specifications of the sandbox."""
async with self.session.get(await self._build_url("specifications")) as response:
if response.status != 200:
raise status_exceptions(response)
return await response.json()
async def usage(self) -> dict:
"""Asynchronously retrieve current usage stats of the sandbox."""
async with self.session.get(await self._build_url("usages")) as response:
if response.status != 200:
raise status_exceptions(response)
return await response.json()
async def download(self, uuid: str, path: str = None) -> BinaryIO:
"""Asynchronously download an environment or a specific file inside an
environment."""
if path is None:
url = await self._build_url("environments", uuid)
else:
url = await self._build_url("files", uuid, path)
async with self.session.get(url) as response:
if response.status != 200:
raise status_exceptions(response)
return io.BytesIO(await response.read())
async def check(self, uuid: str, path: str = None) -> int:
"""Asynchronously check if an environment or a specific file inside an
environment exists."""
if path is None:
url = await self._build_url("environments", uuid)
else:
url = await self._build_url("files", uuid, path)
async with self.session.head(url) as response:
if response.status not in [200, 404]: # pragma: no cover
raise status_exceptions(response)
return 0 if response.status == 404 else response.headers["Content-Length"]
async def execute(self, config: Union[dict], environ: Optional[BinaryIO] = None) -> dict:
"""Asynchronously execute commands on the sandbox according to <config>
and <environ>, returning the response's json as a dict.
<environ>, if not None, will be consumed and closed and shall not be
used further."""
data = aiohttp.FormData()
data.add_field("config", json.dumps(config))
if environ is not None:
data.add_field("environment", environ)
async with self.session.post(await self._build_url("execute"), data=data) as response:
if response.status != 200:
raise status_exceptions(response)
return await response.json()
    async def load(self, environ: dict) -> dict:
        """Asynchronously send <environ> to the sandbox's load endpoint,
        returning the response's json as a dict."""
data = aiohttp.FormData()
data.add_field("data", json.dumps(environ))
async with self.session.post(await self._build_url("load/fr"), data=data) as response:
if response.status != 200:
raise status_exceptions(response)
return await response.json()
    async def demo(self, environ: dict) -> dict:
        """Asynchronously send <environ> to the sandbox's demo endpoint,
        returning the response's json as a dict."""
data = aiohttp.FormData()
data.add_field("data", json.dumps(environ))
data.add_field("demo", True)
async with self.session.post(await self._build_url("demo"), data=data) as response:
if response.status != 200:
raise status_exceptions(response)
return await response.json()
async def playexo(self, config: dict, environ: dict) -> dict:
"""Asynchronously execute commands on the sandbox according to <config>
and <environ>, returning the response's json as a dict.
<environ>, if not None, will be consumed and closed and shall not be
used further."""
data = aiohttp.FormData()
data.add_field("data", json.dumps(environ))
data.add_field("config", json.dumps(config))
async with self.session.post(await self._build_url("exo"), data=data) as response:
if response.status != 200:
raise status_exceptions(response)
return await response.json()
    async def exec(self, datas: dict = {}) -> dict:
        """Asynchronously send <datas> to the sandbox's exec endpoint,
        returning the response's json as a dict."""
data = aiohttp.FormData()
data.add_field("data", json.dumps(datas))
for key, value in datas.items():
data.add_field(str(key), value)
async with self.session.post(await self._build_url("exec"), data=data) as response:
if response.status != 200:
raise status_exceptions(response)
return await response.json()
| [((4690, 4708), 'aiohttp.FormData', 'aiohttp.FormData', ([], {}), '()\n', (4706, 4708), False, 'import aiohttp\n'), ((5413, 5431), 'aiohttp.FormData', 'aiohttp.FormData', ([], {}), '()\n', (5429, 5431), False, 'import aiohttp\n'), ((6049, 6067), 'aiohttp.FormData', 'aiohttp.FormData', ([], {}), '()\n', (6065, 6067), False, 'import aiohttp\n'), ((6739, 6757), 'aiohttp.FormData', 'aiohttp.FormData', ([], {}), '()\n', (6755, 6757), False, 'import aiohttp\n'), ((7429, 7447), 'aiohttp.FormData', 'aiohttp.FormData', ([], {}), '()\n', (7445, 7447), False, 'import aiohttp\n'), ((4742, 4760), 'json.dumps', 'json.dumps', (['config'], {}), '(config)\n', (4752, 4760), False, 'import json\n'), ((5463, 5482), 'json.dumps', 'json.dumps', (['environ'], {}), '(environ)\n', (5473, 5482), False, 'import json\n'), ((6099, 6118), 'json.dumps', 'json.dumps', (['environ'], {}), '(environ)\n', (6109, 6118), False, 'import json\n'), ((6789, 6808), 'json.dumps', 'json.dumps', (['environ'], {}), '(environ)\n', (6799, 6808), False, 'import json\n'), ((6843, 6861), 'json.dumps', 'json.dumps', (['config'], {}), '(config)\n', (6853, 6861), False, 'import json\n'), ((7479, 7496), 'json.dumps', 'json.dumps', (['datas'], {}), '(datas)\n', (7489, 7496), False, 'import json\n'), ((1550, 1612), 'aiohttp.ClientTimeout', 'aiohttp.ClientTimeout', (['total', 'connect', 'sock_connect', 'sock_read'], {}), '(total, connect, sock_connect, sock_read)\n', (1571, 1612), False, 'import aiohttp\n')] |
jhonnattan123/fastapi_crud_example | api/services/usuarios_services.py | 24e1c295d41ad364ef839a4756e85b5bd640385a | import datetime
from uuid import UUID
from api.actions import storage
from fastapi import HTTPException
from api.models.usuario import Usuario
from starlette.requests import Request
from api.dependencies import validar_email, validar_formato_fecha,validar_edad
FORMATO_FECHA = "%Y-%m-%d"
EDAD_MINIMA = 18
EDAD_MAXIMA = 100
class Usuarios_Services:
    """ Service section for handling the business logic
        Attributes:
            FORMATO_FECHA (str): Date format used for validation
            EDAD_MINIMA (int): Minimum age accepted
            EDAD_MAXIMA (int): Maximum age accepted
"""
    def agregar_usuario(self, usuario: Usuario, request: Request) -> dict:
        """ Adds a user to the database.
            :param usuario: User to add
            :param request: FastAPI Request
"""
try:
if not validar_email(getattr(usuario, "email")):
raise HTTPException(
status_code=400,
detail="El email no es válido"
)
fecha_nacimiento = usuario.fecha_nacimiento
if not validar_formato_fecha(fecha_nacimiento, FORMATO_FECHA):
raise HTTPException(
status_code=400,
detail="El formato de la fecha de nacimiento no es válida"
)
usuario.fecha_nacimiento = datetime.datetime.strptime(fecha_nacimiento, FORMATO_FECHA)
if not validar_edad(usuario.fecha_nacimiento, EDAD_MINIMA, EDAD_MAXIMA):
raise HTTPException(
status_code=400,
detail="La edad no es válida"
)
usuario_id = storage.add(usuario, request)
return { "ID": usuario_id }
except Exception as e:
print("Error al agregar usuario: {}".format(str(e)))
raise e
    def editar_usuario(self, usuario_id: UUID, usuario: Usuario, request: Request) -> dict:
        """ Edits a user in the database.
            :param usuario_id: ID of the user to edit
            :param usuario: User data to edit
            :param request: FastAPI Request
"""
try:
if not validar_email(getattr(usuario, "email")):
raise HTTPException(
status_code=400,
detail="El email no es válido"
)
fecha_nacimiento = usuario.fecha_nacimiento
if not validar_formato_fecha(fecha_nacimiento, FORMATO_FECHA):
raise HTTPException(
status_code=400,
detail="El formato de la fecha de nacimiento no es válida"
)
usuario.fecha_nacimiento = datetime.datetime.strptime(fecha_nacimiento, FORMATO_FECHA)
if not validar_edad(usuario.fecha_nacimiento, EDAD_MINIMA, EDAD_MAXIMA):
raise HTTPException(
status_code=400,
detail="La edad no es válida"
)
storage.update(usuario_id, usuario, request)
return { "ID": usuario_id }
except Exception as e:
print("Error al editar usuario: {}".format(str(e)))
raise e
    def eliminar_usuario(self, usuario_id: UUID, request: Request) -> dict:
        """ Deletes a user from the database.
            :param usuario_id: ID of the user to delete
            :param request: FastAPI Request
"""
try:
storage.delete(Usuario, usuario_id, request)
return { "ID": usuario_id }
except Exception as e:
print("Error al eliminar usuario: {}".format(str(e)))
raise e
    def listar_usuarios(self, pagina: int, cantidad: int, order_by: str, sort: str, request: Request) -> dict:
        """ Gets a list of users from the database.
            :param pagina: Page to return
            :param cantidad: Number of users to return
            :param order_by: Field by which the list will be ordered
            :param sort: Ascending or descending order
            :param request: FastAPI Request
"""
try:
return storage.get_all(Usuario, pagina, cantidad, request, order_by, sort)
except Exception as e:
print("Error al listar usuarios: {}".format(str(e)))
raise e
    def obtener_usuario(self, usuario_id: UUID, request: Request) -> Usuario:
        """ Returns a user by its ID
            :param usuario_id: ID of the user to look up
            :param request: FastAPI Request
"""
try:
usuario = storage.get_by_id(Usuario, usuario_id, request)
return usuario
except Exception as e:
print("Error al obtener usuario: {}".format(str(e)))
raise e | [((1409, 1468), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['fecha_nacimiento', 'FORMATO_FECHA'], {}), '(fecha_nacimiento, FORMATO_FECHA)\n', (1435, 1468), False, 'import datetime\n'), ((1735, 1764), 'api.actions.storage.add', 'storage.add', (['usuario', 'request'], {}), '(usuario, request)\n', (1746, 1764), False, 'from api.actions import storage\n'), ((2814, 2873), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['fecha_nacimiento', 'FORMATO_FECHA'], {}), '(fecha_nacimiento, FORMATO_FECHA)\n', (2840, 2873), False, 'import datetime\n'), ((3127, 3171), 'api.actions.storage.update', 'storage.update', (['usuario_id', 'usuario', 'request'], {}), '(usuario_id, usuario, request)\n', (3141, 3171), False, 'from api.actions import storage\n'), ((3594, 3638), 'api.actions.storage.delete', 'storage.delete', (['Usuario', 'usuario_id', 'request'], {}), '(Usuario, usuario_id, request)\n', (3608, 3638), False, 'from api.actions import storage\n'), ((4284, 4351), 'api.actions.storage.get_all', 'storage.get_all', (['Usuario', 'pagina', 'cantidad', 'request', 'order_by', 'sort'], {}), '(Usuario, pagina, cantidad, request, order_by, sort)\n', (4299, 4351), False, 'from api.actions import storage\n'), ((4736, 4783), 'api.actions.storage.get_by_id', 'storage.get_by_id', (['Usuario', 'usuario_id', 'request'], {}), '(Usuario, usuario_id, request)\n', (4753, 4783), False, 'from api.actions import storage\n'), ((931, 993), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(400)', 'detail': '"""El email no es válido"""'}), "(status_code=400, detail='El email no es válido')\n", (944, 993), False, 'from fastapi import HTTPException\n'), ((1142, 1196), 'api.dependencies.validar_formato_fecha', 'validar_formato_fecha', (['fecha_nacimiento', 'FORMATO_FECHA'], {}), '(fecha_nacimiento, FORMATO_FECHA)\n', (1163, 1196), False, 'from api.dependencies import validar_email, validar_formato_fecha, validar_edad\n'), ((1220, 1315), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(400)', 'detail': '"""El formato de la fecha de nacimiento no es válida"""'}), "(status_code=400, detail=\n 'El formato de la fecha de nacimiento no es válida')\n", (1233, 1315), False, 'from fastapi import HTTPException\n'), ((1501, 1565), 'api.dependencies.validar_edad', 'validar_edad', (['usuario.fecha_nacimiento', 'EDAD_MINIMA', 'EDAD_MAXIMA'], {}), '(usuario.fecha_nacimiento, EDAD_MINIMA, EDAD_MAXIMA)\n', (1513, 1565), False, 'from api.dependencies import validar_email, validar_formato_fecha, validar_edad\n'), ((1589, 1650), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(400)', 'detail': '"""La edad no es válida"""'}), "(status_code=400, detail='La edad no es válida')\n", (1602, 1650), False, 'from fastapi import HTTPException\n'), ((2336, 2398), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(400)', 'detail': '"""El email no es válido"""'}), "(status_code=400, detail='El email no es válido')\n", (2349, 2398), False, 'from fastapi import HTTPException\n'), ((2547, 2601), 'api.dependencies.validar_formato_fecha', 'validar_formato_fecha', (['fecha_nacimiento', 'FORMATO_FECHA'], {}), '(fecha_nacimiento, FORMATO_FECHA)\n', (2568, 2601), False, 'from api.dependencies import validar_email, validar_formato_fecha, validar_edad\n'), ((2625, 2720), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(400)', 'detail': '"""El formato de la fecha de nacimiento no es válida"""'}), "(status_code=400, detail=\n 'El formato de la fecha de nacimiento 
no es válida')\n", (2638, 2720), False, 'from fastapi import HTTPException\n'), ((2906, 2970), 'api.dependencies.validar_edad', 'validar_edad', (['usuario.fecha_nacimiento', 'EDAD_MINIMA', 'EDAD_MAXIMA'], {}), '(usuario.fecha_nacimiento, EDAD_MINIMA, EDAD_MAXIMA)\n', (2918, 2970), False, 'from api.dependencies import validar_email, validar_formato_fecha, validar_edad\n'), ((2994, 3055), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(400)', 'detail': '"""La edad no es válida"""'}), "(status_code=400, detail='La edad no es válida')\n", (3007, 3055), False, 'from fastapi import HTTPException\n')] |
thisismyrobot/cti-toolkit | certau/util/taxii/client.py | faf6e912af69376f5c55902c1592f7eeb0ce03dd | import os
import logging
import dateutil
import pickle
from six.moves.urllib.parse import urlparse
from libtaxii import get_message_from_http_response, VID_TAXII_XML_11
from libtaxii.messages_11 import PollRequest, PollFulfillmentRequest
from libtaxii.messages_11 import PollResponse, generate_message_id
from libtaxii.clients import HttpClient
from certau import version_string
class SimpleTaxiiClient(HttpClient):
"""A simple interface to libtaxii for sending TAXII client messages.
Args:
username: a username for HTTP basic authentication
password: a password for HTTP basic authentication
key_file: a file containing a private key
(for SSL certificate-based authentication)
cert_file: a file containing a certificate
(for SSL certificate-based authentication)
ca_file: a file containing the CA's certificate
(for verifying the server's certificate)
"""
def __init__(self, username=None, password=None,
key_file=None, cert_file=None, ca_file=None):
super(SimpleTaxiiClient, self).__init__()
self._logger = logging.getLogger()
self.username = username
self.password = password
self.key_file = key_file
self.cert_file = cert_file
self.ca_file = ca_file
def setup_authentication(self, use_ssl):
"""Setup the appropriate credentials and authentication type.
Initialises the authentication settings for the connection.
Args:
use_ssl: should this connection use SSL
"""
self.set_use_https(use_ssl)
credentials = dict()
if self.username and self.password:
credentials['username'] = self.username
credentials['password'] = self.password
if use_ssl and self.key_file and self.cert_file:
credentials['key_file'] = self.key_file
credentials['cert_file'] = self.cert_file
if credentials:
self.set_auth_credentials(credentials)
if self.username and self.password:
if use_ssl and self.key_file and self.cert_file:
self.set_auth_type(HttpClient.AUTH_CERT_BASIC)
self._logger.debug("TAXII authentication using private key "
"(%s), certificate (%s), and credentials "
"for user '%s'", self.key_file,
self.cert_file, self.username)
else:
self.set_auth_type(HttpClient.AUTH_BASIC)
self._logger.debug("TAXII authentication using credentials "
"for user '%s'", self.username)
elif use_ssl and self.key_file and self.cert_file:
self.set_auth_type(HttpClient.AUTH_CERT)
self._logger.debug("TAXII authentication using private key (%s) "
"and certificate (%s) only", self.key_file,
self.cert_file)
else:
self.set_auth_type(HttpClient.AUTH_NONE)
self._logger.debug("no TAXII authentication")
# CA certificate verification
if use_ssl and self.ca_file:
self.set_verify_server(verify_server=True, ca_file=self.ca_file)
self._logger.debug("SSL - verification using CA file (%s)",
self.ca_file)
@staticmethod
def create_poll_request(collection, subscription_id=None,
begin_timestamp=None, end_timestamp=None):
"""Create a poll request message using supplied parameters."""
request_kwargs = dict(
message_id=generate_message_id(),
collection_name=collection,
exclusive_begin_timestamp_label=begin_timestamp,
inclusive_end_timestamp_label=end_timestamp,
)
if subscription_id:
request_kwargs['subscription_id'] = subscription_id
else:
request_kwargs['poll_parameters'] = PollRequest.PollParameters()
return PollRequest(**request_kwargs)
@staticmethod
def create_fulfillment_request(collection, result_id, part_number):
return PollFulfillmentRequest(
message_id=generate_message_id(),
collection_name=collection,
result_id=result_id,
result_part_number=part_number,
)
def send_taxii_message(self, request, host, path, port):
# Send the request message and return the response
http_response = self.call_taxii_service2(
host=host,
path=path,
message_binding=VID_TAXII_XML_11,
post_data=request.to_xml(),
port=port,
user_agent='{} (libtaxii)'.format(version_string)
)
response = get_message_from_http_response(
http_response=http_response,
in_response_to=request.message_id,
)
return response
@staticmethod
def get_poll_time(filename, poll_url, collection):
if os.path.isfile(filename):
with open(filename, 'rb') as state_file:
poll_state = pickle.load(state_file)
if isinstance(poll_state, dict) and poll_url in poll_state:
if collection in poll_state[poll_url]:
time_string = poll_state[poll_url][collection]
return dateutil.parser.parse(time_string)
return None
@staticmethod
def save_poll_time(filename, poll_url, collection, timestamp):
if timestamp is not None:
poll_state = dict()
if os.path.isfile(filename):
with open(filename, 'rb') as state_file:
poll_state = pickle.load(state_file)
if not isinstance(poll_state, dict):
raise Exception('unexpected content encountered when '
'reading TAXII poll state file')
if poll_url not in poll_state:
poll_state[poll_url] = dict()
poll_state[poll_url][collection] = str(timestamp)
with open(filename, 'wb') as state_file:
pickle.dump(poll_state, state_file, protocol=2)
def poll(self, poll_url, collection, subscription_id=None,
begin_timestamp=None, end_timestamp=None, state_file=None):
"""Send the TAXII poll request to the server using the given URL."""
# Parse the poll_url to get the parts required by libtaxii
url_parts = urlparse(poll_url)
# Allow credentials to be provided in poll_url
if url_parts.username and url_parts.password:
self.username = url_parts.username
self.password = url_parts.password
self._logger.debug('updating username and password from poll_url')
if url_parts.scheme not in ['http', 'https']:
            raise Exception('invalid scheme in poll_url (%s); expected '
                            '"http" or "https"' % poll_url)
use_ssl = True if url_parts.scheme == 'https' else False
# Initialise the authentication settings
self.setup_authentication(use_ssl)
if state_file and not begin_timestamp:
begin_timestamp = self.get_poll_time(
filename=state_file,
poll_url=poll_url,
collection=collection,
)
request = self.create_poll_request(
collection=collection,
subscription_id=subscription_id,
begin_timestamp=begin_timestamp,
end_timestamp=end_timestamp,
)
self._logger.debug('sending poll request (url=%s, collection=%s)',
poll_url, collection)
response = self.send_taxii_message(
request=request,
host=url_parts.hostname,
path=url_parts.path,
port=url_parts.port,
)
first = True
poll_end_time = None
while True:
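            # Yield content blocks from each response and keep requesting
            # further result parts until the server reports no more content.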
if not isinstance(response, PollResponse):
raise Exception('didn\'t get a poll response')
self._logger.debug('received poll response '
'(content_blocks=%d, result_id=%s, more=%s)',
len(response.content_blocks),
response.result_id,
'True' if response.more else 'False')
# Save end timestamp from first PollResponse
if first:
poll_end_time = response.inclusive_end_timestamp_label
if len(response.content_blocks) == 0:
if first:
self._logger.info('poll response contained '
'no content blocks')
break
for content_block in response.content_blocks:
yield content_block
if not response.more:
break
# Send a fulfilment request
if first:
# Initialise fulfilment request values
part_number = response.result_part_number
result_id = response.result_id
first = False
part_number += 1
request = self.create_fulfillment_request(
collection=collection,
result_id=result_id,
part_number=part_number,
)
self._logger.debug('sending fulfilment request '
'(result_id=%s, part_number=%d)',
result_id, part_number)
response = self.send_taxii_message(
request=request,
host=url_parts.hostname,
path=url_parts.path,
port=url_parts.port,
)
# Update the timestamp for the latest poll
if state_file and poll_end_time:
self.save_poll_time(
filename=state_file,
poll_url=poll_url,
collection=collection,
timestamp=poll_end_time,
)
| [((1159, 1178), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (1176, 1178), False, 'import logging\n'), ((4119, 4148), 'libtaxii.messages_11.PollRequest', 'PollRequest', ([], {}), '(**request_kwargs)\n', (4130, 4148), False, 'from libtaxii.messages_11 import PollRequest, PollFulfillmentRequest\n'), ((4869, 4968), 'libtaxii.get_message_from_http_response', 'get_message_from_http_response', ([], {'http_response': 'http_response', 'in_response_to': 'request.message_id'}), '(http_response=http_response, in_response_to=\n request.message_id)\n', (4899, 4968), False, 'from libtaxii import get_message_from_http_response, VID_TAXII_XML_11\n'), ((5108, 5132), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (5122, 5132), False, 'import os\n'), ((6618, 6636), 'six.moves.urllib.parse.urlparse', 'urlparse', (['poll_url'], {}), '(poll_url)\n', (6626, 6636), False, 'from six.moves.urllib.parse import urlparse\n'), ((4074, 4102), 'libtaxii.messages_11.PollRequest.PollParameters', 'PollRequest.PollParameters', ([], {}), '()\n', (4100, 4102), False, 'from libtaxii.messages_11 import PollRequest, PollFulfillmentRequest\n'), ((5699, 5723), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (5713, 5723), False, 'import os\n'), ((3728, 3749), 'libtaxii.messages_11.generate_message_id', 'generate_message_id', ([], {}), '()\n', (3747, 3749), False, 'from libtaxii.messages_11 import PollResponse, generate_message_id\n'), ((4302, 4323), 'libtaxii.messages_11.generate_message_id', 'generate_message_id', ([], {}), '()\n', (4321, 4323), False, 'from libtaxii.messages_11 import PollResponse, generate_message_id\n'), ((5216, 5239), 'pickle.load', 'pickle.load', (['state_file'], {}), '(state_file)\n', (5227, 5239), False, 'import pickle\n'), ((6268, 6315), 'pickle.dump', 'pickle.dump', (['poll_state', 'state_file'], {'protocol': '(2)'}), '(poll_state, state_file, protocol=2)\n', (6279, 6315), False, 'import pickle\n'), ((5815, 5838), 'pickle.load', 'pickle.load', (['state_file'], {}), '(state_file)\n', (5826, 5838), False, 'import pickle\n'), ((5477, 5511), 'dateutil.parser.parse', 'dateutil.parser.parse', (['time_string'], {}), '(time_string)\n', (5498, 5511), False, 'import dateutil\n')] |
YipengHu/MPHY0041 | tutorials/registration/data.py | 6e9706eba2b9f9a2449539d7dea5f91dde807584 |
import os
import zipfile
import requests
DATA_PATH = './data'
RESULT_PATH = './result'
if not os.path.exists(DATA_PATH):
os.makedirs(DATA_PATH)
print('Downloading and extracting data...')
url = 'https://weisslab.cs.ucl.ac.uk/WEISSTeaching/datasets/-/archive/hn2dct/datasets-hn2dct.zip'
r = requests.get(url,allow_redirects=True)
temp_file = 'temp.zip'
with open(temp_file, 'wb') as f:
    f.write(r.content)
with zipfile.ZipFile(temp_file,'r') as zip_obj:
zip_obj.extractall(DATA_PATH)
os.remove(temp_file)
print('Done.')
print('Head-neck 2D CT data downloaded: %s' % os.path.abspath(os.path.join(DATA_PATH,'datasets-hn2dct')))
if not os.path.exists(RESULT_PATH):
os.makedirs(RESULT_PATH)
print('Result directory created: %s' % os.path.abspath(RESULT_PATH))
| [((300, 339), 'requests.get', 'requests.get', (['url'], {'allow_redirects': '(True)'}), '(url, allow_redirects=True)\n', (312, 339), False, 'import requests\n'), ((487, 507), 'os.remove', 'os.remove', (['temp_file'], {}), '(temp_file)\n', (496, 507), False, 'import os\n'), ((98, 123), 'os.path.exists', 'os.path.exists', (['DATA_PATH'], {}), '(DATA_PATH)\n', (112, 123), False, 'import os\n'), ((129, 151), 'os.makedirs', 'os.makedirs', (['DATA_PATH'], {}), '(DATA_PATH)\n', (140, 151), False, 'import os\n'), ((410, 441), 'zipfile.ZipFile', 'zipfile.ZipFile', (['temp_file', '"""r"""'], {}), "(temp_file, 'r')\n", (425, 441), False, 'import zipfile\n'), ((637, 664), 'os.path.exists', 'os.path.exists', (['RESULT_PATH'], {}), '(RESULT_PATH)\n', (651, 664), False, 'import os\n'), ((670, 694), 'os.makedirs', 'os.makedirs', (['RESULT_PATH'], {}), '(RESULT_PATH)\n', (681, 694), False, 'import os\n'), ((585, 627), 'os.path.join', 'os.path.join', (['DATA_PATH', '"""datasets-hn2dct"""'], {}), "(DATA_PATH, 'datasets-hn2dct')\n", (597, 627), False, 'import os\n'), ((738, 766), 'os.path.abspath', 'os.path.abspath', (['RESULT_PATH'], {}), '(RESULT_PATH)\n', (753, 766), False, 'import os\n')] |
lhuett/insights-core | insights/parsers/tests/test_freeipa_healthcheck_log.py | 1c84eeffc037f85e2bbf60c9a302c83aa1a50cf8 | import doctest
from insights.parsers import freeipa_healthcheck_log
from insights.parsers.freeipa_healthcheck_log import FreeIPAHealthCheckLog
from insights.tests import context_wrap
LONG_FREEIPA_HEALTHCHECK_LOG_OK = """
[{"source": "ipahealthcheck.ipa.roles", "check": "IPACRLManagerCheck",
"result": "SUCCESS", "uuid": "1f4177a4-0ddb-4e4d-8258-a5cd5f4638fc",
"when": "20191203122317Z", "duration": "0.002254",
"kw": {"key": "crl_manager", "crlgen_enabled": true}}]
""".strip()
LONG_FREEIPA_HEALTHCHECK_LOG_FAILURES = """
[{"source": "ipahealthcheck.system.filesystemspace",
"check": "FileSystemSpaceCheck",
"result": "ERROR", "uuid": "90ed8765-6ad7-425c-abbd-b07a652649cb",
"when": "20191203122221Z", "duration": "0.000474", "kw": {
"msg": "/var/log/audit/: free space under threshold: 14 MiB < 512 MiB",
"store": "/var/log/audit/", "free_space": 14, "threshold": 512}}]
""".strip()
FREEIPA_HEALTHCHECK_LOG_DOCS_EXAMPLE = '''
[
{
"source": "ipahealthcheck.ipa.roles",
"check": "IPACRLManagerCheck",
"result": "SUCCESS",
"uuid": "1f4177a4-0ddb-4e4d-8258-a5cd5f4638fc",
"when": "20191203122317Z",
"duration": "0.002254",
"kw": {
"key": "crl_manager",
"crlgen_enabled": true
}
},
{
"source": "ipahealthcheck.ipa.roles",
"check": "IPARenewalMasterCheck",
"result": "SUCCESS",
"uuid": "1feb7f99-2e98-4e37-bb52-686896972022",
"when": "20191203122317Z",
"duration": "0.018330",
"kw": {
"key": "renewal_master",
"master": true
}
},
{
"source": "ipahealthcheck.system.filesystemspace",
"check": "FileSystemSpaceCheck",
"result": "ERROR",
"uuid": "90ed8765-6ad7-425c-abbd-b07a652649cb",
"when": "20191203122221Z",
"duration": "0.000474",
"kw": {
"msg": "/var/log/audit/: free space under threshold: 14 MiB < 512 MiB",
"store": "/var/log/audit/",
"free_space": 14,
"threshold": 512
}
}
]
'''.strip()
FREEIPA_HEALTHCHECK_LOG_OK = "".join(LONG_FREEIPA_HEALTHCHECK_LOG_OK.splitlines())
FREEIPA_HEALTHCHECK_LOG_FAILURES = "".join(LONG_FREEIPA_HEALTHCHECK_LOG_FAILURES.splitlines())
def test_freeipa_healthcheck_log_ok():
log_obj = FreeIPAHealthCheckLog(context_wrap(FREEIPA_HEALTHCHECK_LOG_OK))
assert len(log_obj.issues) == 0
def test_freeipa_healthcheck_log_not_ok():
log_obj = FreeIPAHealthCheckLog(context_wrap(FREEIPA_HEALTHCHECK_LOG_FAILURES))
assert len(log_obj.issues) > 0
for issue in log_obj.issues:
assert issue['check'] == 'FileSystemSpaceCheck'
assert issue['source'] == 'ipahealthcheck.system.filesystemspace'
def test_freeipa_healthcheck_get_results_ok():
log_obj = FreeIPAHealthCheckLog(context_wrap(FREEIPA_HEALTHCHECK_LOG_OK))
results = log_obj.get_results('ipahealthcheck.system.filesystemspace', 'FileSystemSpaceCheck')
assert len(results) == 0
def test_freeipa_healthcheck_get_results_not_ok():
log_obj = FreeIPAHealthCheckLog(context_wrap(FREEIPA_HEALTHCHECK_LOG_FAILURES))
results = log_obj.get_results('ipahealthcheck.system.filesystemspace', 'FileSystemSpaceCheck')
assert len(results) == 1
for result in results:
assert result['result'] in ['ERROR', 'CRITICAL']
assert result['check'] == 'FileSystemSpaceCheck'
assert result['source'] == 'ipahealthcheck.system.filesystemspace'
def test_freeipa_healthcheck_log__documentation():
env = {
'healthcheck': FreeIPAHealthCheckLog(context_wrap(FREEIPA_HEALTHCHECK_LOG_DOCS_EXAMPLE)),
}
failed, total = doctest.testmod(freeipa_healthcheck_log, globs=env)
assert failed == 0
| [((3699, 3750), 'doctest.testmod', 'doctest.testmod', (['freeipa_healthcheck_log'], {'globs': 'env'}), '(freeipa_healthcheck_log, globs=env)\n', (3714, 3750), False, 'import doctest\n'), ((2369, 2409), 'insights.tests.context_wrap', 'context_wrap', (['FREEIPA_HEALTHCHECK_LOG_OK'], {}), '(FREEIPA_HEALTHCHECK_LOG_OK)\n', (2381, 2409), False, 'from insights.tests import context_wrap\n'), ((2528, 2574), 'insights.tests.context_wrap', 'context_wrap', (['FREEIPA_HEALTHCHECK_LOG_FAILURES'], {}), '(FREEIPA_HEALTHCHECK_LOG_FAILURES)\n', (2540, 2574), False, 'from insights.tests import context_wrap\n'), ((2859, 2899), 'insights.tests.context_wrap', 'context_wrap', (['FREEIPA_HEALTHCHECK_LOG_OK'], {}), '(FREEIPA_HEALTHCHECK_LOG_OK)\n', (2871, 2899), False, 'from insights.tests import context_wrap\n'), ((3118, 3164), 'insights.tests.context_wrap', 'context_wrap', (['FREEIPA_HEALTHCHECK_LOG_FAILURES'], {}), '(FREEIPA_HEALTHCHECK_LOG_FAILURES)\n', (3130, 3164), False, 'from insights.tests import context_wrap\n'), ((3620, 3670), 'insights.tests.context_wrap', 'context_wrap', (['FREEIPA_HEALTHCHECK_LOG_DOCS_EXAMPLE'], {}), '(FREEIPA_HEALTHCHECK_LOG_DOCS_EXAMPLE)\n', (3632, 3670), False, 'from insights.tests import context_wrap\n')] |
xswz8015/infra | recipes/recipes/windows_image_builder/winpe_customization.py | f956b78ce4c39cc76acdda47601b86794ae0c1ba | # Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from recipe_engine import post_process
from PB.recipes.infra.windows_image_builder import windows_image_builder as wib
from PB.recipes.infra.windows_image_builder import actions
from PB.recipes.infra.windows_image_builder import sources
from recipe_engine.post_process import DropExpectation, StatusSuccess
from RECIPE_MODULES.infra.windows_scripts_executor import test_helper as t
DEPS = [
'depot_tools/gitiles',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/raw_io',
'recipe_engine/json',
'windows_adk',
'windows_scripts_executor',
]
PYTHON_VERSION_COMPATIBILITY = 'PY3'
PROPERTIES = wib.Image
def RunSteps(api, image):
""" This recipe executes offline_winpe_customization."""
if not api.platform.is_win:
raise AssertionError('This recipe can only run on windows')
# this recipe will only execute the offline winpe customizations
for cust in image.customizations:
assert (cust.WhichOneof('customization') == 'offline_winpe_customization')
# initialize the image to scripts executor
api.windows_scripts_executor.init()
custs = api.windows_scripts_executor.init_customizations(image)
# pinning all the refs and generating unique keys
custs = api.windows_scripts_executor.process_customizations(custs)
# download all the required refs
api.windows_scripts_executor.download_all_packages(custs)
# download and install the windows ADK and WinPE packages
api.windows_adk.ensure()
# execute the customizations given
api.windows_scripts_executor.execute_customizations(custs)
wpe_image = 'wpe_image'
wpe_cust = 'generic'
arch = 'x86'
key = '9055a3e678be47d58bb860d27b85adbea41fd2ef3e22c5b7cb3180edf358de90'
def GenTests(api):
# actions for adding files from git
ACTION_ADD_STARTNET = actions.Action(
add_file=actions.AddFile(
name='add_startnet_file',
src=sources.Src(
git_src=sources.GITSrc(
repo='chromium.dev',
ref='HEAD',
src='windows/artifacts/startnet.cmd'),),
dst='Windows\\System32',
))
STARTNET_URL = 'chromium.dev/+/ef70cb069518e6dc3ff24bfae7f195de5099c377/' +\
'windows/artifacts/startnet.cmd'
yield (api.test('not_run_on_windows', api.platform('linux', 64)) +
api.expect_exception('AssertionError') +
api.post_process(DropExpectation))
yield (api.test('happy path', api.platform('win', 64)) + api.properties(
t.WPE_IMAGE(wpe_image, wib.ARCH_X86, wpe_cust, 'happy test',
[ACTION_ADD_STARTNET])) +
# mock all the init and deinit steps
t.MOCK_WPE_INIT_DEINIT_SUCCESS(api, key, arch, wpe_image, wpe_cust) +
# mock git pin file
t.GIT_PIN_FILE(api, wpe_cust, 'HEAD', 'windows/artifacts/startnet.cmd',
'HEAD') +
# mock add file to wpe_image mount dir step
t.ADD_FILE(api, wpe_image, wpe_cust, STARTNET_URL) +
# assert that the generated wpe_image was uploaded
t.CHECK_GCS_UPLOAD(
api, wpe_image, wpe_cust,
'\[CLEANUP\]\\\\{}\\\\workdir\\\\gcs.zip'.format(wpe_cust),
'gs://chrome-gce-images/WIB-WIM/{}.zip'.format(key)) +
api.post_process(StatusSuccess) + api.post_process(DropExpectation))
| [((3084, 3134), 'RECIPE_MODULES.infra.windows_scripts_executor.test_helper.ADD_FILE', 't.ADD_FILE', (['api', 'wpe_image', 'wpe_cust', 'STARTNET_URL'], {}), '(api, wpe_image, wpe_cust, STARTNET_URL)\n', (3094, 3134), True, 'from RECIPE_MODULES.infra.windows_scripts_executor import test_helper as t\n'), ((2084, 2174), 'PB.recipes.infra.windows_image_builder.sources.GITSrc', 'sources.GITSrc', ([], {'repo': '"""chromium.dev"""', 'ref': '"""HEAD"""', 'src': '"""windows/artifacts/startnet.cmd"""'}), "(repo='chromium.dev', ref='HEAD', src=\n 'windows/artifacts/startnet.cmd')\n", (2098, 2174), False, 'from PB.recipes.infra.windows_image_builder import sources\n'), ((2916, 2995), 'RECIPE_MODULES.infra.windows_scripts_executor.test_helper.GIT_PIN_FILE', 't.GIT_PIN_FILE', (['api', 'wpe_cust', '"""HEAD"""', '"""windows/artifacts/startnet.cmd"""', '"""HEAD"""'], {}), "(api, wpe_cust, 'HEAD', 'windows/artifacts/startnet.cmd', 'HEAD')\n", (2930, 2995), True, 'from RECIPE_MODULES.infra.windows_scripts_executor import test_helper as t\n'), ((2808, 2875), 'RECIPE_MODULES.infra.windows_scripts_executor.test_helper.MOCK_WPE_INIT_DEINIT_SUCCESS', 't.MOCK_WPE_INIT_DEINIT_SUCCESS', (['api', 'key', 'arch', 'wpe_image', 'wpe_cust'], {}), '(api, key, arch, wpe_image, wpe_cust)\n', (2838, 2875), True, 'from RECIPE_MODULES.infra.windows_scripts_executor import test_helper as t\n'), ((2648, 2736), 'RECIPE_MODULES.infra.windows_scripts_executor.test_helper.WPE_IMAGE', 't.WPE_IMAGE', (['wpe_image', 'wib.ARCH_X86', 'wpe_cust', '"""happy test"""', '[ACTION_ADD_STARTNET]'], {}), "(wpe_image, wib.ARCH_X86, wpe_cust, 'happy test', [\n ACTION_ADD_STARTNET])\n", (2659, 2736), True, 'from RECIPE_MODULES.infra.windows_scripts_executor import test_helper as t\n')] |
wonjinYi/lollang-playground | back/lollangCompiler/main.py | 2df07ccc2518e6dc9f9aa00b2f38ad8d62cdb507 | from lollangCompiler.compiler import Compiler
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--file", required=True, help="컴파일할 파일을 선택해주세요.")
parser.add_argument("--out", default="out.py", help="목적 파이썬 파일경로를 선택해주세요")
args = parser.parse_args()
cmp = Compiler()
cmp.compileFile(args.file, args.out) | [((103, 128), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (126, 128), False, 'import argparse\n'), ((323, 333), 'lollangCompiler.compiler.Compiler', 'Compiler', ([], {}), '()\n', (331, 333), False, 'from lollangCompiler.compiler import Compiler\n')] |
UKPLab/emnlp2019-duplicate_question_detection | reproducing/generator_datacreation/data/readers/SEReader.py | 17a9d97c2414666fcc08015a58619fe9722daf2b | import logging
import subprocess
import xml.etree.ElementTree as ET
from tqdm import tqdm
logger = logging.getLogger('root')
POST_TYPE_QUESTION = '1'
POST_TYPE_ANSWER = '2'
class SEDataReader(object):
"""
NOTE: - a typical xml string for question in original data looks like
<row Id="4" PostTypeId="1" AcceptedAnswerId="7" CreationDate="2008-07-31T21:42:52.667" Score="543" ViewCount="34799" Body="<p>I want to use a track-bar to change a form's opacity.</p>

<p>This is my code:</p>

<pre><code>decimal trans = trackBar1.Value / 5000;
this.Opacity = trans;
</code></pre>

<p>When I build the application, it gives the following error:</p>

<blockquote>
 <p>Cannot implicitly convert type <code>'decimal'</code> to <code>'double'</code>.</p>
</blockquote>

<p>I tried using <code>trans</code> and <code>double</code> but then the control doesn't work. This code worked fine in a past VB.NET project.</p>
" OwnerUserId="8" LastEditorUserId="3151675" LastEditorDisplayName="Rich B" LastEditDate="2017-09-27T05:52:59.927" LastActivityDate="2018-02-22T16:40:13.577" Title="While applying opacity to a form, should we use a decimal or a double value?" Tags="<c#><winforms><type-conversion><decimal><opacity>" AnswerCount="13" CommentCount="1" FavoriteCount="39" CommunityOwnedDate="2012-10-31T16:42:47.213" />
"""
num_skipped_sample_none = 0
num_skipped_other_posttype = 0
num_retrieved = 0
def __init__(self, data_file_path):
"""
data_file_path : (string) path to the posts.xml file
"""
self.data_file_path = data_file_path
def question_data_filter(self, sample):
return dict((k, sample.get(k, None)) for k in ['Id', 'Title', 'Body', 'Tags', 'ParentId', 'PostTypeId'])
def n_items_unfiltered(self):
out = subprocess.check_output(['wc', '-l', self.data_file_path])
return int(out.split()[0])
def read_items(self, allowed_post_types=(POST_TYPE_QUESTION, ), min_score=0, max_year=None):
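        # Questions are cached by Id only when answers are requested, so each
        # answer can inherit the Title and Tags of its parent question.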
questions_dict = dict() if POST_TYPE_ANSWER in allowed_post_types else None
with open(self.data_file_path, 'r') as f:
for l in tqdm(f):
try:
sample = ET.fromstring(l.strip()).attrib
except ET.ParseError as e:
                    print('(Ignoring) ERROR in parsing line (QUESTION READER):\n{}\n'.format(l.strip()))
sample = None
if sample:
if questions_dict is not None and sample['PostTypeId'] == POST_TYPE_QUESTION:
filtered_sample = self.question_data_filter(sample)
questions_dict[filtered_sample['Id']] = filtered_sample
has_min_score = int(sample['Score']) >= min_score
has_max_year = True if max_year is None else int(sample['CreationDate'][:4]) <= max_year
if sample['PostTypeId'] in allowed_post_types and has_max_year and has_min_score:
SEDataReader.num_retrieved += 1
filtered_sample = self.question_data_filter(sample)
if sample['PostTypeId'] == POST_TYPE_ANSWER:
q = questions_dict.get(filtered_sample['ParentId'], None)
if q is None:
print('Skipping answer because parent question is unknown')
continue
else:
filtered_sample['Title'] = q.get('Title')
filtered_sample['Tags'] = q.get('Tags')
yield filtered_sample
else:
SEDataReader.num_skipped_other_posttype += 1
else:
SEDataReader.num_skipped_sample_none += 1
| [((102, 127), 'logging.getLogger', 'logging.getLogger', (['"""root"""'], {}), "('root')\n", (119, 127), False, 'import logging\n'), ((2051, 2109), 'subprocess.check_output', 'subprocess.check_output', (["['wc', '-l', self.data_file_path]"], {}), "(['wc', '-l', self.data_file_path])\n", (2074, 2109), False, 'import subprocess\n'), ((2399, 2406), 'tqdm.tqdm', 'tqdm', (['f'], {}), '(f)\n', (2403, 2406), False, 'from tqdm import tqdm\n')] |