repo_name | repo_path | repo_head_hexsha | content | apis |
---|---|---|---|---|
benlamonica/energy-monitor | new-influx-client.py | 86714a365c91cc05c265de81bce191ff4ab585f8 | import influxdb_client
from influxdb_client import InfluxDBClient
bucket = "python-client-sandbox"
org = "Energy Monitor"
token = "miQdAvNXHiNDVVzPzV5FpkCaR_8qdQ-L1FlPCOXQPI325Kbrh1fgfhkcDUZ4FepaebDdpZ-A1gmtnnjU0_hViA=="
url = "http://localhost:9999"
client = InfluxDBClient(url=url, token=token, org=org)
write_api = client.write_api()
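# Write a single "h2o_feet" point; the bucket, org and record list are passed positionally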
write_api.write("my-bucket", "my-org", [{"measurement": "h2o_feet", "tags": {"location": "coyote_creek"}, "fields": {"water_level": 1}, "time": 1}])
| [((262, 307), 'influxdb_client.InfluxDBClient', 'InfluxDBClient', ([], {'url': 'url', 'token': 'token', 'org': 'org'}), '(url=url, token=token, org=org)\n', (276, 307), False, 'from influxdb_client import InfluxDBClient\n')] |
guidow/pyfarm-agent | tests/test_agent/test_manhole.py | bb5d464f9f6549a3db3529a93e3d9f388b365586 | # No shebang line, this module is meant to be imported
#
# Copyright 2014 Oliver Palmer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from collections import namedtuple
from pprint import pprint
from random import randint
from StringIO import StringIO
from textwrap import dedent
try:
from unittest.mock import patch
except ImportError: # pragma: no cover
from mock import patch
from twisted.internet.protocol import ServerFactory
from twisted.cred.portal import Portal
from twisted.conch.telnet import (
ITelnetProtocol, TelnetBootstrapProtocol, TelnetTransport)
from pyfarm.agent.testutil import TestCase
from pyfarm.agent.manhole import (
LoggingManhole, TransportProtocolFactory, TelnetRealm,
manhole_factory, show)
Peer = namedtuple("Peer", ("host", "port"))
class FakeLoggingManhole(LoggingManhole):
QUIT = False
GET_PEER_CALLS = 0
class terminal(object):
RIGHT_ARROW, LEFT_ARROW = None, None
class transport(object):
@classmethod
def getPeer(cls):
FakeLoggingManhole.GET_PEER_CALLS += 1
return Peer(os.urandom(12).encode("hex"), randint(1024, 65535))
def handle_QUIT(self):
self.QUIT = True
class TestManholeBase(TestCase):
def setUp(self):
TelnetRealm.NAMESPACE = None
FakeLoggingManhole.GET_PEER_CALLS = 0
FakeLoggingManhole.QUIT = False
class TestManholeFactory(TestManholeBase):
def test_assertions(self):
with self.assertRaises(AssertionError):
manhole_factory(None, "", "")
with self.assertRaises(AssertionError):
manhole_factory({}, None, "")
with self.assertRaises(AssertionError):
manhole_factory({}, "", None)
def test_instance_one(self):
namespace = {"bob": None}
username = os.urandom(32).encode("hex")
password = os.urandom(32).encode("hex")
manhole_factory(namespace, username, password)
with self.assertRaises(AssertionError):
manhole_factory(namespace, username, password)
def test_instance(self):
namespace = {"bob": None}
username = os.urandom(32).encode("hex")
password = os.urandom(32).encode("hex")
manhole = manhole_factory(namespace, username, password)
self.assertEqual(namespace, {"bob": None})
self.assertEqual(
TelnetRealm.NAMESPACE,
{"bob": None, "pp": pprint, "show": show})
self.assertIsInstance(manhole, ServerFactory)
self.assertIsInstance(manhole.protocol, TransportProtocolFactory)
self.assertIsInstance(manhole.protocol.portal, Portal)
# There could be multiple password checkers, check for the one
# we know we should have added.
for _, instance in manhole.protocol.portal.checkers.items():
found = False
for user, passwd in instance.users.items():
if user == username and passwd == password:
found = True
if found:
break
else:
self.fail("Failed to find correct username and password.")
def test_request_avatar(self):
realm = TelnetRealm()
avatar = realm.requestAvatar(None, ITelnetProtocol)
self.assertEqual(len(avatar), 3)
self.assertIs(avatar[0], ITelnetProtocol)
self.assertIsInstance(avatar[1], TelnetBootstrapProtocol)
self.assertTrue(callable(avatar[2]))
def test_request_avatar_error(self):
realm = TelnetRealm()
with self.assertRaises(NotImplementedError):
realm.requestAvatar(None, None)
def test_protocol_factory(self):
factory = TransportProtocolFactory(None)
transport = factory()
self.assertIsInstance(transport, TelnetTransport)
class TestManholeShow(TestManholeBase):
def test_uses_namespace(self):
namespace = {"bob": None}
username = os.urandom(32).encode("hex")
password = os.urandom(32).encode("hex")
manhole_factory(namespace, username, password)
output = StringIO()
with patch("sys.stdout", output):
show()
output.seek(0)
output = output.getvalue().strip()
self.assertEqual(output, "objects: ['bob', 'pp', 'show']")
def test_custom_object(self):
class Foobar(object):
a, b, c, d, e = True, 1, "yes", {}, 0.0
output = StringIO()
with patch("sys.stdout", output):
show(Foobar)
output.seek(0)
output = output.getvalue().strip()
self.assertEqual(
output,
dedent("""
data attributes of <class 'tests.test_agent.test_manhole.Foobar'>
a : True
b : 1
c : yes
d : {} (0 elements)
e : 0.0
""").strip())
def test_wrap_long_line(self):
class Foobar(object):
a = " " * 90
output = StringIO()
with patch("sys.stdout", output):
show(Foobar)
output.seek(0)
output = output.getvalue().strip()
self.assertEqual(
output,
dedent("""
data attributes of <class 'tests.test_agent.test_manhole.Foobar'>
a : ' """ +
""" '...
""").strip())
class TestLoggingManhole(TestManholeBase):
def test_line_received(self):
f = FakeLoggingManhole()
f.lineReceived("exit")
self.assertTrue(f.QUIT)
| [((1260, 1296), 'collections.namedtuple', 'namedtuple', (['"""Peer"""', "('host', 'port')"], {}), "('Peer', ('host', 'port'))\n", (1270, 1296), False, 'from collections import namedtuple\n'), ((2431, 2477), 'pyfarm.agent.manhole.manhole_factory', 'manhole_factory', (['namespace', 'username', 'password'], {}), '(namespace, username, password)\n', (2446, 2477), False, 'from pyfarm.agent.manhole import LoggingManhole, TransportProtocolFactory, TelnetRealm, manhole_factory, show\n'), ((2764, 2810), 'pyfarm.agent.manhole.manhole_factory', 'manhole_factory', (['namespace', 'username', 'password'], {}), '(namespace, username, password)\n', (2779, 2810), False, 'from pyfarm.agent.manhole import LoggingManhole, TransportProtocolFactory, TelnetRealm, manhole_factory, show\n'), ((3706, 3719), 'pyfarm.agent.manhole.TelnetRealm', 'TelnetRealm', ([], {}), '()\n', (3717, 3719), False, 'from pyfarm.agent.manhole import LoggingManhole, TransportProtocolFactory, TelnetRealm, manhole_factory, show\n'), ((4040, 4053), 'pyfarm.agent.manhole.TelnetRealm', 'TelnetRealm', ([], {}), '()\n', (4051, 4053), False, 'from pyfarm.agent.manhole import LoggingManhole, TransportProtocolFactory, TelnetRealm, manhole_factory, show\n'), ((4207, 4237), 'pyfarm.agent.manhole.TransportProtocolFactory', 'TransportProtocolFactory', (['None'], {}), '(None)\n', (4231, 4237), False, 'from pyfarm.agent.manhole import LoggingManhole, TransportProtocolFactory, TelnetRealm, manhole_factory, show\n'), ((4541, 4587), 'pyfarm.agent.manhole.manhole_factory', 'manhole_factory', (['namespace', 'username', 'password'], {}), '(namespace, username, password)\n', (4556, 4587), False, 'from pyfarm.agent.manhole import LoggingManhole, TransportProtocolFactory, TelnetRealm, manhole_factory, show\n'), ((4605, 4615), 'StringIO.StringIO', 'StringIO', ([], {}), '()\n', (4613, 4615), False, 'from StringIO import StringIO\n'), ((4946, 4956), 'StringIO.StringIO', 'StringIO', ([], {}), '()\n', (4954, 4956), False, 'from StringIO import StringIO\n'), ((5559, 5569), 'StringIO.StringIO', 'StringIO', ([], {}), '()\n', (5567, 5569), False, 'from StringIO import StringIO\n'), ((2047, 2076), 'pyfarm.agent.manhole.manhole_factory', 'manhole_factory', (['None', '""""""', '""""""'], {}), "(None, '', '')\n", (2062, 2076), False, 'from pyfarm.agent.manhole import LoggingManhole, TransportProtocolFactory, TelnetRealm, manhole_factory, show\n'), ((2138, 2167), 'pyfarm.agent.manhole.manhole_factory', 'manhole_factory', (['{}', 'None', '""""""'], {}), "({}, None, '')\n", (2153, 2167), False, 'from pyfarm.agent.manhole import LoggingManhole, TransportProtocolFactory, TelnetRealm, manhole_factory, show\n'), ((2229, 2258), 'pyfarm.agent.manhole.manhole_factory', 'manhole_factory', (['{}', '""""""', 'None'], {}), "({}, '', None)\n", (2244, 2258), False, 'from pyfarm.agent.manhole import LoggingManhole, TransportProtocolFactory, TelnetRealm, manhole_factory, show\n'), ((2539, 2585), 'pyfarm.agent.manhole.manhole_factory', 'manhole_factory', (['namespace', 'username', 'password'], {}), '(namespace, username, password)\n', (2554, 2585), False, 'from pyfarm.agent.manhole import LoggingManhole, TransportProtocolFactory, TelnetRealm, manhole_factory, show\n'), ((4629, 4656), 'mock.patch', 'patch', (['"""sys.stdout"""', 'output'], {}), "('sys.stdout', output)\n", (4634, 4656), False, 'from mock import patch\n'), ((4670, 4676), 'pyfarm.agent.manhole.show', 'show', ([], {}), '()\n', (4674, 4676), False, 'from pyfarm.agent.manhole import LoggingManhole, TransportProtocolFactory, 
TelnetRealm, manhole_factory, show\n'), ((4970, 4997), 'mock.patch', 'patch', (['"""sys.stdout"""', 'output'], {}), "('sys.stdout', output)\n", (4975, 4997), False, 'from mock import patch\n'), ((5011, 5023), 'pyfarm.agent.manhole.show', 'show', (['Foobar'], {}), '(Foobar)\n', (5015, 5023), False, 'from pyfarm.agent.manhole import LoggingManhole, TransportProtocolFactory, TelnetRealm, manhole_factory, show\n'), ((5583, 5610), 'mock.patch', 'patch', (['"""sys.stdout"""', 'output'], {}), "('sys.stdout', output)\n", (5588, 5610), False, 'from mock import patch\n'), ((5624, 5636), 'pyfarm.agent.manhole.show', 'show', (['Foobar'], {}), '(Foobar)\n', (5628, 5636), False, 'from pyfarm.agent.manhole import LoggingManhole, TransportProtocolFactory, TelnetRealm, manhole_factory, show\n'), ((2346, 2360), 'os.urandom', 'os.urandom', (['(32)'], {}), '(32)\n', (2356, 2360), False, 'import os\n'), ((2394, 2408), 'os.urandom', 'os.urandom', (['(32)'], {}), '(32)\n', (2404, 2408), False, 'import os\n'), ((2669, 2683), 'os.urandom', 'os.urandom', (['(32)'], {}), '(32)\n', (2679, 2683), False, 'import os\n'), ((2717, 2731), 'os.urandom', 'os.urandom', (['(32)'], {}), '(32)\n', (2727, 2731), False, 'import os\n'), ((4456, 4470), 'os.urandom', 'os.urandom', (['(32)'], {}), '(32)\n', (4466, 4470), False, 'import os\n'), ((4504, 4518), 'os.urandom', 'os.urandom', (['(32)'], {}), '(32)\n', (4514, 4518), False, 'import os\n'), ((1657, 1677), 'random.randint', 'randint', (['(1024)', '(65535)'], {}), '(1024, 65535)\n', (1664, 1677), False, 'from random import randint\n'), ((5149, 5450), 'textwrap.dedent', 'dedent', (['"""\n data attributes of <class \'tests.test_agent.test_manhole.Foobar\'>\n a : True\n b : 1\n c : yes\n d : {} (0 elements)\n e : 0.0\n """'], {}), '(\n """\n data attributes of <class \'tests.test_agent.test_manhole.Foobar\'>\n a : True\n b : 1\n c : yes\n d : {} (0 elements)\n e : 0.0\n """\n )\n', (5155, 5450), False, 'from textwrap import dedent\n'), ((5762, 5982), 'textwrap.dedent', 'dedent', (['("""\n data attributes of <class \'tests.test_agent.test_manhole.Foobar\'>\n a : \' """\n + """ \'...\n """)'], {}), '(\n """\n data attributes of <class \'tests.test_agent.test_manhole.Foobar\'>\n a : \' """\n + """ \'...\n """)\n', (5768, 5982), False, 'from textwrap import dedent\n'), ((1627, 1641), 'os.urandom', 'os.urandom', (['(12)'], {}), '(12)\n', (1637, 1641), False, 'import os\n')] |
xcgoo/uiKLine | func-button/klSigmode.py | 80683401d7dc66262ae645db4c2780d6e71be551 | # coding: utf-8
"""
Import all required libraries and functions
"""
#----------------------------------------------------------------------
def klSigmode(self):
"""查找模式"""
if self.mode == 'deal':
self.canvas.updateSig(self.signalsOpen)
self.mode = 'dealOpen'
else:
self.canvas.updateSig(self.signals)
self.mode = 'deal'
| [] |
BnF-jadis/projet | utils/thin.py | 212b1e7b179a564650fb959d9c2565648178f6b6 | # 2020, BackThen Maps
# Coded by Remi Petitpierre https://github.com/RPetitpierre
# For Bibliothèque nationale de France (BnF)
import cv2, thinning, os
import numpy as np
import pandas as pd
import shapefile as shp
from skimage.measure import approximate_polygon
from PIL import Image, ImageDraw
from utils.utils import *
from utils.match import toLatLon
Image.MAX_IMAGE_PIXELS = 500000000
def skeletonize(road_network: np.ndarray, path: str = "workshop/vectorized.png", largest_component: bool = False):
''' Thinning/skeletonization of the road network image to a wired model.
Input(s):
road_network: black and white image of the road network (streets in white)
path: path where the skeletonized image should be saved
largest_component: if True, only the largest road network component will be kept
Output(s):
vectorized: skeletonized image
'''
assert len(road_network.shape) == 2, 'ERROR: road_network must be grayscale image'
img = cv2.resize(road_network, (road_network.shape[1]//2, road_network.shape[0]//2))
vectorized = thinning.guo_hall_thinning(img)
vectorized[vectorized > 100] = 255
vectorized[vectorized <= 100] = 0
if largest_component:
try:
_, labels, stats, _ = cv2.connectedComponentsWithStats(vectorized.copy(), connectivity=8, stats=cv2.CC_STAT_AREA)
stats = stats[1:]
main_component = (np.argmax(stats[:,4])+1).astype('int32')
vectorized = (labels == main_component).astype('uint8')*255
except:
            print('Warning: Skeletonization failed to apply largest_component = True param. Skipping.')
cv2.imwrite(path, vectorized)
return vectorized
def findNodes(image: np.ndarray):
''' Find the nodes in the road network skeleton image.
Input(s):
image: skeletonized image
Output(s):
nodes: array of nodes coordinates (x, y)
degree: degrees of the nodes (2=endpoint, 4=crossroads of 3 streets, 5=crossroads of 4 streets, etc.)
addresses: directions of the crossing roads, with regard to the node
'''
img = image.copy()
# Find row and column locations that are non-zero
(rows, cols) = np.nonzero(img)
nodes, degree, addresses = [], [], []
for (r,c) in zip(rows, cols):
if r > 0 and c > 0 and r < image.shape[0]-1 and c < image.shape[1]-1:
# Extract an 8-connected neighbourhood
(col_neigh, row_neigh) = np.meshgrid(np.array([c-1, c, c+1]), np.array([r-1, r, r+1]))
# Cast to int to index into image
col_neigh = col_neigh.astype('int')
row_neigh = row_neigh.astype('int')
# Convert into a single 1D array and check for non-zero locations
pix_neighbourhood = img[row_neigh, col_neigh].ravel() != 0
# If the number of non-zero locations equals 2, add this to our list of coordinates
n_neighbours = np.sum(pix_neighbourhood)
if (n_neighbours == 2) or (n_neighbours >= 4):
nodes.append((r, c))
degree.append(n_neighbours)
direction_set = np.where(pix_neighbourhood == True)[0]
direction_set = direction_set[direction_set != 4]
addresses.append(direction_set)
nodes = np.asarray(nodes)
return nodes, degree, addresses
def cleanNodesEdges(df_nodes: pd.DataFrame):
df = df_nodes.copy()
new_addresses, new_degree = [], []
for ind, address in df['address'].iteritems():
new_address = avoidDiagonalEdges(address)
new_addresses.append(new_address)
new_degree.append(len(new_address) + 1)
df['address'] = new_addresses
df['degree'] = new_degree
return df
def avoidDiagonalEdges(address: list, direction: int = None):
right, diagonal = [1, 3, 5, 7], {0: [1, 3], 2: [1, 5], 6: [3, 7], 8: [5, 7]}
new_address = []
for r in right:
if r in address:
new_address.append(r)
for d in diagonal.keys():
if d in address:
if not(diagonal[d][0] in address) and not(diagonal[d][1] in address):
if direction != None:
if not((8-direction) in diagonal[d]):
new_address.append(d)
else:
new_address.append(d)
return new_address
def explorePath(start_x: int, start_y: int, start_dir: int, image: np.ndarray, nodes_grid: np.ndarray):
''' Follow the path from one given start node and direction until the next node, and stores the pixels
on the way.
Input(s):
start_x: start node x-coordinate
start_y: start node y-coordinate
start_dir: starting direction ({0, 1, 2,
3, -, 5,
6, 7, 8})
image: skeletonized image of the road network
nodes_grid: grid of the nodes of the skeletonized image
Output(s):
way: list of pixel coordinates on the way
direction: last direction to reach the 2nd node
nodes_grid[x, y]: degree of the arrival node
'''
def absoluteWay(x: int, y: int, way: int):
if way == 0:
x_, y_ = x-1, y-1
elif way == 1:
x_, y_ = x-1, y
elif way == 2:
x_, y_ = x-1, y+1
elif way == 3:
x_, y_ = x, y-1
elif way == 5:
x_, y_ = x, y+1
elif way == 6:
x_, y_ = x+1, y-1
elif way == 7:
x_, y_ = x+1, y
elif way == 8:
x_, y_ = x+1, y+1
else:
raise AttributeError('Parameters invalid: (' + str(x) + ',' + str(y) + ',' + str(way) + '), way \
should be comprised between 0 and 8, and != 4. x, y and way should be of type int.')
return x_, y_
def noTurnBack(direction: int):
wrong_paths = []
if direction == 0:
wrong_paths = [5, 7]
elif direction == 1:
wrong_paths = [6, 8]
elif direction == 2:
wrong_paths = [3, 7]
elif direction == 3:
wrong_paths = [2, 8]
elif direction == 5:
wrong_paths = [0, 6]
elif direction == 6:
wrong_paths = [1, 5]
elif direction == 7:
wrong_paths = [0, 2]
elif direction == 8:
wrong_paths = [1, 3]
return wrong_paths
direction = start_dir
x, y = start_x, start_y
assert image[x, y] != 0, 'ERROR: start point is not white'
end = False
way = [(x, y)]
# First iteration
new_x, new_y = absoluteWay(x, y, direction)
assert image[new_x, new_y] != 0, 'ERROR: 2nd point is not white'
way.append((new_x, new_y))
x, y = new_x, new_y
wrong_paths = noTurnBack(direction)
wrong_paths_active = True
if nodes_grid[x, y]:
end = True
direction = 8-start_dir
while not(end):
if x > 0 and y > 0 and x < image.shape[0]-1 and y < image.shape[1]-1:
# Extract an 8-connected neighbourhood
(row_neigh, col_neigh) = np.meshgrid(np.array([x-1, x, x+1]), np.array([y-1, y, y+1]))
# Cast to int to index into image
col_neigh, row_neigh = col_neigh.astype('int'), row_neigh.astype('int')
# Convert into a single 1D array and check for non-zero locations
try:
pix_neighbourhood = image[row_neigh, col_neigh].transpose().ravel() != 0
except:
print(x, y, image.shape, )
raise AssertionError()
# If the number of non-zero locations equals 2, add this to our list of coordinates
n_neighbours = np.sum(pix_neighbourhood)
direction_set = np.where(pix_neighbourhood == True)[0]
last_ds = [wrong_paths]
last_ds.append(direction_set)
direction_set = direction_set[direction_set != 4]
last_ds.append(direction_set)
direction_set = direction_set[direction_set != (8-direction)]
last_ds.append(direction_set)
direction_set = np.asarray(avoidDiagonalEdges(direction_set, direction))
last_ds.append(direction_set)
if wrong_paths_active:
for wrong_path in wrong_paths:
direction_set = direction_set[direction_set != wrong_path]
wrong_paths_active = False
if len(direction_set) != 1:
end = True
break
direction = direction_set[0]
new_x, new_y = absoluteWay(x, y, direction)
way.append((new_x, new_y))
x, y = new_x, new_y
if nodes_grid[x, y]:
end = True
else:
end = True
return way, direction, nodes_grid[x, y]
def findSegments(df_nodes: pd.DataFrame, image: np.ndarray, min_length: int = 30, return_simple_ways: bool = True):
''' Find all the road segments in the network. Keep the ones that are longer than a given length or non-terminal.
Optionally, compute the Douglas-Peucker simple itinerary of each segment and return it.
Input(s):
df_nodes: list of nodes
image: skeletonized image of the road network
min_length: min segment length if the segment is terminal
return_simple_ways: if True, compute the Douglas-Peucker simple itinerary of each segment and return it
Output(s):
(Optional)(simple_ways: the Douglas-Peucker simple itinerary of each segmenty)
ways: list of segments, containing all the pixels on the way between each couple of nodes
nodes_grid: image containing all the nodes found in the image and their degree
'''
img = image.copy()
done, ways = [], []
df_nodes = df_nodes.sort_values(by='degree').reset_index(drop=True)
nodes_grid = np.zeros(image.shape)
for ind, row in df_nodes[['x', 'y', 'degree']].iterrows():
nodes_grid[row['x'], row['y']] = row['degree']
nodes_grid = nodes_grid.astype('int')
for ind, node in df_nodes.iterrows():
for direct in node['address']:
code = str(node['x']) + '_' + str(node['y']) + '_' + str(direct)
if not(code in done):
way, last_direct, degree = explorePath(start_x=node['x'], start_y=node['y'],
start_dir=direct, image=img, nodes_grid=nodes_grid)
if not((len(way) <= min_length) and ((node['degree'] == 2) or (degree == 2))):
done.append(str(way[-1][0]) + '_' + str(way[-1][1]) + '_' + str(8-last_direct))
ways.append(way)
if return_simple_ways:
simple_ways = []
for way in ways:
inv_way = np.asarray([np.asarray(way)[:,1], image.shape[0]-np.asarray(way)[:,0]]).transpose()
simple_ways.append(approximate_polygon(np.asarray(inv_way), tolerance=1.6).tolist())
return simple_ways, ways, nodes_grid
else:
return ways, nodes_grid
def thinImage(image: np.ndarray, image_name: str, export_file_path: str, exportPNG: bool = False,
exportJSON: bool = False, exportSVG: bool = False, exportSHP: bool = False, geoloc: bool = False):
assert (exportPNG or exportJSON or exportSVG or exportSHP)
# Convert to B&W
road_network = image.copy()
road_network[road_network < 254] = 0
road_network[road_network < 255/2] = 0
road_network[road_network >= 255/2] = 255
vectorized = skeletonize(road_network, largest_component = True)
nodes, degree, addresses = findNodes(vectorized)
    if len(degree) == 0:
return [], [], np.zeros((image.shape[1], image.shape[0]))
df_nodes = pd.DataFrame({'x': nodes[:,0], 'y': nodes[:,1], 'degree': degree, 'address': addresses })
df_nodes = df_nodes.sort_values(by='degree').reset_index(drop=True)
df_nodes = cleanNodesEdges(df_nodes)
df_nodes = df_nodes[df_nodes['degree'] != 3]
if (exportJSON or exportSHP):
simple_segments, full_segments, nodes_grid = findSegments(df_nodes, vectorized, min_length = 15,
return_simple_ways = True)
else:
full_segments, nodes_grid = findSegments(df_nodes, vectorized, min_length = 15,
return_simple_ways = False)
simple_segments = []
if exportPNG:
toPNG(full_segments, vectorized, export_file_path)
elif exportSVG:
toPNG(full_segments, vectorized, os.path.join('workshop', 'thin.png'))
if geoloc:
if exportJSON:
project_name = getProjectName()
try:
with open(os.path.join('save', project_name, 'match' , 'primary', image_name + '.json')) as data:
data = json.load(data)
M = np.asarray(data['M'])
simple_segments_JSON = []
for segment in simple_segments:
s = np.asarray([2*np.asarray(segment)[:,0], image.shape[0]-(2*np.asarray(segment)[:,1])]).T
simple_segments_JSON.append(toLatLon((s@M[:, :2]) + M[:, 2:3].transpose()).tolist())
except:
print("La géolocalisation de l'image {} n'a pas encore été calculée. Par conséquent, \
il n'est pas possible de calculer la géolocalisation de son réseau filaire".format(image_name))
simple_segments_JSON = simple_segments
else:
            print('Geolocation of the road network currently only works for the JSON format.')
else:
simple_segments_JSON = simple_segments
if exportJSON:
with open(export_file_path.replace('png', 'json'), 'w') as outfile:
json.dump(simple_segments_JSON, outfile)
if exportSHP:
os.makedirs(export_file_path.replace('.png', ''), exist_ok=True)
toShapefile(simple_segments, os.path.join(export_file_path.replace('.png', ''), image_name))
if exportSVG:
print("\nAvertissement: Si vous n'avez jamais utilisé cette commande, \
installez d'abord Homebrew, ImageMagick et Potrace via le terminal.\n")
print('Pour installer Homebrew:\n',
' /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"')
print('Pour installer ImageMagick:\n', ' brew install imagemagick')
print('Pour installer Potrace: \n', ' brew install potrace\n')
if exportPNG:
png_path = export_file_path
else:
png_path = os.path.join('workshop', 'thin.png')
pnm_path = os.path.join('workshop', 'thin.pnm')
svg_path = export_file_path.replace('png', 'svg')
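        # Convert the PNG to a PNM bitmap, then trace the PNM into an SVG with potrace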
        os.system('convert ' + png_path + ' ' + pnm_path)
os.system('potrace ' + pnm_path + ' -s -o ' + svg_path)
return simple_segments, full_segments, nodes_grid
def toPNG(segments: list, vectorized: np.ndarray, out_path: str):
''' Save a given set of segments as a bitmap image from the road network.
Input(s):
segments: list of segments, containing all the pixels on the way between each couple of nodes
vectorized: skeletonized image of the road network
out_path: the path, where the output bitmap image should be save
'''
canvas = (np.ones(vectorized.shape)*255).astype('uint8')
cv2.imwrite('workshop/canvas.png', canvas);
bitmap = Image.open('workshop/canvas.png')
draw = ImageDraw.Draw(bitmap)
for segment in segments:
coords = []
for point in segment:
coords.append((point[1], point[0]))
draw.line(coords, fill = 'black', width=0)
bitmap.save(out_path)
def toShapefile(simple_ways, out_path):
w = shp.Writer(out_path)
w.field('DeletionFlag', 'C', 1, 0)
w.field('gid', 'N', 11, 0)
w.field('streetname', 'C', 41, 0)
w.field('note', 'C', 32, 0)
for i in range(len(simple_ways)):
w.line([simple_ways[i]])
w.record('01', i, '', '')
w.close()
| [((1009, 1095), 'cv2.resize', 'cv2.resize', (['road_network', '(road_network.shape[1] // 2, road_network.shape[0] // 2)'], {}), '(road_network, (road_network.shape[1] // 2, road_network.shape[0] //\n 2))\n', (1019, 1095), False, 'import cv2, thinning, os\n'), ((1105, 1136), 'thinning.guo_hall_thinning', 'thinning.guo_hall_thinning', (['img'], {}), '(img)\n', (1131, 1136), False, 'import cv2, thinning, os\n'), ((1679, 1708), 'cv2.imwrite', 'cv2.imwrite', (['path', 'vectorized'], {}), '(path, vectorized)\n', (1690, 1708), False, 'import cv2, thinning, os\n'), ((2236, 2251), 'numpy.nonzero', 'np.nonzero', (['img'], {}), '(img)\n', (2246, 2251), True, 'import numpy as np\n'), ((3338, 3355), 'numpy.asarray', 'np.asarray', (['nodes'], {}), '(nodes)\n', (3348, 3355), True, 'import numpy as np\n'), ((10128, 10149), 'numpy.zeros', 'np.zeros', (['image.shape'], {}), '(image.shape)\n', (10136, 10149), True, 'import numpy as np\n'), ((12028, 12126), 'pandas.DataFrame', 'pd.DataFrame', (["{'x': nodes[:, (0)], 'y': nodes[:, (1)], 'degree': degree, 'address': addresses\n }"], {}), "({'x': nodes[:, (0)], 'y': nodes[:, (1)], 'degree': degree,\n 'address': addresses})\n", (12040, 12126), True, 'import pandas as pd\n'), ((15774, 15816), 'cv2.imwrite', 'cv2.imwrite', (['"""workshop/canvas.png"""', 'canvas'], {}), "('workshop/canvas.png', canvas)\n", (15785, 15816), False, 'import cv2, thinning, os\n'), ((15831, 15864), 'PIL.Image.open', 'Image.open', (['"""workshop/canvas.png"""'], {}), "('workshop/canvas.png')\n", (15841, 15864), False, 'from PIL import Image, ImageDraw\n'), ((15876, 15898), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['bitmap'], {}), '(bitmap)\n', (15890, 15898), False, 'from PIL import Image, ImageDraw\n'), ((16173, 16193), 'shapefile.Writer', 'shp.Writer', (['out_path'], {}), '(out_path)\n', (16183, 16193), True, 'import shapefile as shp\n'), ((15032, 15068), 'os.path.join', 'os.path.join', (['"""workshop"""', '"""thin.pnm"""'], {}), "('workshop', 'thin.pnm')\n", (15044, 15068), False, 'import cv2, thinning, os\n'), ((15135, 15178), 'os.system', 'os.system', (["('convert ' + png_path + pnm_path)"], {}), "('convert ' + png_path + pnm_path)\n", (15144, 15178), False, 'import cv2, thinning, os\n'), ((15187, 15242), 'os.system', 'os.system', (["('potrace ' + pnm_path + ' -s -o ' + svg_path)"], {}), "('potrace ' + pnm_path + ' -s -o ' + svg_path)\n", (15196, 15242), False, 'import cv2, thinning, os\n'), ((2974, 2999), 'numpy.sum', 'np.sum', (['pix_neighbourhood'], {}), '(pix_neighbourhood)\n', (2980, 2999), True, 'import numpy as np\n'), ((7873, 7898), 'numpy.sum', 'np.sum', (['pix_neighbourhood'], {}), '(pix_neighbourhood)\n', (7879, 7898), True, 'import numpy as np\n'), ((11969, 12011), 'numpy.zeros', 'np.zeros', (['(image.shape[1], image.shape[0])'], {}), '((image.shape[1], image.shape[0]))\n', (11977, 12011), True, 'import numpy as np\n'), ((14963, 14999), 'os.path.join', 'os.path.join', (['"""workshop"""', '"""thin.png"""'], {}), "('workshop', 'thin.png')\n", (14975, 14999), False, 'import cv2, thinning, os\n'), ((2507, 2534), 'numpy.array', 'np.array', (['[c - 1, c, c + 1]'], {}), '([c - 1, c, c + 1])\n', (2515, 2534), True, 'import numpy as np\n'), ((2532, 2559), 'numpy.array', 'np.array', (['[r - 1, r, r + 1]'], {}), '([r - 1, r, r + 1])\n', (2540, 2559), True, 'import numpy as np\n'), ((7273, 7300), 'numpy.array', 'np.array', (['[x - 1, x, x + 1]'], {}), '([x - 1, x, x + 1])\n', (7281, 7300), True, 'import numpy as np\n'), ((7298, 7325), 'numpy.array', 'np.array', (['[y - 1, y, y + 
1]'], {}), '([y - 1, y, y + 1])\n', (7306, 7325), True, 'import numpy as np\n'), ((7927, 7962), 'numpy.where', 'np.where', (['(pix_neighbourhood == True)'], {}), '(pix_neighbourhood == True)\n', (7935, 7962), True, 'import numpy as np\n'), ((12858, 12894), 'os.path.join', 'os.path.join', (['"""workshop"""', '"""thin.png"""'], {}), "('workshop', 'thin.png')\n", (12870, 12894), False, 'import cv2, thinning, os\n'), ((15723, 15748), 'numpy.ones', 'np.ones', (['vectorized.shape'], {}), '(vectorized.shape)\n', (15730, 15748), True, 'import numpy as np\n'), ((3172, 3207), 'numpy.where', 'np.where', (['(pix_neighbourhood == True)'], {}), '(pix_neighbourhood == True)\n', (3180, 3207), True, 'import numpy as np\n'), ((13196, 13217), 'numpy.asarray', 'np.asarray', (["data['M']"], {}), "(data['M'])\n", (13206, 13217), True, 'import numpy as np\n'), ((1444, 1468), 'numpy.argmax', 'np.argmax', (['stats[:, (4)]'], {}), '(stats[:, (4)])\n', (1453, 1468), True, 'import numpy as np\n'), ((13040, 13116), 'os.path.join', 'os.path.join', (['"""save"""', 'project_name', '"""match"""', '"""primary"""', "(image_name + '.json')"], {}), "('save', project_name, 'match', 'primary', image_name + '.json')\n", (13052, 13116), False, 'import cv2, thinning, os\n'), ((11184, 11203), 'numpy.asarray', 'np.asarray', (['inv_way'], {}), '(inv_way)\n', (11194, 11203), True, 'import numpy as np\n'), ((11061, 11076), 'numpy.asarray', 'np.asarray', (['way'], {}), '(way)\n', (11071, 11076), True, 'import numpy as np\n'), ((11098, 11113), 'numpy.asarray', 'np.asarray', (['way'], {}), '(way)\n', (11108, 11113), True, 'import numpy as np\n'), ((13359, 13378), 'numpy.asarray', 'np.asarray', (['segment'], {}), '(segment)\n', (13369, 13378), True, 'import numpy as np\n'), ((13403, 13422), 'numpy.asarray', 'np.asarray', (['segment'], {}), '(segment)\n', (13413, 13422), True, 'import numpy as np\n')] |
lutostag/otp | easy2fa/tests/test_checkinput.py | 0792548fa51c489cdc5fcb01a3c6dad1cd453154 | from unittest import TestCase
from unittest.mock import patch
from easy2fa import cli
class TestCheckInput(TestCase):
@patch('builtins.input')
def test_default(self, mock_input):
mock_input.return_value = ''
self.assertEquals(cli.check_input('prompt', default='one'), 'one')
mock_input.return_value = 'two'
self.assertEquals(cli.check_input('prompt', default='one'), 'two')
@patch('builtins.input')
@patch('builtins.print')
def test_assertions(self, mock_print, mock_input):
def assertion(value):
if value not in ['yes', 'no']:
return 'use yes or no'
mock_input.side_effect = ['input', '', 'no']
self.assertEquals(cli.check_input('prompt', assertion=assertion),
'no')
mock_print.assert_called_with('\tInvalid input: use yes or no')
| [((126, 149), 'unittest.mock.patch', 'patch', (['"""builtins.input"""'], {}), "('builtins.input')\n", (131, 149), False, 'from unittest.mock import patch\n'), ((423, 446), 'unittest.mock.patch', 'patch', (['"""builtins.input"""'], {}), "('builtins.input')\n", (428, 446), False, 'from unittest.mock import patch\n'), ((452, 475), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {}), "('builtins.print')\n", (457, 475), False, 'from unittest.mock import patch\n'), ((253, 293), 'easy2fa.cli.check_input', 'cli.check_input', (['"""prompt"""'], {'default': '"""one"""'}), "('prompt', default='one')\n", (268, 293), False, 'from easy2fa import cli\n'), ((368, 408), 'easy2fa.cli.check_input', 'cli.check_input', (['"""prompt"""'], {'default': '"""one"""'}), "('prompt', default='one')\n", (383, 408), False, 'from easy2fa import cli\n'), ((723, 769), 'easy2fa.cli.check_input', 'cli.check_input', (['"""prompt"""'], {'assertion': 'assertion'}), "('prompt', assertion=assertion)\n", (738, 769), False, 'from easy2fa import cli\n')] |
nps1ngh/adversarial-bert-german-attacks-defense | bert_finetuning/data_loader.py | 3cca292ec4c3c07945f4198ae81e1f671462ed90 | from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler
from bert_finetuning.data import GermanData
class GermanDataLoader:
def __init__(
self,
data_paths,
model_name,
do_cleansing,
max_sequence_length,
batch_size=8,
dataset_cls=GermanData,
):
self.german_data = dataset_cls(
data_paths,
model_name,
max_sequence_length=max_sequence_length,
do_cleansing=do_cleansing,
)
self.batch_size = batch_size
self.create_loaders()
def create_loaders(self):
"""
Create Torch dataloaders for data splits
"""
self.german_data.text_to_tensors()
print("creating dataloaders")
train_data = TensorDataset(
self.german_data.train_inputs,
self.german_data.train_masks,
self.german_data.train_labels,
)
train_sampler = RandomSampler(train_data)
self.train_dataloader = DataLoader(
train_data, sampler=train_sampler, batch_size=self.batch_size
)
validation_data = TensorDataset(
self.german_data.validation_inputs,
self.german_data.validation_masks,
self.german_data.validation_labels,
)
validation_sampler = SequentialSampler(validation_data)
self.validation_dataloader = DataLoader(
validation_data, sampler=validation_sampler, batch_size=self.batch_size
)
test_data = TensorDataset(
self.german_data.test_inputs,
self.german_data.test_masks,
self.german_data.test_labels,
)
test_sampler = SequentialSampler(test_data)
self.test_dataloader = DataLoader(
test_data, sampler=test_sampler, batch_size=self.batch_size
)
print("finished creating dataloaders")
"""
** FOR DEBUGGING **
if __name__ == "__main__":
## define data paths
germeval_data_paths = {
"train": "./datasets/hasoc_dataset/hasoc_german_train.csv",
"dev": "./datasets/hasoc_dataset/hasoc_german_validation.csv",
"test": "./datasets/hasoc_dataset/hasoc_german_test.csv",
}
hasoc_german_data_paths = {
"train": "./datasets/hasoc_dataset/hasoc_german_train.csv",
"dev": "./datasets/hasoc_dataset/hasoc_german_validation.csv",
"test": "./datasets/hasoc_dataset/hasoc_german_test.csv",
}
## create dataloaders
print("creating germeval dataloaders...")
germ_eval_dataloader = GermanDataLoader(germeval_data_paths)
print("creating hasoc dataloaders...")
hasoc_german_dataloader = GermanDataLoader(hasoc_german_data_paths)
"""
| [((837, 946), 'torch.utils.data.TensorDataset', 'TensorDataset', (['self.german_data.train_inputs', 'self.german_data.train_masks', 'self.german_data.train_labels'], {}), '(self.german_data.train_inputs, self.german_data.train_masks,\n self.german_data.train_labels)\n', (850, 946), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1019, 1044), 'torch.utils.data.RandomSampler', 'RandomSampler', (['train_data'], {}), '(train_data)\n', (1032, 1044), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1078, 1151), 'torch.utils.data.DataLoader', 'DataLoader', (['train_data'], {'sampler': 'train_sampler', 'batch_size': 'self.batch_size'}), '(train_data, sampler=train_sampler, batch_size=self.batch_size)\n', (1088, 1151), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1205, 1330), 'torch.utils.data.TensorDataset', 'TensorDataset', (['self.german_data.validation_inputs', 'self.german_data.validation_masks', 'self.german_data.validation_labels'], {}), '(self.german_data.validation_inputs, self.german_data.\n validation_masks, self.german_data.validation_labels)\n', (1218, 1330), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1407, 1441), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['validation_data'], {}), '(validation_data)\n', (1424, 1441), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1480, 1568), 'torch.utils.data.DataLoader', 'DataLoader', (['validation_data'], {'sampler': 'validation_sampler', 'batch_size': 'self.batch_size'}), '(validation_data, sampler=validation_sampler, batch_size=self.\n batch_size)\n', (1490, 1568), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1611, 1717), 'torch.utils.data.TensorDataset', 'TensorDataset', (['self.german_data.test_inputs', 'self.german_data.test_masks', 'self.german_data.test_labels'], {}), '(self.german_data.test_inputs, self.german_data.test_masks,\n self.german_data.test_labels)\n', (1624, 1717), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1789, 1817), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['test_data'], {}), '(test_data)\n', (1806, 1817), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1850, 1921), 'torch.utils.data.DataLoader', 'DataLoader', (['test_data'], {'sampler': 'test_sampler', 'batch_size': 'self.batch_size'}), '(test_data, sampler=test_sampler, batch_size=self.batch_size)\n', (1860, 1921), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n')] |
Karthik-Ragunath/DDU | data/dirty_mnist.py | b9daae9304bdeb222857884ef8cb3b6b3d004d33 | import torch
import numpy as np
import torch.utils.data as data
from torch.utils.data import Subset
from data.fast_mnist import create_MNIST_dataset
from data.ambiguous_mnist.ambiguous_mnist_dataset import AmbiguousMNIST
def get_train_valid_loader(root, batch_size, val_seed=1, val_size=0.1, **kwargs):
error_msg = "[!] val_size should be in the range [0, 1]."
assert (val_size >= 0) and (val_size <= 1), error_msg
# load the dataset
mnist_train_dataset, _ = create_MNIST_dataset()
# AmbiguousMNIST does whiten the data itself
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
train_dataset = data.ConcatDataset(
[mnist_train_dataset, AmbiguousMNIST(root=root, train=True, device=device),]
)
valid_dataset = data.ConcatDataset(
[mnist_train_dataset, AmbiguousMNIST(root=root, train=True, device=device),]
)
num_train = len(train_dataset)
indices = list(range(num_train))
split = int(np.floor(val_size * num_train))
np.random.seed(val_seed)
np.random.shuffle(indices)
train_idx, valid_idx = indices[split:], indices[:split]
train_subset = Subset(train_dataset, train_idx)
valid_subset = Subset(valid_dataset, valid_idx)
train_loader = torch.utils.data.DataLoader(train_subset, batch_size=batch_size, num_workers=0, shuffle=True)
valid_loader = torch.utils.data.DataLoader(valid_subset, batch_size=batch_size, num_workers=0, shuffle=False)
return train_loader, valid_loader
def get_test_loader(root, batch_size, **kwargs):
# load the dataset
_, mnist_test_dataset = create_MNIST_dataset()
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
test_dataset = data.ConcatDataset(
[mnist_test_dataset, AmbiguousMNIST(root=root, train=False, device=device),]
)
test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size, shuffle=False, num_workers=0)
return test_loader
| [((480, 502), 'data.fast_mnist.create_MNIST_dataset', 'create_MNIST_dataset', ([], {}), '()\n', (500, 502), False, 'from data.fast_mnist import create_MNIST_dataset\n'), ((1015, 1039), 'numpy.random.seed', 'np.random.seed', (['val_seed'], {}), '(val_seed)\n', (1029, 1039), True, 'import numpy as np\n'), ((1044, 1070), 'numpy.random.shuffle', 'np.random.shuffle', (['indices'], {}), '(indices)\n', (1061, 1070), True, 'import numpy as np\n'), ((1151, 1183), 'torch.utils.data.Subset', 'Subset', (['train_dataset', 'train_idx'], {}), '(train_dataset, train_idx)\n', (1157, 1183), False, 'from torch.utils.data import Subset\n'), ((1203, 1235), 'torch.utils.data.Subset', 'Subset', (['valid_dataset', 'valid_idx'], {}), '(valid_dataset, valid_idx)\n', (1209, 1235), False, 'from torch.utils.data import Subset\n'), ((1256, 1353), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['train_subset'], {'batch_size': 'batch_size', 'num_workers': '(0)', 'shuffle': '(True)'}), '(train_subset, batch_size=batch_size,\n num_workers=0, shuffle=True)\n', (1283, 1353), False, 'import torch\n'), ((1370, 1468), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['valid_subset'], {'batch_size': 'batch_size', 'num_workers': '(0)', 'shuffle': '(False)'}), '(valid_subset, batch_size=batch_size,\n num_workers=0, shuffle=False)\n', (1397, 1468), False, 'import torch\n'), ((1607, 1629), 'data.fast_mnist.create_MNIST_dataset', 'create_MNIST_dataset', ([], {}), '()\n', (1627, 1629), False, 'from data.fast_mnist import create_MNIST_dataset\n'), ((1854, 1953), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['test_dataset'], {'batch_size': 'batch_size', 'shuffle': '(False)', 'num_workers': '(0)'}), '(test_dataset, batch_size=batch_size, shuffle=\n False, num_workers=0)\n', (1881, 1953), False, 'import torch\n'), ((978, 1008), 'numpy.floor', 'np.floor', (['(val_size * num_train)'], {}), '(val_size * num_train)\n', (986, 1008), True, 'import numpy as np\n'), ((589, 614), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (612, 614), False, 'import torch\n'), ((697, 749), 'data.ambiguous_mnist.ambiguous_mnist_dataset.AmbiguousMNIST', 'AmbiguousMNIST', ([], {'root': 'root', 'train': '(True)', 'device': 'device'}), '(root=root, train=True, device=device)\n', (711, 749), False, 'from data.ambiguous_mnist.ambiguous_mnist_dataset import AmbiguousMNIST\n'), ((828, 880), 'data.ambiguous_mnist.ambiguous_mnist_dataset.AmbiguousMNIST', 'AmbiguousMNIST', ([], {'root': 'root', 'train': '(True)', 'device': 'device'}), '(root=root, train=True, device=device)\n', (842, 880), False, 'from data.ambiguous_mnist.ambiguous_mnist_dataset import AmbiguousMNIST\n'), ((1667, 1692), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1690, 1692), False, 'import torch\n'), ((1773, 1826), 'data.ambiguous_mnist.ambiguous_mnist_dataset.AmbiguousMNIST', 'AmbiguousMNIST', ([], {'root': 'root', 'train': '(False)', 'device': 'device'}), '(root=root, train=False, device=device)\n', (1787, 1826), False, 'from data.ambiguous_mnist.ambiguous_mnist_dataset import AmbiguousMNIST\n')] |
jaspersnel/vantage6-server | vantage6/server/resource/recover.py | 88ad40d23cc36eaba57c170929f7ccdd0011720a | # -*- coding: utf-8 -*-
import logging
import datetime
from flask import request, render_template
from flask_jwt_extended import (
create_access_token,
decode_token
)
from jwt.exceptions import DecodeError
from flasgger import swag_from
from http import HTTPStatus
from pathlib import Path
from sqlalchemy.orm.exc import NoResultFound
from vantage6.common import logger_name
from vantage6.server import db
from vantage6.server.resource import (
ServicesResources
)
module_name = logger_name(__name__)
log = logging.getLogger(module_name)
def setup(api, api_base, services):
path = "/".join([api_base, module_name])
log.info(f'Setting up "{path}" and subdirectories')
api.add_resource(
ResetPassword,
path+'/reset',
endpoint="reset_password",
methods=('POST',),
resource_class_kwargs=services
)
api.add_resource(
RecoverPassword,
path+'/lost',
endpoint='recover_password',
methods=('POST',),
resource_class_kwargs=services
)
# ------------------------------------------------------------------------------
# Resources / API's
# ------------------------------------------------------------------------------
class ResetPassword(ServicesResources):
"""user can use recover token to reset their password."""
@swag_from(str(Path(r"swagger/post_reset_password.yaml")),
endpoint='reset_password')
def post(self):
""""submit email-adress receive token."""
# retrieve user based on email or username
body = request.get_json()
reset_token = body.get("reset_token")
password = body.get("password")
if not reset_token or not password:
return {"msg": "reset token and/or password is missing!"}, \
HTTPStatus.BAD_REQUEST
# obtain user
try:
user_id = decode_token(reset_token)['identity'].get('id')
except DecodeError:
return {"msg": "Invalid recovery token!"}, HTTPStatus.BAD_REQUEST
log.debug(user_id)
user = db.User.get(user_id)
# set password
user.set_password(password)
user.save()
log.info(f"Successfull password reset for '{user.username}'")
return {"msg": "password successfully been reset!"}, \
HTTPStatus.OK
class RecoverPassword(ServicesResources):
"""send a mail containing a recover token"""
@swag_from(str(Path(r"swagger/post_recover_password.yaml")),
endpoint='recover_password')
def post(self):
"""username or email generates a token which is mailed."""
# default return string
ret = {"msg": "If the username or email is our database you "
"will soon receive an email"}
# obtain username/email from request'
body = request.get_json()
username = body.get("username")
email = body.get("email")
if not (email or username):
return {"msg": "No username or email provided!"}, \
HTTPStatus.BAD_REQUEST
# find user in the database, if not here we stop!
try:
if username:
user = db.User.get_by_username(username)
else:
user = db.User.get_by_email(email)
except NoResultFound:
# we do not tell them.... But we won't continue either
return ret
log.info(f"Password reset requested for '{user.username}'")
# generate a token that can reset their password
expires = datetime.timedelta(hours=1)
reset_token = create_access_token(
{"id": str(user.id)}, expires_delta=expires
)
self.mail.send_email(
"password reset",
sender="[email protected]",
recipients=[user.email],
text_body=render_template("mail/reset_password_token.txt",
token=reset_token),
html_body=render_template("mail/reset_password_token.html",
token=reset_token)
)
return ret
| [((494, 515), 'vantage6.common.logger_name', 'logger_name', (['__name__'], {}), '(__name__)\n', (505, 515), False, 'from vantage6.common import logger_name\n'), ((522, 552), 'logging.getLogger', 'logging.getLogger', (['module_name'], {}), '(module_name)\n', (539, 552), False, 'import logging\n'), ((1577, 1595), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1593, 1595), False, 'from flask import request, render_template\n'), ((2094, 2114), 'vantage6.server.db.User.get', 'db.User.get', (['user_id'], {}), '(user_id)\n', (2105, 2114), False, 'from vantage6.server import db\n'), ((2862, 2880), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (2878, 2880), False, 'from flask import request, render_template\n'), ((3582, 3609), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (3600, 3609), False, 'import datetime\n'), ((1354, 1394), 'pathlib.Path', 'Path', (['"""swagger/post_reset_password.yaml"""'], {}), "('swagger/post_reset_password.yaml')\n", (1358, 1394), False, 'from pathlib import Path\n'), ((2468, 2510), 'pathlib.Path', 'Path', (['"""swagger/post_recover_password.yaml"""'], {}), "('swagger/post_recover_password.yaml')\n", (2472, 2510), False, 'from pathlib import Path\n'), ((3214, 3247), 'vantage6.server.db.User.get_by_username', 'db.User.get_by_username', (['username'], {}), '(username)\n', (3237, 3247), False, 'from vantage6.server import db\n'), ((3289, 3316), 'vantage6.server.db.User.get_by_email', 'db.User.get_by_email', (['email'], {}), '(email)\n', (3309, 3316), False, 'from vantage6.server import db\n'), ((3881, 3948), 'flask.render_template', 'render_template', (['"""mail/reset_password_token.txt"""'], {'token': 'reset_token'}), "('mail/reset_password_token.txt', token=reset_token)\n", (3896, 3948), False, 'from flask import request, render_template\n'), ((4010, 4078), 'flask.render_template', 'render_template', (['"""mail/reset_password_token.html"""'], {'token': 'reset_token'}), "('mail/reset_password_token.html', token=reset_token)\n", (4025, 4078), False, 'from flask import request, render_template\n'), ((1897, 1922), 'flask_jwt_extended.decode_token', 'decode_token', (['reset_token'], {}), '(reset_token)\n', (1909, 1922), False, 'from flask_jwt_extended import create_access_token, decode_token\n')] |
tranvietanh1991/tomodachi | examples/basic_examples/aws_sns_sqs_middleware_service.py | a815fc718b6cc42dc3fe241abb0e5a5829eba0e8 | import os
from typing import Any, Callable, Dict
import tomodachi
from tomodachi import aws_sns_sqs, aws_sns_sqs_publish
from tomodachi.discovery import AWSSNSRegistration
from tomodachi.envelope import JsonBase
async def middleware_function(
func: Callable, service: Any, message: Any, topic: str, context: Dict, *args: Any, **kwargs: Any
) -> Any:
# Functionality before function is called
service.log("middleware before")
return_value = await func(*args, **kwargs)
    # There's also the possibility to pass in extra arguments or keyword arguments, for example:
# return_value = await func(*args, id='overridden', **kwargs)
    # Functionality after the function is called
service.log("middleware after")
return return_value
class ExampleAWSSNSSQSService(tomodachi.Service):
name = "example-aws-sns-sqs-service"
log_level = "INFO"
uuid = str(os.environ.get("SERVICE_UUID") or "")
# Build own "discovery" functions, to be run on start and stop
# See tomodachi/discovery/aws_sns_registration.py for example
discovery = [AWSSNSRegistration]
# The message envelope class defines how a message should be processed when sent and received
# See tomodachi/envelope/json_base.py for a basic example using JSON and transferring some metadata
message_envelope = JsonBase
# Adds a middleware function that is run on every incoming message.
# Several middlewares can be chained.
message_middleware = [middleware_function]
# Some options can be specified to define credentials, used ports, hostnames, access log, etc.
options = {
"aws_sns_sqs": {
"region_name": None, # specify AWS region (example: 'eu-west-1')
"aws_access_key_id": None, # specify AWS access key (example: 'AKIAXNTIENCJIY2STOCI')
"aws_secret_access_key": None, # specify AWS secret key (example: 'f7sha92hNotarealsecretkeyn29ShnSYQi3nzgA')
},
"aws_endpoint_urls": {
"sns": None, # For example 'http://localhost:4575' if localstack is used for testing
"sqs": None, # For example 'http://localhost:4576' if localstack is used for testing
},
}
@aws_sns_sqs("example-route1")
async def route1a(self, data: Any) -> None:
self.log('Received data (function: route1a) - "{}"'.format(data))
async def _started_service(self) -> None:
async def publish(data: Any, topic: str) -> None:
self.log('Publish data "{}"'.format(data))
await aws_sns_sqs_publish(self, data, topic=topic, wait=False)
await publish("友達", "example-route1")
| [((2199, 2228), 'tomodachi.aws_sns_sqs', 'aws_sns_sqs', (['"""example-route1"""'], {}), "('example-route1')\n", (2210, 2228), False, 'from tomodachi import aws_sns_sqs, aws_sns_sqs_publish\n'), ((891, 921), 'os.environ.get', 'os.environ.get', (['"""SERVICE_UUID"""'], {}), "('SERVICE_UUID')\n", (905, 921), False, 'import os\n'), ((2529, 2585), 'tomodachi.aws_sns_sqs_publish', 'aws_sns_sqs_publish', (['self', 'data'], {'topic': 'topic', 'wait': '(False)'}), '(self, data, topic=topic, wait=False)\n', (2548, 2585), False, 'from tomodachi import aws_sns_sqs, aws_sns_sqs_publish\n')] |
ThitsarAung/python-exercises | ex9.py | bca97875e25f9621fc5f58ab1d360426a21efc7f | types_of_people = 10
x = f"There are {types_of_people} types of people."
binary = "binary"
do_not = "don't"
y = f"Those who know {binary} and those who {do_not}."
print(x)
print(y)
print(f"I said: {x}")
print(f"I also said: '{y}'")
hilarious = False
joke_evaluation = "Isn't that joke so funny?! {}"
print(joke_evaluation.format(hilarious))
w="This is the left side of..."
e="a string with a right side."
print(w + e)
| [] |
2yz/MMdnn | mmdnn/conversion/caffe/writer.py | 13d909e4b591a5043b74b611e412c3c0a5eba0cc | import base64
from google.protobuf import json_format
from importlib import import_module
import json
import numpy as np
import os
import sys
from mmdnn.conversion.caffe.errors import ConversionError
from mmdnn.conversion.caffe.common_graph import fetch_attr_value
from mmdnn.conversion.caffe.utils import get_lower_case, get_upper_case, get_real_name
class JsonFormatter(object):
    '''Dump a DL graph into a JSON file.'''
def __init__(self, graph):
self.graph_def = graph.as_graph_def()
def dump(self, json_path):
json_txt = json_format.MessageToJson(self.graph_def)
parsed = json.loads(json_txt)
formatted = json.dumps(parsed, indent=4, sort_keys=True)
with open(json_path, 'w') as f:
f.write(formatted)
class PyWriter(object):
    '''Dump a DL graph into a Python script.'''
def __init__(self, graph, data, target):
self.graph = graph
self.data = data
self.tab = ' ' * 4
self.prefix = ''
target = target.lower()
if target == 'tensorflow':
self.target = target
self.net = 'TensorFlowNetwork'
elif target == 'keras':
self.target = target
self.net = 'KerasNetwork'
elif target == 'caffe':
self.target = target
self.net = 'CaffeNetwork'
else:
raise ConversionError('Target %s is not supported yet.' % target)
def indent(self):
self.prefix += self.tab
def outdent(self):
self.prefix = self.prefix[:-len(self.tab)]
def statement(self, s):
return self.prefix + s + '\n'
def emit_imports(self):
return self.statement('from dlconv.%s import %s\n' % (self.target, self.net))
def emit_class_def(self, name):
return self.statement('class %s(%s):' % (name, self.net))
def emit_setup_def(self):
return self.statement('def setup(self):')
def emit_node(self, node):
'''Emits the Python source for this node.'''
def pair(key, value):
return '%s=%s' % (key, value)
args = []
for input in node.input:
input = input.strip().split(':')
name = ''.join(input[:-1])
idx = int(input[-1])
assert name in self.graph.node_dict
parent = self.graph.get_node(name)
args.append(parent.output[idx])
#FIXME:
output = [node.output[0]]
# output = node.output
for k, v in node.attr:
if k == 'cell_type':
args.append(pair(k, "'" + fetch_attr_value(v) + "'"))
else:
args.append(pair(k, fetch_attr_value(v)))
args.append(pair('name', "'" + node.name + "'")) # Set the node name
args = ', '.join(args)
return self.statement('%s = self.%s(%s)' % (', '.join(output), node.op, args))
def dump(self, code_output_dir):
if not os.path.exists(code_output_dir):
os.makedirs(code_output_dir)
file_name = get_lower_case(self.graph.name)
code_output_path = os.path.join(code_output_dir, file_name + '.py')
data_output_path = os.path.join(code_output_dir, file_name + '.npy')
with open(code_output_path, 'w') as f:
f.write(self.emit())
with open(data_output_path, 'wb') as f:
np.save(f, self.data)
return code_output_path, data_output_path
def emit(self):
# Decompose DAG into chains
chains = []
for node in self.graph.topologically_sorted():
attach_to_chain = None
if len(node.input) == 1:
parent = get_real_name(node.input[0])
for chain in chains:
if chain[-1].name == parent: # Node is part of an existing chain.
attach_to_chain = chain
break
if attach_to_chain is None: # Start a new chain for this node.
attach_to_chain = []
chains.append(attach_to_chain)
attach_to_chain.append(node)
# Generate Python code line by line
source = self.emit_imports()
source += self.emit_class_def(self.graph.name)
self.indent()
source += self.emit_setup_def()
self.indent()
blocks = []
for chain in chains:
b = ''
for node in chain:
b += self.emit_node(node)
blocks.append(b[:-1])
source += '\n\n'.join(blocks)
return source
class ModelSaver(object):
def __init__(self, code_output_path, data_output_path):
self.code_output_path = code_output_path
self.data_output_path = data_output_path
def dump(self, model_output_dir):
'''Return the file path containing graph in generated model files.'''
if not os.path.exists(model_output_dir):
os.makedirs(model_output_dir)
sys.path.append(os.path.dirname(self.code_output_path))
file_name = os.path.splitext(os.path.basename(self.code_output_path))[0]
module = import_module(file_name)
class_name = get_upper_case(file_name)
net = getattr(module, class_name)
return net.dump(self.data_output_path, model_output_dir)
class GraphDrawer(object):
def __init__(self, toolkit, meta_path):
self.toolkit = toolkit.lower()
self.meta_path = meta_path
def dump(self, graph_path):
if self.toolkit == 'tensorflow':
from dlconv.tensorflow.visualizer import TensorFlowVisualizer
if self._is_web_page(graph_path):
TensorFlowVisualizer(self.meta_path).dump_html(graph_path)
else:
                raise NotImplementedError('Image format of %s is unsupported!' % graph_path)
elif self.toolkit == 'keras':
from dlconv.keras.visualizer import KerasVisualizer
png_path, html_path = (None, None)
if graph_path.endswith('.png'):
png_path = graph_path
elif self._is_web_page(graph_path):
png_path = graph_path + ".png"
html_path = graph_path
else:
                raise NotImplementedError('Image format of %s is unsupported!' % graph_path)
KerasVisualizer(self.meta_path).dump_png(png_path)
if html_path:
self._png_to_html(png_path, html_path)
os.remove(png_path)
else:
raise NotImplementedError('Visualization of %s is unsupported!' % self.toolkit)
def _is_web_page(self, path):
return path.split('.')[-1] in ('html', 'htm')
def _png_to_html(self, png_path, html_path):
with open(png_path, "rb") as f:
encoded = base64.b64encode(f.read()).decode('utf-8')
source = """<!DOCTYPE>
<html>
<head>
<meta charset="utf-8">
<title>Keras</title>
</head>
<body>
<img alt="Model Graph" src="data:image/png;base64,{base64_str}" />
</body>
</html>""".format(base64_str=encoded)
with open(html_path, 'w', encoding='utf-8') as f:
f.write(source) | [((558, 599), 'google.protobuf.json_format.MessageToJson', 'json_format.MessageToJson', (['self.graph_def'], {}), '(self.graph_def)\n', (583, 599), False, 'from google.protobuf import json_format\n'), ((617, 637), 'json.loads', 'json.loads', (['json_txt'], {}), '(json_txt)\n', (627, 637), False, 'import json\n'), ((658, 702), 'json.dumps', 'json.dumps', (['parsed'], {'indent': '(4)', 'sort_keys': '(True)'}), '(parsed, indent=4, sort_keys=True)\n', (668, 702), False, 'import json\n'), ((3046, 3077), 'mmdnn.conversion.caffe.utils.get_lower_case', 'get_lower_case', (['self.graph.name'], {}), '(self.graph.name)\n', (3060, 3077), False, 'from mmdnn.conversion.caffe.utils import get_lower_case, get_upper_case, get_real_name\n'), ((3105, 3153), 'os.path.join', 'os.path.join', (['code_output_dir', "(file_name + '.py')"], {}), "(code_output_dir, file_name + '.py')\n", (3117, 3153), False, 'import os\n'), ((3181, 3230), 'os.path.join', 'os.path.join', (['code_output_dir', "(file_name + '.npy')"], {}), "(code_output_dir, file_name + '.npy')\n", (3193, 3230), False, 'import os\n'), ((5127, 5151), 'importlib.import_module', 'import_module', (['file_name'], {}), '(file_name)\n', (5140, 5151), False, 'from importlib import import_module\n'), ((5173, 5198), 'mmdnn.conversion.caffe.utils.get_upper_case', 'get_upper_case', (['file_name'], {}), '(file_name)\n', (5187, 5198), False, 'from mmdnn.conversion.caffe.utils import get_lower_case, get_upper_case, get_real_name\n'), ((2952, 2983), 'os.path.exists', 'os.path.exists', (['code_output_dir'], {}), '(code_output_dir)\n', (2966, 2983), False, 'import os\n'), ((2997, 3025), 'os.makedirs', 'os.makedirs', (['code_output_dir'], {}), '(code_output_dir)\n', (3008, 3025), False, 'import os\n'), ((3371, 3392), 'numpy.save', 'np.save', (['f', 'self.data'], {}), '(f, self.data)\n', (3378, 3392), True, 'import numpy as np\n'), ((4889, 4921), 'os.path.exists', 'os.path.exists', (['model_output_dir'], {}), '(model_output_dir)\n', (4903, 4921), False, 'import os\n'), ((4935, 4964), 'os.makedirs', 'os.makedirs', (['model_output_dir'], {}), '(model_output_dir)\n', (4946, 4964), False, 'import os\n'), ((4989, 5027), 'os.path.dirname', 'os.path.dirname', (['self.code_output_path'], {}), '(self.code_output_path)\n', (5004, 5027), False, 'import os\n'), ((3672, 3700), 'mmdnn.conversion.caffe.utils.get_real_name', 'get_real_name', (['node.input[0]'], {}), '(node.input[0])\n', (3685, 3700), False, 'from mmdnn.conversion.caffe.utils import get_lower_case, get_upper_case, get_real_name\n'), ((5066, 5105), 'os.path.basename', 'os.path.basename', (['self.code_output_path'], {}), '(self.code_output_path)\n', (5082, 5105), False, 'import os\n'), ((1384, 1443), 'mmdnn.conversion.caffe.errors.ConversionError', 'ConversionError', (["('Target %s is not supported yet.' % target)"], {}), "('Target %s is not supported yet.' 
% target)\n", (1399, 1443), False, 'from mmdnn.conversion.caffe.errors import ConversionError\n'), ((6470, 6489), 'os.remove', 'os.remove', (['png_path'], {}), '(png_path)\n', (6479, 6489), False, 'import os\n'), ((2682, 2701), 'mmdnn.conversion.caffe.common_graph.fetch_attr_value', 'fetch_attr_value', (['v'], {}), '(v)\n', (2698, 2701), False, 'from mmdnn.conversion.caffe.common_graph import fetch_attr_value\n'), ((5664, 5700), 'dlconv.tensorflow.visualizer.TensorFlowVisualizer', 'TensorFlowVisualizer', (['self.meta_path'], {}), '(self.meta_path)\n', (5684, 5700), False, 'from dlconv.tensorflow.visualizer import TensorFlowVisualizer\n'), ((6322, 6353), 'dlconv.keras.visualizer.KerasVisualizer', 'KerasVisualizer', (['self.meta_path'], {}), '(self.meta_path)\n', (6337, 6353), False, 'from dlconv.keras.visualizer import KerasVisualizer\n'), ((2600, 2619), 'mmdnn.conversion.caffe.common_graph.fetch_attr_value', 'fetch_attr_value', (['v'], {}), '(v)\n', (2616, 2619), False, 'from mmdnn.conversion.caffe.common_graph import fetch_attr_value\n')] |
LionTao/algo_weekend | week1/85-maximal-rectangle.py | d25756761d47491b8c78ecf8a857080497910c76 | """
leetcode-85
Given a rows x cols binary matrix containing only 0s and 1s, find the largest rectangle containing only 1s and return its area.
"""
from typing import List
class Solution:
def maximalRectangle(self, matrix: List[List[str]]) -> int:
"""
        Build a histogram of column heights row by row, then use a monotonically increasing stack.
"""
rows = len(matrix)
if rows == 0:
return 0
columns = len(matrix[0])
res = 0
heights = [0]*columns
for r in range(rows):
for c in range(columns):
if matrix[r][c]=="1":
heights[c]+=1
else:
heights[c]=0
            res = max(res, self.largestRectangleArea(heights))
        return res
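    # Worked example (illustrative, not part of the original solution):
    #   matrix = [["1","0","1","0","0"],
    #             ["1","0","1","1","1"],
    #             ["1","1","1","1","1"],
    #             ["1","0","0","1","0"]]
    # The per-row histograms are [1,0,1,0,0], [2,0,2,1,1], [3,1,3,2,2] and
    # [4,0,0,3,0]; the best rectangle has area 6 (rows 2-3, columns 2-4).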
def largestRectangleArea(self, heights: List[int]) -> int:
        # monotonically increasing stack
heights = [-1] + heights + [-1]
res = 0
ascend_stack = []
for i in range(len(heights)):
while ascend_stack and heights[ascend_stack[-1]] > heights[i]:
window_L_height_min_height = heights[ascend_stack.pop(-1)]
window_L = ascend_stack[-1] + 1
window_R = i - 1
cur_area = window_L_height_min_height * (window_R - window_L + 1)
res = max(res, cur_area)
ascend_stack.append(i)
return res | [] |
mathildebadoual/pandapower | pandapower/test/opf/test_costs_pwl.py | 9ba4bcb78e84b644d2ba6df0c08e285c54af8ddc | # -*- coding: utf-8 -*-
# Copyright (c) 2016-2018 by University of Kassel and Fraunhofer Institute for Energy Economics
# and Energy System Technology (IEE), Kassel. All rights reserved.
import numpy as np
import pytest
from pandapower.optimal_powerflow import OPFNotConverged
import pandapower as pp
try:
import pplog as logging
except ImportError:
import logging
logger = logging.getLogger(__name__)
logger.setLevel("DEBUG")
def test_cost_piecewise_linear_gen():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_gen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "gen", np.array([[-150, -100], [-75, -50], [0, 0]]))
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert net.res_cost - net.res_gen.p_kw.values / 1.5 < 1e-3
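# Illustration (not part of the original test): the points passed to
# pp.create_piecewise_linear_cost above, [[-150, -100], [-75, -50], [0, 0]],
# describe the linear cost curve cost = p_kw / 1.5, e.g. dispatching the
# generator at p_kw = -90 yields a cost of -60, which is what the assertion
# above compares against net.res_cost.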
def test_cost_piecewise_linear_eg():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10)
pp.create_ext_grid(net, 0, max_p_kw=0, min_p_kw=-50)
pp.create_gen(net, 1, p_kw=-10, max_p_kw=0, min_p_kw=-50, controllable=True)
# pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "ext_grid", np.array([[-50, -500], [0, 0]]))
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert net.res_cost - - net.res_ext_grid.p_kw.values * 10 < 1e-3
# check and assert result
def test_get_costs():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_gen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "gen", np.array([[-150, -300], [0, 0]]))
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert net.res_cost == 2 * net.res_gen.p_kw.values
# check and assert result
def test_cost_piecewise_linear_sgen():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_sgen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "sgen", np.array([[-150, -100], [-75, -50], [0, 0]]))
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert net.res_cost - net.res_sgen.p_kw.values / 1.5 < 1e-3
def test_cost_piecewise_linear_load():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_load(net, 1, p_kw=100, controllable=True, max_p_kw=150, min_p_kw=50, max_q_kvar=0,
min_q_kvar=0)
pp.create_ext_grid(net, 0)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "load", np.array([[0, 0], [75, 50], [150, 100]]))
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert abs(net.res_cost - net.res_load.p_kw.values / 1.5) < 1e-3
def test_cost_piecewise_linear_sgen_uneven_slopes():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_sgen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "sgen", np.array([[-150, -200], [-75, -50], [0, 0]]))
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert net.res_cost - net.res_sgen.p_kw.values / 1.5 < 1e-3
def test_cost_piecewise_linear_load_uneven_slopes():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_load(net, 1, p_kw=100, controllable=True, max_p_kw=150, min_p_kw=50, max_q_kvar=0,
min_q_kvar=0)
pp.create_ext_grid(net, 0)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "load", np.array([[0, 0], [75, 51], [150, 101]]))
# run OPF
with pytest.raises(OPFNotConverged):
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert abs(net.res_cost - net.res_load.p_kw.values / 1.5) < 1e-3
def test_cost_piecewise_linear_sgen_very_unsteady_slopes():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.5
vm_min = 0.5
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_sgen(net, 1, p_kw=-1000, controllable=True, max_p_kw=0, min_p_kw=-1500,
max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "sgen", np.array([[-1500, 2],[-750,1 ], [0,2]]))
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
# assert net.res_cost - net.res_sgen.p_kw.values / 1.5 < 1e-3
if __name__ == "__main__":
# test_cost_piecewise_linear_sgen_very_unsteady_slopes()
pytest.main(["test_costs_pwl.py", "-s"])
| [((389, 416), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (406, 416), False, 'import logging\n'), ((660, 685), 'pandapower.create_empty_network', 'pp.create_empty_network', ([], {}), '()\n', (683, 685), True, 'import pandapower as pp\n'), ((690, 756), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(10.0)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.0)\n', (703, 756), True, 'import pandapower as pp\n'), ((760, 825), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(0.4)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=0.4)\n', (773, 825), True, 'import pandapower as pp\n'), ((829, 944), 'pandapower.create_gen', 'pp.create_gen', (['net', '(1)'], {'p_kw': '(-100)', 'controllable': '(True)', 'max_p_kw': '(-5)', 'min_p_kw': '(-150)', 'max_q_kvar': '(50)', 'min_q_kvar': '(-50)'}), '(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-\n 150, max_q_kvar=50, min_q_kvar=-50)\n', (842, 944), True, 'import pandapower as pp\n'), ((962, 988), 'pandapower.create_ext_grid', 'pp.create_ext_grid', (['net', '(0)'], {}), '(net, 0)\n', (980, 988), True, 'import pandapower as pp\n'), ((993, 1044), 'pandapower.create_load', 'pp.create_load', (['net', '(1)'], {'p_kw': '(20)', 'controllable': '(False)'}), '(net, 1, p_kw=20, controllable=False)\n', (1007, 1044), True, 'import pandapower as pp\n'), ((1049, 1227), 'pandapower.create_line_from_parameters', 'pp.create_line_from_parameters', (['net', '(0)', '(1)', '(50)'], {'name': '"""line2"""', 'r_ohm_per_km': '(0.876)', 'c_nf_per_km': '(260.0)', 'max_i_ka': '(0.123)', 'x_ohm_per_km': '(0.1159876)', 'max_loading_percent': '(100 * 690)'}), "(net, 0, 1, 50, name='line2', r_ohm_per_km=\n 0.876, c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,\n max_loading_percent=100 * 690)\n", (1079, 1227), True, 'import pandapower as pp\n'), ((1405, 1434), 'pandapower.runopp', 'pp.runopp', (['net'], {'verbose': '(False)'}), '(net, verbose=False)\n', (1414, 1434), True, 'import pandapower as pp\n'), ((1749, 1774), 'pandapower.create_empty_network', 'pp.create_empty_network', ([], {}), '()\n', (1772, 1774), True, 'import pandapower as pp\n'), ((1779, 1845), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(10.0)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.0)\n', (1792, 1845), True, 'import pandapower as pp\n'), ((1849, 1913), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(10)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10)\n', (1862, 1913), True, 'import pandapower as pp\n'), ((1918, 1970), 'pandapower.create_ext_grid', 'pp.create_ext_grid', (['net', '(0)'], {'max_p_kw': '(0)', 'min_p_kw': '(-50)'}), '(net, 0, max_p_kw=0, min_p_kw=-50)\n', (1936, 1970), True, 'import pandapower as pp\n'), ((1975, 2051), 'pandapower.create_gen', 'pp.create_gen', (['net', '(1)'], {'p_kw': '(-10)', 'max_p_kw': '(0)', 'min_p_kw': '(-50)', 'controllable': '(True)'}), '(net, 1, p_kw=-10, max_p_kw=0, min_p_kw=-50, controllable=True)\n', (1988, 2051), True, 'import pandapower as pp\n'), ((2089, 2140), 'pandapower.create_load', 'pp.create_load', (['net', '(1)'], {'p_kw': '(20)', 'controllable': '(False)'}), '(net, 1, p_kw=20, controllable=False)\n', (2103, 2140), True, 'import pandapower as pp\n'), ((2145, 2323), 'pandapower.create_line_from_parameters', 
'pp.create_line_from_parameters', (['net', '(0)', '(1)', '(50)'], {'name': '"""line2"""', 'r_ohm_per_km': '(0.876)', 'c_nf_per_km': '(260.0)', 'max_i_ka': '(0.123)', 'x_ohm_per_km': '(0.1159876)', 'max_loading_percent': '(100 * 690)'}), "(net, 0, 1, 50, name='line2', r_ohm_per_km=\n 0.876, c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,\n max_loading_percent=100 * 690)\n", (2175, 2323), True, 'import pandapower as pp\n'), ((2493, 2522), 'pandapower.runopp', 'pp.runopp', (['net'], {'verbose': '(False)'}), '(net, verbose=False)\n', (2502, 2522), True, 'import pandapower as pp\n'), ((2858, 2883), 'pandapower.create_empty_network', 'pp.create_empty_network', ([], {}), '()\n', (2881, 2883), True, 'import pandapower as pp\n'), ((2888, 2954), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(10.0)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.0)\n', (2901, 2954), True, 'import pandapower as pp\n'), ((2958, 3023), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(0.4)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=0.4)\n', (2971, 3023), True, 'import pandapower as pp\n'), ((3027, 3142), 'pandapower.create_gen', 'pp.create_gen', (['net', '(1)'], {'p_kw': '(-100)', 'controllable': '(True)', 'max_p_kw': '(-5)', 'min_p_kw': '(-150)', 'max_q_kvar': '(50)', 'min_q_kvar': '(-50)'}), '(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-\n 150, max_q_kvar=50, min_q_kvar=-50)\n', (3040, 3142), True, 'import pandapower as pp\n'), ((3160, 3186), 'pandapower.create_ext_grid', 'pp.create_ext_grid', (['net', '(0)'], {}), '(net, 0)\n', (3178, 3186), True, 'import pandapower as pp\n'), ((3191, 3242), 'pandapower.create_load', 'pp.create_load', (['net', '(1)'], {'p_kw': '(20)', 'controllable': '(False)'}), '(net, 1, p_kw=20, controllable=False)\n', (3205, 3242), True, 'import pandapower as pp\n'), ((3247, 3425), 'pandapower.create_line_from_parameters', 'pp.create_line_from_parameters', (['net', '(0)', '(1)', '(50)'], {'name': '"""line2"""', 'r_ohm_per_km': '(0.876)', 'c_nf_per_km': '(260.0)', 'max_i_ka': '(0.123)', 'x_ohm_per_km': '(0.1159876)', 'max_loading_percent': '(100 * 690)'}), "(net, 0, 1, 50, name='line2', r_ohm_per_km=\n 0.876, c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,\n max_loading_percent=100 * 690)\n", (3277, 3425), True, 'import pandapower as pp\n'), ((3591, 3620), 'pandapower.runopp', 'pp.runopp', (['net'], {'verbose': '(False)'}), '(net, verbose=False)\n', (3600, 3620), True, 'import pandapower as pp\n'), ((3960, 3985), 'pandapower.create_empty_network', 'pp.create_empty_network', ([], {}), '()\n', (3983, 3985), True, 'import pandapower as pp\n'), ((3990, 4056), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(10.0)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.0)\n', (4003, 4056), True, 'import pandapower as pp\n'), ((4060, 4125), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(0.4)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=0.4)\n', (4073, 4125), True, 'import pandapower as pp\n'), ((4129, 4245), 'pandapower.create_sgen', 'pp.create_sgen', (['net', '(1)'], {'p_kw': '(-100)', 'controllable': '(True)', 'max_p_kw': '(-5)', 'min_p_kw': '(-150)', 'max_q_kvar': '(50)', 'min_q_kvar': '(-50)'}), '(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=\n -150, max_q_kvar=50, 
min_q_kvar=-50)\n', (4143, 4245), True, 'import pandapower as pp\n'), ((4264, 4290), 'pandapower.create_ext_grid', 'pp.create_ext_grid', (['net', '(0)'], {}), '(net, 0)\n', (4282, 4290), True, 'import pandapower as pp\n'), ((4295, 4346), 'pandapower.create_load', 'pp.create_load', (['net', '(1)'], {'p_kw': '(20)', 'controllable': '(False)'}), '(net, 1, p_kw=20, controllable=False)\n', (4309, 4346), True, 'import pandapower as pp\n'), ((4351, 4529), 'pandapower.create_line_from_parameters', 'pp.create_line_from_parameters', (['net', '(0)', '(1)', '(50)'], {'name': '"""line2"""', 'r_ohm_per_km': '(0.876)', 'c_nf_per_km': '(260.0)', 'max_i_ka': '(0.123)', 'x_ohm_per_km': '(0.1159876)', 'max_loading_percent': '(100 * 690)'}), "(net, 0, 1, 50, name='line2', r_ohm_per_km=\n 0.876, c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,\n max_loading_percent=100 * 690)\n", (4381, 4529), True, 'import pandapower as pp\n'), ((4708, 4737), 'pandapower.runopp', 'pp.runopp', (['net'], {'verbose': '(False)'}), '(net, verbose=False)\n', (4717, 4737), True, 'import pandapower as pp\n'), ((5055, 5080), 'pandapower.create_empty_network', 'pp.create_empty_network', ([], {}), '()\n', (5078, 5080), True, 'import pandapower as pp\n'), ((5085, 5151), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(10.0)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.0)\n', (5098, 5151), True, 'import pandapower as pp\n'), ((5155, 5220), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(0.4)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=0.4)\n', (5168, 5220), True, 'import pandapower as pp\n'), ((5224, 5335), 'pandapower.create_load', 'pp.create_load', (['net', '(1)'], {'p_kw': '(100)', 'controllable': '(True)', 'max_p_kw': '(150)', 'min_p_kw': '(50)', 'max_q_kvar': '(0)', 'min_q_kvar': '(0)'}), '(net, 1, p_kw=100, controllable=True, max_p_kw=150, min_p_kw=\n 50, max_q_kvar=0, min_q_kvar=0)\n', (5238, 5335), True, 'import pandapower as pp\n'), ((5354, 5380), 'pandapower.create_ext_grid', 'pp.create_ext_grid', (['net', '(0)'], {}), '(net, 0)\n', (5372, 5380), True, 'import pandapower as pp\n'), ((5385, 5563), 'pandapower.create_line_from_parameters', 'pp.create_line_from_parameters', (['net', '(0)', '(1)', '(50)'], {'name': '"""line2"""', 'r_ohm_per_km': '(0.876)', 'c_nf_per_km': '(260.0)', 'max_i_ka': '(0.123)', 'x_ohm_per_km': '(0.1159876)', 'max_loading_percent': '(100 * 690)'}), "(net, 0, 1, 50, name='line2', r_ohm_per_km=\n 0.876, c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,\n max_loading_percent=100 * 690)\n", (5415, 5563), True, 'import pandapower as pp\n'), ((5738, 5767), 'pandapower.runopp', 'pp.runopp', (['net'], {'verbose': '(False)'}), '(net, verbose=False)\n', (5747, 5767), True, 'import pandapower as pp\n'), ((6103, 6128), 'pandapower.create_empty_network', 'pp.create_empty_network', ([], {}), '()\n', (6126, 6128), True, 'import pandapower as pp\n'), ((6133, 6199), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(10.0)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.0)\n', (6146, 6199), True, 'import pandapower as pp\n'), ((6203, 6268), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(0.4)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=0.4)\n', (6216, 6268), True, 'import pandapower as pp\n'), ((6272, 6388), 
'pandapower.create_sgen', 'pp.create_sgen', (['net', '(1)'], {'p_kw': '(-100)', 'controllable': '(True)', 'max_p_kw': '(-5)', 'min_p_kw': '(-150)', 'max_q_kvar': '(50)', 'min_q_kvar': '(-50)'}), '(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=\n -150, max_q_kvar=50, min_q_kvar=-50)\n', (6286, 6388), True, 'import pandapower as pp\n'), ((6407, 6433), 'pandapower.create_ext_grid', 'pp.create_ext_grid', (['net', '(0)'], {}), '(net, 0)\n', (6425, 6433), True, 'import pandapower as pp\n'), ((6438, 6489), 'pandapower.create_load', 'pp.create_load', (['net', '(1)'], {'p_kw': '(20)', 'controllable': '(False)'}), '(net, 1, p_kw=20, controllable=False)\n', (6452, 6489), True, 'import pandapower as pp\n'), ((6494, 6672), 'pandapower.create_line_from_parameters', 'pp.create_line_from_parameters', (['net', '(0)', '(1)', '(50)'], {'name': '"""line2"""', 'r_ohm_per_km': '(0.876)', 'c_nf_per_km': '(260.0)', 'max_i_ka': '(0.123)', 'x_ohm_per_km': '(0.1159876)', 'max_loading_percent': '(100 * 690)'}), "(net, 0, 1, 50, name='line2', r_ohm_per_km=\n 0.876, c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,\n max_loading_percent=100 * 690)\n", (6524, 6672), True, 'import pandapower as pp\n'), ((6851, 6880), 'pandapower.runopp', 'pp.runopp', (['net'], {'verbose': '(False)'}), '(net, verbose=False)\n', (6860, 6880), True, 'import pandapower as pp\n'), ((7212, 7237), 'pandapower.create_empty_network', 'pp.create_empty_network', ([], {}), '()\n', (7235, 7237), True, 'import pandapower as pp\n'), ((7242, 7308), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(10.0)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.0)\n', (7255, 7308), True, 'import pandapower as pp\n'), ((7312, 7377), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(0.4)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=0.4)\n', (7325, 7377), True, 'import pandapower as pp\n'), ((7381, 7492), 'pandapower.create_load', 'pp.create_load', (['net', '(1)'], {'p_kw': '(100)', 'controllable': '(True)', 'max_p_kw': '(150)', 'min_p_kw': '(50)', 'max_q_kvar': '(0)', 'min_q_kvar': '(0)'}), '(net, 1, p_kw=100, controllable=True, max_p_kw=150, min_p_kw=\n 50, max_q_kvar=0, min_q_kvar=0)\n', (7395, 7492), True, 'import pandapower as pp\n'), ((7511, 7537), 'pandapower.create_ext_grid', 'pp.create_ext_grid', (['net', '(0)'], {}), '(net, 0)\n', (7529, 7537), True, 'import pandapower as pp\n'), ((7542, 7720), 'pandapower.create_line_from_parameters', 'pp.create_line_from_parameters', (['net', '(0)', '(1)', '(50)'], {'name': '"""line2"""', 'r_ohm_per_km': '(0.876)', 'c_nf_per_km': '(260.0)', 'max_i_ka': '(0.123)', 'x_ohm_per_km': '(0.1159876)', 'max_loading_percent': '(100 * 690)'}), "(net, 0, 1, 50, name='line2', r_ohm_per_km=\n 0.876, c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,\n max_loading_percent=100 * 690)\n", (7572, 7720), True, 'import pandapower as pp\n'), ((8318, 8343), 'pandapower.create_empty_network', 'pp.create_empty_network', ([], {}), '()\n', (8341, 8343), True, 'import pandapower as pp\n'), ((8348, 8414), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(10.0)'}), '(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.0)\n', (8361, 8414), True, 'import pandapower as pp\n'), ((8418, 8483), 'pandapower.create_bus', 'pp.create_bus', (['net'], {'max_vm_pu': 'vm_max', 'min_vm_pu': 'vm_min', 'vn_kv': '(0.4)'}), '(net, max_vm_pu=vm_max, 
min_vm_pu=vm_min, vn_kv=0.4)\n', (8431, 8483), True, 'import pandapower as pp\n'), ((8487, 8604), 'pandapower.create_sgen', 'pp.create_sgen', (['net', '(1)'], {'p_kw': '(-1000)', 'controllable': '(True)', 'max_p_kw': '(0)', 'min_p_kw': '(-1500)', 'max_q_kvar': '(50)', 'min_q_kvar': '(-50)'}), '(net, 1, p_kw=-1000, controllable=True, max_p_kw=0, min_p_kw=\n -1500, max_q_kvar=50, min_q_kvar=-50)\n', (8501, 8604), True, 'import pandapower as pp\n'), ((8642, 8668), 'pandapower.create_ext_grid', 'pp.create_ext_grid', (['net', '(0)'], {}), '(net, 0)\n', (8660, 8668), True, 'import pandapower as pp\n'), ((8673, 8724), 'pandapower.create_load', 'pp.create_load', (['net', '(1)'], {'p_kw': '(20)', 'controllable': '(False)'}), '(net, 1, p_kw=20, controllable=False)\n', (8687, 8724), True, 'import pandapower as pp\n'), ((8729, 8907), 'pandapower.create_line_from_parameters', 'pp.create_line_from_parameters', (['net', '(0)', '(1)', '(50)'], {'name': '"""line2"""', 'r_ohm_per_km': '(0.876)', 'c_nf_per_km': '(260.0)', 'max_i_ka': '(0.123)', 'x_ohm_per_km': '(0.1159876)', 'max_loading_percent': '(100 * 690)'}), "(net, 0, 1, 50, name='line2', r_ohm_per_km=\n 0.876, c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,\n max_loading_percent=100 * 690)\n", (8759, 8907), True, 'import pandapower as pp\n'), ((9081, 9110), 'pandapower.runopp', 'pp.runopp', (['net'], {'verbose': '(False)'}), '(net, verbose=False)\n', (9090, 9110), True, 'import pandapower as pp\n'), ((9306, 9346), 'pytest.main', 'pytest.main', (["['test_costs_pwl.py', '-s']"], {}), "(['test_costs_pwl.py', '-s'])\n", (9317, 9346), False, 'import pytest\n'), ((1341, 1385), 'numpy.array', 'np.array', (['[[-150, -100], [-75, -50], [0, 0]]'], {}), '([[-150, -100], [-75, -50], [0, 0]])\n', (1349, 1385), True, 'import numpy as np\n'), ((2442, 2473), 'numpy.array', 'np.array', (['[[-50, -500], [0, 0]]'], {}), '([[-50, -500], [0, 0]])\n', (2450, 2473), True, 'import numpy as np\n'), ((3539, 3571), 'numpy.array', 'np.array', (['[[-150, -300], [0, 0]]'], {}), '([[-150, -300], [0, 0]])\n', (3547, 3571), True, 'import numpy as np\n'), ((4644, 4688), 'numpy.array', 'np.array', (['[[-150, -100], [-75, -50], [0, 0]]'], {}), '([[-150, -100], [-75, -50], [0, 0]])\n', (4652, 4688), True, 'import numpy as np\n'), ((5678, 5718), 'numpy.array', 'np.array', (['[[0, 0], [75, 50], [150, 100]]'], {}), '([[0, 0], [75, 50], [150, 100]])\n', (5686, 5718), True, 'import numpy as np\n'), ((6787, 6831), 'numpy.array', 'np.array', (['[[-150, -200], [-75, -50], [0, 0]]'], {}), '([[-150, -200], [-75, -50], [0, 0]])\n', (6795, 6831), True, 'import numpy as np\n'), ((7836, 7876), 'numpy.array', 'np.array', (['[[0, 0], [75, 51], [150, 101]]'], {}), '([[0, 0], [75, 51], [150, 101]])\n', (7844, 7876), True, 'import numpy as np\n'), ((7901, 7931), 'pytest.raises', 'pytest.raises', (['OPFNotConverged'], {}), '(OPFNotConverged)\n', (7914, 7931), False, 'import pytest\n'), ((7941, 7970), 'pandapower.runopp', 'pp.runopp', (['net'], {'verbose': '(False)'}), '(net, verbose=False)\n', (7950, 7970), True, 'import pandapower as pp\n'), ((9022, 9063), 'numpy.array', 'np.array', (['[[-1500, 2], [-750, 1], [0, 2]]'], {}), '([[-1500, 2], [-750, 1], [0, 2]])\n', (9030, 9063), True, 'import numpy as np\n')] |
guoxianru/cookie_pool_lite | cookie_refresh.py | 02c4b2009b4c8aa3306ae1f5f7c5decde1eb5f3f | # -*- coding: utf-8 -*-
# @Author: GXR
# @CreateTime: 2022-01-20
# @UpdateTime: 2022-01-20
import redis
import config
import cookie_login
from cookie_api import app
red = redis.Redis(
host=config.REDIS_HOST,
port=config.REDIS_PORT,
db=config.REDIS_DB,
decode_responses=True,
)
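# The config module imported above is assumed to provide at least the values
# below (an illustrative sketch, not part of this project):
#
#   REDIS_HOST = "127.0.0.1"
#   REDIS_PORT = 6379
#   REDIS_DB = 0
#   REDIS_KEY_COOKIE = "cookie_pool"
#   COOKIE_COUNT = 20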
# Refresh the number of cookies in the pool
def cookie_refresh():
while 1:
cookie_list = red.smembers(config.REDIS_KEY_COOKIE)
if len(cookie_list) >= config.COOKIE_COUNT:
break
cookie_login.run_cookie_login(1)
app.logger.info("[cookie数量正常]-[%s]" % len(cookie_list))
def run_cookie_refresh():
cookie_refresh()
if __name__ == "__main__":
run_cookie_refresh()
| [((174, 281), 'redis.Redis', 'redis.Redis', ([], {'host': 'config.REDIS_HOST', 'port': 'config.REDIS_PORT', 'db': 'config.REDIS_DB', 'decode_responses': '(True)'}), '(host=config.REDIS_HOST, port=config.REDIS_PORT, db=config.\n REDIS_DB, decode_responses=True)\n', (185, 281), False, 'import redis\n'), ((484, 516), 'cookie_login.run_cookie_login', 'cookie_login.run_cookie_login', (['(1)'], {}), '(1)\n', (513, 516), False, 'import cookie_login\n')] |
bitcoinfees/feemodel | feemodel/app/__init__.py | 5d582d87eca6e54eb20b81f4e21c81273a59b468 | from feemodel.app.transient import TransientOnline
from feemodel.app.pools import PoolsOnlineEstimator
from feemodel.app.predict import Prediction
from feemodel.app.simonline import SimOnline
__all__ = [
'TransientOnline',
'PoolsOnlineEstimator',
'Prediction',
'SimOnline'
]
| [] |
ParikhKadam/django-angular | examples/server/models/image_file_upload.py | 1fdd2ab3211ed1655acc2d172d826ed7f3ad0574 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
# start tutorial
from django.db import models
from djng.forms import NgModelFormMixin, NgFormValidationMixin
from djng.styling.bootstrap3.forms import Bootstrap3ModelForm
class SubscribeUser(models.Model):
full_name = models.CharField(
"Full name",
max_length=99)
avatar = models.ImageField("Avatar", blank=False, null=True)
permit = models.FileField("Permit", blank=True, null=True)
class SubscribeForm(NgModelFormMixin, NgFormValidationMixin, Bootstrap3ModelForm):
use_required_attribute = False
scope_prefix = 'subscribe_data'
form_name = 'my_form'
class Meta:
model = SubscribeUser
fields = ['full_name', 'avatar', 'permit']
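# Hedged usage sketch (not part of the tutorial file): the form is instantiated
# like any other Django ModelForm; scope_prefix and form_name only control how
# the rendered fields bind to the AngularJS scope.
#
#   form = SubscribeForm()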
| [((288, 332), 'django.db.models.CharField', 'models.CharField', (['"""Full name"""'], {'max_length': '(99)'}), "('Full name', max_length=99)\n", (304, 332), False, 'from django.db import models\n'), ((364, 415), 'django.db.models.ImageField', 'models.ImageField', (['"""Avatar"""'], {'blank': '(False)', 'null': '(True)'}), "('Avatar', blank=False, null=True)\n", (381, 415), False, 'from django.db import models\n'), ((430, 479), 'django.db.models.FileField', 'models.FileField', (['"""Permit"""'], {'blank': '(True)', 'null': '(True)'}), "('Permit', blank=True, null=True)\n", (446, 479), False, 'from django.db import models\n')] |
yangulei/tvm | python/tvm/topi/hexagon/slice_ops/add_subtract_multiply.py | d2cbdf381b68134951bfd7525c6a3a67838e5bdf | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name
"""Compute and schedule for add, multiply, subtract slice op
Please note the following assumptions made by the implementation:
1) The inputs will be multiples of the crouton layout, except for the axis that needs broadcasting."""
from tvm import te
from tvm import tir
from tvm import topi
from ..utils import get_layout_transform_fn
def add_broadcast_compute(input_a, input_b):
"""Call the add op from topi"""
return topi.add(input_a, input_b)
def subtract_broadcast_compute(input_a, input_b):
"""Call the subtract op from topi"""
return topi.subtract(input_a, input_b)
def multiply_broadcast_compute(input_a, input_b):
"""Call the multiply op from topi"""
return topi.multiply(input_a, input_b)
def tir_broadcast_schedule(
out_m,
input_a,
input_b,
output_layout: str,
input_a_layout: str,
input_b_layout: str,
op_name: str,
):
"""Schedule for input and output layout nhwc-8h2w32c2w-2d considering broadcast"""
func = te.create_prim_func([input_a, input_b, out_m])
s = tir.Schedule(func)
block_dict = {"add": "T_add", "subtract": "T_subtract", "multiply": "T_multiply"}
block = s.get_block(block_dict[op_name])
if input_a_layout == "nhwc-8h2w32c2w-2d":
input_a_transformed_layout = get_layout_transform_fn(input_a_layout)
s.transform_layout(block, buffer=("read", 0), index_map=input_a_transformed_layout)
if input_b_layout == "nhwc-8h2w32c2w-2d":
input_b_transformed_layout = get_layout_transform_fn(input_b_layout)
s.transform_layout(block, buffer=("read", 1), index_map=input_b_transformed_layout)
output_transformed_layout = get_layout_transform_fn(output_layout)
s.transform_layout(block, buffer=("write", 0), index_map=output_transformed_layout)
n, h, w, c = s.get_loops(block)
h_o, h_i = s.split(h, [None, 8])
w_o, w_i = s.split(w, [None, 4])
c_o, c_i = s.split(c, [None, 32])
wio, wii = s.split(w_i, [None, 2])
s.reorder(n, h_o, w_o, c_o, h_i, wio, c_i, wii)
fused = s.fuse(c_i, wii)
s.vectorize(fused)
return s
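# Hedged usage sketch (not part of the original module) showing how the compute
# and schedule helpers above could be wired together; the shapes, dtype and
# layout strings are assumptions for illustration only:
#
#   a = te.placeholder((1, 8, 4, 32), dtype="float16", name="a")
#   b = te.placeholder((1, 8, 4, 32), dtype="float16", name="b")
#   out = add_broadcast_compute(a, b)
#   sch = tir_broadcast_schedule(out, a, b, "nhwc-8h2w32c2w-2d",
#                                "nhwc-8h2w32c2w-2d", "nhwc-8h2w32c2w-2d", "add")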
| [((1274, 1300), 'tvm.topi.add', 'topi.add', (['input_a', 'input_b'], {}), '(input_a, input_b)\n', (1282, 1300), False, 'from tvm import topi\n'), ((1410, 1441), 'tvm.topi.subtract', 'topi.subtract', (['input_a', 'input_b'], {}), '(input_a, input_b)\n', (1423, 1441), False, 'from tvm import topi\n'), ((1551, 1582), 'tvm.topi.multiply', 'topi.multiply', (['input_a', 'input_b'], {}), '(input_a, input_b)\n', (1564, 1582), False, 'from tvm import topi\n'), ((1856, 1902), 'tvm.te.create_prim_func', 'te.create_prim_func', (['[input_a, input_b, out_m]'], {}), '([input_a, input_b, out_m])\n', (1875, 1902), False, 'from tvm import te\n'), ((1914, 1932), 'tvm.tir.Schedule', 'tir.Schedule', (['func'], {}), '(func)\n', (1926, 1932), False, 'from tvm import tir\n')] |
jrouly/dagster | python_modules/automation/automation/docker/dagster_docker.py | 2b3104db2fc6439050f7825d4b9ebaf39ddf6c0c | import contextlib
import os
from collections import namedtuple
import yaml
from dagster import __version__ as current_dagster_version
from dagster import check
from .ecr import ecr_image, get_aws_account_id, get_aws_region
from .utils import (
execute_docker_build,
execute_docker_push,
execute_docker_tag,
python_version_image_tag,
)
# Default repository prefix used for local images
DEFAULT_LOCAL_PREFIX = "dagster"
# Location of the template assets used here
IMAGES_PATH = os.path.join(os.path.dirname(__file__), "images")
@contextlib.contextmanager
def do_nothing(_cwd):
yield
class DagsterDockerImage(namedtuple("_DagsterDockerImage", "image build_cm path")):
"""Represents a Dagster image.
Properties:
image (str): Name of the image
build_cm (function): function that is a context manager for build (e.g. for populating a
build cache)
        path (Optional(str)): The path to the image's directory. Defaults to docker/images/<IMAGE NAME>
"""
def __new__(cls, image, build_cm=do_nothing, path=None):
return super(DagsterDockerImage, cls).__new__(
cls,
check.str_param(image, "image"),
check.callable_param(build_cm, "build_cm"),
check.opt_str_param(
path, "path", default=os.path.join(os.path.dirname(__file__), "images", image)
),
)
@property
def python_versions(self):
"""List of Python versions supported for this image."""
with open(os.path.join(self.path, "versions.yaml"), "r") as f:
versions = yaml.safe_load(f.read())
return list(versions.keys())
def _get_last_updated_for_python_version(self, python_version):
"""Retrieve the last_updated timestamp for a particular python_version of this image."""
check.str_param(python_version, "python_version")
with open(os.path.join(self.path, "last_updated.yaml"), "r") as f:
last_updated = yaml.safe_load(f.read())
return last_updated[python_version]
def _set_last_updated_for_python_version(self, timestamp, python_version):
"""Update the last_updated timestamp for a particular python_version of this image."""
check.str_param(timestamp, "timestamp")
check.str_param(python_version, "python_version")
last_updated = {}
last_updated_path = os.path.join(self.path, "last_updated.yaml")
if os.path.exists(last_updated_path):
with open(last_updated_path, "r") as f:
last_updated = yaml.safe_load(f.read())
last_updated[python_version] = timestamp
with open(os.path.join(self.path, "last_updated.yaml"), "w") as f:
yaml.dump(last_updated, f, default_flow_style=False)
def local_image(self, python_version):
"""Generates the local image name, like: "dagster/foo:some-tag" """
check.str_param(python_version, "python_version")
last_updated = self._get_last_updated_for_python_version(python_version)
tag = python_version_image_tag(python_version, last_updated)
return "{}/{}:{}".format(DEFAULT_LOCAL_PREFIX, self.image, tag)
def aws_image(self, python_version=None, custom_tag=None):
"""Generates the AWS ECR image name, like:
"1234567890.dkr.ecr.us-west-1.amazonaws.com/foo:some-tag"
"""
check.invariant(not (python_version and custom_tag))
check.opt_str_param(python_version, "python_version")
check.opt_str_param(custom_tag, "custom_tag")
if python_version:
last_updated = self._get_last_updated_for_python_version(python_version)
tag = python_version_image_tag(python_version, last_updated)
else:
tag = custom_tag
return ecr_image(
self.image,
tag,
aws_account_id=get_aws_account_id(),
aws_region=get_aws_region(),
)
def _get_docker_args(self, python_version):
"""Retrieve Docker arguments from this image's versions.yaml, and update with latest Dagster
version.
Also, we allow references in the image versions.yaml to another Dagster image to use as a
base image. If defined, set the BASE_IMAGE Docker arg from the full name of the parent
image.
"""
with open(os.path.join(self.path, "versions.yaml"), "r") as f:
versions = yaml.safe_load(f.read())
image_info = versions.get(python_version, {})
docker_args = image_info.get("docker_args", {})
if "base_image" in image_info:
check.invariant(
"BASE_IMAGE" not in docker_args, "Cannot override an existing BASE_IMAGE"
)
base_image = DagsterDockerImage(image_info["base_image"]["name"])
source = image_info["base_image"]["source"]
if source == "aws":
docker_args["BASE_IMAGE"] = base_image.aws_image(python_version)
elif source == "local":
docker_args["BASE_IMAGE"] = base_image.local_image(python_version)
else:
raise Exception("Unrecognized source {}".format(source))
# Set Dagster version
docker_args["DAGSTER_VERSION"] = current_dagster_version
return docker_args
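    # Hedged sketch of the versions.yaml layout that _get_docker_args above
    # assumes; the keys are inferred from the parsing code, the values are
    # illustrative only:
    #
    #   "3.8.7":
    #     docker_args:
    #       EXTRA_BUILD_ARG: "value"
    #     base_image:
    #       name: "some-dagster-image"
    #       source: "aws"   # or "local"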
def build(self, timestamp, dagster_version, python_version):
check.str_param(timestamp, "timestamp")
check.str_param(python_version, "python_version")
check.invariant(
dagster_version == current_dagster_version,
desc="Current dagster version ({}) does not match provided arg ({})".format(
current_dagster_version, dagster_version
),
)
with self.build_cm(self.path):
self._set_last_updated_for_python_version(timestamp, python_version)
execute_docker_build(
self.local_image(python_version),
docker_args=self._get_docker_args(python_version),
cwd=self.path,
)
def push(self, python_version, custom_tag=None):
"""Push this image to ECR."""
if custom_tag:
execute_docker_tag(
self.local_image(python_version),
self.aws_image(python_version=None, custom_tag=custom_tag),
)
execute_docker_push(self.aws_image(python_version=None, custom_tag=custom_tag))
else:
execute_docker_tag(self.local_image(python_version), self.aws_image(python_version))
execute_docker_push(self.aws_image(python_version))
| [((634, 690), 'collections.namedtuple', 'namedtuple', (['"""_DagsterDockerImage"""', '"""image build_cm path"""'], {}), "('_DagsterDockerImage', 'image build_cm path')\n", (644, 690), False, 'from collections import namedtuple\n'), ((509, 534), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (524, 534), False, 'import os\n'), ((1841, 1890), 'dagster.check.str_param', 'check.str_param', (['python_version', '"""python_version"""'], {}), "(python_version, 'python_version')\n", (1856, 1890), False, 'from dagster import check\n'), ((2249, 2288), 'dagster.check.str_param', 'check.str_param', (['timestamp', '"""timestamp"""'], {}), "(timestamp, 'timestamp')\n", (2264, 2288), False, 'from dagster import check\n'), ((2297, 2346), 'dagster.check.str_param', 'check.str_param', (['python_version', '"""python_version"""'], {}), "(python_version, 'python_version')\n", (2312, 2346), False, 'from dagster import check\n'), ((2403, 2447), 'os.path.join', 'os.path.join', (['self.path', '"""last_updated.yaml"""'], {}), "(self.path, 'last_updated.yaml')\n", (2415, 2447), False, 'import os\n'), ((2459, 2492), 'os.path.exists', 'os.path.exists', (['last_updated_path'], {}), '(last_updated_path)\n', (2473, 2492), False, 'import os\n'), ((2921, 2970), 'dagster.check.str_param', 'check.str_param', (['python_version', '"""python_version"""'], {}), "(python_version, 'python_version')\n", (2936, 2970), False, 'from dagster import check\n'), ((3395, 3447), 'dagster.check.invariant', 'check.invariant', (['(not (python_version and custom_tag))'], {}), '(not (python_version and custom_tag))\n', (3410, 3447), False, 'from dagster import check\n'), ((3456, 3509), 'dagster.check.opt_str_param', 'check.opt_str_param', (['python_version', '"""python_version"""'], {}), "(python_version, 'python_version')\n", (3475, 3509), False, 'from dagster import check\n'), ((3518, 3563), 'dagster.check.opt_str_param', 'check.opt_str_param', (['custom_tag', '"""custom_tag"""'], {}), "(custom_tag, 'custom_tag')\n", (3537, 3563), False, 'from dagster import check\n'), ((5412, 5451), 'dagster.check.str_param', 'check.str_param', (['timestamp', '"""timestamp"""'], {}), "(timestamp, 'timestamp')\n", (5427, 5451), False, 'from dagster import check\n'), ((5460, 5509), 'dagster.check.str_param', 'check.str_param', (['python_version', '"""python_version"""'], {}), "(python_version, 'python_version')\n", (5475, 5509), False, 'from dagster import check\n'), ((1159, 1190), 'dagster.check.str_param', 'check.str_param', (['image', '"""image"""'], {}), "(image, 'image')\n", (1174, 1190), False, 'from dagster import check\n'), ((1204, 1246), 'dagster.check.callable_param', 'check.callable_param', (['build_cm', '"""build_cm"""'], {}), "(build_cm, 'build_cm')\n", (1224, 1246), False, 'from dagster import check\n'), ((2740, 2792), 'yaml.dump', 'yaml.dump', (['last_updated', 'f'], {'default_flow_style': '(False)'}), '(last_updated, f, default_flow_style=False)\n', (2749, 2792), False, 'import yaml\n'), ((4635, 4729), 'dagster.check.invariant', 'check.invariant', (["('BASE_IMAGE' not in docker_args)", '"""Cannot override an existing BASE_IMAGE"""'], {}), "('BASE_IMAGE' not in docker_args,\n 'Cannot override an existing BASE_IMAGE')\n", (4650, 4729), False, 'from dagster import check\n'), ((1529, 1569), 'os.path.join', 'os.path.join', (['self.path', '"""versions.yaml"""'], {}), "(self.path, 'versions.yaml')\n", (1541, 1569), False, 'import os\n'), ((1909, 1953), 'os.path.join', 'os.path.join', (['self.path', '"""last_updated.yaml"""'], 
{}), "(self.path, 'last_updated.yaml')\n", (1921, 1953), False, 'import os\n'), ((2671, 2715), 'os.path.join', 'os.path.join', (['self.path', '"""last_updated.yaml"""'], {}), "(self.path, 'last_updated.yaml')\n", (2683, 2715), False, 'import os\n'), ((4367, 4407), 'os.path.join', 'os.path.join', (['self.path', '"""versions.yaml"""'], {}), "(self.path, 'versions.yaml')\n", (4379, 4407), False, 'import os\n'), ((1332, 1357), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1347, 1357), False, 'import os\n')] |
Fusion-Rom/android_external_chromium_org | chrome/test/telemetry/chromeos/login_unittest.py | d8b126911c6ea9753e9f526bee5654419e1d0ebd | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import logging
import os
import unittest
from telemetry.core import browser_finder
from telemetry.core import exceptions
from telemetry.core import extension_to_load
from telemetry.core import util
from telemetry.core.backends.chrome import cros_interface
from telemetry.unittest import options_for_unittests
class CrOSAutoTest(unittest.TestCase):
def setUp(self):
options = options_for_unittests.GetCopy()
self._cri = cros_interface.CrOSInterface(options.cros_remote,
options.cros_ssh_identity)
self._is_guest = options.browser_type == 'cros-chrome-guest'
self._username = '' if self._is_guest else options.browser_options.username
self._password = options.browser_options.password
def _IsCryptohomeMounted(self):
"""Returns True if cryptohome is mounted"""
cryptohomeJSON, _ = self._cri.RunCmdOnDevice(['/usr/sbin/cryptohome',
'--action=status'])
cryptohomeStatus = json.loads(cryptohomeJSON)
return (cryptohomeStatus['mounts'] and
cryptohomeStatus['mounts'][0]['mounted'])
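  # The cryptohome status JSON parsed above is assumed to look roughly like
  # {"mounts": [{"mounted": true, ...}], ...} (illustrative sketch only).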
def _CreateBrowser(self, autotest_ext=False, auto_login=True):
"""Finds and creates a browser for tests. if autotest_ext is True,
also loads the autotest extension"""
options = options_for_unittests.GetCopy()
if autotest_ext:
extension_path = os.path.join(os.path.dirname(__file__), 'autotest_ext')
self._load_extension = extension_to_load.ExtensionToLoad(
path=extension_path,
browser_type=options.browser_type,
is_component=True)
options.extensions_to_load = [self._load_extension]
browser_to_create = browser_finder.FindBrowser(options)
self.assertTrue(browser_to_create)
options.browser_options.create_browser_with_oobe = True
options.browser_options.auto_login = auto_login
b = browser_to_create.Create()
b.Start()
return b
def _GetAutotestExtension(self, browser):
"""Returns the autotest extension instance"""
extension = browser.extensions[self._load_extension]
self.assertTrue(extension)
return extension
def _GetLoginStatus(self, browser):
extension = self._GetAutotestExtension(browser)
self.assertTrue(extension.EvaluateJavaScript(
"typeof('chrome.autotestPrivate') != 'undefined'"))
extension.ExecuteJavaScript('''
window.__login_status = null;
chrome.autotestPrivate.loginStatus(function(s) {
window.__login_status = s;
});
''')
return util.WaitFor(
lambda: extension.EvaluateJavaScript('window.__login_status'), 10)
def testCryptohomeMounted(self):
"""Verifies cryptohome mount status for regular and guest user and when
logged out"""
with self._CreateBrowser() as b:
self.assertEquals(1, len(b.tabs))
self.assertTrue(b.tabs[0].url)
self.assertTrue(self._IsCryptohomeMounted())
chronos_fs = self._cri.FilesystemMountedAt('/home/chronos/user')
self.assertTrue(chronos_fs)
if self._is_guest:
self.assertEquals(chronos_fs, 'guestfs')
else:
home, _ = self._cri.RunCmdOnDevice(['/usr/sbin/cryptohome-path',
'user', self._username])
self.assertEquals(self._cri.FilesystemMountedAt(home.rstrip()),
chronos_fs)
self.assertFalse(self._IsCryptohomeMounted())
self.assertEquals(self._cri.FilesystemMountedAt('/home/chronos/user'),
'/dev/mapper/encstateful')
def testLoginStatus(self):
"""Tests autotestPrivate.loginStatus"""
with self._CreateBrowser(autotest_ext=True) as b:
login_status = self._GetLoginStatus(b)
self.assertEquals(type(login_status), dict)
self.assertEquals(not self._is_guest, login_status['isRegularUser'])
self.assertEquals(self._is_guest, login_status['isGuest'])
self.assertEquals(login_status['email'], self._username)
self.assertFalse(login_status['isScreenLocked'])
def _IsScreenLocked(self, browser):
return self._GetLoginStatus(browser)['isScreenLocked']
def _LockScreen(self, browser):
self.assertFalse(self._IsScreenLocked(browser))
extension = self._GetAutotestExtension(browser)
self.assertTrue(extension.EvaluateJavaScript(
"typeof chrome.autotestPrivate.lockScreen == 'function'"))
logging.info('Locking screen')
extension.ExecuteJavaScript('chrome.autotestPrivate.lockScreen();')
logging.info('Waiting for the lock screen')
def ScreenLocked():
return (browser.oobe and
browser.oobe.EvaluateJavaScript("typeof Oobe == 'function'") and
browser.oobe.EvaluateJavaScript(
"typeof Oobe.authenticateForTesting == 'function'"))
util.WaitFor(ScreenLocked, 10)
self.assertTrue(self._IsScreenLocked(browser))
def _AttemptUnlockBadPassword(self, browser):
logging.info('Trying a bad password')
def ErrorBubbleVisible():
return not browser.oobe.EvaluateJavaScript('''
document.getElementById('bubble').hidden
''')
self.assertFalse(ErrorBubbleVisible())
browser.oobe.ExecuteJavaScript('''
Oobe.authenticateForTesting('%s', 'bad');
''' % self._username)
util.WaitFor(ErrorBubbleVisible, 10)
self.assertTrue(self._IsScreenLocked(browser))
def _UnlockScreen(self, browser):
logging.info('Unlocking')
browser.oobe.ExecuteJavaScript('''
Oobe.authenticateForTesting('%s', '%s');
''' % (self._username, self._password))
util.WaitFor(lambda: not browser.oobe, 10)
self.assertFalse(self._IsScreenLocked(browser))
def testScreenLock(self):
"""Tests autotestPrivate.screenLock"""
with self._CreateBrowser(autotest_ext=True) as browser:
self._LockScreen(browser)
self._AttemptUnlockBadPassword(browser)
self._UnlockScreen(browser)
def testLogout(self):
"""Tests autotestPrivate.logout"""
with self._CreateBrowser(autotest_ext=True) as b:
extension = self._GetAutotestExtension(b)
try:
extension.ExecuteJavaScript('chrome.autotestPrivate.logout();')
except (exceptions.BrowserConnectionGoneException,
exceptions.BrowserGoneException):
pass
util.WaitFor(lambda: not self._IsCryptohomeMounted(), 20)
def _SwitchRegion(self, region):
self._cri.RunCmdOnDevice(['stop', 'ui'])
# Change VPD (requires RW-enabled firmware).
# To save time, region and initial_timezone are not set.
vpd = {'initial_locale': region.language_code,
'keyboard_layout': region.keyboard}
for (key, value) in vpd.items():
self._cri.RunCmdOnDevice(['vpd', '-s', '"%s"="%s"' % (key, value)])
# Remove cached files to clear initial locale info and force regeneration.
self._cri.RunCmdOnDevice(['rm', '/home/chronos/Local\ State'])
self._cri.RunCmdOnDevice(['rm', '/home/chronos/.oobe_completed'])
self._cri.RunCmdOnDevice(['dump_vpd_log', '--force'])
self._cri.RunCmdOnDevice(['start', 'ui'])
def _OobeHasOption(self, browser, selectId, value):
hasOptionJs = '''
// Check that the option is present, and selected if it is the default.
(function hasOption(selectId, value, isDefault) {
var options = document.getElementById(selectId).options;
for (var i = 0; i < options.length; i++) {
if (options[i].value == value) {
// The option is present. Make sure it's selected if necessary.
return !isDefault || options.selectedIndex == i;
}
}
return false;
})("%s", "%s", %s);
'''
return browser.oobe.EvaluateJavaScript(
hasOptionJs % (selectId, value, 'true'))
def _ResolveLanguage(self, locale):
# If the locale matches a language but not the country, fall back to
# an existing locale. See ui/base/l10n/l10n_util.cc.
lang, _, region = map(str.lower, locale.partition('-'))
if not region:
return ""
# Map from other countries to a localized country
if lang == 'es' and region == 'es':
return 'es-419'
if lang == 'zh':
if region in ('hk', 'mo'):
return 'zh-TW'
return 'zh-CN'
if lang == 'en':
if region in ('au', 'ca', 'nz', 'za'):
return 'en-GB'
return 'en-US'
# No mapping found
return ""
def testOobeLocalization(self):
"""Tests different region configurations at OOBE"""
# Save the original device localization settings.
# To save time, only read initial_locale and keyboard_layout.
initial_region = self.Region('', '', '', '', '')
initial_region.language_code, _ = self._cri.RunCmdOnDevice(
['vpd', '-g', 'initial_locale'])
initial_region.keyboard, _ = self._cri.RunCmdOnDevice(
['vpd', '-g', 'keyboard_layout'])
for region in self.REGIONS_LIST:
self._SwitchRegion(region)
with self._CreateBrowser(auto_login=False) as browser:
# Ensure the dropdown lists have been created.
util.WaitFor(lambda: browser.oobe.EvaluateJavaScript(
'document.getElementById("language-select") != null'),
10)
# Find the language, or an acceptable fallback value.
languageFound = self._OobeHasOption(browser,
'language-select',
region.language_code)
if not languageFound:
fallback = self._ResolveLanguage(region.language_code)
self.assertTrue(fallback and
self._OobeHasOption(browser,
'language-select',
fallback))
# Find the keyboard layout.
self.assertTrue(self._OobeHasOption(
browser, 'keyboard-select', region.keyboard))
# Test is finished. Restore original region settings.
self._SwitchRegion(initial_region)
# The Region class and region list will be available in regions.py.
class Region(object):
def __init__(self, region_code, keyboard, time_zone, language_code,
keyboard_mechanical_layout, description=None, notes=None):
self.region_code = region_code
self.keyboard = keyboard
self.time_zone = time_zone
self.language_code = language_code
self.keyboard_mechanical_layout = keyboard_mechanical_layout
self.description = description or region_code
self.notes = notes
class Enum(frozenset):
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
KeyboardMechanicalLayout = Enum(['ANSI', 'ISO', 'JIS', 'ABNT2'])
_KML = KeyboardMechanicalLayout
REGIONS_LIST = [
Region('au', 'xkb:us::eng', 'Australia/Sydney', 'en-AU', _KML.ANSI,
'Australia'),
Region('ca.ansi', 'xkb:us::eng', 'America/Toronto', 'en-CA', _KML.ANSI,
'Canada (US keyboard)',
'Canada with US (ANSI) keyboard; see http://goto/cros-canada'),
Region('ca.fr', 'xkb:ca::fra', 'America/Toronto', 'fr-CA', _KML.ISO,
'Canada (French keyboard)',
('Canadian French (ISO) keyboard. The most common configuration for '
'Canadian French SKUs. See http://goto/cros-canada')),
Region('ca.hybrid', 'xkb:ca:eng:eng', 'America/Toronto', 'en-CA', _KML.ISO,
'Canada (hybrid)',
('Canada with hybrid xkb:ca:eng:eng + xkb:ca::fra keyboard (ISO), '
'defaulting to English language and keyboard. Used only if there '
'needs to be a single SKU for all of Canada. See '
'http://goto/cros-canada')),
Region('ca.multix', 'xkb:ca:multix:fra', 'America/Toronto', 'fr-CA',
_KML.ISO, 'Canada (multilingual)',
("Canadian Multilingual keyboard; you probably don't want this. See "
"http://goto/cros-canada")),
Region('de', 'xkb:de::ger', 'Europe/Berlin', 'de', _KML.ISO, 'Germany'),
Region('fi', 'xkb:fi::fin', 'Europe/Helsinki', 'fi', _KML.ISO, 'Finland'),
Region('fr', 'xkb:fr::fra', 'Europe/Paris', 'fr', _KML.ISO, 'France'),
Region('gb', 'xkb:gb:extd:eng', 'Europe/London', 'en-GB', _KML.ISO, 'UK'),
Region('ie', 'xkb:gb:extd:eng', 'Europe/Dublin', 'en-GB', _KML.ISO,
'Ireland'),
Region('in', 'xkb:us::eng', 'Asia/Calcutta', 'en-US', _KML.ANSI, 'India'),
Region('my', 'xkb:us::eng', 'Asia/Kuala_Lumpur', 'ms', _KML.ANSI,
'Malaysia'),
Region('nl', 'xkb:us:intl:eng', 'Europe/Amsterdam', 'nl', _KML.ANSI,
'Netherlands'),
Region('nordic', 'xkb:se::swe', 'Europe/Stockholm', 'en-US', _KML.ISO,
'Nordics',
('Unified SKU for Sweden, Norway, and Denmark. This defaults '
'to Swedish keyboard layout, but starts with US English language '
'for neutrality. Use if there is a single combined SKU for Nordic '
'countries.')),
Region('se', 'xkb:se::swe', 'Europe/Stockholm', 'sv', _KML.ISO, 'Sweden',
("Use this if there separate SKUs for Nordic countries (Sweden, "
"Norway, and Denmark), or the device is only shipping to Sweden. "
"If there is a single unified SKU, use 'nordic' instead.")),
Region('sg', 'xkb:us::eng', 'Asia/Singapore', 'en-GB', _KML.ANSI,
'Singapore'),
Region('us', 'xkb:us::eng', 'America/Los_Angeles', 'en-US', _KML.ANSI,
'United States'),
]
| [((561, 592), 'telemetry.unittest.options_for_unittests.GetCopy', 'options_for_unittests.GetCopy', ([], {}), '()\n', (590, 592), False, 'from telemetry.unittest import options_for_unittests\n'), ((609, 685), 'telemetry.core.backends.chrome.cros_interface.CrOSInterface', 'cros_interface.CrOSInterface', (['options.cros_remote', 'options.cros_ssh_identity'], {}), '(options.cros_remote, options.cros_ssh_identity)\n', (637, 685), False, 'from telemetry.core.backends.chrome import cros_interface\n'), ((1179, 1205), 'json.loads', 'json.loads', (['cryptohomeJSON'], {}), '(cryptohomeJSON)\n', (1189, 1205), False, 'import json\n'), ((1495, 1526), 'telemetry.unittest.options_for_unittests.GetCopy', 'options_for_unittests.GetCopy', ([], {}), '()\n', (1524, 1526), False, 'from telemetry.unittest import options_for_unittests\n'), ((1880, 1915), 'telemetry.core.browser_finder.FindBrowser', 'browser_finder.FindBrowser', (['options'], {}), '(options)\n', (1906, 1915), False, 'from telemetry.core import browser_finder\n'), ((4602, 4632), 'logging.info', 'logging.info', (['"""Locking screen"""'], {}), "('Locking screen')\n", (4614, 4632), False, 'import logging\n'), ((4714, 4757), 'logging.info', 'logging.info', (['"""Waiting for the lock screen"""'], {}), "('Waiting for the lock screen')\n", (4726, 4757), False, 'import logging\n'), ((5010, 5040), 'telemetry.core.util.WaitFor', 'util.WaitFor', (['ScreenLocked', '(10)'], {}), '(ScreenLocked, 10)\n', (5022, 5040), False, 'from telemetry.core import util\n'), ((5149, 5186), 'logging.info', 'logging.info', (['"""Trying a bad password"""'], {}), "('Trying a bad password')\n", (5161, 5186), False, 'import logging\n'), ((5512, 5548), 'telemetry.core.util.WaitFor', 'util.WaitFor', (['ErrorBubbleVisible', '(10)'], {}), '(ErrorBubbleVisible, 10)\n', (5524, 5548), False, 'from telemetry.core import util\n'), ((5645, 5670), 'logging.info', 'logging.info', (['"""Unlocking"""'], {}), "('Unlocking')\n", (5657, 5670), False, 'import logging\n'), ((5815, 5858), 'telemetry.core.util.WaitFor', 'util.WaitFor', (['(lambda : not browser.oobe)', '(10)'], {}), '(lambda : not browser.oobe, 10)\n', (5827, 5858), False, 'from telemetry.core import util\n'), ((1657, 1770), 'telemetry.core.extension_to_load.ExtensionToLoad', 'extension_to_load.ExtensionToLoad', ([], {'path': 'extension_path', 'browser_type': 'options.browser_type', 'is_component': '(True)'}), '(path=extension_path, browser_type=options\n .browser_type, is_component=True)\n', (1690, 1770), False, 'from telemetry.core import extension_to_load\n'), ((1585, 1610), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1600, 1610), False, 'import os\n')] |
Neotrinost/Neotrinost.ir | generator/database.py | f501b8cf410c1e6ec6cc4e5fce935147b8be1e61 | import sqlite3
class Database:
def get_connection(self):
return sqlite3.connect("./db.sqlite")
def add_card(self, card_title, card_text, card_link_text, card_link_url):
con = self.get_connection()
cur = con.cursor()
create_table_query = "CREATE TABLE IF NOT EXISTS cards('card_title' VARCHAR," + \
" 'card_text' TEXT, 'card_link_text' VARCHAR, 'card_link_url' VARCHAR )"
insert_data_query = f"INSERT INTO " + \
f"cards VALUES ({card_title}, {card_text}, {card_link_text}, {card_link_url})"
try:
cur.execute(create_table_query)
cur.execute(insert_data_query)
con.commit()
except:
print("an error has been occurred !")
| [((78, 108), 'sqlite3.connect', 'sqlite3.connect', (['"""./db.sqlite"""'], {}), "('./db.sqlite')\n", (93, 108), False, 'import sqlite3\n')] |
frbapolkosnik/crits | crits/backdoors/forms.py | 1278c034f2238e2fe34e65e32ce241128a014df2 | from django import forms
from django.forms.utils import ErrorList
from crits.campaigns.campaign import Campaign
from crits.core.forms import add_bucketlist_to_form, add_ticket_to_form
from crits.core.handlers import get_item_names, get_source_names
from crits.core.user_tools import get_user_organization
from crits.core import form_consts
from crits.vocabulary.relationships import RelationshipTypes
relationship_choices = [(c, c) for c in RelationshipTypes.values(sort=True)]
class AddBackdoorForm(forms.Form):
"""
Django form for adding a Backdoor to CRITs.
"""
error_css_class = 'error'
required_css_class = 'required'
name = forms.CharField(label=form_consts.Backdoor.NAME, required=True)
aliases = forms.CharField(label=form_consts.Backdoor.ALIASES,
required=False)
version = forms.CharField(label=form_consts.Backdoor.VERSION,
required=False)
description = forms.CharField(label=form_consts.Backdoor.DESCRIPTION,
required=False)
campaign = forms.ChoiceField(widget=forms.Select,
label=form_consts.Backdoor.CAMPAIGN,
required=False)
confidence = forms.ChoiceField(label=form_consts.Backdoor.CAMPAIGN_CONFIDENCE,
required=False)
source = forms.ChoiceField(widget=forms.Select(attrs={'class': 'bulknoinitial'}),
label=form_consts.Backdoor.SOURCE,
required=True)
source_method = forms.CharField(label=form_consts.Backdoor.SOURCE_METHOD,
required=False)
source_reference = forms.CharField(widget=forms.TextInput(attrs={'size': '90'}),
label=form_consts.Backdoor.SOURCE_REFERENCE,
required=False)
related_id = forms.CharField(widget=forms.HiddenInput(), required=False, label=form_consts.Common.RELATED_ID)
related_type = forms.CharField(widget=forms.HiddenInput(), required=False, label=form_consts.Common.RELATED_TYPE)
relationship_type = forms.ChoiceField(required=False,
label=form_consts.Common.RELATIONSHIP_TYPE,
widget=forms.Select(attrs={'id':'relationship_type'}))
def __init__(self, username, *args, **kwargs):
super(AddBackdoorForm, self).__init__(*args, **kwargs)
self.fields['campaign'].choices = [('', '')] + [
(c.name, c.name) for c in get_item_names(Campaign, True)]
self.fields['confidence'].choices = [
('', ''),
('low', 'low'),
('medium', 'medium'),
('high', 'high')]
self.fields['source'].choices = [
(c.name, c.name) for c in get_source_names(True, True, username)]
self.fields['source'].initial = get_user_organization(username)
self.fields['relationship_type'].choices = relationship_choices
self.fields['relationship_type'].initial = RelationshipTypes.RELATED_TO
add_bucketlist_to_form(self)
add_ticket_to_form(self)
def clean(self):
cleaned_data = super(AddBackdoorForm, self).clean()
campaign = cleaned_data.get('campaign')
if campaign:
confidence = cleaned_data.get('confidence')
if not confidence or confidence == '':
self._errors.setdefault('confidence', ErrorList())
self._errors['confidence'].append(u'This field is required if campaign is specified.')
return cleaned_data
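# A minimal sketch (not part of the original module) showing how the form
# might be bound and validated in a view; the field values are hypothetical
# and assume matching source choices exist in the database.
def _example_usage(username):
    form = AddBackdoorForm(username, data={'name': 'example-backdoor',
                                            'source': 'example-source'})
    if form.is_valid():
        return form.cleaned_data
    return form.errors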
| [((662, 725), 'django.forms.CharField', 'forms.CharField', ([], {'label': 'form_consts.Backdoor.NAME', 'required': '(True)'}), '(label=form_consts.Backdoor.NAME, required=True)\n', (677, 725), False, 'from django import forms\n'), ((740, 807), 'django.forms.CharField', 'forms.CharField', ([], {'label': 'form_consts.Backdoor.ALIASES', 'required': '(False)'}), '(label=form_consts.Backdoor.ALIASES, required=False)\n', (755, 807), False, 'from django import forms\n'), ((852, 919), 'django.forms.CharField', 'forms.CharField', ([], {'label': 'form_consts.Backdoor.VERSION', 'required': '(False)'}), '(label=form_consts.Backdoor.VERSION, required=False)\n', (867, 919), False, 'from django import forms\n'), ((972, 1043), 'django.forms.CharField', 'forms.CharField', ([], {'label': 'form_consts.Backdoor.DESCRIPTION', 'required': '(False)'}), '(label=form_consts.Backdoor.DESCRIPTION, required=False)\n', (987, 1043), False, 'from django import forms\n'), ((1093, 1188), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'widget': 'forms.Select', 'label': 'form_consts.Backdoor.CAMPAIGN', 'required': '(False)'}), '(widget=forms.Select, label=form_consts.Backdoor.CAMPAIGN,\n required=False)\n', (1110, 1188), False, 'from django import forms\n'), ((1268, 1354), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': 'form_consts.Backdoor.CAMPAIGN_CONFIDENCE', 'required': '(False)'}), '(label=form_consts.Backdoor.CAMPAIGN_CONFIDENCE, required=\n False)\n', (1285, 1354), False, 'from django import forms\n'), ((1603, 1676), 'django.forms.CharField', 'forms.CharField', ([], {'label': 'form_consts.Backdoor.SOURCE_METHOD', 'required': '(False)'}), '(label=form_consts.Backdoor.SOURCE_METHOD, required=False)\n', (1618, 1676), False, 'from django import forms\n'), ((445, 480), 'crits.vocabulary.relationships.RelationshipTypes.values', 'RelationshipTypes.values', ([], {'sort': '(True)'}), '(sort=True)\n', (469, 480), False, 'from crits.vocabulary.relationships import RelationshipTypes\n'), ((2974, 3005), 'crits.core.user_tools.get_user_organization', 'get_user_organization', (['username'], {}), '(username)\n', (2995, 3005), False, 'from crits.core.user_tools import get_user_organization\n'), ((3167, 3195), 'crits.core.forms.add_bucketlist_to_form', 'add_bucketlist_to_form', (['self'], {}), '(self)\n', (3189, 3195), False, 'from crits.core.forms import add_bucketlist_to_form, add_ticket_to_form\n'), ((3204, 3228), 'crits.core.forms.add_ticket_to_form', 'add_ticket_to_form', (['self'], {}), '(self)\n', (3222, 3228), False, 'from crits.core.forms import add_bucketlist_to_form, add_ticket_to_form\n'), ((1423, 1469), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'bulknoinitial'}"}), "(attrs={'class': 'bulknoinitial'})\n", (1435, 1469), False, 'from django import forms\n'), ((1759, 1796), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'size': '90'}"}), "(attrs={'size': '90'})\n", (1774, 1796), False, 'from django import forms\n'), ((1977, 1996), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (1994, 1996), False, 'from django import forms\n'), ((2093, 2112), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (2110, 2112), False, 'from django import forms\n'), ((2362, 2409), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'id': 'relationship_type'}"}), "(attrs={'id': 'relationship_type'})\n", (2374, 2409), False, 'from django import forms\n'), ((2894, 2932), 'crits.core.handlers.get_source_names', 'get_source_names', (['(True)', 
'(True)', 'username'], {}), '(True, True, username)\n', (2910, 2932), False, 'from crits.core.handlers import get_item_names, get_source_names\n'), ((2622, 2652), 'crits.core.handlers.get_item_names', 'get_item_names', (['Campaign', '(True)'], {}), '(Campaign, True)\n', (2636, 2652), False, 'from crits.core.handlers import get_item_names, get_source_names\n'), ((3543, 3554), 'django.forms.utils.ErrorList', 'ErrorList', ([], {}), '()\n', (3552, 3554), False, 'from django.forms.utils import ErrorList\n')] |
fkamrani/adversarial-policies | src/aprl/agents/monte_carlo.py | 53e129c2083f6557ddc18dbb39e4e633a2d7ab9b | """Monte Carlo receding horizon control."""
from abc import ABC, abstractmethod
from multiprocessing import Pipe, Process
import gym
from stable_baselines.common.vec_env import CloudpickleWrapper
from aprl.common.mujoco import MujocoState, ResettableEnv
class MujocoResettableWrapper(ResettableEnv, gym.Wrapper):
"""Converts a MujocoEnv into a ResettableEnv.
Note all MuJoCo environments are resettable."""
def __init__(self, env):
"""Wraps a MujocoEnv, adding get_state and set_state methods.
:param env: a MujocoEnv. NOTE: it must not be wrapped in a TimeLimit."""
if hasattr(env, "_max_episode_steps"):
raise TypeError(
"Environment must not have a time limit " "(try passing in env.unwrapped instead)."
)
gym.Wrapper.__init__(self, env)
self.sim = env.unwrapped.sim
def get_state(self):
"""Serializes the qpos and qvel state of the MuJoCo emulator."""
return MujocoState.from_mjdata(self.sim.data).flatten()
def set_state(self, x):
"""Restores qpos and qvel, calling forward() to derive other values."""
state = MujocoState.from_flattened(x, self.sim)
state.set_mjdata(self.sim.data)
self.sim.forward() # put mjData in consistent state
def reset(self):
"""See base class."""
return self.env.reset()
def step(self, a):
"""See base class."""
return self.env.step(a)
class MonteCarlo(ABC):
"""Selects an action for a ResettableEnv by random search. Randomly samples
fixed-length sequences of actions. Evaluates each trajectory in the
environment, resetting the state to the original after each trajectory."""
@abstractmethod
def __init__(self, horizon, trajectories):
"""Constructs a MonteCarlo instance for env.
:param horizon: the length of the trajectories to search over.
:param trajectories: the number of trajectories to evaluate."""
self.horizon = horizon
self.trajectories = trajectories
@abstractmethod
def seed(self, seed):
"""Sets a seed for the PRNG for the action sequences.
:param seed (int): a seed."""
pass
@abstractmethod
def best_action(self, state):
"""Returns the best action out of a random search of action sequences.
Generates self.trajectories action sequences, each of length
self.horizon. The cumulative reward of each action sequence is computed,
starting from state. The function returns the first action and the
cumulative reward of the action sequences with the largest cumulative
reward.
:param state: a value returned by env.get_state().
:return (action, reward): the best action found and associated reward."""
pass
class MonteCarloSingle(MonteCarlo):
"""Selects an action for a ResettableEnv by random search.
See base class for details. This implementation is not parallelized."""
def __init__(self, env, horizon, trajectories):
"""See base class."""
super().__init__(horizon, trajectories)
self.env = env
def seed(self, seed):
"""Sets a seed for the PRNG for the action sequences.
:param seed (int): a seed."""
self.env.action_space.np_random.seed(seed)
def best_action(self, state):
"""Returns the best action out of a random search of action sequences.
See base class for details.
Search takes place in a single environment, which is reset to state
before evaluating each action sequence."""
res = []
for _ in range(self.trajectories):
self.env.set_state(state)
us = [self.env.action_space.sample() for _ in range(self.horizon)]
total_rew = 0
for u in us:
_ob, rew, done, _info = self.env.step(u)
total_rew += rew
if done:
break
res.append((us[0], total_rew))
self.env.set_state(state)
best = max(res, key=lambda x: x[1])
return best
def _worker(remote, parent_remote, dynamic_fn_wrapper, horizon, trajectories):
    """Subprocess worker: builds an environment and serves seed/search requests over a Pipe."""
    parent_remote.close()
dynamics = dynamic_fn_wrapper.var()
dynamics.reset()
mc = MonteCarloSingle(dynamics, horizon, trajectories)
try:
while True:
cmd, x = remote.recv()
if cmd == "seed":
mc.seed(x)
elif cmd == "search":
best_u, best_r = mc.best_action(x)
remote.send((best_u, best_r))
elif cmd == "close":
remote.close()
break
else:
raise NotImplementedError
except KeyboardInterrupt:
print("MonteCarloParallel worker: got KeyboardInterrupt")
finally:
dynamics.close()
class MonteCarloParallel(MonteCarlo):
"""Like MonteCarlo, but performs the random search in parallel."""
# This implementation is inspired by Baselines SubprocVecEnv.
def __init__(self, env_fns, horizon, trajectories, seed=0):
"""Launch subprocess workers and store configuration parameters.
:param env_fns (list<()->ResettableEnv>): list of thunks.
:param horizon (int): length of trajectories to search over.
:param trajectories (int): minimum number of trajectories to evaluate.
It will be rounded up to the nearest multiple of len(make_env)."""
super().__init__(horizon, trajectories)
nremotes = len(env_fns)
# Integer ceiling of self.trajectories / nworkers
traj_per_worker = (self.trajectories - 1) // nremotes + 1
pipes = [Pipe() for _ in range(nremotes)]
self.remotes, self.work_remotes = zip(*pipes)
worker_cfgs = zip(self.work_remotes, self.remotes, env_fns)
self.ps = []
for i, (work_remote, remote, dynamic_fn) in enumerate(worker_cfgs):
args = (work_remote, remote, CloudpickleWrapper(dynamic_fn), horizon, traj_per_worker)
process = Process(target=_worker, args=args)
process.daemon = True
# If the main process crashes, we should not cause things to hang
process.start()
self.ps.append(process)
for remote in self.work_remotes:
remote.close()
def seed(self, seed):
"""See base class."""
for i, remote in enumerate(self.remotes):
remote.send(("seed", seed + i))
def best_action(self, state):
"""Returns the best action out of a random search of action sequences."""
for remote in self.remotes:
remote.send(("search", state))
results = [remote.recv() for remote in self.remotes]
best = max(results, key=lambda x: x[1])
return best
def close(self):
"""Shuts down parallel workers."""
for remote in self.remotes:
remote.send(("close", None))
for p in self.ps:
p.join()
def receding_horizon(monte_carlo, env):
"""Receding horizon control
:param monte_carlo(MonteCarlo): a Monte Carlo controller for env or a clone of env.
:param env(ResettableEnv): a resettable environment."""
while True:
state = env.get_state()
a, _seq_rew = monte_carlo.best_action(state)
ob, rew, done, info = env.step(a)
yield a, ob, rew, done, info
if done:
break
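# A minimal usage sketch (not part of the original module): wrap a raw
# MujocoEnv, search with MonteCarloSingle and run receding-horizon control.
# `make_env` is a hypothetical thunk returning an unwrapped MujocoEnv.
def run_example(make_env, horizon=20, trajectories=100):
    env = MujocoResettableWrapper(make_env())
    controller = MonteCarloSingle(env, horizon=horizon, trajectories=trajectories)
    env.reset()
    total_reward = 0.0
    for _act, _ob, rew, done, _info in receding_horizon(controller, env):
        total_reward += rew
    return total_reward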
| [((800, 831), 'gym.Wrapper.__init__', 'gym.Wrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (820, 831), False, 'import gym\n'), ((1157, 1196), 'aprl.common.mujoco.MujocoState.from_flattened', 'MujocoState.from_flattened', (['x', 'self.sim'], {}), '(x, self.sim)\n', (1183, 1196), False, 'from aprl.common.mujoco import MujocoState, ResettableEnv\n'), ((5717, 5723), 'multiprocessing.Pipe', 'Pipe', ([], {}), '()\n', (5721, 5723), False, 'from multiprocessing import Pipe, Process\n'), ((6090, 6124), 'multiprocessing.Process', 'Process', ([], {'target': '_worker', 'args': 'args'}), '(target=_worker, args=args)\n', (6097, 6124), False, 'from multiprocessing import Pipe, Process\n'), ((983, 1021), 'aprl.common.mujoco.MujocoState.from_mjdata', 'MujocoState.from_mjdata', (['self.sim.data'], {}), '(self.sim.data)\n', (1006, 1021), False, 'from aprl.common.mujoco import MujocoState, ResettableEnv\n'), ((6010, 6040), 'stable_baselines.common.vec_env.CloudpickleWrapper', 'CloudpickleWrapper', (['dynamic_fn'], {}), '(dynamic_fn)\n', (6028, 6040), False, 'from stable_baselines.common.vec_env import CloudpickleWrapper\n')] |
xuwening/tensorflowDemo | machineLearnInAction/bayes.py | 65687a61e16f947b7ec8a85d12213f954a71542b |
import numpy as np
def loadDataSet():
postingList = [['my', 'dog', 'has', 'flea', 'problems', 'help', 'please'], #[0,0,1,1,1......]
['maybe', 'not', 'take', 'him', 'to', 'dog', 'park', 'stupid'],
['my', 'dalmation', 'is', 'so', 'cute', 'I', 'love', 'him'],
['stop', 'posting', 'stupid', 'worthless', 'garbage'],
['mr', 'licks', 'ate', 'my', 'steak', 'how', 'to', 'stop', 'him'],
['quit', 'buying', 'worthless', 'dog', 'food', 'stupid']]
classVec = [0, 1, 0, 1, 0, 1] # 1 is abusive, 0 not
return postingList, classVec
def createVocabList(dataSet):
vocabSet = set([])
for document in dataSet:
vocabSet = vocabSet | set(document)
return list(vocabSet)
def setOfWords2Vec(vocabList, inputSet):
returnVec = [0] * len(vocabList)
for word in inputSet:
if word in vocabList:
returnVec[vocabList.index(word)] = 1
else:
print('the word: %s is not in my vocabulary' % word)
return returnVec
def trainNB0(trainMatrix, trainCategory):
numTrainDocs = len(trainMatrix)
numWords = len(trainMatrix[0])
pAbusive = sum(trainCategory) / float(numTrainDocs)
p0Num = np.zeros(numWords)
p1Num = np.zeros(numWords)
p0Denom = 0.0
p1Denom = 0.0
for i in range(numTrainDocs):
if trainCategory[i] == 1:
p1Num += trainMatrix[i]
p1Denom += sum(trainMatrix[i])
else:
p0Num += trainMatrix[i]
p0Denom += sum(trainMatrix[i])
p1Vect = p1Num / p1Denom
p0Vect = p0Num / p0Denom
return p0Vect, p1Vect, pAbusive
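# A minimal sketch (not in the original file) of classifying a new document
# with the vectors returned by trainNB0; it assumes the same setOfWords2Vec
# word-presence encoding used above.
def classifyNB(vec2Classify, p0Vec, p1Vec, pAbusive):
    vec = np.array(vec2Classify)
    # likelihood of the observed words under each class (absent words contribute 1)
    p1 = np.prod(p1Vec ** vec) * pAbusive
    p0 = np.prod(p0Vec ** vec) * (1.0 - pAbusive)
    return 1 if p1 > p0 else 0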
if __name__ == '__main__':
postinList, classVec = loadDataSet()
myVocabList = createVocabList(postinList)
# print(setOfWords2Vec(myVocabList, postinList[0]))
trainMat = []
for postinDoc in postinList:
trainMat.append(setOfWords2Vec(myVocabList, postinDoc))
print(trainMat)
p0V, p1V, pAb = trainNB0(trainMat, classVec)
print(p0V, p1V, pAb) | [((1253, 1271), 'numpy.zeros', 'np.zeros', (['numWords'], {}), '(numWords)\n', (1261, 1271), True, 'import numpy as np\n'), ((1284, 1302), 'numpy.zeros', 'np.zeros', (['numWords'], {}), '(numWords)\n', (1292, 1302), True, 'import numpy as np\n')] |
segrids/arduino_due | py/debug/__init__.py | f375020b81459eae9b325aa3646ff84efc2853e8 | from .swd import SWD
from .ahb import AHB
from .debugger import Debugger, HaltError, NotHaltedError
try:
from .dwarf import ELFDebugger
except ImportError:
pass
| [] |
cbdunc2/pi-kit | HAP-NodeJS/Switch3_1.py | bf7e9e118af7853d509e0a10c95ba5d8564bb157 | import subprocess
subprocess.Popen(['sh', '../Switches/Switch3_On.sh'])
| [((18, 71), 'subprocess.Popen', 'subprocess.Popen', (["['sh', '../Switches/Switch3_On.sh']"], {}), "(['sh', '../Switches/Switch3_On.sh'])\n", (34, 71), False, 'import subprocess\n')] |
Software-Natives-OSS/cicd_sim | src/cicd_sim/artifact/__init__.py | 19452a5b06a6c6d99322c9b6777c501025e954f1 | from . artifactory import Artifactory
__all__ = ['Artifactory']
| [] |
Spiritdude/mandoline-py | mandoline/line_segment3d.py | 702cd1f9264c7d5d814600ff919406387fd86185 |
class LineSegment3D(object):
"""A class to represent a 3D line segment."""
def __init__(self, p1, p2):
"""Initialize with two endpoints."""
if p1 > p2:
p1, p2 = (p2, p1)
self.p1 = p1
self.p2 = p2
self.count = 1
def __len__(self):
"""Line segment always has two endpoints."""
return 2
def __iter__(self):
"""Iterator generator for endpoints."""
yield self.p1
yield self.p2
def __getitem__(self, idx):
"""Given a vertex number, returns a vertex coordinate vector."""
if idx == 0:
return self.p1
if idx == 1:
return self.p2
raise LookupError()
def __hash__(self):
"""Returns hash value for endpoints"""
return hash((self.p1, self.p2))
    def __lt__(self, p):
        """Ordering operator needed for sorting; delegates to __cmp__."""
        return self.__cmp__(p) < 0
def __cmp__(self, p):
"""Compare points for sort ordering in an arbitrary heirarchy."""
val = self[0].__cmp__(p[0])
if val != 0:
return val
return self[1].__cmp__(p[1])
def __format__(self, fmt):
"""Provides .format() support."""
pfx = ""
sep = " - "
sfx = ""
if "a" in fmt:
pfx = "["
sep = ", "
sfx = "]"
elif "s" in fmt:
pfx = ""
sep = " "
sfx = ""
p1 = self.p1.__format__(fmt)
p2 = self.p2.__format__(fmt)
return pfx + p1 + sep + p2 + sfx
def __repr__(self):
"""Standard string representation."""
return "<LineSegment3D: {0}>".format(self)
def __str__(self):
"""Returns a human readable coordinate string."""
return "{0:a}".format(self)
    def translate(self, offset):
        """Translate the endpoint's vertices"""
        # materialize as tuples; a bare generator expression would be exhausted on first use
        self.p1 = tuple(self.p1[a] + offset[a] for a in range(3))
        self.p2 = tuple(self.p2[a] + offset[a] for a in range(3))
    def scale(self, scale):
        """Scale the endpoint's vertices"""
        self.p1 = tuple(self.p1[a] * scale[a] for a in range(3))
        self.p2 = tuple(self.p2[a] * scale[a] for a in range(3))
def length(self):
"""Returns the length of the line."""
return self.p1.distFromPoint(self.p2)
class LineSegment3DCache(object):
"""Cache class for 3D Line Segments."""
def __init__(self):
"""Initialize as an empty cache."""
self.endhash = {}
self.seghash = {}
def _add_endpoint(self, p, seg):
"""Remember that this segment has a given endpoint"""
if p not in self.endhash:
self.endhash[p] = []
self.endhash[p].append(seg)
def rehash(self):
"""Reset the hashes for changed edge vertices"""
oldseghash = self.seghash
self.seghash = {
(v[0], v[1]): v
for v in oldseghash.values()
}
oldendhash = self.endhash
self.endhash = {
k: v
for v in oldendhash.values()
for k in v
}
def translate(self,offset):
"""Translate vertices of all edges."""
for v in self.seghash.values():
v.translate(offset)
self.rehash()
def scale(self,scale):
"""Scale vertices of all edges."""
for v in self.seghash.values():
v.scale(scale)
self.rehash()
def endpoint_segments(self, p):
"""get list of edges that end at point p"""
if p not in self.endhash:
return []
return self.endhash[p]
def get(self, p1, p2):
"""Given 2 endpoints, return the cached LineSegment3D inst, if any."""
key = (p1, p2) if p1 < p2 else (p2, p1)
if key not in self.seghash:
return None
return self.seghash[key]
def add(self, p1, p2):
"""Given 2 endpoints, return the (new or cached) LineSegment3D inst."""
key = (p1, p2) if p1 < p2 else (p2, p1)
if key in self.seghash:
seg = self.seghash[key]
seg.count += 1
return seg
seg = LineSegment3D(p1, p2)
self.seghash[key] = seg
self._add_endpoint(p1, seg)
self._add_endpoint(p2, seg)
return seg
def __iter__(self):
"""Creates an iterator for the line segments in the cache."""
for pt in self.seghash.values():
yield pt
def __len__(self):
"""Length of sequence."""
return len(self.seghash)
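# A minimal usage sketch (not part of the original module); the points are
# assumed to be comparable, hashable 3-tuple-like vertices.
def _cache_example(p1, p2, p3):
    cache = LineSegment3DCache()
    cache.add(p1, p2)
    cache.add(p2, p3)
    cache.add(p1, p2)          # second add only bumps the existing segment's count
    assert len(cache) == 2
    return cache.endpoint_segments(p2)   # both segments have p2 as an endpoint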
# vim: expandtab tabstop=4 shiftwidth=4 softtabstop=4 nowrap
| [] |
d1hotpep/cacheable | cacheable/adapter/PeeweeAdapter.py | 9ea97d6504965179f8fe495b67e466c068719445 | import peewee
import playhouse.kv
from time import time
from . import CacheableAdapter
class PeeweeAdapter(CacheableAdapter, peewee.Model):
key = peewee.CharField(max_length=256, unique=True)
value = playhouse.kv.JSONField()
mtime = peewee.IntegerField(default=time)
ttl = peewee.IntegerField(default=0)
class Meta:
database = peewee.Proxy()
def __init__(self, db_connection, table_name=None):
if table_name:
self._meta.db_table = table_name
self._meta.database.initialize(db_connection)
def multiget(self, keys):
cls = self.__class__
res = self.select(cls.key, cls.value) \
.where(cls.key << keys & self.__ttl_filter()) \
.tuples()
return { x[0] : x[1] for x in res }
@classmethod
def multiset(cls, data, ttl=None):
ts = int(time())
ttl = ttl or 0
kvs = []
for key, value in data.items():
kvs.append({
cls.key : key,
cls.value : value,
cls.mtime : ts,
cls.ttl : ttl,
})
cls.insert_many(kvs).upsert().execute()
def delete(self, key_or_keys):
if list == type(key_or_keys):
keys = key_or_keys
else:
keys = [ key_or_keys ]
cls = self.__class__
peewee.DeleteQuery(cls).where(cls.key << keys).execute()
def list(self, prefix=None, limit=None):
cls = self.__class__
q = self.select(cls.key, cls.value)
if prefix:
if self.__db_type() == peewee.SqliteDatabase:
wildcard = '*'
else:
wildcard = '%'
q = q.where(cls.key % ('%s%s' % (prefix, wildcard)))
q = q.where(self.__ttl_filter())
if limit:
q = q.limit(limit)
res = { x[0] : x[1] for x in q.tuples() }
if prefix:
res = { k[len(prefix):] : v for k, v in res.items() }
return res
def __ttl_filter(self):
"""
Add the TTL where clause to a query, to filter out stale results
"""
ts = int(time())
cls = self.__class__
        # entries with ttl == 0 never expire; otherwise mtime + ttl must still be in the future
        return (cls.ttl == 0) | (cls.mtime + cls.ttl > ts)
def __db_type(self):
return type(self._meta.database.obj)
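# A minimal usage sketch (not part of the original module), assuming an
# in-memory SQLite database and the peewee 2.x API used above; the table
# name and keys are made-up examples.
def _adapter_example():
    db = peewee.SqliteDatabase(':memory:')
    adapter = PeeweeAdapter(db, table_name='cacheable_example')
    adapter.create_table(fail_silently=True)
    adapter.multiset({'greeting': 'hello', 'answer': 42}, ttl=60)
    return adapter.multiget(['greeting', 'answer'])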
| [((153, 198), 'peewee.CharField', 'peewee.CharField', ([], {'max_length': '(256)', 'unique': '(True)'}), '(max_length=256, unique=True)\n', (169, 198), False, 'import peewee\n'), ((248, 281), 'peewee.IntegerField', 'peewee.IntegerField', ([], {'default': 'time'}), '(default=time)\n', (267, 281), False, 'import peewee\n'), ((292, 322), 'peewee.IntegerField', 'peewee.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (311, 322), False, 'import peewee\n'), ((359, 373), 'peewee.Proxy', 'peewee.Proxy', ([], {}), '()\n', (371, 373), False, 'import peewee\n'), ((867, 873), 'time.time', 'time', ([], {}), '()\n', (871, 873), False, 'from time import time\n'), ((2161, 2167), 'time.time', 'time', ([], {}), '()\n', (2165, 2167), False, 'from time import time\n'), ((1366, 1389), 'peewee.DeleteQuery', 'peewee.DeleteQuery', (['cls'], {}), '(cls)\n', (1384, 1389), False, 'import peewee\n')] |
plutoyuxie/mmgeneration | mmgen/models/architectures/arcface/helpers.py | 0a7f5d16c970de1766ebf049d7a0264fe506504b | from collections import namedtuple
import torch
from torch.nn import (AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d,
Module, PReLU, ReLU, Sequential, Sigmoid)
# yapf: disable
"""
ArcFace implementation from [TreB1eN](https://github.com/TreB1eN/InsightFace_Pytorch) # isort:skip # noqa
"""
# yapf: enable
class Flatten(Module):
"""Flatten Module."""
def forward(self, input):
return input.view(input.size(0), -1)
def l2_norm(input, axis=1):
"""l2 normalization.
Args:
input (torch.Tensor): The input tensor.
axis (int, optional): Specifies which axis of input to calculate the
norm across. Defaults to 1.
Returns:
Tensor: Tensor after L2 normalization per-instance.
"""
norm = torch.norm(input, 2, axis, True)
output = torch.div(input, norm)
return output
class Bottleneck(namedtuple('Block', ['in_channel', 'depth', 'stride'])):
"""A named tuple describing a ResNet block."""
def get_block(in_channel, depth, num_units, stride=2):
"""Get a single block config.
Args:
in_channel (int): Input channels.
depth (int): Output channels.
num_units (int): Number of unit modules.
stride (int, optional): Conv2d stride. Defaults to 2.
Returns:
list: A list of unit modules' config.
"""
return [Bottleneck(in_channel, depth, stride)
] + [Bottleneck(depth, depth, 1) for i in range(num_units - 1)]
def get_blocks(num_layers):
"""Get block configs of backbone.
Args:
num_layers (int): Number of ConvBlock layers in backbone.
Raises:
ValueError: `num_layers` must be one of [50, 100, 152].
Returns:
list: A list of block configs.
"""
if num_layers == 50:
blocks = [
get_block(in_channel=64, depth=64, num_units=3),
get_block(in_channel=64, depth=128, num_units=4),
get_block(in_channel=128, depth=256, num_units=14),
get_block(in_channel=256, depth=512, num_units=3)
]
elif num_layers == 100:
blocks = [
get_block(in_channel=64, depth=64, num_units=3),
get_block(in_channel=64, depth=128, num_units=13),
get_block(in_channel=128, depth=256, num_units=30),
get_block(in_channel=256, depth=512, num_units=3)
]
elif num_layers == 152:
blocks = [
get_block(in_channel=64, depth=64, num_units=3),
get_block(in_channel=64, depth=128, num_units=8),
get_block(in_channel=128, depth=256, num_units=36),
get_block(in_channel=256, depth=512, num_units=3)
]
else:
raise ValueError(
'Invalid number of layers: {}. Must be one of [50, 100, 152]'.
format(num_layers))
return blocks
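# A minimal sketch (not part of the original module) of how the block configs
# could be stacked into a backbone body; choosing bottleneck_IR (defined below)
# as the unit module is an assumption for illustration.
def build_body(num_layers=50, unit_module=None):
    unit_module = unit_module or bottleneck_IR
    modules = []
    for block in get_blocks(num_layers):
        for unit in block:
            modules.append(unit_module(unit.in_channel, unit.depth, unit.stride))
    return Sequential(*modules)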
class SEModule(Module):
"""Squeeze-and-Excitation Modules.
Args:
channels (int): Input channels.
reduction (int): Intermediate channels reduction ratio.
"""
def __init__(self, channels, reduction):
super(SEModule, self).__init__()
self.avg_pool = AdaptiveAvgPool2d(1)
self.fc1 = Conv2d(
channels,
channels // reduction,
kernel_size=1,
padding=0,
bias=False)
self.relu = ReLU(inplace=True)
self.fc2 = Conv2d(
channels // reduction,
channels,
kernel_size=1,
padding=0,
bias=False)
self.sigmoid = Sigmoid()
def forward(self, x):
"""Forward Function."""
module_input = x
x = self.avg_pool(x)
x = self.fc1(x)
x = self.relu(x)
x = self.fc2(x)
x = self.sigmoid(x)
return module_input * x
class bottleneck_IR(Module):
"""Intermediate Resblock of bottleneck.
Args:
in_channel (int): Input channels.
depth (int): Output channels.
stride (int): Conv2d stride.
"""
def __init__(self, in_channel, depth, stride):
"""Intermediate Resblock of bottleneck.
Args:
in_channel (int): Input channels.
depth (int): Output channels.
stride (int): Conv2d stride.
"""
super(bottleneck_IR, self).__init__()
if in_channel == depth:
self.shortcut_layer = MaxPool2d(1, stride)
else:
self.shortcut_layer = Sequential(
Conv2d(in_channel, depth, (1, 1), stride, bias=False),
BatchNorm2d(depth))
self.res_layer = Sequential(
BatchNorm2d(in_channel),
Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),
PReLU(depth), Conv2d(depth, depth, (3, 3), stride, 1, bias=False),
BatchNorm2d(depth))
def forward(self, x):
"""Forward function."""
shortcut = self.shortcut_layer(x)
res = self.res_layer(x)
return res + shortcut
class bottleneck_IR_SE(Module):
"""Intermediate Resblock of bottleneck with SEModule.
Args:
in_channel (int): Input channels.
depth (int): Output channels.
stride (int): Conv2d stride.
"""
def __init__(self, in_channel, depth, stride):
super(bottleneck_IR_SE, self).__init__()
if in_channel == depth:
self.shortcut_layer = MaxPool2d(1, stride)
else:
self.shortcut_layer = Sequential(
Conv2d(in_channel, depth, (1, 1), stride, bias=False),
BatchNorm2d(depth))
self.res_layer = Sequential(
BatchNorm2d(in_channel),
Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),
PReLU(depth), Conv2d(depth, depth, (3, 3), stride, 1, bias=False),
BatchNorm2d(depth), SEModule(depth, 16))
def forward(self, x):
"""Forward function."""
shortcut = self.shortcut_layer(x)
res = self.res_layer(x)
return res + shortcut
| [((890, 944), 'collections.namedtuple', 'namedtuple', (['"""Block"""', "['in_channel', 'depth', 'stride']"], {}), "('Block', ['in_channel', 'depth', 'stride'])\n", (900, 944), False, 'from collections import namedtuple\n'), ((784, 816), 'torch.norm', 'torch.norm', (['input', '(2)', 'axis', '(True)'], {}), '(input, 2, axis, True)\n', (794, 816), False, 'import torch\n'), ((830, 852), 'torch.div', 'torch.div', (['input', 'norm'], {}), '(input, norm)\n', (839, 852), False, 'import torch\n'), ((3144, 3164), 'torch.nn.AdaptiveAvgPool2d', 'AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (3161, 3164), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((3184, 3261), 'torch.nn.Conv2d', 'Conv2d', (['channels', '(channels // reduction)'], {'kernel_size': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(channels, channels // reduction, kernel_size=1, padding=0, bias=False)\n', (3190, 3261), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((3343, 3361), 'torch.nn.ReLU', 'ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (3347, 3361), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((3381, 3458), 'torch.nn.Conv2d', 'Conv2d', (['(channels // reduction)', 'channels'], {'kernel_size': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(channels // reduction, channels, kernel_size=1, padding=0, bias=False)\n', (3387, 3458), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((3543, 3552), 'torch.nn.Sigmoid', 'Sigmoid', ([], {}), '()\n', (3550, 3552), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4378, 4398), 'torch.nn.MaxPool2d', 'MaxPool2d', (['(1)', 'stride'], {}), '(1, stride)\n', (4387, 4398), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4615, 4638), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['in_channel'], {}), '(in_channel)\n', (4626, 4638), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4652, 4708), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(3, 3)', '(1, 1)', '(1)'], {'bias': '(False)'}), '(in_channel, depth, (3, 3), (1, 1), 1, bias=False)\n', (4658, 4708), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4722, 4734), 'torch.nn.PReLU', 'PReLU', (['depth'], {}), '(depth)\n', (4727, 4734), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4736, 4787), 'torch.nn.Conv2d', 'Conv2d', (['depth', 'depth', '(3, 3)', 'stride', '(1)'], {'bias': '(False)'}), '(depth, depth, (3, 3), stride, 1, bias=False)\n', (4742, 4787), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4801, 4819), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\n', (4812, 4819), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5379, 5399), 'torch.nn.MaxPool2d', 'MaxPool2d', (['(1)', 'stride'], {}), '(1, stride)\n', (5388, 5399), False, 'from torch.nn import 
AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5616, 5639), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['in_channel'], {}), '(in_channel)\n', (5627, 5639), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5653, 5709), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(3, 3)', '(1, 1)', '(1)'], {'bias': '(False)'}), '(in_channel, depth, (3, 3), (1, 1), 1, bias=False)\n', (5659, 5709), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5723, 5735), 'torch.nn.PReLU', 'PReLU', (['depth'], {}), '(depth)\n', (5728, 5735), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5737, 5788), 'torch.nn.Conv2d', 'Conv2d', (['depth', 'depth', '(3, 3)', 'stride', '(1)'], {'bias': '(False)'}), '(depth, depth, (3, 3), stride, 1, bias=False)\n', (5743, 5788), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5802, 5820), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\n', (5813, 5820), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4475, 4528), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(1, 1)', 'stride'], {'bias': '(False)'}), '(in_channel, depth, (1, 1), stride, bias=False)\n', (4481, 4528), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4546, 4564), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\n', (4557, 4564), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5476, 5529), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(1, 1)', 'stride'], {'bias': '(False)'}), '(in_channel, depth, (1, 1), stride, bias=False)\n', (5482, 5529), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5547, 5565), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\n', (5558, 5565), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n')] |
mahi0601/SpotifyPlaylist | createplaylist.py | 55e30bb4c13f291693b892d6eeccc70b4a769805 | import os
from spotifyclient import SpotifyClient
def main():
spotify_client = SpotifyClient(os.getenv("SPOTIFY_AUTHORIZATION_TOKEN"),
os.getenv("SPOTIFY_USER_ID"))
# get last played tracks
num_tracks_to_visualise = int(input("How many tracks would you like to visualise? "))
last_played_tracks = spotify_client.get_last_played_tracks(num_tracks_to_visualise)
print(f"\nHere are the last {num_tracks_to_visualise} tracks you listened to on Spotify:")
for index, track in enumerate(last_played_tracks):
print(f"{index+1}- {track}")
# choose which tracks to use as a seed to generate a playlist
indexes = input("\nEnter a list of up to 5 tracks you'd like to use as seeds. Use indexes separated by a space: ")
indexes = indexes.split()
seed_tracks = [last_played_tracks[int(index)-1] for index in indexes]
# get recommended tracks based off seed tracks
recommended_tracks = spotify_client.get_track_recommendations(seed_tracks)
print("\nHere are the recommended tracks which will be included in your new playlist:")
for index, track in enumerate(recommended_tracks):
print(f"{index+1}- {track}")
# get playlist name from user and create playlist
playlist_name = input("\nWhat's the playlist name? ")
playlist = spotify_client.create_playlist(playlist_name)
print(f"\nPlaylist '{playlist.name}' was created successfully.")
# populate playlist with recommended tracks
spotify_client.populate_playlist(playlist, recommended_tracks)
print(f"\nRecommended tracks successfully uploaded to playlist '{playlist.name}'.")
if __name__ == "__main__":
main() | [((100, 140), 'os.getenv', 'os.getenv', (['"""SPOTIFY_AUTHORIZATION_TOKEN"""'], {}), "('SPOTIFY_AUTHORIZATION_TOKEN')\n", (109, 140), False, 'import os\n'), ((177, 205), 'os.getenv', 'os.getenv', (['"""SPOTIFY_USER_ID"""'], {}), "('SPOTIFY_USER_ID')\n", (186, 205), False, 'import os\n')] |
thieman/dd-trace-py | tests/contrib/flask/test_request.py | 1e87c9bdf7769032982349c4ccc0e1c2e6866a16 | # -*- coding: utf-8 -*-
from ddtrace.compat import PY2
from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY
from ddtrace.contrib.flask.patch import flask_version
from ddtrace.ext import http
from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID, HTTP_HEADER_PARENT_ID
from flask import abort
from . import BaseFlaskTestCase
from ...utils import assert_span_http_status_code
base_exception_name = 'builtins.Exception'
if PY2:
base_exception_name = 'exceptions.Exception'
class FlaskRequestTestCase(BaseFlaskTestCase):
def test_request(self):
"""
When making a request
We create the expected spans
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
spans = self.get_spans()
self.assertEqual(len(spans), 8)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.index',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('flask.endpoint'), 'index')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/')
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/')
assert_span_http_status_code(req_span, 200)
assert http.QUERY_STRING not in req_span.meta
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.index')
self.assertEqual(handler_span.resource, '/')
self.assertEqual(req_span.error, 0)
def test_request_query_string_trace(self):
"""Make sure when making a request that we create the expected spans and capture the query string."""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_http_config('flask', dict(trace_query_string=True)):
self.client.get('/?foo=bar&baz=biz')
spans = self.get_spans()
# Request tags
assert spans[0].get_tag(http.QUERY_STRING) == 'foo=bar&baz=biz'
def test_analytics_global_on_integration_default(self):
"""
When making a request
            When the integration's event sample rate is not set and trace search is enabled globally
We expect the root span to have the appropriate tag
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_global_config(dict(analytics_enabled=True)):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
root = self.get_root_span()
root.assert_matches(
name='flask.request',
metrics={
ANALYTICS_SAMPLE_RATE_KEY: 1.0,
},
)
for span in self.spans:
if span == root:
continue
self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_analytics_global_on_integration_on(self):
"""
When making a request
When an integration trace search is enabled and sample rate is set and globally trace search is enabled
We expect the root span to have the appropriate tag
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_global_config(dict(analytics_enabled=True)):
with self.override_config('flask', dict(analytics_enabled=True, analytics_sample_rate=0.5)):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
root = self.get_root_span()
root.assert_matches(
name='flask.request',
metrics={
ANALYTICS_SAMPLE_RATE_KEY: 0.5,
},
)
for span in self.spans:
if span == root:
continue
self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_analytics_global_off_integration_default(self):
"""
When making a request
            When the integration's event sample rate is not set and trace search is disabled globally
We expect the root span to not include tag
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_global_config(dict(analytics_enabled=False)):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
root = self.get_root_span()
self.assertIsNone(root.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
for span in self.spans:
if span == root:
continue
self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_analytics_global_off_integration_on(self):
"""
When making a request
When an integration trace search is enabled and sample rate is set and globally trace search is disabled
We expect the root span to have the appropriate tag
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_global_config(dict(analytics_enabled=False)):
with self.override_config('flask', dict(analytics_enabled=True, analytics_sample_rate=0.5)):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
root = self.get_root_span()
root.assert_matches(
name='flask.request',
metrics={
ANALYTICS_SAMPLE_RATE_KEY: 0.5,
},
)
for span in self.spans:
if span == root:
continue
self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_distributed_tracing(self):
"""
When making a request
When distributed tracing headers are present
We create the expected spans
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
# Default: distributed tracing enabled
res = self.client.get('/', headers={
HTTP_HEADER_PARENT_ID: '12345',
HTTP_HEADER_TRACE_ID: '678910',
})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
# Assert parent and trace id are properly set on the root span
span = self.find_span_by_name(self.get_spans(), 'flask.request')
self.assertEqual(span.trace_id, 678910)
self.assertEqual(span.parent_id, 12345)
# Explicitly enable distributed tracing
with self.override_config('flask', dict(distributed_tracing_enabled=True)):
res = self.client.get('/', headers={
HTTP_HEADER_PARENT_ID: '12345',
HTTP_HEADER_TRACE_ID: '678910',
})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
# Assert parent and trace id are properly set on the root span
span = self.find_span_by_name(self.get_spans(), 'flask.request')
self.assertEqual(span.trace_id, 678910)
self.assertEqual(span.parent_id, 12345)
# With distributed tracing disabled
with self.override_config('flask', dict(distributed_tracing_enabled=False)):
res = self.client.get('/', headers={
HTTP_HEADER_PARENT_ID: '12345',
HTTP_HEADER_TRACE_ID: '678910',
})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
# Assert parent and trace id are properly set on the root span
span = self.find_span_by_name(self.get_spans(), 'flask.request')
self.assertNotEqual(span.trace_id, 678910)
self.assertIsNone(span.parent_id)
def test_request_query_string(self):
"""
When making a request
When the request contains a query string
We create the expected spans
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
res = self.client.get('/', query_string=dict(hello='flask'))
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
spans = self.get_spans()
self.assertEqual(len(spans), 8)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.index',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
# Note: contains no query string
self.assertEqual(req_span.resource, 'GET /')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('flask.endpoint'), 'index')
# Note: contains no query string
self.assertEqual(req_span.get_tag('flask.url_rule'), '/')
self.assertEqual(req_span.get_tag('http.method'), 'GET')
# Note: contains no query string
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/')
assert_span_http_status_code(req_span, 200)
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.index')
# Note: contains no query string
self.assertEqual(handler_span.resource, '/')
self.assertEqual(req_span.error, 0)
def test_request_unicode(self):
"""
When making a request
When the url contains unicode
We create the expected spans
"""
@self.app.route(u'/üŋïĉóđē')
def unicode():
return 'üŋïĉóđē', 200
res = self.client.get(u'/üŋïĉóđē')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'\xc3\xbc\xc5\x8b\xc3\xaf\xc4\x89\xc3\xb3\xc4\x91\xc4\x93')
spans = self.get_spans()
self.assertEqual(len(spans), 8)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.unicode',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, u'GET /üŋïĉóđē')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('flask.endpoint'), 'unicode')
self.assertEqual(req_span.get_tag('flask.url_rule'), u'/üŋïĉóđē')
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), u'http://localhost/üŋïĉóđē')
assert_span_http_status_code(req_span, 200)
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.unicode')
self.assertEqual(handler_span.resource, u'/üŋïĉóđē')
self.assertEqual(req_span.error, 0)
def test_request_404(self):
"""
When making a request
When the requested endpoint was not found
We create the expected spans
"""
res = self.client.get('/not-found')
self.assertEqual(res.status_code, 404)
spans = self.get_spans()
self.assertEqual(len(spans), 9)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'flask.handle_user_exception',
'flask.handle_http_exception',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET 404')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/not-found')
assert_span_http_status_code(req_span, 404)
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('404 Not Found'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound')
def test_request_abort_404(self):
"""
When making a request
When the requested endpoint calls `abort(404)`
We create the expected spans
"""
@self.app.route('/not-found')
def not_found():
abort(404)
res = self.client.get('/not-found')
self.assertEqual(res.status_code, 404)
spans = self.get_spans()
self.assertEqual(len(spans), 10)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.not_found',
'flask.handle_user_exception',
'flask.handle_http_exception',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /not-found')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/not-found')
assert_span_http_status_code(req_span, 404)
self.assertEqual(req_span.get_tag('flask.endpoint'), 'not_found')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/not-found')
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('404 Not Found'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound')
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.not_found')
self.assertEqual(handler_span.resource, '/not-found')
self.assertEqual(handler_span.error, 1)
self.assertTrue(handler_span.get_tag('error.msg').startswith('404 Not Found'))
self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(handler_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound')
def test_request_500(self):
"""
When making a request
When the requested endpoint raises an exception
We create the expected spans
"""
@self.app.route('/500')
def fivehundred():
raise Exception('500 error')
res = self.client.get('/500')
self.assertEqual(res.status_code, 500)
spans = self.get_spans()
self.assertEqual(len(spans), 9)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.fivehundred',
'flask.handle_user_exception',
'flask.handle_exception',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /500')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 1)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/500')
assert_span_http_status_code(req_span, 500)
self.assertEqual(req_span.get_tag('flask.endpoint'), 'fivehundred')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/500')
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), base_exception_name)
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.fivehundred')
self.assertEqual(handler_span.resource, '/500')
self.assertEqual(handler_span.error, 1)
self.assertTrue(handler_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(handler_span.get_tag('error.type'), base_exception_name)
# User exception span
user_ex_span = spans[5]
self.assertEqual(user_ex_span.service, 'flask')
self.assertEqual(user_ex_span.name, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.resource, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.error, 1)
self.assertTrue(user_ex_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(user_ex_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(user_ex_span.get_tag('error.type'), base_exception_name)
def test_request_501(self):
"""
When making a request
When the requested endpoint calls `abort(501)`
We create the expected spans
"""
@self.app.route('/501')
def fivehundredone():
abort(501)
res = self.client.get('/501')
self.assertEqual(res.status_code, 501)
spans = self.get_spans()
self.assertEqual(len(spans), 10)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.fivehundredone',
'flask.handle_user_exception',
'flask.handle_http_exception',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /501')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 1)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/501')
assert_span_http_status_code(req_span, 501)
self.assertEqual(req_span.get_tag('flask.endpoint'), 'fivehundredone')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/501')
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('501 Not Implemented'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), 'werkzeug.exceptions.NotImplemented')
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.fivehundredone')
self.assertEqual(handler_span.resource, '/501')
self.assertEqual(handler_span.error, 1)
self.assertTrue(handler_span.get_tag('error.msg').startswith('501 Not Implemented'))
self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(handler_span.get_tag('error.type'), 'werkzeug.exceptions.NotImplemented')
# User exception span
user_ex_span = spans[5]
self.assertEqual(user_ex_span.service, 'flask')
self.assertEqual(user_ex_span.name, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.resource, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.error, 0)
def test_request_error_handler(self):
"""
When making a request
        When the requested endpoint raises an exception
        And a custom 500 error handler is registered
        We create the expected spans
"""
@self.app.errorhandler(500)
def error_handler(e):
return 'Whoops', 500
@self.app.route('/500')
def fivehundred():
raise Exception('500 error')
res = self.client.get('/500')
self.assertEqual(res.status_code, 500)
self.assertEqual(res.data, b'Whoops')
spans = self.get_spans()
if flask_version >= (0, 12, 0):
self.assertEqual(len(spans), 11)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.fivehundred',
'flask.handle_user_exception',
'flask.handle_exception',
'tests.contrib.flask.test_request.error_handler',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
else:
self.assertEqual(len(spans), 10)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.fivehundred',
'flask.handle_user_exception',
'flask.handle_exception',
'tests.contrib.flask.test_request.error_handler',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /500')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 1)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/500')
assert_span_http_status_code(req_span, 500)
self.assertEqual(req_span.get_tag('flask.endpoint'), 'fivehundred')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/500')
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), base_exception_name)
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.fivehundred')
self.assertEqual(handler_span.resource, '/500')
self.assertEqual(handler_span.error, 1)
self.assertTrue(handler_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(handler_span.get_tag('error.type'), base_exception_name)
# User exception span
user_ex_span = spans[5]
self.assertEqual(user_ex_span.service, 'flask')
self.assertEqual(user_ex_span.name, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.resource, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.error, 1)
self.assertTrue(user_ex_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(user_ex_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(user_ex_span.get_tag('error.type'), base_exception_name)
| [((16267, 16277), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (16272, 16277), False, 'from flask import abort\n'), ((22922, 22932), 'flask.abort', 'abort', (['(501)'], {}), '(501)\n', (22927, 22932), False, 'from flask import abort\n')] |
blazejdolicki/CHEDAR | ConvDR/data/preprocess_cast19.py | e4819775e7f6ffa2d6f1ad798ee262f01370b236 | import argparse
from trec_car import read_data
from tqdm import tqdm
import pickle
import os
import json
import copy
from utils.util import NUM_FOLD
def parse_sim_file(filename):
"""
Reads the deduplicated documents file and stores the
duplicate passage ids into a dictionary
"""
sim_dict = {}
lines = open(filename).readlines()
for line in lines:
data = line.strip().split(':')
if len(data[1]) > 0:
sim_docs = data[-1].split(',')
for docs in sim_docs:
sim_dict[docs] = 1
return sim_dict
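# Illustrative note (not from the original repo): based on the parsing above, a
# line in the duplicates file is assumed to look like the made-up example below,
# and only the ids listed after the colon are marked as duplicates:
#
#   MARCO_100:MARCO_101,MARCO_102   ->   {'MARCO_101': 1, 'MARCO_102': 1}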
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--car_cbor", type=str)
parser.add_argument("--msmarco_collection", type=str)
parser.add_argument("--duplicate_file", type=str)
parser.add_argument("--cast_dir", type=str)
parser.add_argument("--out_data_dir", type=str)
parser.add_argument("--out_collection_dir", type=str)
args = parser.parse_args()
# INPUT
sim_file = args.duplicate_file
cast_topics_raw_file = os.path.join(args.cast_dir,
"evaluation_topics_v1.0.json")
cast_topics_manual_file = os.path.join(
args.cast_dir, "evaluation_topics_annotated_resolved_v1.0.tsv")
cast_qrels_file = os.path.join(args.cast_dir, "2019qrels.txt")
# OUTPUT
out_topics_file = os.path.join(args.out_data_dir, "eval_topics.jsonl")
out_raw_queries_file = os.path.join(args.out_data_dir, "queries.raw.tsv")
out_manual_queries_file = os.path.join(args.out_data_dir,
"queries.manual.tsv")
out_qrels_file = os.path.join(args.out_data_dir, "qrels.tsv")
car_id_to_idx_file = os.path.join(args.out_collection_dir,
"car_id_to_idx.pickle")
car_idx_to_id_file = os.path.join(args.out_collection_dir,
"car_idx_to_id.pickle")
out_collection_file = os.path.join(args.out_collection_dir,
"collection.tsv")
# 1. Combine TREC-CAR & MS MARCO, remove duplicate passages, assign new ids
car_id_to_idx = {}
car_idx_to_id = []
if os.path.exists(out_collection_file) and os.path.exists(
car_id_to_idx_file) and os.path.exists(car_idx_to_id_file):
print("Preprocessed collection found. Loading car_id_to_idx...")
with open(car_id_to_idx_file, "rb") as f:
car_id_to_idx = pickle.load(f)
else:
sim_dict = parse_sim_file(sim_file)
car_base_id = 10000000
i = 0
with open(out_collection_file, "w", encoding="utf-8") as f: #FIX change 'a' to 'w' in normal run
print("Processing TREC-CAR...")
for para in tqdm(
read_data.iter_paragraphs(open(args.car_cbor, 'rb'))):
car_id = "CAR_" + para.para_id
text = para.get_text()
text = text.replace("\t", " ").replace("\n",
" ").replace("\r", " ")
idx = car_base_id + i
car_id_to_idx[
car_id] = idx # e.g. CAR_76a4a716d4b1b01995c6663ee16e94b4ca35fdd3 -> 10000044
car_idx_to_id.append(car_id)
f.write("{}\t{}\n".format(idx, text))
i += 1
print("Processing MS MARCO...")
removed = 0
with open(args.msmarco_collection, "r") as m:
for line in tqdm(m):
marco_id, text = line.strip().split("\t")
if ("MARCO_" + marco_id) in sim_dict:
removed += 1
continue
f.write("{}\t{}\n".format(marco_id, text))
print("Removed " + str(removed) + " passages")
print("Dumping id mappings to {} and {}...".format(car_id_to_idx_file, car_idx_to_id_file))
with open(car_id_to_idx_file, "wb") as f:
pickle.dump(car_id_to_idx, f)
with open(car_idx_to_id_file, "wb") as f:
pickle.dump(car_idx_to_id, f)
# 2. Process queries
print("Processing CAsT utterances...")
with open(cast_topics_raw_file, "r") as fin:
raw_data = json.load(fin)
with open(cast_topics_manual_file, "r") as fin:
annonated_lines = fin.readlines()
out_raw_queries = open(out_raw_queries_file, "w")
out_manual_queries = open(out_manual_queries_file, "w")
all_annonated = {}
for line in annonated_lines:
splitted = line.split('\t')
out_manual_queries.write(line)
topic_query = splitted[0]
query = splitted[1].strip()
topic_id = topic_query.split('_')[0]
query_id = topic_query.split('_')[1]
if topic_id not in all_annonated:
all_annonated[topic_id] = {}
all_annonated[topic_id][query_id] = query
out_manual_queries.close()
topic_number_dict = {}
data = []
for group in raw_data:
topic_number, description, turn, title = str(
group['number']), group.get('description',
''), group['turn'], group.get(
'title', '')
queries = []
for query in turn:
query_number, raw_utterance = str(
query['number']), query['raw_utterance']
queries.append(raw_utterance)
record = {}
record['topic_number'] = topic_number
record['query_number'] = query_number
record['description'] = description
record['title'] = title
record['input'] = copy.deepcopy(queries)
record['target'] = all_annonated[topic_number][query_number]
out_raw_queries.write("{}_{}\t{}\n".format(topic_number,
query_number,
raw_utterance))
if not topic_number in topic_number_dict:
topic_number_dict[topic_number] = len(topic_number_dict)
data.append(record)
out_raw_queries.close()
with open(out_topics_file, 'w') as fout:
for item in data:
json_str = json.dumps(item)
fout.write(json_str + '\n')
# Split eval data into K-fold
topic_per_fold = len(topic_number_dict) // NUM_FOLD
for i in range(NUM_FOLD):
with open(out_topics_file + "." + str(i), 'w') as fout:
for item in data:
idx = topic_number_dict[item['topic_number']]
if idx // topic_per_fold == i:
json_str = json.dumps(item)
fout.write(json_str + '\n')
# 3. Process and convert qrels
print("Processing qrels...")
with open(cast_qrels_file, "r") as oq, open(out_qrels_file, "w") as nq:
for line in oq:
qid, _, pid, rel = line.strip().split()
if pid.startswith("CAR_"):
assert car_id_to_idx[pid] != -1
pid = car_id_to_idx[pid]
elif pid.startswith("MARCO_"):
pid = int(pid[6:])
else:
continue
nq.write(qid + "\t0\t" + str(pid) + "\t" + rel + "\n")
print("End")
| [((622, 647), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (645, 647), False, 'import argparse\n'), ((1073, 1131), 'os.path.join', 'os.path.join', (['args.cast_dir', '"""evaluation_topics_v1.0.json"""'], {}), "(args.cast_dir, 'evaluation_topics_v1.0.json')\n", (1085, 1131), False, 'import os\n'), ((1202, 1278), 'os.path.join', 'os.path.join', (['args.cast_dir', '"""evaluation_topics_annotated_resolved_v1.0.tsv"""'], {}), "(args.cast_dir, 'evaluation_topics_annotated_resolved_v1.0.tsv')\n", (1214, 1278), False, 'import os\n'), ((1310, 1354), 'os.path.join', 'os.path.join', (['args.cast_dir', '"""2019qrels.txt"""'], {}), "(args.cast_dir, '2019qrels.txt')\n", (1322, 1354), False, 'import os\n'), ((1391, 1443), 'os.path.join', 'os.path.join', (['args.out_data_dir', '"""eval_topics.jsonl"""'], {}), "(args.out_data_dir, 'eval_topics.jsonl')\n", (1403, 1443), False, 'import os\n'), ((1471, 1521), 'os.path.join', 'os.path.join', (['args.out_data_dir', '"""queries.raw.tsv"""'], {}), "(args.out_data_dir, 'queries.raw.tsv')\n", (1483, 1521), False, 'import os\n'), ((1552, 1605), 'os.path.join', 'os.path.join', (['args.out_data_dir', '"""queries.manual.tsv"""'], {}), "(args.out_data_dir, 'queries.manual.tsv')\n", (1564, 1605), False, 'import os\n'), ((1670, 1714), 'os.path.join', 'os.path.join', (['args.out_data_dir', '"""qrels.tsv"""'], {}), "(args.out_data_dir, 'qrels.tsv')\n", (1682, 1714), False, 'import os\n'), ((1740, 1801), 'os.path.join', 'os.path.join', (['args.out_collection_dir', '"""car_id_to_idx.pickle"""'], {}), "(args.out_collection_dir, 'car_id_to_idx.pickle')\n", (1752, 1801), False, 'import os\n'), ((1865, 1926), 'os.path.join', 'os.path.join', (['args.out_collection_dir', '"""car_idx_to_id.pickle"""'], {}), "(args.out_collection_dir, 'car_idx_to_id.pickle')\n", (1877, 1926), False, 'import os\n'), ((1991, 2046), 'os.path.join', 'os.path.join', (['args.out_collection_dir', '"""collection.tsv"""'], {}), "(args.out_collection_dir, 'collection.tsv')\n", (2003, 2046), False, 'import os\n'), ((2221, 2256), 'os.path.exists', 'os.path.exists', (['out_collection_file'], {}), '(out_collection_file)\n', (2235, 2256), False, 'import os\n'), ((2261, 2295), 'os.path.exists', 'os.path.exists', (['car_id_to_idx_file'], {}), '(car_id_to_idx_file)\n', (2275, 2295), False, 'import os\n'), ((2313, 2347), 'os.path.exists', 'os.path.exists', (['car_idx_to_id_file'], {}), '(car_idx_to_id_file)\n', (2327, 2347), False, 'import os\n'), ((4280, 4294), 'json.load', 'json.load', (['fin'], {}), '(fin)\n', (4289, 4294), False, 'import json\n'), ((2500, 2514), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2511, 2514), False, 'import pickle\n'), ((4021, 4050), 'pickle.dump', 'pickle.dump', (['car_id_to_idx', 'f'], {}), '(car_id_to_idx, f)\n', (4032, 4050), False, 'import pickle\n'), ((4113, 4142), 'pickle.dump', 'pickle.dump', (['car_idx_to_id', 'f'], {}), '(car_idx_to_id, f)\n', (4124, 4142), False, 'import pickle\n'), ((5699, 5721), 'copy.deepcopy', 'copy.deepcopy', (['queries'], {}), '(queries)\n', (5712, 5721), False, 'import copy\n'), ((6286, 6302), 'json.dumps', 'json.dumps', (['item'], {}), '(item)\n', (6296, 6302), False, 'import json\n'), ((3538, 3545), 'tqdm.tqdm', 'tqdm', (['m'], {}), '(m)\n', (3542, 3545), False, 'from tqdm import tqdm\n'), ((6698, 6714), 'json.dumps', 'json.dumps', (['item'], {}), '(item)\n', (6708, 6714), False, 'import json\n')] |
brandonxiang/example-pyQGIS | coord_convert/geojson_utils.py | a61d0321d223d0b82e44bb809521965858fde857 | __doc__ = 'github: https://github.com/brandonxiang/geojson-python-utils'
import math
from coordTransform_utils import wgs84togcj02
from coordTransform_utils import gcj02tobd09
def linestrings_intersect(line1, line2):
"""
    Check whether two geojson linestrings intersect each other.
    reference: http://www.kevlindev.com/gui/math/intersection/Intersection.js
    Keyword arguments:
    line1 -- first line geojson object
    line2 -- second line geojson object
    if line1 intersects line2, return the array of intersection points, else an empty array
"""
intersects = []
for i in range(0, len(line1['coordinates']) - 1):
for j in range(0, len(line2['coordinates']) - 1):
a1_x = line1['coordinates'][i][1]
a1_y = line1['coordinates'][i][0]
a2_x = line1['coordinates'][i + 1][1]
a2_y = line1['coordinates'][i + 1][0]
b1_x = line2['coordinates'][j][1]
b1_y = line2['coordinates'][j][0]
b2_x = line2['coordinates'][j + 1][1]
b2_y = line2['coordinates'][j + 1][0]
ua_t = (b2_x - b1_x) * (a1_y - b1_y) - \
(b2_y - b1_y) * (a1_x - b1_x)
ub_t = (a2_x - a1_x) * (a1_y - b1_y) - \
(a2_y - a1_y) * (a1_x - b1_x)
u_b = (b2_y - b1_y) * (a2_x - a1_x) - (b2_x - b1_x) * (a2_y - a1_y)
if not u_b == 0:
u_a = ua_t / u_b
u_b = ub_t / u_b
if 0 <= u_a and u_a <= 1 and 0 <= u_b and u_b <= 1:
intersects.append({'type': 'Point', 'coordinates': [
a1_x + u_a * (a2_x - a1_x), a1_y + u_a * (a2_y - a1_y)]})
# if len(intersects) == 0:
# intersects = False
return intersects
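# A minimal usage sketch (not part of the original module; the coordinates are
# made up). The helper below is never called on import, it only illustrates the
# expected input/output shape of linestrings_intersect.
def _example_linestrings_intersect():
    line_a = {'type': 'LineString', 'coordinates': [[0.0, 0.0], [2.0, 2.0]]}
    line_b = {'type': 'LineString', 'coordinates': [[0.0, 2.0], [2.0, 0.0]]}
    # The two diagonals of the square (0,0)-(2,2) cross at (1, 1):
    return linestrings_intersect(line_a, line_b)  # [{'type': 'Point', 'coordinates': [1.0, 1.0]}]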
def _bbox_around_polycoords(coords):
"""
bounding box
"""
x_all = []
y_all = []
for first in coords[0]:
x_all.append(first[1])
y_all.append(first[0])
return [min(x_all), min(y_all), max(x_all), max(y_all)]
def _point_in_bbox(point, bounds):
"""
    Check whether the point is inside the bounding box.
"""
return not(point['coordinates'][1] < bounds[0] or point['coordinates'][1] > bounds[2]
or point['coordinates'][0] < bounds[1] or point['coordinates'][0] > bounds[3])
def _pnpoly(x, y, coords):
"""
    the ray casting (pnpoly) algorithm to judge whether a point is located inside a polygon
reference: https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html#Explanation
"""
vert = [[0, 0]]
for coord in coords:
for node in coord:
vert.append(node)
vert.append(coord[0])
vert.append([0, 0])
inside = False
i = 0
j = len(vert) - 1
while i < len(vert):
if ((vert[i][0] > y) != (vert[j][0] > y)) and (x < (vert[j][1] - vert[i][1])
* (y - vert[i][0]) / (vert[j][0] - vert[i][0]) + vert[i][1]):
inside = not inside
j = i
i += 1
return inside
def _point_in_polygon(point, coords):
inside_box = False
for coord in coords:
if inside_box:
break
if _point_in_bbox(point, _bbox_around_polycoords(coord)):
inside_box = True
if not inside_box:
return False
inside_poly = False
for coord in coords:
if inside_poly:
break
if _pnpoly(point['coordinates'][1], point['coordinates'][0], coord):
inside_poly = True
return inside_poly
def point_in_polygon(point, poly):
"""
    Check whether the point is located inside a polygon.
    Keyword arguments:
    point -- point geojson object
    poly -- polygon geojson object
    return True if the point is inside poly, else False
"""
coords = [poly['coordinates']] if poly[
'type'] == 'Polygon' else poly['coordinates']
return _point_in_polygon(point, coords)
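# A small sketch (not part of the original module; the coordinates are made up)
# showing point_in_polygon on a simple square. It is defined only for
# illustration and never called on import.
def _example_point_in_polygon():
    square = {
        'type': 'Polygon',
        'coordinates': [[[0.0, 0.0], [4.0, 0.0], [4.0, 4.0], [0.0, 4.0], [0.0, 0.0]]],
    }
    inside = point_in_polygon({'type': 'Point', 'coordinates': [2.0, 2.0]}, square)   # True
    outside = point_in_polygon({'type': 'Point', 'coordinates': [5.0, 5.0]}, square)  # False
    return inside, outside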
def point_in_multipolygon(point, multipoly):
"""
    Check whether the point is located inside a multipolygon (donut polygons are not supported).
    Keyword arguments:
    point -- point geojson object
    multipoly -- multipolygon geojson object
    return True if the point is inside multipoly, else False
"""
coords_array = [multipoly['coordinates']] if multipoly[
'type'] == "MultiPolygon" else multipoly['coordinates']
for coords in coords_array:
if _point_in_polygon(point, coords):
return True
return False
def number2radius(number):
"""
    convert degrees into radians
    Keyword arguments:
    number -- angle in degrees
    return the angle in radians
"""
return number * math.pi / 180
def number2degree(number):
"""
    convert radians into degrees
    Keyword arguments:
    number -- angle in radians
    return the angle in degrees
"""
return number * 180 / math.pi
def draw_circle(radius_in_meters, center_point, steps=15):
"""
get a circle shape polygon based on centerPoint and radius
Keyword arguments:
    radius_in_meters -- circle radius in meters
    center_point -- point geojson object marking the circle center
    steps -- number of vertices of the approximating polygon (minimum 15)
    return a Polygon geojson object approximating the circle
"""
steps = steps if steps > 15 else 15
center = [center_point['coordinates'][1], center_point['coordinates'][0]]
dist = (radius_in_meters / 1000) / 6371
# convert meters to radiant
rad_center = [number2radius(center[0]), number2radius(center[1])]
# 15 sided circle
poly = []
for step in range(0, steps):
brng = 2 * math.pi * step / steps
lat = math.asin(math.sin(rad_center[0]) * math.cos(dist) +
math.cos(rad_center[0]) * math.sin(dist) * math.cos(brng))
lng = rad_center[1] + math.atan2(math.sin(brng) * math.sin(dist)
* math.cos(rad_center[0]), math.cos(dist) - math.sin(rad_center[0]) * math.sin(lat))
poly.append([number2degree(lng), number2degree(lat)])
return {"type": "Polygon", "coordinates": [poly]}
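# Usage sketch (not part of the original module; the center coordinates are made
# up): approximate a 1 km circle around a point with the default 15-sided polygon.
def _example_draw_circle():
    center = {'type': 'Point', 'coordinates': [121.5, 31.2]}
    circle = draw_circle(1000, center)
    return len(circle['coordinates'][0])  # 15 vertices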
def rectangle_centroid(rectangle):
"""
get the centroid of the rectangle
Keyword arguments:
rectangle -- polygon geojson object
return centroid
"""
bbox = rectangle['coordinates'][0]
xmin = bbox[0][0]
ymin = bbox[0][1]
xmax = bbox[2][0]
ymax = bbox[2][1]
xwidth = xmax - xmin
ywidth = ymax - ymin
return {'type': 'Point', 'coordinates': [xmin + xwidth / 2, ymin + ywidth / 2]}
def point_distance(point1, point2):
"""
    calculate the distance between two points on the sphere, like Google Maps does
    reference http://www.movable-type.co.uk/scripts/latlong.html
    Keyword arguments:
    point1 -- point one geojson object
    point2 -- point two geojson object
    return the great-circle distance in meters
"""
lon1 = point1['coordinates'][0]
lat1 = point1['coordinates'][1]
lon2 = point2['coordinates'][0]
lat2 = point2['coordinates'][1]
deg_lat = number2radius(lat2 - lat1)
deg_lon = number2radius(lon2 - lon1)
a = math.pow(math.sin(deg_lat / 2), 2) + math.cos(number2radius(lat1)) * \
math.cos(number2radius(lat2)) * math.pow(math.sin(deg_lon / 2), 2)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
return (6371 * c) * 1000
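# Sanity-check sketch (not part of the original module): one degree of latitude
# is roughly 111.2 km on the sphere radius used here (R = 6371 km).
def _example_point_distance():
    p1 = {'type': 'Point', 'coordinates': [0.0, 0.0]}
    p2 = {'type': 'Point', 'coordinates': [0.0, 1.0]}
    return point_distance(p1, p2)  # ~111195 meters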
def geometry_within_radius(geometry, center, radius):
"""
    Check whether a point, linestring or polygon is inside a radius around a center
Keyword arguments:
geometry -- point/linstring/polygon geojson object
center -- point geojson object
radius -- radius
if(geometry inside radius) return true else false
"""
if geometry['type'] == 'Point':
return point_distance(geometry, center) <= radius
elif geometry['type'] == 'LineString' or geometry['type'] == 'Polygon':
point = {}
# it's enough to check the exterior ring of the Polygon
coordinates = geometry['coordinates'][0] if geometry['type'] == 'Polygon' else geometry['coordinates']
for coordinate in coordinates:
point['coordinates'] = coordinate
if point_distance(point, center) > radius:
return False
return True
def area(poly):
"""
calculate the area of polygon
Keyword arguments:
poly -- polygon geojson object
return polygon area
"""
poly_area = 0
# TODO: polygon holes at coordinates[1]
points = poly['coordinates'][0]
j = len(points) - 1
count = len(points)
for i in range(0, count):
p1_x = points[i][1]
p1_y = points[i][0]
p2_x = points[j][1]
p2_y = points[j][0]
poly_area += p1_x * p2_y
poly_area -= p1_y * p2_x
j = i
poly_area /= 2
return poly_area
def centroid(poly):
"""
get the centroid of polygon
adapted from http://paulbourke.net/geometry/polyarea/javascript.txt
Keyword arguments:
poly -- polygon geojson object
return polygon centroid
"""
f_total = 0
x_total = 0
y_total = 0
# TODO: polygon holes at coordinates[1]
points = poly['coordinates'][0]
j = len(points) - 1
count = len(points)
for i in range(0, count):
p1_x = points[i][1]
p1_y = points[i][0]
p2_x = points[j][1]
p2_y = points[j][0]
f_total = p1_x * p2_y - p2_x * p1_y
x_total += (p1_x + p2_x) * f_total
y_total += (p1_y + p2_y) * f_total
j = i
six_area = area(poly) * 6
return {'type': 'Point', 'coordinates': [y_total / six_area, x_total / six_area]}
def destination_point(point, brng, dist):
"""
    Calculate a destination point based on a base point, a bearing and a distance
    Keyword arguments:
    point -- point geojson object
    brng -- bearing angle in degrees
    dist -- distance in kilometers between destination and base point
return destination point object
"""
dist = float(dist) / 6371 # convert dist to angular distance in radians
brng = number2radius(brng)
lon1 = number2radius(point['coordinates'][0])
lat1 = number2radius(point['coordinates'][1])
lat2 = math.asin(math.sin(lat1) * math.cos(dist) +
math.cos(lat1) * math.sin(dist) * math.cos(brng))
lon2 = lon1 + math.atan2(math.sin(brng) * math.sin(dist) *
math.cos(lat1), math.cos(dist) - math.sin(lat1) * math.sin(lat2))
lon2 = (lon2 + 3 * math.pi) % (2 * math.pi) - math.pi # normalise to -180 degree +180 degree
return {'type': 'Point', 'coordinates': [number2degree(lon2), number2degree(lat2)]}
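# Sketch (not part of the original module): moving ~111.2 km due north (bearing 0)
# from the origin should land roughly one degree of latitude higher.
def _example_destination_point():
    origin = {'type': 'Point', 'coordinates': [0.0, 0.0]}
    return destination_point(origin, 0, 111.195)  # ~{'type': 'Point', 'coordinates': [0.0, 1.0]}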
def simplify(source, kink=20):
"""
source[] array of geojson points
kink in metres, kinks above this depth kept
kink depth is the height of the triangle abc where a-b and b-c are two consecutive line segments
"""
    source_coord = [{"lng": pt['coordinates'][0], "lat": pt['coordinates'][1]} for pt in source]
    # count, n_stack, n_dest, start, end, i, sig;
    # dev_sqr, max_dev_sqr, band_sqr;
    # x12, y12, d12, x13, y13, d13, x23, y23, d23;
    F = (math.pi / 180.0) * 0.5
    index = []  # array of indexes of source points to include in the reduced line
    # check for simple cases
    count = len(source_coord)
    if count < 3:
        return source_coord  # one or two points
    # more complex case. initialize stack
    sig_start = [0] * count  # indices of start & end of working section
    sig_end = [0] * count
    band_sqr = kink * 360.0 / (2.0 * math.pi * 6378137.0)  # Now in degrees
    band_sqr *= band_sqr
    n_dest = 0
    sig_start[0] = 0
    sig_end[0] = count - 1
    n_stack = 1
    # while the stack is not empty
    while n_stack > 0:
        # ... pop the top-most entries off the stacks
        start = sig_start[n_stack - 1]
        end = sig_end[n_stack - 1]
        n_stack -= 1
        if (end - start) > 1:  # any intermediate points ?
            # ... yes, so find most deviant intermediate point to either side of line joining start & end points
            x12 = source_coord[end]["lng"] - source_coord[start]["lng"]
            y12 = source_coord[end]["lat"] - source_coord[start]["lat"]
            if math.fabs(x12) > 180.0:
                x12 = 360.0 - math.fabs(x12)
            x12 *= math.cos(F * (source_coord[end]["lat"] + source_coord[start]["lat"]))  # use avg lat to reduce lng
            d12 = (x12 * x12) + (y12 * y12)
            i = start + 1
            sig = start
            max_dev_sqr = -1.0
            while i < end:
                x13 = source_coord[i]["lng"] - source_coord[start]["lng"]
                y13 = source_coord[i]["lat"] - source_coord[start]["lat"]
                if math.fabs(x13) > 180.0:
                    x13 = 360.0 - math.fabs(x13)
                x13 *= math.cos(F * (source_coord[i]["lat"] + source_coord[start]["lat"]))
                d13 = (x13 * x13) + (y13 * y13)
                x23 = source_coord[i]["lng"] - source_coord[end]["lng"]
                y23 = source_coord[i]["lat"] - source_coord[end]["lat"]
                if math.fabs(x23) > 180.0:
                    x23 = 360.0 - math.fabs(x23)
                x23 *= math.cos(F * (source_coord[i]["lat"] + source_coord[end]["lat"]))
                d23 = (x23 * x23) + (y23 * y23)
                if d13 >= (d12 + d23):
                    dev_sqr = d23
                elif d23 >= (d12 + d13):
                    dev_sqr = d13
                else:
                    dev_sqr = (x13 * y12 - y13 * x12) * (x13 * y12 - y13 * x12) / d12  # solve triangle
                if dev_sqr > max_dev_sqr:
                    sig = i
                    max_dev_sqr = dev_sqr
                i += 1
            if max_dev_sqr < band_sqr:  # is there a sig. intermediate point ?
                # ... no, so transfer current start point
                index.append(start)
                n_dest += 1
            else:  # ... yes, so push two sub-sections on stack for further processing
                n_stack += 1
                sig_start[n_stack - 1] = sig
                sig_end[n_stack - 1] = end
                n_stack += 1
                sig_start[n_stack - 1] = start
                sig_end[n_stack - 1] = sig
        else:  # ... no intermediate points, so transfer current start point
            index.append(start)
            n_dest += 1
    # transfer last point
    index.append(count - 1)
    n_dest += 1
    # make return array
    r = [source_coord[index[i]] for i in range(0, n_dest)]
    return [{"type": "Point", "coordinates": [o["lng"], o["lat"]]} for o in r]
def wgs2gcj(geometry):
"""
convert wgs84 to gcj
    reference: https://github.com/wandergis/coordTransform_py
"""
# TODO: point linestring point
if geometry['type'] == 'MultiLineString':
coordinates = geometry['coordinates']
for lines in coordinates:
for line in lines:
line[0], line[1] = wgs84togcj02(line[0], line[1])
return geometry
def gcj2bd(geometry):
"""
convert gcj to bd
    reference: https://github.com/wandergis/coordTransform_py
"""
# TODO: point linestring point
if geometry['type'] == 'MultiLineString':
coordinates = geometry['coordinates']
for lines in coordinates:
for line in lines:
line[0], line[1] = gcj02tobd09(line[0], line[1])
return geometry
| [((7021, 7042), 'math.sin', 'math.sin', (['(deg_lat / 2)'], {}), '(deg_lat / 2)\n', (7029, 7042), False, 'import math\n'), ((7181, 7193), 'math.sqrt', 'math.sqrt', (['a'], {}), '(a)\n', (7190, 7193), False, 'import math\n'), ((7195, 7211), 'math.sqrt', 'math.sqrt', (['(1 - a)'], {}), '(1 - a)\n', (7204, 7211), False, 'import math\n'), ((12146, 12203), 'math.cos', 'math.cos', (["(F * (source[end]['lat'] + source[start]['lat']))"], {}), "(F * (source[end]['lat'] + source[start]['lat']))\n", (12154, 12203), False, 'import math\n'), ((7132, 7153), 'math.sin', 'math.sin', (['(deg_lon / 2)'], {}), '(deg_lon / 2)\n', (7140, 7153), False, 'import math\n'), ((10081, 10095), 'math.sin', 'math.sin', (['lat1'], {}), '(lat1)\n', (10089, 10095), False, 'import math\n'), ((10098, 10112), 'math.cos', 'math.cos', (['dist'], {}), '(dist)\n', (10106, 10112), False, 'import math\n'), ((10170, 10184), 'math.cos', 'math.cos', (['brng'], {}), '(brng)\n', (10178, 10184), False, 'import math\n'), ((10278, 10292), 'math.cos', 'math.cos', (['lat1'], {}), '(lat1)\n', (10286, 10292), False, 'import math\n'), ((10294, 10308), 'math.cos', 'math.cos', (['dist'], {}), '(dist)\n', (10302, 10308), False, 'import math\n'), ((12058, 12072), 'math.fabs', 'math.fabs', (['x12'], {}), '(x12)\n', (12067, 12072), False, 'import math\n'), ((12624, 12679), 'math.cos', 'math.cos', (["(F * (source[i]['lat'] + source[start]['lat']))"], {}), "(F * (source[i]['lat'] + source[start]['lat']))\n", (12632, 12679), False, 'import math\n'), ((12963, 13016), 'math.cos', 'math.cos', (["(F * (source[i]['lat'] + source[end]['lat']))"], {}), "(F * (source[i]['lat'] + source[end]['lat']))\n", (12971, 13016), False, 'import math\n'), ((14749, 14779), 'coordTransform_utils.wgs84togcj02', 'wgs84togcj02', (['line[0]', 'line[1]'], {}), '(line[0], line[1])\n', (14761, 14779), False, 'from coordTransform_utils import wgs84togcj02\n'), ((15155, 15184), 'coordTransform_utils.gcj02tobd09', 'gcj02tobd09', (['line[0]', 'line[1]'], {}), '(line[0], line[1])\n', (15166, 15184), False, 'from coordTransform_utils import gcj02tobd09\n'), ((5546, 5569), 'math.sin', 'math.sin', (['rad_center[0]'], {}), '(rad_center[0])\n', (5554, 5569), False, 'import math\n'), ((5572, 5586), 'math.cos', 'math.cos', (['dist'], {}), '(dist)\n', (5580, 5586), False, 'import math\n'), ((5656, 5670), 'math.cos', 'math.cos', (['brng'], {}), '(brng)\n', (5664, 5670), False, 'import math\n'), ((5788, 5811), 'math.cos', 'math.cos', (['rad_center[0]'], {}), '(rad_center[0])\n', (5796, 5811), False, 'import math\n'), ((5813, 5827), 'math.cos', 'math.cos', (['dist'], {}), '(dist)\n', (5821, 5827), False, 'import math\n'), ((10136, 10150), 'math.cos', 'math.cos', (['lat1'], {}), '(lat1)\n', (10144, 10150), False, 'import math\n'), ((10153, 10167), 'math.sin', 'math.sin', (['dist'], {}), '(dist)\n', (10161, 10167), False, 'import math\n'), ((10215, 10229), 'math.sin', 'math.sin', (['brng'], {}), '(brng)\n', (10223, 10229), False, 'import math\n'), ((10232, 10246), 'math.sin', 'math.sin', (['dist'], {}), '(dist)\n', (10240, 10246), False, 'import math\n'), ((10311, 10325), 'math.sin', 'math.sin', (['lat1'], {}), '(lat1)\n', (10319, 10325), False, 'import math\n'), ((10328, 10342), 'math.sin', 'math.sin', (['lat2'], {}), '(lat2)\n', (10336, 10342), False, 'import math\n'), ((12112, 12126), 'math.fabs', 'math.fabs', (['x12'], {}), '(x12)\n', (12121, 12126), False, 'import math\n'), ((12528, 12542), 'math.fabs', 'math.fabs', (['x13'], {}), '(x13)\n', (12537, 12542), False, 'import math\n'), ((12867, 
12881), 'math.fabs', 'math.fabs', (['x23'], {}), '(x23)\n', (12876, 12881), False, 'import math\n'), ((5613, 5636), 'math.cos', 'math.cos', (['rad_center[0]'], {}), '(rad_center[0])\n', (5621, 5636), False, 'import math\n'), ((5639, 5653), 'math.sin', 'math.sin', (['dist'], {}), '(dist)\n', (5647, 5653), False, 'import math\n'), ((5713, 5727), 'math.sin', 'math.sin', (['brng'], {}), '(brng)\n', (5721, 5727), False, 'import math\n'), ((5730, 5744), 'math.sin', 'math.sin', (['dist'], {}), '(dist)\n', (5738, 5744), False, 'import math\n'), ((5830, 5853), 'math.sin', 'math.sin', (['rad_center[0]'], {}), '(rad_center[0])\n', (5838, 5853), False, 'import math\n'), ((5856, 5869), 'math.sin', 'math.sin', (['lat'], {}), '(lat)\n', (5864, 5869), False, 'import math\n'), ((12586, 12600), 'math.fabs', 'math.fabs', (['x13'], {}), '(x13)\n', (12595, 12600), False, 'import math\n'), ((12925, 12939), 'math.fabs', 'math.fabs', (['x23'], {}), '(x23)\n', (12934, 12939), False, 'import math\n')] |
Rinku92/Mini_Project3 | config.py | eab11ce3743fddda2ccc158367a37d4522ba1e39 | import os
'''
user = os.environ['POSTGRES_USER']
password = os.environ['POSTGRES_PASSWORD']
host = os.environ['POSTGRES_HOST']
database = os.environ['POSTGRES_DB']
port = os.environ['POSTGRES_PORT']
'''
user = 'test'
password = 'password'
host = 'localhost'
database = 'example'
port = '5432'
DATABASE_CONNECTION_URI = f'postgresql+psycopg2://{user}:{password}@{host}:{port}/{database}' | [] |
sercangul/HackerRank | 10_days_of_statistics_8_1.py | e6d7056babe03baafee8d7f1cacdca7c28b72ded | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 3 19:26:47 2019
@author: sercangul
"""
n = 5
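# Least-squares fit of y = a + b*x to the n (x, y) pairs read below, using the
# closed-form estimators b = (n*sum(xy) - sum(x)*sum(y)) / (n*sum(x^2) - sum(x)^2)
# and a = mean(y) - b*mean(x), then predicting y at x = 80.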
xy = [map(int, input().split()) for _ in range(n)]
sx, sy, sx2, sxy = map(sum, zip(*[(x, y, x**2, x * y) for x, y in xy]))
b = (n * sxy - sx * sy) / (n * sx2 - sx**2)
a = (sy / n) - b * (sx / n)
print('{:.3f}'.format(a + b * 80)) | [] |
vermouth1992/rl-util | rlutils/gym/envs/reset_obs/hopper.py | 4c06ab8f5c96a44e58f88cf30146bcb837057112 | import gym.envs.mujoco.hopper as hopper
import numpy as np
class HopperEnv(hopper.HopperEnv):
def _get_obs(self):
return np.concatenate([
self.sim.data.qpos.flat[1:],
self.sim.data.qvel.flat,
])
def reset_obs(self, obs):
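        # The observation built by _get_obs drops the root joint's x-position
        # (qpos.flat[1:]), so prepend a zero before splitting the flat state
        # back into qpos and qvel.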
state = np.insert(obs, 0, 0.)
qpos = state[:self.model.nq]
qvel = state[self.model.nq:]
self.set_state(qpos, qvel)
return self._get_obs()
| [((135, 205), 'numpy.concatenate', 'np.concatenate', (['[self.sim.data.qpos.flat[1:], self.sim.data.qvel.flat]'], {}), '([self.sim.data.qpos.flat[1:], self.sim.data.qvel.flat])\n', (149, 205), True, 'import numpy as np\n'), ((288, 310), 'numpy.insert', 'np.insert', (['obs', '(0)', '(0.0)'], {}), '(obs, 0, 0.0)\n', (297, 310), True, 'import numpy as np\n')] |
yutian-zhao/recommenders | reco_utils/recommender/deeprec/io/iterator.py | 17b9c1280a79019dd91f50b3a7e66f25cb5004b1 | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import numpy as np
# import tensorflow as tf
import abc
class BaseIterator(object):
@abc.abstractmethod
def parser_one_line(self, line):
pass
@abc.abstractmethod
def load_data_from_file(self, infile):
pass
@abc.abstractmethod
def _convert_data(self, labels, features):
pass
@abc.abstractmethod
def gen_feed_dict(self, data_dict):
pass
# class FFMTextIterator(BaseIterator):
# """Data loader for FFM format based models, such as xDeepFM.
# Iterator will not load the whole data into memory. Instead, it loads data into memory
# per mini-batch, so that large files can be used as input data.
# """
# def __init__(self, hparams, graph, col_spliter=" ", ID_spliter="%"):
# """Initialize an iterator. Create necessary placeholders for the model.
# Args:
# hparams (obj): Global hyper-parameters. Some key settings such as #_feature and #_field are there.
# graph (obj): the running graph. All created placeholder will be added to this graph.
# col_spliter (str): column splitter in one line.
# ID_spliter (str): ID splitter in one line.
# """
# self.feature_cnt = hparams.FEATURE_COUNT
# self.field_cnt = hparams.FIELD_COUNT
# self.col_spliter = col_spliter
# self.ID_spliter = ID_spliter
# self.batch_size = hparams.batch_size
# self.graph = graph
# with self.graph.as_default():
# self.labels = tf.placeholder(tf.float32, [None, 1], name="label")
# self.fm_feat_indices = tf.placeholder(
# tf.int64, [None, 2], name="fm_feat_indices"
# )
# self.fm_feat_values = tf.placeholder(
# tf.float32, [None], name="fm_feat_values"
# )
# self.fm_feat_shape = tf.placeholder(tf.int64, [None], name="fm_feat_shape")
# self.dnn_feat_indices = tf.placeholder(
# tf.int64, [None, 2], name="dnn_feat_indices"
# )
# self.dnn_feat_values = tf.placeholder(
# tf.int64, [None], name="dnn_feat_values"
# )
# self.dnn_feat_weights = tf.placeholder(
# tf.float32, [None], name="dnn_feat_weights"
# )
# self.dnn_feat_shape = tf.placeholder(
# tf.int64, [None], name="dnn_feat_shape"
# )
# def parser_one_line(self, line):
# """Parse one string line into feature values.
# Args:
# line (str): a string indicating one instance
# Returns:
# list: Parsed results,including label, features and impression_id
# """
# impression_id = 0
# words = line.strip().split(self.ID_spliter)
# if len(words) == 2:
# impression_id = words[1].strip()
# cols = words[0].strip().split(self.col_spliter)
# label = float(cols[0])
# features = []
# for word in cols[1:]:
# if not word.strip():
# continue
# tokens = word.split(":")
# features.append([int(tokens[0]) - 1, int(tokens[1]) - 1, float(tokens[2])])
# return label, features, impression_id
# def load_data_from_file(self, infile):
# """Read and parse data from a file.
# Args:
# infile (str): text input file. Each line in this file is an instance.
# Returns:
# obj: An iterator that will yields parsed results, in the format of graph feed_dict.
# """
# label_list = []
# features_list = []
# impression_id_list = []
# cnt = 0
# with tf.gfile.GFile(infile, "r") as rd:
# for line in rd:
# label, features, impression_id = self.parser_one_line(line)
# features_list.append(features)
# label_list.append(label)
# impression_id_list.append(impression_id)
# cnt += 1
# if cnt == self.batch_size:
# res = self._convert_data(label_list, features_list)
# yield self.gen_feed_dict(res), impression_id_list, self.batch_size
# label_list = []
# features_list = []
# impression_id_list = []
# cnt = 0
# if cnt > 0:
# res = self._convert_data(label_list, features_list)
# yield self.gen_feed_dict(res), impression_id_list, cnt
# def _convert_data(self, labels, features):
# """Convert data into numpy arrays that are good for further operation.
# Args:
# labels (list): a list of ground-truth labels.
# features (list): a 3-dimensional list, carrying a list (batch_size) of feature array,
# where each feature array is a list of [field_idx, feature_idx, feature_value] tuple.
# Returns:
# dict: A dictionary, contains multiple numpy arrays that are convenient for further operation.
# """
# dim = self.feature_cnt
# FIELD_COUNT = self.field_cnt
# instance_cnt = len(labels)
# fm_feat_indices = []
# fm_feat_values = []
# fm_feat_shape = [instance_cnt, dim]
# dnn_feat_indices = []
# dnn_feat_values = []
# dnn_feat_weights = []
# dnn_feat_shape = [instance_cnt * FIELD_COUNT, -1]
# for i in range(instance_cnt):
# m = len(features[i])
# dnn_feat_dic = {}
# for j in range(m):
# fm_feat_indices.append([i, features[i][j][1]])
# fm_feat_values.append(features[i][j][2])
# if features[i][j][0] not in dnn_feat_dic:
# dnn_feat_dic[features[i][j][0]] = 0
# else:
# dnn_feat_dic[features[i][j][0]] += 1
# dnn_feat_indices.append(
# [
# i * FIELD_COUNT + features[i][j][0],
# dnn_feat_dic[features[i][j][0]],
# ]
# )
# dnn_feat_values.append(features[i][j][1])
# dnn_feat_weights.append(features[i][j][2])
# if dnn_feat_shape[1] < dnn_feat_dic[features[i][j][0]]:
# dnn_feat_shape[1] = dnn_feat_dic[features[i][j][0]]
# dnn_feat_shape[1] += 1
# sorted_index = sorted(
# range(len(dnn_feat_indices)),
# key=lambda k: (dnn_feat_indices[k][0], dnn_feat_indices[k][1]),
# )
# res = {}
# res["fm_feat_indices"] = np.asarray(fm_feat_indices, dtype=np.int64)
# res["fm_feat_values"] = np.asarray(fm_feat_values, dtype=np.float32)
# res["fm_feat_shape"] = np.asarray(fm_feat_shape, dtype=np.int64)
# res["labels"] = np.asarray([[label] for label in labels], dtype=np.float32)
# res["dnn_feat_indices"] = np.asarray(dnn_feat_indices, dtype=np.int64)[
# sorted_index
# ]
# res["dnn_feat_values"] = np.asarray(dnn_feat_values, dtype=np.int64)[
# sorted_index
# ]
# res["dnn_feat_weights"] = np.asarray(dnn_feat_weights, dtype=np.float32)[
# sorted_index
# ]
# res["dnn_feat_shape"] = np.asarray(dnn_feat_shape, dtype=np.int64)
# return res
# def gen_feed_dict(self, data_dict):
# """Construct a dictionary that maps graph elements to values.
# Args:
# data_dict (dict): a dictionary that maps string name to numpy arrays.
# Returns:
# dict: a dictionary that maps graph elements to numpy arrays.
# """
# feed_dict = {
# self.labels: data_dict["labels"],
# self.fm_feat_indices: data_dict["fm_feat_indices"],
# self.fm_feat_values: data_dict["fm_feat_values"],
# self.fm_feat_shape: data_dict["fm_feat_shape"],
# self.dnn_feat_indices: data_dict["dnn_feat_indices"],
# self.dnn_feat_values: data_dict["dnn_feat_values"],
# self.dnn_feat_weights: data_dict["dnn_feat_weights"],
# self.dnn_feat_shape: data_dict["dnn_feat_shape"],
# }
# return feed_dict
| [] |
kolyasalubov/Lv-677.PythonCore | HW6/YuliiaKutsyk/3_ unfinished_loop_bug_fixing.py | c9f9107c734a61e398154a90b8a3e249276c2704 | def create_array(n):
    res = []
    i = 1
    while i <= n:
        res.append(i)
        i += 1
    return res
| [] |
SDhuangao/netease-cloud-music-dl | ncm/api.py | 4a970504e1fec0a9848f3920b392aa507d6b3879 | # -*- coding: utf-8 -*-
import requests
from ncm.encrypt import encrypted_request
from ncm.constants import headers
from ncm.constants import song_download_url
from ncm.constants import get_song_url
from ncm.constants import get_album_url
from ncm.constants import get_artist_url
from ncm.constants import get_playlist_url
class CloudApi(object):
def __init__(self, timeout=30):
super().__init__()
self.session = requests.session()
self.session.headers.update(headers)
self.timeout = timeout
def get_request(self, url):
response = self.session.get(url, timeout=self.timeout)
result = response.json()
if result['code'] != 200:
print('Return {} when try to get {}'.format(result, url))
else:
return result
def post_request(self, url, params):
data = encrypted_request(params)
response = self.session.post(url, data=data, timeout=self.timeout)
result = response.json()
if result['code'] != 200:
print('Return {} when try to post {} => {}'.format(result, params, url))
else:
return result
def get_song(self, song_id):
"""
Get song info by song id
:param song_id:
:return:
"""
url = get_song_url(song_id)
result = self.get_request(url)
return result['songs'][0]
def get_album_songs(self, album_id):
"""
Get all album songs info by album id
:param album_id:
:return:
"""
url = get_album_url(album_id)
result = self.get_request(url)
return result['album']['songs']
def get_song_url(self, song_id, bit_rate=320000):
"""Get a song's download url.
:params song_id: song id<int>.
:params bit_rate: {'MD 128k': 128000, 'HD 320k': 320000}
:return:
"""
url = song_download_url
csrf = ''
params = {'ids': [song_id], 'br': bit_rate, 'csrf_token': csrf}
result = self.post_request(url, params)
song_url = result['data'][0]['url']
return song_url
def get_hot_songs(self, artist_id):
"""
Get a artist 50 hot songs
:param artist_id:
:return:
"""
url = get_artist_url(artist_id)
result = self.get_request(url)
return result['hotSongs']
def get_playlist_songs(self, playlist_id):
"""
Get a public playlist all songs
:param playlist_id:
:return:
"""
url = get_playlist_url(playlist_id)
result = self.get_request(url)
return result['playlist']['trackIds'], result['playlist']['name']
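# Usage sketch (illustrative only; the ids below are made up and every call hits
# the live NetEase Cloud Music web API over the network):
#
#   api = CloudApi()
#   song = api.get_song(186016)
#   download_url = api.get_song_url(186016, bit_rate=320000)
#   track_ids, playlist_name = api.get_playlist_songs(24381616)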
| [((438, 456), 'requests.session', 'requests.session', ([], {}), '()\n', (454, 456), False, 'import requests\n'), ((865, 890), 'ncm.encrypt.encrypted_request', 'encrypted_request', (['params'], {}), '(params)\n', (882, 890), False, 'from ncm.encrypt import encrypted_request\n'), ((1304, 1325), 'ncm.constants.get_song_url', 'get_song_url', (['song_id'], {}), '(song_id)\n', (1316, 1325), False, 'from ncm.constants import get_song_url\n'), ((1567, 1590), 'ncm.constants.get_album_url', 'get_album_url', (['album_id'], {}), '(album_id)\n', (1580, 1590), False, 'from ncm.constants import get_album_url\n'), ((2291, 2316), 'ncm.constants.get_artist_url', 'get_artist_url', (['artist_id'], {}), '(artist_id)\n', (2305, 2316), False, 'from ncm.constants import get_artist_url\n'), ((2577, 2606), 'ncm.constants.get_playlist_url', 'get_playlist_url', (['playlist_id'], {}), '(playlist_id)\n', (2593, 2606), False, 'from ncm.constants import get_playlist_url\n')] |
Web-Dev-Collaborative/algos | book/trees/binary_search_tree.py | d280581d74ded382094283d931a202eb55fd8369 | # -*- coding: utf-8 -*-
"""
The `TreeNode` class provides many helper functions that make the work
done in the `BinarySearchTree` class methods much easier. The
constructor for a `TreeNode`, along with these helper functions, is
shown below. As you can see, many of these helper functions help to
classify a node according to its own position as a child, (left or
right) and the kind of children the node has. The `TreeNode` class will
also explicitly keep track of the parent as an attribute of each node.
You will see why this is important when we discuss the implementation
for the `del` operator.
One of the more interesting methods of `TreeNode` provides an interface
to simply iterate over all the keys in the tree in order. You already
know how to traverse a binary tree in order, using the `inorder`
traversal algorithm. However, because we want our iterator to operate
lazily, in this case we use the `yield` keyword to define our `__iter__`
method as a Python generator. Pay close attention to the `__iter__`
implementation as at first glance you might think that the code is
not recursive: in fact, because `__iter__` overrides the `for x
in` operation for iteration, it really is recursive!
Our full implementation of `TreeNode` is provided below. It includes
three further methods `find_successor`, `find_min` and `splice_out`
which you can ignore for now as we will return to them later when
discussing deletion.
"""
class TreeNode(object):
def __init__(self, key, val, left=None, right=None, parent=None):
self.key = key
self.val = val
self.left = left
self.right = right
self.parent = parent
def is_left_child(self):
return self.parent and self.parent.left == self
def is_right_child(self):
return self.parent and self.parent.right == self
def is_leaf(self):
return not (self.right or self.left)
def has_any_children(self):
return self.right or self.left
def has_both_children(self):
return self.right and self.left
def has_one_child(self):
return self.has_any_children() and not self.has_both_children()
def replace_node_data(self, key, val, left, right):
self.key = key
self.val = val
self.left = left
self.right = right
if self.left:
self.left.parent = self
if self.right:
self.right.parent = self
def __iter__(self):
if self is None:
return
if self.left:
# `in` calls `__iter__` so is recursive
for elem in self.left:
yield elem
yield self.key
if self.right:
# recurse again
for elem in self.right:
yield elem
def find_successor(self):
if self.right:
return self.right.find_min()
if self.parent is None:
return None
if self.is_left_child():
return self.parent
self.parent.right = None
successor = self.parent.find_successor()
self.parent.right = self
return successor
def find_min(self):
current = self
while current.left:
current = current.left
return current
def splice_out(self):
if self.is_leaf():
if self.is_left_child():
self.parent.left = None
else:
self.parent.right = None
else:
promoted_node = self.left or self.right
if self.is_left_child():
self.parent.left = promoted_node
else:
self.parent.right = promoted_node
promoted_node.parent = self.parent
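# A small sketch (not part of the book text; the keys and values are made up)
# showing the lazy in-order iteration provided by TreeNode.__iter__. It is
# defined only for illustration and never called.
def _example_treenode_iteration():
    root = TreeNode(5, 'five')
    root.left = TreeNode(3, 'three', parent=root)
    root.right = TreeNode(8, 'eight', parent=root)
    return list(root)  # [3, 5, 8]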
"""
Now that we have our `TreeNode` class we can begin to write
`BinarySearchTree` itself. Recall that the core functionality of this
class will be to enable `put`ing to and `get`ing from the tree, so we
begin our implementation with the `put` functionality.
In order to enable the `tree[1] = 'foo'` style assignment interface for
our `BinarySearchTree` instances, we override the `__setitem__` magic
method. In this method we first check to see if the tree already has a
root. If there is not a root then we create a new `TreeNode` and set it
as the root of the tree. If a root node is already in place then `put`
calls the private, recursive, helper function `_put` to search the tree
according to the following algorithm:
- Starting at the root of the tree, search the binary tree comparing
the new key to the key in the current node. If the new key is less
than the current node, search the left subtree. If the new key is
greater than the current node, search the right subtree.
- When there is no left (or right) child to search, we have found the
position in the tree where the new node should be installed.
- To add a node to the tree, create a new `TreeNode` object and insert
the object at the point discovered in the previous step.
The code below shows the Python code for inserting a new
node in the tree. The `_put` function is written recursively following
the steps outlined above. Notice that when a new child is inserted into
the tree, the `node` is passed to the new tree as the parent.
One important problem with our implementation of insert is that
duplicate keys are not handled properly. As our tree is currently implemented, a
duplicate key will create a new node with the same key value in the
right subtree of the node having the original key. The result of this is
that the node with the new key will never be found during a search. A
better way to handle the insertion of a duplicate key is for the value
associated with the new key to replace the old value. We leave fixing
this bug as an exercise for you.
"""
class BinarySearchTree(object):
TreeNodeClass = TreeNode
def __init__(self):
self.root = None
self.size = 0
def __len__(self):
return self.size
def __iter__(self):
return self.root.__iter__()
def __setitem__(self, key, val):
if self.root:
self._put(key, val, self.root)
else:
self.root = self.TreeNodeClass(key, val)
self.size = self.size + 1
def _put(self, key, val, node):
if key < node.key:
if node.left:
self._put(key, val, node.left)
else:
node.left = self.TreeNodeClass(key, val, parent=node)
else:
if node.right:
self._put(key, val, node.right)
else:
node.right = self.TreeNodeClass(key, val, parent=node)
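    # Usage sketch (not part of the book text): together with the retrieval and
    # deletion methods defined below, this gives the tree a dict-like interface:
    #
    #   tree = BinarySearchTree()
    #   tree[70] = 'seventy'
    #   tree[31] = 'thirty-one'
    #   tree[93] = 'ninety-three'
    #   tree[31]      # 'thirty-one'
    #   31 in tree    # True
    #   list(tree)    # [31, 70, 93] -- keys come back in sorted order
    #   del tree[31]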
"""
The diagram below illustrates the process for inserting a new
node into a binary search tree. The lightly shaded nodes indicate the
nodes that were visited during the insertion process.

Once the tree is constructed, the next task is to implement the
retrieval of a value for a given key. The `get` functionality is even easier
than the `put` functionality because we simply search the tree recursively
until we get to a non-matching leaf node or find a matching key. When
a matching key is found, the value stored in the val of the node is
returned.
    Again, in order to enable a `tree[1]` retrieval interface, we overload
one of Python’s magic methods—in this case `__getitem__`. Just like with
`__setitem__`, the primary purpose of this method is to handle presence
and absence of a root node, and delegates the core `get` functionality
to `_get`.
The search code in the `_get` method uses the same logic
for choosing the left or right child as the `_put` method. Notice that
the `_get` method returns a `TreeNode` to `__getitem__`, this allows `_get` to
be used as a flexible helper method for other `BinarySearchTree` methods
that may need to make use of other data from the `TreeNode` besides the
val.
"""
def __getitem__(self, key):
if self.root:
result = self._get(key, self.root)
if result:
return result.val
raise KeyError
def _get(self, key, node):
if not node:
return None
if node.key == key:
return node
if key < node.key:
return self._get(key, node.left)
return self._get(key, node.right)
"""
Using `_get`, we can implement the `in` operation by writing a
`__contains__` method for the `BinarySearchTree`. The `__contains__`
method will simply call `_get` and return `True` if `_get` returns a
value, or `False` if it returns `None`. The code for `__contains__` is
shown below.
"""
def __contains__(self, key):
return bool(self._get(key, self.root))
"""
Finally, we turn our attention to the most challenging method in the
binary search tree: the deletion of a key. The first task is
to find the node to delete by searching the tree. If the tree has more
than one node we search using the `_get` method to find the `TreeNode`
that needs to be removed. If the tree only has a single node, that means
we are removing the root of the tree, but we still must check to make
sure the key of the root matches the key that is to be deleted. In
either case if the key is not found the `del` operator raises an error.
"""
def delete(self, key):
if self.size > 1:
node_to_remove = self._get(key, self.root)
if node_to_remove:
self.remove(node_to_remove)
self.size = self.size - 1
return
elif self.size == 1 and self.root.key == key:
self.root = None
self.size = self.size - 1
return
raise KeyError('Error, key not in tree')
def __delitem__(self, key):
self.delete(key)
"""
Once we’ve found the node containing the key we want to delete, there
are three cases that we must consider:
1. The node to be deleted has no children
2. The node to be deleted has only one child
3. The node to be deleted has two children
    The first case is straightforward. If
    the current node has no children, all we need to do is delete the node
    and remove the reference to it in the parent. The code for this
    case is shown below.
"""
def remove(self, node):
if node.is_leaf() and node.parent is not None:
if node == node.parent.left:
node.parent.left = None
else:
node.parent.right = None
"""

The second case is only slightly more complicated (see below). If a node
has only a single child, then we can simply promote the child to take
the place of its parent. The code for this case is shown in the next
code sample. As you look at this code you will see that there are six
cases to consider. Since the cases are symmetric with respect to either
having a left or right child we will just discuss the case where the
current node has a left child. The decision proceeds as follows:
1. If the current node is a left child then we only need to update the
parent reference of the left child to point to the parent of the
current node, and then update the left child reference of the parent
to point to the current node’s left child.
    2. If the current node is a right child then we only need to update the
       parent reference of the left child to point to the parent of the
       current node, and then update the right child reference of the
       parent to point to the current node’s left child.
3. If the current node has no parent, it must be the root. In this case
we will just replace the `key`, `val`, `left`, and
`right` data by calling the `replace_node_data` method on
the root.
Code for this decision process may look like:
"""
elif node.has_one_child():
promoted_node = node.left or node.right
if node.is_left_child():
promoted_node.parent = node.parent
node.parent.left = promoted_node
elif node.is_right_child():
promoted_node.parent = node.parent
node.parent.right = promoted_node
else:
node.replace_node_data(
promoted_node.key,
promoted_node.val,
promoted_node.left,
promoted_node.right
)
"""

The third case is the most difficult case to handle (see below). If a
node has two children, then it is unlikely that we can simply promote
one of them to take the node’s place. We can, however, search the tree
for a node that can be used to replace the one scheduled for deletion.
What we need is a node that will preserve the binary search tree
relationships for both of the existing left and right subtrees. The node
that will do this is the node that has the next-largest key in the tree.
We call this node the **successor**, and we will look at a way to find
the successor shortly. The successor is guaranteed to have no more than
one child, so we know how to remove it using the two cases for deletion
that we have already implemented. Once the successor has been removed,
we simply put it in the tree in place of the node to be deleted.

The code to handle the third case is shown below. Notice
that we make use of the helper methods `find_successor` and `find_min` to
find the successor. To remove the successor, we make use of the method
`splice_out`. The reason we use `splice_out` is that it goes directly to
the node we want to splice out and makes the right changes. We could
call `delete` recursively, but then we would waste time re-searching for
the key node.
"""
else: # has both children
successor = node.find_successor()
if successor:
successor.splice_out()
node.key = successor.key
node.val = successor.val
"""
The code to find the successor is shown above and as you can see is a
method of the `TreeNode` class. This code makes use of the same
properties of binary search trees that cause an inorder traversal to
print out the nodes in the tree from smallest to largest. There are
three cases to consider when looking for the successor:
1. If the node has a right child, then the successor is the smallest
key in the right subtree.
2. If the node has no right child and is the left child of its parent,
then the parent is the successor.
3. If the node is the right child of its parent, and itself has no
right child, then the successor to this node is the successor of its
parent, excluding this node.
The first condition is the only one that matters for us when deleting a
node from a binary search tree.
The `find_min` method is called to find the minimum key in a subtree. You
should convince yourself that the minimum valued key in any binary
search tree is the leftmost child of the tree. Therefore the `find_min`
method simply follows the `left` references in each node of the
subtree until it reaches a node that does not have a left child.
"""
| [] |
adamruth/python-fire | fire/core.py | 6912ccd56f50e0f4bb30a0725d95858ef29f3bde | # Copyright (C) 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python Fire is a library for creating CLIs from absolutely any Python object.
You can call Fire on any Python object:
functions, classes, modules, objects, dictionaries, lists, tuples, etc.
They all work!
Python Fire turns any Python object into a command line interface.
Simply call the Fire function as your main method to create a CLI.
When using Fire to build a CLI, your main method includes a call to Fire. Eg:
def main(argv):
fire.Fire(Component)
A Fire CLI command is run by consuming the arguments in the command in order to
access a member of current component, call the current component (if it's a
function), or instantiate the current component (if it's a class). The target
component begins as Component, and at each operation the component becomes the
result of the preceding operation.
For example "command fn arg1 arg2" might access the "fn" property of the initial
target component, and then call that function with arguments 'arg1' and 'arg2'.
Additional examples are available in the examples directory.
Fire Flags, common to all Fire CLIs, must go after a separating "--". For
example, to get help for a command you might run: `command -- --help`.
The available flags for all Fire CLIs are:
-v --verbose: Include private members in help and usage information.
-h --help: Provide help and usage information for the command.
-i --interactive: Drop into a Python REPL after running the command.
--completion: Write the Bash completion script for the tool to stdout.
--separator SEPARATOR: Use SEPARATOR in place of the default separator, '-'.
--trace: Get the Fire Trace for the command.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import inspect
import json
import os
import pipes
import shlex
import sys
import types
from fire import completion
from fire import decorators
from fire import helputils
from fire import inspectutils
from fire import interact
from fire import parser
from fire import trace
import six
def Fire(component=None, command=None, name=None):
"""This function, Fire, is the main entrypoint for Python Fire.
Executes a command either from the `command` argument or from sys.argv by
recursively traversing the target object `component`'s members consuming
arguments, evaluating functions, and instantiating classes as it goes.
When building a CLI with Fire, your main method should call this function.
Args:
component: The initial target component.
command: Optional. If supplied, this is the command executed. If not
supplied, then the command is taken from sys.argv instead. This can be
a string or a list of strings; a list of strings is preferred.
name: Optional. The name of the command as entered at the command line.
Used in interactive mode and for generating the completion script.
Returns:
The result of executing the Fire command. Execution begins with the initial
target component. The component is updated by using the command arguments
to either access a member of the current component, call the current
component (if it's a function), or instantiate the current component (if
it's a class). When all arguments are consumed and there's no function left
to call or class left to instantiate, the resulting current component is
the final result.
Raises:
ValueError: If the command argument is supplied, but not a string or a
sequence of arguments.
FireExit: When Fire encounters a FireError, Fire will raise a FireExit with
code 2. When used with the help or trace flags, Fire will raise a
FireExit with code 0 if successful.
"""
name = name or os.path.basename(sys.argv[0])
# Get args as a list.
if isinstance(command, six.string_types):
args = shlex.split(command)
elif isinstance(command, (list, tuple)):
args = command
elif command is None:
# Use the command line args by default if no command is specified.
args = sys.argv[1:]
else:
raise ValueError('The command argument must be a string or a sequence of '
'arguments.')
# Determine the calling context.
caller = inspect.stack()[1]
caller_frame = caller[0]
caller_globals = caller_frame.f_globals
caller_locals = caller_frame.f_locals
context = {}
context.update(caller_globals)
context.update(caller_locals)
component_trace = _Fire(component, args, context, name)
if component_trace.HasError():
for help_flag in ['-h', '--help']:
if help_flag in component_trace.elements[-1].args:
command = '{cmd} -- --help'.format(cmd=component_trace.GetCommand())
print(('WARNING: The proper way to show help is {cmd}.\n'
'Showing help anyway.\n').format(cmd=pipes.quote(command)),
file=sys.stderr)
print('Fire trace:\n{trace}\n'.format(trace=component_trace),
file=sys.stderr)
result = component_trace.GetResult()
print(
helputils.HelpString(result, component_trace, component_trace.verbose),
file=sys.stderr)
raise FireExit(2, component_trace)
elif component_trace.show_trace and component_trace.show_help:
print('Fire trace:\n{trace}\n'.format(trace=component_trace),
file=sys.stderr)
result = component_trace.GetResult()
print(
helputils.HelpString(result, component_trace, component_trace.verbose),
file=sys.stderr)
raise FireExit(0, component_trace)
elif component_trace.show_trace:
print('Fire trace:\n{trace}'.format(trace=component_trace),
file=sys.stderr)
raise FireExit(0, component_trace)
elif component_trace.show_help:
result = component_trace.GetResult()
print(
helputils.HelpString(result, component_trace, component_trace.verbose),
file=sys.stderr)
raise FireExit(0, component_trace)
else:
_PrintResult(component_trace, verbose=component_trace.verbose)
result = component_trace.GetResult()
return result
def CompletionScript(name, component):
"""Returns the text of the Bash completion script for a Fire CLI."""
return completion.Script(name, component)
class FireError(Exception):
"""Exception used by Fire when a Fire command cannot be executed.
These exceptions are not raised by the Fire function, but rather are caught
and added to the FireTrace.
"""
class FireExit(SystemExit):
"""An exception raised by Fire to the client in the case of a FireError.
The trace of the Fire program is available on the `trace` property.
This exception inherits from SystemExit, so clients may explicitly catch it
with `except SystemExit` or `except FireExit`. If not caught, this exception
will cause the client program to exit without a stacktrace.
"""
def __init__(self, code, component_trace):
"""Constructs a FireExit exception.
Args:
code: (int) Exit code for the Fire CLI.
component_trace: (FireTrace) The trace for the Fire command.
"""
super(FireExit, self).__init__(code)
self.trace = component_trace
def _PrintResult(component_trace, verbose=False):
"""Prints the result of the Fire call to stdout in a human readable way."""
# TODO: Design human readable deserializable serialization method
  # and move serialization to its own module.
result = component_trace.GetResult()
if isinstance(result, (list, set, types.GeneratorType)):
for i in result:
print(_OneLineResult(i))
elif inspect.isgeneratorfunction(result):
raise NotImplementedError
elif isinstance(result, dict):
print(_DictAsString(result, verbose))
elif isinstance(result, tuple):
print(_OneLineResult(result))
elif isinstance(result,
(bool, six.string_types, six.integer_types, float, complex)):
print(result)
elif result is not None:
print(helputils.HelpString(result, component_trace, verbose))
def _DictAsString(result, verbose=False):
"""Returns a dict as a string.
Args:
result: The dict to convert to a string
verbose: Whether to include 'hidden' members, those keys starting with _.
Returns:
A string representing the dict
"""
result = {key: value for key, value in result.items()
if _ComponentVisible(key, verbose)}
if not result:
return '{}'
longest_key = max(len(str(key)) for key in result.keys())
format_string = '{{key:{padding}s}} {{value}}'.format(padding=longest_key + 1)
lines = []
for key, value in result.items():
line = format_string.format(key=str(key) + ':',
value=_OneLineResult(value))
lines.append(line)
return '\n'.join(lines)
def _ComponentVisible(component, verbose=False):
"""Returns whether a component should be visible in the output."""
return (
verbose
or not isinstance(component, six.string_types)
or not component.startswith('_'))
def _OneLineResult(result):
"""Returns result serialized to a single line string."""
# TODO: Ensure line is fewer than eg 120 characters.
if isinstance(result, six.string_types):
return str(result).replace('\n', ' ')
try:
# Don't force conversion to ascii.
return json.dumps(result, ensure_ascii=False)
except (TypeError, ValueError):
return str(result).replace('\n', ' ')
def _Fire(component, args, context, name=None):
"""Execute a Fire command on a target component using the args supplied.
Arguments that come after a final isolated '--' are treated as Flags, eg for
interactive mode or completion script generation.
Other arguments are consumed by the execution of the Fire command, eg in the
traversal of the members of the component, or in calling a function or
instantiating a class found during the traversal.
The steps performed by this method are:
1. Parse any Flag args (the args after the final --)
2. Start with component as the current component.
2a. If the current component is a class, instantiate it using args from args.
2b. If the current component is a routine, call it using args from args.
2c. Otherwise access a member from component using an arg from args.
2d. Repeat 2a-2c until no args remain.
3a. Embed into ipython REPL if interactive mode is selected.
3b. Generate a completion script if that flag is provided.
In step 2, arguments will only ever be consumed up to a separator; a single
step will never consume arguments from both sides of a separator.
The separator defaults to a hyphen (-), and can be overwritten with the
--separator Fire argument.
Args:
component: The target component for Fire.
args: A list of args to consume in Firing on the component, usually from
the command line.
context: A dict with the local and global variables available at the call
to Fire.
name: Optional. The name of the command. Used in interactive mode and in
the tab completion script.
Returns:
FireTrace of components starting with component, tracing Fire's execution
path as it consumes args.
Raises:
ValueError: If there are arguments that cannot be consumed.
ValueError: If --completion is specified but no name available.
"""
args, flag_args = parser.SeparateFlagArgs(args)
argparser = parser.CreateParser()
parsed_flag_args, unused_args = argparser.parse_known_args(flag_args)
verbose = parsed_flag_args.verbose
interactive = parsed_flag_args.interactive
separator = parsed_flag_args.separator
show_completion = parsed_flag_args.completion
show_help = parsed_flag_args.help
show_trace = parsed_flag_args.trace
# component can be a module, class, routine, object, etc.
if component is None:
component = context
initial_component = component
component_trace = trace.FireTrace(
initial_component=initial_component, name=name, separator=separator,
verbose=verbose, show_help=show_help, show_trace=show_trace)
instance = None
remaining_args = args
while True:
last_component = component
initial_args = remaining_args
if not remaining_args and (show_help or interactive or show_trace
or show_completion):
# Don't initialize the final class or call the final function unless
# there's a separator after it, and instead process the current component.
break
saved_args = []
used_separator = False
if separator in remaining_args:
# For the current component, only use arguments up to the separator.
separator_index = remaining_args.index(separator)
saved_args = remaining_args[separator_index + 1:]
remaining_args = remaining_args[:separator_index]
used_separator = True
assert separator not in remaining_args
if inspect.isclass(component) or inspect.isroutine(component):
# The component is a class or a routine; we'll try to initialize it or
# call it.
isclass = inspect.isclass(component)
try:
target = component.__name__
filename, lineno = inspectutils.GetFileAndLine(component)
component, consumed_args, remaining_args, capacity = _CallCallable(
component, remaining_args)
# Update the trace.
if isclass:
component_trace.AddInstantiatedClass(
component, target, consumed_args, filename, lineno, capacity)
else:
component_trace.AddCalledRoutine(
component, target, consumed_args, filename, lineno, capacity)
except FireError as error:
component_trace.AddError(error, initial_args)
return component_trace
if last_component is initial_component:
# If the initial component is a class, keep an instance for use with -i.
instance = component
elif isinstance(component, (list, tuple)) and remaining_args:
# The component is a tuple or list; we'll try to access a member.
arg = remaining_args[0]
try:
index = int(arg)
component = component[index]
except (ValueError, IndexError):
error = FireError(
'Unable to index into component with argument:', arg)
component_trace.AddError(error, initial_args)
return component_trace
remaining_args = remaining_args[1:]
filename = None
lineno = None
component_trace.AddAccessedProperty(
component, index, [arg], filename, lineno)
elif isinstance(component, dict) and remaining_args:
# The component is a dict; we'll try to access a member.
target = remaining_args[0]
if target in component:
component = component[target]
elif target.replace('-', '_') in component:
component = component[target.replace('-', '_')]
else:
# The target isn't present in the dict as a string, but maybe it is as
# another type.
# TODO: Consider alternatives for accessing non-string keys.
found_target = False
for key, value in component.items():
if target == str(key):
component = value
found_target = True
break
if not found_target:
error = FireError(
'Cannot find target in dict:', target, component)
component_trace.AddError(error, initial_args)
return component_trace
remaining_args = remaining_args[1:]
filename = None
lineno = None
component_trace.AddAccessedProperty(
component, target, [target], filename, lineno)
elif remaining_args:
# We'll try to access a member of the component.
try:
target = remaining_args[0]
component, consumed_args, remaining_args = _GetMember(
component, remaining_args)
filename, lineno = inspectutils.GetFileAndLine(component)
component_trace.AddAccessedProperty(
component, target, consumed_args, filename, lineno)
except FireError as error:
component_trace.AddError(error, initial_args)
return component_trace
if used_separator:
# Add back in the arguments from after the separator.
if remaining_args:
remaining_args = remaining_args + [separator] + saved_args
elif (inspect.isclass(last_component)
or inspect.isroutine(last_component)):
remaining_args = saved_args
component_trace.AddSeparator()
elif component is not last_component:
remaining_args = [separator] + saved_args
else:
# It was an unnecessary separator.
remaining_args = saved_args
if component is last_component and remaining_args == initial_args:
# We're making no progress.
break
if remaining_args:
component_trace.AddError(
FireError('Could not consume arguments:', remaining_args),
initial_args)
return component_trace
if show_completion:
if name is None:
raise ValueError('Cannot make completion script without command name')
script = CompletionScript(name, initial_component)
component_trace.AddCompletionScript(script)
if interactive:
variables = context.copy()
if name is not None:
variables[name] = initial_component
variables['component'] = initial_component
variables['result'] = component
variables['trace'] = component_trace
if instance is not None:
variables['self'] = instance
interact.Embed(variables, verbose)
component_trace.AddInteractiveMode()
return component_trace
def _GetMember(component, args):
"""Returns a subcomponent of component by consuming an arg from args.
Given a starting component and args, this function gets a member from that
component, consuming one arg in the process.
Args:
component: The component from which to get a member.
args: Args from which to consume in the search for the next component.
Returns:
component: The component that was found by consuming an arg.
consumed_args: The args that were consumed by getting this member.
remaining_args: The remaining args that haven't been consumed yet.
Raises:
FireError: If we cannot consume an argument to get a member.
"""
members = dict(inspect.getmembers(component))
arg = args[0]
arg_names = [
arg,
arg.replace('-', '_'), # treat '-' as '_'.
]
for arg_name in arg_names:
if arg_name in members:
return members[arg_name], [arg], args[1:]
raise FireError('Could not consume arg:', arg)
def _CallCallable(fn, args):
"""Calls the function fn by consuming args from args.
Args:
fn: The function to call or class to instantiate.
args: Args from which to consume for calling the function.
Returns:
component: The object that is the result of the function call.
consumed_args: The args that were consumed for the function call.
remaining_args: The remaining args that haven't been consumed yet.
capacity: Whether the call could have taken additional args.
"""
parse = _MakeParseFn(fn)
(varargs, kwargs), consumed_args, remaining_args, capacity = parse(args)
result = fn(*varargs, **kwargs)
return result, consumed_args, remaining_args, capacity
def _MakeParseFn(fn):
"""Creates a parse function for fn.
Args:
fn: The function or class to create the parse function for.
Returns:
A parse function for fn. The parse function accepts a list of arguments
and returns (varargs, kwargs), remaining_args. The original function fn
can then be called with fn(*varargs, **kwargs). The remaining_args are
the leftover args from the arguments to the parse function.
"""
fn_spec = inspectutils.GetFullArgSpec(fn)
all_args = fn_spec.args + fn_spec.kwonlyargs
metadata = decorators.GetMetadata(fn)
# Note: num_required_args is the number of positional arguments without
# default values. All of these arguments are required.
num_required_args = len(fn_spec.args) - len(fn_spec.defaults)
required_kwonly = set(fn_spec.kwonlyargs) - set(fn_spec.kwonlydefaults)
def _ParseFn(args):
"""Parses the list of `args` into (varargs, kwargs), remaining_args."""
kwargs, remaining_kwargs, remaining_args = _ParseKeywordArgs(
args, all_args, fn_spec.varkw)
# Note: _ParseArgs modifies kwargs.
parsed_args, kwargs, remaining_args, capacity = _ParseArgs(
fn_spec.args, fn_spec.defaults, num_required_args, kwargs,
remaining_args, metadata)
if fn_spec.varargs or fn_spec.varkw:
# If we're allowed *varargs or **kwargs, there's always capacity.
capacity = True
extra_kw = set(kwargs) - set(fn_spec.kwonlyargs)
if fn_spec.varkw is None and extra_kw:
raise FireError('Unexpected kwargs present:', extra_kw)
missing_kwonly = set(required_kwonly) - set(kwargs)
if missing_kwonly:
raise FireError('Missing required flags:', missing_kwonly)
# If we accept *varargs, then use all remaining arguments for *varargs.
if fn_spec.varargs is not None:
varargs, remaining_args = remaining_args, []
else:
varargs = []
for index, value in enumerate(varargs):
varargs[index] = _ParseValue(value, None, None, metadata)
varargs = parsed_args + varargs
remaining_args += remaining_kwargs
consumed_args = args[:len(args) - len(remaining_args)]
return (varargs, kwargs), consumed_args, remaining_args, capacity
return _ParseFn
def _ParseArgs(fn_args, fn_defaults, num_required_args, kwargs,
remaining_args, metadata):
"""Parses the positional and named arguments from the available supplied args.
Modifies kwargs, removing args as they are used.
Args:
fn_args: A list of argument names that the target function accepts,
including positional and named arguments, but not the varargs or kwargs
names.
fn_defaults: A list of the default values in the function argspec.
num_required_args: The number of required arguments from the function's
argspec. This is the number of arguments without a default value.
kwargs: Dict with named command line arguments and their values.
remaining_args: The remaining command line arguments, which may still be
used as positional arguments.
metadata: Metadata about the function, typically from Fire decorators.
Returns:
parsed_args: A list of values to be used as positional arguments for calling
the target function.
kwargs: The input dict kwargs modified with the used kwargs removed.
remaining_args: A list of the supplied args that have not been used yet.
capacity: Whether the call could have taken args in place of defaults.
Raises:
FireError: if additional positional arguments are expected, but none are
available.
"""
accepts_positional_args = metadata.get(decorators.ACCEPTS_POSITIONAL_ARGS)
capacity = False # If we see a default get used, we'll set capacity to True
# Select unnamed args.
parsed_args = []
for index, arg in enumerate(fn_args):
value = kwargs.pop(arg, None)
if value is not None: # A value is specified at the command line.
value = _ParseValue(value, index, arg, metadata)
parsed_args.append(value)
else: # No value has been explicitly specified.
if remaining_args and accepts_positional_args:
# Use a positional arg.
value = remaining_args.pop(0)
value = _ParseValue(value, index, arg, metadata)
parsed_args.append(value)
elif index < num_required_args:
raise FireError(
'The function received no value for the required argument:', arg)
else:
# We're past the args for which there's no default value.
# There's a default value for this arg.
capacity = True
default_index = index - num_required_args # index into the defaults.
parsed_args.append(fn_defaults[default_index])
for key, value in kwargs.items():
kwargs[key] = _ParseValue(value, None, key, metadata)
return parsed_args, kwargs, remaining_args, capacity
def _ParseKeywordArgs(args, fn_args, fn_keywords):
"""Parses the supplied arguments for keyword arguments.
  Given a list of arguments, finds occurrences of --name value, and uses 'name'
as the keyword and 'value' as the value. Constructs and returns a dictionary
of these keyword arguments, and returns a list of the remaining arguments.
  If fn_keywords is None, this only finds argument names used by the
  function, as specified through fn_args.
This returns the values of the args as strings. They are later processed by
_ParseArgs, which converts them to the appropriate type.
Args:
args: A list of arguments
fn_args: A list of argument names that the target function accepts,
including positional and named arguments, but not the varargs or kwargs
names.
fn_keywords: The argument name for **kwargs, or None if **kwargs not used
Returns:
kwargs: A dictionary mapping keywords to values.
remaining_kwargs: A list of the unused kwargs from the original args.
remaining_args: A list of the unused arguments from the original args.
"""
kwargs = {}
remaining_kwargs = []
remaining_args = []
if not args:
return kwargs, remaining_kwargs, remaining_args
skip_argument = False
for index, argument in enumerate(args):
if skip_argument:
skip_argument = False
continue
arg_consumed = False
if argument.startswith('--'):
# This is a named argument; get its value from this arg or the next.
got_argument = False
keyword = argument[2:]
contains_equals = '=' in keyword
is_bool_syntax = (
not contains_equals and
(index + 1 == len(args) or args[index + 1].startswith('--')))
if contains_equals:
keyword, value = keyword.split('=', 1)
got_argument = True
elif is_bool_syntax:
# Since there's no next arg or the next arg is a Flag, we consider
# this flag to be a boolean.
got_argument = True
if keyword in fn_args:
value = 'True'
elif keyword.startswith('no'):
keyword = keyword[2:]
value = 'False'
else:
value = 'True'
else:
if index + 1 < len(args):
value = args[index + 1]
got_argument = True
keyword = keyword.replace('-', '_')
# In order for us to consume the argument as a keyword arg, we either:
# Need to be explicitly expecting the keyword, or we need to be
# accepting **kwargs.
if got_argument:
skip_argument = not contains_equals and not is_bool_syntax
arg_consumed = True
if keyword in fn_args or fn_keywords:
kwargs[keyword] = value
else:
remaining_kwargs.append(argument)
if skip_argument:
remaining_kwargs.append(args[index + 1])
if not arg_consumed:
# The argument was not consumed, so it is still a remaining argument.
remaining_args.append(argument)
return kwargs, remaining_kwargs, remaining_args
def _ParseValue(value, index, arg, metadata):
"""Parses value, a string, into the appropriate type.
The function used to parse value is determined by the remaining arguments.
Args:
value: The string value to be parsed, typically a command line argument.
index: The index of the value in the function's argspec.
arg: The name of the argument the value is being parsed for.
metadata: Metadata about the function, typically from Fire decorators.
Returns:
value, parsed into the appropriate type for calling a function.
"""
parse_fn = parser.DefaultParseValue
# We check to see if any parse function from the fn metadata applies here.
parse_fns = metadata.get(decorators.FIRE_PARSE_FNS)
if parse_fns:
default = parse_fns['default']
positional = parse_fns['positional']
named = parse_fns['named']
if index is not None and 0 <= index < len(positional):
parse_fn = positional[index]
elif arg in named:
parse_fn = named[arg]
elif default is not None:
parse_fn = default
return parse_fn(value)
| [((6713, 6747), 'fire.completion.Script', 'completion.Script', (['name', 'component'], {}), '(name, component)\n', (6730, 6747), False, 'from fire import completion\n'), ((11789, 11818), 'fire.parser.SeparateFlagArgs', 'parser.SeparateFlagArgs', (['args'], {}), '(args)\n', (11812, 11818), False, 'from fire import parser\n'), ((11834, 11855), 'fire.parser.CreateParser', 'parser.CreateParser', ([], {}), '()\n', (11853, 11855), False, 'from fire import parser\n'), ((12335, 12485), 'fire.trace.FireTrace', 'trace.FireTrace', ([], {'initial_component': 'initial_component', 'name': 'name', 'separator': 'separator', 'verbose': 'verbose', 'show_help': 'show_help', 'show_trace': 'show_trace'}), '(initial_component=initial_component, name=name, separator=\n separator, verbose=verbose, show_help=show_help, show_trace=show_trace)\n', (12350, 12485), False, 'from fire import trace\n'), ((20165, 20196), 'fire.inspectutils.GetFullArgSpec', 'inspectutils.GetFullArgSpec', (['fn'], {}), '(fn)\n', (20192, 20196), False, 'from fire import inspectutils\n'), ((20257, 20283), 'fire.decorators.GetMetadata', 'decorators.GetMetadata', (['fn'], {}), '(fn)\n', (20279, 20283), False, 'from fire import decorators\n'), ((4292, 4321), 'os.path.basename', 'os.path.basename', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (4308, 4321), False, 'import os\n'), ((4402, 4422), 'shlex.split', 'shlex.split', (['command'], {}), '(command)\n', (4413, 4422), False, 'import shlex\n'), ((4773, 4788), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (4786, 4788), False, 'import inspect\n'), ((8058, 8093), 'inspect.isgeneratorfunction', 'inspect.isgeneratorfunction', (['result'], {}), '(result)\n', (8085, 8093), False, 'import inspect\n'), ((9762, 9800), 'json.dumps', 'json.dumps', (['result'], {'ensure_ascii': '(False)'}), '(result, ensure_ascii=False)\n', (9772, 9800), False, 'import json\n'), ((17937, 17971), 'fire.interact.Embed', 'interact.Embed', (['variables', 'verbose'], {}), '(variables, verbose)\n', (17951, 17971), False, 'from fire import interact\n'), ((18729, 18758), 'inspect.getmembers', 'inspect.getmembers', (['component'], {}), '(component)\n', (18747, 18758), False, 'import inspect\n'), ((5573, 5643), 'fire.helputils.HelpString', 'helputils.HelpString', (['result', 'component_trace', 'component_trace.verbose'], {}), '(result, component_trace, component_trace.verbose)\n', (5593, 5643), False, 'from fire import helputils\n'), ((13313, 13339), 'inspect.isclass', 'inspect.isclass', (['component'], {}), '(component)\n', (13328, 13339), False, 'import inspect\n'), ((13343, 13371), 'inspect.isroutine', 'inspect.isroutine', (['component'], {}), '(component)\n', (13360, 13371), False, 'import inspect\n'), ((13483, 13509), 'inspect.isclass', 'inspect.isclass', (['component'], {}), '(component)\n', (13498, 13509), False, 'import inspect\n'), ((5927, 5997), 'fire.helputils.HelpString', 'helputils.HelpString', (['result', 'component_trace', 'component_trace.verbose'], {}), '(result, component_trace, component_trace.verbose)\n', (5947, 5997), False, 'from fire import helputils\n'), ((13585, 13623), 'fire.inspectutils.GetFileAndLine', 'inspectutils.GetFileAndLine', (['component'], {}), '(component)\n', (13612, 13623), False, 'from fire import inspectutils\n'), ((16772, 16803), 'inspect.isclass', 'inspect.isclass', (['last_component'], {}), '(last_component)\n', (16787, 16803), False, 'import inspect\n'), ((16819, 16852), 'inspect.isroutine', 'inspect.isroutine', (['last_component'], {}), '(last_component)\n', (16836, 16852), False, 
'import inspect\n'), ((6322, 6392), 'fire.helputils.HelpString', 'helputils.HelpString', (['result', 'component_trace', 'component_trace.verbose'], {}), '(result, component_trace, component_trace.verbose)\n', (6342, 6392), False, 'from fire import helputils\n'), ((5365, 5385), 'pipes.quote', 'pipes.quote', (['command'], {}), '(command)\n', (5376, 5385), False, 'import pipes\n'), ((16316, 16354), 'fire.inspectutils.GetFileAndLine', 'inspectutils.GetFileAndLine', (['component'], {}), '(component)\n', (16343, 16354), False, 'from fire import inspectutils\n'), ((8429, 8483), 'fire.helputils.HelpString', 'helputils.HelpString', (['result', 'component_trace', 'verbose'], {}), '(result, component_trace, verbose)\n', (8449, 8483), False, 'from fire import helputils\n')] |
AmirValeev/auto-ml-classifier | app.py | e803fe92d1ec71e87509845ea61ecc46b363bae6 | import os, ast
import pandas as pd
from sklearn.svm import SVC
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import OneHotEncoder
from sklearn.compose import make_column_transformer
from sklearn.pipeline import make_pipeline
import pickle
def main():
    # Get the dataset from the user's GitHub repository
dataset_path = "https://raw.githubusercontent.com/" + os.environ["GITHUB_REPOSITORY"] + "/master/dataset.csv"
data = pd.read_csv(dataset_path)
print()
print(data.describe())
x=data.iloc[:,:-1]
y=data.iloc[:,-1]
    column_trans = make_column_transformer((OneHotEncoder(),[-1]),remainder='passthrough') # one-hot encode the last feature column of x; remaining columns pass through
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size = 0.2, random_state=0)
#define a pipeline
pipe = make_pipeline(column_trans,SVC())
pipe.fit(x_train,y_train) #training the model
print("\nModel Training Finished")
accuracy = pipe.score(x_test,y_test)
print("\nAccuracy of the Model: "+str(accuracy*100))
if pipe:
pickle.dump(pipe,open('model.pkl','wb')) # store the artifact in docker container
if not os.environ["INPUT_MYINPUT"] == 'zeroinputs':
inputs = ast.literal_eval(os.environ["INPUT_MYINPUT"])
print("\nThe Predicted Ouput is :")
output = pipe.predict([inputs])
print(output)
else:
output = ["None"]
print("\nUser didn't provided inputs to predict")
print("\n=======================Action Completed========================")
print(f"::set-output name=myOutput::{output[0]}")
if __name__ == "__main__":
main()
| [((467, 492), 'pandas.read_csv', 'pd.read_csv', (['dataset_path'], {}), '(dataset_path)\n', (478, 492), True, 'import pandas as pd\n'), ((744, 797), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x', 'y'], {'test_size': '(0.2)', 'random_state': '(0)'}), '(x, y, test_size=0.2, random_state=0)\n', (760, 797), False, 'from sklearn.model_selection import train_test_split\n'), ((866, 871), 'sklearn.svm.SVC', 'SVC', ([], {}), '()\n', (869, 871), False, 'from sklearn.svm import SVC\n'), ((1238, 1283), 'ast.literal_eval', 'ast.literal_eval', (["os.environ['INPUT_MYINPUT']"], {}), "(os.environ['INPUT_MYINPUT'])\n", (1254, 1283), False, 'import os, ast\n'), ((622, 637), 'sklearn.preprocessing.OneHotEncoder', 'OneHotEncoder', ([], {}), '()\n', (635, 637), False, 'from sklearn.preprocessing import OneHotEncoder\n')] |
giuseppe/quay | util/headers.py | a1b7e4b51974edfe86f66788621011eef2667e6a | import base64
def parse_basic_auth(header_value):
"""
Attempts to parse the given header value as a Base64-encoded Basic auth header.
"""
if not header_value:
return None
parts = header_value.split(" ")
if len(parts) != 2 or parts[0].lower() != "basic":
return None
try:
basic_parts = base64.b64decode(parts[1]).split(":", 1)
if len(basic_parts) != 2:
return None
return basic_parts
except ValueError:
return None
| [((342, 368), 'base64.b64decode', 'base64.b64decode', (['parts[1]'], {}), '(parts[1])\n', (358, 368), False, 'import base64\n')] |
tobiashuste/indico | indico/core/signals/event/core.py | c1e6ec0c8c84745988e38c9b1768142a6feb9e0e | # This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from indico.core.signals.event import _signals
sidemenu = _signals.signal('sidemenu', """
Expected to return ``MenuEntryData`` objects to be added to the event side menu.
A single entry can be returned directly, multiple entries must be yielded.
""")
deleted = _signals.signal('deleted', """
Called when an event is deleted. The *sender* is the event object.
The `user` kwarg contains the user performing the deletion if available.
""")
updated = _signals.signal('updated', """
Called when basic data of an event is updated. The *sender* is the event.
A dict of changes is passed in the `changes` kwarg, with ``(old, new)``
tuples for each change. Note that the `person_links` change may happen
with `old` and `new` being the same lists for technical reasons. If the
key is present, it should be assumed that something changed (usually
the order or some data on the person link).
""")
cloned = _signals.signal('cloned', """
Called when an event is cloned. The *sender* is the `Event` object of
the old event, the new event is passed in the `new_event` kwarg.
""")
type_changed = _signals.signal('type-changed', """
Called when the type of an event is changed. The `sender` is the event,
the old type is passed in the `old_type` kwarg.
""")
moved = _signals.signal('moved', """
Called when an event is moved to a different category. The `sender` is the event,
the old category is in the `old_parent` kwarg.
""")
created = _signals.signal('created', """
Called when a new event is created. The `sender` is the new Event.
""")
session_updated = _signals.signal('session-updated', """
Called when a session is updated. The *sender* is the session.
""")
session_deleted = _signals.signal('session-deleted', """
Called when a session is deleted. The *sender* is the session.
""")
session_block_deleted = _signals.signal('session-block-deleted', """
Called when a session block is deleted. The *sender* is the session block.
This signal is called before the ``db.session.delete()`` on the block is
executed.
""")
timetable_buttons = _signals.signal('timetable-buttons', """
Expected to return a list of tuples ('button_name', 'js-call-class').
Called when building the timetable view.
""")
get_log_renderers = _signals.signal('get-log-renderers', """
Expected to return `EventLogRenderer` classes.
""")
get_feature_definitions = _signals.signal('get-feature-definitions', """
Expected to return `EventFeature` subclasses.
""")
metadata_postprocess = _signals.signal('metadata-postprocess', """
Called right after a dict-like representation of an event is created,
so that plugins can add their own fields.
The *sender* is a string parameter specifying the source of the metadata.
The *event* kwarg contains the event object. The metadata is passed in
the `data` kwarg.
The signal should return a dict that will be used to update the
original representation (fields to add or override).
""")
| [((274, 475), 'indico.core.signals.event._signals.signal', '_signals.signal', (['"""sidemenu"""', '"""\nExpected to return ``MenuEntryData`` objects to be added to the event side menu.\nA single entry can be returned directly, multiple entries must be yielded.\n"""'], {}), '(\'sidemenu\',\n """\nExpected to return ``MenuEntryData`` objects to be added to the event side menu.\nA single entry can be returned directly, multiple entries must be yielded.\n"""\n )\n', (289, 475), False, 'from indico.core.signals.event import _signals\n'), ((478, 662), 'indico.core.signals.event._signals.signal', '_signals.signal', (['"""deleted"""', '"""\nCalled when an event is deleted. The *sender* is the event object.\nThe `user` kwarg contains the user performing the deletion if available.\n"""'], {}), '(\'deleted\',\n """\nCalled when an event is deleted. The *sender* is the event object.\nThe `user` kwarg contains the user performing the deletion if available.\n"""\n )\n', (493, 662), False, 'from indico.core.signals.event import _signals\n'), ((665, 1111), 'indico.core.signals.event._signals.signal', '_signals.signal', (['"""updated"""', '"""\nCalled when basic data of an event is updated. The *sender* is the event.\nA dict of changes is passed in the `changes` kwarg, with ``(old, new)``\ntuples for each change. Note than the `person_links` change may happen\nwith `old` and `new` being the same lists for technical reasons. If the\nkey is present, it should be assumed that something changed (usually\nthe order or some data on the person link).\n"""'], {}), '(\'updated\',\n """\nCalled when basic data of an event is updated. The *sender* is the event.\nA dict of changes is passed in the `changes` kwarg, with ``(old, new)``\ntuples for each change. Note than the `person_links` change may happen\nwith `old` and `new` being the same lists for technical reasons. If the\nkey is present, it should be assumed that something changed (usually\nthe order or some data on the person link).\n"""\n )\n', (680, 1111), False, 'from indico.core.signals.event import _signals\n'), ((1113, 1291), 'indico.core.signals.event._signals.signal', '_signals.signal', (['"""cloned"""', '"""\nCalled when an event is cloned. The *sender* is the `Event` object of\nthe old event, the new event is passed in the `new_event` kwarg.\n"""'], {}), '(\'cloned\',\n """\nCalled when an event is cloned. The *sender* is the `Event` object of\nthe old event, the new event is passed in the `new_event` kwarg.\n"""\n )\n', (1128, 1291), False, 'from indico.core.signals.event import _signals\n'), ((1299, 1468), 'indico.core.signals.event._signals.signal', '_signals.signal', (['"""type-changed"""', '"""\nCalled when the type of an event is changed. The `sender` is the event,\nthe old type is passed in the `old_type` kwarg.\n"""'], {}), '(\'type-changed\',\n """\nCalled when the type of an event is changed. The `sender` is the event,\nthe old type is passed in the `old_type` kwarg.\n"""\n )\n', (1314, 1468), False, 'from indico.core.signals.event import _signals\n'), ((1469, 1640), 'indico.core.signals.event._signals.signal', '_signals.signal', (['"""moved"""', '"""\nCalled when an event is moved to a different category. The `sender` is the event,\nthe old category is in the `old_parent` kwarg.\n"""'], {}), '(\'moved\',\n """\nCalled when an event is moved to a different category. 
The `sender` is the event,\nthe old category is in the `old_parent` kwarg.\n"""\n )\n', (1484, 1640), False, 'from indico.core.signals.event import _signals\n'), ((1643, 1749), 'indico.core.signals.event._signals.signal', '_signals.signal', (['"""created"""', '"""\nCalled when a new event is created. The `sender` is the new Event.\n"""'], {}), '(\'created\',\n """\nCalled when a new event is created. The `sender` is the new Event.\n""")\n', (1658, 1749), False, 'from indico.core.signals.event import _signals\n'), ((1765, 1875), 'indico.core.signals.event._signals.signal', '_signals.signal', (['"""session-updated"""', '"""\nCalled when a session is updated. The *sender* is the session.\n"""'], {}), '(\'session-updated\',\n """\nCalled when a session is updated. The *sender* is the session.\n""")\n', (1780, 1875), False, 'from indico.core.signals.event import _signals\n'), ((1891, 2001), 'indico.core.signals.event._signals.signal', '_signals.signal', (['"""session-deleted"""', '"""\nCalled when a session is deleted. The *sender* is the session.\n"""'], {}), '(\'session-deleted\',\n """\nCalled when a session is deleted. The *sender* is the session.\n""")\n', (1906, 2001), False, 'from indico.core.signals.event import _signals\n'), ((2023, 2239), 'indico.core.signals.event._signals.signal', '_signals.signal', (['"""session-block-deleted"""', '"""\nCalled when a session block is deleted. The *sender* is the session block.\nThis signal is called before the ``db.session.delete()`` on the block is\nexecuted.\n"""'], {}), '(\'session-block-deleted\',\n """\nCalled when a session block is deleted. The *sender* is the session block.\nThis signal is called before the ``db.session.delete()`` on the block is\nexecuted.\n"""\n )\n', (2038, 2239), False, 'from indico.core.signals.event import _signals\n'), ((2252, 2417), 'indico.core.signals.event._signals.signal', '_signals.signal', (['"""timetable-buttons"""', '"""\nExpected to return a list of tuples (\'button_name\', \'js-call-class\').\nCalled when building the timetable view.\n"""'], {}), '(\'timetable-buttons\',\n """\nExpected to return a list of tuples (\'button_name\', \'js-call-class\').\nCalled when building the timetable view.\n"""\n )\n', (2267, 2417), False, 'from indico.core.signals.event import _signals\n'), ((2430, 2526), 'indico.core.signals.event._signals.signal', '_signals.signal', (['"""get-log-renderers"""', '"""\nExpected to return `EventLogRenderer` classes.\n"""'], {}), '(\'get-log-renderers\',\n """\nExpected to return `EventLogRenderer` classes.\n""")\n', (2445, 2526), False, 'from indico.core.signals.event import _signals\n'), ((2550, 2651), 'indico.core.signals.event._signals.signal', '_signals.signal', (['"""get-feature-definitions"""', '"""\nExpected to return `EventFeature` subclasses.\n"""'], {}), '(\'get-feature-definitions\',\n """\nExpected to return `EventFeature` subclasses.\n""")\n', (2565, 2651), False, 'from indico.core.signals.event import _signals\n'), ((2672, 3123), 'indico.core.signals.event._signals.signal', '_signals.signal', (['"""metadata-postprocess"""', '"""\nCalled right after a dict-like representation of an event is created,\nso that plugins can add their own fields.\n\nThe *sender* is a string parameter specifying the source of the metadata.\nThe *event* kwarg contains the event object. 
The metadata is passed in\nthe `data` kwarg.\n\nThe signal should return a dict that will be used to update the\noriginal representation (fields to add or override).\n"""'], {}), '(\'metadata-postprocess\',\n """\nCalled right after a dict-like representation of an event is created,\nso that plugins can add their own fields.\n\nThe *sender* is a string parameter specifying the source of the metadata.\nThe *event* kwarg contains the event object. The metadata is passed in\nthe `data` kwarg.\n\nThe signal should return a dict that will be used to update the\noriginal representation (fields to add or override).\n"""\n )\n', (2687, 3123), False, 'from indico.core.signals.event import _signals\n')] |
aarunsai81/netapp | cinder/tests/unit/volume/drivers/emc/scaleio/test_delete_volume.py | 8f0f7bf9be7f4d9fb9c3846bfc639c90a05f86ba | # Copyright (c) 2013 - 2015 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six.moves import urllib
from cinder import context
from cinder import exception
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import fake_volume
from cinder.tests.unit.volume.drivers.emc import scaleio
from cinder.tests.unit.volume.drivers.emc.scaleio import mocks
class TestDeleteVolume(scaleio.TestScaleIODriver):
"""Test cases for ``ScaleIODriver.delete_volume()``"""
def setUp(self):
"""Setup a test case environment.
Creates a fake volume object and sets up the required API responses.
"""
super(TestDeleteVolume, self).setUp()
ctx = context.RequestContext('fake', 'fake', auth_token=True)
self.volume = fake_volume.fake_volume_obj(
ctx, **{'provider_id': fake.PROVIDER_ID})
self.volume_name_2x_enc = urllib.parse.quote(
urllib.parse.quote(self.driver._id_to_base64(self.volume.id))
)
self.HTTPS_MOCK_RESPONSES = {
self.RESPONSE_MODE.Valid: {
'types/Volume/instances/getByName::' +
self.volume_name_2x_enc: self.volume.id,
'instances/Volume::{}/action/removeMappedSdc'.format(
self.volume.provider_id): self.volume.provider_id,
'instances/Volume::{}/action/removeVolume'.format(
self.volume.provider_id
): self.volume.provider_id,
},
self.RESPONSE_MODE.BadStatus: {
'types/Volume/instances/getByName::' +
self.volume_name_2x_enc: mocks.MockHTTPSResponse(
{
'errorCode': 401,
'message': 'BadStatus Volume Test',
}, 401
),
'instances/Volume::{}/action/removeVolume'.format(
self.volume.provider_id
): mocks.MockHTTPSResponse(
{
'errorCode': 401,
'message': 'BadStatus Volume Test',
}, 401
),
},
}
def test_bad_login_and_volume(self):
self.set_https_response_mode(self.RESPONSE_MODE.BadStatus)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_volume,
self.volume)
def test_delete_volume(self):
"""Setting the unmap volume before delete flag for tests """
self.driver.configuration.set_override(
'sio_unmap_volume_before_deletion',
override=True)
self.driver.delete_volume(self.volume)
| [((1268, 1323), 'cinder.context.RequestContext', 'context.RequestContext', (['"""fake"""', '"""fake"""'], {'auth_token': '(True)'}), "('fake', 'fake', auth_token=True)\n", (1290, 1323), False, 'from cinder import context\n'), ((1347, 1416), 'cinder.tests.unit.fake_volume.fake_volume_obj', 'fake_volume.fake_volume_obj', (['ctx'], {}), "(ctx, **{'provider_id': fake.PROVIDER_ID})\n", (1374, 1416), False, 'from cinder.tests.unit import fake_volume\n'), ((2211, 2299), 'cinder.tests.unit.volume.drivers.emc.scaleio.mocks.MockHTTPSResponse', 'mocks.MockHTTPSResponse', (["{'errorCode': 401, 'message': 'BadStatus Volume Test'}", '(401)'], {}), "({'errorCode': 401, 'message':\n 'BadStatus Volume Test'}, 401)\n", (2234, 2299), False, 'from cinder.tests.unit.volume.drivers.emc.scaleio import mocks\n'), ((2536, 2624), 'cinder.tests.unit.volume.drivers.emc.scaleio.mocks.MockHTTPSResponse', 'mocks.MockHTTPSResponse', (["{'errorCode': 401, 'message': 'BadStatus Volume Test'}", '(401)'], {}), "({'errorCode': 401, 'message':\n 'BadStatus Volume Test'}, 401)\n", (2559, 2624), False, 'from cinder.tests.unit.volume.drivers.emc.scaleio import mocks\n')] |
chrisc20042001/python-for-transportation-modeling | example-package/transportation_tutorials/__init__.py | 677129daa390fcaa6e5cde45960e27d9bd6ca4bf | # -*- coding: utf-8 -*-
__version__ = '1.0.2'
import os
import appdirs
import osmnx as ox
import joblib
import requests
from .files import load_vars, save_vars, cached, inflate_tar, download_zipfile
from .data import data, list_data, problematic
from .tools.view_code import show_file
from . import mapping
cache_dir = None
memory = None
def set_cache_dir(location=None, compress=True, verbose=0, **kwargs):
"""
Set up a cache directory for use with the tutorials.
	Parameters
	----------
	location : Path-like or False, optional
A path for the cache files. Set to False to disable caching.
"""
global memory, cache_dir
if location is None:
location = appdirs.user_cache_dir('transportation_tutorials')
if location is False:
location = None
memory = joblib.Memory(location, compress=compress, verbose=verbose, **kwargs)
make_cache = (
(ox, 'gdf_from_place'),
(ox, 'graph_from_bbox'),
(requests, 'get'),
(requests, 'post'),
)
for module, func_name in make_cache:
try:
func = getattr(module, f"_{func_name}_orig")
except AttributeError:
func = getattr(module, func_name)
setattr(module, f"_{func_name}_orig", func)
setattr(module, func_name, memory.cache(func))
set_cache_dir()
| [((772, 841), 'joblib.Memory', 'joblib.Memory', (['location'], {'compress': 'compress', 'verbose': 'verbose'}), '(location, compress=compress, verbose=verbose, **kwargs)\n', (785, 841), False, 'import joblib\n'), ((668, 718), 'appdirs.user_cache_dir', 'appdirs.user_cache_dir', (['"""transportation_tutorials"""'], {}), "('transportation_tutorials')\n", (690, 718), False, 'import appdirs\n')] |
czajowaty/curry-bot | common/common.py | 91bfbd884342a02c6defd057d27d5b1fcd78cb21 | from requests.models import PreparedRequest
def is_valid_url(url):
prepared_request = PreparedRequest()
try:
prepared_request.prepare_url(url, None)
return True
except Exception as e:
return False
class Timestamp: # a speedrun.com style timestamp e.g. "3h 53m 233s 380ms"
def __init__(self, s):
self.hours, self.minutes, self.seconds, self.milliseconds = 0, 0, 0, 0
for arg in s.split():
if arg.endswith("ms"):
self.milliseconds += int(arg[:-2])
elif arg.endswith("s"):
self.seconds += int(arg[:-1])
elif arg.endswith("m"):
self.minutes += int(arg[:-1])
elif arg.endswith("h"):
self.hours += int(arg[:-1])
@staticmethod
def from_milliseconds(ms):
t = Timestamp("0ms")
temp = ms
t.hours = temp // 3600000
temp %= 3600000
t.minutes = temp // 60000
temp %= 60000
t.seconds = temp // 1000
t.milliseconds = temp % 1000
return t
def __str__(self):
result = []
if self.hours != 0:
result.append("{}h".format(self.hours))
if not (self.hours == 0 and self.minutes == 0):
result.append("{}m".format(self.minutes))
result.append("{}s".format(self.seconds))
if self.milliseconds > 0:
result.append("{}ms".format(self.milliseconds))
return ' '.join(result)
def __eq__(self, other):
return self.hours == other.hours and self.minutes == other.minutes and self.seconds == other.seconds and self.milliseconds == other.milliseconds
def __lt__(self, other):
if self.hours < other.hours:
return True
elif self.hours > other.hours:
return False
if self.minutes < other.minutes:
return True
elif self.minutes > other.minutes:
return False
if self.seconds < other.seconds:
return True
elif self.seconds > other.seconds:
return False
return self.milliseconds < other.milliseconds
| [((92, 109), 'requests.models.PreparedRequest', 'PreparedRequest', ([], {}), '()\n', (107, 109), False, 'from requests.models import PreparedRequest\n')] |
anthonyalmarza/hendrix | hendrix/test/test_ux.py | eebd2a2183cc18ec2267d96a53a70d41b1630ce6 | import os
import sys
from . import HendrixTestCase, TEST_SETTINGS
from hendrix.contrib import SettingsError
from hendrix.options import options as hx_options
from hendrix import ux
from mock import patch
class TestMain(HendrixTestCase):
def setUp(self):
super(TestMain, self).setUp()
self.DEFAULTS = hx_options()
os.environ['DJANGO_SETTINGS_MODULE'] = ''
self.devnull = open(os.devnull, 'w')
self.args_list = ['hx', 'start']
self.patcher = patch('hendrix.ux.findSettingsModule')
self.patcher.start()
def tearDown(self):
super(TestMain, self).tearDown()
self.devnull.close()
self.patcher.stop()
def test_settings_from_system_variable(self):
django_settings = 'django.inanity'
with patch('hendrix.ux.findSettingsModule') as findSettingsMod:
findSettingsMod.return_value = django_settings
options = self.DEFAULTS
self.assertEqual(options['settings'], '')
options = ux.djangoVsWsgi(options)
self.assertEqual(options['settings'], django_settings)
def test_settings_wsgi_absense(self):
with patch('hendrix.ux.findSettingsModule') as findSettingsMod:
findSettingsMod.return_value = ""
self.assertRaises(SettingsError, ux.djangoVsWsgi, self.DEFAULTS)
def test_user_settings_overrides_system_variable(self):
django_settings = 'django.inanity'
with patch('hendrix.ux.findSettingsModule') as findSettingsMod:
findSettingsMod.return_value = django_settings
options = self.DEFAULTS
user_settings = 'myproject.settings'
options['settings'] = user_settings
self.assertEqual(options['settings'], user_settings)
options = ux.djangoVsWsgi(options)
self.assertEqual(options['settings'], user_settings)
def test_wsgi_correct_wsgi_path_works(self):
wsgi_dot_path = 'hendrix.test.wsgi'
options = self.DEFAULTS
options.update({'wsgi': wsgi_dot_path})
options = ux.djangoVsWsgi(options)
self.assertEqual(options['wsgi'], wsgi_dot_path)
def test_wsgi_wrong_path_raises(self):
wsgi_dot_path = '_this.leads.nowhere.man'
options = self.DEFAULTS
options.update({'wsgi': wsgi_dot_path})
self.assertRaises(ImportError, ux.djangoVsWsgi, options)
def test_cwd_exposure(self):
cwd = os.getcwd()
_path = sys.path
sys.path = [p for p in _path if p != cwd]
self.assertTrue(cwd not in sys.path)
ux.exposeProject(self.DEFAULTS)
self.assertTrue(cwd in sys.path)
def test_pythonpath(self):
options = self.DEFAULTS
test_path = os.path.join(
os.path.dirname(os.getcwd()),
'hendrix/test/testproject'
)
options['pythonpath'] = test_path
ux.exposeProject(options)
self.assertTrue(test_path in sys.path)
sys.path = [p for p in sys.path if p != test_path]
def test_shitty_pythonpath(self):
options = self.DEFAULTS
test_path = '/if/u/have/this/path/you/suck'
options['pythonpath'] = test_path
self.assertRaises(IOError, ux.exposeProject, options)
def test_dev_friendly_options(self):
options = self.DEFAULTS
options['dev'] = True
self.assertFalse(options['reload'])
self.assertFalse(options['loud'])
options = ux.devFriendly(options)
self.assertTrue(options['reload'])
self.assertTrue(options['loud'])
def test_noise_control_daemonize(self):
options = self.DEFAULTS
options['quiet'] = True
options['daemonize'] = True
stdout = sys.stdout
stderr = sys.stderr
redirect = ux.noiseControl(options)
self.assertEqual(sys.stdout.name, stdout.name)
self.assertEqual(sys.stderr.name, stderr.name)
self.assertEqual(redirect, None)
def test_noise_control_traceback(self):
options = self.DEFAULTS
options['quiet'] = True
options['daemonize'] = True
options['traceback'] = True
stdout = sys.stdout
stderr = sys.stderr
redirect = ux.noiseControl(options)
self.assertEqual(sys.stdout.name, stdout.name)
self.assertEqual(sys.stderr.name, stderr.name)
self.assertEqual(redirect, None)
def test_main_with_daemonize(self):
sys.argv = self.args_list + ['-d', '--settings', TEST_SETTINGS]
class Process(object):
def poll(self):
return 0
with patch('time.sleep'):
with patch('subprocess.Popen') as popen:
popen.return_value = Process()
ux.main()
self.assertTrue(popen.called)
self.assertTrue('--settings' in popen.call_args[0][0])
sys.argv = []
def test_options_structure(self):
"""
A test to ensure that HendrixDeploy.options also has the complete set
of options available
"""
deploy = self.wsgiDeploy()
expected_keys = self.DEFAULTS.keys()
actual_keys = deploy.options.keys()
self.assertListEqual(expected_keys, actual_keys)
| [((323, 335), 'hendrix.options.options', 'hx_options', ([], {}), '()\n', (333, 335), True, 'from hendrix.options import options as hx_options\n'), ((495, 533), 'mock.patch', 'patch', (['"""hendrix.ux.findSettingsModule"""'], {}), "('hendrix.ux.findSettingsModule')\n", (500, 533), False, 'from mock import patch\n'), ((2090, 2114), 'hendrix.ux.djangoVsWsgi', 'ux.djangoVsWsgi', (['options'], {}), '(options)\n', (2105, 2114), False, 'from hendrix import ux\n'), ((2460, 2471), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2469, 2471), False, 'import os\n'), ((2600, 2631), 'hendrix.ux.exposeProject', 'ux.exposeProject', (['self.DEFAULTS'], {}), '(self.DEFAULTS)\n', (2616, 2631), False, 'from hendrix import ux\n'), ((2912, 2937), 'hendrix.ux.exposeProject', 'ux.exposeProject', (['options'], {}), '(options)\n', (2928, 2937), False, 'from hendrix import ux\n'), ((3479, 3502), 'hendrix.ux.devFriendly', 'ux.devFriendly', (['options'], {}), '(options)\n', (3493, 3502), False, 'from hendrix import ux\n'), ((3807, 3831), 'hendrix.ux.noiseControl', 'ux.noiseControl', (['options'], {}), '(options)\n', (3822, 3831), False, 'from hendrix import ux\n'), ((4240, 4264), 'hendrix.ux.noiseControl', 'ux.noiseControl', (['options'], {}), '(options)\n', (4255, 4264), False, 'from hendrix import ux\n'), ((793, 831), 'mock.patch', 'patch', (['"""hendrix.ux.findSettingsModule"""'], {}), "('hendrix.ux.findSettingsModule')\n", (798, 831), False, 'from mock import patch\n'), ((1023, 1047), 'hendrix.ux.djangoVsWsgi', 'ux.djangoVsWsgi', (['options'], {}), '(options)\n', (1038, 1047), False, 'from hendrix import ux\n'), ((1171, 1209), 'mock.patch', 'patch', (['"""hendrix.ux.findSettingsModule"""'], {}), "('hendrix.ux.findSettingsModule')\n", (1176, 1209), False, 'from mock import patch\n'), ((1470, 1508), 'mock.patch', 'patch', (['"""hendrix.ux.findSettingsModule"""'], {}), "('hendrix.ux.findSettingsModule')\n", (1475, 1508), False, 'from mock import patch\n'), ((1808, 1832), 'hendrix.ux.djangoVsWsgi', 'ux.djangoVsWsgi', (['options'], {}), '(options)\n', (1823, 1832), False, 'from hendrix import ux\n'), ((4628, 4647), 'mock.patch', 'patch', (['"""time.sleep"""'], {}), "('time.sleep')\n", (4633, 4647), False, 'from mock import patch\n'), ((2799, 2810), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2808, 2810), False, 'import os\n'), ((4666, 4691), 'mock.patch', 'patch', (['"""subprocess.Popen"""'], {}), "('subprocess.Popen')\n", (4671, 4691), False, 'from mock import patch\n'), ((4765, 4774), 'hendrix.ux.main', 'ux.main', ([], {}), '()\n', (4772, 4774), False, 'from hendrix import ux\n')] |
Voxel-Fox-Ltd/Novus | discord/types/interactions.py | 3e254115daf1c09455b26dc7819b73fbf5ee56e5 | """
The MIT License (MIT)
Copyright (c) 2015-2021 Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
from typing import Optional, TYPE_CHECKING, Dict, TypedDict, Union, List, Literal
from .snowflake import Snowflake
from .components import Component, SelectOption
from .embed import Embed
from .channel import ChannelType, Channel
from .member import Member
from .role import Role
from .user import User
if TYPE_CHECKING:
from .message import AllowedMentions, Message
ApplicationCommandType = Literal[1, 2, 3]
class ApplicationCommand(TypedDict):
id: Snowflake
application_id: Snowflake
name: str
description: str
options: Optional[List[ApplicationCommandOption]]
type: Optional[ApplicationCommandType]
ApplicationCommandOptionType = Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
class ApplicationCommandOption(TypedDict):
type: ApplicationCommandOptionType
name: str
description: str
required: bool
choices: Optional[List[ApplicationCommandOptionChoice]]
options: Optional[List[ApplicationCommandOption]]
class ApplicationCommandOptionChoice(TypedDict):
name: str
value: Union[str, int]
ApplicationCommandPermissionType = Literal[1, 2]
class ApplicationCommandPermissions(TypedDict):
id: Snowflake
type: ApplicationCommandPermissionType
permission: bool
class BaseGuildApplicationCommandPermissions(TypedDict):
permissions: List[ApplicationCommandPermissions]
class PartialGuildApplicationCommandPermissions(BaseGuildApplicationCommandPermissions):
id: Snowflake
class GuildApplicationCommandPermissions(PartialGuildApplicationCommandPermissions):
application_id: Snowflake
guild_id: Snowflake
InteractionType = Literal[1, 2, 3]
class _ApplicationCommandInteractionDataOption(TypedDict):
name: str
class _ApplicationCommandInteractionDataOptionSubcommand(_ApplicationCommandInteractionDataOption):
type: Literal[1, 2]
options: List[ApplicationCommandInteractionDataOption]
class _ApplicationCommandInteractionDataOptionString(_ApplicationCommandInteractionDataOption):
type: Literal[3]
value: str
class _ApplicationCommandInteractionDataOptionInteger(_ApplicationCommandInteractionDataOption):
type: Literal[4]
value: int
class _ApplicationCommandInteractionDataOptionBoolean(_ApplicationCommandInteractionDataOption):
type: Literal[5]
value: bool
class _ApplicationCommandInteractionDataOptionSnowflake(_ApplicationCommandInteractionDataOption):
type: Literal[6, 7, 8, 9]
value: Snowflake
class _ApplicationCommandInteractionDataOptionNumber(_ApplicationCommandInteractionDataOption):
type: Literal[10]
value: float
ApplicationCommandInteractionDataOption = Union[
_ApplicationCommandInteractionDataOptionString,
_ApplicationCommandInteractionDataOptionInteger,
_ApplicationCommandInteractionDataOptionSubcommand,
_ApplicationCommandInteractionDataOptionBoolean,
_ApplicationCommandInteractionDataOptionSnowflake,
_ApplicationCommandInteractionDataOptionNumber,
]
class ApplicationCommandResolvedPartialChannel(TypedDict):
id: Snowflake
type: ChannelType
permissions: str
name: str
class ApplicationCommandInteractionDataResolved(TypedDict, total=False):
users: Dict[Snowflake, User]
members: Dict[Snowflake, Member]
roles: Dict[Snowflake, Role]
channels: Dict[Snowflake, ApplicationCommandResolvedPartialChannel]
class ApplicationCommandInteractionDataOption(TypedDict):
name: str
type: int
value: Optional[str] # Optional[ApplicationCommandOptionType]
options: Optional[ApplicationCommandInteractionDataOption]
focused: Optional[bool]
components: Optional[List[ApplicationCommandInteractionDataOption]]
class _InteractionDataOptional(TypedDict, total=False):
resolved: Dict[str, dict]
options: List[ApplicationCommandInteractionDataOption]
custom_id: str
component_type: int
values: List[str]
target_id: Snowflake
components: List[ApplicationCommandInteractionDataOption]
class InteractionData(_InteractionDataOptional):
id: Snowflake
name: str
type: ApplicationCommandType
class InteractionResolved(TypedDict):
users: List[Union[User, Member]]
members: List[Member]
roles: List[Role]
channels: List[Channel]
messages: List[Message]
class _InteractionOptional(TypedDict, total=False):
data: InteractionData
guild_id: Snowflake
channel_id: Snowflake
member: Member
user: User
message: Message
guild_locale: str
class Interaction(_InteractionOptional):
id: Snowflake
application_id: Snowflake
type: InteractionType
token: str
version: int
resolved: InteractionResolved
locale: str
class InteractionApplicationCommandCallbackData(TypedDict, total=False):
tts: bool
content: str
embeds: List[Embed]
allowed_mentions: AllowedMentions
flags: int
components: List[Component]
InteractionResponseType = Literal[1, 4, 5, 6, 7]
class _InteractionResponseOptional(TypedDict, total=False):
data: InteractionApplicationCommandCallbackData
class InteractionResponse(_InteractionResponseOptional):
type: InteractionResponseType
class MessageInteraction(TypedDict):
id: Snowflake
type: InteractionType
name: str
user: User
class _EditApplicationCommandOptional(TypedDict, total=False):
description: str
options: Optional[List[ApplicationCommandOption]]
type: ApplicationCommandType
class EditApplicationCommand(_EditApplicationCommandOptional):
name: str
default_permission: bool
| [] |
EVAyo/chaoxing_auto_sign | local/local_sign.py | 7ae91a5e9aa4d15f57a5419ff3f5a455e151930a | # -*- coding: utf8 -*-
import os
import re
import time
import json
import random
import asyncio
from typing import Optional, List, Dict
from aiohttp import ClientSession
from aiohttp.cookiejar import SimpleCookie
from lxml import etree
from bs4 import BeautifulSoup
from config import *
from message import server_chan_send
class AutoSign(object):
def __init__(self, username, password, schoolid=None, enc=None):
"""初始化就进行登录"""
self.headers = {
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'zh-CN,zh;q=0.9',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.100 Safari/537.36',
}
self.session = ClientSession(headers=self.headers)
self.username = username
self.password = password
self.schoolid = '' if schoolid is None else schoolid
self.enc = '' if enc is None else enc
async def check_login_status(self, status, text):
if status == 403:
return 1002
data = json.loads(text)
if data['result']:
            return 1000  # login succeeded
        else:
            return 1001  # login credentials invalid
async def set_cookies(self):
"""设置cookies"""
cookie = await self.check_cookies()
if not cookie:
            # If invalid, log in again and save the cookies
status, text, cookie = await self.login()
login_status = await self.check_login_status(status, text)
if login_status == 1000:
cookies = self.dict_from_simple_cookie(cookie)
self.save_cookies(cookies)
else:
return 1001
else:
self.session.cookie_jar.update_cookies(cookie)
return 1000
def dict_from_simple_cookie(self, cookies) -> dict:
"""
        Extract cookies from the response object
"""
result = {}
for key, value in cookies.items():
result[key] = value.value
return result
def save_cookies(self, cookies: dict):
"""保存cookies"""
with open(COOKIES_FILE_PATH, "r") as f:
data = json.load(f)
data[self.username] = cookies
with open(COOKIES_FILE_PATH, 'w') as f2:
json.dump(data, f2)
async def check_cookies(self) -> Optional[SimpleCookie]:
"""检测json文件内是否存有cookies,有则检测,无则登录"""
if "cookies.json" not in os.listdir(COOKIES_PATH):
with open(COOKIES_FILE_PATH, 'w+') as f:
f.write("{}")
with open(COOKIES_FILE_PATH, 'r') as f:
            # If the json file has no cookies for this account, return False right away
try:
data = json.load(f)
cookies = data[self.username]
except Exception:
return False
        # Check whether the cookies are still valid
async with self.session.request(method='GET',
url='http://mooc1-1.chaoxing.com/api/workTestPendingNew',
allow_redirects=False,
cookies=cookies) as resp:
if resp.status != 200:
print("cookie失效")
return None
else:
print("cookie有效!")
return cookies
async def login(self):
"""
        Log in and return the response
"""
params = {
'name': self.username,
'pwd': self.password,
'schoolid': self.schoolid,
'verify': 0
}
async with self.session.request(method='GET',
url='https://passport2.chaoxing.com/api/login',
params=params) as resp:
status = resp.status
text = await resp.text()
cookies = resp.cookies
return status, text, cookies
def check_activeid(self, activeid):
"""检测activeid是否存在,不存在则添加"""
activeid += self.username
if "activeid.json" not in os.listdir(ACTIVEID_PATH):
with open(ACTIVEID_FILE_PATH, 'w+') as f:
f.write("{}")
with open(ACTIVEID_FILE_PATH, 'r') as f:
try:
                # Read the file
data = json.load(f)
if data[activeid]:
return True
except BaseException:
                # An exception means this activeid has not been recorded yet
return False
def save_activeid(self, activeid):
"""保存已成功签到的activeid"""
activeid += self.username
if "activeid.json" not in os.listdir(ACTIVEID_PATH):
with open(ACTIVEID_FILE_PATH, 'w+') as f:
f.write("{}")
with open(ACTIVEID_FILE_PATH, 'r') as f:
data = json.load(f)
with open(ACTIVEID_FILE_PATH, 'w') as f2:
data[activeid] = True
json.dump(data, f2)
async def get_all_classid(self) -> list:
"""获取课程主页中所有课程的classid和courseid"""
res = []
async with self.session.request(method='GET',
url='http://mooc1-2.chaoxing.com/visit/interaction') as resp:
text = await resp.text()
soup = BeautifulSoup(text, "lxml")
course_list = soup.find_all(
'li', class_="course")
for course in course_list:
res.append((course.attrs['courseid'], course.attrs['clazzid'],
course.find_next('span', class_="course-name").text))
        print('Course list: ', res)
return res
async def get_sign_type(self, classid, courseid, activeid):
"""获取签到类型"""
params = {
'activeId': activeid,
'classId': classid,
'courseId': courseid
}
async with self.session.request(method='GET',
url='https://mobilelearn.chaoxing.com/widget/sign/pcStuSignController/preSign',
params=params) as resp:
text = await resp.text()
h = etree.HTML(text)
sign_type = h.xpath('//div[@class="location"]/span/text()')
return sign_type
async def get_activeid(self, classid, courseid, classname):
"""访问任务面板获取课程的活动id"""
res = []
re_rule = r'([\d]+),2'
params = {
'courseId': courseid,
'jclassId': classid
}
async with self.session.request(method='GET',
url="https://mobilelearn.chaoxing.com/widget/pcpick/stu/index",
verify_ssl=False,
params=params) as resp:
text = await resp.text()
h = etree.HTML(text)
activeid_list = h.xpath('//*[@id="startList"]/div/div/@onclick')
for activeid in activeid_list:
activeid = re.findall(re_rule, activeid)
if not activeid:
continue
sign_type = await self.get_sign_type(classid, courseid, activeid[0])
res.append((activeid[0], sign_type[0]))
n = len(res)
if n:
d = {'num': n, 'class': {}}
for i in range(n):
if not self.check_activeid(res[i][0]):
d['class'][i] = {
'classid': classid,
'courseid': courseid,
'activeid': res[i][0],
'classname': classname,
'sign_type': res[i][1]
}
return d
async def general_sign(self, classid, courseid, activeid):
"""普通签到"""
params = {
'activeId': activeid,
'classId': classid,
'fid': '39037',
'courseId': courseid
}
async with self.session.request(
method='GET',
url="https://mobilelearn.chaoxing.com/widget/sign/pcStuSignController/preSign",
params=params,
verify_ssl=False
) as resp:
text = await resp.text()
title = re.findall('<title>(.*)</title>', text)[0]
if "签到成功" not in title:
            # If the page title does not contain "签到成功" (sign-in succeeded), this is a photo sign-in
            return await self.tphoto_sign(activeid)
else:
s = {
'date': time.strftime("%m-%d %H:%M", time.localtime()),
'status': title
}
return s
async def hand_sign(self, classid, courseid, activeid):
"""手势签到"""
params = {
'courseId': courseid,
'classId': classid,
'activeId': activeid
}
async with self.session.request(
method='GET',
url="https://mobilelearn.chaoxing.com/widget/sign/pcStuSignController/signIn",
params=params,
verify_ssl=False
) as resp:
text = await resp.text()
title = re.findall('<title>(.*)</title>', text)
s = {
'date': time.strftime("%m-%d %H:%M", time.localtime()),
'status': title
}
return s
async def qcode_sign(self, activeid):
"""二维码签到"""
params = {
'enc': self.enc,
'name': '',
'activeId': activeid,
'uid': '',
'clientip': '',
'useragent': '',
'latitude': '-1',
'longitude': '-1',
'fid': '',
'appType': '15'
}
async with self.session.request('GET', 'https://mobilelearn.chaoxing.com/pptSign/stuSignajax',
params=params,
allow_redirects=False) as resp:
text = await resp.text()
return {
'date': time.strftime("%m-%d %H:%M", time.localtime()),
'status': text
}
async def addr_sign(self, activeid):
"""位置签到"""
params = {
'name': '',
'activeId': activeid,
'address': '中国',
'uid': '',
'clientip': clientip,
'latitude': latitude,
'longitude': longitude,
'fid': '',
'appType': '15',
'ifTiJiao': '1'
}
async with self.session.request(
method="GET",
url="https://mobilelearn.chaoxing.com/pptSign/stuSignajax",
params=params
) as resp:
text = await resp.text()
return {
'date': time.strftime("%m-%d %H:%M", time.localtime()),
'status': text
}
    async def tphoto_sign(self, activeid, uid=None):
        """Photo sign-in"""
objectId = await self.upload_img(uid)
params = {
'name': '',
'activeId': activeid,
'address': '中国',
'uid': '',
'clientip': clientip,
'latitude': latitude,
'longitude': longitude,
'fid': '',
'appType': '15',
'ifTiJiao': '1',
'objectId': objectId
}
async with self.session.request(
method="GET",
url="https://mobilelearn.chaoxing.com/pptSign/stuSignajax",
params=params
) as resp:
text = await resp.text()
return {
'date': time.strftime("%m-%d %H:%M", time.localtime()),
'status': text
}
async def get_token(self):
"""获取上传文件所需参数token"""
url = 'https://pan-yz.chaoxing.com/api/token/uservalid'
async with self.session.request(
method='GET',
url=url
) as resp:
text = await resp.text()
token_dict = json.loads(text)
return token_dict['_token']
async def upload_img(self, uid):
"""上传图片"""
# 从图片文件夹内随机选择一张图片
try:
all_img = os.listdir(IMAGE_PATH)
except Exception as e:
os.mkdir(IMAGE_PATH)
            all_img = []  # empty list so the len(all_img) check below still works
if len(all_img) == 0:
return "a5d588f7bce1994323c348982332e470"
else:
img = IMAGE_PATH + random.choice(all_img)
# uid = self.session.cookies.get_dict()['UID']
url = 'https://pan-yz.chaoxing.com/upload'
files = {'file': open(img, 'rb')}
uid = self.session.cookie_jar.filter_cookies('').get('UID').value
token = await self.get_token()
param = {
'puid': uid,
'_token': token
}
async with self.session.request(
method='POST',
url=url,
params=param,
data=files
) as resp:
text = await resp.text()
res_dict = json.loads(text)
return res_dict['objectId']
async def send_sign_request(self, classid, courseid, activeid, sign_type):
"""发送签到请求"""
if "手势" in sign_type:
return await self.hand_sign(classid, courseid, activeid)
elif "二维码" in sign_type:
return await self.qcode_sign(activeid)
elif "位置" in sign_type:
return await self.addr_sign(activeid)
elif "拍照" in sign_type:
return await self.tphoto_sign(activeid)
else:
return await self.general_sign(classid, courseid, activeid)
async def send_sign_result(self, results: List[Dict]):
"""
        Send the sign-in results
"""
await server_chan_send(results, self.session)
async def start_sign_task(self):
"""开始所有签到任务"""
tasks = []
res = []
await self.set_cookies()
        # Get the classid and courseid of every course
        classid_courseId = await self.get_all_classid()
        # Get every course's activeid and sign-in type
for i in classid_courseId:
coroutine = self.get_activeid(i[1], i[0], i[2])
tasks.append(coroutine)
results: List[Dict] = await asyncio.gather(*tasks)
for r in results:
if r is None:
continue
for d in r['class'].values():
resp = await self.send_sign_request(
d['classid'],
d['courseid'],
d['activeid'],
d['sign_type']
)
if resp:
                    # course name, sign-in time, sign-in status
sign_msg = {
'name': d['classname'],
'date': resp['date'],
'status': resp['status']
}
res.append(sign_msg)
if '失败' in resp['status']:
continue
                    # After a successful sign-in, record the activeid
self.save_activeid(d['activeid'])
return res
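    # Illustrative driver (hypothetical names; credentials and paths come from config.py):
    #   signer = AutoSign(username, password)
    #   loop = asyncio.get_event_loop()
    #   results = loop.run_until_complete(signer.start_sign_task())
    #   loop.run_until_complete(signer.send_sign_result(results))
    #   loop.run_until_complete(signer.close_session())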
async def close_session(self):
await self.session.close() | [((894, 929), 'aiohttp.ClientSession', 'ClientSession', ([], {'headers': 'self.headers'}), '(headers=self.headers)\n', (907, 929), False, 'from aiohttp import ClientSession\n'), ((1227, 1243), 'json.loads', 'json.loads', (['text'], {}), '(text)\n', (1237, 1243), False, 'import json\n'), ((5391, 5418), 'bs4.BeautifulSoup', 'BeautifulSoup', (['text', '"""lxml"""'], {}), "(text, 'lxml')\n", (5404, 5418), False, 'from bs4 import BeautifulSoup\n'), ((6232, 6248), 'lxml.etree.HTML', 'etree.HTML', (['text'], {}), '(text)\n', (6242, 6248), False, 'from lxml import etree\n'), ((6913, 6929), 'lxml.etree.HTML', 'etree.HTML', (['text'], {}), '(text)\n', (6923, 6929), False, 'from lxml import etree\n'), ((9135, 9174), 're.findall', 're.findall', (['"""<title>(.*)</title>"""', 'text'], {}), "('<title>(.*)</title>', text)\n", (9145, 9174), False, 'import re\n'), ((11954, 11970), 'json.loads', 'json.loads', (['text'], {}), '(text)\n', (11964, 11970), False, 'import json\n'), ((2304, 2316), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2313, 2316), False, 'import json\n'), ((2592, 2616), 'os.listdir', 'os.listdir', (['COOKIES_PATH'], {}), '(COOKIES_PATH)\n', (2602, 2616), False, 'import os\n'), ((4188, 4213), 'os.listdir', 'os.listdir', (['ACTIVEID_PATH'], {}), '(ACTIVEID_PATH)\n', (4198, 4213), False, 'import os\n'), ((4744, 4769), 'os.listdir', 'os.listdir', (['ACTIVEID_PATH'], {}), '(ACTIVEID_PATH)\n', (4754, 4769), False, 'import os\n'), ((4923, 4935), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4932, 4935), False, 'import json\n'), ((7074, 7103), 're.findall', 're.findall', (['re_rule', 'activeid'], {}), '(re_rule, activeid)\n', (7084, 7103), False, 'import re\n'), ((8304, 8343), 're.findall', 're.findall', (['"""<title>(.*)</title>"""', 'text'], {}), "('<title>(.*)</title>', text)\n", (8314, 8343), False, 'import re\n'), ((12129, 12151), 'os.listdir', 'os.listdir', (['IMAGE_PATH'], {}), '(IMAGE_PATH)\n', (12139, 12151), False, 'import os\n'), ((13025, 13041), 'json.loads', 'json.loads', (['text'], {}), '(text)\n', (13035, 13041), False, 'import json\n'), ((13739, 13778), 'message.server_chan_send', 'server_chan_send', (['results', 'self.session'], {}), '(results, self.session)\n', (13755, 13778), False, 'from message import server_chan_send\n'), ((14210, 14232), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (14224, 14232), False, 'import asyncio\n'), ((2428, 2447), 'json.dump', 'json.dump', (['data', 'f2'], {}), '(data, f2)\n', (2437, 2447), False, 'import json\n'), ((2841, 2853), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2850, 2853), False, 'import json\n'), ((4420, 4432), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4429, 4432), False, 'import json\n'), ((5044, 5063), 'json.dump', 'json.dump', (['data', 'f2'], {}), '(data, f2)\n', (5053, 5063), False, 'import json\n'), ((9238, 9254), 'time.localtime', 'time.localtime', ([], {}), '()\n', (9252, 9254), False, 'import time\n'), ((10777, 10793), 'time.localtime', 'time.localtime', ([], {}), '()\n', (10791, 10793), False, 'import time\n'), ((11604, 11620), 'time.localtime', 'time.localtime', ([], {}), '()\n', (11618, 11620), False, 'import time\n'), ((12195, 12215), 'os.mkdir', 'os.mkdir', (['IMAGE_PATH'], {}), '(IMAGE_PATH)\n', (12203, 12215), False, 'import os\n'), ((12378, 12400), 'random.choice', 'random.choice', (['all_img'], {}), '(all_img)\n', (12391, 12400), False, 'import random\n'), ((8542, 8558), 'time.localtime', 'time.localtime', ([], {}), '()\n', (8556, 8558), False, 
'import time\n'), ((10029, 10045), 'time.localtime', 'time.localtime', ([], {}), '()\n', (10043, 10045), False, 'import time\n')] |
stahlberggroup/umierrorcorrect | build/scripts-3.6/fit_background_model.py | 8ceabe30a87811dad467d04eb5a08d0213065946 | #!python
import numpy as np
from numpy import inf
from numpy import nan
from scipy.optimize import fmin
from scipy.stats import beta
from scipy.special import beta as B
from scipy.special import comb
import argparse
import sys
import glob
def parseArgs():
'''Function for parsing arguments'''
parser = argparse.ArgumentParser(description="Pipeline for analyzing barcoded amplicon \
sequencing data with Unique molecular \
identifiers (UMI)")
parser.add_argument('-cons', '--cons_file', dest='cons_file', help='Path to cons file, for fitting parameters of the bgmodel')
parser.add_argument('-nonbgposfile', '--non-background-positions', dest='nonbgposfile',
help='Path to file with non-background positions')
parser.add_argument('-out', '--out_file',dest='out_file',help="name of output file, default = %(default)s]",default="bgmodel.params")
parser.add_argument('-f','--fsize',dest='fsize', help='Family size cutoff (consensus cutoff) for variant calling. [default = %(default)s]', default=3)
args = parser.parse_args(sys.argv[1:])
return(args)
def parse_cons_file(filename,fsize=3):
n1=[]
f1=[]
c1=[]
posx=[]
data=[]
with open(filename) as f:
for line in f:
if not line.startswith('Sample Name'):
line=line.rstrip('\n')
parts=line.split('\t')
pos=parts[1]+':'+parts[2]
name=parts[3]
#print(name)
if name not in "":
famsize=parts[-4]
if int(famsize)==fsize:
frac=float(parts[-2])
alt=parts[-1]
count=parts[-3]
if frac > 0 and alt not in 'N':
cov=int(parts[-5])
f1.append(float(frac))
n1.append(int(cov))
c1.append(int(count))
posx.append(pos)
data.append(line)
#print(name)
#print(famsize)
return(f1,n1,c1,posx,data)
def betaNLL(params,*args):
a,b = params
data = np.array(args[0])
pdf=beta.pdf(data,a,b,loc=0,scale=1)
lg=np.log(pdf)
#lg=np.where(lg==-np.inf,0,lg)
mask = np.isfinite(lg)
    # Negative log-likelihood over the finite terms only (zero-density points would give -inf)
    nll = -lg[mask].sum()
return(nll)
def get_beta_parameters(data):
    # Method-of-moments estimates of (alpha, beta), used as the starting point for the ML fit
    m=np.mean(data)
    v=np.var(data)
    a0=m*(m * (1-m) / v-1 )
    b0=(1-m)*(m * (1-m) / v-1 )
result=fmin(betaNLL,[a0,b0],args=(data,))
return(result)
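# Quick sanity check of the fit (hypothetical values, not part of the original script):
#   data = np.random.beta(2.0, 50.0, size=1000)
#   a_hat, b_hat = get_beta_parameters(data)   # should land near (2, 50)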
def run_fit_bgmodel(args):
spikepositions=[178952085,55599321,7577558,7577547,7577538,7577120]
if args.nonbgposfile:
nonbgpos=[]
with open(args.nonbgposfile) as f:
for line in f:
line=line.rstrip()
nonbgpos.append(line)
else:
nonbgpos=spikepositions
if not args.cons_file:
args.cons_file=glob.glob(args.output_path+'/*cons.tsv')[0]
args.fsize=int(args.fsize)
f1,n1,a1,pos,data=parse_cons_file(args.cons_file,args.fsize)
f1 = np.array(f1)
n1 = np.array(n1)
a1 = np.array(a1)
pos = np.array(pos)
data = np.array(data)
result=get_beta_parameters(f1[np.isin(pos,nonbgpos)!=True])
#a=prob_bb(n1,a1,result[0],result[1])
print(pos,nonbgpos,np.isin(pos,nonbgpos))
with open(args.out_file,'w') as g:
g.write('{}\n'.format(result[0]))
g.write('{}\n'.format(result[1]))
#a[a==inf]=1e-10
#a[np.isnan(a)]=1e-10
#Q = -10*np.log10(a)
#data=np.array(data)
#plot_histogram(Q,args.output_path+'/'+args.sample_name+'.histogram.png')
#if args.vc_method.lower()=='bbmodel':
# rout=data[Q >= float(args.qvalue_threshold)]
# Qsig=Q[Q >= float(args.qvalue_threshold)]
#else:
# rout=data[a1 >= float(args.count_cutoff)]
# Qsig=Q[a1 >= float(args.count_cutoff)]
#outfilename=args.output_path+'/'+args.sample_name+'2.vcf'
#write_vcf(outfilename,rout,Qsig,args.reference_file)
if __name__=='__main__':
args=parseArgs()
run_fit_bgmodel(args)
| [((299, 544), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Pipeline for analyzing barcoded amplicon sequencing data with Unique molecular identifiers (UMI)"""'}), "(description=\n 'Pipeline for analyzing barcoded amplicon sequencing data with Unique molecular identifiers (UMI)'\n )\n", (322, 544), False, 'import argparse\n'), ((2300, 2317), 'numpy.array', 'np.array', (['args[0]'], {}), '(args[0])\n', (2308, 2317), True, 'import numpy as np\n'), ((2326, 2362), 'scipy.stats.beta.pdf', 'beta.pdf', (['data', 'a', 'b'], {'loc': '(0)', 'scale': '(1)'}), '(data, a, b, loc=0, scale=1)\n', (2334, 2362), False, 'from scipy.stats import beta\n'), ((2366, 2377), 'numpy.log', 'np.log', (['pdf'], {}), '(pdf)\n', (2372, 2377), True, 'import numpy as np\n'), ((2424, 2439), 'numpy.isfinite', 'np.isfinite', (['lg'], {}), '(lg)\n', (2435, 2439), True, 'import numpy as np\n'), ((2543, 2556), 'numpy.mean', 'np.mean', (['data'], {}), '(data)\n', (2550, 2556), True, 'import numpy as np\n'), ((2563, 2575), 'numpy.var', 'np.var', (['data'], {}), '(data)\n', (2569, 2575), True, 'import numpy as np\n'), ((2647, 2684), 'scipy.optimize.fmin', 'fmin', (['betaNLL', '[a0, b0]'], {'args': '(data,)'}), '(betaNLL, [a0, b0], args=(data,))\n', (2651, 2684), False, 'from scipy.optimize import fmin\n'), ((3231, 3243), 'numpy.array', 'np.array', (['f1'], {}), '(f1)\n', (3239, 3243), True, 'import numpy as np\n'), ((3253, 3265), 'numpy.array', 'np.array', (['n1'], {}), '(n1)\n', (3261, 3265), True, 'import numpy as np\n'), ((3275, 3287), 'numpy.array', 'np.array', (['a1'], {}), '(a1)\n', (3283, 3287), True, 'import numpy as np\n'), ((3298, 3311), 'numpy.array', 'np.array', (['pos'], {}), '(pos)\n', (3306, 3311), True, 'import numpy as np\n'), ((3323, 3337), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (3331, 3337), True, 'import numpy as np\n'), ((2477, 2487), 'numpy.sum', 'np.sum', (['lg'], {}), '(lg)\n', (2483, 2487), True, 'import numpy as np\n'), ((3467, 3489), 'numpy.isin', 'np.isin', (['pos', 'nonbgpos'], {}), '(pos, nonbgpos)\n', (3474, 3489), True, 'import numpy as np\n'), ((3372, 3394), 'numpy.isin', 'np.isin', (['pos', 'nonbgpos'], {}), '(pos, nonbgpos)\n', (3379, 3394), True, 'import numpy as np\n')] |
KevinKecc/caffe2 | caffe2/python/operator_test/partition_ops_test.py | a2b6c6e2f0686358a84277df65e9489fb7d9ddb2 | # Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
from caffe2.python import core, workspace
from caffe2.python.test_util import TestCase, rand_array
class TestPartitionOps(TestCase):
def test_configs(self):
# (main dims, partitions, main type, [list of (extra dims, type)])
configs = [
((10, ), 3),
((4, ), 10),
((10, 10), 4),
((100, ), 2),
((5, ), 1),
((1, ), 1),
((2, 10), 2),
]
suffixes = [
[],
[((2, 2), np.float32)],
[((3, ), np.int64), ((2, ), np.float32)],
]
return [
(main_dims, parts, main_type, extra, pack)
for main_dims, parts in configs
for main_type in [np.int32, np.int64] for extra in suffixes
for pack in [False, True]
]
def testPartition(self):
for main_dims, parts, main_type, extra_ins, pack in self.test_configs():
ins = ['in' + str(i) for i in range(1 + len(extra_ins))]
outs = [
'in{}_p{}'.format(j, i)
for i in range(parts) for j in range(1 + len(extra_ins))
]
op = core.CreateOperator(
'Partition', ins, outs, pack_first_input=(1 if pack else 0))
x = []
for i, (dims, t) in enumerate([((), main_type)] + extra_ins):
if t in [np.float32, np.float64]:
d = rand_array(*(main_dims + dims))
else:
d = np.random.randint(-100, 100, (main_dims + dims))
d = d.astype(t)
workspace.FeedBlob(ins[i], d)
x.append(d)
def sharding(x):
# numpy has proper modulo op that yields non-negative results
shards = (x[0] % parts).reshape([-1])
out = []
for i in range(parts):
for ind, v in enumerate(x):
suffix_shape = v.shape[len(x[0].shape):]
accum = []
data = v.reshape((-1, ) + suffix_shape)
if pack and ind == 0:
data = data // parts
for j, s in enumerate(shards):
if s == i:
accum.append(data[j])
def join(a):
if not a:
return np.empty(shape=(0, ) + suffix_shape)
return np.stack(a)
out.append(join(accum))
return out
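            # sharding() above is the NumPy reference for the Partition op: each row goes to
            # shard (key % parts), companion inputs follow their key row, and with
            # pack_first_input=1 the stored key values are additionally floor-divided by parts.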
workspace.RunOperatorOnce(op)
ref = sharding(x)
print(x)
print(ref)
for name, expected in zip(outs, ref):
np.testing.assert_array_equal(
expected, workspace.FetchBlob(name)
)
# test inverse operation (GatherByKey)
if len(main_dims) == 1:
# currently only 1D key tensor supported
for i in range(len(extra_ins)):
expected_out = ins[i + 1]
gather_ins = [ins[0]] + [
outs[len(ins) * p + i + 1] for p in range(parts)]
actual_out = expected_out + '_actual'
op = core.CreateOperator(
'GatherByKey', gather_ins, actual_out)
workspace.RunOperatorOnce(op)
expected = workspace.FetchBlob(expected_out)
actual = workspace.FetchBlob(actual_out)
np.testing.assert_array_equal(expected, actual)
def testLengthsPartition(self):
for main_dims, parts, main_type, extra_ins, pack in self.test_configs():
# For LengthsSharding only 1-D tensors supported as a first input
if len(main_dims) > 1:
continue
ins = ['in' + str(i) for i in range(2 + len(extra_ins))]
outs = [
'in{}_p{}'.format(j, i)
for i in range(parts) for j in range(2 + len(extra_ins))
]
op = core.CreateOperator(
'LengthsPartition', ins, outs,
pack_first_input=(1 if pack else 0)
)
x = []
for i, (dims, t) in enumerate([((), main_type)] + extra_ins):
if t in [np.float32, np.float64]:
d = rand_array(*(main_dims + dims))
else:
d = np.random.randint(-100, 100, (main_dims + dims))
d = d.astype(t)
workspace.FeedBlob(ins[i + 1], d)
x.append(d)
# Randomly generate length tensor as well
elements = np.random.randint(2, 10)
lengths = []
total_length = 0
for _ in range(elements - 1):
lengths.append(np.random.randint(main_dims[0] - total_length))
total_length += lengths[-1]
lengths.append(main_dims[0] - total_length)
workspace.FeedBlob(ins[0], np.array(lengths, dtype=np.int32))
def sharding(x):
# numpy has proper modulo op that yields non-negative results
shards = (x[0] % parts).reshape([-1])
out = []
for i in range(parts):
idx = 0
sharded_lengths = np.zeros(elements)
for ind, length in enumerate(lengths):
for _ in range(length):
if shards[idx] == i:
sharded_lengths[ind] += 1
idx += 1
out.append(sharded_lengths)
for ind, v in enumerate(x):
suffix_shape = v.shape[len(x[0].shape):]
accum = []
data = v.reshape((-1, ) + suffix_shape)
if pack and ind == 0:
data = data // parts
for j, s in enumerate(shards):
if s == i:
accum.append(data[j])
def join(a):
if not a:
return np.empty(shape=(0, ) + suffix_shape)
return np.stack(a)
out.append(join(accum))
return out
workspace.RunOperatorOnce(op)
ref = sharding(x)
for name, expected in zip(outs, ref):
np.testing.assert_array_equal(
expected, workspace.FetchBlob(name)
)
if __name__ == "__main__":
import unittest
unittest.main()
| [((7638, 7653), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7651, 7653), False, 'import unittest\n'), ((2004, 2082), 'caffe2.python.core.CreateOperator', 'core.CreateOperator', (['"""Partition"""', 'ins', 'outs'], {'pack_first_input': '(1 if pack else 0)'}), "('Partition', ins, outs, pack_first_input=1 if pack else 0)\n", (2023, 2082), False, 'from caffe2.python import core, workspace\n'), ((3473, 3502), 'caffe2.python.workspace.RunOperatorOnce', 'workspace.RunOperatorOnce', (['op'], {}), '(op)\n', (3498, 3502), False, 'from caffe2.python import core, workspace\n'), ((5009, 5098), 'caffe2.python.core.CreateOperator', 'core.CreateOperator', (['"""LengthsPartition"""', 'ins', 'outs'], {'pack_first_input': '(1 if pack else 0)'}), "('LengthsPartition', ins, outs, pack_first_input=1 if\n pack else 0)\n", (5028, 5098), False, 'from caffe2.python import core, workspace\n'), ((5625, 5649), 'numpy.random.randint', 'np.random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (5642, 5649), True, 'import numpy as np\n'), ((7355, 7384), 'caffe2.python.workspace.RunOperatorOnce', 'workspace.RunOperatorOnce', (['op'], {}), '(op)\n', (7380, 7384), False, 'from caffe2.python import core, workspace\n'), ((2444, 2473), 'caffe2.python.workspace.FeedBlob', 'workspace.FeedBlob', (['ins[i]', 'd'], {}), '(ins[i], d)\n', (2462, 2473), False, 'from caffe2.python import core, workspace\n'), ((5485, 5518), 'caffe2.python.workspace.FeedBlob', 'workspace.FeedBlob', (['ins[i + 1]', 'd'], {}), '(ins[i + 1], d)\n', (5503, 5518), False, 'from caffe2.python import core, workspace\n'), ((5964, 5997), 'numpy.array', 'np.array', (['lengths'], {'dtype': 'np.int32'}), '(lengths, dtype=np.int32)\n', (5972, 5997), True, 'import numpy as np\n'), ((2269, 2300), 'caffe2.python.test_util.rand_array', 'rand_array', (['*(main_dims + dims)'], {}), '(*(main_dims + dims))\n', (2279, 2300), False, 'from caffe2.python.test_util import TestCase, rand_array\n'), ((2347, 2393), 'numpy.random.randint', 'np.random.randint', (['(-100)', '(100)', '(main_dims + dims)'], {}), '(-100, 100, main_dims + dims)\n', (2364, 2393), True, 'import numpy as np\n'), ((3704, 3729), 'caffe2.python.workspace.FetchBlob', 'workspace.FetchBlob', (['name'], {}), '(name)\n', (3723, 3729), False, 'from caffe2.python import core, workspace\n'), ((4190, 4248), 'caffe2.python.core.CreateOperator', 'core.CreateOperator', (['"""GatherByKey"""', 'gather_ins', 'actual_out'], {}), "('GatherByKey', gather_ins, actual_out)\n", (4209, 4248), False, 'from caffe2.python import core, workspace\n'), ((4294, 4323), 'caffe2.python.workspace.RunOperatorOnce', 'workspace.RunOperatorOnce', (['op'], {}), '(op)\n', (4319, 4323), False, 'from caffe2.python import core, workspace\n'), ((4355, 4388), 'caffe2.python.workspace.FetchBlob', 'workspace.FetchBlob', (['expected_out'], {}), '(expected_out)\n', (4374, 4388), False, 'from caffe2.python import core, workspace\n'), ((4418, 4449), 'caffe2.python.workspace.FetchBlob', 'workspace.FetchBlob', (['actual_out'], {}), '(actual_out)\n', (4437, 4449), False, 'from caffe2.python import core, workspace\n'), ((4470, 4517), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['expected', 'actual'], {}), '(expected, actual)\n', (4499, 4517), True, 'import numpy as np\n'), ((5310, 5341), 'caffe2.python.test_util.rand_array', 'rand_array', (['*(main_dims + dims)'], {}), '(*(main_dims + dims))\n', (5320, 5341), False, 'from caffe2.python.test_util import TestCase, rand_array\n'), ((5388, 5434), 'numpy.random.randint', 
'np.random.randint', (['(-100)', '(100)', '(main_dims + dims)'], {}), '(-100, 100, main_dims + dims)\n', (5405, 5434), True, 'import numpy as np\n'), ((5777, 5823), 'numpy.random.randint', 'np.random.randint', (['(main_dims[0] - total_length)'], {}), '(main_dims[0] - total_length)\n', (5794, 5823), True, 'import numpy as np\n'), ((6291, 6309), 'numpy.zeros', 'np.zeros', (['elements'], {}), '(elements)\n', (6299, 6309), True, 'import numpy as np\n'), ((7542, 7567), 'caffe2.python.workspace.FetchBlob', 'workspace.FetchBlob', (['name'], {}), '(name)\n', (7561, 7567), False, 'from caffe2.python import core, workspace\n'), ((3372, 3383), 'numpy.stack', 'np.stack', (['a'], {}), '(a)\n', (3380, 3383), True, 'import numpy as np\n'), ((7254, 7265), 'numpy.stack', 'np.stack', (['a'], {}), '(a)\n', (7262, 7265), True, 'import numpy as np\n'), ((3300, 3335), 'numpy.empty', 'np.empty', ([], {'shape': '((0,) + suffix_shape)'}), '(shape=(0,) + suffix_shape)\n', (3308, 3335), True, 'import numpy as np\n'), ((7182, 7217), 'numpy.empty', 'np.empty', ([], {'shape': '((0,) + suffix_shape)'}), '(shape=(0,) + suffix_shape)\n', (7190, 7217), True, 'import numpy as np\n')] |
Maikor/ydk-py | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_fib_common_cfg.py | b86c4a7c570ae3b2c5557d098420446df5de4929 | """ Cisco_IOS_XR_fib_common_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR fib\-common package configuration.
This module contains definitions
for the following management objects\:
fib\: CEF configuration
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class FibPbtsFallback(Enum):
"""
FibPbtsFallback (Enum Class)
Fib pbts fallback
.. data:: list = 1
Fallback to class number list
.. data:: any = 2
Fallback to any class
.. data:: drop = 3
Fallback to drop
"""
list = Enum.YLeaf(1, "list")
any = Enum.YLeaf(2, "any")
drop = Enum.YLeaf(3, "drop")
class FibPbtsForwardClass(Enum):
"""
FibPbtsForwardClass (Enum Class)
Fib pbts forward class
.. data:: any = 8
Any class
"""
any = Enum.YLeaf(8, "any")
class Fib(Entity):
"""
CEF configuration
.. attribute:: pbts_forward_class_fallbacks
PBTS class configuration
**type**\: :py:class:`PbtsForwardClassFallbacks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.Fib.PbtsForwardClassFallbacks>`
.. attribute:: platform
FIB platform parameters
**type**\: :py:class:`Platform <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.Fib.Platform>`
.. attribute:: auto_hash_recover
Set option for automatcially recovering consistent\-hashing state on interface up
**type**\: bool
.. attribute:: prefer_aib_routes
Set options for adjacency routes overriding RIB routes
**type**\: bool
.. attribute:: encap_sharing_disable
Set true to disable encapsulation sharing
**type**\: bool
.. attribute:: frr_follow_bgp_pic
Set option for fast\-reroute to follow BGP PIC update, not to wait for timeout
**type**\: bool
"""
_prefix = 'fib-common-cfg'
_revision = '2017-05-01'
def __init__(self):
super(Fib, self).__init__()
self._top_entity = None
self.yang_name = "fib"
self.yang_parent_name = "Cisco-IOS-XR-fib-common-cfg"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("pbts-forward-class-fallbacks", ("pbts_forward_class_fallbacks", Fib.PbtsForwardClassFallbacks)), ("platform", ("platform", Fib.Platform))])
self._leafs = OrderedDict([
('auto_hash_recover', (YLeaf(YType.boolean, 'auto-hash-recover'), ['bool'])),
('prefer_aib_routes', (YLeaf(YType.boolean, 'prefer-aib-routes'), ['bool'])),
('encap_sharing_disable', (YLeaf(YType.boolean, 'encap-sharing-disable'), ['bool'])),
('frr_follow_bgp_pic', (YLeaf(YType.boolean, 'frr-follow-bgp-pic'), ['bool'])),
])
self.auto_hash_recover = None
self.prefer_aib_routes = None
self.encap_sharing_disable = None
self.frr_follow_bgp_pic = None
self.pbts_forward_class_fallbacks = Fib.PbtsForwardClassFallbacks()
self.pbts_forward_class_fallbacks.parent = self
self._children_name_map["pbts_forward_class_fallbacks"] = "pbts-forward-class-fallbacks"
self.platform = Fib.Platform()
self.platform.parent = self
self._children_name_map["platform"] = "platform"
self._segment_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Fib, ['auto_hash_recover', 'prefer_aib_routes', 'encap_sharing_disable', 'frr_follow_bgp_pic'], name, value)
class PbtsForwardClassFallbacks(Entity):
"""
PBTS class configuration
.. attribute:: pbts_forward_class_fallback
Set PBTS class for fallback
**type**\: list of :py:class:`PbtsForwardClassFallback <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback>`
"""
_prefix = 'fib-common-cfg'
_revision = '2017-05-01'
def __init__(self):
super(Fib.PbtsForwardClassFallbacks, self).__init__()
self.yang_name = "pbts-forward-class-fallbacks"
self.yang_parent_name = "fib"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("pbts-forward-class-fallback", ("pbts_forward_class_fallback", Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback))])
self._leafs = OrderedDict()
self.pbts_forward_class_fallback = YList(self)
self._segment_path = lambda: "pbts-forward-class-fallbacks"
self._absolute_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Fib.PbtsForwardClassFallbacks, [], name, value)
class PbtsForwardClassFallback(Entity):
"""
Set PBTS class for fallback
.. attribute:: forward_class_number (key)
PBTS forward class number
**type**\: union of the below types:
**type**\: :py:class:`FibPbtsForwardClass <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.FibPbtsForwardClass>`
**type**\: int
**range:** 0..8
.. attribute:: fallback_type
Set PBTS fallback type
**type**\: :py:class:`FibPbtsFallback <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.FibPbtsFallback>`
**mandatory**\: True
.. attribute:: fallback_class_number_array
Set PBTS fallback class number array
**type**\: list of int
**range:** 0..7
"""
_prefix = 'fib-common-cfg'
_revision = '2017-05-01'
def __init__(self):
super(Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback, self).__init__()
self.yang_name = "pbts-forward-class-fallback"
self.yang_parent_name = "pbts-forward-class-fallbacks"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['forward_class_number']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('forward_class_number', (YLeaf(YType.str, 'forward-class-number'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg', 'FibPbtsForwardClass', ''),'int'])),
('fallback_type', (YLeaf(YType.enumeration, 'fallback-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg', 'FibPbtsFallback', '')])),
('fallback_class_number_array', (YLeafList(YType.uint32, 'fallback-class-number-array'), ['int'])),
])
self.forward_class_number = None
self.fallback_type = None
self.fallback_class_number_array = []
self._segment_path = lambda: "pbts-forward-class-fallback" + "[forward-class-number='" + str(self.forward_class_number) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib/pbts-forward-class-fallbacks/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback, ['forward_class_number', 'fallback_type', 'fallback_class_number_array'], name, value)
class Platform(Entity):
"""
FIB platform parameters
.. attribute:: label_switched_multicast
Options for label\-switched\-multicast parameters
**type**\: :py:class:`LabelSwitchedMulticast <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.Fib.Platform.LabelSwitchedMulticast>`
"""
_prefix = 'fib-common-cfg'
_revision = '2017-05-01'
def __init__(self):
super(Fib.Platform, self).__init__()
self.yang_name = "platform"
self.yang_parent_name = "fib"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("label-switched-multicast", ("label_switched_multicast", Fib.Platform.LabelSwitchedMulticast))])
self._leafs = OrderedDict()
self.label_switched_multicast = Fib.Platform.LabelSwitchedMulticast()
self.label_switched_multicast.parent = self
self._children_name_map["label_switched_multicast"] = "label-switched-multicast"
self._segment_path = lambda: "platform"
self._absolute_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Fib.Platform, [], name, value)
class LabelSwitchedMulticast(Entity):
"""
Options for label\-switched\-multicast parameters
.. attribute:: frr_holdtime
Set time to keep FRR slots programmed post FRR
**type**\: int
**range:** 3..180
**units**\: second
"""
_prefix = 'fib-common-cfg'
_revision = '2017-05-01'
def __init__(self):
super(Fib.Platform.LabelSwitchedMulticast, self).__init__()
self.yang_name = "label-switched-multicast"
self.yang_parent_name = "platform"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('frr_holdtime', (YLeaf(YType.uint32, 'frr-holdtime'), ['int'])),
])
self.frr_holdtime = None
self._segment_path = lambda: "label-switched-multicast"
self._absolute_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib/platform/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Fib.Platform.LabelSwitchedMulticast, ['frr_holdtime'], name, value)
def clone_ptr(self):
self._top_entity = Fib()
return self._top_entity
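# Illustrative configuration sketch (hypothetical; pushing it to a device would additionally
# need a ydk NetconfServiceProvider and CRUDService, which are not shown here):
#   fib = Fib()
#   fib.prefer_aib_routes = True
#   fib.platform.label_switched_multicast.frr_holdtime = 30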
| [((906, 927), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(1)', '"""list"""'], {}), "(1, 'list')\n", (916, 927), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((939, 959), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(2)', '"""any"""'], {}), "(2, 'any')\n", (949, 959), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((972, 993), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(3)', '"""drop"""'], {}), "(3, 'drop')\n", (982, 993), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((1161, 1181), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(8)', '"""any"""'], {}), "(8, 'any')\n", (1171, 1181), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((2597, 2761), 'collections.OrderedDict', 'OrderedDict', (["[('pbts-forward-class-fallbacks', ('pbts_forward_class_fallbacks', Fib.\n PbtsForwardClassFallbacks)), ('platform', ('platform', Fib.Platform))]"], {}), "([('pbts-forward-class-fallbacks', (\n 'pbts_forward_class_fallbacks', Fib.PbtsForwardClassFallbacks)), (\n 'platform', ('platform', Fib.Platform))])\n", (2608, 2761), False, 'from collections import OrderedDict\n'), ((4804, 4943), 'collections.OrderedDict', 'OrderedDict', (["[('pbts-forward-class-fallback', ('pbts_forward_class_fallback', Fib.\n PbtsForwardClassFallbacks.PbtsForwardClassFallback))]"], {}), "([('pbts-forward-class-fallback', ('pbts_forward_class_fallback',\n Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback))])\n", (4815, 4943), False, 'from collections import OrderedDict\n'), ((4966, 4979), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (4977, 4979), False, 'from collections import OrderedDict\n'), ((5028, 5039), 'ydk.types.YList', 'YList', (['self'], {}), '(self)\n', (5033, 5039), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((8949, 9064), 'collections.OrderedDict', 'OrderedDict', (["[('label-switched-multicast', ('label_switched_multicast', Fib.Platform.\n LabelSwitchedMulticast))]"], {}), "([('label-switched-multicast', ('label_switched_multicast', Fib.\n Platform.LabelSwitchedMulticast))])\n", (8960, 9064), False, 'from collections import OrderedDict\n'), ((9086, 9099), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (9097, 9099), False, 'from collections import OrderedDict\n'), ((6945, 6960), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\n', (6956, 6960), False, 'from collections import OrderedDict\n'), ((10516, 10531), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\n', (10527, 10531), False, 'from collections import OrderedDict\n'), ((2823, 2864), 'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '"""auto-hash-recover"""'], {}), "(YType.boolean, 'auto-hash-recover')\n", (2828, 2864), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((2913, 2954), 'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '"""prefer-aib-routes"""'], {}), "(YType.boolean, 'prefer-aib-routes')\n", (2918, 2954), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((3007, 3052), 
'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '"""encap-sharing-disable"""'], {}), "(YType.boolean, 'encap-sharing-disable')\n", (3012, 3052), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((3102, 3144), 'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '"""frr-follow-bgp-pic"""'], {}), "(YType.boolean, 'frr-follow-bgp-pic')\n", (3107, 3144), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((7051, 7091), 'ydk.types.YLeaf', 'YLeaf', (['YType.str', '"""forward-class-number"""'], {}), "(YType.str, 'forward-class-number')\n", (7056, 7091), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((7226, 7267), 'ydk.types.YLeaf', 'YLeaf', (['YType.enumeration', '"""fallback-type"""'], {}), "(YType.enumeration, 'fallback-type')\n", (7231, 7267), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((7406, 7460), 'ydk.types.YLeafList', 'YLeafList', (['YType.uint32', '"""fallback-class-number-array"""'], {}), "(YType.uint32, 'fallback-class-number-array')\n", (7415, 7460), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((10614, 10649), 'ydk.types.YLeaf', 'YLeaf', (['YType.uint32', '"""frr-holdtime"""'], {}), "(YType.uint32, 'frr-holdtime')\n", (10619, 10649), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n')] |
dl-stuff/dl9 | action/combo.py | 1cbe98afc53a1de9d413797fb130946acc4b6ba4 | """Series of actions that form a combo chain"""
from __future__ import annotations
from typing import Optional, Sequence, TYPE_CHECKING
from action import Action
from core.utility import Array
from core.constants import PlayerForm, SimActKind, MomentType
from core.database import FromDB
if TYPE_CHECKING:
from entity.player import Player
class Combos:
def __init__(self, player: Player, form: PlayerForm, act_ids: Sequence[int], ex_act_ids: Optional[Sequence[int]] = None) -> None:
self.player = player
self.actions: Array[Action] = Array()
for idx, act_id in enumerate(act_ids):
self.actions.append(Action(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1))
self.ex_actions = None
if ex_act_ids:
self.ex_actions: Array[Action] = Array()
for idx, act_id in enumerate(ex_act_ids):
if not act_id:
self.ex_actions.append(None)
continue
self.ex_actions.append(Action(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1))
def next(self):
if self.player.current in self.actions:
try:
return self.actions[self.player.current.index + 1]
except IndexError:
pass
return self.actions[1]
def __repr__(self) -> str:
if self.ex_actions:
return "->".join(map(repr, self.actions)) + "\tEX[" + "->".join(map(repr, self.ex_actions)) + "]"
return "->".join(map(repr, self.actions))
class UniqueCombos(Combos, FromDB, table="CharaUniqueCombo"):
def __init__(self, id: int, player: Player) -> None:
FromDB.__init__(self, id)
act_ids = (self._data["_ActionId"] + i for i in range(self._data["_MaxComboNum"]))
ex_act_ids = None if not self._data["_ExActionId"] else (self._data["_ExActionId"] + i for i in range(self._data["_MaxComboNum"]))
Combos.__init__(self, player, PlayerForm.ADV, act_ids, ex_act_ids=ex_act_ids)
if self._data["_ShiftConditionType"] == 1:
self.player.events.listen(MomentType.HIT, self.enable)
def enable(self, *args, **kwargs):
pass
class DefaultCombos(Combos, FromDB, table="WeaponType"):
def __init__(self, id: int, player: Player) -> None:
FromDB.__init__(self, id)
act_ids = (self._data[f"_DefaultSkill{i+1:02}"] for i in range(5) if self._data[f"_DefaultSkill{i+1:02}"])
ex_act_ids = None if not self._data["_DefaultSkill05Ex"] else (0, 0, 0, 0, self._data["_DefaultSkill05Ex"])
Combos.__init__(self, player, PlayerForm.ADV, act_ids, ex_act_ids=ex_act_ids)
class DragonCombos(Combos):
def __init__(self, id: int, combo_max: int, player: Player) -> None:
act_ids = (id + i for i in range(combo_max))
Combos.__init__(self, player, PlayerForm.DRG, act_ids)
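
# A minimal usage sketch (hedged): how a simulator loop might advance through a
# combo chain. `player` is assumed to be an already-constructed entity.player.Player
# whose `current` attribute holds the action being executed; the action id and
# combo length below are placeholders, not real game data.
#
#   combos = DragonCombos(100000, 3, player)   # hypothetical action id / combo length
#   action = combos.next()                     # first combo action when none is active
#   ...                                        # once `action` resolves:
#   action = combos.next()                     # next action in the chain, wrapping back
#                                              # to combos.actions[1] at the end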
| [((562, 569), 'core.utility.Array', 'Array', ([], {}), '()\n', (567, 569), False, 'from core.utility import Array\n'), ((1689, 1714), 'core.database.FromDB.__init__', 'FromDB.__init__', (['self', 'id'], {}), '(self, id)\n', (1704, 1714), False, 'from core.database import FromDB\n'), ((2326, 2351), 'core.database.FromDB.__init__', 'FromDB.__init__', (['self', 'id'], {}), '(self, id)\n', (2341, 2351), False, 'from core.database import FromDB\n'), ((821, 828), 'core.utility.Array', 'Array', ([], {}), '()\n', (826, 828), False, 'from core.utility import Array\n'), ((649, 720), 'action.Action', 'Action', (['act_id', 'player'], {'kind': 'SimActKind.COMBO', 'form': 'form', 'index': '(idx + 1)'}), '(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1)\n', (655, 720), False, 'from action import Action\n'), ((1031, 1102), 'action.Action', 'Action', (['act_id', 'player'], {'kind': 'SimActKind.COMBO', 'form': 'form', 'index': '(idx + 1)'}), '(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1)\n', (1037, 1102), False, 'from action import Action\n')] |
achiang/flask-unchained | flask_unchained/bundles/session/config.py | 12788a6e618904a25ff2b571eb05ff1dc8f1840f | import os
from datetime import timedelta
from flask_unchained import BundleConfig
try:
from flask_unchained.bundles.sqlalchemy import db
except ImportError:
db = None
class _DefaultFlaskConfigForSessions(BundleConfig):
SESSION_COOKIE_NAME = 'session'
"""
The name of the session cookie.
Defaults to ``'session'``.
"""
SESSION_COOKIE_DOMAIN = None
"""
The domain for the session cookie. If this is not set, the cookie will be
valid for all subdomains of ``SERVER_NAME``.
Defaults to ``None``.
"""
SESSION_COOKIE_PATH = None
"""
    The path for the session cookie. If this is not set, the cookie will be valid
    for all of ``APPLICATION_ROOT``, or, if that is not set, for ``'/'``.
Defaults to ``None``.
"""
SESSION_COOKIE_HTTPONLY = True
"""
Controls if the cookie should be set with the ``httponly`` flag. Browsers will
not allow JavaScript access to cookies marked as ``httponly`` for security.
Defaults to ``True``.
"""
SESSION_COOKIE_SECURE = False
"""
Controls if the cookie should be set with the ``secure`` flag. Browsers will
only send cookies with requests over HTTPS if the cookie is marked ``secure``.
The application must be served over HTTPS for this to make sense.
Defaults to ``False``.
"""
PERMANENT_SESSION_LIFETIME = timedelta(days=31)
"""
    The lifetime of a permanent session as a ``datetime.timedelta`` object or an
integer representing seconds.
Defaults to 31 days.
"""
SESSION_COOKIE_SAMESITE = None
"""
Restrict how cookies are sent with requests from external sites. Limits the
scope of the cookie such that it will only be attached to requests if those
requests are "same-site". Can be set to ``'Lax'`` (recommended) or ``'Strict'``.
Defaults to ``None``.
"""
SESSION_REFRESH_EACH_REQUEST = True
"""
    Controls the set-cookie behavior. If set to ``True`` a permanent session will
    be refreshed on each request and have its lifetime extended; if set to ``False``
    it will only be modified if the session is actually modified. Non-permanent sessions
are not affected by this and will always expire if the browser window closes.
Defaults to ``True``.
"""
class Config(_DefaultFlaskConfigForSessions):
"""
Default configuration options for the Session Bundle.
"""
SESSION_TYPE = 'null'
"""
Specifies which type of session interface to use. Built-in session types:
- ``'null'``: :class:`~flask_unchained.bundles.session.session_interfaces.NullSessionInterface` (default)
- ``'redis'``: :class:`~flask_unchained.bundles.session.session_interfaces.RedisSessionInterface`
- ``'memcached'``: :class:`~flask_unchained.bundles.session.session_interfaces.MemcachedSessionInterface`
- ``'filesystem'``: :class:`~flask_unchained.bundles.session.session_interfaces.FileSystemSessionInterface`
- ``'mongodb'``: :class:`~flask_unchained.bundles.session.session_interfaces.MongoDBSessionInterface`
- ``'sqlalchemy'``: :class:`~flask_unchained.bundles.session.session_interfaces.SqlAlchemySessionInterface`
Defaults to ``'null'``.
"""
SESSION_PERMANENT = True
"""
    Whether to use a permanent session or not.
Defaults to ``True``.
"""
SESSION_USE_SIGNER = False
"""
    Whether to sign the session cookie sid or not. If set to ``True``, you have to
set ``SECRET_KEY``.
Defaults to ``False``.
"""
SESSION_KEY_PREFIX = 'session:'
"""
A prefix that is added before all session keys. This makes it possible to use
the same backend storage server for different apps.
Defaults to ``'session:'``.
"""
SESSION_REDIS = None
"""
A :class:`redis.Redis` instance.
By default, connect to ``127.0.0.1:6379``.
"""
SESSION_MEMCACHED = None
"""
A :class:`memcached.Client` instance.
By default, connect to ``127.0.0.1:11211``.
"""
SESSION_FILE_DIR = os.path.join(os.getcwd(), 'flask_sessions')
"""
The folder where session files are stored.
Defaults to using a folder named ``flask_sessions`` in your current working
directory.
"""
SESSION_FILE_THRESHOLD = 500
"""
The maximum number of items the session stores before it starts deleting some.
Defaults to 500.
"""
SESSION_FILE_MODE = 0o600
"""
The file mode wanted for the session files. Should be specified as an octal,
eg ``0o600``.
Defaults to ``0o600``.
"""
SESSION_MONGODB = None
"""
A :class:`pymongo.MongoClient` instance.
By default, connect to ``127.0.0.1:27017``.
"""
SESSION_MONGODB_DB = 'flask_session'
"""
The MongoDB database you want to use.
Defaults to ``'flask_session'``.
"""
SESSION_MONGODB_COLLECT = 'sessions'
"""
The MongoDB collection you want to use.
Defaults to ``'sessions'``.
"""
SESSION_SQLALCHEMY = db
"""
A :class:`~flask_unchained.bundles.sqlalchemy.SQLAlchemy` extension instance.
"""
SESSION_SQLALCHEMY_TABLE = 'flask_sessions'
"""
The name of the SQL table you want to use.
Defaults to ``flask_sessions``.
"""
SESSION_SQLALCHEMY_MODEL = None
"""
Set this if you need to customize the
:class:`~flask_unchained.bundles.sqlalchemy.BaseModel` subclass used for
storing sessions in the database.
"""
| [((1367, 1385), 'datetime.timedelta', 'timedelta', ([], {'days': '(31)'}), '(days=31)\n', (1376, 1385), False, 'from datetime import timedelta\n'), ((4029, 4040), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4038, 4040), False, 'import os\n')] |
tombh/sktime | sktime/forecasting/base/adapters/_statsmodels.py | 53df0b9ed9d1fd800539165c414cc5611bcc56b3 | #!/usr/bin/env python3 -u
# -*- coding: utf-8 -*-
__author__ = ["Markus Löning"]
__all__ = ["_StatsModelsAdapter"]
import numpy as np
import pandas as pd
from sktime.forecasting.base._base import DEFAULT_ALPHA
from sktime.forecasting.base._sktime import _OptionalForecastingHorizonMixin
from sktime.forecasting.base._sktime import _SktimeForecaster
class _StatsModelsAdapter(_OptionalForecastingHorizonMixin, _SktimeForecaster):
"""Base class for interfacing statsmodels forecasting algorithms"""
_fitted_param_names = ()
def __init__(self):
self._forecaster = None
self._fitted_forecaster = None
super(_StatsModelsAdapter, self).__init__()
def fit(self, y, X=None, fh=None):
"""Fit to training data.
Parameters
----------
y : pd.Series
Target time series to which to fit the forecaster.
fh : int, list or np.array, optional (default=None)
            The forecaster's horizon with the steps ahead to predict.
X : pd.DataFrame, optional (default=None)
Exogenous variables are ignored
Returns
-------
self : returns an instance of self.
"""
# statsmodels does not support the pd.Int64Index as required,
# so we coerce them here to pd.RangeIndex
if isinstance(y, pd.Series) and type(y.index) == pd.Int64Index:
y, X = _coerce_int_to_range_index(y, X)
self._set_y_X(y, X)
self._set_fh(fh)
self._fit_forecaster(y, X)
self._is_fitted = True
return self
def _fit_forecaster(self, y_train, X_train=None):
"""Internal fit"""
raise NotImplementedError("abstract method")
def _predict(self, fh, X=None, return_pred_int=False, alpha=DEFAULT_ALPHA):
"""
Make forecasts.
Parameters
----------
fh : ForecastingHorizon
            The forecaster's horizon with the steps ahead to predict.
Default is one-step ahead forecast,
i.e. np.array([1])
X : pd.DataFrame, optional (default=None)
Exogenous variables are ignored.
return_pred_int : bool, optional (default=False)
alpha : int or list, optional (default=0.95)
Returns
-------
y_pred : pd.Series
Returns series of predicted values.
"""
if return_pred_int:
raise NotImplementedError()
# statsmodels requires zero-based indexing starting at the
# beginning of the training series when passing integers
start, end = fh.to_absolute_int(self._y.index[0], self.cutoff)[[0, -1]]
y_pred = self._fitted_forecaster.predict(start, end)
# statsmodels forecasts all periods from start to end of forecasting
# horizon, but only return given time points in forecasting horizon
return y_pred.loc[fh.to_absolute(self.cutoff).to_pandas()]
def get_fitted_params(self):
"""Get fitted parameters
Returns
-------
fitted_params : dict
"""
self.check_is_fitted()
return {
name: self._fitted_forecaster.params.get(name)
for name in self._get_fitted_param_names()
}
def _get_fitted_param_names(self):
"""Get names of fitted parameters"""
return self._fitted_param_names
def _coerce_int_to_range_index(y, X=None):
new_index = pd.RangeIndex(y.index[0], y.index[-1] + 1)
try:
np.testing.assert_array_equal(y.index, new_index)
except AssertionError:
raise ValueError(
"Coercion of pd.Int64Index to pd.RangeIndex "
"failed. Please provide `y_train` with a "
"pd.RangeIndex."
)
y.index = new_index
if X is not None:
X.index = new_index
return y, X
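
# A minimal sketch (hedged) of how a concrete adapter subclass might implement
# `_fit_forecaster`. It mirrors the pattern above but is illustrative only and
# is not one of sktime's shipped forecasters.
#
#   from statsmodels.tsa.holtwinters import ExponentialSmoothing
#
#   class _MySmoothingForecaster(_StatsModelsAdapter):
#       def _fit_forecaster(self, y_train, X_train=None):
#           self._forecaster = ExponentialSmoothing(y_train)
#           self._fitted_forecaster = self._forecaster.fit()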
| [((3440, 3482), 'pandas.RangeIndex', 'pd.RangeIndex', (['y.index[0]', '(y.index[-1] + 1)'], {}), '(y.index[0], y.index[-1] + 1)\n', (3453, 3482), True, 'import pandas as pd\n'), ((3500, 3549), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['y.index', 'new_index'], {}), '(y.index, new_index)\n', (3529, 3549), True, 'import numpy as np\n')] |
Dieptranivsr/Ros_Diep | melodic/lib/python2.7/dist-packages/gazebo_msgs/srv/_GetLinkProperties.py | d790e75e6f5da916701b11a2fdf3e03b6a47086b | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from gazebo_msgs/GetLinkPropertiesRequest.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class GetLinkPropertiesRequest(genpy.Message):
_md5sum = "7d82d60381f1b66a30f2157f60884345"
_type = "gazebo_msgs/GetLinkPropertiesRequest"
_has_header = False # flag to mark the presence of a Header object
_full_text = """string link_name # name of link
# link names are prefixed by model name, e.g. pr2::base_link
"""
__slots__ = ['link_name']
_slot_types = ['string']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
link_name
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetLinkPropertiesRequest, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.link_name is None:
self.link_name = ''
else:
self.link_name = ''
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.link_name = str[start:end].decode('utf-8', 'rosmsg')
else:
self.link_name = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.link_name = str[start:end].decode('utf-8', 'rosmsg')
else:
self.link_name = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from gazebo_msgs/GetLinkPropertiesResponse.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import geometry_msgs.msg
class GetLinkPropertiesResponse(genpy.Message):
_md5sum = "a8619f92d17cfcc3958c0fd13299443d"
_type = "gazebo_msgs/GetLinkPropertiesResponse"
_has_header = False # flag to mark the presence of a Header object
_full_text = """geometry_msgs/Pose com # center of mass location in link frame
# and orientation of the moment of inertias
# relative to the link frame
bool gravity_mode # set gravity mode on/off
float64 mass # linear mass of link
float64 ixx # moment of inertia
float64 ixy # moment of inertia
float64 ixz # moment of inertia
float64 iyy # moment of inertia
float64 iyz # moment of inertia
float64 izz # moment of inertia
bool success # return true if get info is successful
string status_message # comments if available
================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of position and orientation.
Point position
Quaternion orientation
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.
float64 x
float64 y
float64 z
float64 w
"""
__slots__ = ['com','gravity_mode','mass','ixx','ixy','ixz','iyy','iyz','izz','success','status_message']
_slot_types = ['geometry_msgs/Pose','bool','float64','float64','float64','float64','float64','float64','float64','bool','string']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
com,gravity_mode,mass,ixx,ixy,ixz,iyy,iyz,izz,success,status_message
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetLinkPropertiesResponse, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.com is None:
self.com = geometry_msgs.msg.Pose()
if self.gravity_mode is None:
self.gravity_mode = False
if self.mass is None:
self.mass = 0.
if self.ixx is None:
self.ixx = 0.
if self.ixy is None:
self.ixy = 0.
if self.ixz is None:
self.ixz = 0.
if self.iyy is None:
self.iyy = 0.
if self.iyz is None:
self.iyz = 0.
if self.izz is None:
self.izz = 0.
if self.success is None:
self.success = False
if self.status_message is None:
self.status_message = ''
else:
self.com = geometry_msgs.msg.Pose()
self.gravity_mode = False
self.mass = 0.
self.ixx = 0.
self.ixy = 0.
self.ixz = 0.
self.iyy = 0.
self.iyz = 0.
self.izz = 0.
self.success = False
self.status_message = ''
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_7dB7dB().pack(_x.com.position.x, _x.com.position.y, _x.com.position.z, _x.com.orientation.x, _x.com.orientation.y, _x.com.orientation.z, _x.com.orientation.w, _x.gravity_mode, _x.mass, _x.ixx, _x.ixy, _x.ixz, _x.iyy, _x.iyz, _x.izz, _x.success))
_x = self.status_message
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.com is None:
self.com = geometry_msgs.msg.Pose()
end = 0
_x = self
start = end
end += 114
(_x.com.position.x, _x.com.position.y, _x.com.position.z, _x.com.orientation.x, _x.com.orientation.y, _x.com.orientation.z, _x.com.orientation.w, _x.gravity_mode, _x.mass, _x.ixx, _x.ixy, _x.ixz, _x.iyy, _x.iyz, _x.izz, _x.success,) = _get_struct_7dB7dB().unpack(str[start:end])
self.gravity_mode = bool(self.gravity_mode)
self.success = bool(self.success)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status_message = str[start:end].decode('utf-8', 'rosmsg')
else:
self.status_message = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_7dB7dB().pack(_x.com.position.x, _x.com.position.y, _x.com.position.z, _x.com.orientation.x, _x.com.orientation.y, _x.com.orientation.z, _x.com.orientation.w, _x.gravity_mode, _x.mass, _x.ixx, _x.ixy, _x.ixz, _x.iyy, _x.iyz, _x.izz, _x.success))
_x = self.status_message
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.com is None:
self.com = geometry_msgs.msg.Pose()
end = 0
_x = self
start = end
end += 114
(_x.com.position.x, _x.com.position.y, _x.com.position.z, _x.com.orientation.x, _x.com.orientation.y, _x.com.orientation.z, _x.com.orientation.w, _x.gravity_mode, _x.mass, _x.ixx, _x.ixy, _x.ixz, _x.iyy, _x.iyz, _x.izz, _x.success,) = _get_struct_7dB7dB().unpack(str[start:end])
self.gravity_mode = bool(self.gravity_mode)
self.success = bool(self.success)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status_message = str[start:end].decode('utf-8', 'rosmsg')
else:
self.status_message = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_7dB7dB = None
def _get_struct_7dB7dB():
global _struct_7dB7dB
if _struct_7dB7dB is None:
_struct_7dB7dB = struct.Struct("<7dB7dB")
return _struct_7dB7dB
class GetLinkProperties(object):
_type = 'gazebo_msgs/GetLinkProperties'
_md5sum = '0e06a70386d0ee3fb880c02f23fcd821'
_request_class = GetLinkPropertiesRequest
_response_class = GetLinkPropertiesResponse
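
# A minimal usage sketch (hedged): round-tripping a request message through the
# serialize/deserialize pair defined above.
#
#   from io import BytesIO
#
#   req = GetLinkPropertiesRequest(link_name='pr2::base_link')
#   buff = BytesIO()
#   req.serialize(buff)
#   decoded = GetLinkPropertiesRequest().deserialize(buff.getvalue())
#   assert decoded.link_name == 'pr2::base_link'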
| [((12651, 12675), 'struct.Struct', 'struct.Struct', (['"""<7dB7dB"""'], {}), "('<7dB7dB')\n", (12664, 12675), False, 'import struct\n'), ((2370, 2399), 'codecs.lookup_error', 'codecs.lookup_error', (['"""rosmsg"""'], {}), "('rosmsg')\n", (2389, 2399), False, 'import codecs\n'), ((3778, 3807), 'codecs.lookup_error', 'codecs.lookup_error', (['"""rosmsg"""'], {}), "('rosmsg')\n", (3797, 3807), False, 'import codecs\n'), ((9257, 9286), 'codecs.lookup_error', 'codecs.lookup_error', (['"""rosmsg"""'], {}), "('rosmsg')\n", (9276, 9286), False, 'import codecs\n'), ((11468, 11497), 'codecs.lookup_error', 'codecs.lookup_error', (['"""rosmsg"""'], {}), "('rosmsg')\n", (11487, 11497), False, 'import codecs\n'), ((2763, 2792), 'genpy.DeserializationError', 'genpy.DeserializationError', (['e'], {}), '(e)\n', (2789, 2792), False, 'import genpy\n'), ((4171, 4200), 'genpy.DeserializationError', 'genpy.DeserializationError', (['e'], {}), '(e)\n', (4197, 4200), False, 'import genpy\n'), ((10157, 10186), 'genpy.DeserializationError', 'genpy.DeserializationError', (['e'], {}), '(e)\n', (10183, 10186), False, 'import genpy\n'), ((12368, 12397), 'genpy.DeserializationError', 'genpy.DeserializationError', (['e'], {}), '(e)\n', (12394, 12397), False, 'import genpy\n'), ((1862, 1893), 'struct.Struct', 'struct.Struct', (["('<I%ss' % length)"], {}), "('<I%ss' % length)\n", (1875, 1893), False, 'import struct\n'), ((3191, 3222), 'struct.Struct', 'struct.Struct', (["('<I%ss' % length)"], {}), "('<I%ss' % length)\n", (3204, 3222), False, 'import struct\n'), ((8749, 8780), 'struct.Struct', 'struct.Struct', (["('<I%ss' % length)"], {}), "('<I%ss' % length)\n", (8762, 8780), False, 'import struct\n'), ((10881, 10912), 'struct.Struct', 'struct.Struct', (["('<I%ss' % length)"], {}), "('<I%ss' % length)\n", (10894, 10912), False, 'import struct\n')] |
st--/jupytext | jupytext/kernels.py | f8e8352859cc22e17b11154d0770fd946c4a430a | """Find kernel specifications for a given language"""
import os
import sys
from .languages import same_language
from .reraise import reraise
try:
# I prefer not to take a dependency on jupyter_client
from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec
except ImportError as err:
find_kernel_specs = reraise(err)
get_kernel_spec = reraise(err)
def set_kernelspec_from_language(notebook):
"""Set the kernel specification based on the 'main_language' metadata"""
language = notebook.metadata.get("jupytext", {}).get("main_language")
if "kernelspec" not in notebook.metadata and language:
try:
kernelspec = kernelspec_from_language(language)
except ValueError:
return
notebook.metadata["kernelspec"] = kernelspec
notebook.metadata.get("jupytext", {}).pop("main_language")
def kernelspec_from_language(language):
"""Return the python kernel that matches the current env, or the first kernel that matches the given language"""
if language == "python":
# Return the kernel that matches the current Python executable
for name in find_kernel_specs():
kernel_specs = get_kernel_spec(name)
cmd = kernel_specs.argv[0]
if (
kernel_specs.language == "python"
and os.path.isfile(cmd)
and os.path.samefile(cmd, sys.executable)
):
return {
"name": name,
"language": language,
"display_name": kernel_specs.display_name,
}
raise ValueError(
"No kernel found that matches the current python executable {}\n".format(
sys.executable
)
+ "Install one with 'python -m ipykernel install --name kernel_name [--user]'"
)
for name in find_kernel_specs():
kernel_specs = get_kernel_spec(name)
if same_language(kernel_specs.language, language):
return {
"name": name,
"language": language,
"display_name": kernel_specs.display_name,
}
raise ValueError("No kernel found for the language {}".format(language))
| [((1903, 1922), 'jupyter_client.kernelspec.find_kernel_specs', 'find_kernel_specs', ([], {}), '()\n', (1920, 1922), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\n'), ((1157, 1176), 'jupyter_client.kernelspec.find_kernel_specs', 'find_kernel_specs', ([], {}), '()\n', (1174, 1176), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\n'), ((1947, 1968), 'jupyter_client.kernelspec.get_kernel_spec', 'get_kernel_spec', (['name'], {}), '(name)\n', (1962, 1968), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\n'), ((1205, 1226), 'jupyter_client.kernelspec.get_kernel_spec', 'get_kernel_spec', (['name'], {}), '(name)\n', (1220, 1226), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\n'), ((1353, 1372), 'os.path.isfile', 'os.path.isfile', (['cmd'], {}), '(cmd)\n', (1367, 1372), False, 'import os\n'), ((1393, 1430), 'os.path.samefile', 'os.path.samefile', (['cmd', 'sys.executable'], {}), '(cmd, sys.executable)\n', (1409, 1430), False, 'import os\n')] |
dhruv9vats/scipy | scipy/sparse/_matrix_io.py | 48e1dd7e604df3ae57d104b407c5b7a2a6a3247d | import numpy as np
import scipy.sparse
__all__ = ['save_npz', 'load_npz']
# Make loading safe vs. malicious input
PICKLE_KWARGS = dict(allow_pickle=False)
def save_npz(file, matrix, compressed=True):
""" Save a sparse matrix to a file using ``.npz`` format.
Parameters
----------
file : str or file-like object
Either the file name (string) or an open file (file-like object)
where the data will be saved. If file is a string, the ``.npz``
extension will be appended to the file name if it is not already
there.
    matrix : spmatrix (format: ``csc``, ``csr``, ``bsr``, ``dia`` or ``coo``)
The sparse matrix to save.
compressed : bool, optional
Allow compressing the file. Default: True
See Also
--------
scipy.sparse.load_npz: Load a sparse matrix from a file using ``.npz`` format.
numpy.savez: Save several arrays into a ``.npz`` archive.
numpy.savez_compressed : Save several arrays into a compressed ``.npz`` archive.
Examples
--------
Store sparse matrix to disk, and load it again:
>>> import scipy.sparse
>>> sparse_matrix = scipy.sparse.csc_matrix(np.array([[0, 0, 3], [4, 0, 0]]))
>>> sparse_matrix
<2x3 sparse matrix of type '<class 'numpy.int64'>'
with 2 stored elements in Compressed Sparse Column format>
>>> sparse_matrix.todense()
matrix([[0, 0, 3],
[4, 0, 0]], dtype=int64)
>>> scipy.sparse.save_npz('/tmp/sparse_matrix.npz', sparse_matrix)
>>> sparse_matrix = scipy.sparse.load_npz('/tmp/sparse_matrix.npz')
>>> sparse_matrix
<2x3 sparse matrix of type '<class 'numpy.int64'>'
with 2 stored elements in Compressed Sparse Column format>
>>> sparse_matrix.todense()
matrix([[0, 0, 3],
[4, 0, 0]], dtype=int64)
"""
arrays_dict = {}
if matrix.format in ('csc', 'csr', 'bsr'):
arrays_dict.update(indices=matrix.indices, indptr=matrix.indptr)
elif matrix.format == 'dia':
arrays_dict.update(offsets=matrix.offsets)
elif matrix.format == 'coo':
arrays_dict.update(row=matrix.row, col=matrix.col)
else:
raise NotImplementedError('Save is not implemented for sparse matrix of format {}.'.format(matrix.format))
arrays_dict.update(
format=matrix.format.encode('ascii'),
shape=matrix.shape,
data=matrix.data
)
if compressed:
np.savez_compressed(file, **arrays_dict)
else:
np.savez(file, **arrays_dict)
def load_npz(file):
""" Load a sparse matrix from a file using ``.npz`` format.
Parameters
----------
file : str or file-like object
Either the file name (string) or an open file (file-like object)
where the data will be loaded.
Returns
-------
result : csc_matrix, csr_matrix, bsr_matrix, dia_matrix or coo_matrix
A sparse matrix containing the loaded data.
Raises
------
OSError
If the input file does not exist or cannot be read.
See Also
--------
scipy.sparse.save_npz: Save a sparse matrix to a file using ``.npz`` format.
numpy.load: Load several arrays from a ``.npz`` archive.
Examples
--------
Store sparse matrix to disk, and load it again:
>>> import scipy.sparse
>>> sparse_matrix = scipy.sparse.csc_matrix(np.array([[0, 0, 3], [4, 0, 0]]))
>>> sparse_matrix
<2x3 sparse matrix of type '<class 'numpy.int64'>'
with 2 stored elements in Compressed Sparse Column format>
>>> sparse_matrix.todense()
matrix([[0, 0, 3],
[4, 0, 0]], dtype=int64)
>>> scipy.sparse.save_npz('/tmp/sparse_matrix.npz', sparse_matrix)
>>> sparse_matrix = scipy.sparse.load_npz('/tmp/sparse_matrix.npz')
>>> sparse_matrix
<2x3 sparse matrix of type '<class 'numpy.int64'>'
with 2 stored elements in Compressed Sparse Column format>
>>> sparse_matrix.todense()
matrix([[0, 0, 3],
[4, 0, 0]], dtype=int64)
"""
with np.load(file, **PICKLE_KWARGS) as loaded:
try:
matrix_format = loaded['format']
except KeyError as e:
raise ValueError('The file {} does not contain a sparse matrix.'.format(file)) from e
matrix_format = matrix_format.item()
if not isinstance(matrix_format, str):
# Play safe with Python 2 vs 3 backward compatibility;
# files saved with SciPy < 1.0.0 may contain unicode or bytes.
matrix_format = matrix_format.decode('ascii')
try:
cls = getattr(scipy.sparse, '{}_matrix'.format(matrix_format))
except AttributeError as e:
raise ValueError('Unknown matrix format "{}"'.format(matrix_format)) from e
if matrix_format in ('csc', 'csr', 'bsr'):
return cls((loaded['data'], loaded['indices'], loaded['indptr']), shape=loaded['shape'])
elif matrix_format == 'dia':
return cls((loaded['data'], loaded['offsets']), shape=loaded['shape'])
elif matrix_format == 'coo':
return cls((loaded['data'], (loaded['row'], loaded['col'])), shape=loaded['shape'])
else:
raise NotImplementedError('Load is not implemented for '
'sparse matrix of format {}.'.format(matrix_format))
| [((2426, 2466), 'numpy.savez_compressed', 'np.savez_compressed', (['file'], {}), '(file, **arrays_dict)\n', (2445, 2466), True, 'import numpy as np\n'), ((2485, 2514), 'numpy.savez', 'np.savez', (['file'], {}), '(file, **arrays_dict)\n', (2493, 2514), True, 'import numpy as np\n'), ((4018, 4048), 'numpy.load', 'np.load', (['file'], {}), '(file, **PICKLE_KWARGS)\n', (4025, 4048), True, 'import numpy as np\n')] |
ed741/PathBench | src/simulator/services/resources/atlas.py | 50fe138eb1f824f49fe1a862705e435a1c3ec3ae | from typing import Dict, List
from simulator.services.resources.directory import Directory
from simulator.services.services import Services
class Atlas(Directory):
def __init__(self, services: Services, name: str, parent: str, create: bool = False) -> None:
super().__init__(services, name, parent, create)
if create:
metadata: Dict[str, any] = {
"next_index": 0,
}
self._save_metadata(metadata)
def append(self, obj: any) -> None:
self.save(str(self._get_next_index()), obj)
self._increment_index()
def load_all(self, max_els: int = float("inf")) -> List[any]:
ret: List[any] = []
idx: int = 0
while idx < max_els:
obj: any = self.load(str(idx))
if obj:
ret.append(obj)
idx += 1
else:
break
return ret
def _get_next_index(self) -> int:
metadata: Dict[str, any] = self._get_metadata()
return metadata["next_index"]
def _increment_index(self) -> None:
metadata: Dict[str, any] = self._get_metadata()
metadata["next_index"] += 1
self._save_metadata(metadata)
def _save_metadata(self, metadata: Dict[str, any]) -> None:
super().save("metadata", metadata)
def _get_metadata(self) -> Dict[str, any]:
return super().load("metadata")
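
# A minimal usage sketch (hedged): appending objects to an atlas and reading
# them back. `services` is assumed to be an initialised simulator Services
# instance; the directory names are placeholders.
#
#   atlas = Atlas(services, "training_maps", parent="resources", create=True)
#   atlas.append({"grid": [[0, 1], [1, 0]]})
#   entries = atlas.load_all()        # -> [{"grid": [[0, 1], [1, 0]]}]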
| [] |
ulixius9/OpenMetadata | ingestion/src/metadata/great_expectations/builders/table/row_count_to_equal.py | f121698d968717f0932f685ef2a512c2a4d92438 | # Copyright 2022 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
TestCase builder
"""
from metadata.generated.schema.api.tests.createTableTest import CreateTableTestRequest
from metadata.generated.schema.tests.table import tableRowCountToEqual
from metadata.generated.schema.tests.tableTest import TableTestType
from metadata.great_expectations.builders.table.base_table_test_builders import (
BaseTableTestBuilder,
)
class TableRowCountToEqualBuilder(BaseTableTestBuilder):
"""Builder for `expect_table_row_count_to_equal` GE expectation"""
def _build_test(self) -> CreateTableTestRequest:
"""Specific test builder for the test"""
return self.build_test_request(
config=tableRowCountToEqual.TableRowCountToEqual(
value=self.result["expectation_config"]["kwargs"]["value"],
),
test_type=TableTestType.tableRowCountToEqual,
)
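
# For reference (hedged): `self.result` is assumed to be a Great Expectations
# validation result dict, so the value read above comes from a shape like
#   {"expectation_config": {"kwargs": {"value": 100}}}
# as produced by `expect_table_row_count_to_equal`.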
| [((1225, 1331), 'metadata.generated.schema.tests.table.tableRowCountToEqual.TableRowCountToEqual', 'tableRowCountToEqual.TableRowCountToEqual', ([], {'value': "self.result['expectation_config']['kwargs']['value']"}), "(value=self.result[\n 'expectation_config']['kwargs']['value'])\n", (1266, 1331), False, 'from metadata.generated.schema.tests.table import tableRowCountToEqual\n')] |
gustavovaliati/obj-det-experiments | tensorflow/bbox/jrieke-tf-parse-v2/jrieke_tf_dataset.py | e81774a18b34c22d971ad15d7ac6eb8663ac6f22 | '''
This code is based on https://github.com/jrieke/shape-detection/
'''
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
import tensorflow as tf
import datetime
class JriekeBboxDataset:
def generate(self):
print('Generating...')
self.WIDTH = 8
self.HEIGHT = 8
num_imgs = 50000
min_object_size = 1
max_object_size = 4
num_objects = 1
self.bboxes = np.zeros((num_imgs, num_objects, 4))
self.imgs = np.zeros((num_imgs, self.WIDTH, self.HEIGHT)) # set background to 0
for i_img in range(num_imgs):
for i_object in range(num_objects):
w, h = np.random.randint(min_object_size, max_object_size, size=2)
x = np.random.randint(0, self.WIDTH - w)
y = np.random.randint(0, self.HEIGHT - h)
self.imgs[i_img, y:y+h, x:x+w] = 1. # set rectangle to 1
self.bboxes[i_img, i_object] = [x, y, w, h]
print("Shapes: imgs ", self.imgs.shape, " bboxes ", self.bboxes.shape)
#why this?
# X = (self.imgs.reshape(num_imgs, -1) - np.mean(self.imgs)) / np.std(self.imgs)
X = self.imgs
y = self.bboxes.reshape(num_imgs, -1) / self.WIDTH
# Split training and test.
i = int(0.8 * num_imgs)
train_X = X[:i] #80% for training
test_X = X[i:]
train_y = y[:i]
test_y = y[i:]
self.test_imgs = self.imgs[i:]
self.test_bboxes = self.bboxes[i:]
return train_X, train_y, test_X, test_y
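
    # NOTE: this method is redefined further down in the class; the later
    # definition is the one that takes effect at runtime.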
def check_dataset_image_compability(self, test_X_sample, test_imgs_sample):
fig = plt.figure(figsize=(12, 3))
fig.suptitle('check if the generated imgs match to the test_X slice image')
fig.subplots_adjust(top=0.85)
plt.subplot(1, 2, 1)
plt.gca().set_title('Returned by the dataset class: used for training')
plt.imshow(test_X_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
plt.subplot(1, 2, 2)
plt.gca().set_title('Global image holder: used for plotting.')
plt.imshow(test_imgs_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
plt.show()
        print('compare:', test_X_sample, test_imgs_sample)
def IOU(self,bbox1, bbox2):
'''Calculate overlap between two bounding boxes [x, y, w, h] as the area of intersection over the area of unity'''
x1, y1, w1, h1 = bbox1[0], bbox1[1], bbox1[2], bbox1[3]
x2, y2, w2, h2 = bbox2[0], bbox2[1], bbox2[2], bbox2[3]
w_I = min(x1 + w1, x2 + w2) - max(x1, x2)
h_I = min(y1 + h1, y2 + h2) - max(y1, y2)
if w_I <= 0 or h_I <= 0: # no overlap
return 0.
I = w_I * h_I
U = w1 * h1 + w2 * h2 - I
return I / U
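
    # Worked example for the formula above (hedged, illustrative numbers only):
    # bbox1 = [0, 0, 2, 2] and bbox2 = [1, 1, 2, 2] overlap in a 1x1 square, so
    # I = 1 and U = 4 + 4 - 1 = 7, giving IOU = 1/7 ≈ 0.143.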
def convertDefaultAnnotToCoord(self, annot):
'''
annot -> [x, y, w, h]
'''
w = annot[2] * self.WIDTH
h = annot[3] * self.HEIGHT
x = annot[0] * self.HEIGHT
y = annot[1] * self.HEIGHT
return [x,y,w,h]
def convertYoloAnnotToCoord(self, yolo_annot):
'''
yolo_annot -> [x, y, w, h]
'''
w = yolo_annot[2] * self.WIDTH
h = yolo_annot[3] * self.HEIGHT
x = (yolo_annot[0] * self.WIDTH) - (w/2)
y = (yolo_annot[1] * self.HEIGHT) - (h/2)
return [x,y,w,h]
def show_generated(self, i=0):
fig = plt.figure()
fig.subplots_adjust(top=0.85)
fig.suptitle('Generated image sample + GT')
plt.imshow(self.imgs[i], cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
for bbox in self.bboxes[i]:
plt.gca().add_patch(matplotlib.patches.Rectangle((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r', fc='none'))
plt.gca().legend(['GT'])
plt.show()
def plot_rectangle(self, img, bbox):
fig = plt.figure()
fig.suptitle('Plotting rectangle.')
fig.subplots_adjust(top=0.85)
plt.subplot(1, 1, 1)
plt.imshow(img, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
plt.gca().add_patch(matplotlib.patches.Rectangle((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r', fc='none'))
plt.show()
def check_dataset_image_compability(self, test_X_sample, test_imgs_sample):
fig = plt.figure(figsize=(12, 3))
fig.suptitle('check if the generated imgs match to the test_X slice image')
fig.subplots_adjust(top=0.85)
plt.subplot(1, 2, 1)
plt.gca().set_title('Returned by the dataset class: used for training')
plt.imshow(test_X_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
plt.subplot(1, 2, 2)
plt.gca().set_title('Global image holder: used for plotting.')
plt.imshow(test_imgs_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
plt.show()
print('compare:',test_X_sample,test_imgs_sample)
def show_predicted(self, pred_bboxes):
# Show a few images and predicted bounding boxes from the test dataset.
fig = plt.figure(figsize=(12, 3))
fig.subplots_adjust(top=0.85)
fig.suptitle('Prediction demonstration. Random samples.')
legend_plotted = False
for i_subplot in range(1, 11):
plt.subplot(1, 10, i_subplot)
i = np.random.randint(len(pred_bboxes))
plt.imshow(self.test_imgs[i], cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
for pred_bbox, exp_bbox in zip(pred_bboxes[i], self.test_bboxes[i]):
# print('before convertion: pred',pred_bbox, 'gt',exp_bbox)
pred_bbox = self.convertDefaultAnnotToCoord(pred_bbox)
# exp_bbox = self.convertDefaultAnnotToCoord(exp_bbox)
print('after convertion: pred',pred_bbox, 'gt',exp_bbox)
plt.gca().add_patch(matplotlib.patches.Rectangle((pred_bbox[0], pred_bbox[1]), pred_bbox[2], pred_bbox[3], ec='r', fc='none'))
#gt
plt.gca().add_patch(matplotlib.patches.Rectangle((exp_bbox[0], exp_bbox[1]), exp_bbox[2], exp_bbox[3], ec='b', fc='none'))
plt.annotate('IOU: {:.2f}'.format(self.IOU(pred_bbox, exp_bbox)), (pred_bbox[0], pred_bbox[1]+pred_bbox[3]+0.2), color='r')
if not legend_plotted:
legend_plotted = True
plt.gca().legend(['Pred','GT'],loc='upper center', bbox_to_anchor=(0.5, -0.5), fancybox=True)
plt.show()
# plt.savefig('plots/bw-single-rectangle_prediction_{0:%Y-%m-%d%H:%M:%S}.png'.format(datetime.datetime.now()), dpi=300)
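
# A minimal usage sketch (hedged): generating the toy dataset and checking one
# ground-truth box against itself (the IOU of a box with itself is 1.0).
#
#   dataset = JriekeBboxDataset()
#   train_X, train_y, test_X, test_y = dataset.generate()
#   dataset.show_generated(0)
#   print(dataset.IOU(dataset.bboxes[0][0], dataset.bboxes[0][0]))  # -> 1.0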
| [((440, 476), 'numpy.zeros', 'np.zeros', (['(num_imgs, num_objects, 4)'], {}), '((num_imgs, num_objects, 4))\n', (448, 476), True, 'import numpy as np\n'), ((497, 542), 'numpy.zeros', 'np.zeros', (['(num_imgs, self.WIDTH, self.HEIGHT)'], {}), '((num_imgs, self.WIDTH, self.HEIGHT))\n', (505, 542), True, 'import numpy as np\n'), ((1662, 1689), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 3)'}), '(figsize=(12, 3))\n', (1672, 1689), True, 'import matplotlib.pyplot as plt\n'), ((1821, 1841), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (1832, 1841), True, 'import matplotlib.pyplot as plt\n'), ((1930, 2052), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_X_sample'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(test_X_sample, cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (1940, 2052), True, 'import matplotlib.pyplot as plt\n'), ((2057, 2077), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (2068, 2077), True, 'import matplotlib.pyplot as plt\n'), ((2157, 2282), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_imgs_sample'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(test_imgs_sample, cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (2167, 2282), True, 'import matplotlib.pyplot as plt\n'), ((2286, 2296), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2294, 2296), True, 'import matplotlib.pyplot as plt\n'), ((3515, 3527), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3525, 3527), True, 'import matplotlib.pyplot as plt\n'), ((3626, 3746), 'matplotlib.pyplot.imshow', 'plt.imshow', (['self.imgs[i]'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(self.imgs[i], cmap='Greys', interpolation='none', origin='lower',\n extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (3636, 3746), True, 'import matplotlib.pyplot as plt\n'), ((3943, 3953), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3951, 3953), True, 'import matplotlib.pyplot as plt\n'), ((4011, 4023), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4021, 4023), True, 'import matplotlib.pyplot as plt\n'), ((4115, 4135), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(1)', '(1)'], {}), '(1, 1, 1)\n', (4126, 4135), True, 'import matplotlib.pyplot as plt\n'), ((4144, 4256), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(img, cmap='Greys', interpolation='none', origin='lower', extent=\n [0, self.WIDTH, 0, self.HEIGHT])\n", (4154, 4256), True, 'import matplotlib.pyplot as plt\n'), ((4375, 4385), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4383, 4385), True, 'import matplotlib.pyplot as plt\n'), ((4481, 4508), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 3)'}), '(figsize=(12, 3))\n', (4491, 4508), True, 'import matplotlib.pyplot as plt\n'), ((4640, 4660), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (4651, 4660), True, 'import matplotlib.pyplot as plt\n'), ((4749, 4871), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_X_sample'], {'cmap': 
'"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(test_X_sample, cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (4759, 4871), True, 'import matplotlib.pyplot as plt\n'), ((4876, 4896), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (4887, 4896), True, 'import matplotlib.pyplot as plt\n'), ((4976, 5101), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_imgs_sample'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(test_imgs_sample, cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (4986, 5101), True, 'import matplotlib.pyplot as plt\n'), ((5105, 5115), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5113, 5115), True, 'import matplotlib.pyplot as plt\n'), ((5312, 5339), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 3)'}), '(figsize=(12, 3))\n', (5322, 5339), True, 'import matplotlib.pyplot as plt\n'), ((6760, 6770), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6768, 6770), True, 'import matplotlib.pyplot as plt\n'), ((4280, 4369), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(bbox[0], bbox[1])', 'bbox[2]', 'bbox[3]'], {'ec': '"""r"""', 'fc': '"""none"""'}), "((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r',\n fc='none')\n", (4308, 4369), False, 'import matplotlib\n'), ((5527, 5556), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(10)', 'i_subplot'], {}), '(1, 10, i_subplot)\n', (5538, 5556), True, 'import matplotlib.pyplot as plt\n'), ((5621, 5747), 'matplotlib.pyplot.imshow', 'plt.imshow', (['self.test_imgs[i]'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(self.test_imgs[i], cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (5631, 5747), True, 'import matplotlib.pyplot as plt\n'), ((676, 735), 'numpy.random.randint', 'np.random.randint', (['min_object_size', 'max_object_size'], {'size': '(2)'}), '(min_object_size, max_object_size, size=2)\n', (693, 735), True, 'import numpy as np\n'), ((756, 792), 'numpy.random.randint', 'np.random.randint', (['(0)', '(self.WIDTH - w)'], {}), '(0, self.WIDTH - w)\n', (773, 792), True, 'import numpy as np\n'), ((813, 850), 'numpy.random.randint', 'np.random.randint', (['(0)', '(self.HEIGHT - h)'], {}), '(0, self.HEIGHT - h)\n', (830, 850), True, 'import numpy as np\n'), ((1850, 1859), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1857, 1859), True, 'import matplotlib.pyplot as plt\n'), ((2086, 2095), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (2093, 2095), True, 'import matplotlib.pyplot as plt\n'), ((3811, 3900), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(bbox[0], bbox[1])', 'bbox[2]', 'bbox[3]'], {'ec': '"""r"""', 'fc': '"""none"""'}), "((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r',\n fc='none')\n", (3839, 3900), False, 'import matplotlib\n'), ((4260, 4269), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4267, 4269), True, 'import matplotlib.pyplot as plt\n'), ((4669, 4678), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4676, 4678), True, 'import matplotlib.pyplot as plt\n'), ((4905, 4914), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4912, 4914), True, 'import matplotlib.pyplot as plt\n'), ((3791, 
3800), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (3798, 3800), True, 'import matplotlib.pyplot as plt\n'), ((3910, 3919), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (3917, 3919), True, 'import matplotlib.pyplot as plt\n'), ((6151, 6260), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(pred_bbox[0], pred_bbox[1])', 'pred_bbox[2]', 'pred_bbox[3]'], {'ec': '"""r"""', 'fc': '"""none"""'}), "((pred_bbox[0], pred_bbox[1]), pred_bbox[2],\n pred_bbox[3], ec='r', fc='none')\n", (6179, 6260), False, 'import matplotlib\n'), ((6314, 6419), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(exp_bbox[0], exp_bbox[1])', 'exp_bbox[2]', 'exp_bbox[3]'], {'ec': '"""b"""', 'fc': '"""none"""'}), "((exp_bbox[0], exp_bbox[1]), exp_bbox[2],\n exp_bbox[3], ec='b', fc='none')\n", (6342, 6419), False, 'import matplotlib\n'), ((6131, 6140), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6138, 6140), True, 'import matplotlib.pyplot as plt\n'), ((6294, 6303), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6301, 6303), True, 'import matplotlib.pyplot as plt\n'), ((6658, 6667), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6665, 6667), True, 'import matplotlib.pyplot as plt\n')] |
skeevey/PyBitmessage | src/knownnodes.py | 196d688b138393d1d540df3322844dfe7e7c02ba | import pickle
import threading
from bmconfigparser import BMConfigParser
import state
knownNodesLock = threading.Lock()
knownNodes = {}
knownNodesTrimAmount = 2000
def saveKnownNodes(dirName = None):
if dirName is None:
dirName = state.appdata
with knownNodesLock:
with open(dirName + 'knownnodes.dat', 'wb') as output:
pickle.dump(knownNodes, output)
def increaseRating(peer):
increaseAmount = 0.1
maxRating = 1
with knownNodesLock:
for stream in knownNodes.keys():
try:
knownNodes[stream][peer]["rating"] = min(knownNodes[stream][peer]["rating"] + increaseAmount, maxRating)
except KeyError:
pass
def decreaseRating(peer):
decreaseAmount = 0.1
minRating = -1
with knownNodesLock:
for stream in knownNodes.keys():
try:
knownNodes[stream][peer]["rating"] = max(knownNodes[stream][peer]["rating"] - decreaseAmount, minRating)
except KeyError:
pass
def trimKnownNodes(recAddrStream = 1):
if len(knownNodes[recAddrStream]) < BMConfigParser().get("knownnodes", "maxnodes"):
return
with knownNodesLock:
oldestList = sorted(knownNodes[recAddrStream], key=lambda x: x['lastseen'])[:knownNodesTrimAmount]
for oldest in oldestList:
del knownNodes[recAddrStream][oldest]
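
# A minimal usage sketch (hedged): peers are keyed per stream, and ratings are
# nudged up or down as connections succeed or fail. The peer value shown is a
# placeholder, not a real address.
#
#   knownNodes[1] = {some_peer: {"lastseen": 1500000000, "rating": 0.0}}
#   increaseRating(some_peer)   # -> rating capped at  1.0 in steps of 0.1
#   decreaseRating(some_peer)   # -> rating floored at -1.0 in steps of 0.1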
| [((105, 121), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (119, 121), False, 'import threading\n'), ((360, 391), 'pickle.dump', 'pickle.dump', (['knownNodes', 'output'], {}), '(knownNodes, output)\n', (371, 391), False, 'import pickle\n'), ((1121, 1137), 'bmconfigparser.BMConfigParser', 'BMConfigParser', ([], {}), '()\n', (1135, 1137), False, 'from bmconfigparser import BMConfigParser\n')] |
whamcloud/iml-agent | chroma_agent/action_plugins/manage_node.py | fecb2468fd6edc822f3ab37ced444d98d8725730 | # Copyright (c) 2018 DDN. All rights reserved.
# Use of this source code is governed by a MIT-style
# license that can be found in the LICENSE file.
import os
from chroma_agent.lib.shell import AgentShell
from chroma_agent.log import console_log
from chroma_agent.device_plugins.action_runner import CallbackAfterResponse
from chroma_agent.lib.pacemaker import PacemakerConfig
def ssi(runlevel):
# force a manual failover by failing a node
AgentShell.try_run(["sync"])
AgentShell.try_run(["sync"])
AgentShell.try_run(["init", runlevel])
def fail_node():
ssi("0")
def stonith(node):
p_cfg = PacemakerConfig()
# TODO: signal that manager that a STONITH has been done so that it
# doesn't treat it as an AWOL
console_log.info("Rebooting %s per a STONITH request" % node)
p_cfg.get_node(node).fence_reboot()
def shutdown_server(halt=True, at_time="now"):
def _shutdown():
console_log.info("Initiating server shutdown per manager request")
# This will initiate a "nice" shutdown with a wall from root, etc.
AgentShell.try_run(["shutdown", "-H" if halt else "-h", at_time])
console_log.info("Terminating")
os._exit(0)
raise CallbackAfterResponse(None, _shutdown)
def reboot_server(at_time="now"):
def _reboot():
console_log.info("Initiating server reboot per manager request")
# reboot(8) just calls shutdown anyhow.
AgentShell.try_run(["shutdown", "-r", at_time])
console_log.info("Terminating")
os._exit(0)
raise CallbackAfterResponse(None, _reboot)
ACTIONS = [reboot_server, shutdown_server, fail_node, stonith]
| [((453, 481), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['sync']"], {}), "(['sync'])\n", (471, 481), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((486, 514), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['sync']"], {}), "(['sync'])\n", (504, 514), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((519, 557), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['init', runlevel]"], {}), "(['init', runlevel])\n", (537, 557), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((623, 640), 'chroma_agent.lib.pacemaker.PacemakerConfig', 'PacemakerConfig', ([], {}), '()\n', (638, 640), False, 'from chroma_agent.lib.pacemaker import PacemakerConfig\n'), ((758, 819), 'chroma_agent.log.console_log.info', 'console_log.info', (["('Rebooting %s per a STONITH request' % node)"], {}), "('Rebooting %s per a STONITH request' % node)\n", (774, 819), False, 'from chroma_agent.log import console_log\n'), ((1227, 1265), 'chroma_agent.device_plugins.action_runner.CallbackAfterResponse', 'CallbackAfterResponse', (['None', '_shutdown'], {}), '(None, _shutdown)\n', (1248, 1265), False, 'from chroma_agent.device_plugins.action_runner import CallbackAfterResponse\n'), ((1570, 1606), 'chroma_agent.device_plugins.action_runner.CallbackAfterResponse', 'CallbackAfterResponse', (['None', '_reboot'], {}), '(None, _reboot)\n', (1591, 1606), False, 'from chroma_agent.device_plugins.action_runner import CallbackAfterResponse\n'), ((939, 1005), 'chroma_agent.log.console_log.info', 'console_log.info', (['"""Initiating server shutdown per manager request"""'], {}), "('Initiating server shutdown per manager request')\n", (955, 1005), False, 'from chroma_agent.log import console_log\n'), ((1089, 1154), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['shutdown', '-H' if halt else '-h', at_time]"], {}), "(['shutdown', '-H' if halt else '-h', at_time])\n", (1107, 1154), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((1164, 1195), 'chroma_agent.log.console_log.info', 'console_log.info', (['"""Terminating"""'], {}), "('Terminating')\n", (1180, 1195), False, 'from chroma_agent.log import console_log\n'), ((1204, 1215), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\n', (1212, 1215), False, 'import os\n'), ((1329, 1393), 'chroma_agent.log.console_log.info', 'console_log.info', (['"""Initiating server reboot per manager request"""'], {}), "('Initiating server reboot per manager request')\n", (1345, 1393), False, 'from chroma_agent.log import console_log\n'), ((1450, 1497), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['shutdown', '-r', at_time]"], {}), "(['shutdown', '-r', at_time])\n", (1468, 1497), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((1507, 1538), 'chroma_agent.log.console_log.info', 'console_log.info', (['"""Terminating"""'], {}), "('Terminating')\n", (1523, 1538), False, 'from chroma_agent.log import console_log\n'), ((1547, 1558), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\n', (1555, 1558), False, 'import os\n')] |
ian-r-rose/census-data-downloader | census_data_downloader/core/tables.py | f8ac9d773e6d3f52be87bf916a2e32249391f966 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
A base class that governs how to download and process tables from the Census API.
"""
import os
import logging
import pathlib
from . import geotypes
from . import decorators
logger = logging.getLogger(__name__)
class BaseTableConfig(object):
"""
Configures how to download and process tables from the Census API.
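    Typical use is via a subclass that names the Census table to pull, e.g.
    (the table name below is illustrative only)::
        class MedianAge(BaseTableConfig):
            RAW_TABLE_NAME = "B01002"
        MedianAge(api_key="...", years=2017).download_everything()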
"""
THIS_DIR = pathlib.Path(__file__).parent
PARENT_DIR = THIS_DIR.parent
# All available years
YEAR_LIST = [
2017,
2016,
2015,
2014,
2013,
2012,
2011,
2010,
2009
]
# All available geographies
GEOTYPE_LIST = (
"nationwide",
"regions",
"divisions",
"states",
"congressional_districts",
"state_legislative_upper_districts",
"state_legislative_lower_districts",
"counties",
"places",
"urban_areas",
"msas",
"csas",
"pumas",
"nectas",
"cnectas",
"aiannh_homelands",
"tracts",
"zctas",
"unified_school_districts",
"elementary_school_districts",
"secondary_school_districts"
)
def __init__(
self,
api_key=None,
source="acs5",
years=None,
data_dir=None,
force=False
):
"""
Configuration.
"""
# Set the inputs
self.CENSUS_API_KEY = os.getenv("CENSUS_API_KEY", api_key)
if not self.CENSUS_API_KEY:
raise NotImplementedError("Census API key required. Pass it as the first argument.")
self.source = source
self.force = force
#
# Allow custom years for data download, defaulting to most recent year
#
# If they want all the years, give it to them.
if years == "all":
self.years_to_download = self.YEAR_LIST
# If the user provides a year give them that.
elif isinstance(years, int):
self.years_to_download = [years]
# Or if they provide years as a list, give those then.
elif isinstance(years, list):
self.years_to_download = list(map(int, years))
# If they provided nothing, default to the latest year of data
elif years is None:
self.years_to_download = [max(self.YEAR_LIST), ]
# Validate the years
for year in self.years_to_download:
if year not in self.YEAR_LIST:
                error_msg = ("Data only available for the years "
                             f"{self.YEAR_LIST[-1]}-{self.YEAR_LIST[0]}.")
raise NotImplementedError(error_msg)
# Set the data directories
if data_dir:
self.data_dir = pathlib.Path(str(data_dir))
else:
self.data_dir = self.PARENT_DIR.joinpath("data")
self.raw_data_dir = self.data_dir.joinpath("raw")
self.processed_data_dir = self.data_dir.joinpath("processed")
# Make sure they exist
if not self.data_dir.exists():
self.data_dir.mkdir()
if not self.raw_data_dir.exists():
self.raw_data_dir.mkdir()
if not self.processed_data_dir.exists():
self.processed_data_dir.mkdir()
@property
def censusreporter_url(self):
"""
Returns the URL of the Census Reporter page explaining the ACS table.
"""
return f"https://censusreporter.org/tables/{self.RAW_TABLE_NAME}/"
#
# Geotype downloaders
#
@decorators.downloader
def download_nationwide(self):
"""
Download nationwide data.
"""
return geotypes.NationwideDownloader
@decorators.downloader
def download_regions(self):
"""
Download data for all regions.
"""
return geotypes.RegionsDownloader
@decorators.downloader
def download_divisions(self):
"""
Download data for all divisions.
"""
return geotypes.DivisionsDownloader
@decorators.downloader
def download_states(self):
"""
Download data for all states.
"""
return geotypes.StatesDownloader
@decorators.downloader
def download_congressional_districts(self):
"""
Download data for all Congressional districts.
"""
return geotypes.CongressionalDistrictsDownloader
@decorators.downloader
def download_state_legislative_upper_districts(self):
"""
Download data for all Census upper legislative districts in the provided state.
"""
return geotypes.StateLegislativeUpperDistrictsDownloader
@decorators.downloader
def download_state_legislative_lower_districts(self):
"""
Download data for all Census lower legislative districts in the provided state.
"""
return geotypes.StateLegislativeLowerDistrictsDownloader
@decorators.downloader
def download_counties(self):
"""
Download data for all counties.
"""
return geotypes.CountiesDownloader
@decorators.downloader
def download_places(self):
"""
Download data for all Census designated places.
"""
return geotypes.PlacesDownloader
@decorators.downloader
def download_urban_areas(self):
"""
Download data for all urban areas
"""
return geotypes.UrbanAreasDownloader
@decorators.downloader
def download_msas(self):
"""
        Download data for Metropolitan Statistical Areas.
"""
return geotypes.MsasDownloader
@decorators.downloader
def download_csas(self):
"""
Download data for Combined Statistical Areas.
"""
return geotypes.CsasDownloader
@decorators.downloader
def download_pumas(self):
"""
Download data for Public Use Microdata Areas.
"""
return geotypes.PumasDownloader
@decorators.downloader
def download_nectas(self):
"""
Download data for New England cities and towns.
"""
return geotypes.NectasDownloader
@decorators.downloader
def download_cnectas(self):
"""
Download data for combined New England cities and towns.
"""
return geotypes.CnectasDownloader
@decorators.downloader
def download_aiannh_homelands(self):
"""
        Download data for American Indian, Alaska Native and Native Hawaiian (AIANNH) homelands.
"""
return geotypes.AiannhHomelandsDownloader
@decorators.downloader
def download_tracts(self):
"""
Download data for all Census tracts in the provided state.
"""
return geotypes.TractsDownloader
@decorators.downloader
def download_zctas(self):
"""
Download data for Zip Code Tabulation Areas
"""
return geotypes.ZctasDownloader
@decorators.downloader
def download_unified_school_districts(self):
"""
Download data for unified school districts.
"""
return geotypes.UnifiedSchoolDistrictsDownloader
@decorators.downloader
def download_elementary_school_districts(self):
"""
Download data for elementary school districts.
"""
return geotypes.ElementarySchoolDistrictsDownloader
@decorators.downloader
def download_secondary_school_districts(self):
"""
Download data for secondary school districts.
"""
return geotypes.SecondarySchoolDistrictsDownloader
def download_everything(self):
"""
Download 'em all.
"""
for geo in self.GEOTYPE_LIST:
print(geo)
# Get the downloader function
dl = getattr(self, f"download_{geo}", None)
# Validate it
if not dl or not callable(dl):
raise NotImplementedError(f"Invalid geography type: {geo}")
# Run it
try:
dl()
except NotImplementedError:
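                # Not every geography is available for every table, so skip it quietly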
pass
| [((237, 264), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (254, 264), False, 'import logging\n'), ((400, 422), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (412, 422), False, 'import pathlib\n'), ((1474, 1510), 'os.getenv', 'os.getenv', (['"""CENSUS_API_KEY"""', 'api_key'], {}), "('CENSUS_API_KEY', api_key)\n", (1483, 1510), False, 'import os\n')] |
loujine/sgf2ebook | sgf2ebook.py | 13c87056646cc6c06485b129221ab2028e67ef95 | #!/usr/bin/env python3
import argparse
import os
from pathlib import Path
import shutil
import subprocess
import sys
from tempfile import TemporaryDirectory
from uuid import uuid4
from zipfile import ZipFile
import jinja2
import sente # type: ignore
__version__ = (1, 0, 0)
SGF_RENDER_EXECUTABLE = './sgf-render'
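# NB the external 'sgf-render' CLI binary is expected to sit next to this script;
# it performs the actual SGF -> SVG diagram rendering invoked below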
TEMPLATEDIR = Path(__file__, '..', 'epub_template').resolve()
def load_sgf(sgfpath: Path):
game = sente.sgf.load(str(sgfpath))
comments = {}
seq = game.get_default_sequence()
for idx, move in enumerate(seq, 1):
game.play(move)
if game.comment:
comments[idx] = game.comment
return {
# read only main sequence, not variations
'nb_moves': len(seq),
'metadata': game.get_properties(),
'comments': comments,
}
def main(sgfpath: Path, output_path: Path) -> None:
print()
print(f'Load content of {sgfpath}')
try:
sgf_content = load_sgf(sgfpath)
except (sente.exceptions.InvalidSGFException,
sente.exceptions.IllegalMoveException):
print(f'Could not read {sgfpath}, skipping')
return
nb_moves = sgf_content['nb_moves']
metadata = sgf_content['metadata']
comments = sgf_content['comments']
uuid = uuid4()
with TemporaryDirectory() as tmpdir:
print('Prepare structure of the ebook')
shutil.copytree(TEMPLATEDIR, tmpdir, dirs_exist_ok=True)
template = jinja2.Template(
TEMPLATEDIR.joinpath('EPUB', 'Text', 'page_001.html').open().read())
print('Prepare SVG diagrams')
svgdirpath = Path(tmpdir, 'EPUB', 'Images')
for move in range(1, nb_moves + 1):
svgpath = f'diagram_{move:03}.svg'
# generate SVG files with sgf-render
try:
subprocess.check_call([
SGF_RENDER_EXECUTABLE,
str(sgfpath),
'--move-numbers',
'--first-move-number', str(move),
'-n', str(move),
'--style', 'minimalist',
'-o', svgdirpath.joinpath(svgpath),
])
except subprocess.CalledProcessError:
print(f'Move {move} could not be converted to SVG')
continue
# replace move number in SVG
# not possible directly in sgf-render invocation at the moment
svg_content = svgdirpath.joinpath(svgpath).open().read()
svgdirpath.joinpath(svgpath).open('w').write(
svg_content.replace('>1<', f'>{move}<', 1))
# create HTML page with SVG element
html_content = template.render(
title=sgfpath.stem,
svgpath=svgpath,
info=metadata,
first_flag=(move == 1),
last_flag=(move == nb_moves),
comment=comments.get(move, ''),
)
with Path(tmpdir, 'EPUB', 'Text', f'page_{move:03}.html').open('w') as fd:
fd.write(html_content)
# Declare all HTML/SVG files in master file
print('Prepare content.opf file')
template = jinja2.Template(
TEMPLATEDIR.joinpath('EPUB', 'content.opf').open().read())
opf_content = template.render(
title=sgfpath.stem,
creator='sgf2ebook',
UUID=uuid,
svgpath=sorted(svgdirpath.glob('*.svg')),
enumerate=enumerate,
)
with Path(tmpdir, 'EPUB', 'content.opf').open('w') as fd:
fd.write(opf_content)
# Generate table of contents
print('Prepare table of contents')
template = jinja2.Template(
TEMPLATEDIR.joinpath('EPUB', 'toc.ncx').open().read())
toc_content = template.render(
title=sgfpath.stem,
UUID=uuid,
nb_moves=nb_moves,
range=range,
)
with Path(tmpdir, 'EPUB', 'toc.ncx').open('w') as fd:
fd.write(toc_content)
# zip all content in EPUB file
output_path.mkdir(exist_ok=True, parents=True)
output_name = f"{metadata.get('EV', 'unknown_event')}{'_' if 'RO' in metadata else ''}{metadata.get('RO', '')}.epub".replace(' ', '_')
with ZipFile(output_path.joinpath(output_name), 'w') as zf:
os.chdir(tmpdir)
# "The first file in the OCF ZIP Container MUST be the mimetype file"
zf.write('mimetype')
for root, dirs, files in os.walk('.'):
for file in sorted(files):
if file != 'mimetype':
zf.write(Path(root, file))
os.chdir(Path(__file__).parent)
print(f'{output_path.joinpath(output_name)} generated')
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='')
parser.add_argument('--input-path', '-i', help='Input files or directory')
parser.add_argument('--output-path', '-o', help='Output directory')
args = parser.parse_args()
path = Path(args.input_path)
outpath = Path(args.output_path)
if not path.exists():
print(f'Input path {path} not found')
sys.exit(1)
if path.is_file():
main(path, outpath)
if path.is_dir():
for filepath in sorted(path.rglob('*.sgf')):
main(filepath, outpath.joinpath(filepath.parent.relative_to(path)))
| [((1263, 1270), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (1268, 1270), False, 'from uuid import uuid4\n'), ((4822, 4861), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '""""""'}), "(description='')\n", (4845, 4861), False, 'import argparse\n'), ((5065, 5086), 'pathlib.Path', 'Path', (['args.input_path'], {}), '(args.input_path)\n', (5069, 5086), False, 'from pathlib import Path\n'), ((5101, 5123), 'pathlib.Path', 'Path', (['args.output_path'], {}), '(args.output_path)\n', (5105, 5123), False, 'from pathlib import Path\n'), ((332, 369), 'pathlib.Path', 'Path', (['__file__', '""".."""', '"""epub_template"""'], {}), "(__file__, '..', 'epub_template')\n", (336, 369), False, 'from pathlib import Path\n'), ((1281, 1301), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ([], {}), '()\n', (1299, 1301), False, 'from tempfile import TemporaryDirectory\n'), ((1369, 1425), 'shutil.copytree', 'shutil.copytree', (['TEMPLATEDIR', 'tmpdir'], {'dirs_exist_ok': '(True)'}), '(TEMPLATEDIR, tmpdir, dirs_exist_ok=True)\n', (1384, 1425), False, 'import shutil\n'), ((1604, 1634), 'pathlib.Path', 'Path', (['tmpdir', '"""EPUB"""', '"""Images"""'], {}), "(tmpdir, 'EPUB', 'Images')\n", (1608, 1634), False, 'from pathlib import Path\n'), ((5204, 5215), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5212, 5215), False, 'import sys\n'), ((4356, 4372), 'os.chdir', 'os.chdir', (['tmpdir'], {}), '(tmpdir)\n', (4364, 4372), False, 'import os\n'), ((4525, 4537), 'os.walk', 'os.walk', (['"""."""'], {}), "('.')\n", (4532, 4537), False, 'import os\n'), ((3511, 3546), 'pathlib.Path', 'Path', (['tmpdir', '"""EPUB"""', '"""content.opf"""'], {}), "(tmpdir, 'EPUB', 'content.opf')\n", (3515, 3546), False, 'from pathlib import Path\n'), ((3955, 3986), 'pathlib.Path', 'Path', (['tmpdir', '"""EPUB"""', '"""toc.ncx"""'], {}), "(tmpdir, 'EPUB', 'toc.ncx')\n", (3959, 3986), False, 'from pathlib import Path\n'), ((4697, 4711), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (4701, 4711), False, 'from pathlib import Path\n'), ((2963, 3015), 'pathlib.Path', 'Path', (['tmpdir', '"""EPUB"""', '"""Text"""', 'f"""page_{move:03}.html"""'], {}), "(tmpdir, 'EPUB', 'Text', f'page_{move:03}.html')\n", (2967, 3015), False, 'from pathlib import Path\n'), ((4658, 4674), 'pathlib.Path', 'Path', (['root', 'file'], {}), '(root, file)\n', (4662, 4674), False, 'from pathlib import Path\n')] |
bolcom/serenade-experiments-sigmod | vmis_sql_python/evaluation/metrics/popularity.py | 0a4c7f19d800d1c2784ea5536abb1a628cb12f7a | class Popularity:
'''
Popularity( length=20 )
Used to iteratively calculate the average overall popularity of an algorithm's recommendations.
Parameters
-----------
length : int
        Popularity@length
    training_df : dataframe
        used to compute per-item popularity scores from the training data
'''
def __init__(self, length=20, training_df=None):
        self.length = length
self.sum = 0
self.tests = 0
self.train_actions = len(training_df.index)
#group the data by the itemIds
grp = training_df.groupby('ItemId')
        # count the occurrence of every itemid in the training dataset
self.pop_scores = grp.size()
#sort it according to the score
self.pop_scores.sort_values(ascending=False, inplace=True)
#normalize
self.pop_scores = self.pop_scores / self.pop_scores[:1].values[0]
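    # Illustrative usage (names follow this class's expectations):
    #   metric = Popularity(length=20, training_df=train)   # train has an 'ItemId' column
    #   metric.add(scores, next_items)   # scores: pandas Series of scores indexed by item id
    #   print(metric.result())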
def add(self, result, next_items, for_item=0, session=0, pop_bin=None, position=None):
'''
Update the metric with a result set and the correct next item.
Result must be sorted correctly.
Parameters
--------
result: pandas.Series
Series of scores with the item id as the index
'''
        # only keep the first k predictions
recs = result[:self.length]
#take the unique values out of those top scorers
items = recs.index.unique()
self.sum += ( self.pop_scores[ items ].sum() / len( items ) )
self.tests += 1
def result(self):
'''
Return a tuple of a description string and the current averaged value
'''
return ("Popularity@" + str( self.length ) + ": "), ( self.sum / self.tests )
| [] |
amberdiehl/dandeliondiary_project | dandeliondiary/household/urls.py | e9bace5bd7980def6ca763840ab5b38f1e05cd3d | from django.conf.urls import include, url
from . import views
urlpatterns = [
url(r'^settings$', views.household_dashboard, name='household_dashboard'),
url(r'^myinfo$', views.my_info, name='my_info'),
url(r'^profile$', views.household_profile, name='maintain_household'),
url(r'^members$', views.household_members, name='maintain_members'),
url(r'^vehicles$', views.household_vehicles, name='maintain_vehicles'),
url(r'^ajax/models-by-make/(?P<make_id>\d+)/$', views.ajax_models_by_make),
url(r'^ajax/makes-by-type/(?P<type_id>\d+)/$', views.ajax_makes_by_type),
url(r'^ajax/add-make/(?P<type_key>\d+)/(?P<make>[\w ]{1,50})/$', views.ajax_add_make),
url(r'^ajax/add-model/(?P<make_key>\d+)/(?P<model>[\w -]{1,128})/$', views.ajax_add_model),
url(r'^ajax/delete-invite/$', views.ajax_delete_invite),
url(r'^ajax/change-member-status/$', views.ajax_change_member_status),
]
| [((83, 155), 'django.conf.urls.url', 'url', (['"""^settings$"""', 'views.household_dashboard'], {'name': '"""household_dashboard"""'}), "('^settings$', views.household_dashboard, name='household_dashboard')\n", (86, 155), False, 'from django.conf.urls import include, url\n'), ((162, 208), 'django.conf.urls.url', 'url', (['"""^myinfo$"""', 'views.my_info'], {'name': '"""my_info"""'}), "('^myinfo$', views.my_info, name='my_info')\n", (165, 208), False, 'from django.conf.urls import include, url\n'), ((215, 283), 'django.conf.urls.url', 'url', (['"""^profile$"""', 'views.household_profile'], {'name': '"""maintain_household"""'}), "('^profile$', views.household_profile, name='maintain_household')\n", (218, 283), False, 'from django.conf.urls import include, url\n'), ((290, 356), 'django.conf.urls.url', 'url', (['"""^members$"""', 'views.household_members'], {'name': '"""maintain_members"""'}), "('^members$', views.household_members, name='maintain_members')\n", (293, 356), False, 'from django.conf.urls import include, url\n'), ((363, 432), 'django.conf.urls.url', 'url', (['"""^vehicles$"""', 'views.household_vehicles'], {'name': '"""maintain_vehicles"""'}), "('^vehicles$', views.household_vehicles, name='maintain_vehicles')\n", (366, 432), False, 'from django.conf.urls import include, url\n'), ((439, 513), 'django.conf.urls.url', 'url', (['"""^ajax/models-by-make/(?P<make_id>\\\\d+)/$"""', 'views.ajax_models_by_make'], {}), "('^ajax/models-by-make/(?P<make_id>\\\\d+)/$', views.ajax_models_by_make)\n", (442, 513), False, 'from django.conf.urls import include, url\n'), ((519, 591), 'django.conf.urls.url', 'url', (['"""^ajax/makes-by-type/(?P<type_id>\\\\d+)/$"""', 'views.ajax_makes_by_type'], {}), "('^ajax/makes-by-type/(?P<type_id>\\\\d+)/$', views.ajax_makes_by_type)\n", (522, 591), False, 'from django.conf.urls import include, url\n'), ((597, 688), 'django.conf.urls.url', 'url', (['"""^ajax/add-make/(?P<type_key>\\\\d+)/(?P<make>[\\\\w ]{1,50})/$"""', 'views.ajax_add_make'], {}), "('^ajax/add-make/(?P<type_key>\\\\d+)/(?P<make>[\\\\w ]{1,50})/$', views.\n ajax_add_make)\n", (600, 688), False, 'from django.conf.urls import include, url\n'), ((688, 784), 'django.conf.urls.url', 'url', (['"""^ajax/add-model/(?P<make_key>\\\\d+)/(?P<model>[\\\\w -]{1,128})/$"""', 'views.ajax_add_model'], {}), "('^ajax/add-model/(?P<make_key>\\\\d+)/(?P<model>[\\\\w -]{1,128})/$', views\n .ajax_add_model)\n", (691, 784), False, 'from django.conf.urls import include, url\n'), ((784, 838), 'django.conf.urls.url', 'url', (['"""^ajax/delete-invite/$"""', 'views.ajax_delete_invite'], {}), "('^ajax/delete-invite/$', views.ajax_delete_invite)\n", (787, 838), False, 'from django.conf.urls import include, url\n'), ((845, 913), 'django.conf.urls.url', 'url', (['"""^ajax/change-member-status/$"""', 'views.ajax_change_member_status'], {}), "('^ajax/change-member-status/$', views.ajax_change_member_status)\n", (848, 913), False, 'from django.conf.urls import include, url\n')] |
devinbalkind/eden | private/templates/NYC/config.py | d5a684eae537432eb2c7d954132484a4714ca8fb | # -*- coding: utf-8 -*-
try:
# Python 2.7
from collections import OrderedDict
except:
# Python 2.6
from gluon.contrib.simplejson.ordered_dict import OrderedDict
from gluon import current
from gluon.html import A, URL
from gluon.storage import Storage
from s3 import s3_fullname
T = current.T
settings = current.deployment_settings
"""
Template settings for NYC Prepared
"""
# Pre-Populate
settings.base.prepopulate = ("NYC",)
settings.base.system_name = T("NYC Prepared")
settings.base.system_name_short = T("NYC Prepared")
# Theme (folder to use for views/layout.html)
settings.base.theme = "NYC"
settings.ui.formstyle_row = "bootstrap"
settings.ui.formstyle = "bootstrap"
settings.ui.filter_formstyle = "table_inline"
settings.msg.parser = "NYC"
# Uncomment to Hide the language toolbar
settings.L10n.display_toolbar = False
# Default timezone for users
settings.L10n.utc_offset = "UTC -0500"
# Uncomment these to use US-style dates in English
settings.L10n.date_format = "%m-%d-%Y"
# Start week on Sunday
settings.L10n.firstDOW = 0
# Number formats (defaults to ISO 31-0)
# Decimal separator for numbers (defaults to ,)
settings.L10n.decimal_separator = "."
# Thousands separator for numbers (defaults to space)
settings.L10n.thousands_separator = ","
# Default Country Code for telephone numbers
settings.L10n.default_country_code = 1
# Enable this to change the label for 'Mobile Phone'
settings.ui.label_mobile_phone = "Cell Phone"
# Enable this to change the label for 'Postcode'
settings.ui.label_postcode = "ZIP Code"
# Uncomment to disable responsive behavior of datatables
# - Disabled until tested
settings.ui.datatables_responsive = False
# PDF to Letter
settings.base.paper_size = T("Letter")
# Restrict the Location Selector to just certain countries
# NB This can also be over-ridden for specific contexts later
# e.g. Activities filtered to those of parent Project
settings.gis.countries = ("US",)
settings.fin.currencies = {
"USD" : T("United States Dollars"),
}
settings.L10n.languages = OrderedDict([
("en", "English"),
("es", "Español"),
])
# Authentication settings
# These settings should be changed _after_ the 1st (admin) user is
# registered in order to secure the deployment
# Should users be allowed to register themselves?
settings.security.self_registration = "index"
# Do new users need to verify their email address?
settings.auth.registration_requires_verification = True
# Do new users need to be approved by an administrator prior to being able to login?
settings.auth.registration_requires_approval = True
# Always notify the approver of a new (verified) user, even if the user is automatically approved
#settings.auth.always_notify_approver = False
# Uncomment this to request the Mobile Phone when a user registers
settings.auth.registration_requests_mobile_phone = True
# Uncomment this to request the Organisation when a user registers
settings.auth.registration_requests_organisation = True
# Uncomment this to request the Site when a user registers
#settings.auth.registration_requests_site = True
# Roles that newly-registered users get automatically
#settings.auth.registration_roles = { 0: ["comms_dispatch"]}
#settings.auth.registration_link_user_to = {"staff":T("Staff"),
# #"volunteer":T("Volunteer")
# }
settings.auth.registration_link_user_to_default = "staff"
settings.security.policy = 5 # Controller, Function & Table ACLs
# Enable this to have Open links in IFrames open a full page in a new tab
settings.ui.iframe_opens_full = True
settings.ui.label_attachments = "Media"
settings.ui.update_label = "Edit"
# Uncomment to disable checking that LatLons are within boundaries of their parent
#settings.gis.check_within_parent_boundaries = False
# GeoNames username
settings.gis.geonames_username = "eden_nyc"
# Uncomment to show created_by/modified_by using Names not Emails
settings.ui.auth_user_represent = "name"
# Record Approval
settings.auth.record_approval = True
settings.auth.record_approval_required_for = ("org_organisation",)
# -----------------------------------------------------------------------------
# Audit
def audit_write(method, tablename, form, record, representation):
if not current.auth.user:
# Don't include prepop
return False
if tablename in ("cms_post",
"org_facility",
"org_organisation",
"req_req",
):
# Perform normal Audit
return True
else:
# Don't Audit non user-visible resources
return False
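# Register the callback: the audit mechanism consults settings.security.audit_write
# on each write operation to decide whether an audit record should be stored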
settings.security.audit_write = audit_write
# -----------------------------------------------------------------------------
# CMS
# Uncomment to use Bookmarks in Newsfeed
settings.cms.bookmarks = True
# Uncomment to have the Filter form in Newsfeed be open by default
settings.cms.filter_open = True
# Uncomment to adjust filters in Newsfeed when clicking on locations instead of opening the profile page
settings.cms.location_click_filters = True
# Uncomment to use organisation_id instead of created_by in Newsfeed
settings.cms.organisation = "post_organisation.organisation_id"
# Uncomment to use org_group_id in Newsfeed
settings.cms.organisation_group = "post_organisation_group.group_id"
# Uncomment to use person_id instead of created_by in Newsfeed
settings.cms.person = "person_id"
# Uncomment to use Rich Text editor in Newsfeed
settings.cms.richtext = True
# Uncomment to show Links in Newsfeed
settings.cms.show_links = True
# Uncomment to show Tags in Newsfeed
settings.cms.show_tags = True
# Uncomment to show post Titles in Newsfeed
settings.cms.show_titles = True
# -----------------------------------------------------------------------------
# Inventory Management
# Uncomment to customise the label for Facilities in Inventory Management
settings.inv.facility_label = "Facility"
# Uncomment if you need a simpler (but less accountable) process for managing stock levels
#settings.inv.direct_stock_edits = True
# Uncomment to call Stock Adjustments, 'Stock Counts'
settings.inv.stock_count = True
# Uncomment to not track pack values
settings.inv.track_pack_values = False
settings.inv.send_show_org = False
# Types common to both Send and Receive
settings.inv.shipment_types = {
1: T("Other Warehouse")
}
settings.inv.send_types = {
#21: T("Distribution")
}
settings.inv.send_type_default = 1
settings.inv.item_status = {
#0: current.messages["NONE"],
#1: T("Dump"),
#2: T("Sale"),
#3: T("Reject"),
#4: T("Surplus")
}
# -----------------------------------------------------------------------------
# Organisations
#
# Enable the use of Organisation Groups
settings.org.groups = "Network"
# Make Services Hierarchical
settings.org.services_hierarchical = True
# Set the label for Sites
settings.org.site_label = "Facility"
#settings.org.site_label = "Location"
# Uncomment to show the date when a Site (Facilities-only for now) was last contacted
settings.org.site_last_contacted = True
# Enable certain fields just for specific Organisations
# empty list => disabled for all (including Admin)
#settings.org.dependent_fields = { \
# "pr_person_details.mother_name" : [],
# "pr_person_details.father_name" : [],
# "pr_person_details.company" : [],
# "pr_person_details.affiliations" : [],
# "vol_volunteer.active" : [],
# "vol_volunteer_cluster.vol_cluster_type_id" : [],
# "vol_volunteer_cluster.vol_cluster_id" : [],
# "vol_volunteer_cluster.vol_cluster_position_id" : [],
# }
# Uncomment to use an Autocomplete for Site lookup fields
settings.org.site_autocomplete = True
# Extra fields to search in Autocompletes & display in Representations
settings.org.site_autocomplete_fields = ("organisation_id$name",
"location_id$addr_street",
)
# Uncomment to hide inv & req tabs from Sites
#settings.org.site_inv_req_tabs = True
# -----------------------------------------------------------------------------
def facility_marker_fn(record):
"""
Function to decide which Marker to use for Facilities Map
@ToDo: Legend
"""
db = current.db
s3db = current.s3db
table = db.org_facility_type
ltable = db.org_site_facility_type
query = (ltable.site_id == record.site_id) & \
(ltable.facility_type_id == table.id)
rows = db(query).select(table.name)
types = [row.name for row in rows]
# Use Marker in preferential order
if "Hub" in types:
marker = "warehouse"
elif "Medical Clinic" in types:
marker = "hospital"
elif "Food" in types:
marker = "food"
elif "Relief Site" in types:
marker = "asset"
elif "Residential Building" in types:
marker = "residence"
#elif "Shelter" in types:
# marker = "shelter"
else:
# Unknown
marker = "office"
if settings.has_module("req"):
# Colour code by open/priority requests
reqs = record.reqs
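        # reqs holds the priority of open requests at this site: 3=High, 2=Medium, 1=Low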
if reqs == 3:
# High
marker = "%s_red" % marker
elif reqs == 2:
# Medium
marker = "%s_yellow" % marker
elif reqs == 1:
# Low
marker = "%s_green" % marker
mtable = db.gis_marker
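    # Look up the marker by name; fall back to the generic "office" marker if it is missing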
try:
marker = db(mtable.name == marker).select(mtable.image,
mtable.height,
mtable.width,
cache=s3db.cache,
limitby=(0, 1)
).first()
except:
marker = db(mtable.name == "office").select(mtable.image,
mtable.height,
mtable.width,
cache=s3db.cache,
limitby=(0, 1)
).first()
return marker
# -----------------------------------------------------------------------------
def org_facility_onvalidation(form):
"""
Default the name to the Street Address
"""
form_vars = form.vars
name = form_vars.get("name", None)
if name:
return
address = form_vars.get("address", None)
if address:
form_vars.name = address
else:
# We need a default
form_vars.name = current.db.org_facility.location_id.represent(form_vars.location_id)
# -----------------------------------------------------------------------------
def customise_org_facility_controller(**attr):
s3db = current.s3db
s3 = current.response.s3
# Tell the client to request per-feature markers
s3db.configure("org_facility", marker_fn=facility_marker_fn)
# Custom PreP
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
if not result:
return False
if r.method not in ("read", "update"):
types = r.get_vars.get("site_facility_type.facility_type_id__belongs", None)
if not types:
# Hide Private Residences
from s3 import FS
s3.filter = FS("site_facility_type.facility_type_id$name") != "Private Residence"
if r.interactive:
tablename = "org_facility"
table = s3db[tablename]
if not r.component and r.method in (None, "create", "update"):
from s3 import IS_LOCATION_SELECTOR2, S3LocationSelectorWidget2, S3MultiSelectWidget
field = table.location_id
if r.method in ("create", "update"):
field.label = "" # Gets replaced by widget
levels = ("L2", "L3")
field.requires = IS_LOCATION_SELECTOR2(levels=levels)
field.widget = S3LocationSelectorWidget2(levels=levels,
hide_lx=False,
reverse_lx=True,
show_address=True,
show_postcode=True,
)
table.organisation_id.widget = S3MultiSelectWidget(multiple=False)
if r.get_vars.get("format", None) == "popup":
# Coming from req/create form
# Hide most Fields
from s3 import S3SQLCustomForm, S3SQLInlineComponent
                    # We default the name in the onvalidation callback (org_facility_onvalidation)
table.name.notnull = False
table.name.requires = None
crud_form = S3SQLCustomForm(S3SQLInlineComponent(
"site_facility_type",
label = T("Facility Type"),
fields = [("", "facility_type_id")],
multiple = False,
required = True,
),
"name",
"location_id",
)
s3db.configure(tablename,
crud_form = crud_form,
onvalidation = org_facility_onvalidation,
)
return True
s3.prep = custom_prep
return attr
settings.customise_org_facility_controller = customise_org_facility_controller
# -----------------------------------------------------------------------------
def customise_org_organisation_resource(r, tablename):
from gluon.html import DIV, INPUT
from s3 import S3MultiSelectWidget, S3SQLCustomForm, S3SQLInlineLink, S3SQLInlineComponent, S3SQLInlineComponentMultiSelectWidget
s3db = current.s3db
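    # Work out the current "Don't Import Feed" state: if the record's RSS
    # channel exists but is not being polled, default the rss_no_import
    # checkbox to checked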
if r.tablename == "org_organisation":
if r.id:
# Update form
ctable = s3db.pr_contact
query = (ctable.pe_id == r.record.pe_id) & \
(ctable.contact_method == "RSS") & \
(ctable.deleted == False)
rss = current.db(query).select(ctable.poll,
limitby=(0, 1)
).first()
if rss and not rss.poll:
# Remember that we don't wish to import
rss_import = "on"
else:
# Default
rss_import = None
else:
# Create form: Default
rss_import = None
else:
# Component
if r.component_id:
# Update form
db = current.db
otable = s3db.org_organisation
org = db(otable.id == r.component_id).select(otable.pe_id,
limitby=(0, 1)
).first()
try:
pe_id = org.pe_id
except:
current.log.error("Org %s not found: cannot set rss_import correctly" % r.component_id)
# Default
rss_import = None
else:
ctable = s3db.pr_contact
query = (ctable.pe_id == pe_id) & \
(ctable.contact_method == "RSS") & \
(ctable.deleted == False)
rss = db(query).select(ctable.poll,
limitby=(0, 1)
).first()
if rss and not rss.poll:
# Remember that we don't wish to import
rss_import = "on"
else:
# Default
rss_import = None
else:
# Create form: Default
rss_import = None
mtable = s3db.org_group_membership
mtable.group_id.widget = S3MultiSelectWidget(multiple=False)
mtable.status_id.widget = S3MultiSelectWidget(multiple=False,
create=dict(c="org",
f="group_membership_status",
label=str(T("Add New Status")),
parent="group_membership",
child="status_id"
))
crud_form = S3SQLCustomForm(
"name",
"acronym",
S3SQLInlineLink(
"organisation_type",
field = "organisation_type_id",
label = T("Type"),
multiple = False,
#widget = "hierarchy",
),
S3SQLInlineComponentMultiSelectWidget(
# activate hierarchical org_service:
#S3SQLInlineLink(
"service",
label = T("Services"),
field = "service_id",
# activate hierarchical org_service:
#leafonly = False,
#widget = "hierarchy",
),
S3SQLInlineComponent(
"group_membership",
label = T("Network"),
fields = [("", "group_id"),
("", "status_id"),
],
),
S3SQLInlineComponent(
"address",
label = T("Address"),
multiple = False,
# This is just Text - put into the Comments box for now
# Ultimately should go into location_id$addr_street
fields = [("", "comments")],
),
S3SQLInlineComponentMultiSelectWidget(
"location",
label = T("Neighborhoods Served"),
field = "location_id",
filterby = dict(field = "level",
options = "L4"
),
# @ToDo: GroupedCheckbox Widget or Hierarchical MultiSelectWidget
#cols = 5,
),
"phone",
S3SQLInlineComponent(
"contact",
name = "phone2",
label = T("Phone2"),
multiple = False,
fields = [("", "value")],
filterby = dict(field = "contact_method",
options = "WORK_PHONE"
)
),
S3SQLInlineComponent(
"contact",
name = "email",
label = T("Email"),
multiple = False,
fields = [("", "value")],
filterby = dict(field = "contact_method",
options = "EMAIL"
)
),
"website",
S3SQLInlineComponent(
"contact",
comment = DIV(INPUT(_type="checkbox",
_name="rss_no_import",
value = rss_import,
),
T("Don't Import Feed")),
name = "rss",
label = T("RSS"),
multiple = False,
fields = [("", "value"),
#(T("Don't Import Feed"), "poll"),
],
filterby = dict(field = "contact_method",
options = "RSS"
)
),
S3SQLInlineComponent(
"document",
name = "iCal",
label = "iCAL",
multiple = False,
fields = [("", "url")],
filterby = dict(field = "name",
options="iCal"
)
),
S3SQLInlineComponent(
"document",
name = "data",
label = T("Data"),
multiple = False,
fields = [("", "url")],
filterby = dict(field = "name",
options="Data"
)
),
S3SQLInlineComponent(
"contact",
name = "twitter",
label = T("Twitter"),
multiple = False,
fields = [("", "value")],
filterby = dict(field = "contact_method",
options = "TWITTER"
)
),
S3SQLInlineComponent(
"contact",
name = "facebook",
label = T("Facebook"),
multiple = False,
fields = [("", "value")],
filterby = dict(field = "contact_method",
options = "FACEBOOK"
)
),
"comments",
postprocess = pr_contact_postprocess,
)
from s3 import S3LocationFilter, S3OptionsFilter, S3TextFilter
# activate hierarchical org_service:
#from s3 import S3LocationFilter, S3OptionsFilter, S3TextFilter, S3HierarchyFilter
filter_widgets = [
S3TextFilter(["name", "acronym"],
label = T("Name"),
_class = "filter-search",
),
S3OptionsFilter("group_membership.group_id",
label = T("Network"),
represent = "%(name)s",
#hidden = True,
),
S3LocationFilter("organisation_location.location_id",
label = T("Neighborhood"),
levels = ("L3", "L4"),
#hidden = True,
),
S3OptionsFilter("service_organisation.service_id",
#label = T("Service"),
#hidden = True,
),
# activate hierarchical org_service:
#S3HierarchyFilter("service_organisation.service_id",
# #label = T("Service"),
# #hidden = True,
# ),
S3OptionsFilter("organisation_organisation_type.organisation_type_id",
label = T("Type"),
#hidden = True,
),
]
list_fields = ["name",
(T("Type"), "organisation_organisation_type.organisation_type_id"),
(T("Services"), "service.name"),
"phone",
(T("Email"), "email.value"),
"website"
#(T("Neighborhoods Served"), "location.name"),
]
s3db.configure("org_organisation",
crud_form = crud_form,
filter_widgets = filter_widgets,
list_fields = list_fields,
)
settings.customise_org_organisation_resource = customise_org_organisation_resource
# -----------------------------------------------------------------------------
def customise_org_organisation_controller(**attr):
s3db = current.s3db
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if r.interactive:
if r.component_name == "facility":
if r.method in (None, "create", "update"):
from s3 import IS_LOCATION_SELECTOR2, S3LocationSelectorWidget2
table = s3db.org_facility
field = table.location_id
if r.method in ("create", "update"):
field.label = "" # Gets replaced by widget
levels = ("L2", "L3")
field.requires = IS_LOCATION_SELECTOR2(levels=levels)
field.widget = S3LocationSelectorWidget2(levels=levels,
hide_lx=False,
reverse_lx=True,
show_address=True,
show_postcode=True,
)
elif r.component_name == "human_resource":
# Don't assume that user is from same org/site as Contacts they create
r.component.table.site_id.default = None
return result
s3.prep = custom_prep
# Custom postp
standard_postp = s3.postp
def custom_postp(r, output):
# Call standard postp
if callable(standard_postp):
output = standard_postp(r, output)
if r.interactive and isinstance(output, dict):
if "rheader" in output:
# Custom Tabs
tabs = [(T("Basic Details"), None),
(T("Contacts"), "human_resource"),
(T("Facilities"), "facility"),
(T("Projects"), "project"),
(T("Assets"), "asset"),
]
output["rheader"] = s3db.org_rheader(r, tabs=tabs)
return output
s3.postp = custom_postp
return attr
settings.customise_org_organisation_controller = customise_org_organisation_controller
# -----------------------------------------------------------------------------
def customise_org_group_controller(**attr):
s3db = current.s3db
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if not r.component:
table = s3db.org_group
list_fields = ["name",
"mission",
"website",
"meetings",
]
s3db.configure("org_group",
list_fields = list_fields,
)
if r.interactive:
from gluon.html import DIV, INPUT
from s3 import S3SQLCustomForm, S3SQLInlineComponent
if r.method != "read":
from gluon.validators import IS_EMPTY_OR
from s3 import IS_LOCATION_SELECTOR2, S3LocationSelectorWidget2
field = table.location_id
field.label = "" # Gets replaced by widget
#field.requires = IS_LOCATION_SELECTOR2(levels = ("L2",))
field.requires = IS_EMPTY_OR(
IS_LOCATION_SELECTOR2(levels = ("L2",))
)
field.widget = S3LocationSelectorWidget2(levels = ("L2",),
points = True,
polygons = True,
)
# Default location to Manhattan
db = current.db
gtable = db.gis_location
query = (gtable.name == "New York") & \
(gtable.level == "L2")
manhattan = db(query).select(gtable.id,
limitby=(0, 1)).first()
if manhattan:
field.default = manhattan.id
table.mission.readable = table.mission.writable = True
table.meetings.readable = table.meetings.writable = True
if r.id:
# Update form
ctable = s3db.pr_contact
query = (ctable.pe_id == r.record.pe_id) & \
(ctable.contact_method == "RSS") & \
(ctable.deleted == False)
rss = current.db(query).select(ctable.poll,
limitby=(0, 1)
).first()
if rss and not rss.poll:
# Remember that we don't wish to import
rss_import = "on"
else:
# Default
rss_import = None
else:
# Create form: Default
rss_import = None
crud_form = S3SQLCustomForm(
"name",
"location_id",
"mission",
S3SQLInlineComponent(
"contact",
name = "phone",
label = T("Phone"),
multiple = False,
fields = [("", "value")],
filterby = dict(field = "contact_method",
options = "WORK_PHONE"
)
),
S3SQLInlineComponent(
"contact",
name = "email",
label = T("Email"),
multiple = False,
fields = [("", "value")],
filterby = dict(field = "contact_method",
options = "EMAIL"
)
),
"website",
S3SQLInlineComponent(
"contact",
comment = DIV(INPUT(_type="checkbox",
_name="rss_no_import",
value = rss_import,
),
T("Don't Import Feed")),
name = "rss",
label = T("RSS"),
multiple = False,
fields = [("", "value")],
filterby = dict(field = "contact_method",
options = "RSS"
)
),
S3SQLInlineComponent(
"document",
name = "iCal",
label = "iCAL",
multiple = False,
fields = [("", "url")],
filterby = dict(field = "name",
options="iCal"
)
),
S3SQLInlineComponent(
"document",
name = "data",
label = T("Data"),
multiple = False,
fields = [("", "url")],
filterby = dict(field = "name",
options="Data"
)
),
S3SQLInlineComponent(
"contact",
name = "twitter",
label = T("Twitter"),
multiple = False,
fields = [("", "value")],
filterby = dict(field = "contact_method",
options = "TWITTER"
)
),
S3SQLInlineComponent(
"contact",
name = "facebook",
label = T("Facebook"),
multiple = False,
fields = [("", "value")],
filterby = dict(field = "contact_method",
options = "FACEBOOK"
)
),
"meetings",
"comments",
postprocess = pr_contact_postprocess,
)
s3db.configure("org_group",
crud_form = crud_form,
)
elif r.component_name == "pr_group":
list_fields = [#(T("Network"), "group_team.org_group_id"),
"name",
"description",
"meetings",
(T("Chairperson"), "chairperson"),
"comments",
]
s3db.configure("pr_group",
list_fields = list_fields,
)
elif r.component_name == "organisation":
# Add Network Status to List Fields
list_fields = s3db.get_config("org_organisation", "list_fields")
list_fields.insert(1, "group_membership.status_id")
return result
s3.prep = custom_prep
if current.auth.s3_logged_in():
        # Allow components with components (such as org/group) to break out from tabs
attr["native"] = True
return attr
settings.customise_org_group_controller = customise_org_group_controller
# -----------------------------------------------------------------------------
# Persons
# Uncomment to hide fields in S3AddPersonWidget
settings.pr.request_dob = False
settings.pr.request_gender = False
# Doesn't yet work (form fails to submit)
#settings.pr.select_existing = False
settings.pr.show_emergency_contacts = False
# -----------------------------------------------------------------------------
# Persons
def customise_pr_person_controller(**attr):
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
s3db = current.s3db
#if r.method == "validate":
# # Can't validate image without the file
# image_field = s3db.pr_image.image
# image_field.requires = None
if r.interactive or r.representation == "aadata":
if not r.component:
hr_fields = ["organisation_id",
"job_title_id",
"site_id",
]
if r.method in ("create", "update"):
get_vars = r.get_vars
                    # Context from a Profile page?
organisation_id = get_vars.get("(organisation)", None)
if organisation_id:
field = s3db.hrm_human_resource.organisation_id
field.default = organisation_id
field.readable = field.writable = False
hr_fields.remove("organisation_id")
site_id = get_vars.get("(site)", None)
if site_id:
field = s3db.hrm_human_resource.site_id
field.default = site_id
field.readable = field.writable = False
hr_fields.remove("site_id")
else:
s3db.hrm_human_resource.site_id.default = None
# ImageCrop widget doesn't currently work within an Inline Form
#image_field = s3db.pr_image.image
#from gluon.validators import IS_IMAGE
#image_field.requires = IS_IMAGE()
#image_field.widget = None
from s3 import S3SQLCustomForm, S3SQLInlineComponent
s3_sql_custom_fields = ["first_name",
#"middle_name",
"last_name",
S3SQLInlineComponent(
"human_resource",
name = "human_resource",
label = "",
multiple = False,
fields = hr_fields,
),
#S3SQLInlineComponent(
# "image",
# name = "image",
# label = T("Photo"),
# multiple = False,
# fields = [("", "image")],
# filterby = dict(field = "profile",
# options=[True]
# )
# ),
]
list_fields = [(current.messages.ORGANISATION, "human_resource.organisation_id"),
"first_name",
#"middle_name",
"last_name",
(T("Job Title"), "human_resource.job_title_id"),
(T("Office"), "human_resource.site_id"),
]
# Don't include Email/Phone for unauthenticated users
if current.auth.is_logged_in():
MOBILE = settings.get_ui_label_mobile_phone()
EMAIL = T("Email")
list_fields += [(MOBILE, "phone.value"),
(EMAIL, "email.value"),
]
s3_sql_custom_fields.insert(3,
S3SQLInlineComponent(
"contact",
name = "phone",
label = MOBILE,
multiple = False,
fields = [("", "value")],
filterby = dict(field = "contact_method",
options = "SMS")),
)
s3_sql_custom_fields.insert(3,
S3SQLInlineComponent(
"contact",
name = "email",
label = EMAIL,
multiple = False,
fields = [("", "value")],
filterby = dict(field = "contact_method",
options = "EMAIL")),
)
crud_form = S3SQLCustomForm(*s3_sql_custom_fields)
s3db.configure(r.tablename,
crud_form = crud_form,
list_fields = list_fields,
)
elif r.component_name == "group_membership":
s3db.pr_group_membership.group_head.label = T("Group Chairperson")
return result
s3.prep = custom_prep
# Custom postp
standard_postp = s3.postp
def custom_postp(r, output):
# Call standard postp
if callable(standard_postp):
output = standard_postp(r, output)
if r.interactive and isinstance(output, dict):
if "form" in output:
output["form"].add_class("pr_person")
elif "item" in output and hasattr(output["item"], "add_class"):
output["item"].add_class("pr_person")
return output
s3.postp = custom_postp
return attr
settings.customise_pr_person_controller = customise_pr_person_controller
# -----------------------------------------------------------------------------
# Groups
def chairperson(row):
"""
Virtual Field to show the chairperson of a group
"""
if hasattr(row, "pr_group"):
row = row.pr_group
try:
group_id = row.id
except:
# not available
return current.messages["NONE"]
db = current.db
mtable = current.s3db.pr_group_membership
ptable = db.pr_person
query = (mtable.group_id == group_id) & \
(mtable.group_head == True) & \
(mtable.person_id == ptable.id)
chair = db(query).select(ptable.first_name,
ptable.middle_name,
ptable.last_name,
ptable.id,
limitby=(0, 1)).first()
if chair:
# Only used in list view so HTML is OK
return A(s3_fullname(chair),
_href=URL(c="hrm", f="person", args=chair.id))
else:
return current.messages["NONE"]
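# NB chairperson() is exposed on pr_group as Field.Method("chairperson", chairperson)
# in customise_pr_group_resource() below, which is what makes the
# (T("Chairperson"), "chairperson") list_fields entries work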
# -----------------------------------------------------------------------------
def customise_pr_group_controller(**attr):
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
if not result:
return False
from s3 import S3Represent, S3TextFilter, S3OptionsFilter, S3SQLCustomForm, S3SQLInlineComponent
s3db = current.s3db
s3db.org_group_team.org_group_id.represent = S3Represent(lookup="org_group",
show_link=True)
crud_form = S3SQLCustomForm("name",
"description",
S3SQLInlineComponent("group_team",
label = T("Network"),
fields = [("", "org_group_id")],
# @ToDo: Make this optional?
multiple = False,
),
"meetings",
"comments",
)
filter_widgets = [
S3TextFilter(["name",
"description",
"comments",
"group_team.org_group_id$name",
],
label = T("Search"),
                         comment = T("You can search by group name, description or comments and by network name. You may use % as wildcard. Press 'Search' without input to list all."),
#_class = "filter-search",
),
S3OptionsFilter("group_team.org_group_id",
label = T("Network"),
#hidden = True,
),
]
        # Need to re-do list_fields as they get overwritten by hrm_group_controller()
list_fields = [(T("Network"), "group_team.org_group_id"),
"name",
"description",
"meetings",
(T("Chairperson"), "chairperson"),
"comments",
]
s3db.configure("pr_group",
crud_form = crud_form,
filter_widgets = filter_widgets,
list_fields = list_fields,
)
s3db.pr_group_membership.group_head.label = T("Group Chairperson")
if r.component_name == "group_membership":
from s3layouts import S3AddResourceLink
s3db.pr_group_membership.person_id.comment = \
S3AddResourceLink(c="pr", f="person",
title=T("Create Person"),
tooltip=current.messages.AUTOCOMPLETE_HELP)
#else:
# # RHeader wants a simplified version, but don't want inconsistent across tabs
# s3db.pr_group_membership.group_head.label = T("Chairperson")
return True
s3.prep = custom_prep
return attr
settings.customise_pr_group_controller = customise_pr_group_controller
# -----------------------------------------------------------------------------
def customise_pr_group_resource(r, tablename):
"""
Customise pr_group resource (in group & org_group controllers)
- runs after controller customisation
- but runs before prep
"""
s3db = current.s3db
table = s3db.pr_group
field = table.group_type
field.default = 3 # Relief Team, to show up in hrm/group
field.readable = field.writable = False
table.name.label = T("Name")
table.description.label = T("Description")
table.meetings.readable = table.meetings.writable = True
# Increase size of widget
from s3 import s3_comments_widget
table.description.widget = s3_comments_widget
from gluon import Field
table.chairperson = Field.Method("chairperson", chairperson)
# Format for filter_widgets & imports
s3db.add_components("pr_group",
org_group_team = "group_id",
)
s3db.configure("pr_group",
# Redirect to member list when a new group has been created
create_next = URL(c="hrm", f="group",
args=["[id]", "group_membership"]),
)
settings.customise_pr_group_resource = customise_pr_group_resource
# -----------------------------------------------------------------------------
def pr_contact_postprocess(form):
"""
Import Organisation/Network RSS Feeds
"""
s3db = current.s3db
form_vars = form.vars
rss_url = form_vars.rsscontact_i_value_edit_0 or \
form_vars.rsscontact_i_value_edit_none
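    # Flow below:
    # - the inline RSS contact from the form is mirrored into an msg_rss_channel
    #   plus a parse_rss msg_parser, so the feed shows up in the Newsfeed
    # - an existing channel is matched by Contact name first, then by URL
    # - the "Don't Import Feed" checkbox (rss_no_import) enables/disables the channel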
if not rss_url:
if form.record:
# Update form
old_rss = form.record.sub_rsscontact
import json
            data = json.loads(old_rss)["data"]
            if data:
                # RSS feed is being deleted, so we should disable it
                old_rss = data[0]["value"]["value"]
                table = s3db.msg_rss_channel
                old = current.db(table.url == old_rss).select(table.channel_id,
                                                              table.enabled,
                                                              limitby = (0, 1)
                                                              ).first()
                if old and old.enabled:
                    s3db.msg_channel_disable("msg_rss_channel", old.channel_id)
            # Whether or not a feed was disabled, an empty URL means there is
            # nothing to create or enable
            return
else:
# Nothing to do :)
return
# Check if we already have a channel for this Contact
db = current.db
name = form_vars.name
table = s3db.msg_rss_channel
name_exists = db(table.name == name).select(table.id,
table.channel_id,
table.enabled,
table.url,
limitby = (0, 1)
).first()
no_import = current.request.post_vars.get("rss_no_import", None)
if name_exists:
if name_exists.url == rss_url:
# No change to either Contact Name or URL
if no_import:
if name_exists.enabled:
# Disable channel (& associated parsers)
s3db.msg_channel_disable("msg_rss_channel",
name_exists.channel_id)
return
elif name_exists.enabled:
# Nothing to do :)
return
else:
# Enable channel (& associated parsers)
s3db.msg_channel_enable("msg_rss_channel",
name_exists.channel_id)
return
# Check if we already have a channel for this URL
url_exists = db(table.url == rss_url).select(table.id,
table.channel_id,
table.enabled,
limitby = (0, 1)
).first()
if url_exists:
# We have 2 feeds: 1 for the Contact & 1 for the URL
# Disable the old Contact one and link the URL one to this Contact
# and ensure active or not as appropriate
# Name field is unique so rename old one
name_exists.update_record(name="%s (Old)" % name)
if name_exists.enabled:
# Disable channel (& associated parsers)
s3db.msg_channel_disable("msg_rss_channel",
name_exists.channel_id)
url_exists.update_record(name=name)
if no_import:
if url_exists.enabled:
# Disable channel (& associated parsers)
s3db.msg_channel_disable("msg_rss_channel",
url_exists.channel_id)
return
elif url_exists.enabled:
# Nothing to do :)
return
else:
# Enable channel (& associated parsers)
s3db.msg_channel_enable("msg_rss_channel",
url_exists.channel_id)
return
else:
# Update the URL
name_exists.update_record(url=rss_url)
if no_import:
if name_exists.enabled:
# Disable channel (& associated parsers)
s3db.msg_channel_disable("msg_rss_channel",
name_exists.channel_id)
return
elif name_exists.enabled:
# Nothing to do :)
return
else:
# Enable channel (& associated parsers)
s3db.msg_channel_enable("msg_rss_channel",
name_exists.channel_id)
return
else:
# Check if we already have a channel for this URL
url_exists = db(table.url == rss_url).select(table.id,
table.channel_id,
table.enabled,
limitby = (0, 1)
).first()
if url_exists:
# Either Contact has changed Name or this feed is associated with
# another Contact
# - update Feed name
url_exists.update_record(name=name)
if no_import:
if url_exists.enabled:
# Disable channel (& associated parsers)
s3db.msg_channel_disable("msg_rss_channel",
url_exists.channel_id)
return
elif url_exists.enabled:
# Nothing to do :)
return
else:
# Enable channel (& associated parsers)
s3db.msg_channel_enable("msg_rss_channel",
url_exists.channel_id)
return
elif no_import:
# Nothing to do :)
return
#else:
# # Create a new Feed
# pass
# Add RSS Channel
_id = table.insert(name=name, enabled=True, url=rss_url)
record = dict(id=_id)
s3db.update_super(table, record)
# Enable
channel_id = record["channel_id"]
s3db.msg_channel_enable("msg_rss_channel", channel_id)
# Setup Parser
table = s3db.msg_parser
_id = table.insert(channel_id=channel_id,
function_name="parse_rss",
enabled=True)
s3db.msg_parser_enable(_id)
# Check Now
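    # (dispatched via s3task so the new feed gets polled & parsed straight away)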
async = current.s3task.async
async("msg_poll", args=["msg_rss_channel", channel_id])
async("msg_parse", args=[channel_id, "parse_rss"])
# -----------------------------------------------------------------------------
# Human Resource Management
# Uncomment to change the label for 'Staff'
settings.hrm.staff_label = "Contacts"
# Uncomment to allow Staff & Volunteers to be registered without an email address
settings.hrm.email_required = False
# Uncomment to allow Staff & Volunteers to be registered without an Organisation
settings.hrm.org_required = False
# Uncomment to show the Organisation name in HR represents
settings.hrm.show_organisation = True
# Uncomment to disable Staff experience
settings.hrm.staff_experience = False
# Uncomment to disable the use of HR Certificates
settings.hrm.use_certificates = False
# Uncomment to disable the use of HR Credentials
settings.hrm.use_credentials = False
# Uncomment to disable the use of HR Education
settings.hrm.use_education = False
# Uncomment to disable the use of HR Skills
#settings.hrm.use_skills = False
# Uncomment to disable the use of HR Trainings
settings.hrm.use_trainings = False
# Uncomment to disable the use of HR Description
settings.hrm.use_description = False
# Change the label of "Teams" to "Groups"
settings.hrm.teams = "Groups"
# Custom label for Organisations in HR module
#settings.hrm.organisation_label = "National Society / Branch"
settings.hrm.organisation_label = "Organization"
# -----------------------------------------------------------------------------
def customise_hrm_human_resource_controller(**attr):
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if r.interactive or r.representation == "aadata":
if not r.component:
from s3 import S3TextFilter, S3OptionsFilter, S3LocationFilter
filter_widgets = [
S3TextFilter(["person_id$first_name",
"person_id$middle_name",
"person_id$last_name",
],
label = T("Name"),
),
S3OptionsFilter("organisation_id",
filter = True,
header = "",
hidden = True,
),
S3OptionsFilter("group_person.group_id",
label = T("Network"),
#filter = True,
#header = "",
hidden = True,
),
S3LocationFilter("location_id",
label = T("Location"),
levels = ("L1", "L2", "L3", "L4"),
hidden = True,
),
S3OptionsFilter("site_id",
hidden = True,
),
S3OptionsFilter("training.course_id",
label = T("Training"),
hidden = True,
),
S3OptionsFilter("group_membership.group_id",
label = T("Team"),
filter = True,
header = "",
hidden = True,
),
]
s3db = current.s3db
s3db.configure("hrm_human_resource",
filter_widgets = filter_widgets,
)
field = r.table.site_id
# Don't assume that user is from same org/site as Contacts they create
field.default = None
# Use a hierarchical dropdown instead of AC
field.widget = None
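            # Cascade the Facility dropdown from the selected Organisation
            # (org/sites_for_org returns the Sites belonging to that Org)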
script = \
'''$.filterOptionsS3({
'trigger':'organisation_id',
'target':'site_id',
'lookupResource':'site',
'lookupURL':'/%s/org/sites_for_org/',
'optional':true
})''' % r.application
s3.jquery_ready.append(script)
return result
s3.prep = custom_prep
return attr
settings.customise_hrm_human_resource_controller = customise_hrm_human_resource_controller
# -----------------------------------------------------------------------------
def customise_hrm_human_resource_resource(r, tablename):
"""
Customise hrm_human_resource resource (in facility, human_resource, organisation & person controllers)
- runs after controller customisation
- but runs before prep
"""
s3db = current.s3db
from s3 import S3SQLCustomForm, S3SQLInlineComponent
crud_form = S3SQLCustomForm("person_id",
"organisation_id",
"site_id",
S3SQLInlineComponent(
"group_person",
label = T("Network"),
link = False,
fields = [("", "group_id")],
multiple = False,
),
"job_title_id",
"start_date",
)
list_fields = ["id",
"person_id",
"job_title_id",
"organisation_id",
(T("Network"), "group_person.group_id"),
(T("Groups"), "person_id$group_membership.group_id"),
"site_id",
#"site_contact",
(T("Email"), "email.value"),
(settings.get_ui_label_mobile_phone(), "phone.value"),
]
s3db.configure("hrm_human_resource",
crud_form = crud_form,
list_fields = list_fields,
)
settings.customise_hrm_human_resource_resource = customise_hrm_human_resource_resource
# -----------------------------------------------------------------------------
def customise_hrm_job_title_controller(**attr):
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if r.interactive or r.representation == "aadata":
table = current.s3db.hrm_job_title
table.organisation_id.readable = table.organisation_id.writable = False
table.type.readable = table.type.writable = False
return result
s3.prep = custom_prep
return attr
settings.customise_hrm_job_title_controller = customise_hrm_job_title_controller
# -----------------------------------------------------------------------------
# Projects
# Use codes for projects (called 'blurb' in NYC)
settings.project.codes = True
# Uncomment this to use settings suitable for detailed Task management
settings.project.mode_task = False
# Uncomment this to use Activities for projects
settings.project.activities = True
# Uncomment this to use Milestones in project/task.
settings.project.milestones = False
# Uncomment this to disable Sectors in projects
settings.project.sectors = False
# Multiple partner organizations
settings.project.multiple_organisations = True
def customise_project_project_controller(**attr):
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if not r.component and (r.interactive or r.representation == "aadata"):
from s3 import S3SQLCustomForm, S3SQLInlineComponent, S3SQLInlineComponentCheckbox
s3db = current.s3db
table = r.table
tablename = "project_project"
table.code.label = T("Project blurb (max. 100 characters)")
table.code.max_length = 100
table.comments.label = T("How people can help")
script = '''$('#project_project_code').attr('maxlength','100')'''
s3.jquery_ready.append(script)
crud_form = S3SQLCustomForm(
"organisation_id",
"name",
"code",
"description",
"status_id",
"start_date",
"end_date",
"calendar",
#"drr.hfa",
#"objectives",
"human_resource_id",
# Activities
S3SQLInlineComponent(
"location",
label = T("Location"),
fields = [("", "location_id")],
),
# Partner Orgs
S3SQLInlineComponent(
"organisation",
name = "partner",
label = T("Partner Organizations"),
fields = ["organisation_id",
"comments", # NB This is labelled 'Role' in DRRPP
],
filterby = dict(field = "role",
options = "2"
)
),
S3SQLInlineComponent(
"document",
name = "media",
label = T("URLs (media, fundraising, website, social media, etc."),
fields = ["document_id",
"name",
"url",
"comments",
],
filterby = dict(field = "name")
),
S3SQLInlineComponentCheckbox(
"activity_type",
label = T("Categories"),
field = "activity_type_id",
cols = 3,
# Filter Activity Type by Project
filter = {"linktable": "project_activity_type_project",
"lkey": "project_id",
"rkey": "activity_type_id",
},
),
#"budget",
#"currency",
"comments",
)
from s3 import S3TextFilter, S3OptionsFilter, S3LocationFilter, S3DateFilter
filter_widgets = [
S3TextFilter(["name",
"code",
"description",
"organisation.name",
"organisation.acronym",
],
label = T("Name"),
_class = "filter-search",
),
S3OptionsFilter("status_id",
label = T("Status"),
                                            # Not translatable
#represent = "%(name)s",
cols = 3,
),
#S3OptionsFilter("theme_project.theme_id",
# label = T("Theme"),
# #hidden = True,
# ),
S3LocationFilter("location.location_id",
label = T("Location"),
levels = ("L1", "L2", "L3", "L4"),
#hidden = True,
),
# @ToDo: Widget to handle Start & End in 1!
S3DateFilter("start_date",
label = T("Start Date"),
hide_time = True,
#hidden = True,
),
S3DateFilter("end_date",
label = T("End Date"),
hide_time = True,
#hidden = True,
),
]
list_fields = ["id",
"name",
"code",
"organisation_id",
"start_date",
"end_date",
(T("Locations"), "location.location_id"),
]
s3db.configure(tablename,
crud_form = crud_form,
filter_widgets = filter_widgets,
list_fields = list_fields,
)
return result
s3.prep = custom_prep
return attr
settings.customise_project_project_controller = customise_project_project_controller
# -----------------------------------------------------------------------------
# Requests Management
settings.req.req_type = ["People", "Stock"]#, "Summary"]
settings.req.prompt_match = False
#settings.req.use_commit = False
settings.req.requester_optional = True
settings.req.date_writable = False
settings.req.item_quantities_writable = True
settings.req.skill_quantities_writable = True
settings.req.items_ask_purpose = False
#settings.req.use_req_number = False
# Label for Requester
settings.req.requester_label = "Site Contact"
# Filter Requester as being from the Site
settings.req.requester_from_site = True
# Label for Inventory Requests
settings.req.type_inv_label = "Supplies"
# Uncomment to enable Summary 'Site Needs' tab for Offices/Facilities
settings.req.summary = True
# -----------------------------------------------------------------------------
def req_req_postprocess(form):
"""
Runs after crud_form completes
- creates a cms_post in the newswire
- @ToDo: Send out Tweets
"""
req_id = form.vars.id
db = current.db
s3db = current.s3db
rtable = s3db.req_req
# Read the full record
row = db(rtable.id == req_id).select(rtable.type,
rtable.site_id,
rtable.requester_id,
rtable.priority,
rtable.date_required,
rtable.purpose,
rtable.comments,
limitby=(0, 1)
).first()
# Build Title & Body from the Request details
priority = rtable.priority.represent(row.priority)
date_required = row.date_required
if date_required:
date = rtable.date_required.represent(date_required)
title = "%(priority)s by %(date)s" % dict(priority=priority,
date=date)
else:
title = priority
body = row.comments
if row.type == 1:
# Items
ritable = s3db.req_req_item
items = db(ritable.req_id == req_id).select(ritable.item_id,
ritable.item_pack_id,
ritable.quantity)
item_represent = s3db.supply_item_represent
pack_represent = s3db.supply_item_pack_represent
for item in items:
item = "%s %s %s" % (item.quantity,
pack_represent(item.item_pack_id),
item_represent(item.item_id))
body = "%s\n%s" % (item, body)
else:
# Skills
body = "%s\n%s" % (row.purpose, body)
rstable = s3db.req_req_skill
skills = db(rstable.req_id == req_id).select(rstable.skill_id,
rstable.quantity)
skill_represent = s3db.hrm_multi_skill_represent
for skill in skills:
item = "%s %s" % (skill.quantity, skill_represent(skill.skill_id))
body = "%s\n%s" % (item, body)
# Lookup series_id
stable = s3db.cms_series
try:
series_id = db(stable.name == "Request").select(stable.id,
cache=s3db.cache,
limitby=(0, 1)
).first().id
except:
# Prepop hasn't been run
series_id = None
# Location is that of the site
otable = s3db.org_site
location_id = db(otable.site_id == row.site_id).select(otable.location_id,
limitby=(0, 1)
).first().location_id
# Create Post
ptable = s3db.cms_post
_id = ptable.insert(series_id=series_id,
title=title,
body=body,
location_id=location_id,
person_id=row.requester_id,
)
record = dict(id=_id)
s3db.update_super(ptable, record)
# Add source link
url = "%s%s" % (settings.get_base_public_url(),
URL(c="req", f="req", args=req_id))
s3db.doc_document.insert(doc_id=record["doc_id"],
url=url,
)
# -----------------------------------------------------------------------------
def customise_req_req_resource(r, tablename):
from s3layouts import S3AddResourceLink
current.s3db.req_req.site_id.comment = \
S3AddResourceLink(c="org", f="facility",
vars = dict(child="site_id"),
title=T("Create Facility"),
tooltip=current.messages.AUTOCOMPLETE_HELP)
current.response.s3.req_req_postprocess = req_req_postprocess
if not r.component and r.method in ("create", "update"):
script = \
'''$('#req_req_site_id').change(function(){
var url=$('#person_add').attr('href')
url=url.split('?')
var q=S3.queryString.parse(url[1])
q['(site)']=$(this).val()
url=url[0]+'?'+S3.queryString.stringify(q)
$('#person_add').attr('href',url)})'''
current.response.s3.jquery_ready.append(script)
settings.customise_req_req_resource = customise_req_req_resource
# -----------------------------------------------------------------------------
# Comment/uncomment modules here to disable/enable them
settings.modules = OrderedDict([
# Core modules which shouldn't be disabled
("default", Storage(
name_nice = T("Home"),
restricted = False, # Use ACLs to control access to this module
access = None, # All Users (inc Anonymous) can see this module in the default menu & access the controller
module_type = None # This item is not shown in the menu
)),
("admin", Storage(
name_nice = T("Admin"),
#description = "Site Administration",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
("appadmin", Storage(
name_nice = T("Administration"),
#description = "Site Administration",
restricted = True,
module_type = None # No Menu
)),
("errors", Storage(
name_nice = T("Ticket Viewer"),
#description = "Needed for Breadcrumbs",
restricted = False,
module_type = None # No Menu
)),
("sync", Storage(
name_nice = T("Synchronization"),
#description = "Synchronization",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
# Uncomment to enable internal support requests
#("support", Storage(
# name_nice = T("Support"),
# #description = "Support Requests",
# restricted = True,
# module_type = None # This item is handled separately for the menu
# )),
("gis", Storage(
name_nice = T("Map"),
#description = "Situation Awareness & Geospatial Analysis",
restricted = True,
module_type = 9, # 8th item in the menu
)),
("pr", Storage(
name_nice = T("Person Registry"),
#description = "Central point to record details on People",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu (access to controller is possible to all still)
module_type = 10
)),
("org", Storage(
name_nice = T("Locations"),
#description = 'Lists "who is doing what & where". Allows relief agencies to coordinate their activities',
restricted = True,
module_type = 4
)),
# All modules below here should be possible to disable safely
("hrm", Storage(
name_nice = T("Contacts"),
#description = "Human Resources Management",
restricted = True,
module_type = 3,
)),
#("vol", Storage(
# name_nice = T("Volunteers"),
# #description = "Human Resources Management",
# restricted = True,
# module_type = 2,
# )),
("cms", Storage(
name_nice = T("Content Management"),
#description = "Content Management System",
restricted = True,
module_type = 10,
)),
("doc", Storage(
name_nice = T("Documents"),
#description = "A library of digital resources, such as photos, documents and reports",
restricted = True,
module_type = None,
)),
("msg", Storage(
name_nice = T("Messaging"),
#description = "Sends & Receives Alerts via Email & SMS",
restricted = True,
        # The user-visible functionality of this module isn't normally required. Rather its main purpose is to be accessed from other modules.
module_type = None,
)),
("supply", Storage(
name_nice = T("Supply Chain Management"),
#description = "Used within Inventory Management, Request Management and Asset Management",
restricted = True,
module_type = None, # Not displayed
)),
("inv", Storage(
name_nice = T("Inventory"),
#description = "Receiving and Sending Items",
restricted = True,
module_type = 10
)),
#("proc", Storage(
# name_nice = T("Procurement"),
# #description = "Ordering & Purchasing of Goods & Services",
# restricted = True,
# module_type = 10
# )),
("asset", Storage(
name_nice = T("Assets"),
#description = "Recording and Assigning Assets",
restricted = True,
module_type = 10,
)),
# Vehicle depends on Assets
#("vehicle", Storage(
# name_nice = T("Vehicles"),
# #description = "Manage Vehicles",
# restricted = True,
# module_type = 10,
# )),
("req", Storage(
name_nice = T("Requests"),
#description = "Manage requests for supplies, assets, staff or other resources. Matches against Inventories where supplies are requested.",
restricted = True,
module_type = 1,
)),
("project", Storage(
name_nice = T("Projects"),
#description = "Tracking of Projects, Activities and Tasks",
restricted = True,
module_type = 10
)),
("assess", Storage(
name_nice = T("Assessments"),
#description = "Rapid Assessments & Flexible Impact Assessments",
restricted = True,
module_type = 5,
)),
("event", Storage(
name_nice = T("Events"),
#description = "Activate Events (e.g. from Scenario templates) for allocation of appropriate Resources (Human, Assets & Facilities).",
restricted = True,
module_type = 10,
)),
("survey", Storage(
name_nice = T("Surveys"),
#description = "Create, enter, and manage surveys.",
restricted = True,
module_type = 5,
)),
#("cr", Storage(
# name_nice = T("Shelters"),
# #description = "Tracks the location, capacity and breakdown of victims in Shelters",
# restricted = True,
# module_type = 10
# )),
#("dvr", Storage(
# name_nice = T("Disaster Victim Registry"),
# #description = "Allow affected individuals & households to register to receive compensation and distributions",
# restricted = False,
# module_type = 10,
# )),
#("member", Storage(
# name_nice = T("Members"),
# #description = "Membership Management System",
# restricted = True,
# module_type = 10,
# )),
# @ToDo: Rewrite in a modern style
#("budget", Storage(
# name_nice = T("Budgeting Module"),
# #description = "Allows a Budget to be drawn up",
# restricted = True,
# module_type = 10
# )),
# @ToDo: Port these Assessments to the Survey module
#("building", Storage(
# name_nice = T("Building Assessments"),
# #description = "Building Safety Assessments",
# restricted = True,
# module_type = 10,
# )),
])
| [] |
nitinkaveriappa/downward | experiments/issue561/v2.py | 5c9a1b5111d667bb96f94da61ca2a45b1b70bb83 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from main import main
main("issue561-v1", "issue561-v2")
| [((71, 105), 'main.main', 'main', (['"""issue561-v1"""', '"""issue561-v2"""'], {}), "('issue561-v1', 'issue561-v2')\n", (75, 105), False, 'from main import main\n')] |
tgroth97/q2-qemistree | q2_qemistree/tests/test_fingerprint.py | 289c447a6c3a29478bb84212281ef0d7ffc1387a | # ----------------------------------------------------------------------------
# Copyright (c) 2016-2018, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from unittest import TestCase, main
import qiime2
import os
from q2_qemistree import MGFDirFmt, SiriusDirFmt, ZodiacDirFmt, OutputDirs
from q2_qemistree import (compute_fragmentation_trees,
rerank_molecular_formulas,
predict_fingerprints)
from q2_qemistree._fingerprint import artifactory
class FingerprintTests(TestCase):
def setUp(self):
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
self.badsirpath = os.path.join(THIS_DIR, 'data/foo/bin')
self.goodsirpath = os.path.join(THIS_DIR, 'data/'
'sirius-linux64-headless-4.0.1/bin')
# MassSpectrometryFeatures
self.ions = qiime2.Artifact.load(os.path.join(THIS_DIR,
'data/sirius.mgf.qza'))
# SiriusFolder
self.sirout = qiime2.Artifact.load(os.path.join(THIS_DIR,
'data/sirFolder.qza'))
# ZodiacFolder
self.zodout = qiime2.Artifact.load(os.path.join(THIS_DIR,
'data/zodFolder.qza'))
def test_artifactory(self):
# everything is working fine
obs = os.environ.get('_JAVA_OPTIONS', '')
res = artifactory(self.goodsirpath, ['--help'],
constructor=OutputDirs, java_flags='-Xms2G')
self.assertEqual(obs, os.environ.get('_JAVA_OPTIONS'))
self.assertTrue(isinstance(res, OutputDirs))
# exceptions are raised
with self.assertRaises(OSError):
res = artifactory(self.badsirpath, ['--help'],
constructor=OutputDirs)
def test_fragmentation_trees(self):
ions = self.ions.view(MGFDirFmt)
result = compute_fragmentation_trees(sirius_path=self.goodsirpath,
features=ions,
ppm_max=15, profile='orbitrap')
contents = os.listdir(result.get_path())
self.assertTrue(('version.txt' in contents))
def test_fragmentation_trees_negative_ionization(self):
ions = self.ions.view(MGFDirFmt)
result = compute_fragmentation_trees(sirius_path=self.goodsirpath,
features=ions,
ppm_max=15, profile='orbitrap',
ionization_mode='negative')
contents = os.listdir(result.get_path())
self.assertTrue(('version.txt' in contents))
def test_fragmentation_trees_exception(self):
ions = self.ions.view(MGFDirFmt)
with self.assertRaises(ValueError):
compute_fragmentation_trees(sirius_path=self.goodsirpath,
features=ions,
ppm_max=15,
profile='orbitrap',
ionization_mode='n3gativ3')
def test_reranking(self):
ions = self.ions.view(MGFDirFmt)
sirout = self.sirout.view(SiriusDirFmt)
result = rerank_molecular_formulas(sirius_path=self.goodsirpath,
fragmentation_trees=sirout,
features=ions)
contents = os.listdir(result.get_path())
self.assertTrue(('zodiac_summary.csv' in contents))
def test_fingerid(self):
zodout = self.zodout.view(ZodiacDirFmt)
result = predict_fingerprints(sirius_path=self.goodsirpath,
molecular_formulas=zodout, ppm_max=15)
contents = os.listdir(result.get_path())
self.assertTrue(('summary_csi_fingerid.csv' in contents))
if __name__ == '__main__':
main()
| [((4200, 4206), 'unittest.main', 'main', ([], {}), '()\n', (4204, 4206), False, 'from unittest import TestCase, main\n'), ((836, 874), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/foo/bin"""'], {}), "(THIS_DIR, 'data/foo/bin')\n", (848, 874), False, 'import os\n'), ((902, 966), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/sirius-linux64-headless-4.0.1/bin"""'], {}), "(THIS_DIR, 'data/sirius-linux64-headless-4.0.1/bin')\n", (914, 966), False, 'import os\n'), ((1607, 1642), 'os.environ.get', 'os.environ.get', (['"""_JAVA_OPTIONS"""', '""""""'], {}), "('_JAVA_OPTIONS', '')\n", (1621, 1642), False, 'import os\n'), ((1657, 1747), 'q2_qemistree._fingerprint.artifactory', 'artifactory', (['self.goodsirpath', "['--help']"], {'constructor': 'OutputDirs', 'java_flags': '"""-Xms2G"""'}), "(self.goodsirpath, ['--help'], constructor=OutputDirs,\n java_flags='-Xms2G')\n", (1668, 1747), False, 'from q2_qemistree._fingerprint import artifactory\n'), ((2171, 2279), 'q2_qemistree.compute_fragmentation_trees', 'compute_fragmentation_trees', ([], {'sirius_path': 'self.goodsirpath', 'features': 'ions', 'ppm_max': '(15)', 'profile': '"""orbitrap"""'}), "(sirius_path=self.goodsirpath, features=ions,\n ppm_max=15, profile='orbitrap')\n", (2198, 2279), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n'), ((2587, 2723), 'q2_qemistree.compute_fragmentation_trees', 'compute_fragmentation_trees', ([], {'sirius_path': 'self.goodsirpath', 'features': 'ions', 'ppm_max': '(15)', 'profile': '"""orbitrap"""', 'ionization_mode': '"""negative"""'}), "(sirius_path=self.goodsirpath, features=ions,\n ppm_max=15, profile='orbitrap', ionization_mode='negative')\n", (2614, 2723), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n'), ((3535, 3638), 'q2_qemistree.rerank_molecular_formulas', 'rerank_molecular_formulas', ([], {'sirius_path': 'self.goodsirpath', 'fragmentation_trees': 'sirout', 'features': 'ions'}), '(sirius_path=self.goodsirpath, fragmentation_trees\n =sirout, features=ions)\n', (3560, 3638), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n'), ((3924, 4018), 'q2_qemistree.predict_fingerprints', 'predict_fingerprints', ([], {'sirius_path': 'self.goodsirpath', 'molecular_formulas': 'zodout', 'ppm_max': '(15)'}), '(sirius_path=self.goodsirpath, molecular_formulas=\n zodout, ppm_max=15)\n', (3944, 4018), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n'), ((783, 808), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (798, 808), False, 'import os\n'), ((1086, 1131), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/sirius.mgf.qza"""'], {}), "(THIS_DIR, 'data/sirius.mgf.qza')\n", (1098, 1131), False, 'import os\n'), ((1253, 1297), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/sirFolder.qza"""'], {}), "(THIS_DIR, 'data/sirFolder.qza')\n", (1265, 1297), False, 'import os\n'), ((1421, 1465), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/zodFolder.qza"""'], {}), "(THIS_DIR, 'data/zodFolder.qza')\n", (1433, 1465), False, 'import os\n'), ((1800, 1831), 'os.environ.get', 'os.environ.get', (['"""_JAVA_OPTIONS"""'], {}), "('_JAVA_OPTIONS')\n", (1814, 1831), False, 'import os\n'), ((1977, 2041), 'q2_qemistree._fingerprint.artifactory', 'artifactory', (['self.badsirpath', "['--help']"], {'constructor': 'OutputDirs'}), 
"(self.badsirpath, ['--help'], constructor=OutputDirs)\n", (1988, 2041), False, 'from q2_qemistree._fingerprint import artifactory\n'), ((3105, 3241), 'q2_qemistree.compute_fragmentation_trees', 'compute_fragmentation_trees', ([], {'sirius_path': 'self.goodsirpath', 'features': 'ions', 'ppm_max': '(15)', 'profile': '"""orbitrap"""', 'ionization_mode': '"""n3gativ3"""'}), "(sirius_path=self.goodsirpath, features=ions,\n ppm_max=15, profile='orbitrap', ionization_mode='n3gativ3')\n", (3132, 3241), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n')] |
GarimaVishvakarma/intel-chroma | chroma-manager/tests/utils/__init__.py | fdf68ed00b13643c62eb7480754d3216d9295e0b | import time
import datetime
import contextlib
@contextlib.contextmanager
def patch(obj, **attrs):
"Monkey patch an object's attributes, restoring them after the block."
stored = {}
for name in attrs:
stored[name] = getattr(obj, name)
setattr(obj, name, attrs[name])
try:
yield
finally:
for name in stored:
setattr(obj, name, stored[name])
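# Illustrative usage of patch() (hypothetical `settings` object and `run_tests` call;
# example only, not part of the original module):
#     with patch(settings, DEBUG=True, RETRIES=0):
#         run_tests()
#     # the saved attribute values are restored when the block exits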
@contextlib.contextmanager
def timed(msg='', threshold=0):
"Print elapsed time of a block, if over optional threshold."
start = time.time()
try:
yield
finally:
elapsed = time.time() - start
if elapsed >= threshold:
print datetime.timedelta(seconds=elapsed), msg
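# Illustrative usage of timed() (hypothetical `load_data` call; example only,
# not part of the original module):
#     with timed('loading data', threshold=0.5):
#         load_data()
#     # prints the elapsed time and message only if the block took >= 0.5 seconds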
| [] |
rackerlabs/Tempo | tempo/worker.py | 60c2adaf5b592ae171987b999e0b9cc46b80c54e | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012 Rackspace
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import kombu
from tempo import actions
from tempo import config
from tempo import db
from tempo import notifier
from tempo import queue as tempo_queue
from tempo.openstack.common import cfg
from tempo.openstack.common import exception as common_exception
CFG = config.CFG
logger = logging.getLogger('tempo.worker')
worker_opts = [
cfg.BoolOpt('daemonized',
default=False,
help='Run worker as a daemon'),
cfg.StrOpt('publisher_id',
default='host',
help='Where the notification came from')
]
worker_group = cfg.OptGroup(name='worker', title='Worker options')
CFG.register_group(worker_group)
CFG.register_opts(worker_opts, group=worker_group)
def _perform_task(task):
def _notify(event_type, exception=None):
payload = {'task_uuid': task_uuid}
if exception is not None:
payload['exception'] = exception
publisher_id = CFG.worker.publisher_id
priority = notifier.DEBUG
notifier.notify(publisher_id, event_type, priority, payload)
action = task.action
task_uuid = task.uuid
try:
func = getattr(actions, action)
except AttributeError:
logger.error("unrecognized action '%(action)s' for task task"
" '%(task_uuid)s'" % locals())
return
logger.debug("task '%(task_uuid)s' started: '%(action)s'" % locals())
_notify('Started Task')
try:
func(task)
except Exception as e:
logger.error("task '%(task_uuid)s' errored: %(e)s" % locals())
_notify('Errored Task', exception=e)
else:
logger.debug("task '%(task_uuid)s' finished: returned successfully" %
locals())
_notify('Finished Task')
def _process_message(body, message):
message.ack()
task_uuid = body['task_uuid']
try:
task = db.task_get(task_uuid)
except common_exception.NotFound:
logger.error("Task '%(task_uuid)s' not found" % locals())
return
_perform_task(task)
def _consume_messages(exchange, queue, key):
kombu_xchg = kombu.Exchange(exchange, 'direct', durable=True)
kombu_queue = kombu.Queue(queue, exchange=kombu_xchg, key=key)
connection = tempo_queue.get_connection()
consumer = kombu.Consumer(connection.channel(), kombu_queue)
consumer.register_callback(_process_message)
consumer.consume()
while True:
connection.drain_events()
def consume_messages(exchange, queue, key):
if CFG.worker.daemonized:
# TODO(mdietz): there's a cleaner way to do this, but this works well
# as a way of backgrounding the server for now
import daemon
with daemon.DaemonContext():
_consume_messages(exchange, queue, key)
else:
_consume_messages(exchange, queue, key)
| [((970, 1003), 'logging.getLogger', 'logging.getLogger', (['"""tempo.worker"""'], {}), "('tempo.worker')\n", (987, 1003), False, 'import logging\n'), ((1266, 1317), 'tempo.openstack.common.cfg.OptGroup', 'cfg.OptGroup', ([], {'name': '"""worker"""', 'title': '"""Worker options"""'}), "(name='worker', title='Worker options')\n", (1278, 1317), False, 'from tempo.openstack.common import cfg\n'), ((1025, 1096), 'tempo.openstack.common.cfg.BoolOpt', 'cfg.BoolOpt', (['"""daemonized"""'], {'default': '(False)', 'help': '"""Run worker as a daemon"""'}), "('daemonized', default=False, help='Run worker as a daemon')\n", (1036, 1096), False, 'from tempo.openstack.common import cfg\n'), ((1134, 1222), 'tempo.openstack.common.cfg.StrOpt', 'cfg.StrOpt', (['"""publisher_id"""'], {'default': '"""host"""', 'help': '"""Where the notification came from"""'}), "('publisher_id', default='host', help=\n 'Where the notification came from')\n", (1144, 1222), False, 'from tempo.openstack.common import cfg\n'), ((2788, 2836), 'kombu.Exchange', 'kombu.Exchange', (['exchange', '"""direct"""'], {'durable': '(True)'}), "(exchange, 'direct', durable=True)\n", (2802, 2836), False, 'import kombu\n'), ((2855, 2903), 'kombu.Queue', 'kombu.Queue', (['queue'], {'exchange': 'kombu_xchg', 'key': 'key'}), '(queue, exchange=kombu_xchg, key=key)\n', (2866, 2903), False, 'import kombu\n'), ((2922, 2950), 'tempo.queue.get_connection', 'tempo_queue.get_connection', ([], {}), '()\n', (2948, 2950), True, 'from tempo import queue as tempo_queue\n'), ((1686, 1746), 'tempo.notifier.notify', 'notifier.notify', (['publisher_id', 'event_type', 'priority', 'payload'], {}), '(publisher_id, event_type, priority, payload)\n', (1701, 1746), False, 'from tempo import notifier\n'), ((2557, 2579), 'tempo.db.task_get', 'db.task_get', (['task_uuid'], {}), '(task_uuid)\n', (2568, 2579), False, 'from tempo import db\n'), ((3384, 3406), 'daemon.DaemonContext', 'daemon.DaemonContext', ([], {}), '()\n', (3404, 3406), False, 'import daemon\n')] |
AndyPJiang/basenji | bin/basenji_motifs.py | 64e43570c8bece156b4ab926608014f489b7965e | #!/usr/bin/env python
# Copyright 2017 Calico LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from __future__ import print_function
from optparse import OptionParser
import copy, os, pdb, random, shutil, subprocess, time
import h5py
import matplotlib
matplotlib.use('PDF')
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy.stats import spearmanr
import seaborn as sns
from sklearn import preprocessing
import tensorflow as tf
import basenji
'''
basenji_motifs.py
Collect statistics and make plots to explore the first convolution layer
of the given model using the given sequences.
'''
weblogo_opts = '-X NO -Y NO --errorbars NO --fineprint ""'
weblogo_opts += ' -C "#CB2026" A A'
weblogo_opts += ' -C "#34459C" C C'
weblogo_opts += ' -C "#FBB116" G G'
weblogo_opts += ' -C "#0C8040" T T'
################################################################################
# main
################################################################################
def main():
usage = 'usage: %prog [options] <params_file> <model_file> <data_file>'
parser = OptionParser(usage)
parser.add_option(
'-a',
dest='act_t',
default=0.5,
type='float',
help=
'Activation threshold (as proportion of max) to consider for PWM [Default: %default]'
)
parser.add_option(
'-d',
dest='model_hdf5_file',
default=None,
help='Pre-computed model output as HDF5.')
parser.add_option('-o', dest='out_dir', default='.')
parser.add_option(
'-m',
dest='meme_db',
default='%s/data/motifs/Homo_sapiens.meme' % os.environ['BASENJIDIR'],
help='MEME database used to annotate motifs')
parser.add_option(
'-p',
dest='plot_heats',
default=False,
action='store_true',
help=
'Plot heat maps describing filter activations in the test sequences [Default: %default]'
)
parser.add_option(
'-s',
dest='sample',
default=None,
type='int',
help='Sample sequences from the test set [Default:%default]')
parser.add_option(
'-t',
dest='trim_filters',
default=False,
action='store_true',
help='Trim uninformative positions off the filter ends [Default: %default]'
)
(options, args) = parser.parse_args()
if len(args) != 3:
parser.error(
'Must provide Basenji parameters and model files and test data in HDF5'
' format.'
)
else:
params_file = args[0]
model_file = args[1]
data_file = args[2]
if not os.path.isdir(options.out_dir):
os.mkdir(options.out_dir)
#################################################################
# load data
data_open = h5py.File(data_file)
test_seqs1 = data_open['test_in']
test_targets = data_open['test_out']
try:
target_names = list(data_open['target_labels'])
except KeyError:
target_names = ['t%d' % ti for ti in range(test_targets.shape[1])]
if options.sample is not None:
# choose sampled indexes
sample_i = sorted(random.sample(range(test_seqs1.shape[0]), options.sample))
# filter
test_seqs1 = test_seqs1[sample_i]
test_targets = test_targets[sample_i]
# convert to letters
test_seqs = basenji.dna_io.hot1_dna(test_seqs1)
#################################################################
# model parameters and placeholders
job = basenji.dna_io.read_job_params(params_file)
job['seq_length'] = test_seqs1.shape[1]
job['seq_depth'] = test_seqs1.shape[2]
job['num_targets'] = test_targets.shape[2]
job['target_pool'] = int(np.array(data_open.get('pool_width', 1)))
t0 = time.time()
dr = basenji.seqnn.SeqNN()
dr.build(job)
print('Model building time %ds' % (time.time() - t0))
# adjust for fourier
job['fourier'] = 'train_out_imag' in data_open
if job['fourier']:
test_targets_imag = data_open['test_out_imag']
if options.valid:
test_targets_imag = data_open['valid_out_imag']
#################################################################
# predict
# initialize batcher
if job['fourier']:
batcher_test = basenji.batcher.BatcherF(
test_seqs1,
test_targets,
test_targets_imag,
batch_size=dr.batch_size,
pool_width=job['target_pool'])
else:
batcher_test = basenji.batcher.Batcher(
test_seqs1,
test_targets,
batch_size=dr.batch_size,
pool_width=job['target_pool'])
# initialize saver
saver = tf.train.Saver()
with tf.Session() as sess:
# load variables into session
saver.restore(sess, model_file)
# get weights
filter_weights = sess.run(dr.filter_weights[0])
filter_weights = np.transpose(np.squeeze(filter_weights), [2, 1, 0])
print(filter_weights.shape)
# test
t0 = time.time()
layer_filter_outs, _ = dr.hidden(sess, batcher_test, layers=[0])
filter_outs = layer_filter_outs[0]
print(filter_outs.shape)
# store useful variables
num_filters = filter_weights.shape[0]
filter_size = filter_weights.shape[2]
#################################################################
# individual filter plots
#################################################################
# also save information contents
filters_ic = []
meme_out = meme_intro('%s/filters_meme.txt' % options.out_dir, test_seqs)
for f in range(num_filters):
print('Filter %d' % f)
# plot filter parameters as a heatmap
plot_filter_heat(filter_weights[f, :, :],
'%s/filter%d_heat.pdf' % (options.out_dir, f))
# write possum motif file
filter_possum(filter_weights[f, :, :], 'filter%d' % f,
'%s/filter%d_possum.txt' % (options.out_dir,
f), options.trim_filters)
# plot weblogo of high scoring outputs
plot_filter_logo(
filter_outs[:, :, f],
filter_size,
test_seqs,
'%s/filter%d_logo' % (options.out_dir, f),
maxpct_t=options.act_t)
# make a PWM for the filter
filter_pwm, nsites = make_filter_pwm('%s/filter%d_logo.fa' %
(options.out_dir, f))
if nsites < 10:
# no information
filters_ic.append(0)
else:
# compute and save information content
filters_ic.append(info_content(filter_pwm))
# add to the meme motif file
meme_add(meme_out, f, filter_pwm, nsites, options.trim_filters)
meme_out.close()
#################################################################
# annotate filters
#################################################################
# run tomtom
subprocess.call(
'tomtom -dist pearson -thresh 0.1 -oc %s/tomtom %s/filters_meme.txt %s' %
(options.out_dir, options.out_dir, options.meme_db),
shell=True)
# read in annotations
filter_names = name_filters(
num_filters, '%s/tomtom/tomtom.txt' % options.out_dir, options.meme_db)
#################################################################
# print a table of information
#################################################################
table_out = open('%s/table.txt' % options.out_dir, 'w')
# print header for later panda reading
header_cols = ('', 'consensus', 'annotation', 'ic', 'mean', 'std')
print('%3s %19s %10s %5s %6s %6s' % header_cols, file=table_out)
for f in range(num_filters):
# collapse to a consensus motif
consensus = filter_motif(filter_weights[f, :, :])
# grab annotation
annotation = '.'
name_pieces = filter_names[f].split('_')
if len(name_pieces) > 1:
annotation = name_pieces[1]
# plot density of filter output scores
fmean, fstd = plot_score_density(
np.ravel(filter_outs[:, :, f]),
'%s/filter%d_dens.pdf' % (options.out_dir, f))
row_cols = (f, consensus, annotation, filters_ic[f], fmean, fstd)
print('%-3d %19s %10s %5.2f %6.4f %6.4f' % row_cols, file=table_out)
table_out.close()
#################################################################
# global filter plots
#################################################################
if options.plot_heats:
# plot filter-sequence heatmap
plot_filter_seq_heat(filter_outs, '%s/filter_seqs.pdf' % options.out_dir)
# plot filter-segment heatmap
plot_filter_seg_heat(filter_outs, '%s/filter_segs.pdf' % options.out_dir)
plot_filter_seg_heat(
filter_outs, '%s/filter_segs_raw.pdf' % options.out_dir, whiten=False)
# plot filter-target correlation heatmap
plot_target_corr(filter_outs, seq_targets, filter_names, target_names,
'%s/filter_target_cors_mean.pdf' % options.out_dir, 'mean')
plot_target_corr(filter_outs, seq_targets, filter_names, target_names,
'%s/filter_target_cors_max.pdf' % options.out_dir, 'max')
def get_motif_proteins(meme_db_file):
""" Hash motif_id's to protein names using the MEME DB file """
motif_protein = {}
for line in open(meme_db_file):
a = line.split()
if len(a) > 0 and a[0] == 'MOTIF':
if a[2][0] == '(':
motif_protein[a[1]] = a[2][1:a[2].find(')')]
else:
motif_protein[a[1]] = a[2]
return motif_protein
def info_content(pwm, transpose=False, bg_gc=0.415):
""" Compute PWM information content.
In the original analysis, I used a bg_gc=0.5. For any
future analysis, I ought to switch to the true hg19
value of 0.415.
"""
pseudoc = 1e-9
if transpose:
pwm = np.transpose(pwm)
bg_pwm = [1 - bg_gc, bg_gc, bg_gc, 1 - bg_gc]
ic = 0
for i in range(pwm.shape[0]):
for j in range(4):
# ic += 0.5 + pwm[i][j]*np.log2(pseudoc+pwm[i][j])
ic += -bg_pwm[j] * np.log2(
bg_pwm[j]) + pwm[i][j] * np.log2(pseudoc + pwm[i][j])
return ic
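# Illustrative example (hypothetical PWM, not from the original analysis):
#     conserved_pwm = np.array([[0.97, 0.01, 0.01, 0.01]] * 8)
#     info_content(conserved_pwm)  # large positive value; every column is well conserved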
def make_filter_pwm(filter_fasta):
""" Make a PWM for this filter from its top hits """
nts = {'A': 0, 'C': 1, 'G': 2, 'T': 3}
pwm_counts = []
nsites = 4 # pseudocounts
for line in open(filter_fasta):
if line[0] != '>':
seq = line.rstrip()
nsites += 1
if len(pwm_counts) == 0:
# initialize with the length
for i in range(len(seq)):
pwm_counts.append(np.array([1.0] * 4))
# count
for i in range(len(seq)):
try:
pwm_counts[i][nts[seq[i]]] += 1
except KeyError:
pwm_counts[i] += np.array([0.25] * 4)
# normalize
pwm_freqs = []
for i in range(len(pwm_counts)):
pwm_freqs.append([pwm_counts[i][j] / float(nsites) for j in range(4)])
return np.array(pwm_freqs), nsites - 4
def meme_add(meme_out, f, filter_pwm, nsites, trim_filters=False):
""" Print a filter to the growing MEME file
Attrs:
meme_out : open file
f (int) : filter index #
filter_pwm (array) : filter PWM array
nsites (int) : number of filter sites
"""
if not trim_filters:
ic_start = 0
ic_end = filter_pwm.shape[0] - 1
else:
ic_t = 0.2
# trim PWM of uninformative prefix
ic_start = 0
while ic_start < filter_pwm.shape[0] and info_content(
filter_pwm[ic_start:ic_start + 1]) < ic_t:
ic_start += 1
# trim PWM of uninformative suffix
ic_end = filter_pwm.shape[0] - 1
while ic_end >= 0 and info_content(filter_pwm[ic_end:ic_end + 1]) < ic_t:
ic_end -= 1
if ic_start < ic_end:
print('MOTIF filter%d' % f, file=meme_out)
print(
'letter-probability matrix: alength= 4 w= %d nsites= %d' %
(ic_end - ic_start + 1, nsites),
file=meme_out)
for i in range(ic_start, ic_end + 1):
print('%.4f %.4f %.4f %.4f' % tuple(filter_pwm[i]), file=meme_out)
print('', file=meme_out)
def meme_intro(meme_file, seqs):
""" Open MEME motif format file and print intro
Attrs:
meme_file (str) : filename
seqs [str] : list of strings for obtaining background freqs
Returns:
mem_out : open MEME file
"""
nts = {'A': 0, 'C': 1, 'G': 2, 'T': 3}
# count
nt_counts = [1] * 4
for i in range(len(seqs)):
for nt in seqs[i]:
try:
nt_counts[nts[nt]] += 1
except KeyError:
pass
# normalize
nt_sum = float(sum(nt_counts))
nt_freqs = [nt_counts[i] / nt_sum for i in range(4)]
# open file for writing
meme_out = open(meme_file, 'w')
# print intro material
print('MEME version 4', file=meme_out)
print('', file=meme_out)
print('ALPHABET= ACGT', file=meme_out)
print('', file=meme_out)
print('Background letter frequencies:', file=meme_out)
print('A %.4f C %.4f G %.4f T %.4f' % tuple(nt_freqs), file=meme_out)
print('', file=meme_out)
return meme_out
def name_filters(num_filters, tomtom_file, meme_db_file):
""" Name the filters using Tomtom matches.
Attrs:
num_filters (int) : total number of filters
tomtom_file (str) : filename of Tomtom output table.
meme_db_file (str) : filename of MEME db
Returns:
filter_names [str] :
"""
# name by number
filter_names = ['f%d' % fi for fi in range(num_filters)]
# name by protein
if tomtom_file is not None and meme_db_file is not None:
motif_protein = get_motif_proteins(meme_db_file)
# hash motifs and q-value's by filter
filter_motifs = {}
tt_in = open(tomtom_file)
tt_in.readline()
for line in tt_in:
a = line.split()
fi = int(a[0][6:])
motif_id = a[1]
qval = float(a[5])
filter_motifs.setdefault(fi, []).append((qval, motif_id))
tt_in.close()
# assign filter's best match
for fi in filter_motifs:
top_motif = sorted(filter_motifs[fi])[0][1]
filter_names[fi] += '_%s' % motif_protein[top_motif]
return np.array(filter_names)
################################################################################
# plot_target_corr
#
# Plot a clustered heatmap of correlations between filter activations and
# targets.
#
# Input
# filter_outs:
# filter_names:
# target_names:
# out_pdf:
################################################################################
def plot_target_corr(filter_outs, seq_targets, filter_names, target_names, out_pdf, seq_op='mean'):
num_seqs = filter_outs.shape[0]
num_targets = len(target_names)
if seq_op == 'mean':
filter_outs_seq = filter_outs.mean(axis=2)
else:
filter_outs_seq = filter_outs.max(axis=2)
# std is sequence by filter.
filter_seqs_std = filter_outs_seq.std(axis=0)
filter_outs_seq = filter_outs_seq[:, filter_seqs_std > 0]
filter_names_live = filter_names[filter_seqs_std > 0]
filter_target_cors = np.zeros((len(filter_names_live), num_targets))
for fi in range(len(filter_names_live)):
for ti in range(num_targets):
cor, p = spearmanr(filter_outs_seq[:, fi], seq_targets[:num_seqs, ti])
filter_target_cors[fi, ti] = cor
cor_df = pd.DataFrame(
filter_target_cors, index=filter_names_live, columns=target_names)
sns.set(font_scale=0.3)
plt.figure()
sns.clustermap(cor_df, cmap='BrBG', center=0, figsize=(8, 10))
plt.savefig(out_pdf)
plt.close()
################################################################################
# plot_filter_seq_heat
#
# Plot a clustered heatmap of filter activations in the test sequences.
#
# Input
#  filter_outs: np.array of the filter outputs across the test sequences
# out_pdf:
################################################################################
def plot_filter_seq_heat(filter_outs, out_pdf, whiten=True, drop_dead=True):
# compute filter output means per sequence
filter_seqs = filter_outs.mean(axis=2)
# whiten
if whiten:
filter_seqs = preprocessing.scale(filter_seqs)
# transpose
filter_seqs = np.transpose(filter_seqs)
if drop_dead:
filter_stds = filter_seqs.std(axis=1)
filter_seqs = filter_seqs[filter_stds > 0]
# downsample sequences
seqs_i = np.random.randint(0, filter_seqs.shape[1], 500)
hmin = np.percentile(filter_seqs[:, seqs_i], 0.1)
hmax = np.percentile(filter_seqs[:, seqs_i], 99.9)
sns.set(font_scale=0.3)
plt.figure()
sns.clustermap(
filter_seqs[:, seqs_i],
row_cluster=True,
col_cluster=True,
linewidths=0,
xticklabels=False,
vmin=hmin,
vmax=hmax)
plt.savefig(out_pdf)
#out_png = out_pdf[:-2] + 'ng'
#plt.savefig(out_png, dpi=300)
plt.close()
################################################################################
# plot_filter_seg_heat
#
# Plot a clustered heatmap of filter activations in sequence segments.
#
# Mean doesn't work well for the smaller segments for some reason, but taking
# the max looks OK. Still, similar motifs don't cluster quite as well as you
# might expect.
#
# Input
# filter_outs
################################################################################
def plot_filter_seg_heat(filter_outs, out_pdf, whiten=True, drop_dead=True):
b = filter_outs.shape[0]
f = filter_outs.shape[1]
l = filter_outs.shape[2]
s = 5
while l / float(s) - (l / s) > 0:
s += 1
print('%d segments of length %d' % (s, l / s))
# split into multiple segments
filter_outs_seg = np.reshape(filter_outs, (b, f, s, l / s))
# mean across the segments
filter_outs_mean = filter_outs_seg.max(axis=3)
# break each segment into a new instance
filter_seqs = np.reshape(np.swapaxes(filter_outs_mean, 2, 1), (s * b, f))
# whiten
if whiten:
filter_seqs = preprocessing.scale(filter_seqs)
# transpose
filter_seqs = np.transpose(filter_seqs)
if drop_dead:
filter_stds = filter_seqs.std(axis=1)
filter_seqs = filter_seqs[filter_stds > 0]
# downsample sequences
seqs_i = np.random.randint(0, filter_seqs.shape[1], 500)
hmin = np.percentile(filter_seqs[:, seqs_i], 0.1)
hmax = np.percentile(filter_seqs[:, seqs_i], 99.9)
sns.set(font_scale=0.3)
if whiten:
dist = 'euclidean'
else:
dist = 'cosine'
plt.figure()
sns.clustermap(
filter_seqs[:, seqs_i],
metric=dist,
row_cluster=True,
col_cluster=True,
linewidths=0,
xticklabels=False,
vmin=hmin,
vmax=hmax)
plt.savefig(out_pdf)
#out_png = out_pdf[:-2] + 'ng'
#plt.savefig(out_png, dpi=300)
plt.close()
################################################################################
# filter_motif
#
# Collapse the filter parameter matrix to a single DNA motif.
#
# Input
# param_matrix: np.array of the filter's parameter matrix
# out_pdf:
################################################################################
def filter_motif(param_matrix):
nts = 'ACGT'
motif_list = []
for v in range(param_matrix.shape[1]):
max_n = 0
for n in range(1, 4):
if param_matrix[n, v] > param_matrix[max_n, v]:
max_n = n
if param_matrix[max_n, v] > 0:
motif_list.append(nts[max_n])
else:
motif_list.append('N')
return ''.join(motif_list)
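# Illustrative example (hypothetical 4 x 2 weight matrix, not from a trained model):
#     filter_motif(np.array([[0.9, -0.1], [-0.2, 0.8], [-0.5, -0.3], [-0.1, -0.4]]))
#     # -> 'AC': the max-weight nucleotide per column, or 'N' where all weights are <= 0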
################################################################################
# filter_possum
#
# Write a Possum-style motif
#
# Input
# param_matrix: np.array of the filter's parameter matrix
# out_pdf:
################################################################################
def filter_possum(param_matrix, motif_id, possum_file, trim_filters=False, mult=200):
# possible trim
trim_start = 0
trim_end = param_matrix.shape[1] - 1
trim_t = 0.3
if trim_filters:
# trim PWM of uninformative prefix
while trim_start < param_matrix.shape[1] and np.max(
param_matrix[:, trim_start]) - np.min(
param_matrix[:, trim_start]) < trim_t:
trim_start += 1
# trim PWM of uninformative suffix
while trim_end >= 0 and np.max(param_matrix[:, trim_end]) - np.min(
param_matrix[:, trim_end]) < trim_t:
trim_end -= 1
if trim_start < trim_end:
possum_out = open(possum_file, 'w')
print('BEGIN GROUP', file=possum_out)
print('BEGIN FLOAT', file=possum_out)
print('ID %s' % motif_id, file=possum_out)
print('AP DNA', file=possum_out)
print('LE %d' % (trim_end + 1 - trim_start), file=possum_out)
for ci in range(trim_start, trim_end + 1):
print(
'MA %s' % ' '.join(['%.2f' % (mult * n)
for n in param_matrix[:, ci]]),
file=possum_out)
print('END', file=possum_out)
print('END', file=possum_out)
possum_out.close()
################################################################################
# plot_filter_heat
#
# Plot a heatmap of the filter's parameters.
#
# Input
# param_matrix: np.array of the filter's parameter matrix
# out_pdf:
################################################################################
def plot_filter_heat(param_matrix, out_pdf):
param_range = abs(param_matrix).max()
sns.set(font_scale=2)
plt.figure(figsize=(param_matrix.shape[1], 4))
sns.heatmap(
param_matrix,
cmap='PRGn',
linewidths=0.2,
vmin=-param_range,
vmax=param_range)
ax = plt.gca()
ax.set_xticklabels(range(1, param_matrix.shape[1] + 1))
ax.set_yticklabels('TGCA', rotation='horizontal') # , size=10)
plt.savefig(out_pdf)
plt.close()
################################################################################
# plot_filter_logo
#
# Plot a weblogo of the filter's occurrences
#
# Input
#  filter_outs: np.array of the filter's output values across the sequences
#  out_prefix:
################################################################################
def plot_filter_logo(filter_outs, filter_size, seqs, out_prefix, raw_t=0, maxpct_t=None):
if maxpct_t:
all_outs = np.ravel(filter_outs)
all_outs_mean = all_outs.mean()
all_outs_norm = all_outs - all_outs_mean
raw_t = maxpct_t * all_outs_norm.max() + all_outs_mean
left_pad = (filter_size - 1) // 2
right_pad = filter_size - left_pad
# print fasta file of positive outputs
filter_fasta_out = open('%s.fa' % out_prefix, 'w')
filter_count = 0
for i in range(filter_outs.shape[0]):
for j in range(filter_outs.shape[1]):
if filter_outs[i, j] > raw_t:
# construct kmer
kmer = ''
# determine boundaries, considering padding
fstart = j - left_pad
fend = fstart + filter_size
# if it starts in left_pad
if fstart < 0:
kmer += 'N' * (-fstart)
fstart = 0
# add primary sequence
kmer += seqs[i][fstart:fend]
# if it ends in right_pad
if fend > len(seqs[i]):
kmer += 'N' * (fend - len(seqs[i]))
# output
print('>%d_%d' % (i, j), file=filter_fasta_out)
print(kmer, file=filter_fasta_out)
filter_count += 1
filter_fasta_out.close()
# make weblogo
if filter_count > 0:
weblogo_cmd = 'weblogo %s < %s.fa > %s.eps' % (weblogo_opts, out_prefix,
out_prefix)
subprocess.call(weblogo_cmd, shell=True)
################################################################################
# plot_score_density
#
# Plot the score density and print to the stats table.
#
# Input
#  f_scores: np.array of the filter's output scores
# out_pdf:
################################################################################
def plot_score_density(f_scores, out_pdf):
sns.set(font_scale=1.3)
plt.figure()
sns.distplot(f_scores, kde=False)
plt.xlabel('ReLU output')
plt.savefig(out_pdf)
plt.close()
return f_scores.mean(), f_scores.std()
################################################################################
# __main__
################################################################################
if __name__ == '__main__':
main()
# pdb.runcall(main)
| [((829, 850), 'matplotlib.use', 'matplotlib.use', (['"""PDF"""'], {}), "('PDF')\n", (843, 850), False, 'import matplotlib\n'), ((1670, 1689), 'optparse.OptionParser', 'OptionParser', (['usage'], {}), '(usage)\n', (1682, 1689), False, 'from optparse import OptionParser\n'), ((3270, 3290), 'h5py.File', 'h5py.File', (['data_file'], {}), '(data_file)\n', (3279, 3290), False, 'import h5py\n'), ((3793, 3828), 'basenji.dna_io.hot1_dna', 'basenji.dna_io.hot1_dna', (['test_seqs1'], {}), '(test_seqs1)\n', (3816, 3828), False, 'import basenji\n'), ((3945, 3988), 'basenji.dna_io.read_job_params', 'basenji.dna_io.read_job_params', (['params_file'], {}), '(params_file)\n', (3975, 3988), False, 'import basenji\n'), ((4195, 4206), 'time.time', 'time.time', ([], {}), '()\n', (4204, 4206), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((4214, 4235), 'basenji.seqnn.SeqNN', 'basenji.seqnn.SeqNN', ([], {}), '()\n', (4233, 4235), False, 'import basenji\n'), ((5041, 5057), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (5055, 5057), True, 'import tensorflow as tf\n'), ((7204, 7367), 'subprocess.call', 'subprocess.call', (["('tomtom -dist pearson -thresh 0.1 -oc %s/tomtom %s/filters_meme.txt %s' %\n (options.out_dir, options.out_dir, options.meme_db))"], {'shell': '(True)'}), "(\n 'tomtom -dist pearson -thresh 0.1 -oc %s/tomtom %s/filters_meme.txt %s' %\n (options.out_dir, options.out_dir, options.meme_db), shell=True)\n", (7219, 7367), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((14271, 14293), 'numpy.array', 'np.array', (['filter_names'], {}), '(filter_names)\n', (14279, 14293), True, 'import numpy as np\n'), ((15402, 15481), 'pandas.DataFrame', 'pd.DataFrame', (['filter_target_cors'], {'index': 'filter_names_live', 'columns': 'target_names'}), '(filter_target_cors, index=filter_names_live, columns=target_names)\n', (15414, 15481), True, 'import pandas as pd\n'), ((15492, 15515), 'seaborn.set', 'sns.set', ([], {'font_scale': '(0.3)'}), '(font_scale=0.3)\n', (15499, 15515), True, 'import seaborn as sns\n'), ((15518, 15530), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (15528, 15530), True, 'import matplotlib.pyplot as plt\n'), ((15533, 15595), 'seaborn.clustermap', 'sns.clustermap', (['cor_df'], {'cmap': '"""BrBG"""', 'center': '(0)', 'figsize': '(8, 10)'}), "(cor_df, cmap='BrBG', center=0, figsize=(8, 10))\n", (15547, 15595), True, 'import seaborn as sns\n'), ((15598, 15618), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (15609, 15618), True, 'import matplotlib.pyplot as plt\n'), ((15621, 15632), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (15630, 15632), True, 'import matplotlib.pyplot as plt\n'), ((16225, 16250), 'numpy.transpose', 'np.transpose', (['filter_seqs'], {}), '(filter_seqs)\n', (16237, 16250), True, 'import numpy as np\n'), ((16394, 16441), 'numpy.random.randint', 'np.random.randint', (['(0)', 'filter_seqs.shape[1]', '(500)'], {}), '(0, filter_seqs.shape[1], 500)\n', (16411, 16441), True, 'import numpy as np\n'), ((16452, 16496), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, (seqs_i)]', '(0.1)'], {}), '(filter_seqs[:, (seqs_i)], 0.1)\n', (16465, 16496), True, 'import numpy as np\n'), ((16504, 16549), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, (seqs_i)]', '(99.9)'], {}), '(filter_seqs[:, (seqs_i)], 99.9)\n', (16517, 16549), True, 'import numpy as np\n'), ((16551, 16574), 'seaborn.set', 'sns.set', ([], {'font_scale': '(0.3)'}), '(font_scale=0.3)\n', (16558, 
16574), True, 'import seaborn as sns\n'), ((16578, 16590), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (16588, 16590), True, 'import matplotlib.pyplot as plt\n'), ((16593, 16728), 'seaborn.clustermap', 'sns.clustermap', (['filter_seqs[:, (seqs_i)]'], {'row_cluster': '(True)', 'col_cluster': '(True)', 'linewidths': '(0)', 'xticklabels': '(False)', 'vmin': 'hmin', 'vmax': 'hmax'}), '(filter_seqs[:, (seqs_i)], row_cluster=True, col_cluster=True,\n linewidths=0, xticklabels=False, vmin=hmin, vmax=hmax)\n', (16607, 16728), True, 'import seaborn as sns\n'), ((16768, 16788), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (16779, 16788), True, 'import matplotlib.pyplot as plt\n'), ((16857, 16868), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (16866, 16868), True, 'import matplotlib.pyplot as plt\n'), ((17644, 17685), 'numpy.reshape', 'np.reshape', (['filter_outs', '(b, f, s, l / s)'], {}), '(filter_outs, (b, f, s, l / s))\n', (17654, 17685), True, 'import numpy as np\n'), ((17992, 18017), 'numpy.transpose', 'np.transpose', (['filter_seqs'], {}), '(filter_seqs)\n', (18004, 18017), True, 'import numpy as np\n'), ((18161, 18208), 'numpy.random.randint', 'np.random.randint', (['(0)', 'filter_seqs.shape[1]', '(500)'], {}), '(0, filter_seqs.shape[1], 500)\n', (18178, 18208), True, 'import numpy as np\n'), ((18219, 18263), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, (seqs_i)]', '(0.1)'], {}), '(filter_seqs[:, (seqs_i)], 0.1)\n', (18232, 18263), True, 'import numpy as np\n'), ((18271, 18316), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, (seqs_i)]', '(99.9)'], {}), '(filter_seqs[:, (seqs_i)], 99.9)\n', (18284, 18316), True, 'import numpy as np\n'), ((18318, 18341), 'seaborn.set', 'sns.set', ([], {'font_scale': '(0.3)'}), '(font_scale=0.3)\n', (18325, 18341), True, 'import seaborn as sns\n'), ((18409, 18421), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (18419, 18421), True, 'import matplotlib.pyplot as plt\n'), ((18424, 18572), 'seaborn.clustermap', 'sns.clustermap', (['filter_seqs[:, (seqs_i)]'], {'metric': 'dist', 'row_cluster': '(True)', 'col_cluster': '(True)', 'linewidths': '(0)', 'xticklabels': '(False)', 'vmin': 'hmin', 'vmax': 'hmax'}), '(filter_seqs[:, (seqs_i)], metric=dist, row_cluster=True,\n col_cluster=True, linewidths=0, xticklabels=False, vmin=hmin, vmax=hmax)\n', (18438, 18572), True, 'import seaborn as sns\n'), ((18618, 18638), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (18629, 18638), True, 'import matplotlib.pyplot as plt\n'), ((18707, 18718), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (18716, 18718), True, 'import matplotlib.pyplot as plt\n'), ((21275, 21296), 'seaborn.set', 'sns.set', ([], {'font_scale': '(2)'}), '(font_scale=2)\n', (21282, 21296), True, 'import seaborn as sns\n'), ((21299, 21345), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(param_matrix.shape[1], 4)'}), '(figsize=(param_matrix.shape[1], 4))\n', (21309, 21345), True, 'import matplotlib.pyplot as plt\n'), ((21348, 21443), 'seaborn.heatmap', 'sns.heatmap', (['param_matrix'], {'cmap': '"""PRGn"""', 'linewidths': '(0.2)', 'vmin': '(-param_range)', 'vmax': 'param_range'}), "(param_matrix, cmap='PRGn', linewidths=0.2, vmin=-param_range,\n vmax=param_range)\n", (21359, 21443), True, 'import seaborn as sns\n'), ((21478, 21487), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (21485, 21487), True, 'import matplotlib.pyplot as plt\n'), ((21614, 21634), 
'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (21625, 21634), True, 'import matplotlib.pyplot as plt\n'), ((21637, 21648), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (21646, 21648), True, 'import matplotlib.pyplot as plt\n'), ((23772, 23795), 'seaborn.set', 'sns.set', ([], {'font_scale': '(1.3)'}), '(font_scale=1.3)\n', (23779, 23795), True, 'import seaborn as sns\n'), ((23798, 23810), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (23808, 23810), True, 'import matplotlib.pyplot as plt\n'), ((23813, 23846), 'seaborn.distplot', 'sns.distplot', (['f_scores'], {'kde': '(False)'}), '(f_scores, kde=False)\n', (23825, 23846), True, 'import seaborn as sns\n'), ((23849, 23874), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""ReLU output"""'], {}), "('ReLU output')\n", (23859, 23874), True, 'import matplotlib.pyplot as plt\n'), ((23877, 23897), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (23888, 23897), True, 'import matplotlib.pyplot as plt\n'), ((23900, 23911), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (23909, 23911), True, 'import matplotlib.pyplot as plt\n'), ((3110, 3140), 'os.path.isdir', 'os.path.isdir', (['options.out_dir'], {}), '(options.out_dir)\n', (3123, 3140), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((3146, 3171), 'os.mkdir', 'os.mkdir', (['options.out_dir'], {}), '(options.out_dir)\n', (3154, 3171), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((4674, 4804), 'basenji.batcher.BatcherF', 'basenji.batcher.BatcherF', (['test_seqs1', 'test_targets', 'test_targets_imag'], {'batch_size': 'dr.batch_size', 'pool_width': "job['target_pool']"}), "(test_seqs1, test_targets, test_targets_imag,\n batch_size=dr.batch_size, pool_width=job['target_pool'])\n", (4698, 4804), False, 'import basenji\n'), ((4869, 4979), 'basenji.batcher.Batcher', 'basenji.batcher.Batcher', (['test_seqs1', 'test_targets'], {'batch_size': 'dr.batch_size', 'pool_width': "job['target_pool']"}), "(test_seqs1, test_targets, batch_size=dr.batch_size,\n pool_width=job['target_pool'])\n", (4892, 4979), False, 'import basenji\n'), ((5066, 5078), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (5076, 5078), True, 'import tensorflow as tf\n'), ((5355, 5366), 'time.time', 'time.time', ([], {}), '()\n', (5364, 5366), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((10067, 10084), 'numpy.transpose', 'np.transpose', (['pwm'], {}), '(pwm)\n', (10079, 10084), True, 'import numpy as np\n'), ((11127, 11146), 'numpy.array', 'np.array', (['pwm_freqs'], {}), '(pwm_freqs)\n', (11135, 11146), True, 'import numpy as np\n'), ((16161, 16193), 'sklearn.preprocessing.scale', 'preprocessing.scale', (['filter_seqs'], {}), '(filter_seqs)\n', (16180, 16193), False, 'from sklearn import preprocessing\n'), ((17836, 17871), 'numpy.swapaxes', 'np.swapaxes', (['filter_outs_mean', '(2)', '(1)'], {}), '(filter_outs_mean, 2, 1)\n', (17847, 17871), True, 'import numpy as np\n'), ((17928, 17960), 'sklearn.preprocessing.scale', 'preprocessing.scale', (['filter_seqs'], {}), '(filter_seqs)\n', (17947, 17960), False, 'from sklearn import preprocessing\n'), ((22080, 22101), 'numpy.ravel', 'np.ravel', (['filter_outs'], {}), '(filter_outs)\n', (22088, 22101), True, 'import numpy as np\n'), ((23363, 23403), 'subprocess.call', 'subprocess.call', (['weblogo_cmd'], {'shell': '(True)'}), '(weblogo_cmd, shell=True)\n', (23378, 23403), False, 'import copy, os, pdb, random, shutil, subprocess, 
time\n'), ((5263, 5289), 'numpy.squeeze', 'np.squeeze', (['filter_weights'], {}), '(filter_weights)\n', (5273, 5289), True, 'import numpy as np\n'), ((8287, 8319), 'numpy.ravel', 'np.ravel', (['filter_outs[:, :, (f)]'], {}), '(filter_outs[:, :, (f)])\n', (8295, 8319), True, 'import numpy as np\n'), ((15289, 15354), 'scipy.stats.spearmanr', 'spearmanr', (['filter_outs_seq[:, (fi)]', 'seq_targets[:num_seqs, (ti)]'], {}), '(filter_outs_seq[:, (fi)], seq_targets[:num_seqs, (ti)])\n', (15298, 15354), False, 'from scipy.stats import spearmanr\n'), ((4289, 4300), 'time.time', 'time.time', ([], {}), '()\n', (4298, 4300), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((10281, 10299), 'numpy.log2', 'np.log2', (['bg_pwm[j]'], {}), '(bg_pwm[j])\n', (10288, 10299), True, 'import numpy as np\n'), ((10325, 10353), 'numpy.log2', 'np.log2', (['(pseudoc + pwm[i][j])'], {}), '(pseudoc + pwm[i][j])\n', (10332, 10353), True, 'import numpy as np\n'), ((19977, 20014), 'numpy.max', 'np.max', (['param_matrix[:, (trim_start)]'], {}), '(param_matrix[:, (trim_start)])\n', (19983, 20014), True, 'import numpy as np\n'), ((20024, 20061), 'numpy.min', 'np.min', (['param_matrix[:, (trim_start)]'], {}), '(param_matrix[:, (trim_start)])\n', (20030, 20061), True, 'import numpy as np\n'), ((20173, 20208), 'numpy.max', 'np.max', (['param_matrix[:, (trim_end)]'], {}), '(param_matrix[:, (trim_end)])\n', (20179, 20208), True, 'import numpy as np\n'), ((20209, 20244), 'numpy.min', 'np.min', (['param_matrix[:, (trim_end)]'], {}), '(param_matrix[:, (trim_end)])\n', (20215, 20244), True, 'import numpy as np\n'), ((10779, 10798), 'numpy.array', 'np.array', (['([1.0] * 4)'], {}), '([1.0] * 4)\n', (10787, 10798), True, 'import numpy as np\n'), ((10954, 10974), 'numpy.array', 'np.array', (['([0.25] * 4)'], {}), '([0.25] * 4)\n', (10962, 10974), True, 'import numpy as np\n')] |
Joetib/jshop | apps/shop/urls.py | 810ce5dcf2cf2d23b45536dd0c8806efd3b7fc91 | from django.urls import path
from . import views
app_name = "shop"
urlpatterns = [
path('', views.HomePage.as_view(), name="home-page"),
path('shop/', views.ProductListView.as_view(), name="product-list"),
path('shop/<int:category_pk>/', views.ProductListView.as_view(), name="product-list"),
path('shop/products/<int:pk>/', views.ProductDetailView.as_view(), name="product-detail"),
path('cart/', views.cart_view, name="cart"),
path('cart/add/<int:product_pk>/', views.add_product_to_order, name="add-product-to-cart"),
path('cart/add/<int:product_pk>/json/', views.add_product_to_cart_json, name="add-product-to-cart-json"),
path('checkout/', views.CheckOut.as_view(), name="checkout"),
path('checkout/<int:address_pk>/', views.CheckOut.as_view(), name="checkout"),
path('payment/', views.PaymentChoice.as_view(), name="payment-choice"),
path('payment/order/<int:pk>/', views.MomoPayment.as_view(), name="momo-payment"),
path('payment/momo/<int:pk>/confirm/', views.ConfirmMomoPayment.as_view(), name="confirm-momo-payment"),
path('orders/', views.OrderList.as_view(), name="order-list"),
path('orders/<int:pk>/', views.OrderDetail.as_view(), name="order-detail"),
path('orders/<int:order_id>/items/<int:pk>/', views.OrderItemDetail.as_view(), name="order-item-detail"),
]
| [((406, 449), 'django.urls.path', 'path', (['"""cart/"""', 'views.cart_view'], {'name': '"""cart"""'}), "('cart/', views.cart_view, name='cart')\n", (410, 449), False, 'from django.urls import path\n'), ((455, 550), 'django.urls.path', 'path', (['"""cart/add/<int:product_pk>/"""', 'views.add_product_to_order'], {'name': '"""add-product-to-cart"""'}), "('cart/add/<int:product_pk>/', views.add_product_to_order, name=\n 'add-product-to-cart')\n", (459, 550), False, 'from django.urls import path\n'), ((551, 659), 'django.urls.path', 'path', (['"""cart/add/<int:product_pk>/json/"""', 'views.add_product_to_cart_json'], {'name': '"""add-product-to-cart-json"""'}), "('cart/add/<int:product_pk>/json/', views.add_product_to_cart_json,\n name='add-product-to-cart-json')\n", (555, 659), False, 'from django.urls import path\n')] |
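A quick usage sketch for the URL patterns above (illustrative only, not part of the dataset row): the named routes can be resolved with Django's reverse(), assuming this URLConf is included from the project's root URLConf, which is not shown here.
from django.urls import reverse
# Route names come from the patterns above; the resulting paths assume the
# URLConf is mounted at the project root (an assumption, not shown in the row).
reverse("shop:product-detail", kwargs={"pk": 1})               # -> /shop/products/1/
reverse("shop:add-product-to-cart", kwargs={"product_pk": 2})  # -> /cart/add/2/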
dfm/SurPyval | surpyval/parametric/expo_weibull.py | 014fba8f1d4a0f43218a3713ce80a78191ad8be9 | import autograd.numpy as np
from scipy.stats import uniform
from autograd import jacobian
from numpy import euler_gamma
from scipy.special import gamma as gamma_func
from scipy.special import ndtri as z
from scipy import integrate
from scipy.optimize import minimize
from surpyval import parametric as para
from surpyval import nonparametric as nonp
from surpyval.parametric.parametric_fitter import ParametricFitter
from .fitters.mpp import mpp
class ExpoWeibull_(ParametricFitter):
def __init__(self, name):
self.name = name
self.k = 3
self.bounds = ((0, None), (0, None), (0, None),)
self.support = (0, np.inf)
self.plot_x_scale = 'log'
self.y_ticks = [0.0001, 0.0002, 0.0003, 0.001, 0.002,
0.003, 0.005, 0.01, 0.02, 0.03, 0.05,
0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8,
0.9, 0.95, 0.99, 0.999, 0.9999]
self.param_names = ['alpha', 'beta', 'mu']
self.param_map = {
'alpha' : 0,
'beta' : 1,
'mu' : 2
}
def _parameter_initialiser(self, x, c=None, n=None, offset=False):
log_x = np.log(x)
log_x[np.isnan(log_x)] = 0
gumb = para.Gumbel.fit(log_x, c, n, how='MLE')
if not gumb.res.success:
gumb = para.Gumbel.fit(log_x, c, n, how='MPP')
mu, sigma = gumb.params
alpha, beta = np.exp(mu), 1. / sigma
if (np.isinf(alpha) | np.isnan(alpha)):
alpha = np.median(x)
if (np.isinf(beta) | np.isnan(beta)):
beta = 1.
if offset:
gamma = np.min(x) - (np.max(x) - np.min(x))/10.
return gamma, alpha, beta, 1.
else:
return alpha, beta, 1.
def sf(self, x, alpha, beta, mu):
r"""
Survival (or reliability) function for the ExpoWeibull Distribution:
.. math::
R(x) = 1 - \left [ 1 - e^{-\left ( \frac{x}{\alpha} \right )^\beta} \right ]^{\mu}
Parameters
----------
x : numpy array or scalar
The values at which the function will be calculated
alpha : numpy array or scalar
scale parameter for the ExpoWeibull distribution
beta : numpy array or scalar
shape parameter for the ExpoWeibull distribution
mu : numpy array or scalar
shape parameter for the ExpoWeibull distribution
Returns
-------
sf : scalar or numpy array
The value(s) of the reliability function at x.
Examples
--------
>>> import numpy as np
>>> from surpyval import ExpoWeibull
>>> x = np.array([1, 2, 3, 4, 5])
>>> ExpoWeibull.sf(x, 3, 4, 1.2)
array([9.94911330e-01, 8.72902497e-01, 4.23286791e-01, 5.06674866e-02,
5.34717283e-04])
"""
return 1 - np.power(1 - np.exp(-(x / alpha)**beta), mu)
def ff(self, x, alpha, beta, mu):
r"""
Failure (CDF or unreliability) function for the ExpoWeibull Distribution:
.. math::
F(x) = \left [ 1 - e^{-\left ( \frac{x}{\alpha} \right )^\beta} \right ]^{\mu}
Parameters
----------
x : numpy array or scalar
The values at which the function will be calculated
alpha : numpy array or scalar
scale parameter for the ExpoWeibull distribution
beta : numpy array or scalar
shape parameter for the ExpoWeibull distribution
mu : numpy array or scalar
shape parameter for the ExpoWeibull distribution
Returns
-------
sf : scalar or numpy array
The value(s) of the failure function at x.
Examples
--------
>>> import numpy as np
>>> from surpyval import ExpoWeibull
>>> x = np.array([1, 2, 3, 4, 5])
>>> ExpoWeibull.ff(x, 3, 4, 1.2)
array([0.00508867, 0.1270975 , 0.57671321, 0.94933251, 0.99946528])
"""
return np.power(1 - np.exp(-(x / alpha)**beta), mu)
def cs(self, x, X, alpha, beta, mu):
r"""
Conditional survival (or reliability) function for the ExpoWeibull Distribution:
.. math::
R(x, X) = \frac{R(x + X)}{R(X)}
Parameters
----------
x : numpy array or scalar
The values at which the function will be calculated
alpha : numpy array or scalar
scale parameter for the ExpoWeibull distribution
beta : numpy array or scalar
shape parameter for the ExpoWeibull distribution
mu : numpy array or scalar
shape parameter for the ExpoWeibull distribution
Returns
-------
sf : scalar or numpy array
The value(s) of the reliability function at x.
Examples
--------
>>> import numpy as np
>>> from surpyval import ExpoWeibull
>>> x = np.array([1, 2, 3, 4, 5])
        >>> ExpoWeibull.cs(x, 1, 3, 4, 1.2)
array([8.77367129e-01, 4.25451775e-01, 5.09266354e-02, 5.37452200e-04,
1.35732908e-07])
"""
return self.sf(x + X, alpha, beta, mu) / self.sf(X, alpha, beta, mu)
def df(self, x, alpha, beta, mu):
r"""
Density function for the ExpoWeibull Distribution:
.. math::
f(x) = \mu \left ( \frac{\beta}{\alpha} \right ) \left ( \frac{x}{\alpha} \right )^{\beta - 1} \left [ 1 - e^{-\left ( \frac{x}{\alpha} \right )^\beta} \right ]^{\mu - 1} e^{- \left ( \frac{x}{\alpha} \right )^\beta}
Parameters
----------
x : numpy array or scalar
The values at which the function will be calculated
alpha : numpy array or scalar
scale parameter for the ExpoWeibull distribution
beta : numpy array or scalar
shape parameter for the ExpoWeibull distribution
mu : numpy array or scalar
shape parameter for the ExpoWeibull distribution
Returns
-------
df : scalar or numpy array
The value(s) of the density function at x.
Examples
--------
>>> import numpy as np
>>> from surpyval import ExpoWeibull
>>> x = np.array([1, 2, 3, 4, 5])
>>> ExpoWeibull.df(x, 3, 4, 1.2)
array([0.02427515, 0.27589838, 0.53701385, 0.15943643, 0.00330058])
"""
return (beta * mu * x**(beta - 1)) / (alpha**beta) \
* (1 - np.exp(-(x/alpha)**beta))**(mu - 1) \
* np.exp(-(x/alpha)**beta)
def hf(self, x, alpha, beta, mu):
r"""
Instantaneous hazard rate for the ExpoWeibull Distribution:
.. math::
h(x) = \frac{f(x)}{R(x)}
Parameters
----------
x : numpy array or scalar
The values at which the function will be calculated
alpha : numpy array or scalar
scale parameter for the ExpoWeibull distribution
beta : numpy array or scalar
shape parameter for the ExpoWeibull distribution
mu : numpy array or scalar
shape parameter for the ExpoWeibull distribution
Returns
-------
hf : scalar or numpy array
The value(s) of the instantaneous hazard rate at x.
Examples
--------
>>> import numpy as np
>>> from surpyval import ExpoWeibull
>>> x = np.array([1, 2, 3, 4, 5])
>>> ExpoWeibull.hf(x, 3, 4, 1.2)
array([0.02439931, 0.3160701 , 1.26867613, 3.14672068, 6.17256436])
"""
return self.df(x, alpha, beta, mu) / self.sf(x, alpha, beta, mu)
def Hf(self, x, alpha, beta, mu):
r"""
        Cumulative hazard rate for the ExpoWeibull Distribution:
.. math::
H(x) = -\ln \left ( R(x) \right )
Parameters
----------
x : numpy array or scalar
The values at which the function will be calculated
alpha : numpy array or scalar
scale parameter for the ExpoWeibull distribution
beta : numpy array or scalar
shape parameter for the ExpoWeibull distribution
mu : numpy array or scalar
shape parameter for the ExpoWeibull distribution
Returns
-------
Hf : scalar or numpy array
The value(s) of the cumulative hazard rate at x.
Examples
--------
>>> import numpy as np
>>> from surpyval import ExpoWeibull
>>> x = np.array([1, 2, 3, 4, 5])
>>> ExpoWeibull.Hf(x, 3, 4, 1.2)
array([5.10166141e-03, 1.35931416e-01, 8.59705336e-01, 2.98247086e+00,
7.53377239e+00])
"""
return -np.log(self.sf(x, alpha, beta, mu))
def qf(self, p, alpha, beta, mu):
r"""
        Quantile function for the ExpoWeibull Distribution:
        .. math::
            q(p) = \alpha \left [ -\ln \left ( 1 - p^{1/\mu} \right ) \right ]^{\frac{1}{\beta}}
Parameters
----------
p : numpy array or scalar
The percentiles at which the quantile will be calculated
alpha : numpy array or scalar
scale parameter for the ExpoWeibull distribution
beta : numpy array or scalar
shape parameter for the ExpoWeibull distribution
mu : numpy array or scalar
shape parameter for the ExpoWeibull distribution
Returns
-------
Q : scalar or numpy array
The quantiles for the Weibull distribution at each value p
Examples
--------
>>> import numpy as np
>>> from surpyval import ExpoWeibull
>>> p = np.array([.1, .2, .3, .4, .5])
>>> ExpoWeibull.qf(p, 3, 4, 1.2)
array([1.89361341, 2.2261045 , 2.46627621, 2.66992747, 2.85807988])
"""
return alpha * (-np.log(1 - p**(1./mu)))**(1/beta)
def mean(self, alpha, beta, mu):
func = lambda x : x * self.df(x, alpha, beta, mu)
top = 2 * self.qf(0.999, alpha, beta, mu)
return integrate.quadrature(func, 0, top)[0]
def random(self, size, alpha, beta, mu):
U = uniform.rvs(size=size)
return self.qf(U, alpha, beta, mu)
def mpp_x_transform(self, x, gamma=0):
return np.log(x - gamma)
def mpp_y_transform(self, y, *params):
mu = params[-1]
mask = ((y == 0) | (y == 1))
out = np.zeros_like(y)
out[~mask] = np.log(-np.log((1 - y[~mask]**(1./mu))))
out[mask] = np.nan
return out
def mpp_inv_y_transform(self, y, *params):
i = len(params)
mu = params[i-1]
return (1 - np.exp(-np.exp(y)))**mu
def unpack_rr(self, params, rr):
#UPDATE ME
if rr == 'y':
beta = params[0]
alpha = np.exp(params[1]/-beta)
elif rr == 'x':
beta = 1./params[0]
alpha = np.exp(params[1] / (beta * params[0]))
return alpha, beta, 1.
ExpoWeibull = ExpoWeibull_('ExpoWeibull') | [((1148, 1157), 'autograd.numpy.log', 'np.log', (['x'], {}), '(x)\n', (1154, 1157), True, 'import autograd.numpy as np\n'), ((1208, 1247), 'surpyval.parametric.Gumbel.fit', 'para.Gumbel.fit', (['log_x', 'c', 'n'], {'how': '"""MLE"""'}), "(log_x, c, n, how='MLE')\n", (1223, 1247), True, 'from surpyval import parametric as para\n'), ((10150, 10172), 'scipy.stats.uniform.rvs', 'uniform.rvs', ([], {'size': 'size'}), '(size=size)\n', (10161, 10172), False, 'from scipy.stats import uniform\n'), ((10275, 10292), 'autograd.numpy.log', 'np.log', (['(x - gamma)'], {}), '(x - gamma)\n', (10281, 10292), True, 'import autograd.numpy as np\n'), ((10412, 10428), 'autograd.numpy.zeros_like', 'np.zeros_like', (['y'], {}), '(y)\n', (10425, 10428), True, 'import autograd.numpy as np\n'), ((1300, 1339), 'surpyval.parametric.Gumbel.fit', 'para.Gumbel.fit', (['log_x', 'c', 'n'], {'how': '"""MPP"""'}), "(log_x, c, n, how='MPP')\n", (1315, 1339), True, 'from surpyval import parametric as para\n'), ((1394, 1404), 'autograd.numpy.exp', 'np.exp', (['mu'], {}), '(mu)\n', (1400, 1404), True, 'import autograd.numpy as np\n'), ((1429, 1444), 'autograd.numpy.isinf', 'np.isinf', (['alpha'], {}), '(alpha)\n', (1437, 1444), True, 'import autograd.numpy as np\n'), ((1447, 1462), 'autograd.numpy.isnan', 'np.isnan', (['alpha'], {}), '(alpha)\n', (1455, 1462), True, 'import autograd.numpy as np\n'), ((1485, 1497), 'autograd.numpy.median', 'np.median', (['x'], {}), '(x)\n', (1494, 1497), True, 'import autograd.numpy as np\n'), ((1510, 1524), 'autograd.numpy.isinf', 'np.isinf', (['beta'], {}), '(beta)\n', (1518, 1524), True, 'import autograd.numpy as np\n'), ((1527, 1541), 'autograd.numpy.isnan', 'np.isnan', (['beta'], {}), '(beta)\n', (1535, 1541), True, 'import autograd.numpy as np\n'), ((6570, 6598), 'autograd.numpy.exp', 'np.exp', (['(-(x / alpha) ** beta)'], {}), '(-(x / alpha) ** beta)\n', (6576, 6598), True, 'import autograd.numpy as np\n'), ((10054, 10088), 'scipy.integrate.quadrature', 'integrate.quadrature', (['func', '(0)', 'top'], {}), '(func, 0, top)\n', (10074, 10088), False, 'from scipy import integrate\n'), ((10807, 10832), 'autograd.numpy.exp', 'np.exp', (['(params[1] / -beta)'], {}), '(params[1] / -beta)\n', (10813, 10832), True, 'import autograd.numpy as np\n'), ((1172, 1187), 'autograd.numpy.isnan', 'np.isnan', (['log_x'], {}), '(log_x)\n', (1180, 1187), True, 'import autograd.numpy as np\n'), ((1605, 1614), 'autograd.numpy.min', 'np.min', (['x'], {}), '(x)\n', (1611, 1614), True, 'import autograd.numpy as np\n'), ((4033, 4061), 'autograd.numpy.exp', 'np.exp', (['(-(x / alpha) ** beta)'], {}), '(-(x / alpha) ** beta)\n', (4039, 4061), True, 'import autograd.numpy as np\n'), ((10458, 10492), 'autograd.numpy.log', 'np.log', (['(1 - y[~mask] ** (1.0 / mu))'], {}), '(1 - y[~mask] ** (1.0 / mu))\n', (10464, 10492), True, 'import autograd.numpy as np\n'), ((10908, 10946), 'autograd.numpy.exp', 'np.exp', (['(params[1] / (beta * params[0]))'], {}), '(params[1] / (beta * params[0]))\n', (10914, 10946), True, 'import autograd.numpy as np\n'), ((2889, 2917), 'autograd.numpy.exp', 'np.exp', (['(-(x / alpha) ** beta)'], {}), '(-(x / alpha) ** beta)\n', (2895, 2917), True, 'import autograd.numpy as np\n'), ((9859, 9886), 'autograd.numpy.log', 'np.log', (['(1 - p ** (1.0 / mu))'], {}), '(1 - p ** (1.0 / mu))\n', (9865, 9886), True, 'import autograd.numpy as np\n'), ((1618, 1627), 'autograd.numpy.max', 'np.max', (['x'], {}), '(x)\n', (1624, 1627), True, 'import autograd.numpy as 
np\n'), ((1630, 1639), 'autograd.numpy.min', 'np.min', (['x'], {}), '(x)\n', (1636, 1639), True, 'import autograd.numpy as np\n'), ((6514, 6542), 'autograd.numpy.exp', 'np.exp', (['(-(x / alpha) ** beta)'], {}), '(-(x / alpha) ** beta)\n', (6520, 6542), True, 'import autograd.numpy as np\n'), ((10662, 10671), 'autograd.numpy.exp', 'np.exp', (['y'], {}), '(y)\n', (10668, 10671), True, 'import autograd.numpy as np\n')] |
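A small sanity-check sketch for the ExpoWeibull_ methods above (illustrative; assumes the surpyval package from this row is importable). It relies only on identities implied by the docstring formulas: sf and ff are complements, and qf inverts ff.
import numpy as np
from surpyval import ExpoWeibull  # instance created at the end of the file above

alpha, beta, mu = 3, 4, 1.2
x = np.array([1.0, 2.0, 3.0])
# survival and failure functions sum to one
assert np.allclose(ExpoWeibull.sf(x, alpha, beta, mu) + ExpoWeibull.ff(x, alpha, beta, mu), 1.0)
# the quantile function inverts the failure function: F(q(p)) == p
p = np.array([0.1, 0.5, 0.9])
assert np.allclose(ExpoWeibull.ff(ExpoWeibull.qf(p, alpha, beta, mu), alpha, beta, mu), p)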
stjordanis/datar | tests/test_base_table.py | 4e2b5db026ad35918954576badef9951928c0cb1 | import pytest
from datar import stats
from datar.base import *
from datar import f
from datar.datasets import warpbreaks, state_division, state_region, airquality
from .conftest import assert_iterable_equal
def test_table():
# https://www.rdocumentation.org/packages/base/versions/3.6.2/topics/table
z = stats.rpois(100, 5)
x = table(z)
assert sum(x.values.flatten()) == 100
#-----------------
with data_context(warpbreaks) as _:
tab = table(f.wool, f.tension)
assert tab.columns.tolist() == ['H', 'L', 'M']
assert tab.index.tolist() == ['A', 'B']
assert_iterable_equal(tab.values.flatten(), [9] * 6)
tab = table(warpbreaks.loc[:, ['wool', 'tension']])
assert tab.columns.tolist() == ['H', 'L', 'M']
assert tab.index.tolist() == ['A', 'B']
assert_iterable_equal(tab.values.flatten(), [9] * 6)
#-----------------
tab = table(state_division, state_region)
assert tab.loc['New England', 'Northeast'] == 6
#-----------------
with data_context(airquality) as _:
qt = stats.quantile(f.Temp)
ct = cut(f.Temp, qt)
tab = table(ct, f.Month)
assert tab.iloc[0,0] == 24
#-----------------
a = letters[:3]
tab = table(a, sample(a))
assert sum(tab.values.flatten()) == 3
#-----------------
tab = table(a, sample(a), dnn=['x', 'y'])
assert tab.index.name == 'x'
assert tab.columns.name == 'y'
#-----------------
a = c(NA, Inf, (1.0/(i+1) for i in range(3)))
a = a * 10
# tab = table(a)
# assert_iterable_equal(tab.values.flatten(), [10] * 4)
tab = table(a, exclude=None)
assert_iterable_equal(tab.values.flatten(), [10] * 5)
#------------------
b = as_factor(rep(c("A","B","C"), 10))
tab = table(b)
assert tab.shape == (1, 3)
assert_iterable_equal(tab.values.flatten(), [10] * 3)
tab = table(b, exclude="B")
assert tab.shape == (1, 2)
assert_iterable_equal(tab.values.flatten(), [10] * 2)
assert 'B' not in tab.columns
#-------------------
d = factor(rep(c("A","B","C"), 10), levels=c("A","B","C","D","E"))
tab = table(d, exclude="B", dnn=['x'])
assert_iterable_equal(tab.columns.to_list(), ["A", "C", "D", "E"])
assert_iterable_equal(tab.values.flatten(), [10, 10, 0, 0])
d2 = factor(rep(c("A","B","C"), 10), levels=c("A","B","C","D","E"))
tab = table(d, d2, exclude="B")
assert tab.shape == (4, 4)
tab = table("abc", "cba", dnn='x')
assert tab.shape == (3,3)
assert sum(tab.values.flatten()) == 3
with data_context(airquality) as _:
tab = table(f.Ozone, f.Solar_R, exclude=None)
assert '<NA>' in tab.columns
assert '<NA>' in tab.index
def test_table_error():
from datar.datasets import iris, warpbreaks
with pytest.raises(ValueError):
table(iris)
with pytest.raises(ValueError):
table(warpbreaks, iris)
with pytest.raises(ValueError):
table(warpbreaks.wool, iris)
with pytest.raises(ValueError):
table(iris.iloc[:, []])
with pytest.raises(ValueError):
table(iris.iloc[:, [1,2]], iris)
with pytest.raises(ValueError):
table(iris.iloc[:, [1]], iris, iris)
with pytest.raises(ValueError):
table(iris.iloc[:, [1]], iris.iloc[:, []])
| [((315, 334), 'datar.stats.rpois', 'stats.rpois', (['(100)', '(5)'], {}), '(100, 5)\n', (326, 334), False, 'from datar import stats\n'), ((1058, 1080), 'datar.stats.quantile', 'stats.quantile', (['f.Temp'], {}), '(f.Temp)\n', (1072, 1080), False, 'from datar import stats\n'), ((2791, 2816), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2804, 2816), False, 'import pytest\n'), ((2847, 2872), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2860, 2872), False, 'import pytest\n'), ((2915, 2940), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2928, 2940), False, 'import pytest\n'), ((2988, 3013), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3001, 3013), False, 'import pytest\n'), ((3056, 3081), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3069, 3081), False, 'import pytest\n'), ((3133, 3158), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3146, 3158), False, 'import pytest\n'), ((3214, 3239), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3227, 3239), False, 'import pytest\n')] |
dokai/cqlengine | cqlengine/tests/statements/test_update_statement.py | a080aff3a73351d37126b14eef606061b445aa37 | from unittest import TestCase
from cqlengine.statements import UpdateStatement, WhereClause, AssignmentClause
from cqlengine.operators import *
class UpdateStatementTests(TestCase):
def test_table_rendering(self):
""" tests that fields are properly added to the select statement """
us = UpdateStatement('table')
self.assertTrue(unicode(us).startswith('UPDATE table SET'), unicode(us))
self.assertTrue(str(us).startswith('UPDATE table SET'), str(us))
def test_rendering(self):
us = UpdateStatement('table')
us.add_assignment_clause(AssignmentClause('a', 'b'))
us.add_assignment_clause(AssignmentClause('c', 'd'))
us.add_where_clause(WhereClause('a', EqualsOperator(), 'x'))
self.assertEqual(unicode(us), 'UPDATE table SET "a" = :0, "c" = :1 WHERE "a" = :2', unicode(us))
def test_context(self):
us = UpdateStatement('table')
us.add_assignment_clause(AssignmentClause('a', 'b'))
us.add_assignment_clause(AssignmentClause('c', 'd'))
us.add_where_clause(WhereClause('a', EqualsOperator(), 'x'))
self.assertEqual(us.get_context(), {'0': 'b', '1': 'd', '2': 'x'})
def test_context_update(self):
us = UpdateStatement('table')
us.add_assignment_clause(AssignmentClause('a', 'b'))
us.add_assignment_clause(AssignmentClause('c', 'd'))
us.add_where_clause(WhereClause('a', EqualsOperator(), 'x'))
us.update_context_id(3)
self.assertEqual(unicode(us), 'UPDATE table SET "a" = :4, "c" = :5 WHERE "a" = :3')
self.assertEqual(us.get_context(), {'4': 'b', '5': 'd', '3': 'x'})
def test_additional_rendering(self):
us = UpdateStatement('table', ttl=60)
us.add_assignment_clause(AssignmentClause('a', 'b'))
us.add_where_clause(WhereClause('a', EqualsOperator(), 'x'))
self.assertIn('USING TTL 60', unicode(us))
| [((311, 335), 'cqlengine.statements.UpdateStatement', 'UpdateStatement', (['"""table"""'], {}), "('table')\n", (326, 335), False, 'from cqlengine.statements import UpdateStatement, WhereClause, AssignmentClause\n'), ((534, 558), 'cqlengine.statements.UpdateStatement', 'UpdateStatement', (['"""table"""'], {}), "('table')\n", (549, 558), False, 'from cqlengine.statements import UpdateStatement, WhereClause, AssignmentClause\n'), ((897, 921), 'cqlengine.statements.UpdateStatement', 'UpdateStatement', (['"""table"""'], {}), "('table')\n", (912, 921), False, 'from cqlengine.statements import UpdateStatement, WhereClause, AssignmentClause\n'), ((1237, 1261), 'cqlengine.statements.UpdateStatement', 'UpdateStatement', (['"""table"""'], {}), "('table')\n", (1252, 1261), False, 'from cqlengine.statements import UpdateStatement, WhereClause, AssignmentClause\n'), ((1707, 1739), 'cqlengine.statements.UpdateStatement', 'UpdateStatement', (['"""table"""'], {'ttl': '(60)'}), "('table', ttl=60)\n", (1722, 1739), False, 'from cqlengine.statements import UpdateStatement, WhereClause, AssignmentClause\n'), ((592, 618), 'cqlengine.statements.AssignmentClause', 'AssignmentClause', (['"""a"""', '"""b"""'], {}), "('a', 'b')\n", (608, 618), False, 'from cqlengine.statements import UpdateStatement, WhereClause, AssignmentClause\n'), ((653, 679), 'cqlengine.statements.AssignmentClause', 'AssignmentClause', (['"""c"""', '"""d"""'], {}), "('c', 'd')\n", (669, 679), False, 'from cqlengine.statements import UpdateStatement, WhereClause, AssignmentClause\n'), ((955, 981), 'cqlengine.statements.AssignmentClause', 'AssignmentClause', (['"""a"""', '"""b"""'], {}), "('a', 'b')\n", (971, 981), False, 'from cqlengine.statements import UpdateStatement, WhereClause, AssignmentClause\n'), ((1016, 1042), 'cqlengine.statements.AssignmentClause', 'AssignmentClause', (['"""c"""', '"""d"""'], {}), "('c', 'd')\n", (1032, 1042), False, 'from cqlengine.statements import UpdateStatement, WhereClause, AssignmentClause\n'), ((1295, 1321), 'cqlengine.statements.AssignmentClause', 'AssignmentClause', (['"""a"""', '"""b"""'], {}), "('a', 'b')\n", (1311, 1321), False, 'from cqlengine.statements import UpdateStatement, WhereClause, AssignmentClause\n'), ((1356, 1382), 'cqlengine.statements.AssignmentClause', 'AssignmentClause', (['"""c"""', '"""d"""'], {}), "('c', 'd')\n", (1372, 1382), False, 'from cqlengine.statements import UpdateStatement, WhereClause, AssignmentClause\n'), ((1773, 1799), 'cqlengine.statements.AssignmentClause', 'AssignmentClause', (['"""a"""', '"""b"""'], {}), "('a', 'b')\n", (1789, 1799), False, 'from cqlengine.statements import UpdateStatement, WhereClause, AssignmentClause\n')] |
Falcons-Robocup/code | packages/facilities/diagnostics/py/custom_checkbox.py | 2281a8569e7f11cbd3238b7cc7341c09e2e16249 | # Copyright 2020 Jan Feitsma (Falcons)
# SPDX-License-Identifier: Apache-2.0
#!/usr/bin/python
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
class Checkbox():
def __init__(self, name, position, default=False, label=None, rsize=0.6, enabled=True):
        self.name = name # unique ID associated with this checkbox
# label to display next to the checkbox
if label == None:
self.label = name # reuse
else:
self.label = label
self.callback = None
self.enabled = enabled
self.ticked = default
self.ax = plt.axes(position) # position is a tuple (x,y,w,h)
self.ax.axis('off')
self.canvas = self.ax.figure.canvas
# draw text
if len(self.label):
self.text = self.ax.text(-0.15, 0.5, self.label, horizontalalignment='right', verticalalignment='center')
# draw a rectangle, add a bit of spacing
self.ax.add_patch(Rectangle((0,(1.0-rsize)/2), rsize, rsize, fill=True))
# setup event handling
self.canvas.mpl_connect('button_release_event', self._handle_event)
self.redraw()
def __repr__(self):
s = 'checkbox:' + self.name + '=' + str(self.ticked)
if not self.enabled:
s += ' (disabled)'
return s
def on_changed(self, cb):
self.callback = cb
def _handle_event(self, e):
if self.enabled and e.inaxes == self.ax: # TODO: exclude spacing margin for inaxes calculation
self.ticked = not self.ticked
self.redraw()
if self.callback != None:
self.callback(self.name, self.ticked)
def redraw(self):
col = 'grey'
if self.enabled:
col = ['lightgoldenrodyellow', 'blue'][self.ticked]
self.ax.patches[0].set_facecolor(col)
self.ax.figure.canvas.draw()
| [((601, 619), 'matplotlib.pyplot.axes', 'plt.axes', (['position'], {}), '(position)\n', (609, 619), True, 'import matplotlib.pyplot as plt\n'), ((965, 1023), 'matplotlib.patches.Rectangle', 'Rectangle', (['(0, (1.0 - rsize) / 2)', 'rsize', 'rsize'], {'fill': '(True)'}), '((0, (1.0 - rsize) / 2), rsize, rsize, fill=True)\n', (974, 1023), False, 'from matplotlib.patches import Rectangle\n')] |
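A minimal usage sketch for the Checkbox widget above (hypothetical layout values; assumes an interactive matplotlib backend and that Checkbox is imported from this module).
import matplotlib.pyplot as plt

fig = plt.figure()
# position is an (x, y, w, h) axes rectangle in figure coordinates
box = Checkbox("show_grid", position=(0.4, 0.45, 0.2, 0.1), default=True, label="show grid")

def on_changed(name, ticked):
    print(name, "->", ticked)

box.on_changed(on_changed)
plt.show()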
Axonny/HexagonalHitori | generator.py | 582cb50b751796c30ed273f66c8ac9fa6f3dd089 | from hitori_generator import Generator
from argparse import ArgumentParser
def generate(n: int, output_file: str) -> None:
if n < 3 or n > 8:
print("It isn't valid size")
exit(4)
generator = Generator(n)
data = generator.generate()
lines = map(lambda x: ' '.join(map(str, x)), data)
with open(output_file, 'w', encoding='utf-8') as f:
f.write('\n'.join(lines))
def main():
p = ArgumentParser()
p.add_argument('filename', type=str, help='Path to output file')
p.add_argument('-s', "--size", type=int, default=3, help='Generate SxS field. size must be in [3, 8]. Default is 3')
args = p.parse_args()
generate(args.size, args.filename)
if __name__ == '__main__':
main()
| [((217, 229), 'hitori_generator.Generator', 'Generator', (['n'], {}), '(n)\n', (226, 229), False, 'from hitori_generator import Generator\n'), ((429, 445), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (443, 445), False, 'from argparse import ArgumentParser\n')] |
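Invocation sketch for the generator above (the file name is illustrative): it can be called programmatically or through the argparse CLI defined in main().
generate(5, "field.txt")   # writes a 5x5 hexagonal Hitori field to field.txt
# equivalent command line:
#   python generator.py field.txt --size 5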
feliam/opaf | opaflib/xmlast.py | f9908c26af1bf28cc29f3d647dcd9f55d631d732 | from lxml import etree
from opaflib.filters import defilterData
#Logging facility
import logging,code
logger = logging.getLogger("OPAFXML")
class PDFXML(etree.ElementBase):
''' Base pdf-xml class. Every pdf token xml representation will
have a span wich indicates where the original token layed in the file
'''
def _getspan(self):
return tuple([int(i) for i in self.get('span').split('~')])
def _setspan(self, value):
self.set('span',"%d~%d"%value)
def span_move(self,offset, recursive=True):
begin,end = self.span
self.span = (begin+offset,end+offset)
if recursive:
for child in self.getchildren():
child.span_move(offset)
def span_expand(self,span):
begin,end = self.span
self.span = (min(begin,span[0]),max(end,span[1]))
def clear_span(self, recursive=True):
del self.attrib['span']
for child in self.getchildren():
child.clear_span()
span = property(_getspan,_setspan)
def _to_xml(self):
return etree.tostring(self)
xml = property(_to_xml)
def _from_python(self, value):
self.from_python(value)
def _to_python(self):
return self.to_python()
value = property(_to_python,_from_python)
def __getattr__(self, name):
tags = set([e.tag for e in self])
if name in tags:
return self.xpath('./%s'%name)
return getattr(super(PDFXML,self),name)
def get_numgen(self):
''' Search the object and generation number of any pdf element '''
if self.tag.startswith('indirect'):
return self.id
else:
return self.getparent().get_numgen()
#leaf
class PDFString(PDFXML):
def from_python(self, value):
self.text = value.encode('string_escape')
def to_python(self):
return self.text.decode('string_escape')
class PDFName(PDFString):
pass
class PDFData(PDFString):
pass
class PDFBool(PDFString):
def from_python(self, value):
assert type(value) == bool, 'Value must be a boolean'
self.text = ['false','true'][int(value)]
def to_python(self):
return {'false': False, 'true': True}[self.text]
class PDFNull(PDFString):
def from_python(self, value):
assert value is None, 'Value must be None'
self.text = 'null'
def to_python(self):
assert self.text == 'null', 'PDFNull xml not initialized'
return None
class PDFR(PDFString):
def from_python(self, (n,g)):
assert type(n) == int and type(g) == int, 'R must be two numbers, n and g'
assert n >= 0 and n < 65535 , 'Invalid object number (%d)'%n
assert g >= 0 and g < 65535 , 'Invalid generation number (%d)'%g
self.text = "%d %d"%(n,g)
def to_python(self):
return tuple([int(i) for i in self.text.split(' ')])
def solve(self):
''' search the referenced indirect object in the containing pdf '''
pdf = self.xpath('/*')[0]
return pdf.getIndirectObject(self.value)
class PDFNumber(PDFXML):
def from_python(self, value):
assert type(value) in [int, float], 'Wrong type for a number'
self.text = str(value)
def to_python(self):
x = self.text
return float(int(float(x))) == float(x) and int(float(x)) or float(x)
class PDFStartxref(PDFString):
def from_python(self, value):
assert type(value) == int , 'Wrong type for startxref'
self.text = str(value).encode('string_escape')
def to_python(self):
return int(self.text.decode('string_escape'))
class PDFHeader(PDFString):
pass
#tree
class PDFEntry(PDFXML):
def to_python(self):
return tuple([e.value for e in self.getchildren()])
def _getkey(self):
return self[0]
def _setkey(self, key):
assert key.tag == 'name'
self[0] = key
key = property(_getkey,_setkey,None)
def _getval(self):
return self[1]
def _setval(self, val):
self[1] = val
val = property(_getval,_setval,None)
class PDFDictionary(PDFXML):
def to_python(self):
return dict([e.value for e in self.getchildren()])
def has_key(self,key):
return len(self.xpath('./entry/name[position()=1 and text()="%s"]'%key))>0
def __getitem__(self, i):
if str == type(i):
return self.xpath('./entry/name[position()=1 and text()="%s"]/../*[position()=2]'%i)[0]
return super(PDFDictionary,self).__getitem__(i)
def __delitem__(self, i):
if str == type(i):
return self.remove(self.xpath('./entry/name[position()=1 and text()="%s"]/..'%i)[0])
return super(PDFDictionary,self).__delitem__(i)
def __setitem__(self, key, val):
if str == type(key):
self.xpath('./entry/name[position()=1 and text()="%s"]/..'%key)[0].val=val
else:
super(PDFDictionary,self).__setitem__(key,val)
class PDFStream(PDFXML):
def to_python(self):
return {'dictionary':self[0].value, 'data':self[1].value}
def _getdictionary(self):
return self[0]
def _setdictionary(self, d):
assert key.tag == 'dictionary'
self[0] = d
dictionary = property(_getdictionary,_setdictionary,None)
def _getdata(self):
return self[1]
def _setdata(self, data):
assert data.tag == 'data'
self[1] = data
data = property(_getdata,_setdata,None)
def isFiltered(self):
''' Check if stream is filtered '''
return self.dictionary.has_key('Filter')
def getFilters(self):
val = self.dictionary.value
filters = val.get('Filter',None)
params = val.get('DecodeParams',None)
assert any([type(filters) == list and (type(params) == list or params==None ),
type(filters) != list and (type(params) == dict or params==None ) ]), 'Filter/DecodeParms wrong type'
if type(filters) != list:
filters=[filters]
params=params and [params] or [{}]
if params == None:
params = [{}]*len(filters)
assert all([type(x)==str for x in filters]), 'Filter shall be a names'
assert all([type(x)==dict for x in params]), 'Params should be a dictionary.. or null?'
assert len(filters) == len(params),'Number of Decodeparams should match Filters'
return zip(filters,params)
def popFilter(self):
dictionary = self.dictionary
assert dictionary.has_key('Filter'), 'Stream not Filtered!'
selected_filter = None
selected_params = None
deletion_list = []
if dictionary['Length'].value != len(self.data.value):
logger.info("Length field of object %s does not match the actual data size (%d != %d)"%(str(self.get_numgen()),dictionary['Length'].value,len(self.data.value)))
if type(dictionary['Filter']) == PDFArray:
selected_filter = dictionary['Filter'][0]
del dictionary['Filter'][0]
if dictionary.has_key('DecodeParms'):
assert dictionary['DecodeParms'] == PDFArray, 'Array of filters need array of decoding params'
selected_params = dictionary['DecodeParms'][0]
deletion_list.append((dictionary['DecodeParms'],0))
#del dictionary['DecodeParms'][0]
else:
selected_filter = dictionary['Filter']
del dictionary['Filter']
if dictionary.has_key('DecodeParms'):
selected_params = dictionary['DecodeParms']
deletion_list.append((dictionary, 'DecodeParms'))
#del dictionary['DecodeParms']
if dictionary.has_key('Filter') and \
type(dictionary['Filter']) == PDFArray and \
len(dictionary['Filter']) == 0:
deletion_list.append((dictionary, 'Filter'))
#del dictionary['Filter']
if dictionary.has_key('DecodeParms') and \
type(dictionary['DecodeParms']) == PDFArray and \
len(dictionary['DecodeParms']) == 0:
deletion_list.append((dictionary, 'DecodeParms'))
#del dictionary['DecodeParms']
#FIX recode defilterData .. make it register/unregister able.
#(think /Crypt 7.4.10 Crypt Filter )
self.data.value = defilterData(selected_filter.value,self.data.value, selected_params and selected_params.value or selected_params)
for v,i in deletion_list:
del v[i]
dictionary['Length'].value = len(self.data.value)
def defilter(self):
try:
while self.isFiltered():
self.popFilter()
except Exception,e:
logger.debug("Couldn't defilter <%s> stream (exception %s)."%(self.value,str(e)))
logger.info("Couldn't defilter <%s> stream."%str(self.get_numgen()))
def isObjStm(self):
''' Return true if this is an object stream (ObjStml) '''
return self.dictionary.has_key('Type') and self.dictionary['Type'].value == 'ObjStm'
def expandObjStm(self):
'''
This parses the ObjStm structure and replace it with all the new
indirect objects.
'''
from opaflib.parser import parse
assert not self.isFiltered(), "ObjStm should not be compressed at this point"
assert self.dictionary.has_key('N'), "N is mandatory in ObjStm dictionary"
assert self.dictionary.has_key('First'), "First is mandatory in ObjStm dictionary"
dictionary = self.dictionary
data = self.data.value
first = dictionary["First"].value
pointers = [int(x) for x in data[:first].split()]
assert len(pointers)%2 == 0 , "Wrong number of integer in the ObjStm begining"
pointers = dict([(pointers[i+1]+first,pointers[i]) for i in range(0,len(pointers),2) ])
positions = sorted(pointers.keys() + [len(data)])
parsed_objects = []
for p in range(0,len(positions)-1):
logger.info("Adding new object %s from objectstream"%repr((pointers[positions[p]],0)))
io = PDF.indirect_object(parse('object', data[positions[p]:positions[p+1]]+" "))
io.id = (pointers[positions[p]],0)
parsed_objects.append(io)
return parsed_objects
class PDFArray(PDFXML):
def to_python(self):
return [e.value for e in self]
class PDFIndirect(PDFXML):
def to_python(self):
assert len(self.getchildren())==1, "Wrong number of children in indirect object"
return (self.id, self.object.value)
def _getobject(self):
return self[0]
def _setobject(self, o):
self[0] = o
object = property(_getobject,_setobject,None)
def _getid(self):
return tuple([int(i) for i in self.get('id').split(' ')])
def _setid(self, o):
self.set('id', "%d %d"%o)
id = property(_getid,_setid,None)
def isStream(self):
return len(self.xpath('./stream'))==1
class PDFPdf(PDFXML):
def to_python(self):
return [e.value for e in self]
def getStartxref(self):
''' Get the last startxref pointer (should be at least one) '''
return self.pdf_update[-1].startxref[-1]
#FIX move all this to pdf_update and do the wrapper here
def getObjectAt(self, pos):
''' Get the object found at certain byte position '''
return self.xpath('//*[starts-with(@span,"%d~")]'%pos)[0]
def getTrailer(self, startxref=None):
''' Get the Trailer dictionary (should be at least one) '''
if startxref == None:
startxref = self.getStartxref().value
xref = self.getObjectAt(startxref)
assert xref.tag in ['xref', 'stream'] and xref[0].tag == 'dictionary'
return xref[0]
def getID(self, startxref=None):
''' Get the pdf ID from the trailer dictionary '''
trailer = self.getTrailer(startxref).value
if trailer.has_key('ID'):
return trailer['ID']
else:
return ['','']
def getIndirectObject(self, ref):
''' Search for an indirect object '''
for u in self.pdf_update:
if u.has_key(ref):
return u[ref]
def getRoot(self):
''' Get the pdf Root node. '''
return self.getIndirectObject(self.getTrailer()['Root'].value).object
def isEncrypted(self):
''' Return true if pdf is encrypted '''
return self.getTrailer().has_key('Encrypt')
def countObjStm(self):
''' Count number of 'compressed' object streams '''
return len(self.xpath('//stream/dictionary/entry/name[position()=1 and text()="Type"]/../name[position()=2 and text()="ObjStm"]/../../..'))
def countIObj(self):
''' Count number of 'compressed' object streams '''
return len(self.xpath('//indirect_object'))
    def graph(self, dot='default.dot'):
''' Generate a .dot graph of the pdf '''
dotdata = "digraph {\n"
nodes_added = set()
for io in self.pdf_update.indirect_object:
references = io.xpath(".//R")
orig = "%d %d"%io.id
if len(references) == 0:
dotdata += '\t"%s";\n'%x
nodes_added.add(orig)
else:
for r in references:
dest = "%d %d"%r.value
dotdata += '\t"%s" -> "%s";\n'%(orig, dest)
nodes_added.add(orig)
nodes_added.add(dest)
try:
root = "%d %d"%self.getRoot()
dotdata += '\t"trailer" -> "%s";\n'%root
except Exception,e :
pass
dotdata += '}\n'
logger.info("Writing graph to %s(a dot file). Download graphviz or try this http://rise4fun.com/Agl for render it."%dot)
file(dot,"w").write(dotdata)
def expandAllObjStm(self):
''' Find all object streams and expand them. Each ObjStm will be replaced
by its childs '''
for u in self.pdf_update:
for ref in u.findAllObjStm():
u.expandObjStm(ref)
def defilterAll(self):
''' Find all object streams and expand them. Each ObjStm will be replaced
by its childs '''
for u in self.pdf_update:
for io in u[:]:
if type(io) == PDFIndirect and io.isStream() and io.object.isFiltered():
io.object.defilter()
def decrypt(self):
''' This will try to decrypt V:4 null password encryption '''
import hashlib, struct
from Crypto.Cipher import AES
from Crypto.Util import randpool
import base64
def rc4crypt(data, key):
x = 0
box = range(256)
for i in range(256):
x = (x + box[i] + ord(key[i % len(key)])) % 256
box[i], box[x] = box[x], box[i]
x = 0
y = 0
out = []
for char in data:
x = (x + 1) % 256
y = (y + box[x]) % 256
box[x], box[y] = box[y], box[x]
out.append(chr(ord(char) ^ box[(box[x] + box[y]) % 256]))
return ''.join(out)
block_size = 16
key_size = 32
def encrypt(plain_text,key_bytes):
assert len(key_bytes) == key_size
mode = AES.MODE_CBC
pad = block_size - len(plain_text) % block_size
data = plain_text + pad * chr(pad)
iv_bytes = randpool.RandomPool(512).get_bytes(block_size)
encrypted_bytes = iv_bytes + AES.new(key_bytes, mode, iv_bytes).encrypt(data)
return encrypted_bytes
def decrypt(encrypted_bytes,key_bytes):
#assert len(key_bytes) == key_size
mode = AES.MODE_CBC
iv_bytes = encrypted_bytes[:block_size]
plain_text = AES.new(key_bytes, mode, iv_bytes).decrypt(encrypted_bytes[block_size:])
pad = ord(plain_text[-1])
return plain_text[:-pad]
assert self.isEncrypted()
#Get and print the encryption dictionary
encrypt = self.getTrailer()['Encrypt'].solve().object
print "It's ENCRYPTED!"
encrypt_py = encrypt.value
print encrypt_py
#Ok try to decrypt it ...
assert encrypt_py['V'] == 4, "Sorry only Version 4 supported"
assert encrypt_py['R'] == 4, "Sorry only Version 4 supported"
#password length
n = encrypt_py['Length']/8
print "N:",n
#a) Pad or truncate the password string to exactly 32 bytes.
user_password = ""
pad = "28BF4E5E4E758A4164004E56FFFA01082E2E00B6D0683E802F0CA9FE6453697A".decode('hex')
print "PASSWORD: ", user_password.encode('hex')
print "PAD: ", pad.encode('hex')
#b) Initialize the MD5 hash function and pass the result of step (a) as input to this function.
m = hashlib.md5()
m.update((user_password+pad)[:32])
print "MD5 update 1", ((user_password+pad)[:32]).encode('hex')
#c) Pass the value of the encryption dictionary's O entry to the MD5 hash function.
m.update (encrypt_py['O'][:32])
print "MD5 update 2", (encrypt_py['O'][:32]).encode('hex')
#d) Convert the integer value of the P entry to a 32-bit unsigned binary number and pass these bytes to the
# MD5 hash function, low-order byte first. WTF!!??
print "MD5 update 3", struct.pack("<L", 0xffffffff&encrypt_py['P']).encode('hex')
m.update (struct.pack("<L", 0xffffffff&encrypt_py['P'] ))
#e) append ID ?
#TODO, get the ID from the trailer..
ID = ''
m.update (ID)
print "MD5 update 4", ID.encode('hex')
#f) If document metadata is not being encrypted, pass 4 bytes with the value 0xFFFFFFFF to the MD5 hash function.
if encrypt_py.has_key('EncryptMetadata') and encrypt_py['EncryptMetadata'] == false:
m.update('\xff'*4)
print "MD5 update 5", ('\xff'*4).encode('hex')
print "1rst DIGEST:", m.digest().encode('hex')
h = m.digest()[:n]
for i in range(0,50):
h = hashlib.md5(h[:n]).digest()
print "Encryption KEY(%d)"%i, h.encode('hex')
key = h[:n]
print "Encryption KEY", key.encode('hex')
print "Try to authenticate"
_buf = hashlib.md5(pad + ID).digest()
print "MD5(padding+ID):",_buf.encode('hex')
for i in range(0,20):
_key = ''.join([chr(ord(k)^i) for k in list(key)])
_buf1 = rc4crypt(_buf,_key)
print "RC4 iter(%d) Encrypt data <%s> with key <%s> and it gives data <%s>"%(i,_buf.encode('hex'),_key.encode('hex'),_buf1.encode('hex'))
_buf = _buf1
assert _buf == encrypt_py['U'][:16]
print "Authenticated! (An actual pass is not needed. Using null pass '' )"
print "U", encrypt_py['U'].encode('hex')
print "O", encrypt_py['O'].encode('hex')
def decrypt_xml(xml_element):
n,g = xml_element.get_numgen()
m = hashlib.md5()
m.update(key)
m.update(chr(n&0xff))
m.update(chr((n>>8)&0xff))
m.update(chr((n>>16)&0xff))
m.update(chr(g&0xff))
m.update(chr((g>>8)&0xff))
m.update("sAlT")
real_key = m.digest()
pld = e.value
if pld.endswith("\x0d\x0a"):
pld = pld[:-2]
pld = decrypt(pld,real_key)
e.value=pld
#decrypt every string and stream in place...
for e in self.xpath('//stream/data'):
decrypt_xml(e)
for e in self.xpath('//string'):
decrypt_xml(e)
class PDFUpdate(PDFXML):
def to_python(self):
return dict([e.value for e in self.xpath('./indirect_object')])
def has_key(self,key):
key = "%d %d"%key
return len(self.xpath('./indirect_object[@id="%s"]'%key))>0
def __getitem__(self, key):
if tuple == type(key):
key = "%d %d"%key
return self.xpath('./indirect_object[@id="%s"]'%key)[0]
return super(PDFUpdate,self).__getitem__(key)
def __delitem__(self, key):
if tuple == type(key):
key = "%d %d"%key
return self.remove(self.xpath('./indirect_object[@id="%s"]'%key)[0])
return super(PDFUpdate,self).__delitem__(key)
def __setitem__(self, key, val):
if str == type(key):
self.xpath('./indirect_object[@obj="%s"]'%key)[0][:]=[val] #mmm
else:
super(PDFDictionary,self).__setitem__(key,val)
def getObjectAt(self, pos):
''' Get the object found at certain byte position (only in this update!)'''
return self.xpath('.//*[starts-with(@span,"%d~")]'%pos)[0]
def getTrailer(self, startxref=None):
''' Get the Trailer dictionary (of this update!)'''
if startxref == None:
startxref = self.getStartxref().value
xref = self.getObjectAt(startxref)
return xref.dictionary
def getRoot(self):
''' Get the pdf Root node of this update. '''
return self[self.getTrailer()['Root'].value].object
def countObjStm(self):
''' Count number of 'compressed' object streams '''
return len(self.xpath('.//stream/dictionary/entry/name[position()=1 and text()="Type"]/../name[position()=2 and text()="ObjStm"]/../../..'))
def expandObjStm(self, ref):
io_objstm = self[ref]
assert io_objstm.object.dictionary['Type'].value == 'ObjStm'
#completelly defilter the object stream
while io_objstm.object.isFiltered():
io_objstm.object.popFilter()
#parse the indirect simpe objects inside it
expanded_iobjects = io_objstm.object.expandObjStm()
#replace the object stream by its childs
for new_io in expanded_iobjects:
io_objstm.addnext(new_io)
self.remove(io_objstm)
def findAllObjStm(self):
''' Search 'compressed' object streams ids/refs'''
return [io.id for io in self.xpath('.//stream/dictionary/entry/name[position()=1 and text()="Type"]/../name[position()=2 and text()="ObjStm"]/../../../..')]
def expandAllObjStm(self):
for ref in self.findAllObjStm():
self.expandObjStm(ref)
#Factory
class PDFXMLFactory():
def __init__(self):
self.parser = etree.XMLParser()
fallback = etree.ElementDefaultClassLookup(PDFXML)
lookup = etree.ElementNamespaceClassLookup(fallback)
namespace = lookup.get_namespace(None)
#leafs
namespace['name'] = PDFName
namespace['string'] = PDFString
namespace['number'] = PDFNumber
namespace['null'] = PDFNull
namespace['bool'] = PDFBool
namespace['R'] = PDFR
namespace['header'] = PDFHeader
namespace['startxref'] = PDFStartxref
namespace['data'] = PDFData
#trees
namespace['entry'] = PDFEntry
namespace['dictionary'] = PDFDictionary
namespace['stream'] = PDFStream
namespace['pdf'] = PDFPdf
namespace['pdf_update'] = PDFUpdate
namespace['indirect_object'] = PDFIndirect
namespace['array'] = PDFArray
self.parser.set_element_class_lookup(lookup)
#leaf
def create_leaf(self, tag, value,**attribs):
assert tag in ['number','string','name','R','startxref','header','data','null','bool'], "Got wrong leaf tag: %s"%tag
xml = self.parser.makeelement(tag)
xml.value=value
xml.span=attribs.setdefault('span', (0xffffffff,-1))
del attribs['span']
for attr_key, attr_val in attribs.items():
xml.set(attr_key, str(attr_val))
return xml
#Tree
def create_tree(self, tag, *childs, **attribs):
assert tag in ['indirect_object','dictionary', 'entry', 'array', 'stream', 'xref', 'pdf', 'pdf_update'], "Got wrong tree tag: %s"%tag
xml = self.parser.makeelement(tag)
xml.span=attribs.setdefault('span', (0xffffffff,-1))
del attribs['span']
for attr_key, attr_val in attribs.items():
xml.set(attr_key, str(attr_val))
for child in childs:
xml.append(child)
return xml
def __getattr__(self,tag, *args,**kwargs):
if tag in ['number','string','name','R','startxref','header','data','null','bool']:
return lambda payload, **my_kwargs: self.create_leaf(tag, payload, **my_kwargs)
elif tag in ['indirect_object','dictionary', 'entry', 'array', 'stream', 'xref', 'pdf', 'pdf_update']:
return lambda payload, **my_kwargs: self.create_tree(tag, *payload, **my_kwargs)
return super(PDFXMLFactory,self).__getattr__(tag,*args,**kwargs)
PDF = PDFXMLFactory()
def create_leaf(tag, value, **kwargs):
return PDF.create_leaf(tag, value,**kwargs)
def create_tree(tag, childs, **kwargs):
return PDF.create_tree(tag, *childs, **kwargs)
if __name__=="__main__":
name = create_leaf('name', "Name")
string = create_leaf('string', "Felipe")
entry = create_tree('entry',[name,string])
dictionary = create_tree('dictionary',[entry])
stream_data = create_leaf('data',"A"*100)
stream = create_tree('stream',[dictionary,stream_data])
indirect = create_tree('indirect_object', [stream], obj=(1,0))
array = create_tree('array', [create_leaf('number', i) for i in range(0,10)])
xml=indirect
print etree.tostring(xml), xml.value
import code
code.interact(local=locals())
| [] |
le3t/ko-repo | course-code/imooc-tf-mnist-flask/mnist/module.py | 50eb0b4cadb9db9bf608a9e5d36376f38ff5cce5 | import tensorflow as tf
# y=ax+b linear model
def regression(x):
a = tf.Variable(tf.zeros([784, 10]), name="a")
b = tf.Variable(tf.zeros([10]), name="b")
y = tf.nn.softmax(tf.matmul(x, a) + b)
return y, [a, b]
# define the convolutional model
def convolutional(x, keep_prob):
def conv2d(x, w):
return tf.nn.conv2d(x, w, [1, 1, 1, 1], padding='SAME')
def max_pool_2x2(x):
return tf.nn.max_pool(
x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
def weight_variable(shape):
initial = tf.truncated_normal(shape, stddev=0.1)
return tf.Variable(initial)
def bias_variable(shape):
initial = tf.constant(0.1, shape=shape)
return tf.Variable(initial)
x_image = tf.reshape(x, [-1, 28, 28, 1])
w_conv1 = weight_variable([5, 5, 1, 32])
b_conv1 = bias_variable([32])
h_conv1 = tf.nn.relu(conv2d(x_image, w_conv1) + b_conv1)
h_pool1 = max_pool_2x2(h_conv1)
w_conv2 = weight_variable([5, 5, 32, 64])
b_conv2 = bias_variable([64])
h_conv2 = tf.nn.relu(conv2d(h_pool1, w_conv2) + b_conv2)
h_pool2 = max_pool_2x2(h_conv2)
    # fully connected layer
w_fc1 = weight_variable([7 * 7 * 64, 1024])
b_fc1 = bias_variable([1024])
h_pool2_flat = tf.reshape(h_pool2, [-1, 7 * 7 * 64])
h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, w_fc1) + b_fc1)
h_fc1_drop = tf.nn.dropout(h_fc1, keep_prob)
w_fc2 = weight_variable([1024, 10])
b_fc2 = bias_variable([10])
y = tf.nn.softmax(tf.matmul(h_fc1_drop, w_fc2))
return y, [w_conv1, b_conv1, w_conv2, b_conv2, w_fc1, w_fc2, b_fc2]
| [((745, 775), 'tensorflow.reshape', 'tf.reshape', (['x', '[-1, 28, 28, 1]'], {}), '(x, [-1, 28, 28, 1])\n', (755, 775), True, 'import tensorflow as tf\n'), ((1243, 1280), 'tensorflow.reshape', 'tf.reshape', (['h_pool2', '[-1, 7 * 7 * 64]'], {}), '(h_pool2, [-1, 7 * 7 * 64])\n', (1253, 1280), True, 'import tensorflow as tf\n'), ((1361, 1392), 'tensorflow.nn.dropout', 'tf.nn.dropout', (['h_fc1', 'keep_prob'], {}), '(h_fc1, keep_prob)\n', (1374, 1392), True, 'import tensorflow as tf\n'), ((87, 106), 'tensorflow.zeros', 'tf.zeros', (['[784, 10]'], {}), '([784, 10])\n', (95, 106), True, 'import tensorflow as tf\n'), ((138, 152), 'tensorflow.zeros', 'tf.zeros', (['[10]'], {}), '([10])\n', (146, 152), True, 'import tensorflow as tf\n'), ((310, 358), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['x', 'w', '[1, 1, 1, 1]'], {'padding': '"""SAME"""'}), "(x, w, [1, 1, 1, 1], padding='SAME')\n", (322, 358), True, 'import tensorflow as tf\n'), ((400, 475), 'tensorflow.nn.max_pool', 'tf.nn.max_pool', (['x'], {'ksize': '[1, 2, 2, 1]', 'strides': '[1, 2, 2, 1]', 'padding': '"""SAME"""'}), "(x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')\n", (414, 475), True, 'import tensorflow as tf\n'), ((540, 578), 'tensorflow.truncated_normal', 'tf.truncated_normal', (['shape'], {'stddev': '(0.1)'}), '(shape, stddev=0.1)\n', (559, 578), True, 'import tensorflow as tf\n'), ((594, 614), 'tensorflow.Variable', 'tf.Variable', (['initial'], {}), '(initial)\n', (605, 614), True, 'import tensorflow as tf\n'), ((664, 693), 'tensorflow.constant', 'tf.constant', (['(0.1)'], {'shape': 'shape'}), '(0.1, shape=shape)\n', (675, 693), True, 'import tensorflow as tf\n'), ((709, 729), 'tensorflow.Variable', 'tf.Variable', (['initial'], {}), '(initial)\n', (720, 729), True, 'import tensorflow as tf\n'), ((1487, 1515), 'tensorflow.matmul', 'tf.matmul', (['h_fc1_drop', 'w_fc2'], {}), '(h_fc1_drop, w_fc2)\n', (1496, 1515), True, 'import tensorflow as tf\n'), ((186, 201), 'tensorflow.matmul', 'tf.matmul', (['x', 'a'], {}), '(x, a)\n', (195, 201), True, 'import tensorflow as tf\n'), ((1304, 1334), 'tensorflow.matmul', 'tf.matmul', (['h_pool2_flat', 'w_fc1'], {}), '(h_pool2_flat, w_fc1)\n', (1313, 1334), True, 'import tensorflow as tf\n')] |
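Hypothetical graph-mode wiring of the two models above; this assumes TensorFlow 1.x semantics (or tf.compat.v1 with eager execution disabled), matching the placeholder/tf.nn style used in the file.
x = tf.placeholder(tf.float32, [None, 784])   # flattened 28x28 MNIST images
keep_prob = tf.placeholder(tf.float32)        # dropout keep probability
y_linear, linear_vars = regression(x)
y_conv, conv_vars = convolutional(x, keep_prob)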
terra-dashboard/staketaxcsv | src/sol/handle_metaplex.py | 5793105488bf799c61aee64a45f44e9ae8fef397 | from common.make_tx import make_swap_tx
from sol.handle_simple import handle_unknown_detect_transfers
def handle_metaplex(exporter, txinfo):
transfers_in, transfers_out, _ = txinfo.transfers_net
if len(transfers_in) == 1 and len(transfers_out) == 1:
sent_amount, sent_currency, _, _ = transfers_out[0]
received_amount, received_currency, _, _ = transfers_in[0]
row = make_swap_tx(txinfo, sent_amount, sent_currency, received_amount, received_currency)
exporter.ingest_row(row)
else:
handle_unknown_detect_transfers(exporter, txinfo)
def is_nft_mint(txinfo):
log_instructions = txinfo.log_instructions
transfers_in, transfers_out, _ = txinfo.transfers_net
if "MintTo" in log_instructions and len(transfers_out) == 1 and len(transfers_in) == 0:
return True
elif ("MintTo" in log_instructions
and len(transfers_out) == 1
and len(transfers_in) == 1
and transfers_in[0][0] == 1):
return True
else:
return False
def handle_nft_mint(exporter, txinfo):
transfers_in, transfers_out, transfers_unknown = txinfo.transfers_net
if len(transfers_in) == 1 and len(transfers_out) == 1:
sent_amount, sent_currency, _, _ = transfers_out[0]
received_amount, received_currency, _, _ = transfers_in[0]
row = make_swap_tx(txinfo, sent_amount, sent_currency, received_amount, received_currency)
exporter.ingest_row(row)
return
handle_unknown_detect_transfers(exporter, txinfo)
| [((1494, 1543), 'sol.handle_simple.handle_unknown_detect_transfers', 'handle_unknown_detect_transfers', (['exporter', 'txinfo'], {}), '(exporter, txinfo)\n', (1525, 1543), False, 'from sol.handle_simple import handle_unknown_detect_transfers\n'), ((403, 491), 'common.make_tx.make_swap_tx', 'make_swap_tx', (['txinfo', 'sent_amount', 'sent_currency', 'received_amount', 'received_currency'], {}), '(txinfo, sent_amount, sent_currency, received_amount,\n received_currency)\n', (415, 491), False, 'from common.make_tx import make_swap_tx\n'), ((539, 588), 'sol.handle_simple.handle_unknown_detect_transfers', 'handle_unknown_detect_transfers', (['exporter', 'txinfo'], {}), '(exporter, txinfo)\n', (570, 588), False, 'from sol.handle_simple import handle_unknown_detect_transfers\n'), ((1356, 1444), 'common.make_tx.make_swap_tx', 'make_swap_tx', (['txinfo', 'sent_amount', 'sent_currency', 'received_amount', 'received_currency'], {}), '(txinfo, sent_amount, sent_currency, received_amount,\n received_currency)\n', (1368, 1444), False, 'from common.make_tx import make_swap_tx\n')] |
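A stand-in sketch exercising is_nft_mint above: the SimpleNamespace object mimics only the attributes the function reads (log_instructions and transfers_net) and is not the project's real txinfo class.
from types import SimpleNamespace

fake_txinfo = SimpleNamespace(
    log_instructions=["MintTo"],
    transfers_net=(
        [],                          # transfers_in: nothing received
        [(1.5, "SOL", None, None)],  # transfers_out: one payment (amount, currency, ...)
        [],                          # remainder (unused by is_nft_mint)
    ),
)
assert is_nft_mint(fake_txinfo) is True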
lemiceterieux/dcor | dcor/independence.py | 205682a71463a2c6ab8f5b8b215ec12d44f0b5a6 | """
Functions for testing independence of several distributions.
The functions in this module provide methods for testing if
the samples generated from two random vectors are independent.
"""
import numpy as np
import scipy.stats
from . import _dcor_internals, _hypothesis
from ._dcor import u_distance_correlation_sqr
from ._utils import _random_state_init, _transform_to_2d
def distance_covariance_test(
x,
y,
*,
num_resamples=0,
exponent=1,
random_state=None,
n_jobs=1,
):
"""
Test of distance covariance independence.
Compute the test of independence based on the distance
covariance, for two random vectors.
The test is a permutation test where the null hypothesis is that the two
random vectors are independent.
Parameters
----------
x: array_like
First random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
y: array_like
Second random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
exponent: float
Exponent of the Euclidean distance, in the range :math:`(0, 2)`.
Equivalently, it is twice the Hurst parameter of fractional Brownian
motion.
num_resamples: int
        Number of permutation resamples to take in the permutation test.
random_state: {None, int, array_like, numpy.random.RandomState}
Random state to generate the permutations.
Returns
-------
HypothesisTest
Results of the hypothesis test.
See Also
--------
distance_covariance
Examples
--------
>>> import numpy as np
>>> import dcor
>>> a = np.array([[1, 2, 3, 4],
... [5, 6, 7, 8],
... [9, 10, 11, 12],
... [13, 14, 15, 16]])
>>> b = np.array([[1, 0, 0, 1],
... [0, 1, 1, 1],
... [1, 1, 1, 1],
... [1, 1, 0, 1]])
>>> dcor.independence.distance_covariance_test(a, a)
HypothesisTest(p_value=1.0, statistic=208.0)
>>> dcor.independence.distance_covariance_test(a, b)
... # doctest: +ELLIPSIS
HypothesisTest(p_value=1.0, statistic=11.75323056...)
>>> dcor.independence.distance_covariance_test(b, b)
HypothesisTest(p_value=1.0, statistic=1.3604610...)
>>> dcor.independence.distance_covariance_test(a, b,
... num_resamples=5, random_state=0)
HypothesisTest(p_value=0.5, statistic=11.7532305...)
>>> dcor.independence.distance_covariance_test(a, b,
... num_resamples=5, random_state=13)
HypothesisTest(p_value=0.3333333..., statistic=11.7532305...)
>>> dcor.independence.distance_covariance_test(a, a,
... num_resamples=7, random_state=0)
HypothesisTest(p_value=0.125, statistic=208.0)
"""
x = _transform_to_2d(x)
y = _transform_to_2d(y)
_dcor_internals._check_same_n_elements(x, y)
random_state = _random_state_init(random_state)
# Compute U-centered matrices
u_x = _dcor_internals._distance_matrix_generic(
x,
centering=_dcor_internals.double_centered,
exponent=exponent)
u_y = _dcor_internals._distance_matrix_generic(
y,
centering=_dcor_internals.double_centered,
exponent=exponent)
# Use the dcov statistic
def statistic_function(distance_matrix):
return u_x.shape[0] * _dcor_internals.mean_product(
distance_matrix, u_y)
return _hypothesis._permutation_test_with_sym_matrix(
u_x,
statistic_function=statistic_function,
num_resamples=num_resamples,
random_state=random_state,
n_jobs=n_jobs)
def partial_distance_covariance_test(
x,
y,
z,
*,
num_resamples=0,
exponent=1,
random_state=None,
n_jobs=1,
):
"""
Test of partial distance covariance independence.
Compute the test of independence based on the partial distance
covariance, for two random vectors conditioned on a third.
The test is a permutation test where the null hypothesis is that the first
two random vectors are independent given the third one.
Parameters
----------
x: array_like
First random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
y: array_like
Second random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
z: array_like
Observed random vector. The columns correspond with the individual
random variables while the rows are individual instances of the random
vector.
num_resamples: int
        Number of permutation resamples to take in the permutation test.
random_state: {None, int, array_like, numpy.random.RandomState}
Random state to generate the permutations.
Returns
-------
HypothesisTest
Results of the hypothesis test.
See Also
--------
partial_distance_covariance
Examples
--------
>>> import numpy as np
>>> import dcor
>>> a = np.array([[1, 2, 3, 4],
... [5, 6, 7, 8],
... [9, 10, 11, 12],
... [13, 14, 15, 16]])
>>> b = np.array([[1, 0, 0, 1],
... [0, 1, 1, 1],
... [1, 1, 1, 1],
... [1, 1, 0, 1]])
>>> c = np.array([[1000, 0, 0, 1000],
... [0, 1000, 1000, 1000],
... [1000, 1000, 1000, 1000],
... [1000, 1000, 0, 1000]])
>>> dcor.independence.partial_distance_covariance_test(a, a, b)
... # doctest: +ELLIPSIS
HypothesisTest(p_value=1.0, statistic=142.6664416...)
>>> dcor.independence.partial_distance_covariance_test(a, b, c)
... # doctest: +ELLIPSIS
HypothesisTest(p_value=1.0, statistic=7.2690070...e-15)
>>> dcor.independence.partial_distance_covariance_test(b, b, c)
... # doctest: +ELLIPSIS
HypothesisTest(p_value=1.0, statistic=2.2533380...e-30)
>>> dcor.independence.partial_distance_covariance_test(a, b, c,
... num_resamples=5, random_state=0)
HypothesisTest(p_value=0.1666666..., statistic=7.2690070...e-15)
>>> dcor.independence.partial_distance_covariance_test(a, b, c,
... num_resamples=5, random_state=13)
HypothesisTest(p_value=0.1666666..., statistic=7.2690070...e-15)
>>> dcor.independence.partial_distance_covariance_test(a, c, b,
... num_resamples=7, random_state=0)
HypothesisTest(p_value=1.0, statistic=-7.5701764...e-12)
"""
random_state = _random_state_init(random_state)
# Compute U-centered matrices
u_x = _dcor_internals._u_distance_matrix(x, exponent=exponent)
u_y = _dcor_internals._u_distance_matrix(y, exponent=exponent)
u_z = _dcor_internals._u_distance_matrix(z, exponent=exponent)
# Compute projections
proj = _dcor_internals.u_complementary_projection(u_z)
p_xz = proj(u_x)
p_yz = proj(u_y)
# Use the pdcor statistic
def statistic_function(distance_matrix):
return u_x.shape[0] * _dcor_internals.u_product(
distance_matrix, p_yz)
return _hypothesis._permutation_test_with_sym_matrix(
p_xz,
statistic_function=statistic_function,
num_resamples=num_resamples,
random_state=random_state,
n_jobs=n_jobs)
def distance_correlation_t_statistic(x, y):
"""
Transformation of the bias corrected version of distance correlation used
in :func:`distance_correlation_t_test`.
Parameters
----------
x: array_like
First random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
y: array_like
Second random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
Returns
-------
numpy scalar
T statistic.
See Also
--------
distance_correlation_t_test
Examples
--------
>>> import numpy as np
>>> import dcor
>>> a = np.array([[1, 2, 3, 4],
... [5, 6, 7, 8],
... [9, 10, 11, 12],
... [13, 14, 15, 16]])
>>> b = np.array([[1, 0, 0, 1],
... [0, 1, 1, 1],
... [1, 1, 1, 1],
... [1, 1, 0, 1]])
>>> with np.errstate(divide='ignore'):
... dcor.independence.distance_correlation_t_statistic(a, a)
inf
>>> dcor.independence.distance_correlation_t_statistic(a, b)
... # doctest: +ELLIPSIS
-0.4430164...
>>> with np.errstate(divide='ignore'):
... dcor.independence.distance_correlation_t_statistic(b, b)
inf
"""
bcdcor = u_distance_correlation_sqr(x, y)
n = x.shape[0]
v = n * (n - 3) / 2
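    # T = sqrt(v - 1) * R / sqrt(1 - R^2), where R is the bias-corrected distance
    # correlation and v = n(n - 3)/2.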
return np.sqrt(v - 1) * bcdcor / np.sqrt(1 - bcdcor**2)
def distance_correlation_t_test(x, y):
"""
Test of independence for high dimension based on convergence to a Student t
distribution. The null hypothesis is that the two random vectors are
independent.
Parameters
----------
x: array_like
First random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
y: array_like
Second random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
Returns
-------
HypothesisTest
Results of the hypothesis test.
See Also
--------
distance_correlation_t_statistic
Examples
--------
>>> import numpy as np
>>> import dcor
>>> a = np.array([[1, 2, 3, 4],
... [5, 6, 7, 8],
... [9, 10, 11, 12],
... [13, 14, 15, 16]])
>>> b = np.array([[1, 0, 0, 1],
... [0, 1, 1, 1],
... [1, 1, 1, 1],
... [1, 1, 0, 1]])
>>> with np.errstate(divide='ignore'):
... dcor.independence.distance_correlation_t_test(a, a)
... # doctest: +ELLIPSIS
HypothesisTest(p_value=0.0, statistic=inf)
>>> dcor.independence.distance_correlation_t_test(a, b)
... # doctest: +ELLIPSIS
HypothesisTest(p_value=0.6327451..., statistic=-0.4430164...)
>>> with np.errstate(divide='ignore'):
... dcor.independence.distance_correlation_t_test(b, b)
... # doctest: +ELLIPSIS
HypothesisTest(p_value=0.0, statistic=inf)
"""
t_test = distance_correlation_t_statistic(x, y)
n = x.shape[0]
v = n * (n - 3) / 2
df = v - 1
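    # Upper-tail p-value of a Student t distribution with v - 1 degrees of freedom.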
p_value = 1 - scipy.stats.t.cdf(t_test, df=df)
return _hypothesis.HypothesisTest(p_value=p_value, statistic=t_test)
| [((9268, 9292), 'numpy.sqrt', 'np.sqrt', (['(1 - bcdcor ** 2)'], {}), '(1 - bcdcor ** 2)\n', (9275, 9292), True, 'import numpy as np\n'), ((9242, 9256), 'numpy.sqrt', 'np.sqrt', (['(v - 1)'], {}), '(v - 1)\n', (9249, 9256), True, 'import numpy as np\n')] |
cinemascience/cinemasc | cinemasci/cis/__init__.py | 5b00a0c2e3c886f65cfbf1f59e914fc458d7068b | from . import imageview
from . import cisview
from . import renderer
from . import convert
from . import image      # referenced by add_image() below
from . import colormap   # referenced by add_colormap() below
class cis:
"""Composible Image Set Class
The data structure to hold properties of a Composible Image Set.
"""
def __init__(self, filename):
""" The constructor. """
self.fname = filename
self.classname = "COMPOSABLE_IMAGE_SET"
self.dims = [0,0]
self.flags = "CONSTANT_CHANNELS"
self.version = "1.0"
self.parameterlist = []
self.parametertable = None
self.variables = {}
self.images = {}
self.colormaps = {}
def debug_print(self):
""" Debug print statement for CIS properties. """
print("printing cis")
print(" fname: {}".format(self.fname))
print(" classname: {}".format(self.classname))
print(" dims: {}".format(self.dims))
print(" flags: {}".format(self.flags))
print(" version: {}".format(self.version))
print(" colormaps: ")
for m in self.colormaps:
print(m)
for i in self.get_images():
print(" image: {}".format(self.get_image(i).name))
for l in self.get_image(i).get_layers():
print(" layer: {}".format(self.get_image(i).get_layer(l).name))
print("\n")
def get_image(self, key):
""" Returns an image given its key. """
result = False
if key in self.images:
result = self.images[key]
return result
def get_images(self):
""" Returns all images. """
for i in self.images:
yield i
def get_image_names(self):
""" Returns list of image names. """
return list(self.images.keys())
def set_parameter_table(self, table):
""" Set parameter table using a deep copy. """
self.parametertable = table.copy(deep=True)
def add_parameter(self, name, type):
""" Add a parameter to the list of parameters for the CIS. """
# check for duplicates
self.parameterlist.append([name, type])
def add_variable(self, name, type, min, max):
""" Add a variable to the set of variables. """
# check for duplicates
self.variables[name] = {'type':type, 'min':min, 'max':max}
def add_image(self, name):
""" Add an image to the set of images in the CIS. """
# check for duplicates
self.images[name] = image.image(name)
return self.images[name]
def get_variables(self):
""" Return all variables. """
for i in self.variables:
yield i
def get_variable(self, name):
""" Return a variable. """
variable = None
if name in self.variables:
variable = self.variables[name]
return variable
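    # NOTE: the get_image() below replaces the key-based get_image() defined earlier in the class.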
    def get_image(self, name):
""" Return an image. """
image = None
if name in self.images:
image = self.images[name]
return image
    def get_colormap(self, name):
""" Return a colormap. """
colormap = None
if name in self.colormaps:
colormap = self.colormaps[name]
return colormap
def add_colormap(self, name, path):
""" Add a colormap to the set of colormaps. """
#if colormap not in dict
if (name not in self.colormaps):
self.colormaps[name] = colormap.colormap(path)
def remove_colormap(self, name):
""" Remove a colormap from the set of colormaps. """
self.colormaps.pop(name)
def get_colormaps(self):
""" Return all colormaps. """
for i in self.colormaps:
yield i
def set_dims(self, w, h):
""" Set the dimensions of the CIS given a width and height. """
self.dims = [w, h]
| [] |
fos/fos-legacy | applications/spaghetti.py | db6047668781a0615abcebc7d55a7164f3105047 | import numpy as np
import nibabel as nib
import os.path as op
import pyglet
#pyglet.options['debug_gl'] = True
#pyglet.options['debug_x11'] = True
#pyglet.options['debug_gl_trace'] = True
#pyglet.options['debug_texture'] = True
#fos modules
from fos.actor.axes import Axes
from fos import World, Window, WindowManager
from labeler import TrackLabeler
from fos.actor.slicer import Slicer
#dipy modules
from dipy.segment.quickbundles import QuickBundles
from dipy.io.dpy import Dpy
from dipy.io.pickles import load_pickle,save_pickle
from dipy.viz.colormap import orient2rgb
import copy
if __name__ == '__main__':
subject = 5
seeds = 1
qb_dist = 30
#load T1 volume registered in MNI space
img = nib.load('data/subj_'+("%02d" % subject)+'/MPRAGE_32/T1_flirt_out.nii.gz')
data = img.get_data()
affine = img.get_affine()
#load the tracks registered in MNI space
fdpyw = 'data/subj_'+("%02d" % subject)+'/101_32/DTI/tracks_gqi_'+str(seeds)+'M_linear.dpy'
dpr = Dpy(fdpyw, 'r')
T = dpr.read_tracks()
dpr.close()
#load initial QuickBundles with threshold 30mm
fpkl = 'data/subj_'+("%02d" % subject)+'/101_32/DTI/qb_gqi_'+str(seeds)+'M_linear_'+str(qb_dist)+'.pkl'
#qb=QuickBundles(T,30.,12)
qb=load_pickle(fpkl)
#create the interaction system for tracks
tl = TrackLabeler(qb,qb.downsampled_tracks(),vol_shape=data.shape,tracks_alpha=1)
#add a interactive slicing/masking tool
sl = Slicer(affine,data)
#add one way communication between tl and sl
tl.slicer=sl
#OpenGL coordinate system axes
ax = Axes(100)
x,y,z=data.shape
#add the actors to the world
w=World()
w.add(tl)
w.add(sl)
#w.add(ax)
#create a window
wi = Window(caption="Interactive Spaghetti using Diffusion Imaging in Python (dipy.org) and Free On Shades (fos.me)",\
bgcolor=(0.3,0.3,0.6,1),width=1200,height=800)
#attach the world to the window
wi.attach(w)
#create a manager which can handle multiple windows
wm = WindowManager()
wm.add(wi)
wm.run()
print('Everything is running ;-)')
| [((727, 803), 'nibabel.load', 'nib.load', (["('data/subj_' + '%02d' % subject + '/MPRAGE_32/T1_flirt_out.nii.gz')"], {}), "('data/subj_' + '%02d' % subject + '/MPRAGE_32/T1_flirt_out.nii.gz')\n", (735, 803), True, 'import nibabel as nib\n'), ((1018, 1033), 'dipy.io.dpy.Dpy', 'Dpy', (['fdpyw', '"""r"""'], {}), "(fdpyw, 'r')\n", (1021, 1033), False, 'from dipy.io.dpy import Dpy\n'), ((1281, 1298), 'dipy.io.pickles.load_pickle', 'load_pickle', (['fpkl'], {}), '(fpkl)\n', (1292, 1298), False, 'from dipy.io.pickles import load_pickle, save_pickle\n'), ((1488, 1508), 'fos.actor.slicer.Slicer', 'Slicer', (['affine', 'data'], {}), '(affine, data)\n', (1494, 1508), False, 'from fos.actor.slicer import Slicer\n'), ((1626, 1635), 'fos.actor.axes.Axes', 'Axes', (['(100)'], {}), '(100)\n', (1630, 1635), False, 'from fos.actor.axes import Axes\n'), ((1700, 1707), 'fos.World', 'World', ([], {}), '()\n', (1705, 1707), False, 'from fos import World, Window, WindowManager\n'), ((1781, 1955), 'fos.Window', 'Window', ([], {'caption': '"""Interactive Spaghetti using Diffusion Imaging in Python (dipy.org) and Free On Shades (fos.me)"""', 'bgcolor': '(0.3, 0.3, 0.6, 1)', 'width': '(1200)', 'height': '(800)'}), "(caption=\n 'Interactive Spaghetti using Diffusion Imaging in Python (dipy.org) and Free On Shades (fos.me)'\n , bgcolor=(0.3, 0.3, 0.6, 1), width=1200, height=800)\n", (1787, 1955), False, 'from fos import World, Window, WindowManager\n'), ((2080, 2095), 'fos.WindowManager', 'WindowManager', ([], {}), '()\n', (2093, 2095), False, 'from fos import World, Window, WindowManager\n')] |
dlzdy/faceai | faceai/gender.py | 4b1e41d4c394c00da51533562b76306d86493f72 | #coding=utf-8
# Gender recognition
import cv2
from keras.models import load_model
import numpy as np
import chineseText
img = cv2.imread("img/gather.png")
face_classifier = cv2.CascadeClassifier(
"d:\Python36\Lib\site-packages\opencv-master\data\haarcascades\haarcascade_frontalface_default.xml"
)
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
faces = face_classifier.detectMultiScale(
gray, scaleFactor=1.2, minNeighbors=3, minSize=(140, 140))
gender_classifier = load_model(
"classifier/gender_models/simple_CNN.81-0.96.hdf5")
gender_labels = {0: '女', 1: '男'}  # 0: female, 1: male (labels are drawn on the image in Chinese via chineseText)
color = (255, 255, 255)
for (x, y, w, h) in faces:
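    # Crop the detected face with extra margin (60 px vertically, 30 px horizontally)
    # before resizing it for the gender classifier.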
face = img[(y - 60):(y + h + 60), (x - 30):(x + w + 30)]
face = cv2.resize(face, (48, 48))
face = np.expand_dims(face, 0)
face = face / 255.0
gender_label_arg = np.argmax(gender_classifier.predict(face))
gender = gender_labels[gender_label_arg]
cv2.rectangle(img, (x, y), (x + h, y + w), color, 2)
img = chineseText.cv2ImgAddText(img, gender, x + h, y, color, 30)
cv2.imshow("Image", img)
cv2.waitKey(0)
cv2.destroyAllWindows()
| [((113, 141), 'cv2.imread', 'cv2.imread', (['"""img/gather.png"""'], {}), "('img/gather.png')\n", (123, 141), False, 'import cv2\n'), ((160, 299), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""d:\\\\Python36\\\\Lib\\\\site-packages\\\\opencv-master\\\\data\\\\haarcascades\\\\haarcascade_frontalface_default.xml"""'], {}), "(\n 'd:\\\\Python36\\\\Lib\\\\site-packages\\\\opencv-master\\\\data\\\\haarcascades\\\\haarcascade_frontalface_default.xml'\n )\n", (181, 299), False, 'import cv2\n'), ((296, 333), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2GRAY'], {}), '(img, cv2.COLOR_BGR2GRAY)\n', (308, 333), False, 'import cv2\n'), ((460, 522), 'keras.models.load_model', 'load_model', (['"""classifier/gender_models/simple_CNN.81-0.96.hdf5"""'], {}), "('classifier/gender_models/simple_CNN.81-0.96.hdf5')\n", (470, 522), False, 'from keras.models import load_model\n'), ((1010, 1034), 'cv2.imshow', 'cv2.imshow', (['"""Image"""', 'img'], {}), "('Image', img)\n", (1020, 1034), False, 'import cv2\n'), ((1035, 1049), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (1046, 1049), False, 'import cv2\n'), ((1050, 1073), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1071, 1073), False, 'import cv2\n'), ((685, 711), 'cv2.resize', 'cv2.resize', (['face', '(48, 48)'], {}), '(face, (48, 48))\n', (695, 711), False, 'import cv2\n'), ((723, 746), 'numpy.expand_dims', 'np.expand_dims', (['face', '(0)'], {}), '(face, 0)\n', (737, 746), True, 'import numpy as np\n'), ((886, 938), 'cv2.rectangle', 'cv2.rectangle', (['img', '(x, y)', '(x + h, y + w)', 'color', '(2)'], {}), '(img, (x, y), (x + h, y + w), color, 2)\n', (899, 938), False, 'import cv2\n'), ((949, 1008), 'chineseText.cv2ImgAddText', 'chineseText.cv2ImgAddText', (['img', 'gender', '(x + h)', 'y', 'color', '(30)'], {}), '(img, gender, x + h, y, color, 30)\n', (974, 1008), False, 'import chineseText\n')] |
mudit2103/csm_web | csm_web/scheduler/tests/utils.py | 3b7fd9ca7269ad4cb57bf264cf62a620e02d3780 | from django.test import TestCase
from os import path
from rest_framework import status
from rest_framework.test import APIClient
import random
from scheduler.models import Profile
from scheduler.factories import (
CourseFactory,
SpacetimeFactory,
UserFactory,
ProfileFactory,
SectionFactory,
AttendanceFactory,
OverrideFactory,
create_attendances_for,
)
random.seed(0)
COURSE_NAMES = ("CS88", "CS61A", "CS61B", "CS70", "CS61C", "EE16A")
ROLE_MAP = Profile.ROLE_MAP
BASE_PATH = "/scheduler"
# ----- REQUEST UTILITIES -----
def fail_msg(ep, resp):
return "Endpoint: {}\nResponse Content: {}".format(ep, resp.content)
class APITestCase(TestCase):
def get_client_for(self, user):
"""Returns an APIClient object that is logged in as the provided user."""
client = APIClient()
client.force_authenticate(user)
return client
def request(self, method, endpoint, exp_code=None, data=None):
"""
Performs a request to the specified endpoint and returns the response object.
Also checks if the status code of the response is exp_code, if provided.
The method parameter should be a get/post/etc from an APIClient object.
"""
resp = method(path.join(BASE_PATH, endpoint.strip("/")), follow=True, data=data)
if exp_code is not None:
self.assertEqual(resp.status_code, exp_code, msg=fail_msg(endpoint, resp))
return resp
def req_fails_perms(self, method, endpoint, data=None):
"""
Performs a request to the specified endpoint, and checks that it fails
due to the user lacking proper permissions.
The method parameter should be a get/post/etc from an APIClient object.
Returns the response object afterwards.
"""
return self.request(
method, endpoint, exp_code=status.HTTP_403_FORBIDDEN, data=data
)
def req_fails_method(self, method, endpoint, data=None):
"""
Performs a request to the specified endpoint, and checks that it fails
due to the endpoint not supporting the provided method.
Returns the response object.
"""
return self.request(
method, endpoint, exp_code=status.HTTP_405_METHOD_NOT_ALLOWED, data=data
)
def req_succeeds(self, method, endpoint, data=None):
"""
Performs a request to the specified endpoint, and checks that it succeeds.
The method parameter should be a get/post/etc from an APIClient object.
Returns the response object.
"""
return self.request(method, endpoint, exp_code=status.HTTP_200_OK, data=data)
# ----- MODEL GENERATION -----
def random_objs(clazz, n=1):
"""
    Yields N randomly chosen instances of the provided class, retrieved from the database (choices may repeat).
"""
src = clazz.objects.all()
for _ in range(n):
yield random.choice(src)
def make_test_courses():
"""Creates course objects and persists them to database."""
return [CourseFactory.create(name=name) for name in COURSE_NAMES]
def make_test_users(n):
"""Creates N test users and persists them to database."""
return UserFactory.create_batch(n)
def give_role(user, role, course):
"""
Creates a profile for USER in a given ROLE for the provided COURSE, and
saves the profile to database.
"""
return ProfileFactory.create(
user=user, course=course, leader=None, section=None, role=role
)
def create_empty_section_for(mentor):
"""
Creates a section for MENTOR without populated students.
"""
return SectionFactory.create(course=mentor.course, mentor=mentor)
def enroll_user_as_student(user, section):
"""
Creates a student profile for USER, and assigns them to the given SECTION.
Also creates blank attendances as necessary.
Returns the created profile.
"""
student = give_role(user, Profile.STUDENT, section.course)
student.section = section
student.leader = section.leader
create_attendances_for(student)
return student
def gen_test_data(cls, NUM_USERS=300):
"""
Adds NUM_USERS users to the database and initializes profiles for them as follows:
- 2 coords per course
- 4 SMs per coord, each with a section of 3-6 students
- 3 JMs per SM, each with a section of 3-6 students
"""
users = iter(make_test_users(NUM_USERS))
courses = make_test_courses()
# for sanity tests, everyone only has one role for now
num_courses = len(courses)
coords, seniors, juniors, students = [], [], [], []
COORD_COUNT = 2
SM_COUNT = 4
JM_COUNT = 3
def assign(role, leader, c, lst):
# returns the profile created
profile = give_role(next(users), role, c)
profile.leader = leader
lst.append(profile)
return profile
try:
for c in courses:
# coords
for i in range(COORD_COUNT):
coord = assign(Profile.COORDINATOR, None, c, coords)
# SMs
for j in range(SM_COUNT):
sm = assign(Profile.SENIOR_MENTOR, coord, c, seniors)
section = create_empty_section_for(sm)
for k in range(random.randint(3, 6)):
students.append(enroll_user_as_student(next(users), section))
# JMs
for k in range(JM_COUNT):
                        jm = assign(Profile.JUNIOR_MENTOR, sm, c, juniors)
                        # per the docstring, each JM gets a section of their own with 3-6 students
                        jm_section = create_empty_section_for(jm)
                        for _ in range(random.randint(3, 6)):
                            students.append(
                                enroll_user_as_student(next(users), jm_section)
)
except StopIteration:
pass
cls.users = users
cls.courses = courses
cls.coords = coords
cls.seniors = seniors
cls.juniors = juniors
cls.students = students
| [((388, 402), 'random.seed', 'random.seed', (['(0)'], {}), '(0)\n', (399, 402), False, 'import random\n'), ((3184, 3211), 'scheduler.factories.UserFactory.create_batch', 'UserFactory.create_batch', (['n'], {}), '(n)\n', (3208, 3211), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((3378, 3467), 'scheduler.factories.ProfileFactory.create', 'ProfileFactory.create', ([], {'user': 'user', 'course': 'course', 'leader': 'None', 'section': 'None', 'role': 'role'}), '(user=user, course=course, leader=None, section=None,\n role=role)\n', (3399, 3467), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((3600, 3658), 'scheduler.factories.SectionFactory.create', 'SectionFactory.create', ([], {'course': 'mentor.course', 'mentor': 'mentor'}), '(course=mentor.course, mentor=mentor)\n', (3621, 3658), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((4002, 4033), 'scheduler.factories.create_attendances_for', 'create_attendances_for', (['student'], {}), '(student)\n', (4024, 4033), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((821, 832), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (830, 832), False, 'from rest_framework.test import APIClient\n'), ((3027, 3058), 'scheduler.factories.CourseFactory.create', 'CourseFactory.create', ([], {'name': 'name'}), '(name=name)\n', (3047, 3058), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((2905, 2923), 'random.choice', 'random.choice', (['src'], {}), '(src)\n', (2918, 2923), False, 'import random\n'), ((5211, 5231), 'random.randint', 'random.randint', (['(3)', '(6)'], {}), '(3, 6)\n', (5225, 5231), False, 'import random\n'), ((5506, 5526), 'random.randint', 'random.randint', (['(3)', '(6)'], {}), '(3, 6)\n', (5520, 5526), False, 'import random\n')] |
tallpauley/coldtype | coldtype/beziers.py | c1811e1d3713ff9c3c804511d6cd607b1d802065 | import math
from fontTools.pens.recordingPen import RecordingPen, replayRecording
from fontTools.misc.bezierTools import calcCubicArcLength, splitCubicAtT
from coldtype.geometry import Rect, Point
def raise_quadratic(start, a, b):
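    # Degree-elevate a quadratic Bezier (start, a, b) to an equivalent cubic: the two new
    # control points sit 2/3 of the way from each endpoint toward the quadratic control point a.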
c0 = start
c1 = (c0[0] + (2/3)*(a[0] - c0[0]), c0[1] + (2/3)*(a[1] - c0[1]))
c2 = (b[0] + (2/3)*(a[0] - b[0]), b[1] + (2/3)*(a[1] - b[1]))
c3 = (b[0], b[1])
return [c1, c2, c3]
__length_cache = {}
__split_cache = {}
def splitCubicAtT_cached(a, b, c, d, t):
global __split_cache
abcdt = (a, b, c, d, t)
sc = __split_cache.get(abcdt)
if sc:
return sc
else:
s = splitCubicAtT(a, b, c, d, t)
__split_cache[abcdt] = s
return s
def calcCubicArcLength_cached(a, b, c, d):
#return calcCubicArcLength(a, b, c, d)
global __length_cache
abcd = (a, b, c, d)
lc = __length_cache.get(abcd)
if lc:
return lc
else:
l = calcCubicArcLength(a, b, c, d)
__length_cache[abcd] = l
return l
class CurveCutter():
def __init__(self, g, inc=0.0015):
if isinstance(g, RecordingPen):
self.pen = g
else:
self.pen = RecordingPen()
g.draw(self.pen)
self.inc = inc
self.length = self.calcCurveLength()
def calcCurveLength(self):
length = 0
for i, (t, pts) in enumerate(self.pen.value):
if t == "curveTo":
p1, p2, p3 = pts
p0 = self.pen.value[i-1][-1][-1]
length += calcCubicArcLength_cached(p0, p1, p2, p3)
elif t == "lineTo":
pass # todo
return length
def subsegment(self, start=None, end=None):
global __cut_cache
inc = self.inc
length = self.length
ended = False
_length = 0
out = []
for i, (t, pts) in enumerate(self.pen.value):
if t == "curveTo":
p1, p2, p3 = pts
p0 = self.pen.value[i-1][-1][-1]
length_arc = calcCubicArcLength_cached(p0, p1, p2, p3)
if _length + length_arc < end:
_length += length_arc
else:
t = inc
tries = 0
while not ended:
a, b = splitCubicAtT_cached(p0, p1, p2, p3, t)
length_a = calcCubicArcLength_cached(*a)
if _length + length_a > end:
ended = True
out.append(("curveTo", a[1:]))
else:
t += inc
tries += 1
if t == "lineTo":
pass # TODO
if not ended:
out.append((t, pts))
if out[-1][0] != "endPath":
out.append(("endPath",[]))
return out
def subsegmentPoint(self, start=0, end=1):
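        # Returns the end point of the cut subsegment and, in degrees, the direction of the
        # final control leg rotated a quarter turn (used as a normal/tangent heading).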
inc = self.inc
subsegment = self.subsegment(start=start, end=end)
try:
t, (a, b, c) = subsegment[-2]
tangent = math.degrees(math.atan2(c[1] - b[1], c[0] - b[0]) + math.pi*.5)
return c, tangent
except ValueError:
return None, None | [((650, 678), 'fontTools.misc.bezierTools.splitCubicAtT', 'splitCubicAtT', (['a', 'b', 'c', 'd', 't'], {}), '(a, b, c, d, t)\n', (663, 678), False, 'from fontTools.misc.bezierTools import calcCubicArcLength, splitCubicAtT\n'), ((951, 981), 'fontTools.misc.bezierTools.calcCubicArcLength', 'calcCubicArcLength', (['a', 'b', 'c', 'd'], {}), '(a, b, c, d)\n', (969, 981), False, 'from fontTools.misc.bezierTools import calcCubicArcLength, splitCubicAtT\n'), ((1195, 1209), 'fontTools.pens.recordingPen.RecordingPen', 'RecordingPen', ([], {}), '()\n', (1207, 1209), False, 'from fontTools.pens.recordingPen import RecordingPen, replayRecording\n'), ((3145, 3181), 'math.atan2', 'math.atan2', (['(c[1] - b[1])', '(c[0] - b[0])'], {}), '(c[1] - b[1], c[0] - b[0])\n', (3155, 3181), False, 'import math\n')] |
nick0lay/deep-reinforcement-learning | p1_navigation/train.py | 5af4daca9850b4e12aec5d8b0dad87f1e22a1f98 | """
Project for Udacity Danaodgree in Deep Reinforcement Learning
This script trains an agent to navigate (and collect bananas!) in a large, square world.
A reward of +1 is provided for collecting a yellow banana, and a reward of -1 is provided for collecting a blue banana. Thus, the goal of your agent is to collect as many yellow bananas as possible while avoiding blue bananas.
The state space has 37 dimensions and contains the agent's velocity, along with ray-based perception of objects around the agent's forward direction. Given this information, the agent has to learn how to best select actions. Four discrete actions are available, corresponding to:
0 - move forward.
1 - move backward.
2 - turn left.
3 - turn right.
The task is episodic, and in order to solve the environment, your agent must get an average score of +13 over 100 consecutive episodes.
"""
from unityagents import UnityEnvironment
import numpy as np
from collections import deque
from dqn_agent import Agent
import torch
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
"""
Unity environment configuration
Mac: "path/to/Banana.app"
Windows (x86): "path/to/Banana_Windows_x86/Banana.exe"
Windows (x86_64): "path/to/Banana_Windows_x86_64/Banana.exe"
Linux (x86): "path/to/Banana_Linux/Banana.x86"
Linux (x86_64): "path/to/Banana_Linux/Banana.x86_64"
Linux (x86, headless): "path/to/Banana_Linux_NoVis/Banana.x86"
Linux (x86_64, headless): "path/to/Banana_Linux_NoVis/Banana.x86_64"
"""
# start Unity environment
env = UnityEnvironment(file_name="Banana.app")
# get the default brain
brain_name = env.brain_names[0]
brain = env.brains[brain_name]
env_info = env.reset(train_mode=False)[brain_name]
action_size = brain.vector_action_space_size
state_size = len(env_info.vector_observations[0])
# initialize agent
agent = Agent(state_size=state_size, action_size=action_size, seed=0, device=device)
def train(n_episodes=2000, eps_start=1.0, eps_end=0.05, eps_decay=0.99):
"""Deep Q-Learning.
Params
======
n_episodes (int): maximum number of training episodes
eps_start (float): starting value of epsilon, for epsilon-greedy action selection
eps_end (float): minimum value of epsilon
eps_decay (float): multiplicative factor (per episode) for decreasing epsilon
"""
scores = [] # list containing scores from each episode
scores_window = deque(maxlen=100) # last 100 scores
eps = eps_start # initialize epsilon
for i_episode in range(1, n_episodes+1):
# reset environment
env_info = env.reset(train_mode=True)[brain_name]
# get initial state
state = env_info.vector_observations[0]
# set initial score
score = 0
while True:
action = agent.act(state, eps)
env_info = env.step(action)[brain_name]
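            # unpack the next observation, reward, and episode-termination flag from the Unity response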
next_state, reward, done = env_info.vector_observations[0], env_info.rewards[0], env_info.local_done[0]
agent.step(state, action, reward, next_state, done)
state = next_state
score += reward
if done:
break
scores_window.append(score) # save most recent score
scores.append(score) # save most recent score
eps = max(eps_end, eps_decay*eps) # decrease epsilon
print('\rEpisode {}\tAverage Score: {:.2f}'.format(i_episode, np.mean(scores_window)), end="")
if i_episode % 100 == 0:
print('\rEpisode {}\tAverage Score: {:.2f}'.format(i_episode, np.mean(scores_window)))
        if np.mean(scores_window) >= 14:  # stop slightly above the +13 solve criterion described in the docstring
print('\nEnvironment solved in {:d} episodes!\tAverage Score: {:.2f}'.format(i_episode-100, np.mean(scores_window)))
torch.save(agent.qnetwork_local.state_dict(), 'checkpoint.pth')
break
return scores
train() | [((1524, 1564), 'unityagents.UnityEnvironment', 'UnityEnvironment', ([], {'file_name': '"""Banana.app"""'}), "(file_name='Banana.app')\n", (1540, 1564), False, 'from unityagents import UnityEnvironment\n'), ((1829, 1905), 'dqn_agent.Agent', 'Agent', ([], {'state_size': 'state_size', 'action_size': 'action_size', 'seed': '(0)', 'device': 'device'}), '(state_size=state_size, action_size=action_size, seed=0, device=device)\n', (1834, 1905), False, 'from dqn_agent import Agent\n'), ((2429, 2446), 'collections.deque', 'deque', ([], {'maxlen': '(100)'}), '(maxlen=100)\n', (2434, 2446), False, 'from collections import deque\n'), ((1038, 1063), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1061, 1063), False, 'import torch\n'), ((3653, 3675), 'numpy.mean', 'np.mean', (['scores_window'], {}), '(scores_window)\n', (3660, 3675), True, 'import numpy as np\n'), ((3477, 3499), 'numpy.mean', 'np.mean', (['scores_window'], {}), '(scores_window)\n', (3484, 3499), True, 'import numpy as np\n'), ((3617, 3639), 'numpy.mean', 'np.mean', (['scores_window'], {}), '(scores_window)\n', (3624, 3639), True, 'import numpy as np\n'), ((3785, 3807), 'numpy.mean', 'np.mean', (['scores_window'], {}), '(scores_window)\n', (3792, 3807), True, 'import numpy as np\n')] |
jac99/Egonn | models/model_factory.py | 075e00368a1676df741a35f42f6f38497da9d58f | # Warsaw University of Technology
from layers.eca_block import ECABasicBlock
from models.minkgl import MinkHead, MinkTrunk, MinkGL
from models.minkloc import MinkLoc
from third_party.minkloc3d.minkloc import MinkLoc3D
from misc.utils import ModelParams
def model_factory(model_params: ModelParams):
in_channels = 1
if model_params.model == 'MinkLoc':
model = MinkLoc(in_channels=in_channels, feature_size=model_params.feature_size,
output_dim=model_params.output_dim, planes=model_params.planes,
layers=model_params.layers, num_top_down=model_params.num_top_down,
conv0_kernel_size=model_params.conv0_kernel_size, block=model_params.block,
pooling_method=model_params.pooling)
elif model_params.model == 'MinkLoc3D':
model = MinkLoc3D()
elif 'egonn' in model_params.model:
model = create_egonn_model(model_params)
else:
raise NotImplementedError('Model not implemented: {}'.format(model_params.model))
return model
def create_egonn_model(model_params: ModelParams):
model_name = model_params.model
global_normalize = False
local_normalize = True
if model_name == 'egonn':
# THIS IS OUR BEST MODEL
block = ECABasicBlock
planes = [32, 64, 64, 128, 128, 128, 128]
layers = [1, 1, 1, 1, 1, 1, 1]
global_in_levels = [5, 6, 7]
global_map_channels = 128
global_descriptor_size = 256
local_in_levels = [3, 4]
local_map_channels = 64
local_descriptor_size = 128
else:
raise NotImplementedError(f'Unknown model: {model_name}')
    # The planes list gives the number of channels for level 1 and above
global_in_channels = [planes[i-1] for i in global_in_levels]
head_global = MinkHead(global_in_levels, global_in_channels, global_map_channels)
if len(local_in_levels) > 0:
local_in_channels = [planes[i-1] for i in local_in_levels]
head_local = MinkHead(local_in_levels, local_in_channels, local_map_channels)
else:
head_local = None
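    # The trunk must output all pyramid levels from the lowest-numbered one consumed by either head.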
min_out_level = len(planes)
if len(global_in_levels) > 0:
min_out_level = min(min_out_level, min(global_in_levels))
if len(local_in_levels) > 0:
min_out_level = min(min_out_level, min(local_in_levels))
trunk = MinkTrunk(in_channels=1, planes=planes, layers=layers, conv0_kernel_size=5, block=block,
min_out_level=min_out_level)
net = MinkGL(trunk, local_head=head_local, local_descriptor_size=local_descriptor_size,
local_normalize=local_normalize, global_head=head_global,
global_descriptor_size=global_descriptor_size, global_pool_method='GeM',
global_normalize=global_normalize, quantizer=model_params.quantizer)
return net | [((1835, 1902), 'models.minkgl.MinkHead', 'MinkHead', (['global_in_levels', 'global_in_channels', 'global_map_channels'], {}), '(global_in_levels, global_in_channels, global_map_channels)\n', (1843, 1902), False, 'from models.minkgl import MinkHead, MinkTrunk, MinkGL\n'), ((2370, 2491), 'models.minkgl.MinkTrunk', 'MinkTrunk', ([], {'in_channels': '(1)', 'planes': 'planes', 'layers': 'layers', 'conv0_kernel_size': '(5)', 'block': 'block', 'min_out_level': 'min_out_level'}), '(in_channels=1, planes=planes, layers=layers, conv0_kernel_size=5,\n block=block, min_out_level=min_out_level)\n', (2379, 2491), False, 'from models.minkgl import MinkHead, MinkTrunk, MinkGL\n'), ((2521, 2821), 'models.minkgl.MinkGL', 'MinkGL', (['trunk'], {'local_head': 'head_local', 'local_descriptor_size': 'local_descriptor_size', 'local_normalize': 'local_normalize', 'global_head': 'head_global', 'global_descriptor_size': 'global_descriptor_size', 'global_pool_method': '"""GeM"""', 'global_normalize': 'global_normalize', 'quantizer': 'model_params.quantizer'}), "(trunk, local_head=head_local, local_descriptor_size=\n local_descriptor_size, local_normalize=local_normalize, global_head=\n head_global, global_descriptor_size=global_descriptor_size,\n global_pool_method='GeM', global_normalize=global_normalize, quantizer=\n model_params.quantizer)\n", (2527, 2821), False, 'from models.minkgl import MinkHead, MinkTrunk, MinkGL\n'), ((381, 716), 'models.minkloc.MinkLoc', 'MinkLoc', ([], {'in_channels': 'in_channels', 'feature_size': 'model_params.feature_size', 'output_dim': 'model_params.output_dim', 'planes': 'model_params.planes', 'layers': 'model_params.layers', 'num_top_down': 'model_params.num_top_down', 'conv0_kernel_size': 'model_params.conv0_kernel_size', 'block': 'model_params.block', 'pooling_method': 'model_params.pooling'}), '(in_channels=in_channels, feature_size=model_params.feature_size,\n output_dim=model_params.output_dim, planes=model_params.planes, layers=\n model_params.layers, num_top_down=model_params.num_top_down,\n conv0_kernel_size=model_params.conv0_kernel_size, block=model_params.\n block, pooling_method=model_params.pooling)\n', (388, 716), False, 'from models.minkloc import MinkLoc\n'), ((2025, 2089), 'models.minkgl.MinkHead', 'MinkHead', (['local_in_levels', 'local_in_channels', 'local_map_channels'], {}), '(local_in_levels, local_in_channels, local_map_channels)\n', (2033, 2089), False, 'from models.minkgl import MinkHead, MinkTrunk, MinkGL\n'), ((856, 867), 'third_party.minkloc3d.minkloc.MinkLoc3D', 'MinkLoc3D', ([], {}), '()\n', (865, 867), False, 'from third_party.minkloc3d.minkloc import MinkLoc3D\n')] |
rabarar/phidget_docker | mdns/Phidget22Python/Phidget22/Phidget.py | ceca56c86d27f291a4300a1257c02096862335ec | import sys
import ctypes
from Phidget22.PhidgetSupport import PhidgetSupport
from Phidget22.Async import *
from Phidget22.ChannelClass import ChannelClass
from Phidget22.ChannelSubclass import ChannelSubclass
from Phidget22.DeviceClass import DeviceClass
from Phidget22.DeviceID import DeviceID
from Phidget22.ErrorEventCode import ErrorEventCode
from Phidget22.PhidgetException import PhidgetException
class Phidget:
def __init__(self):
self.handle = ctypes.c_void_p()
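		# Event callback factories: stdcall wrappers (WINFUNCTYPE) on Windows, cdecl (CFUNCTYPE) elsewhere.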
if sys.platform == 'win32':
self._AttachFactory = ctypes.WINFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p)
else:
self._AttachFactory = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p)
self._Attach = None
self._onAttach = None
if sys.platform == 'win32':
self._DetachFactory = ctypes.WINFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p)
else:
self._DetachFactory = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p)
self._Detach = None
self._onDetach = None
if sys.platform == 'win32':
self._ErrorFactory = ctypes.WINFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, ctypes.c_char_p)
else:
self._ErrorFactory = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, ctypes.c_char_p)
self._Error = None
self._onError = None
if sys.platform == 'win32':
self._PropertyChangeFactory = ctypes.WINFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_char_p)
else:
self._PropertyChangeFactory = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_char_p)
self._PropertyChange = None
self._onPropertyChange = None
def __eq__(self, other):
return hasattr(other, 'handle') and self.handle.value == other.handle.value
def __hash__(self):
return self.handle.value
def __str__(self):
_value = (ctypes.c_char * 65536)()
_valueLen = ctypes.c_int32(65536)
if self.getIsChannel():
__func = PhidgetSupport.getDll().channelInfo
else:
__func = PhidgetSupport.getDll().deviceInfo
result = __func(self.handle, ctypes.byref(_value), _valueLen)
		return _value.value.decode('utf-8')
def __del__(self):
__func = PhidgetSupport.getDll().Phidget_delete
__func.restype = ctypes.c_int32
res = __func(ctypes.byref(self.handle))
self.handle = None
if res > 0:
raise PhidgetException(res)
def _localAttachEvent(self, handle, userPtr):
if self._Attach == None:
return
self._Attach(self)
def setOnAttachHandler(self, handler):
if handler == None:
self._Attach = None
self._onAttach = None
else:
self._Attach = handler
self._onAttach = self._AttachFactory(self._localAttachEvent)
try:
__func = PhidgetSupport.getDll().Phidget_setOnAttachHandler
__func.restype = ctypes.c_int32
res = __func(self.handle, self._onAttach, None)
except RuntimeError:
self._Attach = None
self._onAttach = None
def _localDetachEvent(self, handle, userPtr):
if self._Detach == None:
return
self._Detach(self)
def setOnDetachHandler(self, handler):
if handler == None:
self._Detach = None
self._onDetach = None
else:
self._Detach = handler
self._onDetach = self._DetachFactory(self._localDetachEvent)
try:
__func = PhidgetSupport.getDll().Phidget_setOnDetachHandler
__func.restype = ctypes.c_int32
res = __func(self.handle, self._onDetach, None)
except RuntimeError:
self._Detach = None
self._onDetach = None
def _localErrorEvent(self, handle, userPtr, Code, Description):
if self._Error == None:
return
Description = Description.decode('utf-8')
self._Error(self, Code, Description)
def setOnErrorHandler(self, handler):
if handler == None:
self._Error = None
self._onError = None
else:
self._Error = handler
self._onError = self._ErrorFactory(self._localErrorEvent)
try:
__func = PhidgetSupport.getDll().Phidget_setOnErrorHandler
__func.restype = ctypes.c_int32
res = __func(self.handle, self._onError, None)
except RuntimeError:
self._Error = None
self._onError = None
def _localPropertyChangeEvent(self, handle, userPtr, propertyName):
if self._PropertyChange == None:
return
propertyName = propertyName.decode('utf-8')
self._PropertyChange(self, propertyName)
def setOnPropertyChangeHandler(self, handler):
if handler == None:
self._PropertyChange = None
self._onPropertyChange = None
else:
self._PropertyChange = handler
self._onPropertyChange = self._PropertyChangeFactory(self._localPropertyChangeEvent)
try:
__func = PhidgetSupport.getDll().Phidget_setOnPropertyChangeHandler
__func.restype = ctypes.c_int32
res = __func(self.handle, self._onPropertyChange, None)
except RuntimeError:
self._PropertyChange = None
self._onPropertyChange = None
@staticmethod
def finalize(flags):
_flags = ctypes.c_int32(flags)
__func = PhidgetSupport.getDll().Phidget_finalize
__func.restype = ctypes.c_int32
result = __func(_flags)
if result > 0:
raise PhidgetException(result)
@staticmethod
def getLibraryVersion():
_LibraryVersion = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getLibraryVersion
__func.restype = ctypes.c_int32
result = __func(ctypes.byref(_LibraryVersion))
if result > 0:
raise PhidgetException(result)
return _LibraryVersion.value.decode('utf-8')
@staticmethod
def getLibraryVersionNumber():
_LibraryVersionNumber = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getLibraryVersionNumber
__func.restype = ctypes.c_int32
result = __func(ctypes.byref(_LibraryVersionNumber))
if result > 0:
raise PhidgetException(result)
return _LibraryVersionNumber.value.decode('utf-8')
@staticmethod
def resetLibrary():
__func = PhidgetSupport.getDll().Phidget_resetLibrary
__func.restype = ctypes.c_int32
result = __func()
if result > 0:
raise PhidgetException(result)
def getAttached(self):
_Attached = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getAttached
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_Attached))
if result > 0:
raise PhidgetException(result)
return _Attached.value
def getChannel(self):
_Channel = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getChannel
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_Channel))
if result > 0:
raise PhidgetException(result)
return _Channel.value
def setChannel(self, Channel):
_Channel = ctypes.c_int(Channel)
__func = PhidgetSupport.getDll().Phidget_setChannel
__func.restype = ctypes.c_int32
result = __func(self.handle, _Channel)
if result > 0:
raise PhidgetException(result)
def getChannelClass(self):
_ChannelClass = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getChannelClass
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ChannelClass))
if result > 0:
raise PhidgetException(result)
return _ChannelClass.value
def getChannelClassName(self):
_ChannelClassName = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getChannelClassName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ChannelClassName))
if result > 0:
raise PhidgetException(result)
return _ChannelClassName.value.decode('utf-8')
def getChannelName(self):
_ChannelName = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getChannelName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ChannelName))
if result > 0:
raise PhidgetException(result)
return _ChannelName.value.decode('utf-8')
def getChannelSubclass(self):
_ChannelSubclass = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getChannelSubclass
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ChannelSubclass))
if result > 0:
raise PhidgetException(result)
return _ChannelSubclass.value
def close(self):
__func = PhidgetSupport.getDll().Phidget_close
__func.restype = ctypes.c_int32
result = __func(self.handle)
if result > 0:
raise PhidgetException(result)
def getDeviceChannelCount(self, cls):
_cls = ctypes.c_int(cls)
_count = ctypes.c_uint32()
__func = PhidgetSupport.getDll().Phidget_getDeviceChannelCount
__func.restype = ctypes.c_int32
result = __func(self.handle, _cls, ctypes.byref(_count))
if result > 0:
raise PhidgetException(result)
return _count.value
def getDeviceClass(self):
_DeviceClass = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getDeviceClass
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceClass))
if result > 0:
raise PhidgetException(result)
return _DeviceClass.value
def getDeviceClassName(self):
_DeviceClassName = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getDeviceClassName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceClassName))
if result > 0:
raise PhidgetException(result)
return _DeviceClassName.value.decode('utf-8')
def getDeviceID(self):
_DeviceID = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getDeviceID
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceID))
if result > 0:
raise PhidgetException(result)
return _DeviceID.value
def getDeviceLabel(self):
_DeviceLabel = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getDeviceLabel
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceLabel))
if result > 0:
raise PhidgetException(result)
return _DeviceLabel.value.decode('utf-8')
def setDeviceLabel(self, DeviceLabel):
_DeviceLabel = ctypes.create_string_buffer(DeviceLabel.encode('utf-8'))
__func = PhidgetSupport.getDll().Phidget_setDeviceLabel
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceLabel))
if result > 0:
raise PhidgetException(result)
def getDeviceName(self):
_DeviceName = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getDeviceName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceName))
if result > 0:
raise PhidgetException(result)
return _DeviceName.value.decode('utf-8')
def getDeviceSerialNumber(self):
_DeviceSerialNumber = ctypes.c_int32()
__func = PhidgetSupport.getDll().Phidget_getDeviceSerialNumber
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceSerialNumber))
if result > 0:
raise PhidgetException(result)
return _DeviceSerialNumber.value
def setDeviceSerialNumber(self, DeviceSerialNumber):
_DeviceSerialNumber = ctypes.c_int32(DeviceSerialNumber)
__func = PhidgetSupport.getDll().Phidget_setDeviceSerialNumber
__func.restype = ctypes.c_int32
result = __func(self.handle, _DeviceSerialNumber)
if result > 0:
raise PhidgetException(result)
def getDeviceSKU(self):
_DeviceSKU = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getDeviceSKU
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceSKU))
if result > 0:
raise PhidgetException(result)
return _DeviceSKU.value.decode('utf-8')
def getDeviceVersion(self):
_DeviceVersion = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getDeviceVersion
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceVersion))
if result > 0:
raise PhidgetException(result)
return _DeviceVersion.value
def getHub(self):
_Hub = ctypes.c_void_p()
__func = PhidgetSupport.getDll().Phidget_getHub
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_Hub))
if result > 0:
raise PhidgetException(result)
__Hub = Phidget()
__Hub.handle = _Hub
return __Hub
def getHubPort(self):
_HubPort = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getHubPort
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_HubPort))
if result > 0:
raise PhidgetException(result)
return _HubPort.value
def setHubPort(self, HubPort):
_HubPort = ctypes.c_int(HubPort)
__func = PhidgetSupport.getDll().Phidget_setHubPort
__func.restype = ctypes.c_int32
result = __func(self.handle, _HubPort)
if result > 0:
raise PhidgetException(result)
def getHubPortCount(self):
_HubPortCount = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getHubPortCount
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_HubPortCount))
if result > 0:
raise PhidgetException(result)
return _HubPortCount.value
def getHubPortSpeed(self):
_HubPortSpeed = ctypes.c_uint32()
__func = PhidgetSupport.getDll().Phidget_getHubPortSpeed
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_HubPortSpeed))
if result > 0:
raise PhidgetException(result)
return _HubPortSpeed.value
def setHubPortSpeed(self, HubPortSpeed):
_HubPortSpeed = ctypes.c_uint32(HubPortSpeed)
__func = PhidgetSupport.getDll().Phidget_setHubPortSpeed
__func.restype = ctypes.c_int32
result = __func(self.handle, _HubPortSpeed)
if result > 0:
raise PhidgetException(result)
def getMaxHubPortSpeed(self):
_MaxHubPortSpeed = ctypes.c_uint32()
__func = PhidgetSupport.getDll().Phidget_getMaxHubPortSpeed
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_MaxHubPortSpeed))
if result > 0:
raise PhidgetException(result)
return _MaxHubPortSpeed.value
def getHubPortSupportsSetSpeed(self):
_HubPortSupportsSetSpeed = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getHubPortSupportsSetSpeed
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_HubPortSupportsSetSpeed))
if result > 0:
raise PhidgetException(result)
return _HubPortSupportsSetSpeed.value
def getIsChannel(self):
_IsChannel = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getIsChannel
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_IsChannel))
if result > 0:
raise PhidgetException(result)
return _IsChannel.value
def getIsHubPortDevice(self):
_IsHubPortDevice = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getIsHubPortDevice
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_IsHubPortDevice))
if result > 0:
raise PhidgetException(result)
return _IsHubPortDevice.value
def setIsHubPortDevice(self, IsHubPortDevice):
_IsHubPortDevice = ctypes.c_int(IsHubPortDevice)
__func = PhidgetSupport.getDll().Phidget_setIsHubPortDevice
__func.restype = ctypes.c_int32
result = __func(self.handle, _IsHubPortDevice)
if result > 0:
raise PhidgetException(result)
def getIsLocal(self):
_IsLocal = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getIsLocal
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_IsLocal))
if result > 0:
raise PhidgetException(result)
return _IsLocal.value
def setIsLocal(self, IsLocal):
_IsLocal = ctypes.c_int(IsLocal)
__func = PhidgetSupport.getDll().Phidget_setIsLocal
__func.restype = ctypes.c_int32
result = __func(self.handle, _IsLocal)
if result > 0:
raise PhidgetException(result)
def getIsRemote(self):
_IsRemote = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getIsRemote
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_IsRemote))
if result > 0:
raise PhidgetException(result)
return _IsRemote.value
def setIsRemote(self, IsRemote):
_IsRemote = ctypes.c_int(IsRemote)
__func = PhidgetSupport.getDll().Phidget_setIsRemote
__func.restype = ctypes.c_int32
result = __func(self.handle, _IsRemote)
if result > 0:
raise PhidgetException(result)
def open(self):
__func = PhidgetSupport.getDll().Phidget_open
__func.restype = ctypes.c_int32
result = __func(self.handle)
if result > 0:
raise PhidgetException(result)
def openWaitForAttachment(self, timeout):
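		# timeout is in milliseconds; INFINITE_TIMEOUT (0) waits indefinitely.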
_timeout = ctypes.c_uint32(timeout)
__func = PhidgetSupport.getDll().Phidget_openWaitForAttachment
__func.restype = ctypes.c_int32
result = __func(self.handle, _timeout)
if result > 0:
raise PhidgetException(result)
def getParent(self):
_Parent = ctypes.c_void_p()
__func = PhidgetSupport.getDll().Phidget_getParent
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_Parent))
if result > 0:
raise PhidgetException(result)
__Parent = Phidget()
__Parent.handle = _Parent
return __Parent
def getServerHostname(self):
_ServerHostname = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getServerHostname
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ServerHostname))
if result > 0:
raise PhidgetException(result)
return _ServerHostname.value.decode('utf-8')
def getServerName(self):
_ServerName = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getServerName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ServerName))
if result > 0:
raise PhidgetException(result)
return _ServerName.value.decode('utf-8')
def setServerName(self, ServerName):
_ServerName = ctypes.create_string_buffer(ServerName.encode('utf-8'))
__func = PhidgetSupport.getDll().Phidget_setServerName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ServerName))
if result > 0:
raise PhidgetException(result)
def getServerPeerName(self):
_ServerPeerName = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getServerPeerName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ServerPeerName))
if result > 0:
raise PhidgetException(result)
return _ServerPeerName.value.decode('utf-8')
def getServerUniqueName(self):
_ServerUniqueName = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getServerUniqueName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ServerUniqueName))
if result > 0:
raise PhidgetException(result)
return _ServerUniqueName.value.decode('utf-8')
def getMaxVINTDeviceSpeed(self):
_MaxVINTDeviceSpeed = ctypes.c_uint32()
__func = PhidgetSupport.getDll().Phidget_getMaxVINTDeviceSpeed
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_MaxVINTDeviceSpeed))
if result > 0:
raise PhidgetException(result)
return _MaxVINTDeviceSpeed.value
def getVINTDeviceSupportsSetSpeed(self):
_VINTDeviceSupportsSetSpeed = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getVINTDeviceSupportsSetSpeed
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_VINTDeviceSupportsSetSpeed))
if result > 0:
raise PhidgetException(result)
return _VINTDeviceSupportsSetSpeed.value
def writeDeviceLabel(self, deviceLabel):
_deviceLabel = ctypes.create_string_buffer(deviceLabel.encode('utf-8'))
__func = PhidgetSupport.getDll().Phidget_writeDeviceLabel
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_deviceLabel))
if result > 0:
raise PhidgetException(result)
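	# Channel-matching wildcards and openWaitForAttachment() timeout values
	# (milliseconds; 0 means wait indefinitely).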
ANY_SERIAL_NUMBER = -1
ANY_HUB_PORT = -1
ANY_CHANNEL = -1
ANY_LABEL = None
INFINITE_TIMEOUT = 0
DEFAULT_TIMEOUT = 1000
| [((457, 474), 'ctypes.c_void_p', 'ctypes.c_void_p', ([], {}), '()\n', (472, 474), False, 'import ctypes\n'), ((1830, 1851), 'ctypes.c_int32', 'ctypes.c_int32', (['(65536)'], {}), '(65536)\n', (1844, 1851), False, 'import ctypes\n'), ((4772, 4793), 'ctypes.c_int32', 'ctypes.c_int32', (['flags'], {}), '(flags)\n', (4786, 4793), False, 'import ctypes\n'), ((5022, 5039), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (5037, 5039), False, 'import ctypes\n'), ((5359, 5376), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (5374, 5376), False, 'import ctypes\n'), ((5879, 5893), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (5891, 5893), False, 'import ctypes\n'), ((6155, 6169), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (6167, 6169), False, 'import ctypes\n'), ((6437, 6458), 'ctypes.c_int', 'ctypes.c_int', (['Channel'], {}), '(Channel)\n', (6449, 6458), False, 'import ctypes\n'), ((6689, 6703), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (6701, 6703), False, 'import ctypes\n'), ((6995, 7012), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (7010, 7012), False, 'import ctypes\n'), ((7322, 7339), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (7337, 7339), False, 'import ctypes\n'), ((7642, 7656), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (7654, 7656), False, 'import ctypes\n'), ((8137, 8154), 'ctypes.c_int', 'ctypes.c_int', (['cls'], {}), '(cls)\n', (8149, 8154), False, 'import ctypes\n'), ((8166, 8183), 'ctypes.c_uint32', 'ctypes.c_uint32', ([], {}), '()\n', (8181, 8183), False, 'import ctypes\n'), ((8463, 8477), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (8475, 8477), False, 'import ctypes\n'), ((8764, 8781), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (8779, 8781), False, 'import ctypes\n'), ((9082, 9096), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (9094, 9096), False, 'import ctypes\n'), ((9366, 9383), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (9381, 9383), False, 'import ctypes\n'), ((9996, 10013), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (10011, 10013), False, 'import ctypes\n'), ((10319, 10335), 'ctypes.c_int32', 'ctypes.c_int32', ([], {}), '()\n', (10333, 10335), False, 'import ctypes\n'), ((10669, 10703), 'ctypes.c_int32', 'ctypes.c_int32', (['DeviceSerialNumber'], {}), '(DeviceSerialNumber)\n', (10683, 10703), False, 'import ctypes\n'), ((10950, 10967), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (10965, 10967), False, 'import ctypes\n'), ((11260, 11274), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (11272, 11274), False, 'import ctypes\n'), ((11543, 11560), 'ctypes.c_void_p', 'ctypes.c_void_p', ([], {}), '()\n', (11558, 11560), False, 'import ctypes\n'), ((11844, 11858), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (11856, 11858), False, 'import ctypes\n'), ((12126, 12147), 'ctypes.c_int', 'ctypes.c_int', (['HubPort'], {}), '(HubPort)\n', (12138, 12147), False, 'import ctypes\n'), ((12378, 12392), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (12390, 12392), False, 'import ctypes\n'), ((12676, 12693), 'ctypes.c_uint32', 'ctypes.c_uint32', ([], {}), '()\n', (12691, 12693), False, 'import ctypes\n'), ((12991, 13020), 'ctypes.c_uint32', 'ctypes.c_uint32', (['HubPortSpeed'], {}), '(HubPortSpeed)\n', (13006, 13020), False, 'import ctypes\n'), ((13267, 13284), 'ctypes.c_uint32', 'ctypes.c_uint32', ([], {}), '()\n', (13282, 13284), False, 'import ctypes\n'), ((13599, 13613), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (13611, 13613), False, 'import 
ctypes\n'), ((13924, 13938), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (13936, 13938), False, 'import ctypes\n'), ((14219, 14233), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (14231, 14233), False, 'import ctypes\n'), ((14549, 14578), 'ctypes.c_int', 'ctypes.c_int', (['IsHubPortDevice'], {}), '(IsHubPortDevice)\n', (14561, 14578), False, 'import ctypes\n'), ((14815, 14829), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (14827, 14829), False, 'import ctypes\n'), ((15097, 15118), 'ctypes.c_int', 'ctypes.c_int', (['IsLocal'], {}), '(IsLocal)\n', (15109, 15118), False, 'import ctypes\n'), ((15341, 15355), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (15353, 15355), False, 'import ctypes\n'), ((15629, 15651), 'ctypes.c_int', 'ctypes.c_int', (['IsRemote'], {}), '(IsRemote)\n', (15641, 15651), False, 'import ctypes\n'), ((16078, 16102), 'ctypes.c_uint32', 'ctypes.c_uint32', (['timeout'], {}), '(timeout)\n', (16093, 16102), False, 'import ctypes\n'), ((16332, 16349), 'ctypes.c_void_p', 'ctypes.c_void_p', ([], {}), '()\n', (16347, 16349), False, 'import ctypes\n'), ((16665, 16682), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (16680, 16682), False, 'import ctypes\n'), ((16984, 17001), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (16999, 17001), False, 'import ctypes\n'), ((17613, 17630), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (17628, 17630), False, 'import ctypes\n'), ((17944, 17961), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (17959, 17961), False, 'import ctypes\n'), ((18285, 18302), 'ctypes.c_uint32', 'ctypes.c_uint32', ([], {}), '()\n', (18300, 18302), False, 'import ctypes\n'), ((18632, 18646), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (18644, 18646), False, 'import ctypes\n'), ((531, 589), 'ctypes.WINFUNCTYPE', 'ctypes.WINFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p)\n', (549, 589), False, 'import ctypes\n'), ((623, 679), 'ctypes.CFUNCTYPE', 'ctypes.CFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p)\n', (639, 679), False, 'import ctypes\n'), ((782, 840), 'ctypes.WINFUNCTYPE', 'ctypes.WINFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p)\n', (800, 840), False, 'import ctypes\n'), ((874, 930), 'ctypes.CFUNCTYPE', 'ctypes.CFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p)\n', (890, 930), False, 'import ctypes\n'), ((1032, 1125), 'ctypes.WINFUNCTYPE', 'ctypes.WINFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p', 'ctypes.c_int', 'ctypes.c_char_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int,\n ctypes.c_char_p)\n', (1050, 1125), False, 'import ctypes\n'), ((1154, 1245), 'ctypes.CFUNCTYPE', 'ctypes.CFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p', 'ctypes.c_int', 'ctypes.c_char_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int,\n ctypes.c_char_p)\n', (1170, 1245), False, 'import ctypes\n'), ((1350, 1425), 'ctypes.WINFUNCTYPE', 'ctypes.WINFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p', 'ctypes.c_char_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_char_p)\n', (1368, 1425), False, 'import ctypes\n'), ((1467, 1540), 'ctypes.CFUNCTYPE', 'ctypes.CFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p', 'ctypes.c_char_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_char_p)\n', (1483, 1540), False, 'import 
ctypes\n'), ((2012, 2032), 'ctypes.byref', 'ctypes.byref', (['_value'], {}), '(_value)\n', (2024, 2032), False, 'import ctypes\n'), ((2116, 2139), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (2137, 2139), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((2204, 2229), 'ctypes.byref', 'ctypes.byref', (['self.handle'], {}), '(self.handle)\n', (2216, 2229), False, 'import ctypes\n'), ((2275, 2296), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['res'], {}), '(res)\n', (2291, 2296), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((4806, 4829), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (4827, 4829), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((4934, 4958), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (4950, 4958), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((5052, 5075), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (5073, 5075), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((5154, 5183), 'ctypes.byref', 'ctypes.byref', (['_LibraryVersion'], {}), '(_LibraryVersion)\n', (5166, 5183), False, 'import ctypes\n'), ((5212, 5236), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (5228, 5236), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((5389, 5412), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (5410, 5412), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((5497, 5532), 'ctypes.byref', 'ctypes.byref', (['_LibraryVersionNumber'], {}), '(_LibraryVersionNumber)\n', (5509, 5532), False, 'import ctypes\n'), ((5561, 5585), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (5577, 5585), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((5688, 5711), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (5709, 5711), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((5814, 5838), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (5830, 5838), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((5906, 5929), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (5927, 5929), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((6015, 6038), 'ctypes.byref', 'ctypes.byref', (['_Attached'], {}), '(_Attached)\n', (6027, 6038), False, 'import ctypes\n'), ((6067, 6091), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (6083, 6091), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((6182, 6205), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (6203, 6205), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((6290, 6312), 'ctypes.byref', 'ctypes.byref', (['_Channel'], {}), '(_Channel)\n', (6302, 6312), False, 'import ctypes\n'), ((6341, 6365), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (6357, 6365), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((6471, 6494), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 
'PhidgetSupport.getDll', ([], {}), '()\n', (6492, 6494), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((6616, 6640), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (6632, 6640), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((6716, 6739), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (6737, 6739), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((6829, 6856), 'ctypes.byref', 'ctypes.byref', (['_ChannelClass'], {}), '(_ChannelClass)\n', (6841, 6856), False, 'import ctypes\n'), ((6885, 6909), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (6901, 6909), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((7025, 7048), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (7046, 7048), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((7142, 7173), 'ctypes.byref', 'ctypes.byref', (['_ChannelClassName'], {}), '(_ChannelClassName)\n', (7154, 7173), False, 'import ctypes\n'), ((7202, 7226), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (7218, 7226), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((7352, 7375), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (7373, 7375), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((7464, 7490), 'ctypes.byref', 'ctypes.byref', (['_ChannelName'], {}), '(_ChannelName)\n', (7476, 7490), False, 'import ctypes\n'), ((7519, 7543), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (7535, 7543), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((7669, 7692), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (7690, 7692), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((7785, 7815), 'ctypes.byref', 'ctypes.byref', (['_ChannelSubclass'], {}), '(_ChannelSubclass)\n', (7797, 7815), False, 'import ctypes\n'), ((7844, 7868), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (7860, 7868), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((7932, 7955), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (7953, 7955), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((8062, 8086), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (8078, 8086), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((8196, 8219), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (8217, 8219), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((8321, 8341), 'ctypes.byref', 'ctypes.byref', (['_count'], {}), '(_count)\n', (8333, 8341), False, 'import ctypes\n'), ((8370, 8394), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (8386, 8394), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((8490, 8513), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (8511, 8513), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((8602, 8628), 'ctypes.byref', 'ctypes.byref', (['_DeviceClass'], {}), 
'(_DeviceClass)\n', (8614, 8628), False, 'import ctypes\n'), ((8657, 8681), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (8673, 8681), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((8794, 8817), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (8815, 8817), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((8910, 8940), 'ctypes.byref', 'ctypes.byref', (['_DeviceClassName'], {}), '(_DeviceClassName)\n', (8922, 8940), False, 'import ctypes\n'), ((8969, 8993), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (8985, 8993), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((9109, 9132), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (9130, 9132), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((9218, 9241), 'ctypes.byref', 'ctypes.byref', (['_DeviceID'], {}), '(_DeviceID)\n', (9230, 9241), False, 'import ctypes\n'), ((9270, 9294), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (9286, 9294), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((9396, 9419), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (9417, 9419), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((9508, 9534), 'ctypes.byref', 'ctypes.byref', (['_DeviceLabel'], {}), '(_DeviceLabel)\n', (9520, 9534), False, 'import ctypes\n'), ((9563, 9587), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (9579, 9587), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((9760, 9783), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (9781, 9783), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((9872, 9898), 'ctypes.byref', 'ctypes.byref', (['_DeviceLabel'], {}), '(_DeviceLabel)\n', (9884, 9898), False, 'import ctypes\n'), ((9927, 9951), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (9943, 9951), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((10026, 10049), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (10047, 10049), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((10137, 10162), 'ctypes.byref', 'ctypes.byref', (['_DeviceName'], {}), '(_DeviceName)\n', (10149, 10162), False, 'import ctypes\n'), ((10191, 10215), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (10207, 10215), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((10348, 10371), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (10369, 10371), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((10467, 10500), 'ctypes.byref', 'ctypes.byref', (['_DeviceSerialNumber'], {}), '(_DeviceSerialNumber)\n', (10479, 10500), False, 'import ctypes\n'), ((10529, 10553), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (10545, 10553), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((10716, 10739), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (10737, 10739), False, 'from 
Phidget22.PhidgetSupport import PhidgetSupport\n'), ((10883, 10907), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (10899, 10907), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((10980, 11003), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (11001, 11003), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((11090, 11114), 'ctypes.byref', 'ctypes.byref', (['_DeviceSKU'], {}), '(_DeviceSKU)\n', (11102, 11114), False, 'import ctypes\n'), ((11143, 11167), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (11159, 11167), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((11287, 11310), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (11308, 11310), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((11401, 11429), 'ctypes.byref', 'ctypes.byref', (['_DeviceVersion'], {}), '(_DeviceVersion)\n', (11413, 11429), False, 'import ctypes\n'), ((11458, 11482), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (11474, 11482), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((11573, 11596), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (11594, 11596), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((11677, 11695), 'ctypes.byref', 'ctypes.byref', (['_Hub'], {}), '(_Hub)\n', (11689, 11695), False, 'import ctypes\n'), ((11724, 11748), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (11740, 11748), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((11871, 11894), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (11892, 11894), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((11979, 12001), 'ctypes.byref', 'ctypes.byref', (['_HubPort'], {}), '(_HubPort)\n', (11991, 12001), False, 'import ctypes\n'), ((12030, 12054), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (12046, 12054), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((12160, 12183), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (12181, 12183), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((12305, 12329), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (12321, 12329), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((12405, 12428), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (12426, 12428), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((12518, 12545), 'ctypes.byref', 'ctypes.byref', (['_HubPortCount'], {}), '(_HubPortCount)\n', (12530, 12545), False, 'import ctypes\n'), ((12574, 12598), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (12590, 12598), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((12706, 12729), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (12727, 12729), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((12819, 12846), 'ctypes.byref', 'ctypes.byref', (['_HubPortSpeed'], {}), '(_HubPortSpeed)\n', 
(12831, 12846), False, 'import ctypes\n'), ((12875, 12899), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (12891, 12899), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((13033, 13056), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (13054, 13056), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((13188, 13212), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (13204, 13212), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((13297, 13320), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (13318, 13320), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((13413, 13443), 'ctypes.byref', 'ctypes.byref', (['_MaxHubPortSpeed'], {}), '(_MaxHubPortSpeed)\n', (13425, 13443), False, 'import ctypes\n'), ((13472, 13496), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (13488, 13496), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((13626, 13649), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (13647, 13649), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((13750, 13788), 'ctypes.byref', 'ctypes.byref', (['_HubPortSupportsSetSpeed'], {}), '(_HubPortSupportsSetSpeed)\n', (13762, 13788), False, 'import ctypes\n'), ((13817, 13841), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (13833, 13841), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((13951, 13974), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (13972, 13974), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((14061, 14085), 'ctypes.byref', 'ctypes.byref', (['_IsChannel'], {}), '(_IsChannel)\n', (14073, 14085), False, 'import ctypes\n'), ((14114, 14138), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (14130, 14138), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((14246, 14269), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (14267, 14269), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((14362, 14392), 'ctypes.byref', 'ctypes.byref', (['_IsHubPortDevice'], {}), '(_IsHubPortDevice)\n', (14374, 14392), False, 'import ctypes\n'), ((14421, 14445), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (14437, 14445), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((14591, 14614), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (14612, 14614), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((14752, 14776), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (14768, 14776), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((14842, 14865), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (14863, 14865), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((14950, 14972), 'ctypes.byref', 'ctypes.byref', (['_IsLocal'], {}), '(_IsLocal)\n', (14962, 14972), False, 'import ctypes\n'), ((15001, 15025), 
'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (15017, 15025), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((15131, 15154), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (15152, 15154), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((15276, 15300), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (15292, 15300), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((15368, 15391), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (15389, 15391), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((15477, 15500), 'ctypes.byref', 'ctypes.byref', (['_IsRemote'], {}), '(_IsRemote)\n', (15489, 15500), False, 'import ctypes\n'), ((15529, 15553), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (15545, 15553), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((15664, 15687), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (15685, 15687), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((15811, 15835), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (15827, 15835), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((15866, 15889), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (15887, 15889), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((15995, 16019), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (16011, 16019), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((16115, 16138), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (16136, 16138), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((16271, 16295), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (16287, 16295), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((16362, 16385), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (16383, 16385), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((16469, 16490), 'ctypes.byref', 'ctypes.byref', (['_Parent'], {}), '(_Parent)\n', (16481, 16490), False, 'import ctypes\n'), ((16519, 16543), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (16535, 16543), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((16695, 16718), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (16716, 16718), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((16810, 16839), 'ctypes.byref', 'ctypes.byref', (['_ServerHostname'], {}), '(_ServerHostname)\n', (16822, 16839), False, 'import ctypes\n'), ((16868, 16892), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (16884, 16892), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((17014, 17037), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (17035, 17037), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((17125, 17150), 'ctypes.byref', 
'ctypes.byref', (['_ServerName'], {}), '(_ServerName)\n', (17137, 17150), False, 'import ctypes\n'), ((17179, 17203), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (17195, 17203), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((17371, 17394), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (17392, 17394), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((17482, 17507), 'ctypes.byref', 'ctypes.byref', (['_ServerName'], {}), '(_ServerName)\n', (17494, 17507), False, 'import ctypes\n'), ((17536, 17560), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (17552, 17560), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((17643, 17666), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (17664, 17666), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((17758, 17787), 'ctypes.byref', 'ctypes.byref', (['_ServerPeerName'], {}), '(_ServerPeerName)\n', (17770, 17787), False, 'import ctypes\n'), ((17816, 17840), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (17832, 17840), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((17974, 17997), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (17995, 17997), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((18091, 18122), 'ctypes.byref', 'ctypes.byref', (['_ServerUniqueName'], {}), '(_ServerUniqueName)\n', (18103, 18122), False, 'import ctypes\n'), ((18151, 18175), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (18167, 18175), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((18315, 18338), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (18336, 18338), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((18434, 18467), 'ctypes.byref', 'ctypes.byref', (['_MaxVINTDeviceSpeed'], {}), '(_MaxVINTDeviceSpeed)\n', (18446, 18467), False, 'import ctypes\n'), ((18496, 18520), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (18512, 18520), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((18659, 18682), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (18680, 18682), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((18786, 18827), 'ctypes.byref', 'ctypes.byref', (['_VINTDeviceSupportsSetSpeed'], {}), '(_VINTDeviceSupportsSetSpeed)\n', (18798, 18827), False, 'import ctypes\n'), ((18856, 18880), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (18872, 18880), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((19054, 19077), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (19075, 19077), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((19168, 19194), 'ctypes.byref', 'ctypes.byref', (['_deviceLabel'], {}), '(_deviceLabel)\n', (19180, 19194), False, 'import ctypes\n'), ((19223, 19247), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (19239, 19247), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((1890, 1913), 
'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (1911, 1913), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((1946, 1969), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (1967, 1969), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((2632, 2655), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (2653, 2655), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((3175, 3198), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (3196, 3198), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((3790, 3813), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (3811, 3813), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((4483, 4506), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (4504, 4506), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n')] |
oleksiyVeretiuk/openprocurement.auctions.geb | openprocurement/auctions/geb/tests/blanks/create.py | 2965b52bf8826b9a8f8870c9a4d2052f945f5799 | def create_auction(self):
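    # Happy path: posting valid auction data should return 201 Created.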
expected_http_status = '201 Created'
request_data = {"data": self.auction}
entrypoint = '/auctions'
response = self.app.post_json(entrypoint, request_data)
self.assertEqual(response.status, expected_http_status)
def create_auction_check_minNumberOfQualifiedBids(self):
expected_minNumberOfQualifiedBids = 2
request_data = {"data": self.auction}
entrypoint = '/auctions'
response = self.app.post_json(entrypoint, request_data)
self.assertEqual(response.json['data']['minNumberOfQualifiedBids'],
expected_minNumberOfQualifiedBids)
def create_auction_check_auctionParameters(self):
expected_auctionParameters = {'type': 'texas'}
request_data = {"data": self.auction}
entrypoint = '/auctions'
response = self.app.post_json(entrypoint, request_data)
self.assertEqual(response.json['data']['auctionParameters'],
expected_auctionParameters)
def create_auction_invalid_auctionPeriod(self):
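    # A missing auctionPeriod, or one without a startDate, must be rejected
    # with 422 Unprocessable Entity.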
expected_http_status = '422 Unprocessable Entity'
auction = self.auction
auction.pop('auctionPeriod')
request_data = {"data": self.auction}
entrypoint = '/auctions'
response = self.app.post_json(entrypoint, request_data, status=422)
self.assertEqual(response.status, expected_http_status)
entrypoint = '/auctions'
auction['auctionPeriod'] = {'startDate': None}
response = self.app.post_json(entrypoint, request_data, status=422)
self.assertEqual(response.status, expected_http_status)
def create_auction_dump(self):
request_data = {"data": self.auction}
entrypoint = '/auctions'
response = self.app.post_json(entrypoint, request_data)
filename = 'docs/source/tutorial/create_auction.http'
self.dump(response.request, response, filename)
| [] |
crossscreenmedia/scout_apm_python | tests/integration/test_celery.py | 5cd31bf21f5acd0be0df4f40ec0bd29ec050ec01 | # coding=utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
from contextlib import contextmanager
import celery
import pytest
from celery.signals import setup_logging
import scout_apm.celery
from scout_apm.api import Config
# http://docs.celeryproject.org/en/latest/userguide/testing.html#py-test
skip_unless_celery_4_plus = pytest.mark.skipif(
celery.VERSION < (4, 0), reason="pytest fixtures added in Celery 4.0"
)
@setup_logging.connect
def do_nothing(**kwargs):
# Just by connecting to this signal, we prevent Celery from setting up
# logging - and stop it from interfering with global state
# http://docs.celeryproject.org/en/v4.3.0/userguide/signals.html#setup-logging
pass
@contextmanager
def app_with_scout(app=None, config=None):
"""
Context manager that configures a Celery app with Scout installed.
"""
if app is None:
app = celery.Celery("tasks", broker="memory://")
# Enable Scout by default in tests.
if config is None:
config = {"monitor": True}
# Disable running the agent.
config["core_agent_launch"] = False
@app.task
def hello():
return "Hello World!"
# Setup according to https://docs.scoutapm.com/#celery
Config.set(**config)
scout_apm.celery.install()
try:
yield app
finally:
scout_apm.celery.uninstall()
# Reset Scout configuration.
Config.reset_all()
def test_hello_eager(tracked_requests):
with app_with_scout() as app:
result = app.tasks["tests.integration.test_celery.hello"].apply()
assert result.result == "Hello World!"
assert len(tracked_requests) == 1
tracked_request = tracked_requests[0]
assert "task_id" in tracked_request.tags
assert tracked_request.tags["is_eager"] is True
assert tracked_request.tags["exchange"] == "unknown"
assert tracked_request.tags["routing_key"] == "unknown"
assert tracked_request.tags["queue"] == "unknown"
assert tracked_request.active_spans == []
assert len(tracked_request.complete_spans) == 1
span = tracked_request.complete_spans[0]
assert span.operation == "Job/tests.integration.test_celery.hello"
@skip_unless_celery_4_plus
def test_hello_worker(celery_app, celery_worker, tracked_requests):
with app_with_scout(app=celery_app) as app:
result = app.tasks["tests.integration.test_celery.hello"].delay().get()
assert result == "Hello World!"
assert len(tracked_requests) == 1
tracked_request = tracked_requests[0]
assert "task_id" in tracked_request.tags
assert tracked_request.tags["is_eager"] is False
assert tracked_request.tags["exchange"] == ""
assert tracked_request.tags["routing_key"] == "celery"
assert tracked_request.tags["queue"] == "unknown"
assert (
0.0 <= tracked_request.tags["queue_time"] < 60.0
) # Assume test took <60 seconds
assert tracked_request.active_spans == []
assert len(tracked_request.complete_spans) == 1
span = tracked_request.complete_spans[0]
assert span.operation == "Job/tests.integration.test_celery.hello"
@skip_unless_celery_4_plus
def test_hello_worker_header_preset(celery_app, celery_worker, tracked_requests):
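    # A bogus scout_task_start header must not break tracking or produce a
    # queue_time tag.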
with app_with_scout(app=celery_app) as app:
result = (
app.tasks["tests.integration.test_celery.hello"]
.apply_async(headers={"scout_task_start": "an evil string"})
.get()
)
assert result == "Hello World!"
assert len(tracked_requests) == 1
tracked_request = tracked_requests[0]
assert tracked_request.active_spans == []
assert len(tracked_request.complete_spans) == 1
span = tracked_request.complete_spans[0]
assert span.operation == "Job/tests.integration.test_celery.hello"
assert "queue_time" not in span.tags
@skip_unless_celery_4_plus
def test_hello_worker_chain(celery_app, celery_worker, tracked_requests):
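    # Each task in the chain gets its own tracked request, linked via
    # parent_task_id.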
with app_with_scout(app=celery_app) as app:
hello = app.tasks["tests.integration.test_celery.hello"]
result = (hello.si() | hello.si()).apply_async().get()
assert result == "Hello World!"
assert len(tracked_requests) == 2
assert [t.complete_spans[0].operation for t in tracked_requests] == [
"Job/tests.integration.test_celery.hello",
"Job/tests.integration.test_celery.hello",
]
assert "parent_task_id" not in tracked_requests[0].tags
first_task_id = tracked_requests[0].tags["task_id"]
assert tracked_requests[1].tags["parent_task_id"] == first_task_id
def test_no_monitor(tracked_requests):
# With an empty config, "monitor" defaults to False.
with app_with_scout(config={}) as app:
result = app.tasks["tests.integration.test_celery.hello"].apply()
assert result.result == "Hello World!"
assert tracked_requests == []
| [((367, 461), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(celery.VERSION < (4, 0))'], {'reason': '"""pytest fixtures added in Celery 4.0"""'}), "(celery.VERSION < (4, 0), reason=\n 'pytest fixtures added in Celery 4.0')\n", (385, 461), False, 'import pytest\n'), ((1268, 1288), 'scout_apm.api.Config.set', 'Config.set', ([], {}), '(**config)\n', (1278, 1288), False, 'from scout_apm.api import Config\n'), ((926, 968), 'celery.Celery', 'celery.Celery', (['"""tasks"""'], {'broker': '"""memory://"""'}), "('tasks', broker='memory://')\n", (939, 968), False, 'import celery\n'), ((1443, 1461), 'scout_apm.api.Config.reset_all', 'Config.reset_all', ([], {}), '()\n', (1459, 1461), False, 'from scout_apm.api import Config\n')] |
mollyproject/mollyproject | molly/apps/places/migrations/0001_initial.py | 3247c6bac3f39ce8d275d19aa410b30c6284b8a7 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Source'
db.create_table('places_source', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('module_name', self.gf('django.db.models.fields.CharField')(max_length=128)),
('name', self.gf('django.db.models.fields.CharField')(max_length=128)),
('last_updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('places', ['Source'])
# Adding model 'EntityType'
db.create_table('places_entitytype', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('slug', self.gf('django.db.models.fields.SlugField')(max_length=50, db_index=True)),
('article', self.gf('django.db.models.fields.CharField')(max_length=2)),
('verbose_name', self.gf('django.db.models.fields.TextField')()),
('verbose_name_plural', self.gf('django.db.models.fields.TextField')()),
('show_in_nearby_list', self.gf('django.db.models.fields.BooleanField')(default=False)),
('show_in_category_list', self.gf('django.db.models.fields.BooleanField')(default=False)),
('note', self.gf('django.db.models.fields.TextField')(null=True)),
))
db.send_create_signal('places', ['EntityType'])
# Adding M2M table for field subtype_of on 'EntityType'
db.create_table('places_entitytype_subtype_of', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('from_entitytype', models.ForeignKey(orm['places.entitytype'], null=False)),
('to_entitytype', models.ForeignKey(orm['places.entitytype'], null=False))
))
db.create_unique('places_entitytype_subtype_of', ['from_entitytype_id', 'to_entitytype_id'])
# Adding M2M table for field subtype_of_completion on 'EntityType'
db.create_table('places_entitytype_subtype_of_completion', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('from_entitytype', models.ForeignKey(orm['places.entitytype'], null=False)),
('to_entitytype', models.ForeignKey(orm['places.entitytype'], null=False))
))
db.create_unique('places_entitytype_subtype_of_completion', ['from_entitytype_id', 'to_entitytype_id'])
# Adding model 'Identifier'
db.create_table('places_identifier', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('scheme', self.gf('django.db.models.fields.CharField')(max_length=32)),
('value', self.gf('django.db.models.fields.CharField')(max_length=256)),
))
db.send_create_signal('places', ['Identifier'])
# Adding model 'Entity'
db.create_table('places_entity', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.TextField')(blank=True)),
('source', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['places.Source'])),
('primary_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['places.EntityType'], null=True)),
('location', self.gf('django.contrib.gis.db.models.fields.PointField')(null=True)),
('geometry', self.gf('django.contrib.gis.db.models.fields.GeometryField')(null=True)),
('_metadata', self.gf('django.db.models.fields.TextField')(default='{}')),
('absolute_url', self.gf('django.db.models.fields.TextField')()),
('parent', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['places.Entity'], null=True)),
('is_sublocation', self.gf('django.db.models.fields.BooleanField')(default=False)),
('is_stack', self.gf('django.db.models.fields.BooleanField')(default=False)),
('identifier_scheme', self.gf('django.db.models.fields.CharField')(max_length=32)),
('identifier_value', self.gf('django.db.models.fields.CharField')(max_length=256)),
))
db.send_create_signal('places', ['Entity'])
# Adding M2M table for field all_types on 'Entity'
db.create_table('places_entity_all_types', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('entity', models.ForeignKey(orm['places.entity'], null=False)),
('entitytype', models.ForeignKey(orm['places.entitytype'], null=False))
))
db.create_unique('places_entity_all_types', ['entity_id', 'entitytype_id'])
# Adding M2M table for field all_types_completion on 'Entity'
db.create_table('places_entity_all_types_completion', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('entity', models.ForeignKey(orm['places.entity'], null=False)),
('entitytype', models.ForeignKey(orm['places.entitytype'], null=False))
))
db.create_unique('places_entity_all_types_completion', ['entity_id', 'entitytype_id'])
# Adding M2M table for field _identifiers on 'Entity'
db.create_table('places_entity__identifiers', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('entity', models.ForeignKey(orm['places.entity'], null=False)),
('identifier', models.ForeignKey(orm['places.identifier'], null=False))
))
db.create_unique('places_entity__identifiers', ['entity_id', 'identifier_id'])
def backwards(self, orm):
# Deleting model 'Source'
db.delete_table('places_source')
# Deleting model 'EntityType'
db.delete_table('places_entitytype')
# Removing M2M table for field subtype_of on 'EntityType'
db.delete_table('places_entitytype_subtype_of')
# Removing M2M table for field subtype_of_completion on 'EntityType'
db.delete_table('places_entitytype_subtype_of_completion')
# Deleting model 'Identifier'
db.delete_table('places_identifier')
# Deleting model 'Entity'
db.delete_table('places_entity')
# Removing M2M table for field all_types on 'Entity'
db.delete_table('places_entity_all_types')
# Removing M2M table for field all_types_completion on 'Entity'
db.delete_table('places_entity_all_types_completion')
# Removing M2M table for field _identifiers on 'Entity'
db.delete_table('places_entity__identifiers')
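    # Frozen ORM snapshot used by South to reconstruct model state for this
    # migration.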
models = {
'places.entity': {
'Meta': {'ordering': "('title',)", 'object_name': 'Entity'},
'_identifiers': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['places.Identifier']", 'symmetrical': 'False'}),
'_metadata': ('django.db.models.fields.TextField', [], {'default': "'{}'"}),
'absolute_url': ('django.db.models.fields.TextField', [], {}),
'all_types': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'entities'", 'blank': 'True', 'to': "orm['places.EntityType']"}),
'all_types_completion': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'entities_completion'", 'blank': 'True', 'to': "orm['places.EntityType']"}),
'geometry': ('django.contrib.gis.db.models.fields.GeometryField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier_scheme': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'identifier_value': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'is_stack': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_sublocation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.Entity']", 'null': 'True'}),
'primary_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.EntityType']", 'null': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.Source']"}),
'title': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'places.entitytype': {
'Meta': {'ordering': "('verbose_name',)", 'object_name': 'EntityType'},
'article': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'show_in_category_list': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'show_in_nearby_list': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
'subtype_of': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'subtypes'", 'blank': 'True', 'to': "orm['places.EntityType']"}),
'subtype_of_completion': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'subtypes_completion'", 'blank': 'True', 'to': "orm['places.EntityType']"}),
'verbose_name': ('django.db.models.fields.TextField', [], {}),
'verbose_name_plural': ('django.db.models.fields.TextField', [], {})
},
'places.identifier': {
'Meta': {'object_name': 'Identifier'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'scheme': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'places.source': {
'Meta': {'object_name': 'Source'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'module_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
}
}
complete_apps = ['places']
| [((650, 693), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""places"""', "['Source']"], {}), "('places', ['Source'])\n", (671, 693), False, 'from south.db import db\n'), ((1510, 1557), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""places"""', "['EntityType']"], {}), "('places', ['EntityType'])\n", (1531, 1557), False, 'from south.db import db\n'), ((1971, 2067), 'south.db.db.create_unique', 'db.create_unique', (['"""places_entitytype_subtype_of"""', "['from_entitytype_id', 'to_entitytype_id']"], {}), "('places_entitytype_subtype_of', ['from_entitytype_id',\n 'to_entitytype_id'])\n", (1987, 2067), False, 'from south.db import db\n'), ((2499, 2607), 'south.db.db.create_unique', 'db.create_unique', (['"""places_entitytype_subtype_of_completion"""', "['from_entitytype_id', 'to_entitytype_id']"], {}), "('places_entitytype_subtype_of_completion', [\n 'from_entitytype_id', 'to_entitytype_id'])\n", (2515, 2607), False, 'from south.db import db\n'), ((2960, 3007), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""places"""', "['Identifier']"], {}), "('places', ['Identifier'])\n", (2981, 3007), False, 'from south.db import db\n'), ((4350, 4393), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""places"""', "['Entity']"], {}), "('places', ['Entity'])\n", (4371, 4393), False, 'from south.db import db\n'), ((4781, 4856), 'south.db.db.create_unique', 'db.create_unique', (['"""places_entity_all_types"""', "['entity_id', 'entitytype_id']"], {}), "('places_entity_all_types', ['entity_id', 'entitytype_id'])\n", (4797, 4856), False, 'from south.db import db\n'), ((5266, 5356), 'south.db.db.create_unique', 'db.create_unique', (['"""places_entity_all_types_completion"""', "['entity_id', 'entitytype_id']"], {}), "('places_entity_all_types_completion', ['entity_id',\n 'entitytype_id'])\n", (5282, 5356), False, 'from south.db import db\n'), ((5746, 5824), 'south.db.db.create_unique', 'db.create_unique', (['"""places_entity__identifiers"""', "['entity_id', 'identifier_id']"], {}), "('places_entity__identifiers', ['entity_id', 'identifier_id'])\n", (5762, 5824), False, 'from south.db import db\n'), ((5900, 5932), 'south.db.db.delete_table', 'db.delete_table', (['"""places_source"""'], {}), "('places_source')\n", (5915, 5932), False, 'from south.db import db\n'), ((5980, 6016), 'south.db.db.delete_table', 'db.delete_table', (['"""places_entitytype"""'], {}), "('places_entitytype')\n", (5995, 6016), False, 'from south.db import db\n'), ((6092, 6139), 'south.db.db.delete_table', 'db.delete_table', (['"""places_entitytype_subtype_of"""'], {}), "('places_entitytype_subtype_of')\n", (6107, 6139), False, 'from south.db import db\n'), ((6226, 6284), 'south.db.db.delete_table', 'db.delete_table', (['"""places_entitytype_subtype_of_completion"""'], {}), "('places_entitytype_subtype_of_completion')\n", (6241, 6284), False, 'from south.db import db\n'), ((6332, 6368), 'south.db.db.delete_table', 'db.delete_table', (['"""places_identifier"""'], {}), "('places_identifier')\n", (6347, 6368), False, 'from south.db import db\n'), ((6412, 6444), 'south.db.db.delete_table', 'db.delete_table', (['"""places_entity"""'], {}), "('places_entity')\n", (6427, 6444), False, 'from south.db import db\n'), ((6515, 6557), 'south.db.db.delete_table', 'db.delete_table', (['"""places_entity_all_types"""'], {}), "('places_entity_all_types')\n", (6530, 6557), False, 'from south.db import db\n'), ((6639, 6692), 'south.db.db.delete_table', 'db.delete_table', 
(['"""places_entity_all_types_completion"""'], {}), "('places_entity_all_types_completion')\n", (6654, 6692), False, 'from south.db import db\n'), ((6766, 6811), 'south.db.db.delete_table', 'db.delete_table', (['"""places_entity__identifiers"""'], {}), "('places_entity__identifiers')\n", (6781, 6811), False, 'from south.db import db\n'), ((1700, 1772), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'primary_key': '(True)', 'auto_created': '(True)'}), "(verbose_name='ID', primary_key=True, auto_created=True)\n", (1716, 1772), False, 'from django.db import models\n'), ((1807, 1862), 'django.db.models.ForeignKey', 'models.ForeignKey', (["orm['places.entitytype']"], {'null': '(False)'}), "(orm['places.entitytype'], null=False)\n", (1824, 1862), False, 'from django.db import models\n'), ((1895, 1950), 'django.db.models.ForeignKey', 'models.ForeignKey', (["orm['places.entitytype']"], {'null': '(False)'}), "(orm['places.entitytype'], null=False)\n", (1912, 1950), False, 'from django.db import models\n'), ((2228, 2300), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'primary_key': '(True)', 'auto_created': '(True)'}), "(verbose_name='ID', primary_key=True, auto_created=True)\n", (2244, 2300), False, 'from django.db import models\n'), ((2335, 2390), 'django.db.models.ForeignKey', 'models.ForeignKey', (["orm['places.entitytype']"], {'null': '(False)'}), "(orm['places.entitytype'], null=False)\n", (2352, 2390), False, 'from django.db import models\n'), ((2423, 2478), 'django.db.models.ForeignKey', 'models.ForeignKey', (["orm['places.entitytype']"], {'null': '(False)'}), "(orm['places.entitytype'], null=False)\n", (2440, 2478), False, 'from django.db import models\n'), ((4526, 4598), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'primary_key': '(True)', 'auto_created': '(True)'}), "(verbose_name='ID', primary_key=True, auto_created=True)\n", (4542, 4598), False, 'from django.db import models\n'), ((4624, 4675), 'django.db.models.ForeignKey', 'models.ForeignKey', (["orm['places.entity']"], {'null': '(False)'}), "(orm['places.entity'], null=False)\n", (4641, 4675), False, 'from django.db import models\n'), ((4705, 4760), 'django.db.models.ForeignKey', 'models.ForeignKey', (["orm['places.entitytype']"], {'null': '(False)'}), "(orm['places.entitytype'], null=False)\n", (4722, 4760), False, 'from django.db import models\n'), ((5011, 5083), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'primary_key': '(True)', 'auto_created': '(True)'}), "(verbose_name='ID', primary_key=True, auto_created=True)\n", (5027, 5083), False, 'from django.db import models\n'), ((5109, 5160), 'django.db.models.ForeignKey', 'models.ForeignKey', (["orm['places.entity']"], {'null': '(False)'}), "(orm['places.entity'], null=False)\n", (5126, 5160), False, 'from django.db import models\n'), ((5190, 5245), 'django.db.models.ForeignKey', 'models.ForeignKey', (["orm['places.entitytype']"], {'null': '(False)'}), "(orm['places.entitytype'], null=False)\n", (5207, 5245), False, 'from django.db import models\n'), ((5491, 5563), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'primary_key': '(True)', 'auto_created': '(True)'}), "(verbose_name='ID', primary_key=True, auto_created=True)\n", (5507, 5563), False, 'from django.db import models\n'), ((5589, 5640), 'django.db.models.ForeignKey', 'models.ForeignKey', (["orm['places.entity']"], {'null': '(False)'}), 
"(orm['places.entity'], null=False)\n", (5606, 5640), False, 'from django.db import models\n'), ((5670, 5725), 'django.db.models.ForeignKey', 'models.ForeignKey', (["orm['places.identifier']"], {'null': '(False)'}), "(orm['places.identifier'], null=False)\n", (5687, 5725), False, 'from django.db import models\n')] |
polivbr/pulumi-azure-native | sdk/python/pulumi_azure_native/servicebus/v20210601preview/get_subscription.py | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetSubscriptionResult',
'AwaitableGetSubscriptionResult',
'get_subscription',
]
@pulumi.output_type
class GetSubscriptionResult:
"""
Description of subscription resource.
"""
def __init__(__self__, accessed_at=None, auto_delete_on_idle=None, client_affine_properties=None, count_details=None, created_at=None, dead_lettering_on_filter_evaluation_exceptions=None, dead_lettering_on_message_expiration=None, default_message_time_to_live=None, duplicate_detection_history_time_window=None, enable_batched_operations=None, forward_dead_lettered_messages_to=None, forward_to=None, id=None, is_client_affine=None, lock_duration=None, max_delivery_count=None, message_count=None, name=None, requires_session=None, status=None, system_data=None, type=None, updated_at=None):
if accessed_at and not isinstance(accessed_at, str):
raise TypeError("Expected argument 'accessed_at' to be a str")
pulumi.set(__self__, "accessed_at", accessed_at)
if auto_delete_on_idle and not isinstance(auto_delete_on_idle, str):
raise TypeError("Expected argument 'auto_delete_on_idle' to be a str")
pulumi.set(__self__, "auto_delete_on_idle", auto_delete_on_idle)
if client_affine_properties and not isinstance(client_affine_properties, dict):
raise TypeError("Expected argument 'client_affine_properties' to be a dict")
pulumi.set(__self__, "client_affine_properties", client_affine_properties)
if count_details and not isinstance(count_details, dict):
raise TypeError("Expected argument 'count_details' to be a dict")
pulumi.set(__self__, "count_details", count_details)
if created_at and not isinstance(created_at, str):
raise TypeError("Expected argument 'created_at' to be a str")
pulumi.set(__self__, "created_at", created_at)
if dead_lettering_on_filter_evaluation_exceptions and not isinstance(dead_lettering_on_filter_evaluation_exceptions, bool):
raise TypeError("Expected argument 'dead_lettering_on_filter_evaluation_exceptions' to be a bool")
pulumi.set(__self__, "dead_lettering_on_filter_evaluation_exceptions", dead_lettering_on_filter_evaluation_exceptions)
if dead_lettering_on_message_expiration and not isinstance(dead_lettering_on_message_expiration, bool):
raise TypeError("Expected argument 'dead_lettering_on_message_expiration' to be a bool")
pulumi.set(__self__, "dead_lettering_on_message_expiration", dead_lettering_on_message_expiration)
if default_message_time_to_live and not isinstance(default_message_time_to_live, str):
raise TypeError("Expected argument 'default_message_time_to_live' to be a str")
pulumi.set(__self__, "default_message_time_to_live", default_message_time_to_live)
if duplicate_detection_history_time_window and not isinstance(duplicate_detection_history_time_window, str):
raise TypeError("Expected argument 'duplicate_detection_history_time_window' to be a str")
pulumi.set(__self__, "duplicate_detection_history_time_window", duplicate_detection_history_time_window)
if enable_batched_operations and not isinstance(enable_batched_operations, bool):
raise TypeError("Expected argument 'enable_batched_operations' to be a bool")
pulumi.set(__self__, "enable_batched_operations", enable_batched_operations)
if forward_dead_lettered_messages_to and not isinstance(forward_dead_lettered_messages_to, str):
raise TypeError("Expected argument 'forward_dead_lettered_messages_to' to be a str")
pulumi.set(__self__, "forward_dead_lettered_messages_to", forward_dead_lettered_messages_to)
if forward_to and not isinstance(forward_to, str):
raise TypeError("Expected argument 'forward_to' to be a str")
pulumi.set(__self__, "forward_to", forward_to)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if is_client_affine and not isinstance(is_client_affine, bool):
raise TypeError("Expected argument 'is_client_affine' to be a bool")
pulumi.set(__self__, "is_client_affine", is_client_affine)
if lock_duration and not isinstance(lock_duration, str):
raise TypeError("Expected argument 'lock_duration' to be a str")
pulumi.set(__self__, "lock_duration", lock_duration)
if max_delivery_count and not isinstance(max_delivery_count, int):
raise TypeError("Expected argument 'max_delivery_count' to be a int")
pulumi.set(__self__, "max_delivery_count", max_delivery_count)
if message_count and not isinstance(message_count, float):
raise TypeError("Expected argument 'message_count' to be a float")
pulumi.set(__self__, "message_count", message_count)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if requires_session and not isinstance(requires_session, bool):
raise TypeError("Expected argument 'requires_session' to be a bool")
pulumi.set(__self__, "requires_session", requires_session)
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if updated_at and not isinstance(updated_at, str):
raise TypeError("Expected argument 'updated_at' to be a str")
pulumi.set(__self__, "updated_at", updated_at)
@property
@pulumi.getter(name="accessedAt")
def accessed_at(self) -> str:
"""
Last time there was a receive request to this subscription.
"""
return pulumi.get(self, "accessed_at")
@property
@pulumi.getter(name="autoDeleteOnIdle")
def auto_delete_on_idle(self) -> Optional[str]:
"""
        ISO 8601 timeSpan idle interval after which the subscription is automatically deleted. The minimum duration is 5 minutes.
"""
return pulumi.get(self, "auto_delete_on_idle")
@property
@pulumi.getter(name="clientAffineProperties")
def client_affine_properties(self) -> Optional['outputs.SBClientAffinePropertiesResponse']:
"""
Properties specific to client affine subscriptions.
"""
return pulumi.get(self, "client_affine_properties")
@property
@pulumi.getter(name="countDetails")
def count_details(self) -> 'outputs.MessageCountDetailsResponse':
"""
Message count details
"""
return pulumi.get(self, "count_details")
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> str:
"""
        Exact time the subscription was created.
"""
return pulumi.get(self, "created_at")
@property
@pulumi.getter(name="deadLetteringOnFilterEvaluationExceptions")
def dead_lettering_on_filter_evaluation_exceptions(self) -> Optional[bool]:
"""
Value that indicates whether a subscription has dead letter support on filter evaluation exceptions.
"""
return pulumi.get(self, "dead_lettering_on_filter_evaluation_exceptions")
@property
@pulumi.getter(name="deadLetteringOnMessageExpiration")
def dead_lettering_on_message_expiration(self) -> Optional[bool]:
"""
Value that indicates whether a subscription has dead letter support when a message expires.
"""
return pulumi.get(self, "dead_lettering_on_message_expiration")
@property
@pulumi.getter(name="defaultMessageTimeToLive")
def default_message_time_to_live(self) -> Optional[str]:
"""
        ISO 8601 Default message timespan to live value. This is the duration after which the message expires, starting from when the message is sent to Service Bus. This is the default value used when TimeToLive is not set on a message itself.
"""
return pulumi.get(self, "default_message_time_to_live")
@property
@pulumi.getter(name="duplicateDetectionHistoryTimeWindow")
def duplicate_detection_history_time_window(self) -> Optional[str]:
"""
ISO 8601 timeSpan structure that defines the duration of the duplicate detection history. The default value is 10 minutes.
"""
return pulumi.get(self, "duplicate_detection_history_time_window")
@property
@pulumi.getter(name="enableBatchedOperations")
def enable_batched_operations(self) -> Optional[bool]:
"""
Value that indicates whether server-side batched operations are enabled.
"""
return pulumi.get(self, "enable_batched_operations")
@property
@pulumi.getter(name="forwardDeadLetteredMessagesTo")
def forward_dead_lettered_messages_to(self) -> Optional[str]:
"""
Queue/Topic name to forward the Dead Letter message
"""
return pulumi.get(self, "forward_dead_lettered_messages_to")
@property
@pulumi.getter(name="forwardTo")
def forward_to(self) -> Optional[str]:
"""
Queue/Topic name to forward the messages
"""
return pulumi.get(self, "forward_to")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="isClientAffine")
def is_client_affine(self) -> Optional[bool]:
"""
Value that indicates whether the subscription has an affinity to the client id.
"""
return pulumi.get(self, "is_client_affine")
@property
@pulumi.getter(name="lockDuration")
def lock_duration(self) -> Optional[str]:
"""
        ISO 8601 lock duration timespan for the subscription. The default value is 1 minute.
"""
return pulumi.get(self, "lock_duration")
@property
@pulumi.getter(name="maxDeliveryCount")
def max_delivery_count(self) -> Optional[int]:
"""
Number of maximum deliveries.
"""
return pulumi.get(self, "max_delivery_count")
@property
@pulumi.getter(name="messageCount")
def message_count(self) -> float:
"""
Number of messages.
"""
return pulumi.get(self, "message_count")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="requiresSession")
def requires_session(self) -> Optional[bool]:
"""
Value indicating if a subscription supports the concept of sessions.
"""
return pulumi.get(self, "requires_session")
@property
@pulumi.getter
def status(self) -> Optional[str]:
"""
Enumerates the possible values for the status of a messaging entity.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
The system meta data relating to this resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="updatedAt")
def updated_at(self) -> str:
"""
        The exact time the subscription was updated.
"""
return pulumi.get(self, "updated_at")
class AwaitableGetSubscriptionResult(GetSubscriptionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetSubscriptionResult(
accessed_at=self.accessed_at,
auto_delete_on_idle=self.auto_delete_on_idle,
client_affine_properties=self.client_affine_properties,
count_details=self.count_details,
created_at=self.created_at,
dead_lettering_on_filter_evaluation_exceptions=self.dead_lettering_on_filter_evaluation_exceptions,
dead_lettering_on_message_expiration=self.dead_lettering_on_message_expiration,
default_message_time_to_live=self.default_message_time_to_live,
duplicate_detection_history_time_window=self.duplicate_detection_history_time_window,
enable_batched_operations=self.enable_batched_operations,
forward_dead_lettered_messages_to=self.forward_dead_lettered_messages_to,
forward_to=self.forward_to,
id=self.id,
is_client_affine=self.is_client_affine,
lock_duration=self.lock_duration,
max_delivery_count=self.max_delivery_count,
message_count=self.message_count,
name=self.name,
requires_session=self.requires_session,
status=self.status,
system_data=self.system_data,
type=self.type,
updated_at=self.updated_at)
def get_subscription(namespace_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
subscription_name: Optional[str] = None,
topic_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSubscriptionResult:
"""
Description of subscription resource.
:param str namespace_name: The namespace name
:param str resource_group_name: Name of the Resource group within the Azure subscription.
:param str subscription_name: The subscription name.
:param str topic_name: The topic name.
"""
__args__ = dict()
__args__['namespaceName'] = namespace_name
__args__['resourceGroupName'] = resource_group_name
__args__['subscriptionName'] = subscription_name
__args__['topicName'] = topic_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:servicebus/v20210601preview:getSubscription', __args__, opts=opts, typ=GetSubscriptionResult).value
return AwaitableGetSubscriptionResult(
accessed_at=__ret__.accessed_at,
auto_delete_on_idle=__ret__.auto_delete_on_idle,
client_affine_properties=__ret__.client_affine_properties,
count_details=__ret__.count_details,
created_at=__ret__.created_at,
dead_lettering_on_filter_evaluation_exceptions=__ret__.dead_lettering_on_filter_evaluation_exceptions,
dead_lettering_on_message_expiration=__ret__.dead_lettering_on_message_expiration,
default_message_time_to_live=__ret__.default_message_time_to_live,
duplicate_detection_history_time_window=__ret__.duplicate_detection_history_time_window,
enable_batched_operations=__ret__.enable_batched_operations,
forward_dead_lettered_messages_to=__ret__.forward_dead_lettered_messages_to,
forward_to=__ret__.forward_to,
id=__ret__.id,
is_client_affine=__ret__.is_client_affine,
lock_duration=__ret__.lock_duration,
max_delivery_count=__ret__.max_delivery_count,
message_count=__ret__.message_count,
name=__ret__.name,
requires_session=__ret__.requires_session,
status=__ret__.status,
system_data=__ret__.system_data,
type=__ret__.type,
updated_at=__ret__.updated_at)
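# Minimal usage sketch (hypothetical resource names; any Service Bus subscription
# reachable from the current Pulumi stack would work the same way):
#   sub = get_subscription(resource_group_name="my-rg",
#                          namespace_name="my-namespace",
#                          topic_name="my-topic",
#                          subscription_name="my-subscription")
#   pulumi.export("subscriptionMessageCount", sub.message_count)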
| [((6398, 6430), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""accessedAt"""'}), "(name='accessedAt')\n", (6411, 6430), False, 'import pulumi\n'), ((6624, 6662), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""autoDeleteOnIdle"""'}), "(name='autoDeleteOnIdle')\n", (6637, 6662), False, 'import pulumi\n'), ((6937, 6981), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clientAffineProperties"""'}), "(name='clientAffineProperties')\n", (6950, 6981), False, 'import pulumi\n'), ((7242, 7276), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""countDetails"""'}), "(name='countDetails')\n", (7255, 7276), False, 'import pulumi\n'), ((7470, 7501), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""createdAt"""'}), "(name='createdAt')\n", (7483, 7501), False, 'import pulumi\n'), ((7669, 7732), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""deadLetteringOnFilterEvaluationExceptions"""'}), "(name='deadLetteringOnFilterEvaluationExceptions')\n", (7682, 7732), False, 'import pulumi\n'), ((8048, 8102), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""deadLetteringOnMessageExpiration"""'}), "(name='deadLetteringOnMessageExpiration')\n", (8061, 8102), False, 'import pulumi\n'), ((8389, 8435), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""defaultMessageTimeToLive"""'}), "(name='defaultMessageTimeToLive')\n", (8402, 8435), False, 'import pulumi\n'), ((8850, 8907), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""duplicateDetectionHistoryTimeWindow"""'}), "(name='duplicateDetectionHistoryTimeWindow')\n", (8863, 8907), False, 'import pulumi\n'), ((9230, 9275), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""enableBatchedOperations"""'}), "(name='enableBatchedOperations')\n", (9243, 9275), False, 'import pulumi\n'), ((9521, 9572), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""forwardDeadLetteredMessagesTo"""'}), "(name='forwardDeadLetteredMessagesTo')\n", (9534, 9572), False, 'import pulumi\n'), ((9812, 9843), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""forwardTo"""'}), "(name='forwardTo')\n", (9825, 9843), False, 'import pulumi\n'), ((10167, 10203), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""isClientAffine"""'}), "(name='isClientAffine')\n", (10180, 10203), False, 'import pulumi\n'), ((10438, 10472), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""lockDuration"""'}), "(name='lockDuration')\n", (10451, 10472), False, 'import pulumi\n'), ((10705, 10743), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""maxDeliveryCount"""'}), "(name='maxDeliveryCount')\n", (10718, 10743), False, 'import pulumi\n'), ((10931, 10965), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""messageCount"""'}), "(name='messageCount')\n", (10944, 10965), False, 'import pulumi\n'), ((11272, 11309), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""requiresSession"""'}), "(name='requiresSession')\n", (11285, 11309), False, 'import pulumi\n'), ((11749, 11781), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemData"""'}), "(name='systemData')\n", (11762, 11781), False, 'import pulumi\n'), ((12135, 12166), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""updatedAt"""'}), "(name='updatedAt')\n", (12148, 12166), False, 'import pulumi\n'), ((1293, 1341), 'pulumi.set', 'pulumi.set', (['__self__', '"""accessed_at"""', 'accessed_at'], {}), "(__self__, 'accessed_at', accessed_at)\n", (1303, 1341), False, 'import pulumi\n'), ((1510, 1574), 'pulumi.set', 'pulumi.set', (['__self__', '"""auto_delete_on_idle"""', 'auto_delete_on_idle'], {}), "(__self__, 'auto_delete_on_idle', 
auto_delete_on_idle)\n", (1520, 1574), False, 'import pulumi\n'), ((1760, 1834), 'pulumi.set', 'pulumi.set', (['__self__', '"""client_affine_properties"""', 'client_affine_properties'], {}), "(__self__, 'client_affine_properties', client_affine_properties)\n", (1770, 1834), False, 'import pulumi\n'), ((1987, 2039), 'pulumi.set', 'pulumi.set', (['__self__', '"""count_details"""', 'count_details'], {}), "(__self__, 'count_details', count_details)\n", (1997, 2039), False, 'import pulumi\n'), ((2181, 2227), 'pulumi.set', 'pulumi.set', (['__self__', '"""created_at"""', 'created_at'], {}), "(__self__, 'created_at', created_at)\n", (2191, 2227), False, 'import pulumi\n'), ((2479, 2601), 'pulumi.set', 'pulumi.set', (['__self__', '"""dead_lettering_on_filter_evaluation_exceptions"""', 'dead_lettering_on_filter_evaluation_exceptions'], {}), "(__self__, 'dead_lettering_on_filter_evaluation_exceptions',\n dead_lettering_on_filter_evaluation_exceptions)\n", (2489, 2601), False, 'import pulumi\n'), ((2819, 2921), 'pulumi.set', 'pulumi.set', (['__self__', '"""dead_lettering_on_message_expiration"""', 'dead_lettering_on_message_expiration'], {}), "(__self__, 'dead_lettering_on_message_expiration',\n dead_lettering_on_message_expiration)\n", (2829, 2921), False, 'import pulumi\n'), ((3113, 3199), 'pulumi.set', 'pulumi.set', (['__self__', '"""default_message_time_to_live"""', 'default_message_time_to_live'], {}), "(__self__, 'default_message_time_to_live',\n default_message_time_to_live)\n", (3123, 3199), False, 'import pulumi\n'), ((3424, 3532), 'pulumi.set', 'pulumi.set', (['__self__', '"""duplicate_detection_history_time_window"""', 'duplicate_detection_history_time_window'], {}), "(__self__, 'duplicate_detection_history_time_window',\n duplicate_detection_history_time_window)\n", (3434, 3532), False, 'import pulumi\n'), ((3717, 3793), 'pulumi.set', 'pulumi.set', (['__self__', '"""enable_batched_operations"""', 'enable_batched_operations'], {}), "(__self__, 'enable_batched_operations', enable_batched_operations)\n", (3727, 3793), False, 'import pulumi\n'), ((4004, 4100), 'pulumi.set', 'pulumi.set', (['__self__', '"""forward_dead_lettered_messages_to"""', 'forward_dead_lettered_messages_to'], {}), "(__self__, 'forward_dead_lettered_messages_to',\n forward_dead_lettered_messages_to)\n", (4014, 4100), False, 'import pulumi\n'), ((4238, 4284), 'pulumi.set', 'pulumi.set', (['__self__', '"""forward_to"""', 'forward_to'], {}), "(__self__, 'forward_to', forward_to)\n", (4248, 4284), False, 'import pulumi\n'), ((4402, 4432), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (4412, 4432), False, 'import pulumi\n'), ((4594, 4652), 'pulumi.set', 'pulumi.set', (['__self__', '"""is_client_affine"""', 'is_client_affine'], {}), "(__self__, 'is_client_affine', is_client_affine)\n", (4604, 4652), False, 'import pulumi\n'), ((4803, 4855), 'pulumi.set', 'pulumi.set', (['__self__', '"""lock_duration"""', 'lock_duration'], {}), "(__self__, 'lock_duration', lock_duration)\n", (4813, 4855), False, 'import pulumi\n'), ((5021, 5083), 'pulumi.set', 'pulumi.set', (['__self__', '"""max_delivery_count"""', 'max_delivery_count'], {}), "(__self__, 'max_delivery_count', max_delivery_count)\n", (5031, 5083), False, 'import pulumi\n'), ((5238, 5290), 'pulumi.set', 'pulumi.set', (['__self__', '"""message_count"""', 'message_count'], {}), "(__self__, 'message_count', message_count)\n", (5248, 5290), False, 'import pulumi\n'), ((5414, 5448), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 
'name'], {}), "(__self__, 'name', name)\n", (5424, 5448), False, 'import pulumi\n'), ((5610, 5668), 'pulumi.set', 'pulumi.set', (['__self__', '"""requires_session"""', 'requires_session'], {}), "(__self__, 'requires_session', requires_session)\n", (5620, 5668), False, 'import pulumi\n'), ((5798, 5836), 'pulumi.set', 'pulumi.set', (['__self__', '"""status"""', 'status'], {}), "(__self__, 'status', status)\n", (5808, 5836), False, 'import pulumi\n'), ((5983, 6031), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_data"""', 'system_data'], {}), "(__self__, 'system_data', system_data)\n", (5993, 6031), False, 'import pulumi\n'), ((6155, 6189), 'pulumi.set', 'pulumi.set', (['__self__', '"""type"""', 'type'], {}), "(__self__, 'type', type)\n", (6165, 6189), False, 'import pulumi\n'), ((6331, 6377), 'pulumi.set', 'pulumi.set', (['__self__', '"""updated_at"""', 'updated_at'], {}), "(__self__, 'updated_at', updated_at)\n", (6341, 6377), False, 'import pulumi\n'), ((6572, 6603), 'pulumi.get', 'pulumi.get', (['self', '"""accessed_at"""'], {}), "(self, 'accessed_at')\n", (6582, 6603), False, 'import pulumi\n'), ((6877, 6916), 'pulumi.get', 'pulumi.get', (['self', '"""auto_delete_on_idle"""'], {}), "(self, 'auto_delete_on_idle')\n", (6887, 6916), False, 'import pulumi\n'), ((7177, 7221), 'pulumi.get', 'pulumi.get', (['self', '"""client_affine_properties"""'], {}), "(self, 'client_affine_properties')\n", (7187, 7221), False, 'import pulumi\n'), ((7416, 7449), 'pulumi.get', 'pulumi.get', (['self', '"""count_details"""'], {}), "(self, 'count_details')\n", (7426, 7449), False, 'import pulumi\n'), ((7618, 7648), 'pulumi.get', 'pulumi.get', (['self', '"""created_at"""'], {}), "(self, 'created_at')\n", (7628, 7648), False, 'import pulumi\n'), ((7961, 8027), 'pulumi.get', 'pulumi.get', (['self', '"""dead_lettering_on_filter_evaluation_exceptions"""'], {}), "(self, 'dead_lettering_on_filter_evaluation_exceptions')\n", (7971, 8027), False, 'import pulumi\n'), ((8312, 8368), 'pulumi.get', 'pulumi.get', (['self', '"""dead_lettering_on_message_expiration"""'], {}), "(self, 'dead_lettering_on_message_expiration')\n", (8322, 8368), False, 'import pulumi\n'), ((8781, 8829), 'pulumi.get', 'pulumi.get', (['self', '"""default_message_time_to_live"""'], {}), "(self, 'default_message_time_to_live')\n", (8791, 8829), False, 'import pulumi\n'), ((9150, 9209), 'pulumi.get', 'pulumi.get', (['self', '"""duplicate_detection_history_time_window"""'], {}), "(self, 'duplicate_detection_history_time_window')\n", (9160, 9209), False, 'import pulumi\n'), ((9455, 9500), 'pulumi.get', 'pulumi.get', (['self', '"""enable_batched_operations"""'], {}), "(self, 'enable_batched_operations')\n", (9465, 9500), False, 'import pulumi\n'), ((9738, 9791), 'pulumi.get', 'pulumi.get', (['self', '"""forward_dead_lettered_messages_to"""'], {}), "(self, 'forward_dead_lettered_messages_to')\n", (9748, 9791), False, 'import pulumi\n'), ((9975, 10005), 'pulumi.get', 'pulumi.get', (['self', '"""forward_to"""'], {}), "(self, 'forward_to')\n", (9985, 10005), False, 'import pulumi\n'), ((10124, 10146), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (10134, 10146), False, 'import pulumi\n'), ((10381, 10417), 'pulumi.get', 'pulumi.get', (['self', '"""is_client_affine"""'], {}), "(self, 'is_client_affine')\n", (10391, 10417), False, 'import pulumi\n'), ((10651, 10684), 'pulumi.get', 'pulumi.get', (['self', '"""lock_duration"""'], {}), "(self, 'lock_duration')\n", (10661, 10684), False, 'import pulumi\n'), ((10872, 10910), 
'pulumi.get', 'pulumi.get', (['self', '"""max_delivery_count"""'], {}), "(self, 'max_delivery_count')\n", (10882, 10910), False, 'import pulumi\n'), ((11071, 11104), 'pulumi.get', 'pulumi.get', (['self', '"""message_count"""'], {}), "(self, 'message_count')\n", (11081, 11104), False, 'import pulumi\n'), ((11227, 11251), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (11237, 11251), False, 'import pulumi\n'), ((11476, 11512), 'pulumi.get', 'pulumi.get', (['self', '"""requires_session"""'], {}), "(self, 'requires_session')\n", (11486, 11512), False, 'import pulumi\n'), ((11702, 11728), 'pulumi.get', 'pulumi.get', (['self', '"""status"""'], {}), "(self, 'status')\n", (11712, 11728), False, 'import pulumi\n'), ((11936, 11967), 'pulumi.get', 'pulumi.get', (['self', '"""system_data"""'], {}), "(self, 'system_data')\n", (11946, 11967), False, 'import pulumi\n'), ((12090, 12114), 'pulumi.get', 'pulumi.get', (['self', '"""type"""'], {}), "(self, 'type')\n", (12100, 12114), False, 'import pulumi\n'), ((12287, 12317), 'pulumi.get', 'pulumi.get', (['self', '"""updated_at"""'], {}), "(self, 'updated_at')\n", (12297, 12317), False, 'import pulumi\n'), ((14701, 14723), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), '()\n', (14721, 14723), False, 'import pulumi\n'), ((14815, 14953), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['"""azure-native:servicebus/v20210601preview:getSubscription"""', '__args__'], {'opts': 'opts', 'typ': 'GetSubscriptionResult'}), "(\n 'azure-native:servicebus/v20210601preview:getSubscription', __args__,\n opts=opts, typ=GetSubscriptionResult)\n", (14836, 14953), False, 'import pulumi\n')] |
crystalfontz/CFA-EVE-Python-Library | py_cfeve/module/CFAF240400E0-030TN-A1.py | c5aca10b9b6ee109d4df8a9a692dcef083dafc88 | #===========================================================================
#
# Crystalfontz Raspberry-Pi Python example library for FTDI / BridgeTek
# EVE graphic accelerators.
#
#---------------------------------------------------------------------------
#
# This file is part of the port/adaptation of existing C based EVE libraries
# to Python for Crystalfontz EVE based displays.
#
# 2021-10-20 Mark Williams / Crystalfontz America Inc.
# https://www.crystalfontz.com/products/eve-accelerated-tft-displays.php
#---------------------------------------------------------------------------
#
# This is free and unencumbered software released into the public domain.
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
# For more information, please refer to <http://unlicense.org/>
#
#============================================================================
#EVE Device Type
EVE_DEVICE = 811
# EVE Clock Speed
EVE_CLOCK_SPEED = 60000000
# Touch
TOUCH_RESISTIVE = False
TOUCH_CAPACITIVE = False
TOUCH_GOODIX_CAPACITIVE = False
# Define RGB output pins order, determined by PCB layout
LCD_SWIZZLE = 2
# Define active edge of PCLK. Observed by scope:
# 0: Data is put out coincident with falling edge of the clock.
# Rising edge of the clock is in the middle of the data.
# 1: Data is put out coincident with rising edge of the clock.
# Falling edge of the clock is in the middle of the data.
LCD_PCLKPOL = 0
# LCD drive strength: 0=5mA, 1=10mA
LCD_DRIVE_10MA = 0
# Spread Spectrum on RGB signals. Probably not a good idea at higher
# PCLK frequencies.
LCD_PCLK_CSPREAD = 0
#This is not a 24-bit display, so dither
LCD_DITHER = 0
# Pixel clock divisor
LCD_PCLK = 5
#----------------------------------------------------------------------------
# Frame_Rate = 60Hz / 16.7mS
#----------------------------------------------------------------------------
# Horizontal timing
# Target 60Hz frame rate, using the largest possible line time in order to
# maximize the time that the EVE has to process each line.
HPX = 240 # Horizontal Pixel Width
HSW = 10 # Horizontal Sync Width
HBP = 20 # Horizontal Back Porch
HFP = 10 # Horizontal Front Porch
HPP = 209 # Horizontal Pixel Padding
# FTDI needs at least 1 here
# Define the constants needed by the EVE based on the timing
# Active width of LCD display
LCD_WIDTH = HPX
# Start of horizontal sync pulse
LCD_HSYNC0 = HFP
# End of horizontal sync pulse
LCD_HSYNC1 = HFP+HSW
# Start of active line
LCD_HOFFSET = HFP+HSW+HBP
# Total number of clocks per line
LCD_HCYCLE = HPX+HFP+HSW+HBP+HPP
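# Worked check of the timing above (assuming, as on FT81x parts, that LCD_PCLK divides
# the 60 MHz system clock): pixel clock = EVE_CLOCK_SPEED / LCD_PCLK = 60 MHz / 5 = 12 MHz;
# LCD_HCYCLE = 240 + 10 + 10 + 20 + 209 = 489 clocks per line (~40.75 us). With the
# vertical timing below, LCD_VCYCLE = 400 + 4 + 2 + 2 + 1 = 409 lines per frame, so the
# frame time is 489 * 409 / 12 MHz ~= 16.67 ms, i.e. the ~60 Hz target stated above.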
#----------------------------------------------------------------------------
# Vertical timing
VLH = 400 # Vertical Line Height
VS = 2 # Vertical Sync (in lines)
VBP = 2 # Vertical Back Porch
VFP = 4 # Vertical Front Porch
VLP = 1 # Vertical Line Padding
# FTDI needs at least 1 here
# Define the constants needed by the EVE based on the timing
# Active height of LCD display
LCD_HEIGHT = VLH
# Start of vertical sync pulse
LCD_VSYNC0 = VFP
# End of vertical sync pulse
LCD_VSYNC1 = VFP+VS
# Start of active screen
LCD_VOFFSET = VFP+VS+VBP
# Total number of lines per screen
LCD_VCYCLE = VLH+VFP+VS+VBP+VLP | [] |
OneToolsCollection/HLT-ISTI-QuaPy | quapy/model_selection.py | 6a5c528154c2d6d38d9f3258e667727bf692fc8b | import itertools
import signal
from copy import deepcopy
from typing import Union, Callable
import numpy as np
import quapy as qp
from quapy.data.base import LabelledCollection
from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction
from quapy.method.aggregative import BaseQuantifier
import inspect
from util import _check_sample_size
class GridSearchQ(BaseQuantifier):
"""Grid Search optimization targeting a quantification-oriented metric.
Optimizes the hyperparameters of a quantification method, based on an evaluation method and on an evaluation
protocol for quantification.
:param model: the quantifier to optimize
:type model: BaseQuantifier
:param param_grid: a dictionary with keys the parameter names and values the list of values to explore
    :param sample_size: the size of the samples to extract from the validation set (ignored if protocol='gen')
:param protocol: either 'app' for the artificial prevalence protocol, 'npp' for the natural prevalence
protocol, or 'gen' for using a custom sampling generator function
:param n_prevpoints: if specified, indicates the number of equally distant points to extract from the interval
[0,1] in order to define the prevalences of the samples; e.g., if n_prevpoints=5, then the prevalences for
each class will be explored in [0.00, 0.25, 0.50, 0.75, 1.00]. If not specified, then eval_budget is requested.
Ignored if protocol!='app'.
:param n_repetitions: the number of repetitions for each combination of prevalences. This parameter is ignored
for the protocol='app' if eval_budget is set and is lower than the number of combinations that would be
generated using the value assigned to n_prevpoints (for the current number of classes and n_repetitions).
Ignored for protocol='npp' and protocol='gen' (use eval_budget for setting a maximum number of samples in
those cases).
:param eval_budget: if specified, sets a ceil on the number of evaluations to perform for each hyper-parameter
combination. For example, if protocol='app', there are 3 classes, n_repetitions=1 and eval_budget=20, then
n_prevpoints will be set to 5, since this will generate 15 different prevalences, i.e., [0, 0, 1],
[0, 0.25, 0.75], [0, 0.5, 0.5] ... [1, 0, 0], and since setting it to 6 would generate more than
20. When protocol='gen', indicates the maximum number of samples to generate, but less samples will be
generated if the generator yields less samples.
:param error: an error function (callable) or a string indicating the name of an error function (valid ones
are those in qp.error.QUANTIFICATION_ERROR
:param refit: whether or not to refit the model on the whole labelled collection (training+validation) with
the best chosen hyperparameter combination. Ignored if protocol='gen'
:param val_split: either a LabelledCollection on which to test the performance of the different settings, or
a float in [0,1] indicating the proportion of labelled data to extract from the training set, or a callable
returning a generator function each time it is invoked (only for protocol='gen').
:param n_jobs: number of parallel jobs
:param random_seed: set the seed of the random generator to replicate experiments. Ignored if protocol='gen'.
:param timeout: establishes a timer (in seconds) for each of the hyperparameters configurations being tested.
Whenever a run takes longer than this timer, that configuration will be ignored. If all configurations end up
being ignored, a TimeoutError exception is raised. If -1 (default) then no time bound is set.
:param verbose: set to True to get information through the stdout
"""
def __init__(self,
model: BaseQuantifier,
param_grid: dict,
sample_size: Union[int, None] = None,
protocol='app',
n_prevpoints: int = None,
n_repetitions: int = 1,
eval_budget: int = None,
error: Union[Callable, str] = qp.error.mae,
refit=True,
val_split=0.4,
n_jobs=1,
random_seed=42,
timeout=-1,
verbose=False):
self.model = model
self.param_grid = param_grid
self.sample_size = sample_size
self.protocol = protocol.lower()
self.n_prevpoints = n_prevpoints
self.n_repetitions = n_repetitions
self.eval_budget = eval_budget
self.refit = refit
self.val_split = val_split
self.n_jobs = n_jobs
self.random_seed = random_seed
self.timeout = timeout
self.verbose = verbose
self.__check_error(error)
assert self.protocol in {'app', 'npp', 'gen'}, \
'unknown protocol: valid ones are "app" or "npp" for the "artificial" or the "natural" prevalence ' \
            'protocols. Use protocol="gen" when passing a generator function through val_split that yields a ' \
'sample (instances) and their prevalence (ndarray) at each iteration.'
assert self.eval_budget is None or isinstance(self.eval_budget, int)
if self.protocol in ['npp', 'gen']:
if self.protocol=='npp' and (self.eval_budget is None or self.eval_budget <= 0):
raise ValueError(f'when protocol="npp" the parameter eval_budget should be '
f'indicated (and should be >0).')
if self.n_repetitions != 1:
print('[warning] n_repetitions has been set and will be ignored for the selected protocol')
def _sout(self, msg):
if self.verbose:
print(f'[{self.__class__.__name__}]: {msg}')
def __check_training_validation(self, training, validation):
if isinstance(validation, LabelledCollection):
return training, validation
elif isinstance(validation, float):
assert 0. < validation < 1., 'validation proportion should be in (0,1)'
training, validation = training.split_stratified(train_prop=1 - validation, random_state=self.random_seed)
return training, validation
elif self.protocol=='gen' and inspect.isgenerator(validation()):
return training, validation
else:
raise ValueError(f'"validation" must either be a LabelledCollection or a float in (0,1) indicating the'
f'proportion of training documents to extract (type found: {type(validation)}). '
f'Optionally, "validation" can be a callable function returning a generator that yields '
f'the sample instances along with their true prevalence at each iteration by '
f'setting protocol="gen".')
def __check_error(self, error):
if error in qp.error.QUANTIFICATION_ERROR:
self.error = error
elif isinstance(error, str):
self.error = qp.error.from_name(error)
elif hasattr(error, '__call__'):
self.error = error
else:
raise ValueError(f'unexpected error type; must either be a callable function or a str representing\n'
f'the name of an error function in {qp.error.QUANTIFICATION_ERROR_NAMES}')
def __generate_predictions(self, model, val_split):
commons = {
'n_repetitions': self.n_repetitions,
'n_jobs': self.n_jobs,
'random_seed': self.random_seed,
'verbose': False
}
if self.protocol == 'app':
return artificial_prevalence_prediction(
model, val_split, self.sample_size,
n_prevpoints=self.n_prevpoints,
eval_budget=self.eval_budget,
**commons
)
elif self.protocol == 'npp':
return natural_prevalence_prediction(
model, val_split, self.sample_size,
**commons)
elif self.protocol == 'gen':
return gen_prevalence_prediction(model, gen_fn=val_split, eval_budget=self.eval_budget)
else:
raise ValueError('unknown protocol')
def fit(self, training: LabelledCollection, val_split: Union[LabelledCollection, float, Callable] = None):
""" Learning routine. Fits methods with all combinations of hyperparameters and selects the one minimizing
the error metric.
:param training: the training set on which to optimize the hyperparameters
:param val_split: either a LabelledCollection on which to test the performance of the different settings, or
a float in [0,1] indicating the proportion of labelled data to extract from the training set
:return: self
"""
if val_split is None:
val_split = self.val_split
training, val_split = self.__check_training_validation(training, val_split)
if self.protocol != 'gen':
self.sample_size = _check_sample_size(self.sample_size)
params_keys = list(self.param_grid.keys())
params_values = list(self.param_grid.values())
model = self.model
if self.timeout > 0:
def handler(signum, frame):
self._sout('timeout reached')
raise TimeoutError()
signal.signal(signal.SIGALRM, handler)
self.param_scores_ = {}
self.best_score_ = None
some_timeouts = False
for values in itertools.product(*params_values):
params = dict({k: values[i] for i, k in enumerate(params_keys)})
if self.timeout > 0:
signal.alarm(self.timeout)
try:
# overrides default parameters with the parameters being explored at this iteration
model.set_params(**params)
model.fit(training)
true_prevalences, estim_prevalences = self.__generate_predictions(model, val_split)
score = self.error(true_prevalences, estim_prevalences)
self._sout(f'checking hyperparams={params} got {self.error.__name__} score {score:.5f}')
if self.best_score_ is None or score < self.best_score_:
self.best_score_ = score
self.best_params_ = params
self.best_model_ = deepcopy(model)
self.param_scores_[str(params)] = score
if self.timeout > 0:
signal.alarm(0)
except TimeoutError:
print(f'timeout reached for config {params}')
some_timeouts = True
if self.best_score_ is None and some_timeouts:
raise TimeoutError('all jobs took more than the timeout time to end')
self._sout(f'optimization finished: best params {self.best_params_} (score={self.best_score_:.5f})')
if self.refit:
self._sout(f'refitting on the whole development set')
self.best_model_.fit(training + val_split)
return self
def quantify(self, instances):
"""Estimate class prevalence values using the best model found after calling the :meth:`fit` method.
        :param instances: sample containing the instances
        :return: an ndarray of shape `(n_classes)` with class prevalence estimates according to the best model found
by the model selection process.
"""
assert hasattr(self, 'best_model_'), 'quantify called before fit'
return self.best_model().quantify(instances)
@property
def classes_(self):
"""
Classes on which the quantifier has been trained on.
:return: a ndarray of shape `(n_classes)` with the class identifiers
"""
return self.best_model().classes_
def set_params(self, **parameters):
"""Sets the hyper-parameters to explore.
:param parameters: a dictionary with keys the parameter names and values the list of values to explore
"""
self.param_grid = parameters
def get_params(self, deep=True):
"""Returns the dictionary of hyper-parameters to explore (`param_grid`)
:param deep: Unused
:return: the dictionary `param_grid`
"""
return self.param_grid
def best_model(self):
"""
Returns the best model found after calling the :meth:`fit` method, i.e., the one trained on the combination
of hyper-parameters that minimized the error function.
:return: a trained quantifier
"""
if hasattr(self, 'best_model_'):
return self.best_model_
raise ValueError('best_model called before fit')
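# Minimal usage sketch (hedged): `train` stands for any LabelledCollection already at hand,
# and the wrapped quantifier / parameter grid below are illustrative choices, not the only
# supported ones.
#   from sklearn.linear_model import LogisticRegression
#   from quapy.method.aggregative import PCC
#   search = GridSearchQ(PCC(LogisticRegression()),
#                        param_grid={'C': [0.1, 1.0, 10.0]},
#                        sample_size=100, protocol='app', eval_budget=100,
#                        error='mae', refit=True, val_split=0.4, verbose=True)
#   search.fit(train)
#   print(search.best_params_, search.best_score_)
#   estim_prev = search.quantify(test_instances)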
| [((9694, 9727), 'itertools.product', 'itertools.product', (['*params_values'], {}), '(*params_values)\n', (9711, 9727), False, 'import itertools\n'), ((7798, 7943), 'quapy.evaluation.artificial_prevalence_prediction', 'artificial_prevalence_prediction', (['model', 'val_split', 'self.sample_size'], {'n_prevpoints': 'self.n_prevpoints', 'eval_budget': 'self.eval_budget'}), '(model, val_split, self.sample_size,\n n_prevpoints=self.n_prevpoints, eval_budget=self.eval_budget, **commons)\n', (7830, 7943), False, 'from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction\n'), ((9200, 9236), 'util._check_sample_size', '_check_sample_size', (['self.sample_size'], {}), '(self.sample_size)\n', (9218, 9236), False, 'from util import _check_sample_size\n'), ((9538, 9576), 'signal.signal', 'signal.signal', (['signal.SIGALRM', 'handler'], {}), '(signal.SIGALRM, handler)\n', (9551, 9576), False, 'import signal\n'), ((7169, 7194), 'quapy.error.from_name', 'qp.error.from_name', (['error'], {}), '(error)\n', (7187, 7194), True, 'import quapy as qp\n'), ((8074, 8150), 'quapy.evaluation.natural_prevalence_prediction', 'natural_prevalence_prediction', (['model', 'val_split', 'self.sample_size'], {}), '(model, val_split, self.sample_size, **commons)\n', (8103, 8150), False, 'from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction\n'), ((9856, 9882), 'signal.alarm', 'signal.alarm', (['self.timeout'], {}), '(self.timeout)\n', (9868, 9882), False, 'import signal\n'), ((8240, 8325), 'quapy.evaluation.gen_prevalence_prediction', 'gen_prevalence_prediction', (['model'], {'gen_fn': 'val_split', 'eval_budget': 'self.eval_budget'}), '(model, gen_fn=val_split, eval_budget=self.eval_budget\n )\n', (8265, 8325), False, 'from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction\n'), ((10562, 10577), 'copy.deepcopy', 'deepcopy', (['model'], {}), '(model)\n', (10570, 10577), False, 'from copy import deepcopy\n'), ((10692, 10707), 'signal.alarm', 'signal.alarm', (['(0)'], {}), '(0)\n', (10704, 10707), False, 'import signal\n')] |
ZxShane/slam_hospital | flasky.py | 302704b3a188cea07dddfb23595dd75f8d3cd636 | # -*- coding: utf-8 -*-
import os
from flask_migrate import Migrate
from app import create_app, db
from app.models import User, Role, PoseToLocation
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
migrate = Migrate(app, db)
# For new migrations to be created, these model files need to be discoverable (scanned) by migrate
@app.shell_context_processor
def make_shell_context():
return dict(db=db, User=User, Role=Role, PoseToLocation=PoseToLocation)
# Unit tests
@app.cli.command()
def test():
""" run the unit tests """
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
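# Typical invocations (assuming FLASK_APP is pointed at this file):
#   $ export FLASK_APP=flasky.py
#   $ flask shell       # opens a shell with db, User, Role and PoseToLocation preloaded
#   $ flask test        # runs the unit tests registered above
#   $ flask db upgrade  # applies migrations through Flask-Migrate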
| [((220, 236), 'flask_migrate.Migrate', 'Migrate', (['app', 'db'], {}), '(app, db)\n', (227, 236), False, 'from flask_migrate import Migrate\n'), ((170, 195), 'os.getenv', 'os.getenv', (['"""FLASK_CONFIG"""'], {}), "('FLASK_CONFIG')\n", (179, 195), False, 'import os\n'), ((505, 526), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (524, 526), False, 'import unittest\n'), ((549, 585), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (572, 585), False, 'import unittest\n')] |
aesdeef/advent-of-code-2021 | python/day09/smoke_basin.py | 4561bcf12ac03d360f5b28c48ef80134f97613b9 | INPUT_FILE = "../../input/09.txt"
Point = tuple[int, int]
Heightmap = dict[Point, int]
Basin = set[Point]
def parse_input() -> Heightmap:
"""
Parses the input and returns a Heightmap
"""
with open(INPUT_FILE) as f:
heights = [[int(x) for x in line.strip()] for line in f]
heightmap: Heightmap = dict()
for (y, row) in enumerate(heights):
for (x, height) in enumerate(row):
heightmap[(x, y)] = height
return heightmap
def get_surrounding_points(heightmap: Heightmap, point: Point) -> set[Point]:
"""
Returns a set of surrounding points within the heightmap
"""
x, y = point
return {
(x - 1, y),
(x, y - 1),
(x, y + 1),
(x + 1, y),
} & heightmap.keys()
def get_surrounding_heights(heightmap: Heightmap, point: Point) -> set[int]:
"""
Returns the heights of points surrounding the given point
"""
surrounding_points = get_surrounding_points(heightmap, point)
return {heightmap[point] for point in surrounding_points}
def get_low_points(heightmap: Heightmap) -> set[Point]:
"""
Finds the low points on the heightmap
"""
low_points: set[Point] = set()
for point in heightmap:
surrounding_heights = get_surrounding_heights(heightmap, point)
if all(heightmap[point] < height for height in surrounding_heights):
low_points.add(point)
return low_points
def solve_part1(heightmap: Heightmap, low_points: set[Point]) -> int:
"""
Calculates the sum of the risk levels of all low points
"""
return sum(1 + heightmap[point] for point in low_points)
def get_basins(heightmap: Heightmap, low_points: set[Point]) -> list[Basin]:
"""
Finds all basins on the heightmap
"""
basins: list[Basin] = []
for low_point in low_points:
basin: Basin = set()
points_to_consider = {low_point}
while points_to_consider:
point = points_to_consider.pop()
if heightmap[point] == 9:
continue
surrounding_points = get_surrounding_points(heightmap, point)
points_to_consider.update(surrounding_points - basin)
basin.add(point)
basins.append(basin)
return basins
def solve_part2(heightmap: Heightmap, low_points: set[Point]) -> int:
"""
Calculates the product of the sizes of the three largest basins
"""
basins = get_basins(heightmap, low_points)
basin_sizes = sorted((len(basin) for basin in basins), reverse=True)
return basin_sizes[0] * basin_sizes[1] * basin_sizes[2]
if __name__ == "__main__":
heightmap = parse_input()
low_points = get_low_points(heightmap)
part1 = solve_part1(heightmap, low_points)
part2 = solve_part2(heightmap, low_points)
print(part1)
print(part2)
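# Worked example from the puzzle's published sample (quoted from memory, so treat it as an
# assumption rather than part of this file): the sample's four low points have heights
# 1, 0, 5 and 5, so part 1 = 2 + 1 + 6 + 6 = 15; its basin sizes are 3, 9, 14 and 9, so
# part 2 = 9 * 14 * 9 = 1134.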
| [] |