#!/usr/bin/env python2
# -*- coding: utf-8 -*-
##################################################
# GNU Radio Python Flow Graph
# Title: Top Block
# Generated: Mon Jan 22 07:11:20 2018
##################################################
if __name__ == '__main__':
import ctypes
import sys
if sys.platform.startswith('linux'):
try:
x11 = ctypes.cdll.LoadLibrary('libX11.so')
x11.XInitThreads()
except:
print "Warning: failed to XInitThreads()"
from gnuradio import analog
from gnuradio import blocks
from gnuradio import eng_notation
from gnuradio import gr
from gnuradio import wxgui
from gnuradio.eng_option import eng_option
from gnuradio.fft import window
from gnuradio.filter import firdes
from gnuradio.wxgui import fftsink2
from gnuradio.wxgui import forms
from gnuradio.wxgui import scopesink2
from grc_gnuradio import wxgui as grc_wxgui
from optparse import OptionParser
import wx
class top_block(grc_wxgui.top_block_gui):
def __init__(self):
grc_wxgui.top_block_gui.__init__(self, title="Top Block")
_icon_path = "C:\Program Files\GNURadio-3.7\share\icons\hicolor\scalable/apps\gnuradio-grc.png"
self.SetIcon(wx.Icon(_icon_path, wx.BITMAP_TYPE_ANY))
##################################################
# Variables
##################################################
self.samp_rate = samp_rate = 32e3
self.freq = freq = 5000
##################################################
# Blocks
##################################################
_freq_sizer = wx.BoxSizer(wx.VERTICAL)
self._freq_text_box = forms.text_box(
parent=self.GetWin(),
sizer=_freq_sizer,
value=self.freq,
callback=self.set_freq,
label='Frequency',
converter=forms.float_converter(),
proportion=0,
)
self._freq_slider = forms.slider(
parent=self.GetWin(),
sizer=_freq_sizer,
value=self.freq,
callback=self.set_freq,
minimum=-50e3,
maximum=50e3,
num_steps=100,
style=wx.SL_HORIZONTAL,
cast=float,
proportion=1,
)
self.Add(_freq_sizer)
self.wxgui_scopesink2_0 = scopesink2.scope_sink_c(
self.GetWin(),
title='Scope Plot',
sample_rate=samp_rate,
v_scale=0,
v_offset=0,
t_scale=0,
ac_couple=False,
xy_mode=False,
num_inputs=1,
trig_mode=wxgui.TRIG_MODE_AUTO,
y_axis_label='Counts',
)
self.Add(self.wxgui_scopesink2_0.win)
self.wxgui_fftsink2_0 = fftsink2.fft_sink_c(
self.GetWin(),
baseband_freq=0,
y_per_div=10,
y_divs=10,
ref_level=0,
ref_scale=2.0,
sample_rate=samp_rate,
fft_size=1024,
fft_rate=15,
average=False,
avg_alpha=None,
title='FFT Plot',
peak_hold=False,
)
self.Add(self.wxgui_fftsink2_0.win)
self.blocks_throttle_0 = blocks.throttle(gr.sizeof_gr_complex*1, samp_rate,True)
self.blocks_add_xx_0 = blocks.add_vcc(1)
self.analog_sig_source_x_0_0 = analog.sig_source_c(samp_rate, analog.GR_SIN_WAVE, 1e3, 1, 0)
self.analog_sig_source_x_0 = analog.sig_source_c(samp_rate, analog.GR_COS_WAVE, freq, 1, 0)
##################################################
# Connections
##################################################
self.connect((self.analog_sig_source_x_0, 0), (self.blocks_add_xx_0, 0))
self.connect((self.analog_sig_source_x_0_0, 0), (self.blocks_add_xx_0, 1))
self.connect((self.blocks_add_xx_0, 0), (self.blocks_throttle_0, 0))
self.connect((self.blocks_throttle_0, 0), (self.wxgui_fftsink2_0, 0))
self.connect((self.blocks_throttle_0, 0), (self.wxgui_scopesink2_0, 0))
def get_samp_rate(self):
return self.samp_rate
def set_samp_rate(self, samp_rate):
self.samp_rate = samp_rate
self.wxgui_scopesink2_0.set_sample_rate(self.samp_rate)
self.wxgui_fftsink2_0.set_sample_rate(self.samp_rate)
self.blocks_throttle_0.set_sample_rate(self.samp_rate)
self.analog_sig_source_x_0_0.set_sampling_freq(self.samp_rate)
self.analog_sig_source_x_0.set_sampling_freq(self.samp_rate)
def get_freq(self):
return self.freq
def set_freq(self, freq):
self.freq = freq
self._freq_slider.set_value(self.freq)
self._freq_text_box.set_value(self.freq)
self.analog_sig_source_x_0.set_frequency(self.freq)
def main(top_block_cls=top_block, options=None):
tb = top_block_cls()
tb.Start(True)
tb.Wait()
if __name__ == '__main__':
main()
|
import operator
import os
import pyjsonrpc
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'common'))
import mongodb_client
PREFERENCE_MODEL_TABLE_NAME = "user_preference_model"
SERVER_HOST = 'localhost'
SERVER_PORT = 5050
def isclose(a, b, rel_tol=1e-09, abs_tol=0.0):
return abs(a-b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)
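# A Python 2-compatible stand-in for math.isclose (added to the standard
# library in Python 3.5). Illustrative values (not from the original source):
# isclose(1.0, 1.0 + 1e-10) -> True, isclose(1.0, 1.1) -> False.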
class RequestHandler(pyjsonrpc.HttpRequestHandler):
""" Get user's preference in an ordered class list """
@pyjsonrpc.rpcmethod
def getPreferenceForUser(self, user_id):
db = mongodb_client.get_db()
model = db[PREFERENCE_MODEL_TABLE_NAME].find_one({'userId':user_id})
if model is None:
return []
sorted_tuples = sorted(model['preference'].items(), key=operator.itemgetter(1), reverse=True)
sorted_list = [x[0] for x in sorted_tuples]
sorted_value_list = [x[1] for x in sorted_tuples]
# If the first preference is same as the last one, the preference makes
# no sense.
if isclose(float(sorted_value_list[0]), float(sorted_value_list[-1])):
return []
return sorted_list
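# Illustrative example of the assumed document shape (hypothetical data): a
# stored model {'preference': {'Politics': 0.7, 'Sports': 0.2, 'Tech': 0.1}}
# is returned as ['Politics', 'Sports', 'Tech']; if all scores were
# (near-)equal, the ordering carries no information and [] is returned.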
# Threading HTTP Server
http_server = pyjsonrpc.ThreadingHttpServer(
server_address = (SERVER_HOST, SERVER_PORT),
RequestHandlerClass = RequestHandler
)
print "Starting HTTP server on %s:%d" % (SERVER_HOST, SERVER_PORT)
http_server.serve_forever()
|
# Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
from django.core.exceptions import ValidationError
from django.core.validators import RegexValidator
from dateutil import parser
alphanumeric = RegexValidator(r'^[0-9a-zA-Z_]*$', 'Only alphanumeric characters are allowed.')
# See definition of duration field
# https://prometheus.io/docs/prometheus/latest/configuration/configuration/#configuration-file
duration = RegexValidator(
r"[0-9]+(ms|[smhdwy])",
"Invalid or missing duration suffix. Example: 30s, 5m, 1h ([0-9]+(ms|[smhdwy])",
)
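# Illustrative usage (assumed): duration("30s") and duration("5m") validate,
# while duration("30") raises ValidationError because the unit suffix is missing.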
def datetime(value):
try:
parser.parse(value)
except ValueError:
raise ValidationError("Invalid timestamp")
|
# B2083 - draw a rectangle
try:
a, b, c, f = (input().split())
except:
a, b, c, f = (17, 92, '1', 0)
a, b = int(a), int(b)
try:
f = int(f)
if f != 0:
f = 1
except:
f = 0
if f == 0:
print(c * b)
for i in range(a - 2):
print(c + ' ' * (b - 2) + c)
print(c*b)
else:
for i in range(a):
print(c * b) |
from neuron.neuron import Neuron
import numpy as np
class NeuronBuilder:
zero_quantization_error = None
def __init__(self, tau_2, growing_metric):
self.__growing_metric = growing_metric
self.__tau_2 = tau_2
def new_neuron(self, weights_map, position):
assert self.zero_quantization_error is not None, "Zero quantization error has not been set yet"
return Neuron(
weights_map,
position,
self.zero_quantization_error,
self.__tau_2,
self.__growing_metric
)
def zero_neuron(self, input_dataset):
input_dimension = input_dataset.shape[1]
zero_neuron = Neuron(
[self.__calc_input_mean(input_dataset).reshape(1, 1, input_dimension)],
(0, 0),
None,
None,
self.__growing_metric
)
zero_neuron.input_dataset = input_dataset
self.zero_quantization_error = zero_neuron.compute_quantization_error()
return zero_neuron
@staticmethod
def __calc_input_mean(input_dataset):
return input_dataset.mean(axis=0)
|
import pandas as pd
import numpy as np
# event_start_with = ['underwater', 'sochi', 'varoufakis',
# 'sandy', 'samurai', 'pigFish',
# 'nepal', 'passport', 'malaysia',
# 'livr', 'garissa', 'elephant',
# 'eclipse', 'columbianChemicals', 'bringback'
# 'boston', 'syrianboy']
# key_word_list = ['Underwater bedroom', 'Sochi Olympics',
# 'Varoufakis and ZDF', 'Hurricane Sandy',
# 'Samurai and Girl', 'pigFish',
# 'Nepal earthquake', 'passport',
# 'Malaysia flight 370', 'livr',
# 'Garissa Attack', 'elephant rock',
# 'Solar Eclipse', 'colombian chemicals',
# 'Bring Back Our Girls', 'Boston Marathon bombing',
# 'syrian boy beach']
# event_start_with = ['eclipse', 'garissa',
# 'nepal', 'samurai',
# 'syrianboy', 'varoufakis']
# # key_word_list = ['Garissa Attack', 'Nepal earthquake',
# # 'Samurai and Girl', 'Solar Eclipse',
# # 'Varoufakis and ZDF', 'syrian boy beach']
# key_word_list = ['Solar Eclipse', 'Garissa Attack',
# 'Nepal earthquake', 'Samurai and Girl',
# 'syrian boy beach', 'Varoufakis and ZDF']
key_word_list = ['Solar Eclipse', 'Garissa Attack',
'Nepal earthquake', 'Samurai and Girl',
'syrian boy beach', 'Varoufakis and ZDF',
'airstrikes','american soldier quran','ankara explosions',
'attacks paris','black lion','boko haram','bowie david',
'brussels car metro','brussels explosions','burst kfc',
'bush book','convoy explosion turkey','donald trump attacker',
'eagle kid','five headed snake','fuji','gandhi dancing',
'half everything','hubble telescope','immigrants','isis children',
'john guevara','McDonalds fee','nazi submarine','north korea',
'not afraid','pakistan explosion','pope francis','protest',
'refugees','rio moon','snowboard girl','soldier stealing',
'syrian children','ukrainian nazi','woman 14 children']
event_start_with = ['eclipse','garissa','nepal','samurai','syrianboy','varoufakis',
'airstrikes','american','ankara',
'attacks','black','boko_haram','bowie_david',
'brussels_car_metro','brussels_explosions','burst_kfc',
'bush_book','convoy_explosion_turkey','donald_trump_attacker',
'eagle_kid','five_headed_snake','fuji_lenticular','gandhi_dancing',
'half_everything','hubble_telescope','immigrants','isis_children',
'john_guevara','mc_donalds_fee','nazi_submarine','north_korea',
'not_afraid','pakistan_explosion','pope_francis','protest',
'refugees','rio_moon','snowboard_girl','soldier_stealing',
'syrian_children','ukrainian_nazi','woman_14_children']
#
# bbc_news = pd.read_csv('./dataset/news_from_bbc_cleaned')
# tweets = pd.read_csv('./dataset/cleaned_twitter_posts.csv', engine='python')
bbc_news = pd.read_pickle('MediaEval_feature_extraction/dataset/ReutersNews.pkl')
tweets = pd.read_pickle('MediaEval_feature_extraction/dataset/test.pkl')
new_tweetId = []
new_tweetText = []
new_imageId = []
new_label = []
new_cleanedPost = []
new_news = []
new_newsKeyword = []
new_newsUrl = []
new_comments = []
for i, key in enumerate(event_start_with):
print(i, " ------>> ", key)
event = tweets['imageId(s)'].str.startswith(key)
indexs = np.where(event)[0]
event_news = bbc_news[bbc_news.key_word == key_word_list[i]]
if (event_news.size == 0):
continue
for index in indexs:
for ni, news in event_news.iterrows():
try:
new_tweetId.append(tweets['tweetId'].iloc[index])
new_tweetText.append(tweets['tweetText'].iloc[index])
new_imageId.append(tweets['imageId(s)'].iloc[index])
new_label.append(tweets['label'].iloc[index])
new_comments.append(tweets['comments'].iloc[index])
# new_cleanedPost.append(tweets['cleaned_post'][index])
new_news.append(news.news)
new_newsKeyword.append(news.key_word)
new_newsUrl.append(news.url)
except:
print(index)
final_dataset = pd.DataFrame(
{
'tweetId' : new_tweetId,
'tweetText' : new_tweetText,
'imageId' : new_imageId,
# 'cleaned_post' : new_cleanedPost,
'bbc_news' : new_news,
'key_word' : new_newsKeyword,
'url' : new_newsUrl,
'label' : new_label,
'comments' : new_comments
})
final_dataset.to_pickle('MediaEval_feature_extraction/dataset/testset_comments_newsReuters.pkl')
|
from .birthday import *
from .checks import *
from .cooldown import *
from .filter import *
from .permissions import *
from .transformers import * |
import unittest
import os
import secrets
import sys
sys.path.append("..")
from wrapmail.gmail.email import Gmail
class GmailTest(unittest.TestCase):
def setUp(self):
self.html_template = "test_html_temp.html"
self.pdf_attachment = "test_pdf_attach.pdf"
self.from_mail = os.environ.get("EMAIL")
self.to_mail = os.environ.get("TARGET")
def test_text_mail(self):
mail = Gmail(self.to_mail, "test_text_mail", "test")
mail.send()
def test_text_mail_with_attachment(self):
mail = Gmail(self.to_mail, "test_text_mail_with_attachment", "test")
mail.add_attachment("documents/test_pdf_attach.pdf")
mail.send()
def test_text_mail_with_multiple_attachments(self):
mail = Gmail(self.to_mail, "test_text_mail_with_multiple_attachments", "test")
mail.add_attachment("documents/test_pdf_attach.pdf")
mail.add_attachment("documents/pdf-test.pdf")
mail.send()
def test_html_mail(self):
mail = Gmail(self.to_mail, "test_html_mail", "test", html="documents/test_html_temp.html")
mail.send()
def test_html_mail_with_attachment(self):
mail = Gmail(self.to_mail, "test_html_mail_with_attachment", "test", html="documents/test_html_temp.html")
mail.add_attachment("documents/test_pdf_attach.pdf")
mail.send()
def test_html_mail_with_multiple_attachments(self):
mail = Gmail(self.to_mail, "test_html_mail_with_multiple_attachments", "test", html="documents/test_html_temp.html")
mail.add_attachment("documents/test_pdf_attach.pdf")
mail.add_attachment( "documents/pdf-test.pdf")
mail.send()
def test_jpg_attachment(self):
mail = Gmail(self.to_mail, "test_jpg_attachment", "test", html="documents/test_html_temp.html")
mail.add_attachment("documents/harita.jpg")
mail.send()
def test_png_attachment(self):
mail = Gmail(self.to_mail, "test_png_attachment", "test", html="documents/test_html_temp.html")
mail.add_attachment("documents/meme.png")
mail.send()
def test_pdf_and_jpg(self):
mail = Gmail(self.to_mail, "test_pdf_and_jpg", "test", html="documents/test_html_temp.html")
mail.add_attachment("documents/harita.jpg")
mail.add_attachment( "documents/pdf-test.pdf")
mail.send() |
import unittest
import numpy as np
from desispec.spectra import Spectra
from desispec.io import empty_fibermap
from desispec.coaddition import coadd,fast_resample_spectra,spectroperf_resample_spectra
from desispec.maskbits import fibermask
class TestCoadd(unittest.TestCase):
def _random_spectra(self, ns=3, nw=10):
wave = np.linspace(5000, 5100, nw)
flux = np.random.uniform(0, 1, size=(ns,nw))
ivar = np.random.uniform(0, 1, size=(ns,nw))
#mask = np.zeros((ns,nw),dtype=int)
mask = None
rdat = np.ones((ns,3,nw))
rdat[:,0] *= 0.25
rdat[:,1] *= 0.5
rdat[:,2] *= 0.25
fmap = empty_fibermap(ns)
fmap["TARGETID"][:]=12 # same target
return Spectra(bands=["x"],wave={"x":wave},flux={"x":flux},ivar={"x":ivar}, mask=None, resolution_data={"x":rdat} , fibermap=fmap)
def test_coadd(self):
"""Test coaddition"""
s1 = self._random_spectra(3,10)
coadd(s1)
def test_spectroperf_resample(self):
"""Test spectroperf_resample"""
s1 = self._random_spectra(1,20)
wave = np.linspace(5000, 5100, 10)
s2 = spectroperf_resample_spectra(s1,wave=wave)
def test_fast_resample(self):
"""Test fast_resample"""
s1 = self._random_spectra(1,20)
wave = np.linspace(5000, 5100, 10)
s2 = fast_resample_spectra(s1,wave=wave)
def test_fiberstatus(self):
"""Test that FIBERSTATUS=0 isn't included in coadd"""
def _makespec(nspec, nwave):
s1 = self._random_spectra(nspec, nwave)
s1.flux['x'][:,:] = 1.0
s1.ivar['x'][:,:] = 1.0
return s1
#- Nothing masked
nspec, nwave = 4,10
s1 = _makespec(nspec, nwave)
self.assertEqual(len(s1.fibermap), nspec)
coadd(s1)
self.assertEqual(len(s1.fibermap), 1)
self.assertEqual(s1.fibermap['COADD_NUMEXP'][0], nspec)
self.assertEqual(s1.fibermap['FIBERSTATUS'][0], 0)
self.assertTrue(np.all(s1.flux['x'] == 1.0))
self.assertTrue(np.allclose(s1.ivar['x'], 1.0*nspec))
#- Two spectra masked
nspec, nwave = 5,10
s1 = _makespec(nspec, nwave)
self.assertEqual(len(s1.fibermap), nspec)
s1.fibermap['FIBERSTATUS'][0] = fibermask.BROKENFIBER
s1.fibermap['FIBERSTATUS'][1] = fibermask.BADFIBER
coadd(s1)
self.assertEqual(len(s1.fibermap), 1)
self.assertEqual(s1.fibermap['COADD_NUMEXP'][0], nspec-2)
self.assertEqual(s1.fibermap['FIBERSTATUS'][0], 0)
self.assertTrue(np.all(s1.flux['x'] == 1.0))
self.assertTrue(np.allclose(s1.ivar['x'], 1.0*(nspec-2)))
#- All spectra masked
nspec, nwave = 5,10
s1 = _makespec(nspec, nwave)
self.assertEqual(len(s1.fibermap), nspec)
s1.fibermap['FIBERSTATUS'] = fibermask.BROKENFIBER
coadd(s1)
self.assertEqual(len(s1.fibermap), 1)
self.assertEqual(s1.fibermap['COADD_NUMEXP'][0], 0)
self.assertEqual(s1.fibermap['FIBERSTATUS'][0], fibermask.BROKENFIBER)
self.assertTrue(np.all(s1.flux['x'] == 0.0))
self.assertTrue(np.all(s1.ivar['x'] == 0.0))
if __name__ == '__main__':
unittest.main()
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pytest
from PIL import Image
from pytorch_lightning.utilities import _module_available
import flash
from flash.vision import ObjectDetector
from flash.vision.detection import ObjectDetectionData
from tests.vision.detection.test_data import _create_synth_coco_dataset
_COCO_AVAILABLE = _module_available("pycocotools")
@pytest.mark.skipif(not _COCO_AVAILABLE, reason="pycocotools is not installed for testing")
@pytest.mark.parametrize(["model", "backbone"], [("fasterrcnn", None), ("retinanet", "resnet34"),
("fasterrcnn", "mobilenet_v2"), ("retinanet", "simclr-imagenet")])
def test_detection(tmpdir, model, backbone):
train_folder, coco_ann_path = _create_synth_coco_dataset(tmpdir)
data = ObjectDetectionData.from_coco(train_folder=train_folder, train_ann_file=coco_ann_path, batch_size=1)
model = ObjectDetector(model=model, backbone=backbone, num_classes=data.num_classes)
trainer = flash.Trainer(fast_dev_run=True)
trainer.finetune(model, data)
test_image_one = os.fspath(tmpdir / "test_one.png")
test_image_two = os.fspath(tmpdir / "test_two.png")
Image.new('RGB', (1920, 1080)).save(test_image_one)
Image.new('RGB', (1920, 1080)).save(test_image_two)
test_images = [test_image_one, test_image_two]
model.predict(test_images)
|
# Coffee machine options : latte (200 mL water + 24 g coffee + 150 mL milk), espresso (50 mL water + 18 g coffee), cappuccino (250 mL water + 24 g coffee + 100 mL milk)
# Different prices (espresso = 1.50, latte = 2.50, cappuccino = 3.00)
# hardware : Water inlet, coin slot, ADD contactless, coin acceptor, LCD display, drinks 1-2-3, +-, Menu, drink outlet, waste water box
coffee_cup = """
.
`:.
`:.
.:' ,::
.:' ;:'
:: ;:'
: .:'
`. :.
_________________________
: _ _ _ _ _ _ _ _ _ _ _ _ :
,---:".".".".".".".".".".".".":
: ,'"`::.:.:.:.:.:.:.:.:.:.:.::'
`.`. `:-===-===-===-===-===-:'
`.`-._: :
`-.__`. ,'
,--------`"`-------------'--------.
`"--.__ __.--"'
`""-------------""'
"""
MENU = {
"espresso": {
"ingredients": {
"water": 50,
"coffee": 18,
},
"cost": 1.5,
},
"latte": {
"ingredients": {
"water": 200,
"milk": 150,
"coffee": 24,
},
"cost": 2.5,
},
"cappuccino": {
"ingredients": {
"water": 250,
"milk": 100,
"coffee": 24,
},
"cost": 3.0,
}
}
resources = {
"water": 300,
"milk": 200,
"coffee": 100,
}
coins= [
{
'name': 'Penny',
'value': 0.01
},
{
'name': 'Nickel',
'value': 0.05
},
{
'name': 'Dime',
'value': 0.1
},
{
'name': 'Quarter',
'value': 0.25
}]
def can_make_coffee(order_ingredients):
for item in order_ingredients:
if order_ingredients[item] > resources[item]:
print(f"Not enough {item}.")
return False
return True
def make_coffee(drink_name, order_ingredients):
for item in order_ingredients:
resources[item] -= order_ingredients[item]
profit = 0
print("Welcome to Viper Coffee Co.")
is_on = True
while is_on:
choice = input("What would you like to order?")
if choice == "off":
is_on = False
print("Now turning off.")
elif choice == "report":
print(f"Water: {resources['water']} mL.")
print(f"Milk: {resources['milk']} mL.")
print(f"Coffee: {resources['coffee']} g.")
print(f"Money: ${profit}.")
else:
drink = MENU[choice]
if can_make_coffee(drink["ingredients"]):
print(f"You have ordered a {choice}. That will be {drink['cost']}$.")
pennies = 0
nickels = 0
dimes = 0
quarters = 0
inserted = 0
price_met = False
while not price_met:
insert = input("What would you like to insert in the coin slot? [penny/nickel/dime/quarter]\n").title()
if insert == "Penny".title():
pen_amount = input("How many pennies? ")
pennies += int(pen_amount)
elif insert == "Nickel".title():
nic_amount = input("How many nickels? ")
nickels += int(nic_amount)
elif insert == "Dime".title():
dim_amount = input("How many dimes? ")
dimes += int(dim_amount)
elif insert == "Quarter".title():
quar_amount = input("How many quarters? ")
quarters += int(quar_amount)
inserted = (float(pennies) * float(coins[0]['value'])) + (float(nickels) * float(coins[1]['value'])) + (float(dimes) * float(coins[2]['value'])) + (float(quarters) * float(coins[3]['value']))
print(f"${round(inserted,2)}")
if float(inserted) >= float(drink['cost']):
price_met = True
make_coffee(choice, drink['ingredients'])
if not can_make_coffee(drink["ingredients"]):
refill = input("Type 'refill' to refill.\n").lower()
if refill == "refill".lower():
print("Refilling.")
resources["water"] = 300
resources["coffee"] = 100
resources["milk"] = 200
profit += drink["cost"]
print(f"One {choice} coming right up. Please wait.")
print(coffee_cup)
change = float(inserted) - float(drink['cost'])
if float(change) > 0:
print(f"Clink. Don't forget your change: ${round(change,2)}")
|
class LibraError(Exception):
pass
class AccountError(LibraError):
pass
class TransactionError(LibraError):
@property
def error_code(self):
code, _ = self.args
return code
@property
def error_msg(self):
_, msg = self.args
return msg
class AdmissionControlError(TransactionError):
pass
class VMError(TransactionError):
pass
class MempoolError(TransactionError):
pass
class TransactionTimeoutError(LibraError):
pass
class LibraNetError(LibraError):
pass
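# A minimal usage sketch (illustrative only, not part of the library): a
# TransactionError subclass is expected to carry an (error_code, error_msg)
# pair in its args, which the properties above unpack.
if __name__ == '__main__':
    try:
        raise VMError(4016, 'insufficient balance')  # hypothetical code/message
    except TransactionError as err:
        print(err.error_code, err.error_msg)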
|
import databases
import sqlalchemy
from fastapi import FastAPI
from fastapi_users import models
from fastapi_users.db import OrmarBaseUserModel, OrmarUserDatabase
class User(models.BaseUser):
pass
class UserCreate(models.BaseUserCreate):
pass
class UserUpdate(User, models.BaseUserUpdate):
pass
class UserDB(User, models.BaseUserDB):
pass
DATABASE_URL = "sqlite:///test.db"
metadata = sqlalchemy.MetaData()
database = databases.Database(DATABASE_URL)
class UserModel(OrmarBaseUserModel):
class Meta:
tablename = "users"
metadata = metadata
database = database
engine = sqlalchemy.create_engine(DATABASE_URL)
metadata.create_all(engine)
user_db = OrmarUserDatabase(UserDB, UserModel)
app = FastAPI()
app.state.database = database
@app.on_event("startup")
async def startup() -> None:
database_ = app.state.database
if not database_.is_connected:
await database_.connect()
@app.on_event("shutdown")
async def shutdown() -> None:
database_ = app.state.database
if database_.is_connected:
await database_.disconnect()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Базовый пример."""
from balaboba import balaboba
response = balaboba("Привет")
print(response)
|
def assign_ROSCO_values(wt_opt, modeling_options, control):
# ROSCO tuning parameters
wt_opt['tune_rosco_ivc.PC_omega'] = control['pitch']['PC_omega']
wt_opt['tune_rosco_ivc.PC_zeta'] = control['pitch']['PC_zeta']
wt_opt['tune_rosco_ivc.VS_omega'] = control['torque']['VS_omega']
wt_opt['tune_rosco_ivc.VS_zeta'] = control['torque']['VS_zeta']
if modeling_options['Level3']['ROSCO']['Flp_Mode'] > 0:
wt_opt['tune_rosco_ivc.Flp_omega'] = control['dac']['Flp_omega']
wt_opt['tune_rosco_ivc.Flp_zeta'] = control['dac']['Flp_zeta']
if 'IPC' in control.keys():
wt_opt['tune_rosco_ivc.IPC_KI'] = control['IPC']['IPC_gain_1P']
# # other optional parameters
wt_opt['tune_rosco_ivc.max_pitch'] = control['pitch']['max_pitch']
wt_opt['tune_rosco_ivc.min_pitch'] = control['pitch']['min_pitch']
wt_opt['tune_rosco_ivc.vs_minspd'] = control['torque']['VS_minspd']
wt_opt['tune_rosco_ivc.ss_vsgain'] = control['setpoint_smooth']['ss_vsgain']
wt_opt['tune_rosco_ivc.ss_pcgain'] = control['setpoint_smooth']['ss_pcgain']
wt_opt['tune_rosco_ivc.ps_percent'] = control['pitch']['ps_percent']
# Check for proper Flp_Mode, print warning
if modeling_options['WISDEM']['RotorSE']['n_tab'] > 1 and modeling_options['Level3']['ROSCO']['Flp_Mode'] == 0:
raise Exception('A distributed aerodynamic control device is specified in the geometry yaml, but Flp_Mode is zero in the modeling options.')
if modeling_options['WISDEM']['RotorSE']['n_tab'] == 1 and modeling_options['Level3']['ROSCO']['Flp_Mode'] > 0:
raise Exception('Flp_Mode is non zero in the modeling options, but no distributed aerodynamic control device is specified in the geometry yaml.')
return wt_opt
|
import pandas as pd
import numpy as np
import pytest
from column_completer import ColumnCompleter
X = np.random.randint(0, 100, (8, 3))
def test_name_collision_value_error_1():
df = pd.DataFrame(X, columns=["Col A", "Col_A", "Col B"])
with pytest.raises(ValueError) as err:
q = ColumnCompleter(df)
assert "spaces causes a collision of column names" in str(err.value)
def test_attribute_space_replaced_1():
df = pd.DataFrame(X, columns=["Col A", "col B", "Col C"])
q = ColumnCompleter(df)
assert all([col.startswith('Col_')
for col in vars(q) if col.startswith('Col')])
def test_attribute_space_replaced_2():
df = pd.DataFrame(X, columns=["Col A", "col B", "Col C"])
space_filler = '___'
q = ColumnCompleter(df, space_filler=space_filler)
assert all([col.startswith('Col' + space_filler)
for col in vars(q) if col.startswith('Col')])
def test_warn_spaces_at_edges_of_column_names_1():
df = pd.DataFrame(X, columns=["Col A ", "Col B", "Col C"])
with pytest.raises(Warning) as warn:
q = ColumnCompleter(df)
assert "The following columns ends with one or more spaces:" in str(
warn.value)
def test_warn_spaces_at_edges_of_column_names_2():
df = pd.DataFrame(X, columns=["Col A", " Col B", "Col C"])
with pytest.raises(Warning) as warn:
q = ColumnCompleter(df)
assert "The following columns starts with one or more spaces:" in str(
warn.value)
def test_rename_columns_1():
df_org = pd.DataFrame(X, columns=["col a", "col b", "col c"])
df_new = ColumnCompleter.replace_df_column_spaces(df_org, '_')
assert df_new.columns.tolist() == ["col_a", "col_b", "col_c"]
def test_rename_columns_2():
df_org = pd.DataFrame(X, columns=["col a", "col b", "col c"])
df_new = ColumnCompleter.replace_df_column_spaces(
df_org, '_', capatilize_first_letter=True)
assert df_new.columns.tolist() == ["Col_a", "Col_b", "Col_c"]
def test_rename_columns_3():
df_org = pd.DataFrame(X, columns=["col a", "col_a", "col c"])
with pytest.raises(ValueError) as err:
df_new = ColumnCompleter.replace_df_column_spaces(df_org, '_')
assert "Renaming the columns in such a way would cause a" in str(err.value)
def test_df_with_numeric_column_names():
df = pd.DataFrame(X)
q = ColumnCompleter(df) # no error should be raised
|
#
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
import importlib
import registration_shared
from datetime import datetime
from random import randrange
from registration_shared import client_registry_table
from registration_shared import iot_client
from OpenSSL.crypto import load_certificate, FILETYPE_PEM
def _create_keys_and_certificate():
keys_and_cert = iot_client.create_keys_and_certificate(setAsActive=True)
return keys_and_cert
def _create_thing(cognitoId, thingName):
print 'Attempting to create thing {}'.format(thingName)
thingResponse = iot_client.create_thing(thingName=thingName)
_set_device_info(cognitoId, thingResponse['thingName'], thingResponse['thingArn'])
print 'Created thing Name {} arn {}'.format(thingResponse['thingName'], thingResponse['thingArn'])
return thingResponse
def _set_device_info(cognitoId, thingName, thingArn):
print 'Setting thing for user {} to {} arn {}'.format(cognitoId, thingName, thingArn)
attribute_updates = ''
expression_attribute_values = {}
if thingName is not None:
attribute_updates += ' ThingName = :thingName'
expression_attribute_values[':thingName'] = thingName
if len(attribute_updates):
attribute_updates += ', '
attribute_updates += ' ThingArn = :thingArn'
expression_attribute_values[':thingArn'] = thingArn
update_expression = '{} {}'.format('SET',attribute_updates)
try:
response_info = client_registry_table.update_item(Key={ 'ClientID': cognitoId},UpdateExpression=update_expression,ExpressionAttributeValues=expression_attribute_values,ReturnValues='ALL_NEW')
except Exception as e:
raise RuntimeError('Could not update thing for {} {}'.format(cognitoId, e.response['Error']['Message']))
item_data = response_info.get('Attributes', None)
print 'Registration table device info update returned {}'.format(item_data)
def register_openssl(request):
clientId = request.event.get('clientId')
if clientId:
print 'Attempting openssl registration for id {}'.format(clientId)
client_info = registration_shared.get_user_entry(clientId)
else:
print 'Attempting openssl registration for new user'
client_info = {}
registration_status = client_info.get('RegistrationStatus')
print 'OpenSSL User status for {} returns {}'.format(clientId, registration_status)
responseObject = {}
if registration_status == 'BANNED':
responseObject['Result'] = 'DENIED'
return responseObject
if registration_status == 'UNKNOWN':
print 'Re-registering user with unknown status {}'.format(clientId)
keys_and_cert = _create_keys_and_certificate()
certificate_info = load_certificate(FILETYPE_PEM, keys_and_cert['certificatePem'])
certificate_sn = '{}'.format(certificate_info.get_serial_number())
print 'Got certificate SN {}'.format(certificate_sn)
if registration_status in ['NEW_USER', None]:
registration_shared.create_user_entry(certificate_sn, 'REGISTERED', False, keys_and_cert['certificateArn'])
print 'Attaching principal {} to policy {}'.format(keys_and_cert['certificateArn'], registration_shared.device_policy)
registration_shared.check_add_policy(keys_and_cert['certificateArn'], registration_shared.device_policy)
responseObject['Result'] = 'SUCCESS'
responseObject['ConnectionType'] = 'OPENSSL'
responseObject['PrivateKey'] = keys_and_cert['keyPair']['PrivateKey']
responseObject['DeviceCert'] = keys_and_cert['certificatePem']
return responseObject
|
from django.views.generic import CreateView, UpdateView, ListView
from .forms import MultipleChoiceTestWithAnswersForm
from .models import *
from django.utils.decorators import method_decorator
from JustTesting.utils.permission_decorators import user_permissions_decorator
@method_decorator(user_permissions_decorator, name="dispatch")
class TaskListView(ListView):
model = TaskList
template_name = "Task/task_lists.html"
context_object_name = "task_lists"
@method_decorator(user_permissions_decorator, name="dispatch")
class MultipleChoiceTestsOfTaskLisk(ListView):
template_name = "Task/multiple_choice_test_of_task_list.html"
context_object_name = "task_list"
def get_queryset(self):
return MultipleChoiceTest.objects.filter(task_list_id=self.kwargs["task_list_pk"])
@method_decorator(user_permissions_decorator, name="dispatch")
class MultipleChoiceTestCreateView(CreateView):
form_class = MultipleChoiceTestWithAnswersForm
template_name = "Task/multiple_choice_test_create.html"
def get_success_url(self):
return "create"
@method_decorator(user_permissions_decorator, name="dispatch")
class MultipleChoiceTestUpdateView(UpdateView):
form_class = MultipleChoiceTestWithAnswersForm
model = MultipleChoiceTest
template_name = "Task/multiple_choice_test_update.html"
def get_success_url(self):
return f"update={self.kwargs['pk']}"
|
# -*- coding: utf-8 -*-
from ....Classes.Circle import Circle
from ....Classes.Arc1 import Arc1
from ....Classes.Segment import Segment
from ....Classes.SurfLine import SurfLine
from numpy import exp, pi
from ....Functions.labels import (
SHAFT_LAB,
NO_LAM_LAB,
BOUNDARY_PROP_LAB,
SHAFTSR_LAB,
SHAFTSL_LAB,
SHAFTR_LAB,
)
def build_geometry(self, sym=1, alpha=0, delta=0):
"""Build the geometry of the shaft
Parameters
----------
self : Shaft
Shaft Object
sym : int
Symmetry factor (1= full machine, 2= half of the machine...)
alpha : float
Angle for rotation [rad]
delta : complex
Complex value for translation
Returns
-------
surf_list : list
list of surfaces needed to draw the shaft
"""
surf_list = list()
if sym == 1:
surf_list.append(
Circle(
radius=self.Drsh / 2,
label=NO_LAM_LAB + "_" + SHAFT_LAB,
center=0,
point_ref=0,
)
)
else:
begin = self.Drsh / 2
end = begin * exp(1j * 2 * pi / sym)
surface = SurfLine(
line_list=[
Segment(0, begin, prop_dict={BOUNDARY_PROP_LAB: SHAFTSR_LAB}),
Arc1(
begin, end, self.Drsh / 2, prop_dict={BOUNDARY_PROP_LAB: SHAFTR_LAB}
),
Segment(end, 0, prop_dict={BOUNDARY_PROP_LAB: SHAFTSL_LAB}),
],
label=NO_LAM_LAB + "_" + SHAFT_LAB,
point_ref=0,
)
surf_list.append(surface)
for surf in surf_list:
surf.rotate(alpha)
surf.translate(delta)
return surf_list
|
# -*- coding: utf-8 -*-
# @Author: liangou
# @Date: 2021-02-25 12:39:08
# @Last Modified by: liangou
# @Last Modified time: 2021-04-18 16:32:48
import pandas as pd
import math
import numpy as np
from sklearn.utils import shuffle
import multiprocessing as mp
from multiprocessing import Process,Queue
import sys,os
from concurrent.futures import ThreadPoolExecutor
from tombo.tombo_helper import intervalData,get_raw_signal
from tombo import tombo_helper, tombo_stats, resquiggle
from tombo.tombo_helper import Fasta
from tombo.tombo_helper import TomboMotif
import torch.nn.functional as F
from joblib import Parallel, delayed
import textwrap
import argparse
from gettext import gettext
import pdb,re
from tqdm.auto import tqdm
import pysam
import time
from functools import partial
import logging
import h5py
try:
import bripy
from bripy import *
except :
sys.stderr.write("Warning: The BRI module could not be loaded")
import random
import threading
def get_raw_read_slot(fast5_data):
raw_read_slot = next(iter(fast5_data['/Raw/Reads'].values()))
return raw_read_slot
def find_event(mid_path,name):
if "Events" in name:
return mid_path+"/"+name
def get_single_slot_genome_centric(r_data, *slot_name):
r_slot_values=[]
mid_path='/'.join(('/Analyses', r_data.corr_group))
find_e=partial(find_event, mid_path)
h5=h5py.File(r_data.fn, 'r')
mid_path=h5[mid_path].visit(find_e)
for sigle_slot in slot_name:
r_slot_values.append(h5[mid_path][sigle_slot])
h5.close()
return r_slot_values
'''
Multiple processes + coroutines are used to optimize program execution speed.
Example:
python ./tair_test.py predict --fast5 "/home/shihan/qinh_NCBI/elife_NDRS/col0_nanopore_drs/elif_col0_guppy324_bc/col0_guppy324_allpassf5_tombo/col0_all"
--bam "/home/shihan/qinh_NCBI/elife_NDRS/col0_nanopore_drs/elif_col0_guppy324_bc_fq/trans_align/col0_drs_all_guppy324_bc_cdna.bam" --label VIRc
'''
def polish_signal(signal_list,padding_num=0,length=256):
if len(signal_list)==length:
return signal_list
elif len(signal_list) < length:
signal_list.extend([padding_num]*abs(len(signal_list)-length))
return signal_list
else:
return signal_list[int((len(signal_list)//2)-(length/2)):int((len(signal_list)//2)+(length/2))]
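# Illustrative behaviour of polish_signal (assumed inputs, not from the source):
# polish_signal([1, 2, 3], length=5) -> [1, 2, 3, 0, 0] (right-padded with zeros)
# polish_signal(list(range(300)), length=256) -> the centred 256-sample window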
def get_pos(args):
'''
'''
fasta=Fasta(args.fasta)
motif=TomboMotif(args.motif)
#Cycle each chromosome/transcript
with open(args.output,'w') as out_hl:
for chrm in fasta.iter_chrms():
chrm_seq=fasta.get_seq(chrm)
for hit in motif.motif_pat.finditer(chrm_seq):
out_hl.write("\t".join([chrm,str(hit.start()),str(hit.end()),'+',hit.group()])+"\n")
def ont_hot(base_list):
'''
encode base symbol with one-hot
'''
dict={"A":[1,0,0,0],"T":[0,1,0,0],"C":[0,0,1,0],"G":[0,0,0,1]}
return np.array([dict[base.decode('UTF-8')] for base in base_list]).reshape(4,-1)
class ColoredArgParser(argparse.ArgumentParser):
# color_dict is a class attribute, here we avoid compatibility
# issues by attempting to override the __init__ method
# RED : Error, GREEN : Okay, YELLOW : Warning, Blue: Help/Info
color_dict = {'RED' : '1;31', 'GREEN' : '1;32',
'YELLOW' : '1;33', 'BLUE' : '1;36'}
def print_usage(self, file = None):
if file is None:
file = sys.stdout
self._print_message(self.format_usage()[0].upper() +
self.format_usage()[1:],
file, self.color_dict['YELLOW'])
def print_help(self, file = None):
if file is None:
file = sys.stdout
self._print_message(self.format_help()[0].upper() +
self.format_help()[1:],
file, self.color_dict['BLUE'])
def _print_message(self, message, file = None, color = None):
if message:
if file is None:
file = sys.stderr
# Print messages in bold, colored text if color is given.
if color is None:
file.write(message)
else:
# \x1b[ is the ANSI Control Sequence Introducer (CSI)
file.write('\x1b[' + color + 'm' + message.strip() + '\x1b[0m\n')
def exit(self, status = 0, message = None):
if message:
self._print_message(message, sys.stderr, self.color_dict['RED'])
sys.exit(status)
def error(self, message):
self.print_usage(sys.stderr)
args = {'prog' : self.prog, 'message': message}
self.exit(2, gettext('%(prog)s: Error: %(message)s\n') % args)
def extract_features(read,start,end,bamfile,wins,debug=False,bri=None,header=None):
sequ_fea=[x[start - read.start:end - read.start] for x in get_single_slot_genome_centric(read,*['length','norm_mean','norm_stdev'])]
r_sig,seg,*_,scale =get_raw_signal(read,start,end)
seg=seg[1:]-seg[0]
r_sig=(r_sig-scale.shift)/scale.scale
try:
if isinstance(bamfile,str):
sam_data = bri.get_alignments(read.read_id)
read_bam=pysam.AlignedSegment.fromstring(sam_data,header)
else:
read_bam=bamfile[read.read_id]
mapping_relation=pd.DataFrame(read_bam.get_aligned_pairs(matches_only=False),dtype=pd.Int64Dtype())
m6a_pos_inread=mapping_relation[(mapping_relation[1]==start+wins[0])].index.tolist()[0]
selected_base=mapping_relation.iloc[m6a_pos_inread-wins[0]:m6a_pos_inread+wins[1]+1,0].tolist()
read_baseQ=[read_bam.query_qualities[x] if isinstance(x,np.int64) else 0 for x in selected_base]
assert len(read_baseQ)==(end-start),"illegal length base-quality!"
sequ_fea.insert(2,[np.median(x) for x in np.split(r_sig,seg)[:-1]])
sequ_fea.insert(0,read_baseQ)
except Exception as e:
if debug:
sys.stderr.write(str(e)+'\n')
return None
return read.read_id,",".join([str(x) for x in np.vstack(sequ_fea).flatten()])
def not_empty(s):
return s
def get_logger():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
rq = time.strftime('%Y%m%d%H%M', time.localtime(time.time()))
log_path = os.path.dirname(os.getcwd()) + '/Logs/'
if not os.path.exists(log_path):
os.mkdir(log_path)
log_name = log_path + rq + '.log'
logfile = log_name
fh = logging.FileHandler(logfile, mode='w')
fh.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s")
fh.setFormatter(formatter)
logger.addHandler(fh)
return logger
#c7d8d026-1266-4e90-9537-42b82abd89e6
def predict_worker(region,args,p_num):
reads_index = tombo_helper.TomboReads([args.fast5,],corrected_group=args.corr_grp)
#If BRI-MODE, pass the file path, else write it into memory
if not args.bri:
bamfile={read.query_name:read for read in pysam.AlignmentFile(args.bam,"rb")}
bri,header=None,None
else:
bamfile=args.bam
bri = BamReadIndex(bamfile)
header=pysam.AlignmentFile(bamfile, "rb").header
tmp_fl=open("{0}_tmp".format(p_num),mode="w")
for site in tqdm(list(region.itertuples()),position=p_num+1,desc="_".join(["processes",str(p_num)])):
start,end=site.start,site.end
reg_data = intervalData(chrm=site.ref,start=start,end=end,strand='+').add_reads(reads_index,require_full_span=True)
reg_data.add_seq()
reg_seq=reg_data.seq
reg_data = intervalData(chrm=site.ref,start=start+(2-args.windows[0]),end=end+(args.windows[1]-2),strand='+')
reg_data.add_reads(reads_index,require_full_span=True)
if(len(reg_data.reads))<1:
continue
if args.debug:
sys.stderr.write("{}\t{}found {:,} reads in fast5\n".format(site.ref,str(site.start)+"-"+str(site.end),len(reg_data.reads)))
if(len(reg_data.reads))>10000:
reg_data.reads=random.sample(reg_data.reads, 10000)
site_fea=[]
for read in reg_data.reads:
site_fea.append(extract_features(read,start+(2-args.windows[0]),end+(args.windows[1]-2),bamfile,args.windows,args.debug,bri,header))
site_fea=list(filter(not_empty,site_fea))
if args.debug:
sys.stderr.write("{}\t{}found {:,} reads in bamfile\n".format(site.ref,str(site.start)+"-"+str(site.end),len(site_fea)))
if len(site_fea) >0:
tmp_fl.write(">{}_{}_{}\n".format(site.ref,str(start+3),reg_seq))
pd.DataFrame(site_fea).to_csv(tmp_fl,sep="\t",mode="a",header=0,index=0)
tmp_fl.flush()
tmp_fl.close()
# q.put((site._asdict(),site_fea))
def predict(args):
'''
fast5: a directory (that has been re-squiggled by tombo) containing the FAST5 files
label: the label of the dataset; you must choose one of 'a', 'm6a' and 'unknown'
bam: BAM file used to extract base-quality (feature)
sites: candidate positions used to extract features from mapped reads
'''
def split_df(df,N):
'''
Optimize multi-core usage
Divide a df evenly into n pieces
return a generator
'''
assert N >= 1 and isinstance(N, int), "you should input a positive integer!"
df = shuffle(df.values)
return (i for i in np.array_split(df, N))
if args.bri:
status=os.system(" bri index "+args.bam)
if status==0 :
print("Bri index success")
else:
args.bri=False
print("Bri index fail,switch to normal mode")
if args.debug:
mylog=get_logger()
site_df = pd.read_csv(args.sites,sep="\t",header=None,names=["ref", "start", "end", "strand","motif"])
site_df = site_df[site_df['strand']=="+"] #We consider only motifs located in the plus-strand of the transcriptome
if args.debug:
mylog.debug('found {0} candidate sites'.format(site_df.shape[0]))
region_batchs=split_df(site_df,args.processes)
pool = mp.Pool(processes=args.processes, initargs=(mp.RLock(),), initializer=tqdm.set_lock)
jobs =[]
for p_num,region in enumerate(region_batchs):
jobs.append(pool.apply_async(predict_worker, (region,args,p_num,)))
pool.close()
pool.join()
for j in jobs:
print(j.get())
print("all tasks has done!")
def argument_parsing():
description = textwrap.dedent("""
=====================================================================================================
Use a neural network model to predict RNA m6A modification status at single-read resolution.
We strongly recommend that you execute the following sub-commands in order:
get_pos get candidate positions from the reference fasta of the species
predict feed the dataset into the model and obtain the predicted results
See 'python ./main.py {sub-command} -h' to read about a sub-command's options in detail.
author:https://github.com/weir12
=====================================================================================================
""")
parser = ColoredArgParser(
description=description,
formatter_class=argparse.RawDescriptionHelpFormatter)
subparsers = parser.add_subparsers(title="Sub-command",dest='command')
parser_a = subparsers.add_parser('get_pos',formatter_class=argparse.RawDescriptionHelpFormatter,help='get candidate position')
parser_a.add_argument('--fasta',required=True, default=None,
help=("reference fasta"))
parser_a.add_argument('--motif', default='RRACH',
help=("specifies a motif pattern"))
parser_a.add_argument('--output', default='./candidate_predict_pos.txt',
help=("output file"))
parser_a.set_defaults(func=get_pos)
parser_b = subparsers.add_parser('predict',formatter_class=argparse.RawDescriptionHelpFormatter,help='predict')
parser_b.add_argument('--fast5',required=True, default=None,
help=("a directory(has been re-squiggled by tombo) that contains the FAST5 files"))
parser_b.add_argument('--corr_grp',default="RawGenomeCorrected_000",
help=("Analysis slot containing the re-squiggle information"))
parser_b.add_argument('--bam',required=True, default=None,
help=("BAM file used to extract base-quality(feature)"))
parser_b.add_argument('--sites',default='./candidate_predict_pos.txt',
help=("candidate position are used to extract features of mapped reads"))
parser_b.add_argument('--label',required=True,
help=("The string used to distinguish the sample"))
parser_b.add_argument('--windows',required=True,nargs=2,metavar='3',type=int,
help=("Window drift away from the center of m6A"))
parser_b.add_argument('--debug',action='store_true',default=False,
help=("Enable debug mode (output more detailed run log)"))
parser_b.add_argument('--bri',action='store_true',default=False,
help=("Enable BRI mode (Reduce RAM consumption of BAM files)"))
parser_b.add_argument('--processes',default=8,type=int,
help=("Number of processes allocated"))
parser_b.set_defaults(func=predict)
args = parser.parse_args()
args.func(args)
if __name__ == '__main__':
argument_parsing()
|
import numpy as num
from random import randrange
from scipy.sparse.linalg import gmres
import matplotlib.pyplot as plt
import math
import datetime
# GMRES with Givens rotation
g_1 = []  # log10 of the relative residual per iteration (appended to inside the solver)
def gmres_algorithm_givens(A, b, x0, error, max_iter):
res = b - num.asarray(num.dot(A,x0)).reshape(-1) # residual error
x_pred = []
q_ = [0] * max_iter
x_pred.append(res)
q_[0] = res / num.linalg.norm(res)
h_ = num.zeros((max_iter + 1, max_iter))
sn = num.zeros((max_iter , 1))
cs = num.zeros((max_iter , 1))
b_ = num.zeros(max_iter + 1)
b_[0] = num.linalg.norm(res)
global cpu_mul
cpu_mul = 0
for k in range(min(max_iter , A.shape[0])) :
time_s1 = datetime.datetime.now()
y_out = num.asarray(num.dot(A,q_[k])).reshape(-1)
time_end1 = datetime.datetime.now()
cpu_mul += (time_end1 - time_s1).total_seconds()
for j in range(k+1) :
h_[j , k] = num.dot(q_[j],y_out)
y_out = y_out - h_[j , k] * q_[j]
h_[k+1 , k] = num.linalg.norm(y_out)
if (h_[k + 1, k] != 0 and k != max_iter - 1):
q_[k+1] = y_out / h_[k+1 , k]
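# Apply the Givens rotations from previous iterations to the new Hessenberg
# column, then build a new rotation (cs[k], sn[k]) that zeroes h_[k+1, k],
# keeping the least-squares system upper triangular (standard GMRES update).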
for i in range(k):
temp = cs[i] * h_[i , k] + sn[i] * h_[i+1 , k]
h_[i+1 ,k] = -1*sn[i] * h_[i , k] + cs[i] * h_[i+1 , k]
h_[i , k] = temp
t = math.sqrt(h_[k , k]**2 + h_[k+1 , k]**2)
cs[k] = (h_[k , k]) /t
sn[k] = (h_[k+1 , k]) /t
h_[k , k] = cs[k] * h_[k , k] + sn[k] * h_[k + 1 , k]
h_[k + 1 , k] = 0
b_[k + 1] = -1 * sn[k] * b_[k]
b_[k] = cs[k] * b_[k]
#print(h_)
#print(b_)
c_ = num.linalg.lstsq(h_ , b_)[0]
prod_ = num.asarray(num.dot(num.asarray(q_).transpose() , c_))
if (k == max_iter - 1) :
print('q_ ' + str(num.asarray(q_).shape) + ' c_shape = ' + str(c_.shape) + ' prod_ = ' + str(prod_.shape))
#print(prod_)
x_pred.append(prod_ + x0)
x_temp_ = (num.linalg.norm(b - num.dot(A ,(prod_ + x0)).reshape(-1)) / num.linalg.norm(b))
g_1.append(math.log10(x_temp_))
print(x_temp_)
if (x_temp_ < error) :
print("exit val : ", k)
break
return x_pred
|
import os
from rasa.core.test import _generate_trackers, collect_story_predictions, test
# we need this import to ignore the warning...
# noinspection PyUnresolvedReferences
from rasa.nlu.test import run_evaluation
from tests.core.conftest import (
DEFAULT_STORIES_FILE,
E2E_STORY_FILE_UNKNOWN_ENTITY,
END_TO_END_STORY_FILE,
)
async def test_evaluation_image_creation(tmpdir, default_agent):
stories_path = os.path.join(tmpdir.strpath, "failed_stories.md")
img_path = os.path.join(tmpdir.strpath, "story_confmat.pdf")
await test(
stories=DEFAULT_STORIES_FILE,
agent=default_agent,
out_directory=tmpdir.strpath,
max_stories=None,
e2e=False,
)
assert os.path.isfile(img_path)
assert os.path.isfile(stories_path)
async def test_action_evaluation_script(tmpdir, default_agent):
completed_trackers = await _generate_trackers(
DEFAULT_STORIES_FILE, default_agent, use_e2e=False
)
story_evaluation, num_stories = collect_story_predictions(
completed_trackers, default_agent, use_e2e=False
)
assert not story_evaluation.evaluation_store.has_prediction_target_mismatch()
assert len(story_evaluation.failed_stories) == 0
assert num_stories == 3
async def test_end_to_end_evaluation_script(tmpdir, default_agent):
completed_trackers = await _generate_trackers(
END_TO_END_STORY_FILE, default_agent, use_e2e=True
)
story_evaluation, num_stories = collect_story_predictions(
completed_trackers, default_agent, use_e2e=True
)
assert not story_evaluation.evaluation_store.has_prediction_target_mismatch()
assert len(story_evaluation.failed_stories) == 0
assert num_stories == 2
async def test_end_to_end_evaluation_script_unknown_entity(tmpdir, default_agent):
completed_trackers = await _generate_trackers(
E2E_STORY_FILE_UNKNOWN_ENTITY, default_agent, use_e2e=True
)
story_evaluation, num_stories = collect_story_predictions(
completed_trackers, default_agent, use_e2e=True
)
assert story_evaluation.evaluation_store.has_prediction_target_mismatch()
assert len(story_evaluation.failed_stories) == 1
assert num_stories == 1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2017-12-25 16:34
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('rules', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Signature',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', models.CharField(max_length=255)),
('level', models.ForeignKey(db_column='level', null=True, on_delete=django.db.models.deletion.CASCADE, to='rules.RuleLevel')),
('rule', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rules.Rule', to_field='rule_id')),
],
options={
'db_table': 'signature',
},
),
migrations.CreateModel(
name='SignatureCategoryMapping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rule_id', models.PositiveIntegerField()),
('cat_id', models.PositiveIntegerField()),
],
options={
'db_table': 'signature_category_mapping',
},
),
]
|
from hanspell import spell_checker
from hanspell.constants import CheckResult
if __name__ == '__main__':
for i in range(1) :
result = spell_checker.check(u'안녕 하세요. 저는 한국인 입니다. 이문장은 한글로 작성됬습니다. 외않되. 잘 되는데?')
#print(type(result))
#print(result.as_dict().get("original"))
#print(result.as_dict().get("checked"))
#print(result.as_dict().get("errors"))
print(result.as_dict().get("words"))
#push data to DB.
# errors -> total number of errors
for key, value in result.words.items():
print(key, value)
|
class Squaring:
@staticmethod
def squaring(a):
c = a * a
return c |
'''
======================================================
Plotting a linear function with a categorical variable
======================================================
Fitting a pyearth model to a linear function shows that pyearth
will automatically choose a linear basis function in some cases.
'''
import numpy as np
import matplotlib.pyplot as plt
from pyearth import Earth
np.random.seed(1)
m = 1000
n = 5
X = np.random.normal(size=(m, n))
# Make X[:,1] binary
X[:, 1] = np.random.binomial(1, .5, size=m)
# The response is a linear function of the inputs
y = 2 * X[:, 0] + 3 * X[:, 1] + np.random.normal(size=m)
# Fit the earth model
model = Earth().fit(X, y)
# Print the model summary, showing linear terms
print(model.summary())
# Plot for both values of X[:,1]
y_hat = model.predict(X)
plt.figure()
plt.plot(X[:, 0], y, 'k.')
plt.plot(X[X[:, 1] == 0, 0], y_hat[X[:, 1] == 0], 'r.', label='$x_1 = 0$')
plt.plot(X[X[:, 1] == 1, 0], y_hat[X[:, 1] == 1], 'b.', label='$x_1 = 1$')
plt.legend(loc='best')
plt.xlabel('$x_0$')
plt.show()
|
from flaskapp import start_flask_server
flask_server = start_flask_server()
if __name__ == "__main__":
flask_server.run(debug=True,host='0.0.0.0')
|
import numpy as np
import cv2
import imutils
import glob
import math
import os
from db_config import *
session,engine = connect_db()
from config import *
import ot_functions as ot
def capture_plate(path,plate_name):
'''
Activates the webcam and streams its feed until you press 'q' on the keyboard
while on the image. It will then save the frame to the desired path.
'''
cam = cv2.VideoCapture(0)
counter = 0
while(True):
ret, frame = cam.read()
img = np.copy(frame)
img = imutils.resize(img, width=1000)
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
cv2.imshow('img', gray)
if cv2.waitKey(1) & 0xFF == ord('q'):
cv2.imwrite('{}/{}.jpg'.format(path,plate_name),frame)
print("Photo saved, click on the image to move to next plate")
cv2.waitKey(0)
break
cam.release()
cv2.destroyAllWindows()
def capture_build():
assemblies = []
print("Choose which build you would like to photograph:")
for index,assembly in enumerate(session.query(Plate).join(Build,Plate.builds).filter(Build.status == 'building').order_by(Build.build_name)):
print("{}. {}".format(index,assembly.builds.build_name))
assemblies.append(assembly)
plate_num = int(input("Enter plate here: "))
target_plate = assemblies[plate_num]
build_num = target_plate.builds.build_name
photo_path = '{}/builds/{}/{}_trans_pics'.format(BASE_PATH,build_num,build_num)
ot.make_directory(photo_path)
build_name = target_plate.builds.build_name
num_reactions = len(target_plate.wells)
num_plates = math.ceil(num_reactions/24)
plate_names = [build_name + '_p' + str(num + 1) for num in range(num_plates)]
print(plate_names)
for plate in plate_names:
print("Place the plate labelled {} inside the image box".format(plate))
capture_plate(photo_path,plate)
print('All of the plates have been captured')
if __name__ == "__main__":
capture_build()
#
|
import re
from django.core.cache import caches
from django.template.loader import render_to_string
from django.core.paginator import Paginator
from moex.utils import (search_new_securities_api,
security_search_in_db,
add_search_securities_to_cache)
from ..types_classes import InlineKeyboard, InlineKeyboardButton
cache = caches['default']
def prepare_search_msg(query, base='True', page_number=1):
# base = 'True' if search in own base, 'Fasle' for new securities
if base == 'True':
securities = security_search_in_db(query)
count = securities.count()
else:
securities = search_new_securities_api(query)
add_search_securities_to_cache(securities)
count = len(securities)
paginator = Paginator(securities, 3)
page = paginator.get_page(page_number)
buttons = list()
if page.has_previous():
buttons.append(InlineKeyboardButton(
'Prev',
callback_data='mode={}:query={}:page={}:base={}'.format(
'search', query, page.previous_page_number(), base
)
))
if page.has_next():
buttons.append(InlineKeyboardButton(
'Next',
callback_data='mode={}:query={}:page={}:base={}'.format(
'search', query, page.next_page_number(), base
)
))
msg = render_to_string('tgbot/search_securities.html',
context={
'base': base,
'query': query,
'page': page,
'pages': paginator.num_pages,
'count': count
})
return msg, buttons, count
def search_mode(request, bot):
query = request.tg_body.text
match = re.findall(r'[^A-Za-zА-Яа-яёЁ0-9]{1}', query)
empty = True
if match:
return bot.send_message(
'Недопустимые символы: "{}"'.format(''.join(set(match))),
request.tg_body.chat.id
)
msg, buttons, count = prepare_search_msg(query, base='True')
if count > 0:
empty = False
bot.send_message(
msg,
request.tg_body.chat.id,
reply_markup=InlineKeyboard([buttons]).to_json()
)
msg, buttons, count = prepare_search_msg(query, base='False')
if count > 0:
empty = False
bot.send_message(
msg,
request.tg_body.chat.id,
reply_markup=InlineKeyboard([buttons]).to_json()
)
if empty:
bot.send_message(
'По вашему запросу ничего не найдено.',
request.tg_body.chat.id
)
modes_views = {
'search': search_mode
}
def main_message_handle(request, bot):
chat_id = request.tg_body.chat.id
# check mode
mode = cache.get('tgbot_{}_mode'.format(chat_id))
if mode:
cache.add('tgbot_{}_mode'.format(chat_id),
mode, timeout=3 * 60)
return modes_views[mode](request, bot)
else:
bot.send_message(
'Пожалуйста, выберите режим...',
chat_id
)
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from numpy import genfromtxt
import seaborn as sns
#### ANN (FULL) ####
######################
# TAKES A FEW HOURS! #
######################
mcruns = 100000
df_ANN_fullvar = pd.DataFrame() # x scenarios, y mc runs per scenario
i = 0
scenarios = ["2_0C", "NDC", "RCP4_5_SSP2"]
scenarioslabels = ["2.0C", "NDC", "RCP4.5 & SSP2"]
for sc in scenarios:
path_VAR_fullvar = "..\\PAGEoutput\\mcPAGEVAR\\finalscc\\ANN_100k\\%s\\scc.csv" % (sc)
data_VAR_fullvar = genfromtxt(path_VAR_fullvar, delimiter=',')
for ii in range(mcruns):
df_ANN_fullvar = df_ANN_fullvar.append({'Scenario': sc, 'USD': data_VAR_fullvar[ii]}, ignore_index=True)
df_ANN_fullvar.to_csv('df_ANN_fullvar.csv', sep=',')
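# A vectorized alternative (sketch, assuming the same CSV layout): appending one
# row per Monte-Carlo draw is what makes the loop above take hours; building one
# DataFrame per scenario and concatenating once is much faster.
# df_parts = []
# for sc in scenarios:
#     data = genfromtxt("..\\PAGEoutput\\mcPAGEVAR\\finalscc\\ANN_100k\\%s\\scc.csv" % sc, delimiter=',')
#     df_parts.append(pd.DataFrame({'Scenario': sc, 'USD': data[:mcruns]}))
# df_ANN_fullvar = pd.concat(df_parts, ignore_index=True)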
print("I'm done, check whether I have done my work well.") |
import os
cwd=os.getcwd()
global path
from pydub import AudioSegment
from pydub.playback import play
path_terminator='/home/pi/Desktop/terminator_soundboards/'
path_starwars='/home/pi/Desktop/StarWars_audio'
def play_Strong_with_the_force():
path_i=os.path.join(path_starwars,'Strong with the force.mp3')
sound = AudioSegment.from_mp3(path_i)
play(sound)
def The_force():
path_i=os.path.join(path_starwars,'The force.mp3')
sound = AudioSegment.from_mp3(path_i)
play(sound)
def light_saber_on():
path_i=os.path.join(path_starwars,'light-saber-on.mp3')
sound = AudioSegment.from_mp3(path_i)
play(sound)
def light_saber_off():
path_i=os.path.join(path_starwars,'light-saber-off.mp3')
sound = AudioSegment.from_mp3(path_i)
play(sound)
def star_wars_theme():
path_i=os.path.join(path_starwars,'star-wars-theme-song.mp3')
sound = AudioSegment.from_mp3(path_i)
play(sound)
def play_theme():
path_i=os.path.join(path_terminator,'Theme Song.mp3')
sound = AudioSegment.from_mp3(path_i)
play(sound)
def play_mycpu():
path_i=os.path.join(path_terminator,'My CPU is intact.mp3')
sound = AudioSegment.from_mp3(path_i)
play(sound)
def play_Cybernetic():
path_i=os.path.join(path_terminator,'Cybernetic organism.mp3')
sound = AudioSegment.from_mp3(path_i)
play(sound)
def play_Hasta():
path_i=os.path.join(path_terminator,'Hasta la vista.mp3')
sound = AudioSegment.from_mp3(path_i)
play(sound)
def play_beback():
path_i=os.path.join(path_terminator,'Ill be back.mp3')
sound = AudioSegment.from_mp3(path_i)
play(sound)
def play_sound(rq):
import threading
while True:
try:
case=rq.get(False)
            # pass the function object as target; calling it (e.g. play_Hasta())
            # would play the clip in this thread and hand Thread a None target
            if case == 'hasta':
                threading.Thread(target=play_Hasta).start()
            if case == 'beback':
                threading.Thread(target=play_beback).start()
            if case == 'cyber':
                threading.Thread(target=play_Cybernetic).start()
            if case == 'cpu':
                threading.Thread(target=play_mycpu).start()
            if case == 'theme':
                threading.Thread(target=play_theme).start()
            if case == 'strongforce':
                threading.Thread(target=play_Strong_with_the_force).start()
            if case == 'star_wars_theme':
                threading.Thread(target=star_wars_theme).start()
            if case == 'light_on':
                threading.Thread(target=light_saber_on).start()
            if case == 'light_off':
                threading.Thread(target=light_saber_off).start()
            if case == 'theforce':
                threading.Thread(target=The_force).start()
except:
pass |
from kivy.uix.image import Image
from kivy.clock import Clock
from kivy.graphics.texture import Texture
from constants import *
from nodesensordisplay import NodeSensorDisplay
from scipy.optimize import minimize
import csv
import math
import numpy as np
import cv2
#Defines direction of arrow to render for each robot
robot_arrow = np.float32([[-5,0,0], [5,0,0]]).reshape(-1,3)
#Definition of aruco board sizes
m_width = 7.1
m_sep = 3.55
class MotionCapture(Image, NodeSensorDisplay):
def __init__(self, **kwargs):
super(MotionCapture, self).__init__(**kwargs)
self.logging = False
self.min_pts = np.zeros(0)
#load calibration constants for camera
self.camera_calib_mtx = np.load('camera_calib_mtx.npy')
self.camera_calib_dist = np.load('camera_calib_dist.npy')
#grab dict of aruco tags
self.aruco_dict = cv2.aruco.Dictionary_get(cv2.aruco.DICT_6X6_250)
self.parameters = cv2.aruco.DetectorParameters_create()
#aruco board generator
self.aruco_boards = []
for i in range(0,5):
self.aruco_boards.append(cv2.aruco.Board_create([np.array([[m_sep/2, m_width/2 ,0.],[m_sep/2+m_width, m_width/2, 0.],[m_sep/2+m_width, -m_width/2, 0.],[m_sep/2, -m_width/2, 0.]], dtype=np.float32),
np.array([[-(m_sep/2+m_width), m_width/2 ,0.],[-m_sep/2, m_width/2, 0.],[-m_sep/2, -m_width/2, 0.],[-(m_sep/2+m_width), -m_width/2, 0.]], dtype=np.float32)],
self.aruco_dict, np.array([i*2,i*2+1])))
#grab a hook to the webcam
self.capture = cv2.VideoCapture(0)
def toggle_logging(self):
if (self.logging):
#close up file
self.log_file.close()
#Update button name and flip the boolean
self.ids.toggle_logging_button.text = "Log"
self.logging = False
else:
#Open file and csv writer
self.log_file = open('data.csv',"wb")
self.csv_writer = csv.writer(self.log_file)
#Update button name and flip the boolean
self.ids.toggle_logging_button.text = "Stop"
self.logging = True
#override nodesensordisplay version
#don't add widget, so we don't actually render nodes
#rendering will be done through OpenCV
def set_list(self, node_list):
self.node_list = []
for node in node_list:
self.node_list.append(node['data'])
#print self.node_list
def draw_robot(self, pos, color, num, frame):
#project lines of detected robot locations onto the screen for rendering
pts, jac = cv2.projectPoints(robot_arrow, np.array([0,0,pos[2]], np.float32),
np.array([pos[0],pos[1],220], np.float32),
self.camera_calib_mtx, self.camera_calib_dist)
#draw robots on screen
frame = cv2.arrowedLine(frame, tuple(pts[0].ravel()), tuple(pts[1].ravel()), color, 5, tipLength=0.3)
cv2.putText(frame, str(num + 2), tuple(pts[0].ravel()), cv2.FONT_HERSHEY_SIMPLEX, 1, color, thickness=3)
def track_robots(self, frame):
#detect the corners and ids of all the aruco markers
corners, ids, rejectedImgPoints = cv2.aruco.detectMarkers(frame, self.aruco_dict, parameters=self.parameters)
if ids is not None:
robot_positions = np.zeros([ids.max()/2+1, 3])
else:
return np.zeros([0, 3])
for board in self.aruco_boards:
#if we detected some stuff
success = False
if (ids is not None):
success, rvec, tvec = cv2.aruco.estimatePoseBoard(corners, ids, board, self.camera_calib_mtx, self.camera_calib_dist)
try:
if (success):
#current rotation returned in axis-angle representation. See Wikipedia for helpful treatment
#we want to convert to euler angles, and are only interested in rotation about z
angle = np.linalg.norm(rvec)
axis = rvec/angle
s = np.sin(angle)
c = np.cos(angle)
t = 1-c
z_angle = np.arctan2(axis[2]*s - axis[1]*axis[0]*t, 1-(axis[2]**2 + axis[0]**2)*t)-3.14159/2
#add position vector to the overall vector
robot_positions[board.ids[0][0]/2] = [tvec[0], tvec[1], z_angle]
except TypeError:
#nothing detected
pass
return robot_positions
def update(self, dt):
#Call NodeSensorDisplay update method
super(MotionCapture, self).update(dt)
#draw video feed (like a boss)
ret, frame = self.capture.read()
if ret:
#detect robots
measured_pos = self.track_robots(frame)
#get calculated robot positions
calculated_pos = np.zeros([len(self.node_list), 3])
for n in self.node_list:
#expects nodes to be numbered 2,3,4...
calculated_pos[n.node_id-2] = [n.pos[0]/5., n.pos[1]/5., -np.radians(n.angle)]
min_pts = np.copy(self.min_pts)
if (len(calculated_pos) == len(measured_pos)):
                #fit points onto each other
err1 = self.normalize_pts(calculated_pos, measured_pos)
if (len(measured_pos) == len(self.min_pts)):
err2 = self.normalize_pts(min_pts, measured_pos)
if (self.logging):
#find difference b/w known and calculated states
difference_filter = calculated_pos-measured_pos
difference_min = min_pts-measured_pos
#log difference data to csv (like a boss)
self.csv_writer.writerow(np.append(difference_filter.flatten(),difference_min.flatten()))
#actually draw stuff
i = 0
for p in calculated_pos:
self.draw_robot(p, (255,0,0), i, frame)
i += 1
i = 0
for p in measured_pos:
self.draw_robot(p, (0,255,0), i, frame)
i += 1
i=0
for p in min_pts:
self.draw_robot(p, (0,0,255), i, frame)
i += 1
# convert OpenCV image to Kivy texture
buf1 = cv2.flip(frame, 0)
buf = buf1.tostring()
image_texture = Texture.create(size=(frame.shape[1], frame.shape[0]), colorfmt='bgr')
image_texture.blit_buffer(buf, colorfmt='bgr', bufferfmt='ubyte')
# display image from the texture
self.texture = image_texture
#run minimization algorithm when we get new data
#comment out if speed is desired
def trigger_new_data(self):
Dp = self.gen_adjacencies()
if (Dp.any()):
# try to minimize error
x0 = np.zeros((Dp.shape[0], 3))
#args has to be a tuple, because of weird numpy problems
out = minimize(fun=self.sum_errors, x0=x0, args=(Dp,), method='SLSQP')
# format stuff nicely and output
self.min_pts = np.reshape(out.x, (out.x.shape[0]/3, 3))
#scale things to be in the right coordinate system
self.min_pts[:,2] *= -1
self.min_pts[:,0:2] /= 5.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2020/6/6 19:12
# @Author : CoderCharm
# @File : __init__.py.py
# @Software: PyCharm
# @Desc :
"""
测试用例
官网地址
https://fastapi.tiangolo.com/tutorial/testing/
主要使用 TestClient 模块测试
from fastapi.testclient import TestClient
继承了 requests
TestClient(requests.Session):
"""
|
# functools.singledispatch dispatches on the *type* of the first argument,
# not on the number of arguments, so it cannot express this kind of
# arity-based overloading; plain optional arguments do the job instead.
def getMax(number1, number2=None, number3=None):
    """Return the largest of one, two or three numbers."""
    result = number1
    if number2 is not None and number2 > result:
        result = number2
    if number3 is not None and number3 > result:
        result = number3
    return result

print(getMax(2, 5))
print(getMax(2, 5, 3)) |
"""
Create a “LunarCalendarUtils” class that have ability to convert from Gregorian calendar to Lunar Calendar
"""
|
from torch.utils import data
from torchvision import datasets, transforms
def get_dataset(dataset, aug):
avail_datasets = {
'cifar10': get_cifar10,
'cifar100': get_cifar100
}
assert dataset in avail_datasets
return avail_datasets[dataset](aug)
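
# Example usage (illustrative only): wrap the returned datasets in DataLoaders.
#     trainset, testset = get_dataset('cifar10', aug=True)
#     train_loader = data.DataLoader(trainset, batch_size=128, shuffle=True, num_workers=2)
#     test_loader = data.DataLoader(testset, batch_size=128, shuffle=False, num_workers=2)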
# credits: https://github.com/kuangliu/pytorch-cifar
def get_cifar10(aug=False):
ori_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2470, 0.2435, 0.2616)),
])
aug_transform = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2470, 0.2435, 0.2616)),
])
trainset = datasets.CIFAR10(
root='./',
train=True,
download=True,
transform=(aug_transform if aug else ori_transform)
)
testset = datasets.CIFAR10(
root='./',
train=False,
download=True,
transform=ori_transform
)
return trainset, testset
# credits: https://gist.github.com/weiaicunzai/e623931921efefd4c331622c344d8151
def get_cifar100(aug=False):
ori_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.5071, 0.4865, 0.4409), (0.2673, 0.2564, 0.2762)),
])
aug_transform = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.5071, 0.4865, 0.4409), (0.2673, 0.2564, 0.2762)),
])
trainset = datasets.CIFAR100(
root='./',
train=True,
download=True,
transform=(aug_transform if aug else ori_transform)
)
testset = datasets.CIFAR100(
root='./',
train=False,
download=True,
transform=ori_transform
)
return trainset, testset |
import sqlite3
# zhconv converts between traditional and simplified Chinese;
# Chinese text in the MusicBrainz dump is stored in traditional characters.
from zhconv import convert
# Replace your own parameters
mbdump_path='mbdump/'
database_path='db.sqlite3'
prefix='musicbrainz_'
# Connect to the database
connection = sqlite3.connect(database_path)
'''
#Custom database table names .
# You may need to change table names if you integrate with your Django project.
old_prefix='sslog_musicbrainz_'
new_prefix='musicbrainz_'
tables=[
'artist',
'tag',
'artist_tag',
'release',
'release_tag',
'release_artist',
'recording',
'recording_tag',
'recording_artist',
'medium',
'artist_credit_name'
]
for item in tables:
sql = 'alter table ' + old_prefix + item + ' rename to ' + new_prefix + item
try:
connection.execute(sql)
except sqlite3.OperationalError as e:
print('no such table ' + old_prefix + item)
connection.commit()
'''
# import artist
with open(mbdump_path +'artist','rt',encoding='utf-8') as f:
i = 0
for line in f:
data = line.split('\t')
id= int(data[0])
gid = str(data[1]).replace('-','')
name = convert(str(data[2]), 'zh-hans')
params = (id,gid,name)
try:
connection.execute("insert into " + prefix + "artist values(?,?,?)",params)
if i % 10000 ==0:
print('musicbrainz_artist\t' + str(i))
except:
if i % 10000 ==0:
                print('musicbrainz_artist\t' + str(i) + ' already exists')
i += 1
connection.commit()
# import artist_credit_name
with open(mbdump_path +'artist_credit_name','rt',encoding='utf-8') as f:
i = 0
for line in f:
data = line.split('\t')
artist_credit= int(data[0])
artist = int(data[2])
params = (None,artist_credit,artist)
try:
connection.execute("insert into " + prefix + "artist_credit_name values(?,?,?)",params)
if i % 10000 ==0:
print('musicbrainz_artist_credit_name\t' + str(i))
except:
if i % 10000 ==0:
                print('musicbrainz_artist_credit_name\t' + str(i) + ' already exists')
i += 1
connection.commit()
# import tags (built from the label dump)
with open(mbdump_path +'label','rt',encoding='utf-8') as f:
i = 0
for line in f:
data = line.split('\t')
id= int(data[0])
gid = str(data[1]).replace('-','')
name = convert(str(data[2]), 'zh-hans')
params = (id,gid,name)
try:
connection.execute("insert into " + prefix + "tag values(?,?,?)",params)
if i % 10000 ==0:
print('musicbrainz_tag\t' + str(i))
except:
if i % 10000 ==0:
                print('musicbrainz_tag\t' + str(i) + ' already exists')
i += 1
connection.commit()
# import artist-label links (stored in artist_tag)
with open(mbdump_path + 'l_artist_label','rt',encoding='utf-8') as f:
i = 0
for line in f:
data = line.split('\t')
artist_id = data[2]
label_id= data[3]
params = (None,artist_id,label_id)
try:
connection.execute("insert into " + prefix + "artist_tag values(?,?,?)" ,params)
if i % 10000 ==0:
print('musicbrainz_artist_label\t' + str(i))
except:
if i % 10000 ==0:
                print('musicbrainz_artist_label\t' + str(i) + ' already exists')
i += 1
connection.commit()
# import release
with open(mbdump_path + 'release','rt',encoding='utf-8') as f:
i = 0
for line in f:
data = line.split('\t')
id= int(data[0])
gid = str(data[1]).replace('-','')
name = convert(str(data[2]), 'zh-hans')
artist_credit= str(data[3])
params = (id,gid,name,artist_credit,0)
try:
connection.execute("insert into " + prefix + "release values(?,?,?,?,?)",params)
if i % 10000 ==0:
print('musicbrainz_release\t' + str(i))
except:
if i % 10000 ==0:
                print('musicbrainz_release\t' + str(i) + ' already exists')
i += 1
connection.commit()
# Update the publishdate field in release by release_unknown_country
with open(mbdump_path + 'release_unknown_country','rt',encoding='utf-8') as f:
i = 0
for line in f:
data = line.split('\t')
id= data[0]
publishdate = data[1]
        params = (publishdate, id)
try:
connection.execute("update " + prefix + "release set publishdate=? where id=?" ,params)
print('musicbrainz_release_publishdate\t' + str(i))
if i % 10000 ==0:
connection.commit()
except:
with open('error.txt','at',encoding='utf-8') as f:
f.write('musicbrainz_release_unknown_country\t' + str(i) + '\t' +line)
            print('musicbrainz_release_unknown_country\t' + str(i) + ' already exists')
i += 1
connection.commit()
# Update the publishdate field in release by release_country
with open(mbdump_path + 'release_country','rt',encoding='utf-8') as f:
i = 1
for line in f:
data = line.split('\t')
id= data[0]
publishdate = data[2]
        params = (publishdate, id)
try:
connection.execute("update " + prefix + "release set publishdate=? where id=?" ,params)
print('musicbrainz_release_publishdate\t' + str(i))
if i % 10000 ==0:
connection.commit()
except:
with open('error.txt','at',encoding='utf-8') as f:
f.write('musicbrainz_release_country\t' + str(i) + '\t' +line)
            print('musicbrainz_release_country\t' + str(i) + ' already exists')
i += 1
connection.commit()
# import release_artist
with open(mbdump_path + 'release','rt',encoding='utf-8') as f:
i = 1
for line in f:
data = line.split('\t')
id = data[0]
artist_credit= str(data[3])
cursor = connection.execute("select * from " + prefix + "artist_credit_name where artist_credit=" + artist_credit)
for item in cursor:
params = (None,id,item[2])
try:
connection.execute("insert into " + prefix + "release_artist values(?,?,?)" ,params)
print('musicbrainz_release_artist\t' + str(i))
if i % 10000 ==0:
connection.commit()
except:
with open('error.txt','at',encoding='utf-8') as f:
f.write('musicbrainz_release_artist\t' + str(i) + '\t' +line)
                print('musicbrainz_release_artist\t' + str(i) + ' already exists')
i += 1
connection.commit()
# import release-label links (stored in release_tag)
with open(mbdump_path + 'release_label','rt',encoding='utf-8') as f:
i = 1
for line in f:
data = line.split('\t')
try:
release_id = int(data[1])
label_id= int(data[2])
params = (None,release_id,label_id)
connection.execute("insert into " + prefix + "release_tag values(?,?,?)" ,params)
print('musicbrainz_release_label\t' + str(i))
if i % 10000 ==0:
connection.commit()
except:
with open('error.txt','at',encoding='utf-8') as f:
f.write('musicbrainz_release_label\t' +str(i) + '\t' +line)
            print('musicbrainz_release_label\t' + str(i) + ' already exists')
i += 1
connection.commit()
# import recording
with open(mbdump_path + 'recording','rt',encoding='utf-8') as f:
i = 1
for line in f:
try:
data = line.split('\t')
id= int(data[0])
gid = str(data[1]).replace('-','')
name = convert(str(data[2]), 'zh-hans')
params = (id,gid,name,1)
connection.execute("insert into " + prefix + "recording values(?,?,?,?)" ,params)
i += 1
if i % 10000 == 0 :
print('musicbrainz_recording\t' + str(i))
except:
            print('musicbrainz_recording\t' + str(i) + ' already exists')
connection.commit()
# import media
with open(mbdump_path + 'medium','rt',encoding='utf-8') as f:
i = 1
for line in f:
data = line.split('\t')
try:
id= int(data[0])
release=int(data[1])
params = (id,release)
connection.execute("insert into " + prefix + "media values(?,?)" ,params)
print('musicbrainz_medium\t' + str(i))
except:
with open('error.txt','at',encoding='utf-8') as f:
f.write('musicbrainz_medium\t' +str(i) + '\t' +line)
print('musicbrainz_medium\t' +str(i) + '\t' +line)
i += 1
connection.commit()
# import recording_release
with open(mbdump_path + 'track','rt',encoding='utf-8') as f:
i = 1
for line in f:
data = line.split('\t')
try:
recording_id = int(data[2])
medium_id= int(data[3])
cursor = connection.execute("select * from " + prefix + "media where id=" + str(medium_id))
for item in cursor:
release_id =int(item[1])
params = (release_id,recording_id)
connection.execute("update " + prefix + "recording set release_id=? where id=?" ,params)
if i % 10000 == 0 :
print('musicbrainz_recording_release\t' + str(i))
except:
with open('error.txt','at',encoding='utf-8') as f:
f.write('musicbrainz_recording_release\t' +str(i) + '\t' +line)
print('musicbrainz_recording_release\t' +str(i) + '\t' +line)
i += 1
connection.commit()
# import recording_artist
with open(mbdump_path + 'recording','rt',encoding='utf-8') as f:
i = 1
for line in f:
data = line.split('\t')
try:
recording_id = int(data[0])
artist_credit= int(data[3])
cursor = connection.execute("select * from " + prefix + "artist_credit_name where artist_credit=" + str(artist_credit))
for item in cursor:
artist_id =int(item[2])
params = (None,recording_id,artist_id)
connection.execute("insert into " + prefix + "recording_artist values(?,?,?)" ,params)
if i % 10000 == 0 :
print('musicbrainz_recording_artist\t' + str(i))
except:
with open('error.txt','at',encoding='utf-8') as f:
f.write('musicbrainz_recording_artist\t' +str(i) + '\t' +line)
print('musicbrainz_recording_artist\t' +str(i) + '\t' +line)
i += 1
connection.commit()
# import release_tag
with open(mbdump_path + 'release_label','rt',encoding='utf-8') as f:
i = 1
for line in f:
data = line.split('\t')
try:
release_id = int(data[1])
label_id= int(data[2])
params = (None,release_id,label_id)
connection.execute("insert into " + prefix + "release_tag values(?,?,?)" ,params)
if i % 100 == 0 :
print('musicbrainz_release_label\t' + str(i))
except:
with open('error.txt','at',encoding='utf-8') as f:
f.write('musicbrainz_release_label\t' +str(i) + '\t' +line)
i += 1
connection.commit()
# import label-recording links (stored in recording_tag)
with open(mbdump_path + 'l_label_recording','rt',encoding='utf-8') as f:
i = 1
for line in f:
data = line.split('\t')
try:
label_id = int(data[2])
recording_id= int(data[3])
params = (None,recording_id,label_id)
connection.execute("insert into " + prefix + "recording_tag values(?,?,?)" ,params)
if i % 10000 == 0 :
print('musicbrainz_recording_tag\t' + str(i))
except:
with open('error.txt','at',encoding='utf-8') as f:
f.write('musicbrainz_recording_tag\t' +str(i) + '\t' +line)
i += 1
connection.commit()
connection.close()
|
"""
This file houses all of the digging functionality
@created: 11-1-2020
"""
import odrive
from odrive.utils import dump_errors
from odrive.enums import *
import subprocess
import yaml
import time
class Digging:
#--------------------------------------------------------------------
# Digging initialize function
#
# Establish the odrive connection for digging
#--------------------------------------------------------------------
def __init__(self):
self.serial_num = "00320097"
try:
print("Searching for digging odrive, this may take a few seconds...")
self.odrv0 = odrive.find_any(serial_number="207939834D4D")
except:
print("Unable to find digging odrive")
self.dig_engage_depth()
self.dig_engage_zipper()
#--------------------------------------------------------------------
# Move the zipper forward, digging the material below it
#
# param: speed -- set the speed of belt movement (max at 67)
#--------------------------------------------------------------------
def zipper_forward(self, speed):
print("\n belt foward current pull: \n")
print(self.odrv0.axis1.motor.current_control.Iq_measured)
self.odrv0.axis1.controller.input_vel = -speed
#--------------------------------------------------------------------
# Move the zipper backward, to get it unstuck in the case of digging
# incorrect material
#
# param: speed -- set the speed of belt movement (max at 67)
#--------------------------------------------------------------------
def zipper_back(self, speed):
print("\n belt backwards pull: \n")
print(self.odrv0.axis1.motor.current_control.Iq_measured)
self.odrv0.axis1.controller.input_vel = speed
#--------------------------------------------------------------------
# Stop the zipper at its current location
#--------------------------------------------------------------------
def zipper_stop(self):
self.odrv0.axis1.controller.input_vel = 0
time.sleep(0.1)
self.odrv0.axis1.controller.input_vel = 5
time.sleep(0.1)
self.odrv0.axis1.controller.input_vel = 0
#--------------------------------------------------------------------
# Extends the zipper drive deeper into the ground
#
# param: speed -- set the speed of depth adjustment (max at 50)
#--------------------------------------------------------------------
def depth_extend(self, speed):
print("\n depth extend current pull: \n")
print(self.odrv0.axis0.motor.current_control.Iq_measured)
self.odrv0.axis0.controller.input_vel = speed
#--------------------------------------------------------------------
# Retracts the zipper drive from the hole it has dug
#
# param: speed -- set the speed of the depth adjustment (max at 50)
#--------------------------------------------------------------------
def depth_retract(self, speed):
print("\n depth retract current pull: \n")
print(self.odrv0.axis0.motor.current_control.Iq_measured)
self.odrv0.axis0.controller.input_vel = -speed
#--------------------------------------------------------------------
# Stops adjusting the depth of the zipper
#--------------------------------------------------------------------
def depth_stop(self):
self.odrv0.axis0.controller.input_vel = 0
time.sleep(0.1)
self.odrv0.axis0.controller.input_vel = 5
time.sleep(0.1)
self.odrv0.axis0.controller.input_vel = 0
#--------------------------------------------------------------------
# Helper function to operate the stepper motor
#
# param: *args -- a variable set of arguments used to send commands
#--------------------------------------------------------------------
def ticcmd(self, *args):
return subprocess.check_output(['ticcmd'] + list(args))
#--------------------------------------------------------------------
# Rotate the zipper forward with the stepper motor
#--------------------------------------------------------------------
def stepper_forward(self, pos):
new_target = (-1 * pos)
self.ticcmd('--exit-safe-start', '-d', self.serial_num, '--position-relative', str(new_target))
#--------------------------------------------------------------------
# Rotate the zipper backward with the stepper motor
#--------------------------------------------------------------------
def stepper_backward(self, pos):
new_target = pos
self.ticcmd('--exit-safe-start', '-d', self.serial_num, '--position-relative', str(new_target))
#--------------------------------------------------------------------
    # Stop the stepper motor by resetting the Tic controller
#--------------------------------------------------------------------
def stepper_stop(self):
self.ticcmd('-d', self.serial_num, '--reset')
#--------------------------------------------------------------------
# Engages the depth motor by setting their state
#--------------------------------------------------------------------
def dig_engage_depth(self):
self.odrv0.axis0.requested_state = AXIS_STATE_CLOSED_LOOP_CONTROL
#--------------------------------------------------------------------
# Disengages the depth motor by setting their state
#--------------------------------------------------------------------
def dig_disengage_depth(self):
self.depth_stop()
time.sleep(0.1)
self.odrv0.axis0.requested_state = AXIS_STATE_IDLE
#--------------------------------------------------------------------
# Engages the zipper motor by setting their state
#--------------------------------------------------------------------
def dig_engage_zipper(self):
self.odrv0.axis1.requested_state = AXIS_STATE_CLOSED_LOOP_CONTROL
#--------------------------------------------------------------------
# Disengages the zipper motor by setting their state
#--------------------------------------------------------------------
def dig_disengage_zipper(self):
self.zipper_stop()
time.sleep(0.1)
self.odrv0.axis1.requested_state = AXIS_STATE_IDLE
#--------------------------------------------------------------------
    # Disengages the stepper motor by resetting the state
#--------------------------------------------------------------------
def dig_disengage_pitch(self):
self.ticcmd('-d', self.serial_num, '--reset')
#--------------------------------------------------------------------
    # Dumps all errors from the digging odrive
#--------------------------------------------------------------------
def dig_dump_errors(self):
        dump_errors(self.odrv0, True)
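
# Illustrative usage (assumes the ODrive and Tic stepper hardware are attached;
# the speeds below are arbitrary examples, not tuned values):
#     dig = Digging()
#     dig.zipper_forward(30)   # run the digging belt
#     dig.depth_extend(20)     # lower the belt into the ground
#     dig.depth_stop()
#     dig.zipper_stop()
#     dig.dig_disengage_zipper()
#     dig.dig_disengage_depth()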
|
import unittest
import logging
from flask import jsonify
from app.app import app
from app.utils.logger_wrapper import setup_logging
setup_logging()
logger = logging.getLogger(__name__)
class FlaskrTestCase(unittest.TestCase):
API_PREFIX = '/v1'
def setUp(self):
logger.info('Setting up for the test suite')
self.app = app.test_client()
def tearDown(self):
logger.info('Cleaning up')
# Start tests
def initialize_list(self):
res = self.app.post(self.API_PREFIX +'/asset/sample', data="{\"assetType\":\"antenna\", \"assetClass\":\"dish\"}", content_type='application/json')
def test_list_works(self):
logger.info("test_list_works")
res = self.app.get(self.API_PREFIX +'/assets')
logger.info(res.data.decode('utf-8'))
assert res.status_code == 200
def test_get_asset(self):
logger.info("test_get_asset")
self.initialize_list()
res = self.app.get(self.API_PREFIX +'/asset/sample')
logger.info(res.data.decode('utf-8'))
assert res.status_code == 200
def test_no_body(self):
logger.info("test_no_body")
res = self.app.post(self.API_PREFIX +'/asset/nobody')
logger.info(res.data.decode('utf-8'))
assert res.status_code == 400
def test_add_asset(self):
logger.info("test_add_asset")
res = self.app.post(self.API_PREFIX +'/asset/test', data="{\"assetType\":\"antenna\", \"assetClass\":\"dish\"}", content_type='application/json')
logger.info(res.data.decode('utf-8'))
assert res.status_code == 200
def test_add_asset_invalid_name_start(self):
logger.info("test_add_asset_invalid_name_start")
res = self.app.post(self.API_PREFIX +'/asset/-test', data="{\"assetType\":\"antenna\", \"assetClass\":\"dish\"}", content_type='application/json')
logger.info(res.data.decode('utf-8'))
assert res.status_code == 400
def test_add_asset_invalid_name_too_short(self):
logger.info("test_add_asset_invalid_name_too_short")
res = self.app.post(self.API_PREFIX +'/asset/t', data="{\"assetType\":\"antenna\", \"assetClass\":\"dove\"}", content_type='application/json')
logger.info(res.data.decode('utf-8'))
assert res.status_code == 400
def test_add_asset_invalid_name_too_long(self):
logger.info("test_add_asset_invalid_name_too_long")
res = self.app.post(self.API_PREFIX +'/asset/eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee',
data="{\"assetType\":\"antenna\", \"assetClass\":\"dove\"}", content_type='application/json')
logger.info(res.data.decode('utf-8'))
assert res.status_code == 400
def test_add_asset_invalid_type(self):
logger.info("test_add_asset_invalid_type")
res = self.app.post(self.API_PREFIX +'/asset/test', data="{\"assetType\":\"blah\", \"assetClass\":\"dish\"}", content_type='application/json')
logger.info(res.data.decode('utf-8'))
assert res.status_code == 400
def test_add_asset_invalid_class(self):
logger.info("test_add_asset_invalid_class")
res = self.app.post(self.API_PREFIX +'/asset/test', data="{\"assetType\":\"antenna\", \"assetClass\":\"dove\"}", content_type='application/json')
logger.info(res.data.decode('utf-8'))
assert res.status_code == 400
def test_invalid_endpoint(self):
logger.info("test_invalid_endpoint")
res = self.app.post(self.API_PREFIX +"/hi")
logger.info(res.data.decode('utf-8'))
assert res.status_code == 404
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = "Ricardo Ribeiro"
__credits__ = ["Ricardo Ribeiro"]
__license__ = "MIT"
__version__ = "0.0"
__maintainer__ = "Ricardo Ribeiro"
__email__ = "[email protected]"
__status__ = "Development"
from __init__ import *
import random, time
from PyQt4 import QtCore
class SimpleExample(BaseWidget):
def __init__(self):
super(SimpleExample,self).__init__('Simple example')
#Definition of the forms fields
self._control0 = ControlEventsGraph('Check me')
self._control1 = ControlEventsGraph('Check me')
self._control2 = ControlEventsGraph('Check me')
self._control3 = ControlEventsGraph('Check me')
self._txt = ControlText('Time')
self._btn = ControlButton('Click')
self._btn1 = ControlButton('Click 1')
self._save = ControlButton('Save button')
self._load = ControlButton('Load button')
self.formset = [
('_btn','_btn1'),
('_control0','_control1'),
('_control2','_control3'),
'_txt',
('_save','_load')]
self._btn.value = self.__btn
self._btn1.value = self.__btn1
self._save.value = self.save_window
self._load.value = self.load_window
self._start = time.time()
self.INTERVAL = 500
self.N_TRACKS = 8
def __btn(self):
for i in range(40):
s = random.randint( 0, 10000 )
o = random.randint( 0, 1000 )
self._control0.add_event( s, s+o, track=random.randint(0,self.N_TRACKS) )
#self._control0.add_event( random.randint(0, 10000), s+o, track=random.randint(0,self.N_TRACKS), color="#00FFDD")
self._control0.value = 5000
def __addEvent0(self):
b = self._control0.value
e = b+self.INTERVAL
self._control0.add_event( b, e, track=random.randint(0,self.N_TRACKS) )
self._control0.value = e
self._txt.value = str(time.time() - self._start)
def __addEvent1(self):
b = self._control1.value
e = b+self.INTERVAL
self._control1.add_event( b, e, track=random.randint(0,self.N_TRACKS) )
self._control1.value = e
def __addEvent2(self):
b = self._control2.value
e = b+self.INTERVAL
self._control2.add_event( b, e, track=random.randint(0,self.N_TRACKS) )
self._control2.value = e
def __addEvent3(self):
b = self._control3.value
e = b+self.INTERVAL
self._control3.add_event( b, e, track=random.randint(0,self.N_TRACKS) )
self._control3.value = e
def __btn1(self):
self._start = time.time()
timer = QtCore.QTimer(self.form)
timer.timeout.connect(self.__addEvent0)
timer.start(self.INTERVAL)
timer = QtCore.QTimer(self.form)
timer.timeout.connect(self.__addEvent1)
timer.start(self.INTERVAL)
timer = QtCore.QTimer(self.form)
timer.timeout.connect(self.__addEvent2)
timer.start(self.INTERVAL)
timer = QtCore.QTimer(self.form)
timer.timeout.connect(self.__addEvent3)
timer.start(self.INTERVAL)
##################################################################################################################
##################################################################################################################
##################################################################################################################
#Execute the application
if __name__ == "__main__": pyforms.start_app( SimpleExample )
|
#!/usr/bin/python
#-------------------------------------------------------------------------------
# Copyright 2019 Pivotal Software Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#-------------------------------------------------------------------------------
"""
Name: Generator.py
Author: Jarrod Vawdrey
Description:
"""
import yaml
import os
import json
import logging
import sys
import accountGenerator as ag
import merchantGenerator as mg
import transactionGenerator as tg
YAML_FILE="myConfigs.yml"
LOGGING_LEVEL=logging.INFO
def readYaml(filename):
cwd = os.getcwd()
print(cwd)
with open(filename, 'r') as stream:
try:
            myConfigs = yaml.safe_load(stream)  # safe_load avoids constructing arbitrary objects
return myConfigs
except yaml.YAMLError as exc:
logging.critical(exc)
sys.exit()
def checkAndDropFile(filename):
if os.path.exists(filename):
os.remove(filename)
return True
else:
return False
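
# For reference, a minimal myConfigs.yml compatible with the keys read below
# might look like this (all values are illustrative assumptions):
#
#   configs:
#     createAccountsJson: true
#     createLocationsJson: true
#   constraints:
#     states: ["GA", "TX"]
#   generator:
#     merchantsNumber: 100
#   data: {}
#   target:
#     type: csv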
if __name__ == '__main__':
# logging configurations
logging.basicConfig(format='%(asctime)s - %(message)s',level=LOGGING_LEVEL)
# read configs
myConfigs = readYaml(YAML_FILE)
try:
restrictToStates = myConfigs['constraints']['states']
except Exception as e:
logging.error(e)
restrictToStates = []
if myConfigs['configs']['createAccountsJson'] == True:
# clean up files if already exist
checkAndDropFile('accounts.json')
# create accounts.json file
ag.accountGenerator(myConfigs, restrictToStates)
if myConfigs['configs']['createLocationsJson'] == True:
# clean up files if already exist
checkAndDropFile('locations.json')
# create locations.json file
mg.merchantGenerator(myConfigs['generator']['merchantsNumber'], myConfigs['data'], restrictToStates)
# simulate transactions
if (myConfigs['target']['type'] == 'kafka'):
tg.generate_kafka_data(myConfigs)
elif (myConfigs['target']['type'] in ['json','csv']):
tg.generate_file_data(myConfigs)
logging.info("!")
logging.info("Finished generating data!")
|
#!/usr/bin/env python3
#
from __future__ import division, print_function
import numpy as np
class Transformation(object):
"""
Transforms from model to search space (and back).
"""
def transform(self, parameters, which_model, no_cells):
"""
Transform from model into search space.
"""
        # log-transform the even-indexed parameters; odd-indexed ones stay linear
        x = np.array([
            np.log(p) if i % 2 == 0 else p
            for i, p in enumerate(parameters[:24])
        ])
self.n_params = len(x)
self.no_cells = no_cells
for i in range(self.no_cells):
x = np.append(x, parameters[self.n_params+i])
return x
def detransform(self, transformed_parameters, which_model, noise=False):
"""
Transform back from search space to model space.
"""
        # undo the log transform on the even-indexed parameters
        x = np.array([
            np.exp(p) if i % 2 == 0 else p
            for i, p in enumerate(transformed_parameters[:24])
        ])
for i in range(self.no_cells):
x = np.append(x, transformed_parameters[self.n_params+i])
if noise:
x = np.append(x, transformed_parameters[self.n_params+self.no_cells])
return x
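

# Quick round-trip sanity check (illustrative only, not part of the original
# module): detransform(transform(p)) should reproduce p up to floating point
# error whenever the log-transformed entries are positive.
if __name__ == "__main__":
    params = list(np.abs(np.random.randn(24)) + 0.1) + [0.5, 0.7]
    trans = Transformation()
    x = trans.transform(params, which_model=None, no_cells=2)
    recovered = trans.detransform(x, which_model=None)
    assert np.allclose(recovered, params)
    print("round trip OK")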
|
#!/usr/bin/env python3
# coding: utf-8
import re
import sys
import json
from typing import Optional
import requests
from retrying import retry
headers = {
'Accept-Language': 'en-US,en;q=0.8',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:67.0) Gecko/20100101 Firefox/67.0',
}
# Testing ID - Always live
# channel_id = "UCSJ4gkVC6NrvII8umztf0Ow"
# Production ID - minhealthnz
channel_id = "UCPuGpQo9IX49SGn2iYCoqOQ"
@retry(stop_max_attempt_number=3)
def get(url: str) -> Optional[str]:
try:
r = requests.get(url, headers=headers)
return r.text
except requests.RequestException:
pass
def get_live_video_info_from_html(html: str) -> Optional[str]:
"""
Extract video info from HTML of channel page.
"""
# regex = r'{"itemSectionRenderer":{"contents":\[{"shelfRenderer":{"title":{"runs":\[{"text":"Live now".+?"content":{"expandedShelfContentsRenderer":{"items":(.+?),"showMoreText":{"runs":\[{"text":"Show more"}]}}}'
# regex = r'"contents":\[{"channelFeaturedContentRenderer":{"items":(.+?}])}}],"trackingParams"'
regex = re.search(r'var ytInitialData = ({".+?});</script>',html)
if regex is None:
print("Could not find JSON data in Youtube Result")
return None
json_items = json.loads(regex.group(1))
extract = json_items["contents"]["twoColumnBrowseResultsRenderer"]["tabs"][0]["tabRenderer"]["content"]["sectionListRenderer"]["contents"][0]["itemSectionRenderer"]["contents"][0]["channelFeaturedContentRenderer"]["items"][0]["videoRenderer"]
# print(extract)
# print(json.dumps(json_items, indent=2, separators=(',', ': ')))
# exit(0)
if (extract['thumbnailOverlays'][0]['thumbnailOverlayTimeStatusRenderer']['text']['accessibility']['accessibilityData']['label'] != 'LIVE'):
print("Youtube says it's live, but it isn't...")
return None
return extract['videoId']
def check_channel_live_streaming(channel_id: str) -> Optional[str]:
html = get(f'https://www.youtube.com/channel/{channel_id}/featured')
if '"label":"LIVE"' in html:
print("Channel Live")
# video_info = get_live_video_info_by_channel_id(channel_id)
video_info = get_live_video_info_from_html(html)
return video_info
elif "404 Not Found" in html:
print("Channel Not Found")
return None
def checkLive() -> Optional[str]:
return check_channel_live_streaming(channel_id)
# usage: ./check_youtube.py youtube_channel_id
if __name__ == '__main__':
if (len(sys.argv) < 2):
print(checkLive())
exit(1)
channel_id = sys.argv[1]
info = check_channel_live_streaming(channel_id)
if info:
# Same output format as
# youtube-dl --get-id --get-title --get-description
print("Video ID: ",info)
else:
print(f'No live streams for channel {channel_id} available now', file=sys.stderr)
exit(1) |
import sys
from functools import partial
import unittest, pytest
import sympy as sp
import matplotlib.pyplot as plt
if sys.platform == 'win32':
pytestmark = pytest.mark.skip("Skipping test on Windows")
dl = None
skiptest = unittest.skipIf(
True, reason="fenics_adjoint package not available on Windows")
else:
import dolfin as dl
from pyapprox_dev.fenics_models.fenics_utilities import *
from pyapprox_dev.fenics_models.nonlinear_diffusion import *
from pyapprox_dev.fenics_models.tests.test_advection_diffusion import \
get_exact_solution as get_advec_exact_solution, \
get_forcing as get_advec_forcing
dl.set_log_level(40)
skiptest = unittest.skipIf(
not has_dla, reason="fenics_adjoint package missing")
def quadratic_diffusion(u):
"""Nonlinear coefficient in the PDE."""
return 1 + u**2
def get_quadratic_diffusion_exact_solution_sympy():
# Use SymPy to compute f given manufactured solution u
from sympy.abc import t
x, y = sp.symbols('x[0] x[1]')
u = sp.cos(np.pi*t)*sp.sin(np.pi*x)*sp.sin(np.pi*y) # 1 + x + 2*y
return u, x, y, t
def get_quadratic_diffusion_exact_solution(mesh, degree):
u, x, y, t = get_quadratic_diffusion_exact_solution_sympy()
exact_sol = dla.Expression(
sp.printing.ccode(u), cell=mesh.ufl_cell(),
domain=mesh, t=0, degree=degree)
return exact_sol
def get_diffusion_forcing(q, sol_func, mesh, degree):
u, x, y, t = sol_func()
f = u.diff(t, 1)-sum(sp.diff(q(u)*sp.diff(u, xi), xi) for xi in (x, y))
f = sp.simplify(f)
forcing = dla.Expression(
sp.printing.ccode(f), cell=mesh.ufl_cell(),
domain=mesh, t=0, degree=degree)
return forcing
class TestNonlinearDiffusion(unittest.TestCase):
def setUp(self):
pass
def test_quadratic_diffusion_dirichlet_boundary_conditions(self):
"""
du/dt = div((1+u**2)*grad(u))+f in the unit square.
u = u_D on the boundary.
"""
nx, ny, degree = 21, 21, 2
mesh = dla.RectangleMesh(dl.Point(0, 0), dl.Point(1, 1), nx, ny)
function_space = dl.FunctionSpace(mesh, "Lagrange", degree)
bndry_obj = get_2d_unit_square_mesh_boundaries()
boundary_conditions = get_dirichlet_boundary_conditions_from_expression(
get_quadratic_diffusion_exact_solution(mesh, degree), 0, 1, 0, 1)
forcing = get_diffusion_forcing(
quadratic_diffusion, get_quadratic_diffusion_exact_solution_sympy,
mesh, degree)
options = {'time_step': 0.05, 'final_time': 1,
'forcing': forcing,
'boundary_conditions': boundary_conditions,
'second_order_timestepping': True,
'init_condition': get_quadratic_diffusion_exact_solution(
mesh, degree), 'nonlinear_diffusion':
quadratic_diffusion}
sol = run_model(function_space, **options)
exact_sol = get_quadratic_diffusion_exact_solution(mesh, degree)
exact_sol.t = options['final_time']
error = dl.errornorm(exact_sol, sol, mesh=mesh)
print('Abs. Error', error)
assert error <= 8e-5
def test_constant_diffusion_dirichlet_boundary_conditions(self):
kappa = 3
nx, ny, degree = 31, 31, 2
mesh = dla.RectangleMesh(dl.Point(0, 0), dl.Point(1, 1), nx, ny)
function_space = dl.FunctionSpace(mesh, "Lagrange", degree)
def constant_diffusion(u):
return dla.Constant(kappa)
boundary_conditions = get_dirichlet_boundary_conditions_from_expression(
get_advec_exact_solution(mesh, degree), 0, 1, 0, 1)
nlsparam = dict()
options = {'time_step': 0.05, 'final_time': 1,
'forcing': get_advec_forcing(kappa, mesh, degree),
'boundary_conditions': boundary_conditions,
'second_order_timestepping': True,
'init_condition': get_advec_exact_solution(mesh, degree),
'nlsparam': nlsparam,
'nonlinear_diffusion': constant_diffusion}
sol = run_model(function_space, **options)
exact_sol = get_advec_exact_solution(mesh, degree)
exact_sol.t = options['final_time']
error = dl.errornorm(exact_sol, sol, mesh=mesh)
print('Abs. Error', error)
assert error <= 1e-4
@skiptest
def test_adjoint(self):
np_kappa = 2
nx, ny, degree = 11, 11, 2
mesh = dla.RectangleMesh(dl.Point(0, 0), dl.Point(1, 1), nx, ny)
function_space = dl.FunctionSpace(mesh, "Lagrange", degree)
def constant_diffusion(kappa, u):
return kappa
boundary_conditions = \
get_dirichlet_boundary_conditions_from_expression(
get_advec_exact_solution(mesh, degree), 0, 1, 0, 1)
class NonlinearDiffusivity(object):
def __init__(self, kappa):
self.kappa = kappa
def __call__(self, u):
return (self.kappa+u**2)
dl_kappa = dla.Constant(np_kappa)
options = {'time_step': 0.05, 'final_time': 1.,
'forcing': dla.Constant(1),
'boundary_conditions': boundary_conditions,
'init_condition': get_advec_exact_solution(mesh, degree),
'nonlinear_diffusion': NonlinearDiffusivity(dl_kappa),
'second_order_timestepping': True,
'nlsparam': dict()}
def dl_qoi_functional(sol):
return dla.assemble(sol*dl.dx)
def dl_fun(np_kappa):
kappa = dla.Constant(np_kappa)
options_copy = options.copy()
options_copy['forcing'] = dla.Constant(1.0)
# using class avoids pickling
options_copy['nonlinear_diffusion'] = NonlinearDiffusivity(kappa)
sol = run_model(function_space, **options_copy)
return sol, kappa
def fun(np_kappa):
np_kappa = np_kappa[0, 0]
sol, kappa = dl_fun(np_kappa)
J = dl_qoi_functional(sol)
control = dla.Control(kappa)
dJd_kappa = dla.compute_gradient(J, [control])[0]
return np.atleast_1d(float(J)), np.atleast_2d(float(dJd_kappa))
sol, kappa = dl_fun(np_kappa)
J = dl_qoi_functional(sol)
control = dla.Control(kappa)
Jhat = dla.ReducedFunctional(J, control)
# h = dla.Constant(np.random.normal(0, 1, 1))
# conv_rate = dla.taylor_test(Jhat, kappa, h)
# assert np.allclose(conv_rate, 2.0, atol=1e-2)
from pyapprox.optimization import check_gradients
x0 = np.atleast_2d(np_kappa)
errors = check_gradients(fun, True, x0)
assert errors.min() < 1e-7 and errors.max() > 1e-1
class TestShallowIceEquation(unittest.TestCase):
def setUp(self):
pass
def run_shallow_ice_halfar(self, nphys_dim):
"""
See 'Exact time-dependent similarity solutions for isothermal shallow
ice sheets'
https://pdfs.semanticscholar.org/5e57/ffc51586717cb4db33c1c20ebed54c3bfbfb.pdf
Compute the similarity solution to the isothermal flat-bed SIA from
Halfar (1983). Constants H0 = 3600 m and R0 = 750 km are as in Test B in
Bueler et al (2005).
"""
nx, degree = 41, 1
glen_exponent = 3
Gamma = 2.8457136065980445e-05
positivity_tol = 0 # 1e-6
Lx = 1200e3
if nphys_dim == 1:
mesh = dla.IntervalMesh(nx, -Lx, Lx)
elif nphys_dim == 2:
ny, Ly = nx, Lx
mesh = dla.RectangleMesh(
dl.Point(-Lx, -Ly), dl.Point(Lx, Ly), nx, ny)
function_space = dl.FunctionSpace(mesh, "Lagrange", degree)
bed = None
forcing = dla.Constant(0.0)
exact_solution = get_halfar_shallow_ice_exact_solution(
Gamma, mesh, degree, nphys_dim)
if nphys_dim == 1:
boundary_conditions = \
get_1d_dirichlet_boundary_conditions_from_expression(
exact_solution, -Lx, Lx)
elif nphys_dim == 2:
boundary_conditions =\
get_dirichlet_boundary_conditions_from_expression(
exact_solution, -Lx, Lx, -Ly, Ly)
# exact solution is undefined at t=0 so set initial condition to
# some later time
        secpera = 31556926  # seconds per annum
exact_solution.t = 200*secpera
nlsparams = get_default_snes_nlsparams()
beta = None
diffusion = partial(
shallow_ice_diffusion, glen_exponent, Gamma, bed, positivity_tol,
beta)
options = {'time_step': 10*secpera, 'final_time': 600*secpera,
'forcing': forcing,
'boundary_conditions': boundary_conditions,
'second_order_timestepping': True,
'init_condition': exact_solution,
'nonlinear_diffusion': diffusion,
'nlsparam': nlsparams,
'positivity_tol': positivity_tol}
sol = run_model(function_space, **options)
# exact_solution.t=forcing.t+options['time_step']
exact_solution.t = options['final_time']
error = dl.errornorm(exact_solution, sol, mesh=mesh)
print('Abs. Error', error)
rel_error = error/dl.sqrt(
dla.assemble(exact_solution**2*dl.dx(degree=5)))
print('Rel. Error', rel_error)
plot = False
if plot and nphys_dim == 1:
function_space = dl.FunctionSpace(mesh, "CG", degree)
x = function_space.tabulate_dof_coordinates()
indices = np.argsort(x)
x = x[indices]
values = sol.vector().get_local()
values = values[indices]
plt.plot(x, values)
exact_values = dla.interpolate(
exact_solution, function_space).vector().get_local()
plt.plot(x, exact_values[indices])
plt.show()
elif plot and nphys_dim == 2:
fig = plt.figure(figsize=(3*8, 6))
ax = plt.subplot(1, 3, 1)
pl = plot(sol, mesh=mesh)
plt.colorbar(pl, ax=ax)
ax = plt.subplot(1, 3, 2)
pl = plot(exact_solution, mesh=mesh)
plt.colorbar(pl, ax=ax)
ax = plt.subplot(1, 3, 3)
pl = plot(exact_solution-sol, mesh=mesh)
plt.colorbar(pl, ax=ax)
plt.show()
assert rel_error <= 3e-4
def test_shallow_ice_halfar_1d(self):
self.run_shallow_ice_halfar(1)
def test_shallow_ice_halfar_2d(self):
self.run_shallow_ice_halfar(2)
def test_halfar_model(self):
nlsparams = get_default_snes_nlsparams()
# nlsparams = get_default_newton_nlsparams()
def dl_qoi_functional(sol):
return dla.assemble(sol*dl.dx)
def qoi_functional(sol):
return np.atleast_1d(float(dl_qoi_functional(sol)))
def qoi_functional_grad(sol, model):
J = dl_qoi_functional(sol)
control = dla.Control(model.shallow_ice_diffusivity.Gamma)
dJd_gamma = dla.compute_gradient(J, [control])[0]
# apply chain rule. we want gradient of qoi as a function of x
# but fenics compute gradient with respect to g(x)=(1+x)*Gamma
# dq/dx = dq/dg*dg/dx
dJd_gamma *= model.shallow_ice_diffusivity.Gamma
# h = dla.Constant(1e-5) # h must be similar magnitude to Gamma
#Jhat = dla.ReducedFunctional(J, control)
# conv_rate = dla.taylor_test(
# Jhat, model.shallow_ice_diffusivity.Gamma, h)
return np.atleast_2d(float(dJd_gamma))
if not has_dla:
qoi_functional_grad = None
        secpera = 31556926  # seconds per annum
init_time = 200*secpera
final_time, degree, nphys_dim = 300*secpera, 1, 1 # 600*secpera, 1, 1
model = HalfarShallowIceModel(
nphys_dim, init_time, final_time, degree, qoi_functional,
second_order_timestepping=True, nlsparams=nlsparams,
qoi_functional_grad=qoi_functional_grad)
# for nphys_dim=1 [8, 8] will produce error of 2.7 e-5
# but stagnates for a while at around 1e-4 for values
# 5, 6, 7
random_sample = np.array([[0]]).T
config_sample = np.array([[4]*nphys_dim + [4]]).T
sample = np.vstack((random_sample, config_sample))
sol = model.solve(sample)
exact_solution = get_halfar_shallow_ice_exact_solution(
model.Gamma, model.mesh, model.degree, model.nphys_dim)
exact_solution.t = final_time
error = dl.errornorm(exact_solution, sol, mesh=model.mesh)
print('Abs. Error', error)
rel_error = error/dl.sqrt(
dla.assemble(exact_solution**2*dl.dx(degree=5)))
print('Rel. Error', rel_error)
assert rel_error < 1e-3
if not has_dla:
return
# TODO: complete test qoi grad but first add taylor_test
val, grad = model(sample, True)
print(val, grad)
from pyapprox.optimization import check_gradients
from pyapprox.models.wrappers import SingleFidelityWrapper
fun = SingleFidelityWrapper(
partial(model, jac=True), config_sample[:, 0])
x0 = np.atleast_2d(model.Gamma)
errors = check_gradients(fun, True, x0, direction=np.atleast_2d(1))
assert errors.min() < 3e-5 and errors.max() > 1e-1
if __name__ == "__main__":
nonlinear_diffusion_test_suite = \
unittest.TestLoader().loadTestsFromTestCase(
TestNonlinearDiffusion)
unittest.TextTestRunner(verbosity=2).run(nonlinear_diffusion_test_suite)
shallow_ice_test_suite = unittest.TestLoader().loadTestsFromTestCase(
TestShallowIceEquation)
unittest.TextTestRunner(verbosity=2).run(shallow_ice_test_suite)
|
class QuickProtocol:
def __init__(self):
self.functions = dict()
def handle(self, query):
function_name = query["function"]
if function_name not in self.functions:
query.add_error("function {} not found".format(function_name), code=404)
return
func = self.functions[function_name]
func(query)
def __setitem__(self, func_name, func_obj):
self.functions[func_name] = func_obj
def __getitem__(self, func_name):
return self.functions[func_name]
def __delitem__(self, func_name):
del self.functions[func_name]
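

# Minimal usage sketch (illustrative only; the dict-based _DemoQuery below is an
# assumption -- the real query object only needs item access and add_error()).
class _DemoQuery(dict):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.errors = []

    def add_error(self, message, code=500):
        self.errors.append({"message": message, "code": code})


if __name__ == "__main__":
    protocol = QuickProtocol()
    protocol["echo"] = lambda query: print(query["payload"])
    protocol.handle(_DemoQuery(function="echo", payload="hello"))  # prints "hello"
    bad = _DemoQuery(function="missing")
    protocol.handle(bad)  # records a 404 error instead of raising
    print(bad.errors)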
|
import logging
from pyFeatSel.FeatureSelectors.FeatureSelector import FeatureSelector
from pyFeatSel.misc.Helpers import create_all_combinations
class CompleteFeatureSpace(FeatureSelector):
def run_selecting(self):
column_names = self.train_data.columns.values.tolist()
combns = [comb for comb in create_all_combinations(column_names) if len(comb) > 0]
for comb in combns:
measure = self.inner_run(list(comb))
self.computed_features += [{"measure": measure, "column_names": comb}]
logging.info("Test Measure: {0}, Val Measure: {1}".format(measure["test"], measure["val"]))
if self.best_result is None:
self.best_result = {"measure": measure, "column_names": comb}
elif measure["test"] > self.best_result["measure"]["test"] and self.maximize_measure:
self.best_result = {"measure": measure, "column_names": comb}
elif measure["test"] < self.best_result["measure"]["test"] and not self.maximize_measure:
self.best_result = {"measure": measure, "column_names": comb}
else:
continue
|
#!/bin/python
import os
from os import path
import sys
import argparse
import platform
import subprocess
import logging
import xml.etree.ElementTree as ET
logfile = None
if os.environ.get('VTEST_PY_VENV_PATH'):
logfile = os.environ['VTEST_PY_VENV_PATH'] + '/run_test.log'
else:
logfile = os.path.realpath(
os.environ.get('PWD') +
'/../../build/debug/vrouter/utils/vtest_py_venv/run_test.log')
logging.basicConfig(filename=logfile,
filemode='w',
level=logging.DEBUG,
format='%(asctime)s %(message)s')
def parse(cmd):
if (len(cmd) == 1):
print("Use -h or --help option for usage detail")
return
parser = argparse.ArgumentParser()
parser.add_argument('-vr', '--vrouter', required=False,
help="specify vrouter path")
parser.add_argument('-vt', '--vtest', required=False,
help="specify vtest path")
parser.add_argument('-sp', '--socket', required=False,
help="specify socket path")
parser.add_argument('-v', '--venv', required=False,
help="specify vtest_py venv path")
parser.add_argument('-vt_only', '--vtest_only',
help="run vTest alone", action="store_true")
parser.add_argument('-vr_only', '--vrouter_only',
help="run vRouter alone", action="store_true")
parser.add_argument('-t', '--test', required=False,
help="test a specific file")
parser.add_argument('-gxml', '--xml',
help="tpecify xml file", action="store_true")
parser.add_argument("-a", "--all",
help="run all tests", action="store_true")
parser.add_argument("-p", "--pycodestyle",
help="run pycodestyle check", action="store_true")
parser.add_argument("-f", "--flake",
help="run flake check", action="store_true")
parser.add_argument("-c", "--cli", required=False,
help="run vrouter commands like 'vif --list'"
"'flow -l', etc")
parser.add_argument("-l", "--log_level", required=False,
help="set log level (ERROR/INFO/DEBUG)",
default='INFO')
vrouter_path = os.environ.get('VROUTER_DPDK_PATH')
vtest_path = os.environ.get('VTEST_PATH')
socket_path = os.environ.get('VROUTER_SOCKET_PATH')
vtest_py_venv_path = os.environ.get('VTEST_PY_VENV_PATH')
tests_path = os.environ.get('PWD')
build_path = tests_path + '/../../build'
args = vars(parser.parse_args())
test_opt = args['test']
if args['vrouter'] is None:
if vrouter_path is None:
path_cmd = '{}/debug/vrouter/dpdk/contrail-vrouter-dpdk'.\
format(build_path)
vrouter_path = os.path.realpath(path_cmd)
logging.info("Using default vrouter path - {}".format(vrouter_path))
else:
vrouter_path = args['vrouter']
if not path.exists(vrouter_path):
logging.error("vRouter path not set")
exit(1)
os.environ['VROUTER_DPDK_PATH'] = vrouter_path
if args['vtest'] is None:
if vtest_path is None:
path_cmd = '{}/debug/vrouter/utils/vtest/vtest'.format(
build_path)
vtest_path = os.path.realpath(path_cmd)
logging.info("Using default vtest path - {}".format(vtest_path))
else:
vtest_path = args['vtest']
if not path.exists(vtest_path):
logging.error("vtest path not set")
exit(1)
os.environ['VTEST_PATH'] = vtest_path
if args['socket'] is None:
if socket_path is None:
path_cmd = '{}/debug/vrouter/utils/vtest_py_venv/sock/'.\
format(build_path)
socket_path = os.path.realpath(path_cmd)
logging.info("Using default socket path - {}".format(vtest_path))
else:
socket_path = args['socket']
# VR_UNIX_PATH_MAX is set as 108
if len(socket_path) > (108 - len('dpdk_netlink')):
logging.info("Socket path is too long {}, so setting it to /tmp/sock".
format(socket_path))
if not os.path.exists('/tmp/sock'):
os.makedirs('/tmp/sock')
socket_path = os.path.realpath('/tmp/sock')
if not path.exists(socket_path):
logging.error("socket path not set")
exit(1)
os.environ['VROUTER_SOCKET_PATH'] = socket_path
if args['venv'] is None:
if vtest_py_venv_path is None:
path_cmd = '{}/debug/vrouter/utils/vtest_py_venv'.format(
build_path)
vtest_py_venv_path = os.path.realpath(path_cmd)
logging.info("Using default venv path - {}".format(args['venv']))
else:
vtest_py_venv_path = args['venv']
if not path.exists(vtest_py_venv_path):
logging.error("venv path not set")
exit(1)
os.environ['VTEST_PY_VENV_PATH'] = vtest_py_venv_path
    utility_path = build_path + '/debug/vrouter/utils/'
    if args['cli']:
        cmd = '{}{} --sock-dir {}'.format(utility_path, args['cli'],
socket_path)
os.system(cmd)
exit(0)
os.environ['LOG_PATH'] = logfile
if args['log_level'] == 'ERROR':
os.environ['LOG_LEVEL'] = "40"
elif args['log_level'] == 'DEBUG':
os.environ['LOG_LEVEL'] = "10"
else:
# default is info
os.environ['LOG_LEVEL'] = "20"
logging.info("\nRunning tests with following params:")
logging.info("VROUTER_DPDK_PATH: {}".format(vrouter_path))
logging.info("VTEST_PATH: {}".format(vtest_path))
logging.info("VROUTER_SOCKET_PATH: {}".format(socket_path))
logging.info("VTEST_PY_VENV_PATH: {}".format(vtest_py_venv_path))
logging.info("VTEST_ONLY_MODE: {}".format(args['vtest_only']))
logging.info("VROUTER_ONLY_MODE: {}".format(args['vrouter_only']))
logging.info("TEST PARAM: {}".format(test_opt))
logging.info("LOG_PATH: {}".format(logfile))
logging.info("LOG_LEVEL: {}".format(args['log_level']))
if args['vtest_only']:
os.environ["VTEST_ONLY_MODE"] = "1"
if(os.system('pidof contrail-vrouter-dpdk') != 0):
print("Error! You have specified vtest_only, but there is")
print("no vrouter running. Please check!")
return 1
else:
os.environ["VTEST_ONLY_MODE"] = "0"
if args['vrouter_only']:
os.environ["VROUTER_ONLY_MODE"] = "1"
exec_cmd = 'taskset 0x6 {} --no-daemon --no-huge --vr_packet_sz 2048 \
--vr_socket_dir {}'.format(vrouter_path, socket_path)
logging.info("Running cmd {}".format(exec_cmd))
os.execlp("taskset", "taskset", "0x6", vrouter_path,
"--no-daemon", "--no-huge", "--vr_packet_sz",
"2048", "--vr_socket_dir", socket_path)
return 0
else:
os.environ["VROUTER_ONLY_MODE"] = "0"
extension = None
if(test_opt is not None and test_opt.find('.py') != -1):
extension = test_opt.split('.')[1]
if extension is not None:
extension = extension.split('::')[0]
if(extension == 'py'):
file_name = test_opt.split('::')[0]
cmd = "cp {} {}/tests/{}".\
format(file_name, vtest_py_venv_path, file_name)
logging.info("Running cmd {}".format(cmd))
os.system(cmd)
logging.info("Entering venv")
os.chdir(vtest_py_venv_path)
cmd = None
if args['all']:
logging.info("Executing all the tests in ./tests dir ..")
if(args['xml'] is not None):
cmd = 'pytest ./tests --junitxml=result.xml'
else:
cmd = 'pytest ./tests/'
elif args['pycodestyle']:
logging.info("Running pycodestyle check ..")
cmd = "source ./bin/activate; pycodestyle lib/*.py tests/test_*.py;"
cmd_op = os.popen(cmd).read()
if cmd_op:
print(cmd_op)
raise NameError('pycodestyle errors')
exit(0)
elif args['flake']:
logging.info("Running flake check ..")
cmd = 'flake8 lib/*.py tests/test_*.py'
else:
if(test_opt):
logging.info("Executing test file {} ..".format(test_opt))
if(args['xml'] is not None):
cmd = 'pytest -s ./tests/{} --junitxml=result.xml'.format(
test_opt)
else:
cmd = 'pytest -s ./tests/{}'.format(test_opt)
result = run_command(cmd)
logging.info("Exiting venv\n")
print('Logs path : {}'.format(logfile))
if(result != 0):
logging.error("Script execution failed")
exit(1)
def run_command(cmd):
if(cmd is None):
logging.info("No command to run. Exiting!!!")
return 1
logging.info("Running cmd - {}".format(cmd))
try:
cmd = "source ./bin/activate;" + cmd + "; deactivate"
os.system(cmd)
    except Exception as e:
        logging.error("Running cmd - {} failed: {}".format(cmd, e))
return 1
else:
return 0
def parse_result():
if not path.exists('result.xml'):
exit(0)
tree = ET.parse('result.xml')
root = tree.getroot()
for child in root:
if child.attrib['failures'] != '0':
print("Script execution failed")
exit(1)
def main():
cmd = sys.argv
parse(cmd)
parse_result()
if __name__ == '__main__':
main()
|
from itertools import product
from random import sample
import math
import numpy as np
from PyR2 import hu
from robotics.pyr2.utils import get_box_body
from PyR2.pr2.pr2Robot import makeRobot
BODY_PLACEMENT_Z_OFFSET = 1e-3
TOP_HOLDING_LEFT_ARM = [0.67717021, -0.34313199, 1.2, -1.46688405, 1.24223229, -1.95442826, 2.22254125]
class ManipulationProblem:
def __init__(self, workspace, initial_conf, initial_poses=[],
movable_names=[], known_poses=[],
goal_conf=None, goal_poses={}):
self.workspace = workspace
self.initial_conf = initial_conf
self.initial_poses = initial_poses
self.movable_names = movable_names
self.goal_conf = goal_conf
self.goal_poses = goal_poses
self.known_poses = known_poses
def dantam2(): # (Incremental Task and Motion Planning: A Constraint-Based Approach)
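    # Builds a tabletop pick-and-place problem: an m x n grid of candidate
    # poses on a table, n_obj boxes placed at randomly sampled grid cells, and
    # a goal of moving the first ('goal') box to the centre grid pose.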
m, n = 3, 3
#n_obj = 8
n_obj = 2
side_dim = .07 # .05 | .07
height_dim = .1
box_dims = (side_dim, side_dim, height_dim)
separation = (side_dim, side_dim)
workspace = np.array([[-2.0, -1.5, 0.0],
[1.0, 1.5, 2.0]])
robot = makeRobot(workspace, useLeft=True, useRight=False)
base_conf = (-.75, .2, -math.pi/2)
initial_conf = robot.makeConf(*base_conf, g=robot.gripMax, ignoreStandard=True)
initial_conf = initial_conf.set('pr2LeftArm', TOP_HOLDING_LEFT_ARM)
initial_poses = []
goal_poses = {}
length = m*(box_dims[0] + separation[0])
width = n*(box_dims[1] + separation[1])
height = .7
table = get_box_body(length, width, height, name='table', color='brown')
initial_poses.append((table, hu.Pose(0, 0, height/2, 0)))
poses = []
z = height + height_dim/2 + BODY_PLACEMENT_Z_OFFSET
theta = 0
for r in range(m):
row = []
x = -length/2 + (r+.5)*(box_dims[0] + separation[0])
for c in range(n):
y = -width/2 + (c+.5)*(box_dims[1] + separation[1])
row.append(hu.Pose(x, y, z, theta))
poses.append(row)
coordinates = list(product(range(m), range(n)))
assert n_obj <= len(coordinates)
obj_coordinates = sample(coordinates, n_obj)
movable_names = []
for i, (r, c) in enumerate(obj_coordinates):
row_color = np.zeros(4)
row_color[2-r] = 1.
if i == 0:
name = 'goal%d-%d'%(r, c)
color = 'blue'
#color = (0, 0, 255)
            goal_poses[name] = poses[m//2][n//2]
else:
name = 'block%d-%d'%(r, c)
color = 'red'
#color = (255, 0, 0)
obj = get_box_body(*box_dims, name=name, color=color)
initial_poses.append((obj, poses[r][c]))
movable_names.append(name)
known_poses = [pose for col in poses for pose in col]
return ManipulationProblem(workspace, initial_conf, initial_poses=initial_poses,
movable_names=movable_names, known_poses=known_poses,
goal_poses=goal_poses, goal_conf=initial_conf) |
import autoslug.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = []
operations = [
migrations.CreateModel(
name="Email",
fields=[
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
(
"email",
models.EmailField(
unique=True, max_length=254, verbose_name="E-mail"
),
),
],
options={"verbose_name": "Email", "verbose_name_plural": "Emails"},
),
migrations.CreateModel(
name="Institution",
fields=[
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
("name", models.CharField(max_length=250, verbose_name="Name")),
(
"slug",
autoslug.fields.AutoSlugField(
editable=False,
populate_from=b"name",
unique=True,
verbose_name="Slug",
),
),
(
"address",
models.EmailField(
help_text="E-mail address used to contact with institutions",
max_length=254,
verbose_name="E-mail",
),
),
],
options={
"verbose_name": "Institution",
"verbose_name_plural": "Institution",
},
),
migrations.CreateModel(
name="Tag",
fields=[
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
("name", models.CharField(max_length=15, verbose_name="Name")),
(
"slug",
autoslug.fields.AutoSlugField(
populate_from=b"name", verbose_name="Slug", editable=False
),
),
],
options={"verbose_name": "Tag", "verbose_name_plural": "Tags"},
),
]
|
"""Implementation of the spreadplayers command."""
from mcipc.rcon.be.types import RelativeFloat
from mcipc.rcon.client import Client
__all__ = ['spreadplayers']
# pylint: disable=C0103,R0913
def spreadplayers(self: Client, x: RelativeFloat, y: RelativeFloat,
spread_distance: float, max_range: float,
victim: str) -> str:
"""Spreads players."""
return self.run('spreadplayers', x, y, spread_distance, max_range, victim)
|
import toppra
import pytest
try:
import openravepy as orpy
IMPORT_OPENRAVE = True
except ImportError as err:
IMPORT_OPENRAVE = False
except SyntaxError as err:
IMPORT_OPENRAVE = False
@pytest.fixture(scope="session")
def rave_env():
env = orpy.Environment()
yield env
env.Destroy()
def pytest_addoption(parser):
parser.addoption(
"--loglevel", action="store", default="WARNING",
help="Set toppra loglevel during testing."
)
parser.addoption(
"--robust_regex", action="store", default=".*oa.*",
help="Regex to choose problems to test when running test_robustness_main.py. "
"Select '.*oa.*' to run only tests for hotqpoases."
)
parser.addoption(
"--visualize", action="store_true", default=False,
help="If True visualize test instance."
)
def pytest_collection_modifyitems(config, items):
toppra.setup_logging(config.getoption("--loglevel"))
|
from gpiozero import Motor
from time import sleep
import sys, select, tty, termios
motorA = Motor(17,18)
motorB = Motor(22,23)
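# isData(): non-blocking check for a pending keypress, using select() on stdin
# with a zero timeout.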
def isData():
return select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], [])
old_settings = termios.tcgetattr(sys.stdin)
try:
tty.setcbreak(sys.stdin.fileno())
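    # Keyboard teleop loop: 'w'/'s' drive both motors forward/backward,
    # 'a'/'d' spin in place, 'q' stops both motors and ESC exits.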
i = 0
while 1:
if isData():
c = sys.stdin.read(1)
print(c)
if c == '\x1b':
break
elif c == 'w':
motorA.forward()
motorB.forward()
elif c == 's':
motorA.backward()
motorB.backward()
elif c == 'a':
motorA.forward()
motorB.backward()
elif c == 'd':
motorA.backward()
motorB.forward()
elif c == 'q':
motorA.stop()
motorB.stop()
finally:
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_settings)
|
"""
Testing the brand-new datasets from the tensorflow community for experimenting
on the ImageNet2012 dataset.
We identified several problems while working with the ImageNet dataset:
1. The dataset is not easy to download. Credentials (an email address) from a
well-known organization/university are required to get the download link.
2. The huge size of the dataset, namely "ILSVRC2012_img_train.tar" -> 138GB
and "ILSVRC2012_img_val.tar" -> 7GB.
3. Downloading and preparing the dataset for an ML algorithm takes a good
chunk of time.
4. There is no easy way to parallelize the consumption of data across GPUs
for model training.
--------------------------------------------------------------------------------
In this script, we show that the tensorflow datasets library tries to solve
most of the above-mentioned problems.
"""
import tensorflow as tf
import tensorflow_datasets as tfds
import numpy as np
import scipy
import scipy.misc
from scipy.misc import imsave
import data.imagenet_simclr.data as imagenet_input
tf.enable_eager_execution()
def main():
print("Demonstration for using Imagenet2012 dataset with tensorflow datset")
# List all the datasets provided in the tensorflow_datasets
# print(tfds.list_builders())
# Step 1: get a dataset builder for the required dataset
# dataset_name = "imagenet2012"
# if dataset_name in tfds.list_builders():
# imagenet_dataset_builder = tfds.builder(dataset_name)
# print("retrived " + dataset_name + " builder")
# else:
# return
imagenet_train, imagenet_eval = [
imagenet_input.ImageNetInput(
is_training=is_training,
data_dir="gs://imagenet_data/train/",
transpose_input=False, #params.transpose_input,
cache=False and is_training, # params.use_cache and is_training,
image_size=224, #params.image_size,
num_parallel_calls=1, # 8, params.num_parallel_calls,
include_background_label=False, #(params.num_label_classes == 1001),
use_bfloat16=False, #use_bfloat16,
)
for is_training in [True, False]
]
# # get all the information regarding dataset
# print(imagenet_dataset_builder.info)
# print("Image shape", imagenet_dataset_builder.info.features['image'].shape)
# print("class",imagenet_dataset_builder.info.features['label'].num_classes)
# print("classname",imagenet_dataset_builder.info.features['label'].names)
# print("NrTrain",imagenet_dataset_builder.info.splits['train'].num_examples)
# print("Val",imagenet_dataset_builder.info.splits['validation'].num_examples)
# # Download and prepare the dataset internally
# # The dataset should be downloaded to ~/tensorflow-datasets/download
# # but for Imagenet case, we need to manually download the dataset and
# # specify the manual_dir where the downloaded files are kept.
# manual_dataset_dir = "gs://serrelab/imagenet2012/"
# # The download_and_prepare function will assume that two files namely
# # ILSVRC2012_img_train.tar and ILSVRC2012_img_val.tar are present in
# # directory manual_dataset_dir + "/manual/imagenet2012"
# imagenet_download_config = tfds.download.DownloadConfig(
# manual_dir = manual_dataset_dir)
# # Conditionally, download config can be passed as second argument.
# imagenet_dataset_builder.download_and_prepare(
# download_dir = manual_dataset_dir)
# # Once this is complete (that just pre-process without downloading anything)
# # it will create a director "~/tensorflow_datasets/imagenet2012/2.0.0"
# # having 1000 train tfrecords and 5 validation tfrecords in addition to some
# # bookkeeping json and label txt files.
# # now, we get the tf.data.Dataset structure which tensorflow data-pipeline
# # understands and process in tf graph.
# imagenet_train = imagenet_dataset_builder.as_dataset(split=tfds.Split.TRAIN)
# assert isinstance(imagenet_train, tf.data.Dataset)
# imagenet_validation = imagenet_dataset_builder.as_dataset(
# split=tfds.Split.VALIDATION)
# assert isinstance(imagenet_validation, tf.data.Dataset)
imagenet_train_dataset = imagenet_train.input_fn({'batch_size':1})
# Now we can peek into the sample images present in the dataset with take
(imagenet_example,) = imagenet_train_dataset.take(1) # returns a dictionary
img, label = imagenet_example["image"], imagenet_example["label"]
# img and label are constant tensors, with numpy field containing numpy arry
print("Image_shape", img.numpy().shape)
# print("Label_shape", label.numpy().shape)
# print out the image file on the disk, and print the corresponding label
imsave("image1.png", img.numpy()[:,:,:3])
imsave("image2.png", img.numpy()[:,:,3:])
# print("label", label.numpy())
# From the tf.data.Datasets imagenet_train and imagenet_validation,
# the input pipeline can be created for tf training and serving.
if __name__ == "__main__":
main() |
import unittest
import datetime
import dwetl
from dwetl.reader.list_reader import ListReader
from dwetl.writer.list_writer import ListWriter
from dwetl.job_info import JobInfo
from dwetl.processor.ezproxy_processor import EzproxyProcessor
from tests import test_logger
import logging
import pdb
import pprint
class TestEzproxyProcessor(unittest.TestCase):
    maxDiff = None
@classmethod
def setUpClass(cls):
cls.logger = test_logger.logger
cls.sample_data = [{'em_create_dw_job_exectn_id': 1,
'em_create_dw_job_name': 'CopyStage1ToStage2',
'em_create_dw_job_version_no': '1.0.0',
'em_create_dw_prcsng_cycle_id': 1,
'em_create_tmstmp': datetime.datetime(2020, 5, 13, 15, 40, 10, 575382),
'em_create_user_id': 'thschone',
'in_ezp_sessns_snap_actv_sessns_cnt': 20,
'in_ezp_sessns_snap_tmstmp': '20200509-0000',
'in_ezp_sessns_virtual_hosts_cnt': 2718,
'in_mbr_lbry_cd': 'ub'}]
    def test_convert_timestamp(self):
result = EzproxyProcessor.convert_timestamp(self.sample_data[0])
expected_result = datetime.datetime.strptime('20200509-0000', '%Y%m%d-%H%M')
self.assertEqual(expected_result, result)
def test_library_dim_lookup(self):
result = EzproxyProcessor.library_dim_lookup(self.sample_data[0])
expected_result = 10
self.assertEqual(expected_result, result)
def test_clndr_dt_dim_lookup(self):
result = EzproxyProcessor.clndr_dt_dim_lookup(self.sample_data[0])
expected_result = 14740
self.assertEqual(expected_result, result)
def test_transform(self):
result = EzproxyProcessor.transform(self.sample_data[0], self.logger)
expected_keys = sorted([
'em_create_dw_prcsng_cycle_id', 'in_ezp_sessns_snap_tmstmp',
'in_mbr_lbry_cd',
't1_ezp_sessns_snap_actv_sessns_cnt',
't1_ezp_sessns_snap_tmstmp__ezp_sessns_snap_clndr_dt_dim_key', 't1_ezp_sessns_virtual_hosts_cnt',
't1_mbr_lbry_cd__ezp_sessns_snap_mbr_lbry_dim_key', 't2_ezp_sessns_snap_tmstmp__ezp_sessns_snap_tmstmp',
't3_ezp_sessns_snap_tmstmp__ezp_sessns_snap_time_of_day_dim_key'
])
self.assertEqual(expected_keys, sorted(list(result.keys())))
self.assertEqual(20, result['t1_ezp_sessns_snap_actv_sessns_cnt'])
self.assertEqual(14740, result['t1_ezp_sessns_snap_tmstmp__ezp_sessns_snap_clndr_dt_dim_key'])
self.assertEqual(2718, result['t1_ezp_sessns_virtual_hosts_cnt'])
self.assertEqual(10, result['t1_mbr_lbry_cd__ezp_sessns_snap_mbr_lbry_dim_key'])
self.assertEqual(datetime.datetime(2020, 5, 9, 0, 0), result['t2_ezp_sessns_snap_tmstmp__ezp_sessns_snap_tmstmp'])
def test_end_to_end(self):
writer = ListWriter()
error_writer = ListWriter()
job_info = JobInfo(-1, 'test_user', '1', '1')
reader = ListReader(self.sample_data)
ezproxy_processor = EzproxyProcessor(reader, writer, job_info, self.logger, error_writer)
ezproxy_processor.execute()
results = ezproxy_processor.writer.list
expected_keys = sorted([
't1_ezp_sessns_snap_actv_sessns_cnt',
't1_ezp_sessns_snap_tmstmp__ezp_sessns_snap_clndr_dt_dim_key', 't1_ezp_sessns_virtual_hosts_cnt',
't1_mbr_lbry_cd__ezp_sessns_snap_mbr_lbry_dim_key', 't2_ezp_sessns_snap_tmstmp__ezp_sessns_snap_tmstmp',
't3_ezp_sessns_snap_tmstmp__ezp_sessns_snap_time_of_day_dim_key','em_create_dw_prcsng_cycle_id',
'em_update_user_id', 'em_update_dw_prcsng_cycle_id',
'em_update_dw_job_version_no', 'em_update_dw_job_exectn_id',
'em_update_dw_job_name', 'em_update_tmstmp',
'in_ezp_sessns_snap_tmstmp', 'in_mbr_lbry_cd'])
self.assertEqual(expected_keys, sorted(list(results[0].keys())))
|
import asyncio
import binascii
import contextlib
import io
import time
from unittest import TestCase, skipIf
from aioquic import tls
from aioquic.buffer import UINT_VAR_MAX, Buffer, encode_uint_var
from aioquic.quic import events
from aioquic.quic.configuration import QuicConfiguration
from aioquic.quic.connection import (
QuicConnection,
QuicConnectionError,
QuicNetworkPath,
QuicReceiveContext,
)
from aioquic.quic.crypto import CryptoPair
from aioquic.quic.logger import QuicLogger
from aioquic.quic.packet import (
PACKET_TYPE_INITIAL,
QuicErrorCode,
QuicFrameType,
QuicTransportParameters,
encode_quic_retry,
encode_quic_version_negotiation,
push_quic_transport_parameters,
)
from aioquic.quic.packet_builder import QuicDeliveryState, QuicPacketBuilder
from aioquic.quic.recovery import QuicPacketPacer
from .utils import (
SERVER_CACERTFILE,
SERVER_CERTFILE,
SERVER_CERTFILE_WITH_CHAIN,
SERVER_KEYFILE,
SKIP_TESTS,
)
CLIENT_ADDR = ("1.2.3.4", 1234)
SERVER_ADDR = ("2.3.4.5", 4433)
class SessionTicketStore:
def __init__(self):
self.tickets = {}
def add(self, ticket):
self.tickets[ticket.ticket] = ticket
def pop(self, label):
return self.tickets.pop(label, None)
def client_receive_context(client, epoch=tls.Epoch.ONE_RTT):
return QuicReceiveContext(
epoch=epoch,
host_cid=client.host_cid,
network_path=client._network_paths[0],
quic_logger_frames=[],
time=asyncio.get_event_loop().time(),
)
def consume_events(connection):
while True:
event = connection.next_event()
if event is None:
break
def create_standalone_client(self, **client_options):
client = QuicConnection(
configuration=QuicConfiguration(
is_client=True, quic_logger=QuicLogger(), **client_options
)
)
client._ack_delay = 0
# kick-off handshake
client.connect(SERVER_ADDR, now=time.time())
self.assertEqual(drop(client), 1)
return client
@contextlib.contextmanager
def client_and_server(
client_kwargs={},
client_options={},
client_patch=lambda x: None,
handshake=True,
server_kwargs={},
server_certfile=SERVER_CERTFILE,
server_keyfile=SERVER_KEYFILE,
server_options={},
server_patch=lambda x: None,
):
client_configuration = QuicConfiguration(
is_client=True, quic_logger=QuicLogger(), **client_options
)
client_configuration.load_verify_locations(cafile=SERVER_CACERTFILE)
client = QuicConnection(configuration=client_configuration, **client_kwargs)
client._ack_delay = 0
disable_packet_pacing(client)
client_patch(client)
server_configuration = QuicConfiguration(
is_client=False, quic_logger=QuicLogger(), **server_options
)
server_configuration.load_cert_chain(server_certfile, server_keyfile)
server = QuicConnection(
configuration=server_configuration,
original_destination_connection_id=client.original_destination_connection_id,
**server_kwargs
)
server._ack_delay = 0
disable_packet_pacing(server)
server_patch(server)
# perform handshake
if handshake:
client.connect(SERVER_ADDR, now=time.time())
for i in range(3):
roundtrip(client, server)
yield client, server
# close
client.close()
server.close()
def disable_packet_pacing(connection):
class DummyPacketPacer(QuicPacketPacer):
def next_send_time(self, now):
return None
connection._loss._pacer = DummyPacketPacer()
def encode_transport_parameters(parameters: QuicTransportParameters) -> bytes:
buf = Buffer(capacity=512)
push_quic_transport_parameters(buf, parameters)
return buf.data
def sequence_numbers(connection_ids):
return list(map(lambda x: x.sequence_number, connection_ids))
def drop(sender):
"""
Drop datagrams from `sender`.
"""
return len(sender.datagrams_to_send(now=time.time()))
def roundtrip(sender, receiver):
"""
Send datagrams from `sender` to `receiver` and back.
"""
return (transfer(sender, receiver), transfer(receiver, sender))
def transfer(sender, receiver):
"""
Send datagrams from `sender` to `receiver`.
"""
datagrams = 0
from_addr = CLIENT_ADDR if sender._is_client else SERVER_ADDR
for data, addr in sender.datagrams_to_send(now=time.time()):
datagrams += 1
receiver.receive_datagram(data, from_addr, now=time.time())
return datagrams
class QuicConnectionTest(TestCase):
def check_handshake(self, client, server, alpn_protocol=None):
"""
Check handshake completed.
"""
event = client.next_event()
self.assertEqual(type(event), events.ProtocolNegotiated)
self.assertEqual(event.alpn_protocol, alpn_protocol)
event = client.next_event()
self.assertEqual(type(event), events.HandshakeCompleted)
self.assertEqual(event.alpn_protocol, alpn_protocol)
self.assertEqual(event.early_data_accepted, False)
self.assertEqual(event.session_resumed, False)
for i in range(7):
self.assertEqual(type(client.next_event()), events.ConnectionIdIssued)
self.assertIsNone(client.next_event())
event = server.next_event()
self.assertEqual(type(event), events.ProtocolNegotiated)
self.assertEqual(event.alpn_protocol, alpn_protocol)
event = server.next_event()
self.assertEqual(type(event), events.HandshakeCompleted)
self.assertEqual(event.alpn_protocol, alpn_protocol)
for i in range(7):
self.assertEqual(type(server.next_event()), events.ConnectionIdIssued)
self.assertIsNone(server.next_event())
def test_connect(self):
with client_and_server() as (client, server):
# check handshake completed
self.check_handshake(client=client, server=server)
# check each endpoint has available connection IDs for the peer
self.assertEqual(
sequence_numbers(client._peer_cid_available), [1, 2, 3, 4, 5, 6, 7]
)
self.assertEqual(
sequence_numbers(server._peer_cid_available), [1, 2, 3, 4, 5, 6, 7]
)
# client closes the connection
client.close()
self.assertEqual(transfer(client, server), 1)
# check connection closes on the client side
client.handle_timer(client.get_timer())
event = client.next_event()
self.assertEqual(type(event), events.ConnectionTerminated)
self.assertEqual(event.error_code, QuicErrorCode.NO_ERROR)
self.assertEqual(event.frame_type, None)
self.assertEqual(event.reason_phrase, "")
self.assertIsNone(client.next_event())
# check connection closes on the server side
server.handle_timer(server.get_timer())
event = server.next_event()
self.assertEqual(type(event), events.ConnectionTerminated)
self.assertEqual(event.error_code, QuicErrorCode.NO_ERROR)
self.assertEqual(event.frame_type, None)
self.assertEqual(event.reason_phrase, "")
self.assertIsNone(server.next_event())
# check client log
client_log = client.configuration.quic_logger.to_dict()
self.assertGreater(len(client_log["traces"][0]["events"]), 20)
# check server log
server_log = server.configuration.quic_logger.to_dict()
self.assertGreater(len(server_log["traces"][0]["events"]), 20)
def test_connect_with_alpn(self):
with client_and_server(
client_options={"alpn_protocols": ["h3-25", "hq-25"]},
server_options={"alpn_protocols": ["hq-25"]},
) as (client, server):
# check handshake completed
self.check_handshake(client=client, server=server, alpn_protocol="hq-25")
def test_connect_with_secrets_log(self):
client_log_file = io.StringIO()
server_log_file = io.StringIO()
with client_and_server(
client_options={"secrets_log_file": client_log_file},
server_options={"secrets_log_file": server_log_file},
) as (client, server):
# check handshake completed
self.check_handshake(client=client, server=server)
# check secrets were logged
client_log = client_log_file.getvalue()
server_log = server_log_file.getvalue()
self.assertEqual(client_log, server_log)
labels = []
for line in client_log.splitlines():
labels.append(line.split()[0])
self.assertEqual(
labels,
[
"QUIC_SERVER_HANDSHAKE_TRAFFIC_SECRET",
"QUIC_CLIENT_HANDSHAKE_TRAFFIC_SECRET",
"QUIC_SERVER_TRAFFIC_SECRET_0",
"QUIC_CLIENT_TRAFFIC_SECRET_0",
],
)
def test_connect_with_cert_chain(self):
with client_and_server(server_certfile=SERVER_CERTFILE_WITH_CHAIN) as (
client,
server,
):
# check handshake completed
self.check_handshake(client=client, server=server)
def test_connect_with_cipher_suite_aes128(self):
with client_and_server(
client_options={"cipher_suites": [tls.CipherSuite.AES_128_GCM_SHA256]}
) as (client, server):
# check handshake completed
self.check_handshake(client=client, server=server)
# check selected cipher suite
self.assertEqual(
client.tls.key_schedule.cipher_suite, tls.CipherSuite.AES_128_GCM_SHA256
)
self.assertEqual(
server.tls.key_schedule.cipher_suite, tls.CipherSuite.AES_128_GCM_SHA256
)
def test_connect_with_cipher_suite_aes256(self):
with client_and_server(
client_options={"cipher_suites": [tls.CipherSuite.AES_256_GCM_SHA384]}
) as (client, server):
# check handshake completed
self.check_handshake(client=client, server=server)
# check selected cipher suite
self.assertEqual(
client.tls.key_schedule.cipher_suite, tls.CipherSuite.AES_256_GCM_SHA384
)
self.assertEqual(
server.tls.key_schedule.cipher_suite, tls.CipherSuite.AES_256_GCM_SHA384
)
@skipIf("chacha20" in SKIP_TESTS, "Skipping chacha20 tests")
def test_connect_with_cipher_suite_chacha20(self):
with client_and_server(
client_options={"cipher_suites": [tls.CipherSuite.CHACHA20_POLY1305_SHA256]}
) as (client, server):
# check handshake completed
self.check_handshake(client=client, server=server)
# check selected cipher suite
self.assertEqual(
client.tls.key_schedule.cipher_suite,
tls.CipherSuite.CHACHA20_POLY1305_SHA256,
)
self.assertEqual(
server.tls.key_schedule.cipher_suite,
tls.CipherSuite.CHACHA20_POLY1305_SHA256,
)
def test_connect_with_loss_1(self):
"""
        Check connection is established even if the client's INITIAL is lost.
"""
def datagram_sizes(items):
return [len(x[0]) for x in items]
client_configuration = QuicConfiguration(is_client=True)
client_configuration.load_verify_locations(cafile=SERVER_CACERTFILE)
client = QuicConnection(configuration=client_configuration)
client._ack_delay = 0
server_configuration = QuicConfiguration(is_client=False)
server_configuration.load_cert_chain(SERVER_CERTFILE, SERVER_KEYFILE)
server = QuicConnection(
configuration=server_configuration,
original_destination_connection_id=client.original_destination_connection_id,
)
server._ack_delay = 0
# client sends INITIAL
now = 0.0
client.connect(SERVER_ADDR, now=now)
items = client.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [1280])
self.assertEqual(client.get_timer(), 1.0)
# INITIAL is lost
now = 1.0
client.handle_timer(now=now)
items = client.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [1280])
self.assertEqual(client.get_timer(), 3.0)
# server receives INITIAL, sends INITIAL + HANDSHAKE
now = 1.1
server.receive_datagram(items[0][0], CLIENT_ADDR, now=now)
items = server.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [1280, 1050])
self.assertEqual(server.get_timer(), 2.1)
self.assertEqual(len(server._loss.spaces[0].sent_packets), 1)
self.assertEqual(len(server._loss.spaces[1].sent_packets), 2)
self.assertEqual(type(server.next_event()), events.ProtocolNegotiated)
self.assertIsNone(server.next_event())
# handshake continues normally
now = 1.2
client.receive_datagram(items[0][0], SERVER_ADDR, now=now)
client.receive_datagram(items[1][0], SERVER_ADDR, now=now)
items = client.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [376])
self.assertAlmostEqual(client.get_timer(), 1.825)
self.assertEqual(type(client.next_event()), events.ProtocolNegotiated)
self.assertEqual(type(client.next_event()), events.HandshakeCompleted)
self.assertEqual(type(client.next_event()), events.ConnectionIdIssued)
now = 1.3
server.receive_datagram(items[0][0], CLIENT_ADDR, now=now)
items = server.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [229])
self.assertAlmostEqual(server.get_timer(), 1.825)
self.assertEqual(len(server._loss.spaces[0].sent_packets), 0)
self.assertEqual(len(server._loss.spaces[1].sent_packets), 0)
self.assertEqual(type(server.next_event()), events.HandshakeCompleted)
self.assertEqual(type(server.next_event()), events.ConnectionIdIssued)
now = 1.4
client.receive_datagram(items[0][0], SERVER_ADDR, now=now)
items = client.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [32])
self.assertAlmostEqual(client.get_timer(), 61.4) # idle timeout
def test_connect_with_loss_2(self):
def datagram_sizes(items):
return [len(x[0]) for x in items]
client_configuration = QuicConfiguration(is_client=True)
client_configuration.load_verify_locations(cafile=SERVER_CACERTFILE)
client = QuicConnection(configuration=client_configuration)
client._ack_delay = 0
server_configuration = QuicConfiguration(is_client=False)
server_configuration.load_cert_chain(SERVER_CERTFILE, SERVER_KEYFILE)
server = QuicConnection(
configuration=server_configuration,
original_destination_connection_id=client.original_destination_connection_id,
)
server._ack_delay = 0
# client sends INITIAL
now = 0.0
client.connect(SERVER_ADDR, now=now)
items = client.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [1280])
self.assertEqual(client.get_timer(), 1.0)
# server receives INITIAL, sends INITIAL + HANDSHAKE but second datagram is lost
now = 0.1
server.receive_datagram(items[0][0], CLIENT_ADDR, now=now)
items = server.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [1280, 1050])
self.assertEqual(server.get_timer(), 1.1)
self.assertEqual(len(server._loss.spaces[0].sent_packets), 1)
self.assertEqual(len(server._loss.spaces[1].sent_packets), 2)
# client only receives first datagram and sends ACKS
now = 0.2
client.receive_datagram(items[0][0], SERVER_ADDR, now=now)
items = client.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [97])
self.assertAlmostEqual(client.get_timer(), 0.625)
self.assertEqual(type(client.next_event()), events.ProtocolNegotiated)
self.assertIsNone(client.next_event())
# client PTO - HANDSHAKE PING
now = client.get_timer() # ~0.625
client.handle_timer(now=now)
items = client.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [44])
self.assertAlmostEqual(client.get_timer(), 1.875)
# server receives PING, discards INITIAL and sends ACK
now = 0.725
server.receive_datagram(items[0][0], CLIENT_ADDR, now=now)
items = server.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [48])
self.assertAlmostEqual(server.get_timer(), 1.1)
self.assertEqual(len(server._loss.spaces[0].sent_packets), 0)
self.assertEqual(len(server._loss.spaces[1].sent_packets), 3)
self.assertEqual(type(server.next_event()), events.ProtocolNegotiated)
self.assertIsNone(server.next_event())
# ACKs are lost, server retransmits HANDSHAKE
now = server.get_timer()
server.handle_timer(now=now)
items = server.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [1280, 874])
self.assertAlmostEqual(server.get_timer(), 3.1)
self.assertEqual(len(server._loss.spaces[0].sent_packets), 0)
self.assertEqual(len(server._loss.spaces[1].sent_packets), 3)
self.assertIsNone(server.next_event())
# handshake continues normally
now = 1.2
client.receive_datagram(items[0][0], SERVER_ADDR, now=now)
client.receive_datagram(items[1][0], SERVER_ADDR, now=now)
items = client.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [329])
self.assertAlmostEqual(client.get_timer(), 2.45)
self.assertEqual(type(client.next_event()), events.HandshakeCompleted)
self.assertEqual(type(client.next_event()), events.ConnectionIdIssued)
now = 1.3
server.receive_datagram(items[0][0], CLIENT_ADDR, now=now)
items = server.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [229])
self.assertAlmostEqual(server.get_timer(), 1.925)
self.assertEqual(type(server.next_event()), events.HandshakeCompleted)
self.assertEqual(type(server.next_event()), events.ConnectionIdIssued)
now = 1.4
client.receive_datagram(items[0][0], SERVER_ADDR, now=now)
items = client.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [32])
self.assertAlmostEqual(client.get_timer(), 61.4) # idle timeout
def test_connect_with_loss_3(self):
def datagram_sizes(items):
return [len(x[0]) for x in items]
client_configuration = QuicConfiguration(is_client=True)
client_configuration.load_verify_locations(cafile=SERVER_CACERTFILE)
client = QuicConnection(configuration=client_configuration)
client._ack_delay = 0
server_configuration = QuicConfiguration(is_client=False)
server_configuration.load_cert_chain(SERVER_CERTFILE, SERVER_KEYFILE)
server = QuicConnection(
configuration=server_configuration,
original_destination_connection_id=client.original_destination_connection_id,
)
server._ack_delay = 0
# client sends INITIAL
now = 0.0
client.connect(SERVER_ADDR, now=now)
items = client.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [1280])
self.assertEqual(client.get_timer(), 1.0)
# server receives INITIAL, sends INITIAL + HANDSHAKE
now = 0.1
server.receive_datagram(items[0][0], CLIENT_ADDR, now=now)
items = server.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [1280, 1050])
self.assertEqual(server.get_timer(), 1.1)
self.assertEqual(len(server._loss.spaces[0].sent_packets), 1)
self.assertEqual(len(server._loss.spaces[1].sent_packets), 2)
# client receives INITIAL + HANDSHAKE
now = 0.2
client.receive_datagram(items[0][0], SERVER_ADDR, now=now)
client.receive_datagram(items[1][0], SERVER_ADDR, now=now)
items = client.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [376])
self.assertAlmostEqual(client.get_timer(), 0.825)
self.assertEqual(type(client.next_event()), events.ProtocolNegotiated)
self.assertEqual(type(client.next_event()), events.HandshakeCompleted)
self.assertEqual(type(client.next_event()), events.ConnectionIdIssued)
# server completes handshake
now = 0.3
server.receive_datagram(items[0][0], CLIENT_ADDR, now=now)
items = server.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [229])
self.assertAlmostEqual(server.get_timer(), 0.825)
self.assertEqual(len(server._loss.spaces[0].sent_packets), 0)
self.assertEqual(len(server._loss.spaces[1].sent_packets), 0)
self.assertEqual(type(server.next_event()), events.ProtocolNegotiated)
self.assertEqual(type(server.next_event()), events.HandshakeCompleted)
self.assertEqual(type(server.next_event()), events.ConnectionIdIssued)
# server PTO - 1-RTT PING
now = 0.825
server.handle_timer(now=now)
items = server.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [29])
self.assertAlmostEqual(server.get_timer(), 1.875)
# client receives PING, sends ACK
now = 0.9
client.receive_datagram(items[0][0], SERVER_ADDR, now=now)
items = client.datagrams_to_send(now=now)
self.assertEqual(datagram_sizes(items), [32])
self.assertAlmostEqual(client.get_timer(), 0.825)
# server receives ACK, retransmits HANDSHAKE_DONE
now = 1.0
self.assertFalse(server._handshake_done_pending)
server.receive_datagram(items[0][0], CLIENT_ADDR, now=now)
self.assertTrue(server._handshake_done_pending)
items = server.datagrams_to_send(now=now)
self.assertFalse(server._handshake_done_pending)
self.assertEqual(datagram_sizes(items), [224])
def test_connect_with_quantum_readiness(self):
with client_and_server(client_options={"quantum_readiness_test": True},) as (
client,
server,
):
stream_id = client.get_next_available_stream_id()
client.send_stream_data(stream_id, b"hello")
self.assertEqual(roundtrip(client, server), (1, 1))
received = None
while True:
event = server.next_event()
if isinstance(event, events.StreamDataReceived):
received = event.data
elif event is None:
break
self.assertEqual(received, b"hello")
def test_connect_with_0rtt(self):
client_ticket = None
ticket_store = SessionTicketStore()
def save_session_ticket(ticket):
nonlocal client_ticket
client_ticket = ticket
with client_and_server(
client_kwargs={"session_ticket_handler": save_session_ticket},
server_kwargs={"session_ticket_handler": ticket_store.add},
) as (client, server):
pass
with client_and_server(
client_options={"session_ticket": client_ticket},
server_kwargs={"session_ticket_fetcher": ticket_store.pop},
handshake=False,
) as (client, server):
client.connect(SERVER_ADDR, now=time.time())
stream_id = client.get_next_available_stream_id()
client.send_stream_data(stream_id, b"hello")
self.assertEqual(roundtrip(client, server), (2, 1))
event = server.next_event()
self.assertEqual(type(event), events.ProtocolNegotiated)
event = server.next_event()
self.assertEqual(type(event), events.StreamDataReceived)
self.assertEqual(event.data, b"hello")
def test_connect_with_0rtt_bad_max_early_data(self):
client_ticket = None
ticket_store = SessionTicketStore()
def patch(server):
"""
Patch server's TLS initialization to set an invalid
max_early_data value.
"""
real_initialize = server._initialize
def patched_initialize(peer_cid: bytes):
real_initialize(peer_cid)
server.tls._max_early_data = 12345
server._initialize = patched_initialize
def save_session_ticket(ticket):
nonlocal client_ticket
client_ticket = ticket
with client_and_server(
client_kwargs={"session_ticket_handler": save_session_ticket},
server_kwargs={"session_ticket_handler": ticket_store.add},
server_patch=patch,
) as (client, server):
# check handshake failed
event = client.next_event()
self.assertIsNone(event)
def test_change_connection_id(self):
with client_and_server() as (client, server):
self.assertEqual(
sequence_numbers(client._peer_cid_available), [1, 2, 3, 4, 5, 6, 7]
)
# the client changes connection ID
client.change_connection_id()
self.assertEqual(transfer(client, server), 1)
self.assertEqual(
sequence_numbers(client._peer_cid_available), [2, 3, 4, 5, 6, 7]
)
# the server provides a new connection ID
self.assertEqual(transfer(server, client), 1)
self.assertEqual(
sequence_numbers(client._peer_cid_available), [2, 3, 4, 5, 6, 7, 8]
)
def test_change_connection_id_retransmit_new_connection_id(self):
with client_and_server() as (client, server):
self.assertEqual(
sequence_numbers(client._peer_cid_available), [1, 2, 3, 4, 5, 6, 7]
)
# the client changes connection ID
client.change_connection_id()
self.assertEqual(transfer(client, server), 1)
self.assertEqual(
sequence_numbers(client._peer_cid_available), [2, 3, 4, 5, 6, 7]
)
# the server provides a new connection ID, NEW_CONNECTION_ID is lost
self.assertEqual(drop(server), 1)
self.assertEqual(
sequence_numbers(client._peer_cid_available), [2, 3, 4, 5, 6, 7]
)
# NEW_CONNECTION_ID is retransmitted
server._on_new_connection_id_delivery(
QuicDeliveryState.LOST, server._host_cids[-1]
)
self.assertEqual(transfer(server, client), 1)
self.assertEqual(
sequence_numbers(client._peer_cid_available), [2, 3, 4, 5, 6, 7, 8]
)
def test_change_connection_id_retransmit_retire_connection_id(self):
with client_and_server() as (client, server):
self.assertEqual(
sequence_numbers(client._peer_cid_available), [1, 2, 3, 4, 5, 6, 7]
)
# the client changes connection ID, RETIRE_CONNECTION_ID is lost
client.change_connection_id()
self.assertEqual(drop(client), 1)
self.assertEqual(
sequence_numbers(client._peer_cid_available), [2, 3, 4, 5, 6, 7]
)
# RETIRE_CONNECTION_ID is retransmitted
client._on_retire_connection_id_delivery(QuicDeliveryState.LOST, 0)
self.assertEqual(transfer(client, server), 1)
# the server provides a new connection ID
self.assertEqual(transfer(server, client), 1)
self.assertEqual(
sequence_numbers(client._peer_cid_available), [2, 3, 4, 5, 6, 7, 8]
)
def test_get_next_available_stream_id(self):
with client_and_server() as (client, server):
# client
stream_id = client.get_next_available_stream_id()
self.assertEqual(stream_id, 0)
client.send_stream_data(stream_id, b"hello")
stream_id = client.get_next_available_stream_id()
self.assertEqual(stream_id, 4)
client.send_stream_data(stream_id, b"hello")
stream_id = client.get_next_available_stream_id(is_unidirectional=True)
self.assertEqual(stream_id, 2)
client.send_stream_data(stream_id, b"hello")
stream_id = client.get_next_available_stream_id(is_unidirectional=True)
self.assertEqual(stream_id, 6)
client.send_stream_data(stream_id, b"hello")
# server
stream_id = server.get_next_available_stream_id()
self.assertEqual(stream_id, 1)
server.send_stream_data(stream_id, b"hello")
stream_id = server.get_next_available_stream_id()
self.assertEqual(stream_id, 5)
server.send_stream_data(stream_id, b"hello")
stream_id = server.get_next_available_stream_id(is_unidirectional=True)
self.assertEqual(stream_id, 3)
server.send_stream_data(stream_id, b"hello")
stream_id = server.get_next_available_stream_id(is_unidirectional=True)
self.assertEqual(stream_id, 7)
server.send_stream_data(stream_id, b"hello")
def test_datagram_frame(self):
with client_and_server(
client_options={"max_datagram_frame_size": 65536},
server_options={"max_datagram_frame_size": 65536},
) as (client, server):
# check handshake completed
self.check_handshake(client=client, server=server, alpn_protocol=None)
# send datagram
client.send_datagram_frame(b"hello")
self.assertEqual(transfer(client, server), 1)
event = server.next_event()
self.assertEqual(type(event), events.DatagramFrameReceived)
self.assertEqual(event.data, b"hello")
def test_datagram_frame_2(self):
# payload which exactly fills an entire packet
payload = b"Z" * 1250
with client_and_server(
client_options={"max_datagram_frame_size": 65536},
server_options={"max_datagram_frame_size": 65536},
) as (client, server):
# check handshake completed
self.check_handshake(client=client, server=server, alpn_protocol=None)
# queue 20 datagrams
for i in range(20):
client.send_datagram_frame(payload)
            # due to congestion control, the client can only send 11 datagrams
self.assertEqual(transfer(client, server), 11)
for i in range(11):
event = server.next_event()
self.assertEqual(type(event), events.DatagramFrameReceived)
self.assertEqual(event.data, payload)
# server sends ACK
self.assertEqual(transfer(server, client), 1)
# client sends remaining datagrams
self.assertEqual(transfer(client, server), 9)
for i in range(9):
event = server.next_event()
self.assertEqual(type(event), events.DatagramFrameReceived)
self.assertEqual(event.data, payload)
def test_decryption_error(self):
with client_and_server() as (client, server):
# mess with encryption key
server._cryptos[tls.Epoch.ONE_RTT].send.setup(
cipher_suite=tls.CipherSuite.AES_128_GCM_SHA256,
secret=bytes(48),
version=server._version,
)
# server sends close
server.close(error_code=QuicErrorCode.NO_ERROR)
for data, addr in server.datagrams_to_send(now=time.time()):
client.receive_datagram(data, SERVER_ADDR, now=time.time())
def test_tls_error(self):
def patch(client):
real_initialize = client._initialize
def patched_initialize(peer_cid: bytes):
real_initialize(peer_cid)
client.tls._supported_versions = [tls.TLS_VERSION_1_3_DRAFT_28]
client._initialize = patched_initialize
# handshake fails
with client_and_server(client_patch=patch) as (client, server):
timer_at = server.get_timer()
server.handle_timer(timer_at)
event = server.next_event()
self.assertEqual(type(event), events.ConnectionTerminated)
self.assertEqual(event.error_code, 326)
self.assertEqual(event.frame_type, QuicFrameType.CRYPTO)
self.assertEqual(event.reason_phrase, "No supported protocol version")
def test_receive_datagram_garbage(self):
client = create_standalone_client(self)
datagram = binascii.unhexlify("c00000000080")
client.receive_datagram(datagram, SERVER_ADDR, now=time.time())
def test_receive_datagram_reserved_bits_non_zero(self):
client = create_standalone_client(self)
builder = QuicPacketBuilder(
host_cid=client._peer_cid,
is_client=False,
peer_cid=client.host_cid,
version=client._version,
)
crypto = CryptoPair()
crypto.setup_initial(client._peer_cid, is_client=False, version=client._version)
crypto.encrypt_packet_real = crypto.encrypt_packet
def encrypt_packet(plain_header, plain_payload, packet_number):
# mess with reserved bits
plain_header = bytes([plain_header[0] | 0x0C]) + plain_header[1:]
return crypto.encrypt_packet_real(
plain_header, plain_payload, packet_number
)
crypto.encrypt_packet = encrypt_packet
builder.start_packet(PACKET_TYPE_INITIAL, crypto)
buf = builder.start_frame(QuicFrameType.PADDING)
buf.push_bytes(bytes(builder.remaining_flight_space))
for datagram in builder.flush()[0]:
client.receive_datagram(datagram, SERVER_ADDR, now=time.time())
self.assertEqual(drop(client), 1)
self.assertEqual(
client._close_event,
events.ConnectionTerminated(
error_code=QuicErrorCode.PROTOCOL_VIOLATION,
frame_type=None,
reason_phrase="Reserved bits must be zero",
),
)
def test_receive_datagram_wrong_version(self):
client = create_standalone_client(self)
builder = QuicPacketBuilder(
host_cid=client._peer_cid,
is_client=False,
peer_cid=client.host_cid,
version=0xFF000011, # DRAFT_16
)
crypto = CryptoPair()
crypto.setup_initial(client._peer_cid, is_client=False, version=client._version)
builder.start_packet(PACKET_TYPE_INITIAL, crypto)
buf = builder.start_frame(QuicFrameType.PADDING)
buf.push_bytes(bytes(builder.remaining_flight_space))
for datagram in builder.flush()[0]:
client.receive_datagram(datagram, SERVER_ADDR, now=time.time())
self.assertEqual(drop(client), 0)
def test_receive_datagram_retry(self):
client = create_standalone_client(self)
client.receive_datagram(
encode_quic_retry(
version=client._version,
source_cid=binascii.unhexlify("85abb547bf28be97"),
destination_cid=client.host_cid,
original_destination_cid=client._peer_cid,
retry_token=bytes(16),
),
SERVER_ADDR,
now=time.time(),
)
self.assertEqual(drop(client), 1)
def test_receive_datagram_retry_wrong_destination_cid(self):
client = create_standalone_client(self)
client.receive_datagram(
encode_quic_retry(
version=client._version,
source_cid=binascii.unhexlify("85abb547bf28be97"),
destination_cid=binascii.unhexlify("c98343fe8f5f0ff4"),
original_destination_cid=client._peer_cid,
retry_token=bytes(16),
),
SERVER_ADDR,
now=time.time(),
)
self.assertEqual(drop(client), 0)
def test_handle_ack_frame_ecn(self):
client = create_standalone_client(self)
client._handle_ack_frame(
client_receive_context(client),
QuicFrameType.ACK_ECN,
Buffer(data=b"\x00\x02\x00\x00\x00\x00\x00"),
)
def test_handle_connection_close_frame(self):
with client_and_server() as (client, server):
server.close(
error_code=QuicErrorCode.PROTOCOL_VIOLATION,
frame_type=QuicFrameType.ACK,
reason_phrase="illegal ACK frame",
)
self.assertEqual(roundtrip(server, client), (1, 0))
self.assertEqual(
client._close_event,
events.ConnectionTerminated(
error_code=QuicErrorCode.PROTOCOL_VIOLATION,
frame_type=QuicFrameType.ACK,
reason_phrase="illegal ACK frame",
),
)
def test_handle_connection_close_frame_app(self):
with client_and_server() as (client, server):
server.close(error_code=QuicErrorCode.NO_ERROR, reason_phrase="goodbye")
self.assertEqual(roundtrip(server, client), (1, 0))
self.assertEqual(
client._close_event,
events.ConnectionTerminated(
error_code=QuicErrorCode.NO_ERROR,
frame_type=None,
reason_phrase="goodbye",
),
)
def test_handle_connection_close_frame_app_not_utf8(self):
client = create_standalone_client(self)
client._handle_connection_close_frame(
client_receive_context(client),
QuicFrameType.APPLICATION_CLOSE,
Buffer(data=binascii.unhexlify("0008676f6f6462798200")),
)
self.assertEqual(
client._close_event,
events.ConnectionTerminated(
error_code=QuicErrorCode.NO_ERROR, frame_type=None, reason_phrase="",
),
)
def test_handle_crypto_frame_over_largest_offset(self):
with client_and_server() as (client, server):
# client receives offset + length > 2^62 - 1
with self.assertRaises(QuicConnectionError) as cm:
client._handle_crypto_frame(
client_receive_context(client),
QuicFrameType.CRYPTO,
Buffer(data=encode_uint_var(UINT_VAR_MAX) + encode_uint_var(1)),
)
self.assertEqual(
cm.exception.error_code, QuicErrorCode.FRAME_ENCODING_ERROR
)
self.assertEqual(cm.exception.frame_type, QuicFrameType.CRYPTO)
self.assertEqual(
cm.exception.reason_phrase, "offset + length cannot exceed 2^62 - 1"
)
def test_handle_data_blocked_frame(self):
with client_and_server() as (client, server):
# client receives DATA_BLOCKED: 12345
client._handle_data_blocked_frame(
client_receive_context(client),
QuicFrameType.DATA_BLOCKED,
Buffer(data=encode_uint_var(12345)),
)
def test_handle_datagram_frame(self):
client = create_standalone_client(self, max_datagram_frame_size=6)
client._handle_datagram_frame(
client_receive_context(client),
QuicFrameType.DATAGRAM,
Buffer(data=b"hello"),
)
self.assertEqual(
client.next_event(), events.DatagramFrameReceived(data=b"hello")
)
def test_handle_datagram_frame_not_allowed(self):
client = create_standalone_client(self, max_datagram_frame_size=None)
with self.assertRaises(QuicConnectionError) as cm:
client._handle_datagram_frame(
client_receive_context(client),
QuicFrameType.DATAGRAM,
Buffer(data=b"hello"),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.PROTOCOL_VIOLATION)
self.assertEqual(cm.exception.frame_type, QuicFrameType.DATAGRAM)
self.assertEqual(cm.exception.reason_phrase, "Unexpected DATAGRAM frame")
def test_handle_datagram_frame_too_large(self):
client = create_standalone_client(self, max_datagram_frame_size=5)
with self.assertRaises(QuicConnectionError) as cm:
client._handle_datagram_frame(
client_receive_context(client),
QuicFrameType.DATAGRAM,
Buffer(data=b"hello"),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.PROTOCOL_VIOLATION)
self.assertEqual(cm.exception.frame_type, QuicFrameType.DATAGRAM)
self.assertEqual(cm.exception.reason_phrase, "Unexpected DATAGRAM frame")
def test_handle_datagram_frame_with_length(self):
client = create_standalone_client(self, max_datagram_frame_size=7)
client._handle_datagram_frame(
client_receive_context(client),
QuicFrameType.DATAGRAM_WITH_LENGTH,
Buffer(data=b"\x05hellojunk"),
)
self.assertEqual(
client.next_event(), events.DatagramFrameReceived(data=b"hello")
)
def test_handle_datagram_frame_with_length_not_allowed(self):
client = create_standalone_client(self, max_datagram_frame_size=None)
with self.assertRaises(QuicConnectionError) as cm:
client._handle_datagram_frame(
client_receive_context(client),
QuicFrameType.DATAGRAM_WITH_LENGTH,
Buffer(data=b"\x05hellojunk"),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.PROTOCOL_VIOLATION)
self.assertEqual(cm.exception.frame_type, QuicFrameType.DATAGRAM_WITH_LENGTH)
self.assertEqual(cm.exception.reason_phrase, "Unexpected DATAGRAM frame")
def test_handle_datagram_frame_with_length_too_large(self):
client = create_standalone_client(self, max_datagram_frame_size=6)
with self.assertRaises(QuicConnectionError) as cm:
client._handle_datagram_frame(
client_receive_context(client),
QuicFrameType.DATAGRAM_WITH_LENGTH,
Buffer(data=b"\x05hellojunk"),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.PROTOCOL_VIOLATION)
self.assertEqual(cm.exception.frame_type, QuicFrameType.DATAGRAM_WITH_LENGTH)
self.assertEqual(cm.exception.reason_phrase, "Unexpected DATAGRAM frame")
def test_handle_handshake_done_not_allowed(self):
with client_and_server() as (client, server):
# server receives HANDSHAKE_DONE frame
with self.assertRaises(QuicConnectionError) as cm:
server._handle_handshake_done_frame(
client_receive_context(server),
QuicFrameType.HANDSHAKE_DONE,
Buffer(data=b""),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.PROTOCOL_VIOLATION)
self.assertEqual(cm.exception.frame_type, QuicFrameType.HANDSHAKE_DONE)
self.assertEqual(
cm.exception.reason_phrase,
"Clients must not send HANDSHAKE_DONE frames",
)
def test_handle_max_data_frame(self):
with client_and_server() as (client, server):
self.assertEqual(client._remote_max_data, 1048576)
# client receives MAX_DATA raising limit
client._handle_max_data_frame(
client_receive_context(client),
QuicFrameType.MAX_DATA,
Buffer(data=encode_uint_var(1048577)),
)
self.assertEqual(client._remote_max_data, 1048577)
def test_handle_max_stream_data_frame(self):
with client_and_server() as (client, server):
# client creates bidirectional stream 0
stream = client._create_stream(stream_id=0)
self.assertEqual(stream.max_stream_data_remote, 1048576)
# client receives MAX_STREAM_DATA raising limit
client._handle_max_stream_data_frame(
client_receive_context(client),
QuicFrameType.MAX_STREAM_DATA,
Buffer(data=b"\x00" + encode_uint_var(1048577)),
)
self.assertEqual(stream.max_stream_data_remote, 1048577)
# client receives MAX_STREAM_DATA lowering limit
client._handle_max_stream_data_frame(
client_receive_context(client),
QuicFrameType.MAX_STREAM_DATA,
Buffer(data=b"\x00" + encode_uint_var(1048575)),
)
self.assertEqual(stream.max_stream_data_remote, 1048577)
def test_handle_max_stream_data_frame_receive_only(self):
with client_and_server() as (client, server):
# server creates unidirectional stream 3
server.send_stream_data(stream_id=3, data=b"hello")
# client receives MAX_STREAM_DATA: 3, 1
with self.assertRaises(QuicConnectionError) as cm:
client._handle_max_stream_data_frame(
client_receive_context(client),
QuicFrameType.MAX_STREAM_DATA,
Buffer(data=b"\x03\x01"),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.STREAM_STATE_ERROR)
self.assertEqual(cm.exception.frame_type, QuicFrameType.MAX_STREAM_DATA)
self.assertEqual(cm.exception.reason_phrase, "Stream is receive-only")
def test_handle_max_streams_bidi_frame(self):
with client_and_server() as (client, server):
self.assertEqual(client._remote_max_streams_bidi, 128)
# client receives MAX_STREAMS_BIDI raising limit
client._handle_max_streams_bidi_frame(
client_receive_context(client),
QuicFrameType.MAX_STREAMS_BIDI,
Buffer(data=encode_uint_var(129)),
)
self.assertEqual(client._remote_max_streams_bidi, 129)
# client receives MAX_STREAMS_BIDI lowering limit
client._handle_max_streams_bidi_frame(
client_receive_context(client),
QuicFrameType.MAX_STREAMS_BIDI,
Buffer(data=encode_uint_var(127)),
)
self.assertEqual(client._remote_max_streams_bidi, 129)
def test_handle_max_streams_uni_frame(self):
with client_and_server() as (client, server):
self.assertEqual(client._remote_max_streams_uni, 128)
# client receives MAX_STREAMS_UNI raising limit
client._handle_max_streams_uni_frame(
client_receive_context(client),
QuicFrameType.MAX_STREAMS_UNI,
Buffer(data=encode_uint_var(129)),
)
self.assertEqual(client._remote_max_streams_uni, 129)
            # client receives MAX_STREAMS_UNI lowering limit
client._handle_max_streams_uni_frame(
client_receive_context(client),
QuicFrameType.MAX_STREAMS_UNI,
Buffer(data=encode_uint_var(127)),
)
self.assertEqual(client._remote_max_streams_uni, 129)
def test_handle_new_token_frame(self):
with client_and_server() as (client, server):
# client receives NEW_TOKEN
client._handle_new_token_frame(
client_receive_context(client),
QuicFrameType.NEW_TOKEN,
Buffer(data=binascii.unhexlify("080102030405060708")),
)
def test_handle_new_token_frame_from_client(self):
with client_and_server() as (client, server):
# server receives NEW_TOKEN
with self.assertRaises(QuicConnectionError) as cm:
server._handle_new_token_frame(
client_receive_context(client),
QuicFrameType.NEW_TOKEN,
Buffer(data=binascii.unhexlify("080102030405060708")),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.PROTOCOL_VIOLATION)
self.assertEqual(cm.exception.frame_type, QuicFrameType.NEW_TOKEN)
self.assertEqual(
cm.exception.reason_phrase, "Clients must not send NEW_TOKEN frames"
)
def test_handle_path_challenge_frame(self):
with client_and_server() as (client, server):
# client changes address and sends some data
client.send_stream_data(0, b"01234567")
for data, addr in client.datagrams_to_send(now=time.time()):
server.receive_datagram(data, ("1.2.3.4", 2345), now=time.time())
# check paths
self.assertEqual(len(server._network_paths), 2)
self.assertEqual(server._network_paths[0].addr, ("1.2.3.4", 2345))
self.assertFalse(server._network_paths[0].is_validated)
self.assertEqual(server._network_paths[1].addr, ("1.2.3.4", 1234))
self.assertTrue(server._network_paths[1].is_validated)
# server sends PATH_CHALLENGE and receives PATH_RESPONSE
for data, addr in server.datagrams_to_send(now=time.time()):
client.receive_datagram(data, SERVER_ADDR, now=time.time())
for data, addr in client.datagrams_to_send(now=time.time()):
server.receive_datagram(data, ("1.2.3.4", 2345), now=time.time())
# check paths
self.assertEqual(server._network_paths[0].addr, ("1.2.3.4", 2345))
self.assertTrue(server._network_paths[0].is_validated)
self.assertEqual(server._network_paths[1].addr, ("1.2.3.4", 1234))
self.assertTrue(server._network_paths[1].is_validated)
def test_handle_path_response_frame_bad(self):
with client_and_server() as (client, server):
            # server receives unsolicited PATH_RESPONSE
with self.assertRaises(QuicConnectionError) as cm:
server._handle_path_response_frame(
client_receive_context(client),
QuicFrameType.PATH_RESPONSE,
Buffer(data=b"\x11\x22\x33\x44\x55\x66\x77\x88"),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.PROTOCOL_VIOLATION)
self.assertEqual(cm.exception.frame_type, QuicFrameType.PATH_RESPONSE)
def test_handle_padding_frame(self):
client = create_standalone_client(self)
# no more padding
buf = Buffer(data=b"")
client._handle_padding_frame(
client_receive_context(client), QuicFrameType.PADDING, buf
)
self.assertEqual(buf.tell(), 0)
# padding until end
buf = Buffer(data=bytes(10))
client._handle_padding_frame(
client_receive_context(client), QuicFrameType.PADDING, buf
)
self.assertEqual(buf.tell(), 10)
# padding then something else
buf = Buffer(data=bytes(10) + b"\x01")
client._handle_padding_frame(
client_receive_context(client), QuicFrameType.PADDING, buf
)
self.assertEqual(buf.tell(), 10)
def test_handle_reset_stream_frame(self):
with client_and_server() as (client, server):
# client creates bidirectional stream 0
client.send_stream_data(stream_id=0, data=b"hello")
consume_events(client)
# client receives RESET_STREAM
client._handle_reset_stream_frame(
client_receive_context(client),
QuicFrameType.RESET_STREAM,
Buffer(data=binascii.unhexlify("000100")),
)
event = client.next_event()
self.assertEqual(type(event), events.StreamReset)
self.assertEqual(event.error_code, QuicErrorCode.INTERNAL_ERROR)
self.assertEqual(event.stream_id, 0)
def test_handle_reset_stream_frame_send_only(self):
with client_and_server() as (client, server):
# client creates unidirectional stream 2
client.send_stream_data(stream_id=2, data=b"hello")
# client receives RESET_STREAM
with self.assertRaises(QuicConnectionError) as cm:
client._handle_reset_stream_frame(
client_receive_context(client),
QuicFrameType.RESET_STREAM,
Buffer(data=binascii.unhexlify("021100")),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.STREAM_STATE_ERROR)
self.assertEqual(cm.exception.frame_type, QuicFrameType.RESET_STREAM)
self.assertEqual(cm.exception.reason_phrase, "Stream is send-only")
def test_handle_retire_connection_id_frame(self):
with client_and_server() as (client, server):
self.assertEqual(
sequence_numbers(client._host_cids), [0, 1, 2, 3, 4, 5, 6, 7]
)
# client receives RETIRE_CONNECTION_ID
client._handle_retire_connection_id_frame(
client_receive_context(client),
QuicFrameType.RETIRE_CONNECTION_ID,
Buffer(data=b"\x02"),
)
self.assertEqual(
sequence_numbers(client._host_cids), [0, 1, 3, 4, 5, 6, 7, 8]
)
def test_handle_retire_connection_id_frame_current_cid(self):
with client_and_server() as (client, server):
self.assertEqual(
sequence_numbers(client._host_cids), [0, 1, 2, 3, 4, 5, 6, 7]
)
# client receives RETIRE_CONNECTION_ID for the current CID
with self.assertRaises(QuicConnectionError) as cm:
client._handle_retire_connection_id_frame(
client_receive_context(client),
QuicFrameType.RETIRE_CONNECTION_ID,
Buffer(data=b"\x00"),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.PROTOCOL_VIOLATION)
self.assertEqual(
cm.exception.frame_type, QuicFrameType.RETIRE_CONNECTION_ID
)
self.assertEqual(
cm.exception.reason_phrase, "Cannot retire current connection ID"
)
self.assertEqual(
sequence_numbers(client._host_cids), [0, 1, 2, 3, 4, 5, 6, 7]
)
def test_handle_stop_sending_frame(self):
with client_and_server() as (client, server):
# client creates bidirectional stream 0
client.send_stream_data(stream_id=0, data=b"hello")
# client receives STOP_SENDING
client._handle_stop_sending_frame(
client_receive_context(client),
QuicFrameType.STOP_SENDING,
Buffer(data=b"\x00\x11"),
)
def test_handle_stop_sending_frame_receive_only(self):
with client_and_server() as (client, server):
# server creates unidirectional stream 3
server.send_stream_data(stream_id=3, data=b"hello")
# client receives STOP_SENDING
with self.assertRaises(QuicConnectionError) as cm:
client._handle_stop_sending_frame(
client_receive_context(client),
QuicFrameType.STOP_SENDING,
Buffer(data=b"\x03\x11"),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.STREAM_STATE_ERROR)
self.assertEqual(cm.exception.frame_type, QuicFrameType.STOP_SENDING)
self.assertEqual(cm.exception.reason_phrase, "Stream is receive-only")
def test_handle_stream_frame_over_largest_offset(self):
with client_and_server() as (client, server):
# client receives offset + length > 2^62 - 1
frame_type = QuicFrameType.STREAM_BASE | 6
stream_id = 1
with self.assertRaises(QuicConnectionError) as cm:
client._handle_stream_frame(
client_receive_context(client),
frame_type,
Buffer(
data=encode_uint_var(stream_id)
+ encode_uint_var(UINT_VAR_MAX)
+ encode_uint_var(1)
),
)
self.assertEqual(
cm.exception.error_code, QuicErrorCode.FRAME_ENCODING_ERROR
)
self.assertEqual(cm.exception.frame_type, frame_type)
self.assertEqual(
cm.exception.reason_phrase, "offset + length cannot exceed 2^62 - 1"
)
def test_handle_stream_frame_over_max_data(self):
with client_and_server() as (client, server):
# artificially raise received data counter
client._local_max_data_used = client._local_max_data
# client receives STREAM frame
frame_type = QuicFrameType.STREAM_BASE | 4
stream_id = 1
with self.assertRaises(QuicConnectionError) as cm:
client._handle_stream_frame(
client_receive_context(client),
frame_type,
Buffer(data=encode_uint_var(stream_id) + encode_uint_var(1)),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.FLOW_CONTROL_ERROR)
self.assertEqual(cm.exception.frame_type, frame_type)
self.assertEqual(cm.exception.reason_phrase, "Over connection data limit")
def test_handle_stream_frame_over_max_stream_data(self):
with client_and_server() as (client, server):
# client receives STREAM frame
frame_type = QuicFrameType.STREAM_BASE | 4
stream_id = 1
with self.assertRaises(QuicConnectionError) as cm:
client._handle_stream_frame(
client_receive_context(client),
frame_type,
Buffer(
data=encode_uint_var(stream_id)
+ encode_uint_var(client._local_max_stream_data_bidi_remote + 1)
),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.FLOW_CONTROL_ERROR)
self.assertEqual(cm.exception.frame_type, frame_type)
self.assertEqual(cm.exception.reason_phrase, "Over stream data limit")
def test_handle_stream_frame_over_max_streams(self):
with client_and_server() as (client, server):
# client receives STREAM frame
with self.assertRaises(QuicConnectionError) as cm:
client._handle_stream_frame(
client_receive_context(client),
QuicFrameType.STREAM_BASE,
Buffer(
data=encode_uint_var(client._local_max_stream_data_uni * 4 + 3)
),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.STREAM_LIMIT_ERROR)
self.assertEqual(cm.exception.frame_type, QuicFrameType.STREAM_BASE)
self.assertEqual(cm.exception.reason_phrase, "Too many streams open")
def test_handle_stream_frame_send_only(self):
with client_and_server() as (client, server):
# client creates unidirectional stream 2
client.send_stream_data(stream_id=2, data=b"hello")
# client receives STREAM frame
with self.assertRaises(QuicConnectionError) as cm:
client._handle_stream_frame(
client_receive_context(client),
QuicFrameType.STREAM_BASE,
Buffer(data=b"\x02"),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.STREAM_STATE_ERROR)
self.assertEqual(cm.exception.frame_type, QuicFrameType.STREAM_BASE)
self.assertEqual(cm.exception.reason_phrase, "Stream is send-only")
def test_handle_stream_frame_wrong_initiator(self):
with client_and_server() as (client, server):
# client receives STREAM frame
with self.assertRaises(QuicConnectionError) as cm:
client._handle_stream_frame(
client_receive_context(client),
QuicFrameType.STREAM_BASE,
Buffer(data=b"\x00"),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.STREAM_STATE_ERROR)
self.assertEqual(cm.exception.frame_type, QuicFrameType.STREAM_BASE)
self.assertEqual(cm.exception.reason_phrase, "Wrong stream initiator")
def test_handle_stream_data_blocked_frame(self):
with client_and_server() as (client, server):
# client creates bidirectional stream 0
client.send_stream_data(stream_id=0, data=b"hello")
# client receives STREAM_DATA_BLOCKED
client._handle_stream_data_blocked_frame(
client_receive_context(client),
QuicFrameType.STREAM_DATA_BLOCKED,
Buffer(data=b"\x00\x01"),
)
def test_handle_stream_data_blocked_frame_send_only(self):
with client_and_server() as (client, server):
# client creates unidirectional stream 2
client.send_stream_data(stream_id=2, data=b"hello")
# client receives STREAM_DATA_BLOCKED
with self.assertRaises(QuicConnectionError) as cm:
client._handle_stream_data_blocked_frame(
client_receive_context(client),
QuicFrameType.STREAM_DATA_BLOCKED,
Buffer(data=b"\x02\x01"),
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.STREAM_STATE_ERROR)
self.assertEqual(cm.exception.frame_type, QuicFrameType.STREAM_DATA_BLOCKED)
self.assertEqual(cm.exception.reason_phrase, "Stream is send-only")
def test_handle_streams_blocked_uni_frame(self):
with client_and_server() as (client, server):
# client receives STREAMS_BLOCKED_UNI: 0
client._handle_streams_blocked_frame(
client_receive_context(client),
QuicFrameType.STREAMS_BLOCKED_UNI,
Buffer(data=b"\x00"),
)
def test_parse_transport_parameters(self):
client = create_standalone_client(self)
data = encode_transport_parameters(
QuicTransportParameters(
original_destination_connection_id=client.original_destination_connection_id
)
)
client._parse_transport_parameters(data)
def test_parse_transport_parameters_with_bad_active_connection_id_limit(self):
client = create_standalone_client(self)
for active_connection_id_limit in [0, 1]:
data = encode_transport_parameters(
QuicTransportParameters(
active_connection_id_limit=active_connection_id_limit,
original_destination_connection_id=client.original_destination_connection_id,
)
)
with self.assertRaises(QuicConnectionError) as cm:
client._parse_transport_parameters(data)
self.assertEqual(
cm.exception.error_code, QuicErrorCode.TRANSPORT_PARAMETER_ERROR
)
self.assertEqual(cm.exception.frame_type, QuicFrameType.CRYPTO)
self.assertEqual(
cm.exception.reason_phrase,
"active_connection_id_limit must be no less than 2",
)
def test_parse_transport_parameters_with_server_only_parameter(self):
server_configuration = QuicConfiguration(
is_client=False, quic_logger=QuicLogger()
)
server_configuration.load_cert_chain(SERVER_CERTFILE, SERVER_KEYFILE)
server = QuicConnection(
configuration=server_configuration,
original_destination_connection_id=bytes(8),
)
for active_connection_id_limit in [0, 1]:
data = encode_transport_parameters(
QuicTransportParameters(
active_connection_id_limit=active_connection_id_limit,
original_destination_connection_id=bytes(8),
)
)
with self.assertRaises(QuicConnectionError) as cm:
server._parse_transport_parameters(data)
self.assertEqual(
cm.exception.error_code, QuicErrorCode.TRANSPORT_PARAMETER_ERROR
)
self.assertEqual(cm.exception.frame_type, QuicFrameType.CRYPTO)
self.assertEqual(
cm.exception.reason_phrase,
"original_destination_connection_id is not allowed for clients",
)
def test_payload_received_padding_only(self):
with client_and_server() as (client, server):
# client receives padding only
is_ack_eliciting, is_probing = client._payload_received(
client_receive_context(client), b"\x00" * 1200
)
self.assertFalse(is_ack_eliciting)
self.assertTrue(is_probing)
def test_payload_received_unknown_frame(self):
with client_and_server() as (client, server):
# client receives unknown frame
with self.assertRaises(QuicConnectionError) as cm:
client._payload_received(client_receive_context(client), b"\x1f")
self.assertEqual(cm.exception.error_code, QuicErrorCode.PROTOCOL_VIOLATION)
self.assertEqual(cm.exception.frame_type, 0x1F)
self.assertEqual(cm.exception.reason_phrase, "Unknown frame type")
def test_payload_received_unexpected_frame(self):
with client_and_server() as (client, server):
# client receives CRYPTO frame in 0-RTT
with self.assertRaises(QuicConnectionError) as cm:
client._payload_received(
client_receive_context(client, epoch=tls.Epoch.ZERO_RTT), b"\x06"
)
self.assertEqual(cm.exception.error_code, QuicErrorCode.PROTOCOL_VIOLATION)
self.assertEqual(cm.exception.frame_type, QuicFrameType.CRYPTO)
self.assertEqual(cm.exception.reason_phrase, "Unexpected frame type")
def test_payload_received_malformed_frame(self):
with client_and_server() as (client, server):
# client receives malformed TRANSPORT_CLOSE frame
with self.assertRaises(QuicConnectionError) as cm:
client._payload_received(
client_receive_context(client), b"\x1c\x00\x01"
)
self.assertEqual(
cm.exception.error_code, QuicErrorCode.FRAME_ENCODING_ERROR
)
self.assertEqual(cm.exception.frame_type, 0x1C)
self.assertEqual(cm.exception.reason_phrase, "Failed to parse frame")
def test_send_max_data_blocked_by_cc(self):
with client_and_server() as (client, server):
# check congestion control
self.assertEqual(client._loss.bytes_in_flight, 0)
self.assertEqual(client._loss.congestion_window, 14303)
# artificially raise received data counter
client._local_max_data_used = client._local_max_data
self.assertEqual(server._remote_max_data, 1048576)
# artificially raise bytes in flight
client._loss._cc.bytes_in_flight = 14303
# MAX_DATA is not sent due to congestion control
self.assertEqual(drop(client), 0)
def test_send_max_data_retransmit(self):
with client_and_server() as (client, server):
# artificially raise received data counter
client._local_max_data_used = client._local_max_data
self.assertEqual(server._remote_max_data, 1048576)
# MAX_DATA is sent and lost
self.assertEqual(drop(client), 1)
self.assertEqual(client._local_max_data_sent, 2097152)
self.assertEqual(server._remote_max_data, 1048576)
# MAX_DATA is retransmitted and acked
client._on_max_data_delivery(QuicDeliveryState.LOST)
self.assertEqual(client._local_max_data_sent, 0)
self.assertEqual(roundtrip(client, server), (1, 1))
self.assertEqual(server._remote_max_data, 2097152)
def test_send_max_stream_data_retransmit(self):
with client_and_server() as (client, server):
# client creates bidirectional stream 0
stream = client._create_stream(stream_id=0)
client.send_stream_data(0, b"hello")
self.assertEqual(stream.max_stream_data_local, 1048576)
self.assertEqual(stream.max_stream_data_local_sent, 1048576)
self.assertEqual(roundtrip(client, server), (1, 1))
# server sends data, just before raising MAX_STREAM_DATA
server.send_stream_data(0, b"Z" * 524288) # 1048576 // 2
for i in range(10):
roundtrip(server, client)
self.assertEqual(stream.max_stream_data_local, 1048576)
self.assertEqual(stream.max_stream_data_local_sent, 1048576)
# server sends one more byte
server.send_stream_data(0, b"Z")
self.assertEqual(transfer(server, client), 1)
# MAX_STREAM_DATA is sent and lost
self.assertEqual(drop(client), 1)
self.assertEqual(stream.max_stream_data_local, 2097152)
self.assertEqual(stream.max_stream_data_local_sent, 2097152)
client._on_max_stream_data_delivery(QuicDeliveryState.LOST, stream)
self.assertEqual(stream.max_stream_data_local, 2097152)
self.assertEqual(stream.max_stream_data_local_sent, 0)
            # MAX_STREAM_DATA is retransmitted and acked
self.assertEqual(roundtrip(client, server), (1, 1))
self.assertEqual(stream.max_stream_data_local, 2097152)
self.assertEqual(stream.max_stream_data_local_sent, 2097152)
def test_send_ping(self):
with client_and_server() as (client, server):
consume_events(client)
# client sends ping, server ACKs it
client.send_ping(uid=12345)
self.assertEqual(roundtrip(client, server), (1, 1))
# check event
event = client.next_event()
self.assertEqual(type(event), events.PingAcknowledged)
self.assertEqual(event.uid, 12345)
def test_send_ping_retransmit(self):
with client_and_server() as (client, server):
consume_events(client)
            # client sends a ping, PING is lost
client.send_ping(uid=12345)
self.assertEqual(drop(client), 1)
# PING is retransmitted and acked
client._on_ping_delivery(QuicDeliveryState.LOST, (12345,))
self.assertEqual(roundtrip(client, server), (1, 1))
# check event
event = client.next_event()
self.assertEqual(type(event), events.PingAcknowledged)
self.assertEqual(event.uid, 12345)
def test_send_reset_stream(self):
with client_and_server() as (client, server):
# client creates bidirectional stream
client.send_stream_data(0, b"hello")
self.assertEqual(roundtrip(client, server), (1, 1))
# client resets stream
client.reset_stream(0, QuicErrorCode.NO_ERROR)
self.assertEqual(roundtrip(client, server), (1, 1))
def test_send_stream_data_over_max_streams_bidi(self):
with client_and_server() as (client, server):
# create streams
for i in range(128):
stream_id = i * 4
client.send_stream_data(stream_id, b"")
self.assertFalse(client._streams[stream_id].is_blocked)
self.assertEqual(len(client._streams_blocked_bidi), 0)
self.assertEqual(len(client._streams_blocked_uni), 0)
self.assertEqual(roundtrip(client, server), (0, 0))
# create one too many -> STREAMS_BLOCKED
stream_id = 128 * 4
client.send_stream_data(stream_id, b"")
self.assertTrue(client._streams[stream_id].is_blocked)
self.assertEqual(len(client._streams_blocked_bidi), 1)
self.assertEqual(len(client._streams_blocked_uni), 0)
self.assertEqual(roundtrip(client, server), (1, 1))
# peer raises max streams
client._handle_max_streams_bidi_frame(
client_receive_context(client),
QuicFrameType.MAX_STREAMS_BIDI,
Buffer(data=encode_uint_var(129)),
)
self.assertFalse(client._streams[stream_id].is_blocked)
def test_send_stream_data_over_max_streams_uni(self):
with client_and_server() as (client, server):
# create streams
for i in range(128):
stream_id = i * 4 + 2
client.send_stream_data(stream_id, b"")
self.assertFalse(client._streams[stream_id].is_blocked)
self.assertEqual(len(client._streams_blocked_bidi), 0)
self.assertEqual(len(client._streams_blocked_uni), 0)
self.assertEqual(roundtrip(client, server), (0, 0))
# create one too many -> STREAMS_BLOCKED
stream_id = 128 * 4 + 2
client.send_stream_data(stream_id, b"")
self.assertTrue(client._streams[stream_id].is_blocked)
self.assertEqual(len(client._streams_blocked_bidi), 0)
self.assertEqual(len(client._streams_blocked_uni), 1)
self.assertEqual(roundtrip(client, server), (1, 1))
# peer raises max streams
client._handle_max_streams_uni_frame(
client_receive_context(client),
QuicFrameType.MAX_STREAMS_UNI,
Buffer(data=encode_uint_var(129)),
)
self.assertFalse(client._streams[stream_id].is_blocked)
def test_send_stream_data_peer_initiated(self):
with client_and_server() as (client, server):
# server creates bidirectional stream
server.send_stream_data(1, b"hello")
self.assertEqual(roundtrip(server, client), (1, 1))
# server creates unidirectional stream
server.send_stream_data(3, b"hello")
self.assertEqual(roundtrip(server, client), (1, 1))
# client creates bidirectional stream
client.send_stream_data(0, b"hello")
self.assertEqual(roundtrip(client, server), (1, 1))
# client sends data on server-initiated bidirectional stream
client.send_stream_data(1, b"hello")
self.assertEqual(roundtrip(client, server), (1, 1))
# client creates unidirectional stream
client.send_stream_data(2, b"hello")
self.assertEqual(roundtrip(client, server), (1, 1))
# client tries to reset server-initiated unidirectional stream
with self.assertRaises(ValueError) as cm:
client.reset_stream(3, QuicErrorCode.NO_ERROR)
self.assertEqual(
str(cm.exception),
"Cannot send data on peer-initiated unidirectional stream",
)
# client tries to reset unknown server-initiated bidirectional stream
with self.assertRaises(ValueError) as cm:
client.reset_stream(5, QuicErrorCode.NO_ERROR)
self.assertEqual(
str(cm.exception), "Cannot send data on unknown peer-initiated stream"
)
# client tries to send data on server-initiated unidirectional stream
with self.assertRaises(ValueError) as cm:
client.send_stream_data(3, b"hello")
self.assertEqual(
str(cm.exception),
"Cannot send data on peer-initiated unidirectional stream",
)
# client tries to send data on unknown server-initiated bidirectional stream
with self.assertRaises(ValueError) as cm:
client.send_stream_data(5, b"hello")
self.assertEqual(
str(cm.exception), "Cannot send data on unknown peer-initiated stream"
)
def test_stream_direction(self):
with client_and_server() as (client, server):
for off in [0, 4, 8]:
# Client-Initiated, Bidirectional
self.assertTrue(client._stream_can_receive(off))
self.assertTrue(client._stream_can_send(off))
self.assertTrue(server._stream_can_receive(off))
self.assertTrue(server._stream_can_send(off))
# Server-Initiated, Bidirectional
self.assertTrue(client._stream_can_receive(off + 1))
self.assertTrue(client._stream_can_send(off + 1))
self.assertTrue(server._stream_can_receive(off + 1))
self.assertTrue(server._stream_can_send(off + 1))
# Client-Initiated, Unidirectional
self.assertFalse(client._stream_can_receive(off + 2))
self.assertTrue(client._stream_can_send(off + 2))
self.assertTrue(server._stream_can_receive(off + 2))
self.assertFalse(server._stream_can_send(off + 2))
# Server-Initiated, Unidirectional
self.assertTrue(client._stream_can_receive(off + 3))
self.assertFalse(client._stream_can_send(off + 3))
self.assertFalse(server._stream_can_receive(off + 3))
self.assertTrue(server._stream_can_send(off + 3))
def test_version_negotiation_fail(self):
client = create_standalone_client(self)
# no common version, no retry
client.receive_datagram(
encode_quic_version_negotiation(
source_cid=client._peer_cid,
destination_cid=client.host_cid,
supported_versions=[0xFF000011], # DRAFT_16
),
SERVER_ADDR,
now=time.time(),
)
self.assertEqual(drop(client), 0)
event = client.next_event()
self.assertEqual(type(event), events.ConnectionTerminated)
self.assertEqual(event.error_code, QuicErrorCode.INTERNAL_ERROR)
self.assertEqual(event.frame_type, None)
self.assertEqual(
event.reason_phrase, "Could not find a common protocol version"
)
def test_version_negotiation_ok(self):
client = create_standalone_client(self)
# found a common version, retry
client.receive_datagram(
encode_quic_version_negotiation(
source_cid=client._peer_cid,
destination_cid=client.host_cid,
supported_versions=[client._version],
),
SERVER_ADDR,
now=time.time(),
)
self.assertEqual(drop(client), 1)
def test_write_connection_close_early(self):
client = create_standalone_client(self)
builder = QuicPacketBuilder(
host_cid=client.host_cid,
is_client=True,
peer_cid=client._peer_cid,
version=client._version,
)
crypto = CryptoPair()
crypto.setup_initial(client.host_cid, is_client=True, version=client._version)
builder.start_packet(PACKET_TYPE_INITIAL, crypto)
client._write_connection_close_frame(
builder=builder,
epoch=tls.Epoch.INITIAL,
error_code=123,
frame_type=None,
reason_phrase="some reason",
)
self.assertEqual(
builder.quic_logger_frames,
[
{
"error_code": QuicErrorCode.APPLICATION_ERROR,
"error_space": "transport",
"frame_type": "connection_close",
"raw_error_code": QuicErrorCode.APPLICATION_ERROR,
"reason": "",
"trigger_frame_type": QuicFrameType.PADDING,
}
],
)
class QuicNetworkPathTest(TestCase):
def test_can_send(self):
path = QuicNetworkPath(("1.2.3.4", 1234))
self.assertFalse(path.is_validated)
# initially, cannot send any data
self.assertTrue(path.can_send(0))
self.assertFalse(path.can_send(1))
# receive some data
path.bytes_received += 1
self.assertTrue(path.can_send(0))
self.assertTrue(path.can_send(1))
self.assertTrue(path.can_send(2))
self.assertTrue(path.can_send(3))
self.assertFalse(path.can_send(4))
# send some data
path.bytes_sent += 3
self.assertTrue(path.can_send(0))
self.assertFalse(path.can_send(1))
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""=================================================================
@Project : Algorithm_YuweiYin/LeetCode-All-Solution/Python3
@File : LC-1719-Number-Of-Ways-To-Reconstruct-A-Tree.py
@Author : [YuweiYin](https://github.com/YuweiYin)
@Date : 2022-02-16
=================================================================="""
import sys
import time
from typing import List
# import collections
"""
LeetCode - 1719 - (Hard) - Number Of Ways To Reconstruct A Tree
https://leetcode.com/problems/number-of-ways-to-reconstruct-a-tree/
Description & Requirement:
You are given an array pairs, where pairs[i] = [x_i, y_i], and:
There are no duplicates.
x_i < y_i
Let ways be the number of rooted trees that satisfy the following conditions:
The tree consists of nodes whose values appeared in pairs.
A pair [x_i, y_i] exists in pairs if and only if x_i is an ancestor of y_i or y_i is an ancestor of x_i.
Note: the tree does not have to be a binary tree.
Two ways are considered to be different if there is at least one node that has different parents in both ways.
Return:
0 if ways == 0
1 if ways == 1
2 if ways > 1
A rooted tree is a tree that has a single root node,
and all edges are oriented to be outgoing from the root.
An ancestor of a node is any node on the path from the root to that node
(excluding the node itself). The root has no ancestors.
Example 1:
Input: pairs = [[1,2],[2,3]]
Output: 1
Explanation: There is exactly one valid rooted tree, which is shown in the above figure.
Example 2:
Input: pairs = [[1,2],[2,3],[1,3]]
Output: 2
Explanation: There are multiple valid rooted trees.
Example 3:
Input: pairs = [[1,2],[2,3],[2,4],[1,5]]
Output: 0
Explanation: There are no valid rooted trees.
Constraints:
1 <= pairs.length <= 10^5
1 <= x_i < y_i <= 500
The elements in pairs are unique.
"""
class Solution:
def checkWays(self, pairs: List[List[int]]) -> int:
# exception case
assert isinstance(pairs, list) and len(pairs) > 0
for pair in pairs:
assert isinstance(pair, list) and len(pair) == 2
if len(pairs) == 1:
return 2
# main method: (try to reconstruct the tree)
# if it can't be rebuilt, then return 0;
# if built, but exist node that can change position with its ancestor, then return 2; else return 1.
return self._checkWays(pairs)
def _checkWays(self, pairs: List[List[int]]) -> int:
"""
Runtime: 1804 ms, faster than 94.74% of Python3 online submissions for Number Of Ways To Reconstruct A Tree.
Memory Usage: 45.1 MB, less than 81.58% of Python3 online submissions for Number Of Ways To Reconstruct A Tree.
"""
len_pairs = len(pairs)
assert len_pairs >= 2
        # convert pairs into ancestor links; if two nodes share a link, then one of them is the ancestor of the other
ancestor_dict = dict({})
for pair in pairs:
if pair[0] == pair[1]:
continue
# pair[0] -> pair[1]
if pair[0] not in ancestor_dict:
ancestor_dict[pair[0]] = {pair[1]}
else:
if pair[1] not in ancestor_dict[pair[0]]:
ancestor_dict[pair[0]].add(pair[1])
# pair[1] -> pair[0]
if pair[1] not in ancestor_dict:
ancestor_dict[pair[1]] = {pair[0]}
else:
if pair[0] not in ancestor_dict[pair[1]]:
ancestor_dict[pair[1]].add(pair[0])
# a valid root node must be the ancestor of all others
possible_root = []
for cur_node, ancestor_node_set in ancestor_dict.items():
if len(ancestor_node_set) == len(ancestor_dict) - 1:
possible_root.append(cur_node)
if len(possible_root) <= 0:
return 0 # no valid root
        # len(possible_root) may be > 1; just choose one of them
root_node = possible_root[0]
res = 1 # default: can rebuild only one tree
for cur_node, ancestor_node_set in ancestor_dict.items():
            # the root_node has no ancestor, so skip checking it (we already checked that it links to all other nodes)
if cur_node == root_node:
continue
# for every rest node, check if it can find a valid parent node, based on len(ancestor_link_set)
# if node_i is the ancestor of node_j, then len(ancestor_dict[node_i]) >= len(ancestor_dict[node_j])
cur_ancestor_node_len = len(ancestor_node_set)
            parent_node = -int(1e9+7)  # sentinel: valid node values satisfy 1 <= x_i < y_i <= 500
            parent_ancestor_node_len = int(1e9+7)  # sentinel: ancestor set sizes never exceed pairs.length <= 10^5
# now, find an ancestor to be the parent node of the current node
for ancestor_node in ancestor_node_set:
# "parent" is the lowest ancestor
if cur_ancestor_node_len <= len(ancestor_dict[ancestor_node]) < parent_ancestor_node_len:
parent_node = ancestor_node
parent_ancestor_node_len = len(ancestor_dict[ancestor_node])
# can't find a valid parent, so can't rebuild the tree
if parent_node < 0:
return 0
# now check if ancestor_dict[cur_node] is a subset of ancestor_dict[parent_node]
for ancestor_node in ancestor_node_set:
if ancestor_node != parent_node and ancestor_node not in ancestor_dict[parent_node]:
return 0
# now check if len(ancestor_dict[cur_node]) == len(ancestor_dict[parent_node])
# if so, cur_node and parent_node are exchangeable in the rebuilt tree
if cur_ancestor_node_len == parent_ancestor_node_len:
                res = 2  # if the tree can still be built, the final `return res` returns 2 rather than 1
return res
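# Approach recap: build an adjacency set per node from `pairs`; a valid root must
# be linked to every other node. For each remaining node, choose as parent the
# linked node with the smallest adjacency set that is still no smaller than its
# own, then verify that its adjacency set is contained in the parent's. If the two
# sets have equal size, the nodes are interchangeable and the answer becomes 2.
# With node values bounded by 500, this takes roughly O(V^2) set operations.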
def main():
# Example 1: Output: 1
# pairs = [[1, 2], [2, 3]]
# Example 2: Output: 2
# pairs = [[1, 2], [2, 3], [1, 3]]
# Example 3: Output: 0
# pairs = [[1, 2], [2, 3], [2, 4], [1, 5]]
# Example 4: Output: 0
pairs = [[9, 14], [5, 13], [8, 14], [12, 13], [7, 14], [7, 8], [3, 5], [6, 14], [10, 14], [8, 13], [5, 8], [3, 9],
[3, 13], [3, 10], [5, 10], [10, 13], [4, 14], [3, 12], [6, 13], [12, 14], [13, 14], [5, 7], [3, 15],
[11, 14], [14, 15], [2, 3], [3, 8], [9, 15], [2, 14], [3, 14], [1, 14], [1, 3], [2, 11], [3, 6], [1, 2],
[7, 13], [3, 11], [5, 14]]
# init instance
solution = Solution()
# run & time
start = time.process_time()
ans = solution.checkWays(pairs)
end = time.process_time()
# show answer
print('\nAnswer:')
print(ans)
# show time consumption
print('Running Time: %.5f ms' % ((end - start) * 1000))
if __name__ == "__main__":
sys.exit(main())
|
import numpy as np
import scipy.io
import scipy.linalg
import sklearn.metrics
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score
from sklearn.svm import LinearSVC
import scipy.sparse.linalg
from scipy.linalg import eig
class JDA:
'''
Implements Joint Distribution Adaptation.
To read more about the JDA, check the following paper:
Long M, Wang J, Ding G, et al.
Transfer feature learning with joint distribution adaptation[C]//
Proceedings of the IEEE international conference on computer vision. 2013: 2200-2207.
The code is modified according to https://github.com/jindongwang/transferlearning/tree/master/code/traditional/JDA
'''
def __init__(self, kernel_type='linear', dim=20, lamb=0.1, gamma=1, iter=10, base_classifer=LinearSVC()):
'''
:param kernel_type: kernel, values: 'primal' | 'linear' | 'rbf' | 'sam'
:param dim: dimension after transfer
:param lamb: lambda value in equation
:param gamma: kernel bandwidth for rbf kernel
:param iter: iterations
'''
self.kernel_type = kernel_type
self.dim = dim
self.lamb = lamb
self.gamma = gamma
self.base_classifer = base_classifer
self.iter = iter
self.X = None
self.V = None
def kernel(self, kernel_type, X1, X2, gamma):
K = None
X1[np.isnan(X1)]=0
if not kernel_type or kernel_type == 'primal':
K = X1
elif kernel_type == 'linear':
if X2 is not None:
X2[np.isnan(X2)]=0
K = sklearn.metrics.pairwise.linear_kernel(np.asarray(X1).T, np.asarray(X2).T)
else:
K = sklearn.metrics.pairwise.linear_kernel(np.asarray(X1).T)
elif kernel_type == 'rbf':
if X2 is not None:
X2[np.isnan(X2)]=0
K = sklearn.metrics.pairwise.rbf_kernel(np.asarray(X1).T, np.asarray(X2).T, gamma)
else:
K = sklearn.metrics.pairwise.rbf_kernel(np.asarray(X1).T, None, gamma)
return K
def fit(self, Xs, Xt, Ys):
'''
        get the transformation weight matrix and necessary parameters
:param: Xs source feature, shape:(num_samples,num_features)
:param: Xt target feature, shape:(num_samples,num_features)
:param: Ys source data label
:return: Xs_new and Xt_new after JDA
'''
X = np.hstack((Xs.T, Xt.T))
X = X/np.linalg.norm(X, axis=0)
m, n = X.shape
ns, nt = len(Xs), len(Xt)
e = np.vstack((1 / ns * np.ones((ns, 1)), -1 / nt * np.ones((nt, 1))))
C = len(np.unique(Ys))
H = np.eye(n) - 1 / n * np.ones((n, n))
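        # e weights source samples by 1/ns and target samples by -1/nt, so
        # e * e.T is the MMD matrix for the marginal distributions; H is the
        # centering matrix used in the scatter (variance) term below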
M = e * e.T * C
Y_tar_pseudo = None
clf = KNeighborsClassifier(n_neighbors=1)
for t in range(self.iter):
print('iteration %d/%d'%(t+1,self.iter))
N = 0
if Y_tar_pseudo is not None and len(Y_tar_pseudo) == nt:
                # the original code uses 'for c in range(1, C + 1)', but in our case the true labels start from 0
for c in range(C):
e = np.zeros((n, 1))
tt = Ys == c
e[np.where(tt == True)] = 1 / len(Ys[np.where(Ys == c)])
yy = Y_tar_pseudo == c
ind = np.where(yy == True)
inds = [item + ns for item in ind]
e[tuple(inds)] = -1 / len(Y_tar_pseudo[np.where(Y_tar_pseudo == c)])
e[np.isinf(e)] = 0
N = N + np.dot(e, e.T)
M += N
M = M / np.linalg.norm(M, 'fro')
K = self.kernel(self.kernel_type, X, None, gamma=self.gamma)
n_eye = m if self.kernel_type == 'primal' else n
a = np.linalg.multi_dot([K, M, K.T])+self.lamb*np.eye(n_eye)
b = np.linalg.multi_dot([K, H, K.T])
a[np.isnan(a)] = 0.0
b[np.isnan(b)] = 0.0
Kc = np.linalg.inv(a).dot(b)
Kc[np.isnan(Kc)] = 0
w, v= scipy.sparse.linalg.eigs(Kc)
# w, v = eig(a, b)
w = w.astype('float')
ind = np.argsort(w)
A = v[:, ind[:self.dim]].astype('float')
Z = np.dot(A.T, K)
Z /= np.linalg.norm(Z, axis=0)
Z[np.isnan(Z)] = 0
            # save the necessary matrices for transforming new data
self.X = X
self.V = A.T
Xs_new, Xt_new = Z[:, :ns].T, Z[:, ns:].T
clf.fit(Xs_new, Ys.ravel())
Y_tar_pseudo = clf.predict(Xt_new)
return Xs_new, Xt_new
def transform(self, x_test):
'''
transform test data
:param x_test: test data
:return: transformed test data
'''
if self.X is None:
raise ValueError('Please fit on some data first.')
x_test_k = self.kernel(self.kernel_type, self.X, x_test.T, gamma=self.gamma)
x_test_jda = np.dot(self.V, x_test_k)
x_test_jda /= np.linalg.norm(x_test_jda, axis=0)
return x_test_jda.T
def fit_predict(self, Xs, Xt, X_test, Ys, Y_test):
self.fit(Xs, Xt, Ys)
Xs = self.transform(Xs)
self.base_classifer.fit(Xs, Ys)
X_test = self.transform(X_test)
y_pred = self.base_classifer.predict(X_test)
acc = accuracy_score(Y_test, y_pred)
        return acc
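# Example usage: a minimal sketch with synthetic data; the array shapes and
# parameter values below are illustrative assumptions only.
if __name__ == "__main__":
    rng = np.random.RandomState(0)
    Xs, Xt = rng.rand(20, 10), rng.rand(20, 10)
    Ys, Yt = rng.randint(0, 2, 20), rng.randint(0, 2, 20)
    jda = JDA(kernel_type='linear', dim=5, lamb=0.1, iter=2)
    # fit on source/target features, then report accuracy on the target data
    print(jda.fit_predict(Xs, Xt, Xt, Ys, Yt))
|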
#WITH SET
def get_string_permutations(input_string):
if len(input_string) < 1:
raise ValueError('Input cannot be an empty string')
elif len(input_string) == 1:
return set(input_string)
return generate_permutations(get_string_permutations(input_string[:-1]), input_string[-1])
def generate_permutations(old_perm_list, new_char):
new_perm_list = set()
for perm in old_perm_list:
for i in range(len(perm) + 1):
new_perm = perm[:i] + new_char + perm[i:]
new_perm_list.add(new_perm)
return new_perm_list
print get_string_permutations("CAT")
#WITH LIST
# def get_string_permutations(input_string):
# if len(input_string) < 1:
# raise ValueError('Input cannot be an empty string')
# elif len(input_string) == 1:
# return [input_string]
#
# return generate_permutations(get_string_permutations(input_string[:-1]), input_string[-1])
#
# def generate_permutations(old_perm_list, new_char):
# new_perm_list = []
# for perm in old_perm_list:
# for i in range(len(perm) + 1):
# new_perm = perm[:i] + new_char + perm[i:]
# new_perm_list.append(new_perm)
# return new_perm_list
#
#
# print get_string_permutations("CAT")
|
# -*- coding: utf-8 -*-
'''
Project Birdseye server source package. Requires gevent greenlet patch.
'''
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_rq2 import RQ
import logging.config
import sys
def _get_version():
try:
import birdseye.version # pragma: no cover
return birdseye.version.ver # pragma: no cover
    except Exception:
import setuptools_scm
return setuptools_scm.get_version(root='..', relative_to=__file__)
__version__ = _get_version()
def get_semver():
scm_ver = __version__
return scm_ver[:scm_ver.index('.dev')] if '.dev' in scm_ver else scm_ver
app = Flask(__name__)
app.config.from_object('birdseye.default_settings')
if 'LOGGER' in app.config:
logging.config.dictConfig(app.config['LOGGER'])
else:
handler = logging.StreamHandler(stream=sys.stdout)
app.logger.addHandler(handler)
app.logger.setLevel('INFO')
db = SQLAlchemy(app)
rq = RQ(app)
import birdseye.api # noqa
birdseye.api.noqa()
|
import pathlib
import spacy
from spacy.matcher import Matcher
root_dir = pathlib.Path(__file__).parent.parent
nlp = spacy.load(f"{root_dir}/models/pt_core_news_sm_addresses")
# TODO: find a better way to identify abbreviations
LOCATION_LABELS = ["STATE", "ZIPCODE"]
# add ZIP code (CEP) detection
matcher = Matcher(nlp.vocab)
pattern_with_dot = [{"SHAPE": "dd."}, {"SHAPE": "ddd-ddd"}] # 44.050-024
pattern_without_dot = [{"SHAPE": "dddd-ddd"}] # 44050-024
matcher.add("ZIPCODE", [pattern_with_dot, pattern_without_dot])
def get_locations(text):
found = []
doc = nlp(text)
matches = matcher(doc)
for match_id, start, end in matches:
string_id = nlp.vocab.strings[match_id]
span = doc[start:end]
found.append(
{
"type": string_id,
"start": span.start_char,
"end": span.end_char,
}
)
for ent in doc.ents:
if ent.label_ in LOCATION_LABELS:
found.append(
{
"type": ent.label_,
"start": ent.start_char,
"end": ent.end_char,
}
)
return found
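# Example usage: a minimal sketch with a made-up sentence; it assumes the custom
# "pt_core_news_sm_addresses" model loaded above is available locally. It prints
# whatever ZIP codes and location entities are detected in the text.
if __name__ == "__main__":
    sample = "Enviar para a Rua Exemplo, 123, Salvador - BA, 44.050-024"
    print(get_locations(sample))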
|
#!usr/bin/env python3
# -*- coding:utf-8 -*-
__author__ = 'yanqiong'
import base64
import ctypes
import hashlib
import logging
import os
from pathlib import Path
import sys
import uuid
from typing import Optional
from tqsdk.tradeable.otg.base_otg import BaseOtg
from tqsdk.tradeable.mixin import FutureMixin
class TqAccount(BaseOtg, FutureMixin):
    """Tianqin live trading account class"""
def __init__(self, broker_id: str, account_id: str, password: str, front_broker: Optional[str] = None,
front_url: Optional[str] = None, td_url: Optional[str] = None, **kwargs) -> None:
"""
        Create a Tianqin live trading account instance
        Args:
            broker_id (str): futures broker; the list of supported brokers is at https://www.shinnytech.com/blog/tq-support-broker/
            account_id (str): account number
            password (str): password
            td_url (str): [optional] address of the trading server this account connects to, e.g. "tcp://1.2.3.4:1234/"
"""
if bool(front_broker) != bool(front_url):
raise Exception("front_broker 和 front_url 参数需同时填写")
self._front_broker = front_broker
self._front_url = front_url
self._app_id = "SHINNY_TQ_1.0"
account_type = kwargs["account_type"] if "account_type" in kwargs else "FUTURE"
if account_type == "SPOT":
raise Exception("account_type 账户类型指定错误,目前只支持 FUTURE")
kwargs.pop("account_type", None)
if len(kwargs) > 0:
raise TypeError(f"不支持以下参数 {[kwargs.keys()]}")
super(TqAccount, self).__init__(broker_id, account_id, password, td_url)
def _get_account_key(self):
s = self._broker_id + self._account_id
s += self._front_broker if self._front_broker else ""
s += self._front_url if self._front_url else ""
s += self._td_url if self._td_url else ""
return hashlib.md5(s.encode('utf-8')).hexdigest()
@property
def _account_info(self):
info = super(TqAccount, self)._account_info
info.update({
"account_type": self._account_type
})
return info
def _get_system_info(self):
try:
l = ctypes.c_int(344)
buf = ctypes.create_string_buffer(l.value)
path = Path(__file__, '../../../ctpse')
lib_path = path.resolve() # Make the path absolute, resolving any symlinks. A new path object is returned
if sys.platform.startswith("win") or sys.platform.startswith("linux"):
if sys.platform.startswith("win"):
if ctypes.sizeof(ctypes.c_voidp) == 4:
selib = ctypes.cdll.LoadLibrary(os.path.join(lib_path, "WinDataCollect32.dll"))
ret = getattr(selib, "?CTP_GetSystemInfo@@YAHPADAAH@Z")(buf, ctypes.byref(l))
else:
selib = ctypes.cdll.LoadLibrary(os.path.join(lib_path, "WinDataCollect64.dll"))
ret = getattr(selib, "?CTP_GetSystemInfo@@YAHPEADAEAH@Z")(buf, ctypes.byref(l))
else:
selib = ctypes.cdll.LoadLibrary(os.path.join(lib_path, "LinuxDataCollect64.so"))
ret = selib._Z17CTP_GetSystemInfoPcRi(buf, ctypes.byref(l))
if ret == 0:
return base64.b64encode(buf.raw[:l.value]).decode("utf-8")
else:
raise Exception("错误码: %d" % ret)
else:
logging.getLogger("TqApi.TqAccount").debug("ctpse error", error="不支持该平台")
except Exception as e:
self._api._print(f"采集穿透式监管客户端信息失败: {e}", level="ERROR")
logging.getLogger("TqApi.TqAccount").error("ctpse error", error=e)
return ""
async def _send_login_pack(self):
req = {
"aid": "req_login",
"bid": self._broker_id,
"user_name": self._account_id,
"password": self._password,
}
mac = f"{uuid.getnode():012X}"
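        # uuid.getnode() returns the 48-bit MAC address as an int; format it as
        # six hyphen-separated hex byte pairs (e.g. "AA-BB-CC-DD-EE-FF")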
req["client_mac_address"] = "-".join([mac[e:e + 2] for e in range(0, 11, 2)])
system_info = self._get_system_info()
if system_info:
req["client_app_id"] = self._app_id
req["client_system_info"] = system_info
if self._front_broker:
req["broker_id"] = self._front_broker
req["front"] = self._front_url
await self._td_send_chan.send(req)
await self._td_send_chan.send({
"aid": "confirm_settlement"
        })  # automatically send settlement confirmation
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import with_statement
import os
import sys
from setuptools import setup
with open('mapmaker/__init__.py') as f:
for line in f:
if line.startswith('__version__'):
VERSION = line.split('\'')[1]
break
with open('requirements.txt') as f:
required = f.readlines()
with open('README.rst') as f:
long_description = f.read()
setup(
name='mapmaker',
version=VERSION,
author='Alexander Keil',
author_email='[email protected]',
maintainer='Alexander Keil',
url='http://github.com/akeil/mapmaker',
project_urls={
'Source': 'http://github.com/akeil/mapmaker',
'Bug Reports': 'http://github.com/akeil/mapmaker/issues',
},
description='Create map images from slippy map tiles.',
long_description=long_description,
long_description_content_type='text/x-rst',
install_requires=required,
entry_points = {
'console_scripts': [
'mapmaker = mapmaker.main:main',
]
},
license='MIT',
license_file = 'LICENSE.txt',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering :: GIS',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
keywords = 'osm, openstreetmap, tiles, map, image, cli',
python_requires='>=3'
)
|
'''
# Decorator
Applying the validation decorator to a view function:
1. Put @ValidateDecorator() before the view function
2. Define _validate_function(request, context)
3. Validate each field inside _validate_function
@ValidateDecorator()
def function(request):
    id = request.POST['id']
    return render_to_response('')
# Validation methods use the _validate_ prefix
def _validate_function(request, context):
    if not context.validate_numeric('id'):
        context.add_error('Invalid ID!')
'''
'''
ValidateDecorator definition
Finds the _validate_function and the validation context object `context` for a view function decorated for validation
'''
import re
import sys
import logging
from django.http import Http404, HttpResponse, HttpResponseNotFound
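# NOTE: _validate_dec, render_json and the IP helper functions (is_ipv4,
# is_subnet, is_ipsegment, is_ipmask, ip2int, int2ip, ipmasktseg) used below
# are assumed to be defined elsewhere in the project.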
class ValidateDecorator (object):
def __init__(self, **kwargs):
self.args = kwargs
pass
def __call__(self, view_func):
def delegate(request, *args, **kwargs):
try:
context, result = self.__validate(view_func, request, args, kwargs)
if isinstance(result, HttpResponse):
return result
except Http404:
return HttpResponseNotFound()
if not context.has_error():
try:
return view_func(request, *args, **kwargs)
except Exception, e:
logging.exception(e)
raise e
else:
result = dict(actionErrors = context.get_errors(), fieldErrors = context.get_fielderrors())
return render_json(result, False)
return delegate
def __validate(self, view_func, request, args, kwargs):
validate_func = self.__lookup_validator(view_func)
context = ValidatorContext(request)
result = None
if validate_func != None:
result = validate_func(request, context, *args, **kwargs)
return context, result
def __lookup_validator(self, view_func):
logging.info('#########')
logging.info(sys.modules[view_func.__module__])
if self.args.has_key('validator'):
return self.args['validator']
else:
mod = sys.modules[view_func.__module__]
if hasattr(mod, '_validate_%s' % view_func.__name__):
return getattr(mod, '_validate_%s' % view_func.__name__)
else:
return None
'''
验证上下文定义
'''
class ValidatorContext(object):
def __init__(self, request):
self.request = request
if 'GET' == request.META.get('REQUEST_METHOD'):
params = request.GET
elif 'POST' == request.META.get('REQUEST_METHOD'):
params = request.POST
from common.web.http import query2dict
self.query = query2dict(params)
self._field_errors = {}
self._errors = []
def add_error(self, msg):
self._errors.append(msg)
def add_fielderror(self, field, msg):
stack = []
if self._field_errors.has_key(field):
stack = self._field_errors[field]
stack.append(msg)
self._field_errors[field] = stack
def has_error(self):
return len(self._errors) > 0 or len(self._field_errors.keys()) > 0
def get_errors(self):
return self._errors
def clear_errors(self):
self._errors = []
def get_fielderrors(self):
return self._field_errors
def clear_fielderrors(self):
self._field_errors = {}
@_validate_dec
def validate_presented(self, field, msg = None):
"""
        Validate that the field is not empty
"""
value = self._parse_value(field)
if value == None:
return False
if type(value) in (str, unicode):
return value.strip() != ''
if type(value) in (list, tuple):
return len(value) > 0
@_validate_dec
def validate_dev(self, field, msg = None):
"""
        Validate Chinese/English characters, digits, whitespace and -_().,; length at most 100
"""
value = self._parse_value(field)
if not value:
return False
return re.match(u'[\w\u4e00-\u9fa5\s\-\(\)\.\,]{1,100}', value)
@_validate_dec
def validate_dev_dec(self, field, msg = None):
"""
        Validate Chinese/English characters, digits, whitespace and -_().,; length at most 500
"""
value = self._parse_value(field)
if not value:
return False
return re.match(u'[\w\u4e00-\u9fa5\s\-\(\)\.\,]{0,500}', value)
@_validate_dec
def validate_fieldequals(self, field, field2, msg = None):
"""
        Validate that two fields are equal
"""
return self._parse_value(field) != self._parse_value(field2)
@_validate_dec
def validate_equals(self, field, value, msg = None):
"""
        Validate whether the value of field equals value
"""
fieldvalue = self._parse_value(field)
return fieldvalue != str(value)
@_validate_dec
def validate_format(self, field, pattern, not_match = False, msg = None):
"""
        Validate that the value matches the regular expression 'pattern'
"""
value = self._parse_value(field)
matched = re.match(pattern, value)
return not_match and not matched or matched != None
@_validate_dec
def validate_numeric(self, field, msg = None):
"""
        Validate that the value is an integer
"""
value = self._parse_value(field)
if not value:
return False
return re.match(r'^[0-9]+$', value)
@_validate_dec
def validate_range(self, field, min = None, max = None, msg = None):
"""
        Validate that the value is within the given numeric range
"""
if min == None and max == None:
            assert False, 'At least one of min and max must be provided'
value = self._parse_value(field)
flag = True
if type(min) == float or type(max) == float:
value = float(value)
elif type(min) == int or type(max) == int:
value = int(value)
if max == None:
flag = value > min
elif min == None:
flag = max > value
else:
flag = min < value < max
return flag
@_validate_dec
def validate_ipv4(self, field, msg = None):
"""
        Validate IPv4 address format
"""
value = self._parse_value(field)
return is_ipv4(value)
@_validate_dec
def validate_subnet(self, field, msg = None):
"""
        Validate subnet format
"""
value = self._parse_value(field)
return is_subnet(value)
@_validate_dec
def validate_ipsegment(self, field, msg = None):
"""
        Validate IP segment (range) format
"""
value = self._parse_value(field)
return is_ipsegment(value)
@_validate_dec
def validate_iprange(self, field, msg = None):
"""
        Validate that the value is an IPv4 address, a subnet, or an IP segment
"""
value = self._parse_value(field)
return is_ipv4(value) or is_subnet(value) or is_ipsegment(value)
@_validate_dec
def validate_ipranges(self, field, separator_pattern = r"\n", msg = None):
"""
        Validate that every entry is an IPv4 address, a subnet, or an IP segment
"""
values = self._parse_value(field)
for ip_range in re.split(separator_pattern, values):
            if not (is_ipv4(ip_range) or is_subnet(ip_range) or is_ipsegment(ip_range)):
return False
else:
return True
@_validate_dec
def validate_ga(self,field,ip,mask, msg = None):
"""
        Validate gateway address format
"""
value = self._parse_value(field)
ip = self._parse_value(ip)
mask = self._parse_value(mask)
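        # compute the network address (ip & mask); ipmasktseg presumably returns
        # the (start, end) of that subnet's address range, and the gateway must
        # fall inside it (at or above the network address, below the broadcast)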
in_ip = ip2int(ip)
in_mask = ip2int(mask)
ipra = in_ip & in_mask
ra = ipmasktseg(int2ip(ipra),mask)
i = ip2int(value)
if i<(ra[1]-1) and i>=ra[0]:
return True
else:
return False
@_validate_dec
def validate_portnum(self, field, msg = None):
"""
        Validate the port number
"""
value = self._parse_value(field)
return self.validate_numeric(field) and (0 <= int(value) < 65536)
@_validate_dec
def validate_mask(self, field, msg = None):
value = self._parse_value(field)
return is_ipmask(value)
@_validate_dec
def validate_strlen(self, field, min, max, msg = None):
"""
        Validate string length
"""
if min == None and max == None:
            assert False, 'At least one of min and max must be provided'
value = self._parse_value(field)
if value == None:
return False
strlen = len(value)
        if max == None:
            flag = strlen >= min
        elif min == None:
            flag = strlen <= max
        else:
            flag = min <= strlen <= max
return flag
@_validate_dec
def validate_email(self, field, msg = None):
"""
        Validate email address
"""
pattern = r'^[^@]+@([-\w]+\.)+[A-Za-z]{2,4}$'
value = self._parse_value(field)
return value != None and re.match(pattern, value)
|
from django.core.exceptions import ImproperlyConfigured
import random
from .arm_wells_support.models import Story
from ._utils import add_n_random_stories_to_well
from ._utils import generate_random_story
from ._utils import generate_random_well
from ._utils import TestCase
from ..views import SimpleWellView
from ..views import QuerySetBackedWellView
class WellViewTestCase(TestCase):
def setUp(self):
super(WellViewTestCase, self).setUp()
self.well = generate_random_well()
class SimpleWellViewTest(WellViewTestCase):
view_class = SimpleWellView
def default_kwargs(self):
return {
"template_name": "index.html",
"well_title": self.well.title,
}
def test_get_well_returns_false_when_allow_empty_is_true(self):
view = self.view_class(well_title="unknown", allow_empty=True)
self.assertFalse(view.get_well())
def test_raises_exception_without_template_name_param(self):
kwargs = self.default_kwargs()
del kwargs["template_name"]
view = self.view_class.as_view(**kwargs)
with self.assertRaises(ImproperlyConfigured) as context_manager:
view(self.factory.get("/"))
self.assertEqual(
"TemplateResponseMixin requires either a definition of "
"'template_name' or an implementation of "
"'get_template_names()'",
context_manager.exception.message)
def test_does_not_raise_if_template_name_is_present(self):
view = self.view_class.as_view(**self.default_kwargs())
view(self.factory.get("/"))
def test_raises_exception_on_no_well_in_params(self):
kwargs = self.default_kwargs()
del kwargs["well_title"]
view = self.view_class.as_view(**kwargs)
self.assertRaises(ImproperlyConfigured, view, self.factory.get("/"))
def test_passes_a_well_to_the_render(self):
view = self.view_class.as_view(**self.default_kwargs())
result = view(self.factory.get("/"))
self.assertInContext('well', self.well, result)
class QuerySetBackedWellViewTest(SimpleWellViewTest):
view_class = QuerySetBackedWellView
def setUp(self):
super(QuerySetBackedWellViewTest, self).setUp()
self.number_in_well = random.randint(1, 5)
add_n_random_stories_to_well(self.number_in_well, self.well)
def default_kwargs(self):
queryset = Story.objects.all()
kwargs = super(QuerySetBackedWellViewTest, self).default_kwargs()
kwargs['queryset'] = queryset
return kwargs
def test_raises_exception_if_no_queryset_provided(self):
kwargs = self.default_kwargs()
del kwargs["queryset"]
view = self.view_class(**kwargs)
with self.assertRaises(ImproperlyConfigured) as context_manager:
view.get_queryset()
expected = u"'%s' must define 'queryset' or 'model'" % (
self.view_class.__name__)
self.assertEqual(expected, context_manager.exception.message)
def test_get_queryset_returns_well_and_backed_queryset(self):
number_of_stories = random.randint(1, 5)
for i in range(number_of_stories):
generate_random_story()
view = self.view_class(**self.default_kwargs())
queryset = view.get_queryset()
expected = number_of_stories + self.number_in_well
self.assertEqual(expected, len(queryset))
def test_get_queryset_returns_raw_queryset_if_there_is_an_empty_well(self):
kwargs = self.default_kwargs()
kwargs.update({
"allow_empty": True,
"well_title": "Unknown and Unknowable",
})
view = self.view_class(**kwargs)
stories = Story.objects.all()
queryset = view.get_queryset()
self.assertEqual(len(stories), len(queryset))
for story in stories:
self.assert_(story in queryset)
def test_passes_a_well_to_the_render(self):
view = self.view_class.as_view(**self.default_kwargs())
result = view(self.factory.get("/"))
self.assertInContext('well', self.well, result)
def test_passes_object_list_to_the_render(self):
kwargs = self.default_kwargs()
view = self.view_class.as_view(**kwargs)
result = view(self.factory.get("/"))
self.assertIn('object_list', result.context_data)
self.assertEqual(
list(result.context_data['object_list']),
list(self.well.items))
|
from gamepack.equipment.Equipment import Equipment
from gamepack.inventory.Inventory import Inventory
from gamepack.moneypouch.MoneyPouch import MoneyPouch
class Player(object):
def __init__(self, name = ''):
self.name = name
self.health = 100
self.defense = 1
self.equipment = Equipment()
self.inventory = Inventory()
self.moneypouch = MoneyPouch()
def get_equipment_stats(self):
return str(self.equipment)
def get_inventory(self):
return str(self.inventory)
def check_moneypouch(self):
return str(self.moneypouch)
def __str__(self):
        return '%s [%i / 100]' % (self.name, self.health)
|
"""
A validator for a frontend failure model. The model contains all
the failing web frontends and their status, as well as the virtual
machines they run on.
"""
from voluptuous import Schema
schema = Schema({
'web_frontends_failures'
})
|
class Question:
'This container Class stores information about a single question'
def __init__(self, dicVal):
self.answer_time = dicVal['answer_time']
self.cat_name = dicVal['cat_name']
self.question_id = dicVal['q_id']
self.timestamp = dicVal['timestamp']
self.cat_id = dicVal['cat_id']
self.question = dicVal['question']
self.wrong_1 = dicVal['wrong1']
self.wrong_2 = dicVal['wrong2']
self.wrong_3 = dicVal['wrong3']
self.correct = dicVal['correct']
self.wrong_1_p = dicVal['stats']['wrong1_answer_percent']
self.wrong_2_p = dicVal['stats']['wrong2_answer_percent']
        self.wrong_3_p = dicVal['stats']['wrong3_answer_percent']
self.correct_p = dicVal['stats']['correct_answer_percent']
def dumpToConsole(self):
print "Dumping Question to Console:"
print "answer_time: " + str(self.answer_time)
print "cat_name : " + str(self.cat_name)
print "question_id: " + str(self.question_id)
print "timestamp : " + str(self.timestamp)
print "cat_id : " + str(self.cat_id)
print "question : " + str(self.question)
print "wrong_1 : " + str(self.wrong_1)
print "wrong_1_p : " + str(self.wrong_1_p)
print "wrong_2 : " + str(self.wrong_2)
print "wrong_2_p : " + str(self.wrong_2_p)
print "wrong_3 : " + str(self.wrong_3)
print "wrong_3_p : " + str(self.wrong_3_p)
print "correct : " + str(self.correct)
        print "correct_p  : " + str(self.correct_p)
def printDetails(self):
print " " + self.cat_name.encode('utf-8') + ": " + self.question.encode('utf-8')
print " Answers:"
print " [1]: " + self.wrong_1.encode('utf-8') + " ( " + str(self.wrong_1_p) + " )"
print " [2]: " + self.wrong_2.encode('utf-8') + " ( " + str(self.wrong_2_p) + " )"
print " [3]: " + self.wrong_3.encode('utf-8') + " ( " + str(self.wrong_3_p) + " )"
print " [4]: " + self.correct.encode('utf-8') + " ( " + str(self.correct_p) + " )"
|
import homeassistant.helpers.config_validation as cv
from homeassistant.const import CONF_ENTITY_ID
import voluptuous as vol
DOMAIN = "setter"
SERVICE_DELETE = "delete"
SERVICE_DELETE_SCHEMA = vol.Schema({vol.Required(CONF_ENTITY_ID): cv.entity_id})
SERVICE_SET = "set"
SERVICE_SET_SCHEMA = vol.Schema(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required("state"): str,
vol.Required("attributes"): dict,
}
)
|
"""
pipeline
~~~~~~~~
This module defines pipeline and combining transformations,
which combine multiple ciphers together to form a complex cipher.
The encryption and decryption algorithms are pipelined to produce
the overall encryption and decryption effect.
- Classes
- Pipeline: Creates a pipeline which combines ciphers in a vertical fashion, where the ciphertext of one
cipher is the plaintext for the next.
- HorizontalPipeline: Creates a pipeline which combines ciphers in a horizontal fashion, where separate
ciphers are applied over same sized chunks from the original plaintext and the results are combined.
Author: Kinshuk Vasisht
Dated : 11/03/2022
"""
import enum
from ... import utils
class Order(enum.Enum):
""" Order of decryption to follow in the pipeline. """
# Natural order: For ciphers C1 and C2 decryption is done using C2 then C1
NATURAL = 1
# Original order: For ciphers C1 and C2 decryption is done using C1 then C2
ORIGINAL = 2
class Pipeline:
"""
Defines a pipeline transformation for ciphers.
Given a set of cipher objects, this transformation creates a new cipher
    whose encryption and decryption operations are equivalent to the respective
encryption and decryption operations of the given ciphers in sequence.
Example usage:
>>> from cipher.components.transforms import Pipeline
>>> from cipher.des import DES
...
>>> des_1 = DES(key = "<some key K1>")
>>> des_2 = DES(key = "<some key K2>")
>>> triple_des = Pipeline([ des_1, des_2, des_1 ])
...
>>> p = "<some 64-bit plaintext>"
# Equivalent to des_1.encrypt(des_2.encrypt(des_1.encrypt(p)))
>>> triple_des.encrypt(p)
"""
def __init__(self, ciphers, order = Order.NATURAL):
""" Creates a new Pipeline cipher instance.
Args:
ciphers (list): List of ciphers or cipher components to pipeline.
order (Order, optional): The order of cipher execution to follow during decryption.
Defaults to Order.NATURAL.
"""
self.ciphers = ciphers
self.process_order = order
def encrypt(self, plaintext: "int | str | bytes | list | tuple"):
""" Encrypts a given plaintext using the pipelined ciphers.
Args:
plaintext (int | str | bytes | list | tuple): The plaintext to encrypt.
Returns:
int | list | bytes: The encrypted ciphertext after the last stage of the pipeline.
"""
ciphertext = plaintext
for cipher in self.ciphers:
ciphertext = cipher.encrypt(ciphertext)
return ciphertext
def decrypt(self, ciphertext: "int | str | bytes | list | tuple"):
""" Decrypts a given ciphertext using the pipelined ciphers used to encrypt.
Args:
ciphertext (int | str | bytes | list | tuple): The ciphertext to decrypt.
Returns:
int | bytes | list: The decrypted plaintext after the last stage of the pipeline.
"""
plaintext = ciphertext
if self.process_order == Order.ORIGINAL:
for cipher in self.ciphers:
plaintext = cipher.decrypt(plaintext)
else:
for cipher in self.ciphers[::-1]:
plaintext = cipher.decrypt(plaintext)
return plaintext
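# Illustrative sketch (not part of the original module): a toy byte-wise shift
# cipher is assumed here purely to show the round trip through Pipeline and
# that Order.NATURAL unwinds the stages in reverse order. Guarded so importing
# the module stays side-effect free.
if __name__ == "__main__":
    class _ShiftCipher:
        """Caesar-style shift over byte values, for demonstration only."""
        def __init__(self, shift):
            self.shift = shift
        def encrypt(self, data):
            return [(b + self.shift) % 256 for b in data]
        def decrypt(self, data):
            return [(b - self.shift) % 256 for b in data]

    demo = Pipeline([_ShiftCipher(3), _ShiftCipher(10)])
    assert demo.decrypt(demo.encrypt([1, 2, 3])) == [1, 2, 3]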
class HorizontalPipeline:
"""
Defines a horizontal pipeline transformation for ciphers.
Given a set of cipher objects, this transformation creates a new cipher
    whose encryption and decryption operations are equivalent to the respective
encryption and decryption operations of the given ciphers over equal-sized
sub-blocks of the data object.
Example Usage:
>>> from cipher.components.transforms import HorizontalPipeline
>>> from cipher.components.substitution import SBox
...
>>> s = [ SBox(...), SBox(...), SBox(...), SBox(...) ]
...
>>> combined_s = HorizontalPipeline(s)
>>> p = "<some 64-bit plaintext>"
# Equivalent to:
# s[0].encrypt(p[0:16]) + s[1].encrypt(p[16:32]) +
    # s[2].encrypt(p[32:48]) + s[3].encrypt(p[48:64])
>>> combined_s.encrypt(p)
"""
def __init__(self, ciphers, input_size, output_size = None):
""" Creates a new HorizontalPipeline instance.
Args:
ciphers (list): List of ciphers or cipher components to pipeline.
input_size (int): The input size of the plaintext, in bits. Helps determine size per segment.
output_size (int, optional): The output size of the ciphertext. Defaults to None.
"""
self.ciphers = ciphers
self.input_size = input_size
self.output_size = output_size or input_size
self.input_chunk_size = input_size // len(ciphers)
self.output_chunk_size = self.output_size // len(ciphers)
def encrypt(self, plaintext: "int | str | bytes | list | tuple"):
""" Encrypts a given plaintext using the pipelined ciphers.
Args:
plaintext (int | str | bytes | list | tuple): The plaintext to encrypt.
Returns:
int | bytes | list: The encrypted ciphertext after passing segments to ciphers in the pipeline.
"""
plaintext_size = utils.input_size(plaintext)
if plaintext_size != self.input_size:
raise ValueError(
self.__class__.__name__ + f": size of plaintext ({plaintext_size}) " +
f"does not match what was expected ({self.input_size})"
)
chunks = utils.n_ary_split(plaintext, self.input_chunk_size, self.input_size)
chunks = [ cipher.encrypt(chunk) for cipher, chunk in zip(self.ciphers, chunks) ]
return utils.n_ary_join(chunks, self.output_chunk_size, self.output_size)
def decrypt(self, ciphertext: "int | str | bytes | list | tuple"):
""" Decrypts a given ciphertext using the pipelined ciphers used to encrypt.
Args:
ciphertext (int | str | bytes | list | tuple): The ciphertext to decrypt.
Returns:
int | bytes | list: The decrypted plaintext after passing segments to ciphers in the pipeline.
"""
ciphertext_size = utils.input_size(ciphertext)
if ciphertext_size != self.output_size:
raise ValueError(
self.__class__.__name__ + f": size of ciphertext ({ciphertext_size}) " +
f"does not match what was expected ({self.output_size})"
)
chunks = utils.n_ary_split(ciphertext, self.output_chunk_size, self.output_size)
chunks = [ cipher.decrypt(chunk) for cipher, chunk in zip(self.ciphers, chunks) ]
        return utils.n_ary_join(chunks, self.input_chunk_size, self.input_size)
|
import io
import pickle
from contextlib import closing
import click
from matplotlib import pyplot as plt
import networkx as nx
import pydot
from sqlalchemy import create_engine, MetaData
from sqlalchemy.orm import Session
from views_query_planning import query_planning, compose_join, query_with_ops
@click.group(name = "vqp")
def vqp():
"""
ViEWS query planning
====================
This CLI exposes core functionality from the ViEWS query planner, making it
possible to inspect and understand the query planning operations taking
place with the library.
Examples:
> Generate a join network and output it to a plot
vqp with postgresql://user@database/dbname join-network - | vqp dot-to-plot - out.png
"""
pass
@vqp.group(name = "with")
@click.argument("connection-string")
@click.option("--schema", default = "public")
@click.pass_context
def with_network(ctx: click.Context, connection_string: str, schema: str):
ctx.ensure_object(dict)
ctx.obj["engine"] = create_engine(connection_string)
ctx.obj["session"] = Session(bind = ctx.obj["engine"])
metadata = MetaData(bind = ctx.obj["engine"], schema = schema)
metadata.reflect()
ctx.obj["metadata"] = metadata
ctx.obj["network"] = query_planning.join_network(ctx.obj["metadata"].tables)
ctx.obj["schema"] = schema
@with_network.command(name = "join-network")
@click.pass_context
@click.argument("file", type = click.File("w"))
def join_network(ctx: click.Context, file: io.BufferedWriter):
file.write(nx.nx_pydot.to_pydot(ctx.obj["network"]).to_string())
@with_network.command(name = "query")
@click.pass_context
@click.argument("level-of-analysis", type = str)
@click.argument("table", type = str)
@click.argument("column-name", type = str)
@click.argument("file", type = click.File("w"))
@click.option("-a","--aggregation-function", type = str, default = "avg")
def query(
ctx: click.Context,
level_of_analysis: str,
table: str,
column_name: str,
aggregation_function: str,
file: io.BufferedWriter):
index_columns = ["id"]
query = query_with_ops(
ctx.obj["session"].query(),
compose_join,
ctx.obj["network"],
level_of_analysis,
table,
column_name,
index_columns,
aggregation_function
)
click.echo(str(query))
@with_network.command(name = "dump")
@click.argument("file", type = click.File("wb"))
@click.pass_context
def dump_db_reflection(ctx: click.Context, file: io.BufferedWriter):
pickle.dump(ctx.obj["metadata"], file)
@vqp.command(name = "dot-to-plot")
@click.argument("dotfile", type = click.File("r"))
@click.argument("outfile", type = click.File("wb"))
def dot_to_plot(dotfile: io.BufferedReader, outfile: io.BufferedWriter):
data = dotfile.read()
click.echo(data)
pydot_graph = pydot.graph_from_dot_data(data)
if pydot_graph is None:
click.echo("failed to read dot graph from data:")
click.echo(data)
return 1
else:
pydot_graph, *_ = pydot_graph
graph = nx.nx_pydot.from_pydot(pydot_graph)
fig,ax = plt.subplots(figsize = (20,20))
nx.draw(graph, with_labels = True, ax = ax)
fig.savefig(outfile)
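# Illustrative invocations (the connection string, level of analysis, table and
# column names below are assumptions, not values shipped with the library):
#   vqp with postgresql://user@host/dbname join-network graph.dot
#   vqp with postgresql://user@host/dbname query country some_table some_column plan.txt
#   vqp dot-to-plot graph.dot graph.png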
|
from django.db import models
from django.contrib.auth.models import User
class account(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
auth_id = models.CharField(max_length=40)
username = models.CharField(max_length=30)
def __str__(self):
return self.username
class phone_number(models.Model):
number = models.CharField(max_length=40)
account = models.ForeignKey(account, on_delete=models.CASCADE)
def __str__(self):
return self.account.username
|
from django.shortcuts import render, get_object_or_404
from django.template.loader import render_to_string
from django.http import JsonResponse
from django.views import View
from elasticsearch_dsl.query import Q
from cms_pages.models import NewsPageTag
from catalog.models import Address, Ownership
from catalog.api import hybrid_response
from catalog.paginator import paginated_search
from catalog.elastic_models import (
Ownership as ElasticOwnership,
Address as ElasticAddress
)
from cms_pages.models import NewsPage
class SuggestView(View):
def get(self, request):
q = request.GET.get('q', '').strip()
suggestions = []
seen = set()
s = ElasticOwnership.search().source(
['names_autocomplete']
).highlight('names_autocomplete').highlight_options(
order='score', fragment_size=100,
number_of_fragments=10,
pre_tags=['<strong>'],
post_tags=["</strong>"]
)
s = s.query(
"bool",
must=[
Q(
"match",
names_autocomplete={
"query": q,
"operator": "and"
}
)
],
should=[
Q(
"match_phrase",
names_autocomplete__raw={
"query": q,
"boost": 2
},
),
Q(
"match_phrase_prefix",
names_autocomplete__raw={
"query": q,
"boost": 2
},
)
]
)[:200]
res = s.execute()
for r in res:
if "names_autocomplete" in r.meta.highlight:
for candidate in r.meta.highlight["names_autocomplete"]:
if candidate.lower() not in seen:
suggestions.append(candidate)
seen.add(candidate.lower())
rendered_result = [
render_to_string("autocomplete.jinja", {
"result": {
"hl": k
}
})
for k in suggestions[:20]
]
return JsonResponse(rendered_result, safe=False)
def address_details(request, slug):
address = get_object_or_404(
Address, slug=slug
)
# Todo: cache
tags = {t["tag__name"].lower(): {
"slug": t["tag__slug"],
"name": t["tag__name"]}
for t in NewsPageTag.objects.select_related("tag").values(
"tag__slug", "tag__name")}
return render(
request,
"address_details.jinja",
{
"address": address,
"tags": tags,
"ownerships": Ownership.objects.filter(
prop__address=address).order_by("prop_id", "owner", "pk")
}
)
def addresses_by_city(request):
addresses = Address.objects.order_by("city", "title")
return render(
request,
"by_city.jinja",
{
"addresses": addresses,
}
)
def latest_addresses(request):
addresses = Address.objects.order_by("-date_added")
return render(
request,
"by_latest.jinja",
{
"addresses": addresses,
}
)
def _ownership_search(request):
query = request.GET.get("q", "")
fields_to_search = [
"owner", "asset", "ownership_ground", "ownership_form", "share",
"comment", "mortgage_charge", "mortgage_details",
"mortgage_charge_subjects", "mortgage_holder", "mortgage_mortgagor",
"mortgage_guarantor", "mortgage_other_persons", "persons",
"companies", "addresses"]
if query:
ownerships = ElasticOwnership.search().query(
"multi_match", query=query, operator="and",
fields=fields_to_search
)
if ownerships.count() == 0:
# PLAN B, PLAN B
ownerships = ElasticOwnership.search().query(
"multi_match", query=query,
operator="or",
minimum_should_match="2",
fields=fields_to_search
)
else:
ownerships = ElasticOwnership.search().query("match_all")
return paginated_search(request, ownerships)
def _addresses_search(request):
query = request.GET.get("q", "")
if query:
addresses = ElasticAddress.search().query(
"match", _all={"query": query, "operator": "and"}
)
if addresses.count() == 0:
# PLAN B, PLAN B
addresses = ElasticAddress.search().query(
"match", _all={
"query": query,
"operator": "or",
"minimum_should_match": "2"
},
)
else:
addresses = ElasticAddress.search().query("match_all")
return paginated_search(request, addresses)
def _news_search(request):
query = request.GET.get("q", "")
if query:
return NewsPage.objects.search(query)
else:
return None
@hybrid_response("search.jinja")
def search(request, sources=["ownerships", "addresses", "news"]):
query = request.GET.get("q", "")
res = {
"query": query,
}
if "ownerships" in sources:
res["ownerships"] = _ownership_search(request)
if "addresses" in sources:
res["addresses"] = _addresses_search(request)
if "news" in sources:
res["news_results"] = _news_search(request)
return res
|
"""
Capstone Project. Code to run on the EV3 robot (NOT on a laptop).
Author: Your professors (for the framework)
and Marcus Hughes-Oliver
Winter term, 2018-2019.
"""
import rosebot
import mqtt_remote_method_calls as com
import time
import shared_gui_delegate_on_robot
def main():
"""
This code, which must run on the EV3 ROBOT:
    1. Makes the EV3 robot do various things.
2. Communicates via MQTT with the GUI code that runs on the LAPTOP.
"""
# run_test_get_color()
# run_test_not_color()
# run_test_color_intense_greater()
# run_test_color_intense_less()
# run_test_arm_calibrate()
# run_test_lower_arm()
# run_test_stop()
real_thing()
def run_test_arm_raise():
robot=rosebot.RoseBot()
robot.arm_and_claw.raise_arm()
def run_test_arm_calibrate():
robot=rosebot.RoseBot()
robot.arm_and_claw.calibrate_arm()
def run_test_move_arm_to_position():
robot=rosebot.RoseBot()
robot.arm_and_claw.move_arm_to_position(0)
def run_test_lower_arm():
robot=rosebot.RoseBot()
robot.arm_and_claw.lower_arm()
def run_test_go():
robot=rosebot.RoseBot()
print("go")
robot.drive_system.go(100,100)
def run_test_go_straight_for_seconds():
robot=rosebot.RoseBot()
print("go straight for seconds")
robot.drive_system.go_straight_for_seconds(100, 100)
def run_test_stop():
robot=rosebot.RoseBot()
time.sleep(3)
print("stop")
robot.drive_system.stop()
def run_test_go_straight_for_inches_using_time():
robot = rosebot.RoseBot()
print("go straight for inches using time")
robot.drive_system.go_straight_for_inches_using_time(50, 100)
def run_test_go_straight_for_inches_using_encoder():
robot = rosebot.RoseBot()
print("go straight for inches using encoder")
robot.drive_system.go_straight_for_inches_using_encoder(50,100)
def run_test_beeper():
robot=rosebot.Beeper()
robot.beep(3)
def run_test_tone_maker():
robot=rosebot.ToneMaker()
robot.tone(300, 3)
def run_test_speak_maker():
robot=rosebot.SpeechMaker()
robot.speak("don't make me sing")
def run_test_color_intense_greater():
robot = rosebot.RoseBot()
print('go straight until intensity')
robot.drive_system.go_straight_until_intensity_is_greater_than(20, 25)
def run_test_color_intense_less():
robot = rosebot.RoseBot()
print('go straight until less intense')
robot.drive_system.go_straight_until_intensity_is_less_than(70, 25)
def run_test_get_color():
robot = rosebot.RoseBot()
print('go straight till color')
robot.drive_system.go_straight_until_color_is('Black', 25)
def run_test_not_color():
robot = rosebot.RoseBot()
print('go straight till not color')
robot.drive_system.go_straight_until_color_is_not('White', 25)
def my_stuff():
robot = rosebot.SensorSystem()
robo = rosebot.Beeper()
rob = rosebot.RoseBot()
disty = 0
while True:
dist = int(robot.ir_proximity_sensor.get_distance_in_inches())
robo.beep()
# print('beep')
if disty > dist:
break
disty = dist
rob.drive_system.stop()
def real_thing():
robot = rosebot.RoseBot()
r = None
delegate = shared_gui_delegate_on_robot.Handler(robot, r)
r = com.MqttClient(delegate)
delegate.r = r
r.connect_to_pc()
while True:
time.sleep(.01)
if delegate.is_time_to_stop:
break
# -----------------------------------------------------------------------------
# Calls main to start the ball rolling.
# -----------------------------------------------------------------------------
main()
|
import argparse
import logging
import os
import anndata
import numpy as np
import pandas as pd
import scipy.sparse
from cirrocumulus.anndata_util import get_scanpy_marker_keys, datasets_schema, DataType
from cirrocumulus.io_util import get_markers, filter_markers, add_spatial, SPATIAL_HELP, unique_id
from cirrocumulus.util import to_json, get_fs
logger = logging.getLogger("cirro")
cluster_fields = ['anno', 'cell_type', 'celltype', 'leiden', 'louvain', 'seurat_cluster', 'cluster']
categorical_fields_convert = ['seurat_clusters']
def read_adata(path, backed=False, spatial_directory=None, use_raw=False):
if path.lower().endswith('.loom'):
adata = anndata.read_loom(path)
elif path.lower().endswith('.zarr'):
adata = anndata.read_zarr(path)
else:
adata = anndata.read(path, backed=backed)
if use_raw and adata.raw is not None and adata.shape[0] == adata.raw.shape[0]:
logger.info('Using adata.raw')
adata = anndata.AnnData(X=adata.raw.X, var=adata.raw.var, obs=adata.obs, obsm=adata.obsm, uns=adata.uns)
if spatial_directory is not None:
if not add_spatial(adata, spatial_directory):
logger.info('No spatial data found in {}'.format(spatial_directory))
def fix_column_names(df):
rename = {}
for c in df.columns:
if c.find(' ') != -1:
rename[c] = c.replace(' ', '_')
return df.rename(rename, axis=1) if len(rename) > 0 else df
adata.obs = fix_column_names(adata.obs)
adata.var = fix_column_names(adata.var)
for field in categorical_fields_convert:
if field in adata.obs and not pd.api.types.is_categorical_dtype(adata.obs[field]):
logger.info('Converting {} to categorical'.format(field))
adata.obs[field] = adata.obs[field].astype('category')
for key in adata.obsm:
if key.find(' ') != -1:
new_key = key.replace(' ', '_')
adata.obsm[new_key] = adata.obsm[key]
del adata.obsm[key]
if not backed and scipy.sparse.issparse(adata.X) and scipy.sparse.isspmatrix_csr(adata.X):
adata.X = adata.X.tocsc()
return adata
#
# def make_unique(index, join='-1'):
# index = index.str.replace('/', '_')
# lower_index = index.str.lower()
# if lower_index.is_unique:
# return index
# from collections import defaultdict
#
# indices_dup = lower_index.duplicated(keep="first")
# values_dup_lc = lower_index.values[indices_dup]
# values_dup = index.values[indices_dup]
# counter = defaultdict(lambda: 0)
# for i, v in enumerate(values_dup_lc):
# counter[v] += 1
# values_dup[i] += join + str(counter[v])
# values = index.values
# values[indices_dup] = values_dup
# index = pd.Index(values)
# return index
class PrepareData:
def __init__(self, datasets, output, dimensions=None, groups=[], group_nfeatures=10, markers=[],
output_format='parquet', no_auto_groups=False, save_whitelist=None):
self.datasets = datasets
self.groups = groups
self.group_nfeatures = group_nfeatures
self.markers = markers
self.output_format = output_format
self.no_auto_groups = no_auto_groups
self.save_whitelist = save_whitelist
for dataset in datasets:
for key in list(dataset.obsm.keys()):
m = dataset.obsm[key]
dim = m.shape[1]
if not (1 < dim <= 3):
del dataset.obsm[key]
data_type2_datasets = {}
for i in range(len(datasets)):
dataset = datasets[i]
if i > 0:
name = dataset.uns.get('name', 'dataset {}'.format(i + 1))
prefix = name + '-'
dataset.var.index = prefix + dataset.var.index.astype(str)
# ensure cell ids are the same
if not np.array_equal(datasets[0].obs.index, dataset.obs.index):
raise ValueError('{} obs ids are not equal'.format(name))
if dataset.uns.get('data_type') is None and dataset.uns.get('name', '').lower().startswith(
'module'): # TODO hack
dataset.uns['data_type'] = DataType.MODULE
data_type = dataset.uns.get('data_type')
dataset_list = data_type2_datasets.get(data_type)
if dataset_list is None:
dataset_list = []
data_type2_datasets[data_type] = dataset_list
dataset_list.append(dataset)
datasets = []
for data_type in data_type2_datasets:
dataset_list = data_type2_datasets[data_type]
dataset = anndata.concat(dataset_list, axis=1) if len(dataset_list) > 1 else dataset_list[0]
dataset.var.index = dataset.var.index.str.replace('/', '_')
dataset.var_names_make_unique()
dataset.obs.index.name = 'index'
dataset.var.index.name = 'index'
datasets.append(dataset)
primary_dataset = datasets[0]
for i in range(1, len(datasets)):
dataset = datasets[i]
# mark duplicate in obs, then delete after computing DE
obs_duplicates = []
dataset.uns['cirro_obs_delete'] = obs_duplicates
for key in list(dataset.obs.keys()):
if key in primary_dataset.obs.columns and dataset.obs[key].equals(primary_dataset.obs[key]):
obs_duplicates.append(key)
else:
dataset.obs[prefix + key] = dataset.obs[key]
del dataset.obs[key]
for key in list(dataset.obsm.keys()):
if key in primary_dataset.obsm and np.array_equal(dataset.obsm[key], primary_dataset.obsm[key]):
del dataset.obsm[key]
else:
dataset.obsm[prefix + key] = dataset.obsm[key] # rename
del dataset.obsm[key]
self.base_output = output
dimensions_supplied = dimensions is not None and len(dimensions) > 0
self.dimensions = [] if not dimensions_supplied else dimensions
self.measures = []
self.others = []
for dataset in datasets:
for i in range(len(dataset.obs.columns)):
name = dataset.obs.columns[i]
c = dataset.obs[name]
if pd.api.types.is_object_dtype(c):
dataset.obs[name] = dataset.obs[name].astype('category')
c = dataset.obs[name]
if not dimensions_supplied and pd.api.types.is_categorical_dtype(c):
if 1 < len(c.cat.categories) < 2000:
self.dimensions.append(name)
if c.isna().sum() > 0:
logger.info('Replacing nans in {}'.format(name))
dataset.obs[name] = dataset.obs[name].astype(str)
dataset.obs.loc[dataset.obs[name].isna(), name] = ''
dataset.obs[name] = dataset.obs[name].astype('category')
else:
self.others.append(name)
elif not pd.api.types.is_string_dtype(c) and not pd.api.types.is_object_dtype(c):
self.measures.append('obs/' + name)
else:
self.others.append(name)
def execute(self):
output_format = self.output_format
if self.groups is None and not self.no_auto_groups:
groups = []
for dataset in self.datasets:
existing_fields = set()
scanpy_marker_keys = get_scanpy_marker_keys(dataset)
for key in scanpy_marker_keys:
existing_fields.add(dataset.uns[key]['params']['groupby'])
for field in dataset.obs.columns:
field_lc = field.lower()
for cluster_field in cluster_fields:
if field_lc.find(cluster_field) != -1 and cluster_field not in existing_fields:
groups.append(field)
break
self.groups = groups
if self.groups is not None and len(self.groups) > 0:
use_pegasus = False
use_scanpy = False
try:
import pegasus as pg
use_pegasus = True
except ModuleNotFoundError:
pass
if not use_pegasus:
try:
import scanpy as sc
use_scanpy = True
except ModuleNotFoundError:
pass
if not use_pegasus and not use_scanpy:
raise ValueError('Please install pegasuspy or scanpy to compute markers')
for dataset in self.datasets:
for group in self.groups:
field = group
if group not in dataset.obs: # test if multiple comma separated fields
split_groups = group.split(',')
if len(split_groups) > 1:
use_split_groups = True
for split_group in split_groups:
if split_group not in dataset.obs:
use_split_groups = False
break
if use_split_groups:
dataset.obs[field] = dataset.obs[split_groups[0]].str.cat(dataset.obs[split_groups[1:]],
sep=',')
if field in dataset.obs:
if not pd.api.types.is_categorical_dtype(dataset.obs[field]):
dataset.obs[field] = dataset.obs[field].astype('category')
if len(dataset.obs[field].cat.categories) > 1:
print('Computing markers for {}'.format(field))
key_added = 'rank_genes_' + str(field)
if use_pegasus:
pg.de_analysis(dataset, cluster=field, de_key=key_added)
else:
sc.tl.rank_genes_groups(dataset, field, key_added=key_added, method='t-test')
# remove duplicate obs fields after DE
for dataset in self.datasets:
obs_duplicates = dataset.uns.get('cirro_obs_delete', [])
for key in obs_duplicates:
del dataset.obs[key]
schema = self.get_schema()
schema['format'] = output_format
if output_format in ['parquet', 'zarr']:
output_dir = self.base_output
else:
output_dir = os.path.splitext(self.base_output)[0]
filesystem = get_fs(output_dir)
filesystem.makedirs(output_dir, exist_ok=True)
results = schema.get('results', [])
if len(results) > 0:
uns_dir = os.path.join(output_dir, 'uns')
is_gzip = output_format != 'jsonl'
filesystem.makedirs(uns_dir, exist_ok=True)
for i in range(len(results)):
full_result = results[i]
result_id = full_result.pop('id')
# keep id, name, type in schema, store rest in file
results[i] = dict(id=result_id, name=full_result.pop('name'), type=full_result.pop('type'),
content_type='application/json', content_encoding='gzip' if is_gzip else None)
result_path = os.path.join(uns_dir, result_id + '.json.gz') if is_gzip else os.path.join(uns_dir,
result_id + '.json')
with filesystem.open(result_path, 'wt', compression='gzip' if is_gzip else None) as out:
out.write(to_json(full_result))
for dataset in self.datasets:
images = dataset.uns.get('images')
if images is not None:
image_dir = os.path.join(output_dir, 'images')
filesystem.makedirs(image_dir, exist_ok=True)
for image in images:
src = image['image']
dest = os.path.join(image_dir, os.path.basename(src))
filesystem.copy(src, dest)
image['image'] = 'images/' + os.path.basename(src)
if output_format == 'parquet':
from cirrocumulus.parquet_output import save_datasets_pq
save_datasets_pq(self.datasets, schema, self.base_output, filesystem, self.save_whitelist)
elif output_format == 'jsonl':
from cirrocumulus.jsonl_io import save_datasets_jsonl
save_datasets_jsonl(self.datasets, schema, output_dir, self.base_output, filesystem)
elif output_format == 'zarr':
from cirrocumulus.zarr_output import save_datasets_zarr
save_datasets_zarr(self.datasets, schema, self.base_output, filesystem, self.save_whitelist)
elif output_format == 'h5ad':
from cirrocumulus.h5ad_output import save_datasets_h5ad
save_datasets_h5ad(self.datasets, schema, self.base_output, filesystem, self.save_whitelist)
else:
raise ValueError("Unknown format")
def get_schema(self):
result = datasets_schema(self.datasets)
markers = result.get('markers', [])
if self.markers is not None: # add results specified from file
markers += get_markers(self.markers)
markers = filter_markers(self.datasets[0], markers) # TODO check if markers are in union of all features
for marker in markers:
if marker.get('id') is None:
marker['id'] = unique_id()
marker['readonly'] = True
result['markers'] = markers
result['format'] = self.output_format
return result
def main(argsv):
parser = argparse.ArgumentParser(
description='Prepare a dataset for cirrocumulus server.')
parser.add_argument('dataset', help='Path to a h5ad, loom, or Seurat file', nargs='+')
parser.add_argument('--out', help='Path to output directory')
parser.add_argument('--format', help='Output format', choices=['parquet', 'jsonl', 'zarr'],
default='parquet')
parser.add_argument('--whitelist',
help='Optional whitelist of fields to save. Only applies when output format is parquet',
choices=['obs', 'obsm', 'X'],
action='append')
parser.add_argument('--backed', help='Load h5ad file in backed mode', action='store_true')
parser.add_argument('--markers',
                        help='Path to JSON file of precomputed markers that maps name to features. For example {"a":["gene1", "gene2"], "b":["gene3"]}',
action='append')
parser.add_argument('--no-auto-groups', dest='no_auto_groups',
help='Disable automatic cluster field detection to compute differential expression results for',
action='store_true')
parser.add_argument('--groups',
help='List of groups to compute markers for (e.g. louvain). Note that markers created with scanpy or cumulus are automatically included.',
action='append')
parser.add_argument('--group_nfeatures', help='Number of marker genes/features to include', type=int, default=10)
parser.add_argument('--spatial', help=SPATIAL_HELP)
args = parser.parse_args(argsv)
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())
out = args.out
no_auto_groups = args.no_auto_groups
save_whitelist = args.whitelist
input_datasets = args.dataset # multimodal
output_format = args.format
if out is None:
out = os.path.splitext(os.path.basename(input_datasets[0]))[0]
if out.endswith('/'):
out = out[:len(out) - 1]
output_format2extension = dict(parquet='.cpq', jsonl='.jsonl', zarr='.zarr', h5ad='.h5ad')
if not out.lower().endswith(output_format2extension[output_format]):
out += output_format2extension[output_format]
datasets = []
tmp_files = []
for input_dataset in input_datasets:
use_raw = False
if input_dataset.lower().endswith('.rds'):
import subprocess
import tempfile
import pkg_resources
_, h5_file = tempfile.mkstemp(suffix='.h5ad')
os.remove(h5_file)
subprocess.check_call(
['Rscript', pkg_resources.resource_filename("cirrocumulus", 'seurat2h5ad.R'), input_dataset, h5_file])
input_dataset = h5_file
tmp_file = h5_file
use_raw = True
tmp_files.append(tmp_file)
adata = read_adata(input_dataset, backed=args.backed, spatial_directory=args.spatial, use_raw=use_raw)
datasets.append(adata)
adata.uns['name'] = os.path.splitext(os.path.basename(input_dataset))[0]
prepare_data = PrepareData(datasets=datasets, output=out, dimensions=args.groups, groups=args.groups,
group_nfeatures=args.group_nfeatures,
markers=args.markers, output_format=output_format, no_auto_groups=no_auto_groups,
save_whitelist=save_whitelist)
prepare_data.execute()
for tmp_file in tmp_files:
os.remove(tmp_file)
if __name__ == '__main__':
import sys
    main(sys.argv[1:])
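# Illustrative command line (the file, output and group names are assumptions):
#   python prepare_data.py example.h5ad --out example_prepared --format parquet --groups louvain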
|
import ROOT as r
from glob import glob
from os import path, makedirs
from argparse import ArgumentParser
r.gSystem.Load('libFramework.so')
def main():
# Parse
parser = ArgumentParser()
parser.add_argument('-i', dest='infile')
parser.add_argument('-o', dest='outdir')
args = parser.parse_args()
# Skim
if not path.exists(args.outdir): makedirs(args.outdir)
trigger_file(args.infile, args.outdir)
def trigger_file(rfile, outdir):
    # The two constants we need
cut = 3_000 # MeV
maxLayer = 20
# Load original
ogTree = load( rfile )
ecalRecHits = addBranch(ogTree, 'EcalHit', 'EcalRecHits_v12')
# Tskim file and tree
outfile = outdir + '/' + rfile.split('/')[-1][:-5] + '_tskim.root'
tskimF = r.TFile( outfile, 'RECREATE')
tskimT = ogTree.CloneTree(0)
# Loop over events
    for entry in range(ogTree.GetEntries()):
ogTree.GetEntry(entry)
# Check trigger pass
energy_tot = 0
for hit in ecalRecHits:
if hit.getEnergy() < 0: continue
if layer(hit) < maxLayer:
energy_tot += hit.getEnergy()
# If passes, fill new tree
if energy_tot < cut:
tskimT.Fill()
# Wrap up
tskimF.cd()
tskimT.Write()
tskimF.Close()
def load(fil,treeName='LDMX_Events'):
# Load ROOT tree
twee = r.TChain(treeName)
twee.Add(fil)
return twee
def addBranch(tree, ldmx_class, branch_name):
""" Add a new branch to read from """
if ldmx_class == 'EventHeader': branch = r.ldmx.EventHeader()
elif ldmx_class == 'EcalVetoResult': branch = r.ldmx.EcalVetoResult()
elif ldmx_class == 'HcalVetoResult': branch = r.ldmx.HcalVetoResult()
elif ldmx_class == 'TriggerResult': branch = r.ldmx.TriggerResult()
elif ldmx_class == 'SimParticle': branch = r.map(int, 'ldmx::'+ldmx_class)()
else: branch = r.std.vector('ldmx::'+ldmx_class)()
tree.SetBranchAddress(branch_name,r.AddressOf(branch))
return branch
def layer(hit):
""" Get layerID from ecal hit """
LAYER_MASK = 0x3F # space for up to 64 layers
LAYER_SHIFT = 17
return (hit.getID() >> LAYER_SHIFT) & LAYER_MASK
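# Illustrative sanity check (not part of the skim): encode layer 5 at bit 17,
# then unpack it with the same mask and shift used in layer().
assert ((5 << 17) >> 17) & 0x3F == 5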
if __name__ == '__main__': main()
|
"""
Script that blinks an LED wired to GPIO pin 17.
"""
from gpiozero import LED
from time import sleep
red = LED(17)
while True:
red.on()
sleep(1)
red.off()
sleep(1)
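# Equivalent sketch using gpiozero's built-in helper (same wiring on GPIO 17
# assumed); blink() toggles the LED from a background thread:
#   red.blink(on_time=1, off_time=1)
#   from signal import pause; pause()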
|
BOT_TOKEN = "1629027959:AAEaTw4s2qaAL3mYP3fQRnE"
RESULTS_COUNT = 4 # NOTE Number of results to show, 4 is better
SUDO_CHATS_ID = [-1001477025068, -1001440596979]
DRIVE_NAME = [
"Root", # folder 1 name
"Cartoon", # folder 2 name
"Course", # folder 3 name
"Movies", # ....
"Series", # ......
"Others" # and soo onnnn folder n names
]
DRIVE_ID = [
"1B9A3QqQqF31IuW2om3Qhr-wkiVLloxw8", # folder 1 id
"12wNJTjNnR-CNBOTnLHqe-1vqFvCRLecn", # folder 2 id
"11nZcObsJJHojHYg43dBS0_eVvJrSD7Nf", # and so onn... folder n id
"10_hTMK8HE8k144wOTth_3x1hC2kZL-LR",
"1-oTctBpyFcydDNiptLL09Enwte0dClCq",
"1B9A3QqQqF31IuW2om3Qhr-wkiVLloxw8"
]
INDEX_URL = [
"https://dl.null.tech/0:", # folder 1 index link
"https://dl.null.tech/0:/Cartoon", # folder 2 index link
"https://dl.null.tech/0:/Course", # and soo on folder n link
"https://dl.null.tech/0:/MOVIES",
"https://dl.null.tech/0:/Series",
"https://dl.null.tech/0:/Roms"
]
|
# Copyright (c) Moshe Zadka
# See LICENSE for details.
extensions = ['sphinx.ext.mathjax']
master_doc = 'index'
project = 'Calculus 101'
copyright = 'Copyright (c) 2015, Moshe Zadka'
author = 'Moshe Zadka'
latex_elements = dict(preamble='\\usepackage{amsfonts}\n')
|
import tensorflow as tf
def custom_model():
inputs = tf.keras.layers.Input(shape=(4, 1), name="input")
x = tf.keras.layers.Flatten()(inputs)
outputs = tf.keras.layers.Dense(3, name="output")(x)
return tf.keras.Model(inputs=inputs, outputs=outputs, name="simple-model")
def loss(labels, predictions):
return tf.reduce_mean(
tf.nn.sparse_softmax_cross_entropy_with_logits(
tf.cast(tf.reshape(labels, [-1]), tf.int32), predictions
)
)
def optimizer(lr=0.1):
return tf.optimizers.SGD(lr)
def eval_metrics_fn():
return {
"accuracy": lambda labels, predictions: tf.equal(
tf.argmax(predictions, 1, output_type=tf.int32),
tf.cast(tf.reshape(labels, [-1]), tf.int32),
)
}
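# Illustrative sketch (not part of the original module): push one dummy batch
# through the pieces above to check shapes; the batch size and labels are made up.
if __name__ == "__main__":
    model = custom_model()
    features = tf.random.normal((2, 4, 1))
    labels = tf.constant([[0], [2]])
    logits = model(features)
    print("loss:", float(loss(labels, logits)))
    print("accuracy fn output:", eval_metrics_fn()["accuracy"](labels, logits).numpy())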
|
from xml.etree.ElementTree import ElementTree
import numpy as np
import pytest
from deepocr.io import elements
def _mock_words(size=(1., 1.), offset=(0, 0), confidence=0.9):
return [
elements.Word("hello", confidence, (
(offset[0], offset[1]),
(size[0] / 2 + offset[0], size[1] / 2 + offset[1])
)),
elements.Word("world", confidence, (
(size[0] / 2 + offset[0], size[1] / 2 + offset[1]),
(size[0] + offset[0], size[1] + offset[1])
))
]
def _mock_artefacts(size=(1, 1), offset=(0, 0), confidence=0.8):
sub_size = (size[0] / 2, size[1] / 2)
return [
elements.Artefact("qr_code", confidence, (
(offset[0], offset[1]),
(sub_size[0] + offset[0], sub_size[1] + offset[1])
)),
elements.Artefact("qr_code", confidence, (
(sub_size[0] + offset[0], sub_size[1] + offset[1]),
(size[0] + offset[0], size[1] + offset[1])
)),
]
def _mock_lines(size=(1, 1), offset=(0, 0)):
sub_size = (size[0] / 2, size[1] / 2)
return [
elements.Line(_mock_words(size=sub_size, offset=offset)),
elements.Line(_mock_words(size=sub_size, offset=(offset[0] + sub_size[0], offset[1] + sub_size[1]))),
]
def _mock_blocks(size=(1, 1), offset=(0, 0)):
sub_size = (size[0] / 4, size[1] / 4)
return [
elements.Block(
_mock_lines(size=sub_size, offset=offset),
_mock_artefacts(size=sub_size, offset=(offset[0] + sub_size[0], offset[1] + sub_size[1]))
),
elements.Block(
_mock_lines(size=sub_size, offset=(offset[0] + 2 * sub_size[0], offset[1] + 2 * sub_size[1])),
_mock_artefacts(size=sub_size, offset=(offset[0] + 3 * sub_size[0], offset[1] + 3 * sub_size[1])),
),
]
def _mock_pages(block_size=(1, 1), block_offset=(0, 0)):
return [
elements.Page(_mock_blocks(block_size, block_offset), 0, (300, 200),
{"value": 0., "confidence": 1.}, {"value": "EN", "confidence": 0.8}),
elements.Page(_mock_blocks(block_size, block_offset), 1, (500, 1000),
{"value": 0.15, "confidence": 0.8}, {"value": "FR", "confidence": 0.7}),
]
def test_element():
with pytest.raises(KeyError):
elements.Element(sub_elements=[1])
def test_word():
word_str = "hello"
conf = 0.8
geom = ((0, 0), (1, 1))
word = elements.Word(word_str, conf, geom)
# Attribute checks
assert word.value == word_str
assert word.confidence == conf
assert word.geometry == geom
# Render
assert word.render() == word_str
# Export
assert word.export() == {"value": word_str, "confidence": conf, "geometry": geom}
# Repr
assert word.__repr__() == f"Word(value='hello', confidence={conf:.2})"
# Class method
state_dict = {"value": "there", "confidence": 0.1, "geometry": ((0, 0), (.5, .5))}
word = elements.Word.from_dict(state_dict)
assert word.export() == state_dict
def test_line():
geom = ((0, 0), (0.5, 0.5))
words = _mock_words(size=geom[1], offset=geom[0])
line = elements.Line(words)
# Attribute checks
assert len(line.words) == len(words)
assert all(isinstance(w, elements.Word) for w in line.words)
assert line.geometry == geom
# Render
assert line.render() == "hello world"
# Export
assert line.export() == {"words": [w.export() for w in words], "geometry": geom}
# Repr
words_str = ' ' * 4 + ',\n '.join(repr(word) for word in words) + ','
assert line.__repr__() == f"Line(\n (words): [\n{words_str}\n ]\n)"
    # Ensure that the words repr doesn't span several lines when there are none
assert repr(elements.Line([], ((0, 0), (1, 1)))) == "Line(\n (words): []\n)"
# from dict
state_dict = {
"words": [{"value": "there", "confidence": 0.1, "geometry": ((0, 0), (1., 1.))}],
"geometry": ((0, 0), (1., 1.))
}
line = elements.Line.from_dict(state_dict)
assert line.export() == state_dict
def test_artefact():
artefact_type = "qr_code"
conf = 0.8
geom = ((0, 0), (1, 1))
artefact = elements.Artefact(artefact_type, conf, geom)
# Attribute checks
assert artefact.type == artefact_type
assert artefact.confidence == conf
assert artefact.geometry == geom
# Render
assert artefact.render() == "[QR_CODE]"
# Export
assert artefact.export() == {"type": artefact_type, "confidence": conf, "geometry": geom}
# Repr
assert artefact.__repr__() == f"Artefact(type='{artefact_type}', confidence={conf:.2})"
def test_block():
geom = ((0, 0), (1, 1))
sub_size = (geom[1][0] / 2, geom[1][0] / 2)
lines = _mock_lines(size=sub_size, offset=geom[0])
artefacts = _mock_artefacts(size=sub_size, offset=sub_size)
block = elements.Block(lines, artefacts)
# Attribute checks
assert len(block.lines) == len(lines)
assert len(block.artefacts) == len(artefacts)
assert all(isinstance(w, elements.Line) for w in block.lines)
assert all(isinstance(a, elements.Artefact) for a in block.artefacts)
assert block.geometry == geom
# Render
assert block.render() == "hello world\nhello world"
# Export
assert block.export() == {"lines": [line.export() for line in lines],
"artefacts": [artefact.export() for artefact in artefacts], "geometry": geom}
def test_page():
page_idx = 0
page_size = (300, 200)
orientation = {"value": 0., "confidence": 0.}
language = {"value": "EN", "confidence": 0.8}
blocks = _mock_blocks()
page = elements.Page(blocks, page_idx, page_size, orientation, language)
# Attribute checks
assert len(page.blocks) == len(blocks)
assert all(isinstance(b, elements.Block) for b in page.blocks)
assert page.page_idx == page_idx
assert page.dimensions == page_size
assert page.orientation == orientation
assert page.language == language
# Render
assert page.render() == "hello world\nhello world\n\nhello world\nhello world"
# Export
assert page.export() == {"blocks": [b.export() for b in blocks], "page_idx": page_idx, "dimensions": page_size,
"orientation": orientation, "language": language}
# Export XML
assert isinstance(page.export_as_xml(), tuple) and isinstance(
page.export_as_xml()[0], (bytes, bytearray)) and isinstance(page.export_as_xml()[1], ElementTree)
# Repr
assert '\n'.join(repr(page).split('\n')[:2]) == f'Page(\n dimensions={repr(page_size)}'
# Show
page.show(np.zeros((256, 256, 3), dtype=np.uint8), block=False)
# Synthesize
img = page.synthesize()
assert isinstance(img, np.ndarray)
assert img.shape == (*page_size, 3)
def test_document():
pages = _mock_pages()
doc = elements.Document(pages)
# Attribute checks
assert len(doc.pages) == len(pages)
assert all(isinstance(p, elements.Page) for p in doc.pages)
# Render
page_export = "hello world\nhello world\n\nhello world\nhello world"
assert doc.render() == f"{page_export}\n\n\n\n{page_export}"
# Export
assert doc.export() == {"pages": [p.export() for p in pages]}
# Export XML
assert isinstance(doc.export_as_xml(), list) and len(doc.export_as_xml()) == len(pages)
# Show
doc.show([np.zeros((256, 256, 3), dtype=np.uint8) for _ in range(len(pages))], block=False)
# Synthesize
img_list = doc.synthesize()
assert isinstance(img_list, list) and len(img_list) == len(pages)
|
from django.conf.urls import url
from art import views
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='art'),
url(r'^audit/$', views.ArtAuditView.as_view(), name='audit'),
url(r'^upload/$', views.UploadTaskView.as_view(), name='upload'),
url(r'^up_upload/$', views.UpdateUploadTaskView.as_view(), name='up_upload'),
url(r'^pass/$', views.ArtPassView.as_view(), name='pass'),
url(r'^faild/$', views.ArtFaildView.as_view(), name='faild'),
url(r'^resource_upload/$', views.upload, name='resource_upload'),
url(r'^update/$', views.update, name='update'),
]
|
'''
Convert_Sticky.py
Copyright (c) 2003 - 2006 James Urquhart([email protected])
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
import Blender
# Converts Sticky UV Coordinates to Mesh Face coordinates
'''
In essence, blender's sticky coordinates are Per Vertex UV coordinates - just like what torque uses.
In order to simplify importing of dts as well as 3ds, this script handles converting these coordinates
to Per Face UV coordinates.
'''
def convertObject(object):
msh = object.data
if not msh.hasVertexUV(): return
if not msh.hasFaceUV():
msh.hasFaceUV(1)
msh.update()
# Loop through faces
for f in msh.polygons:
for v in range(0, len(f.v)):
# Get face vert
vert = f.v[v]
# Find mesh verts that are identical
# and get corresponding uvco's
#for vt in msh.verts:
# if vt.index == vert.index:
# Sticky coords seem to have
# odd texture coords. Lets convert them
# to fit in blenders 0-1.0 bounds
v1 = (vert.uvco[0])
v2 = (-((vert.uvco[1]))) +1
f.uv[v] = (v1 , v2)
msh.hasVertexUV(0)
msh.update()
if __name__ == "__main__":
for o in bpy.context.scene.objects:
if o.type == "MESH": convertObject(o)
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
##################################################################################
# File: c:\Projects\KENYA ONE PROJECT\CORE\engines\Gudmundsson_Constraint.py #
# Project: c:\Projects\KENYA ONE PROJECT\CORE\engines #
# Created Date: Thursday, January 9th 2020, 8:56:55 pm #
# Author: Geoffrey Nyaga Kinyua ( <[email protected]> ) #
# ----- #
# Last Modified: Thursday January 9th 2020 8:56:55 pm #
# Modified By: Geoffrey Nyaga Kinyua ( <[email protected]> ) #
# ----- #
# MIT License #
# #
# Copyright (c) 2020 KENYA ONE PROJECT #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy of#
# this software and associated documentation files (the "Software"), to deal in #
# the Software without restriction, including without limitation the rights to #
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies #
# of the Software, and to permit persons to whom the Software is furnished to do #
# so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all #
# copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #
# SOFTWARE. #
# ----- #
# Copyright (c) 2020 KENYA ONE PROJECT #
##################################################################################
import sys
sys.path.append("../")
from CORE.API.db_API import write_to_db, read_from_db # type: ignore
from math import sqrt, pi
import numpy as np # type: ignore
import matplotlib.pyplot as plt # type: ignore
grossWeight = read_from_db("finalMTOW")
cruiseSpeed = read_from_db("cruiseSpeed")
ROC = read_from_db("rateOfClimb") * 3.28 * 60
vLof = read_from_db("stallSpeed") * 1.1
AR = read_from_db("AR")
cdMin = read_from_db("cdMin")
wsfromsizing = read_from_db("WS")
rhoSL = read_from_db("rhoSL")
propEff = read_from_db("propEff")
cruiseAltitude: int = 10000 # ft
gForce: float = 2.0
V_ROC: float = 80.0
groundRun: int = 900
serviceCeiling: int = 18000
wsInitial: float = 22.6 # lb/f**2
g: float = 32.174
CDto: float = 0.04
CLto: float = 0.5
groundFriction: float = 0.04
def oswaldEff(AR: float) -> float:
e = (1.78 * (1 - (0.045 * AR ** 0.68))) - 0.64
return e
e = oswaldEff(AR)
k: float = 1 / (pi * AR * e)
write_to_db("k", k)
# dynamic pressure at altitude
def rhoAlt(cruiseAltitude: int) -> float:
rhoalt = rhoSL * (1 - 0.0000068756 * cruiseAltitude) ** 4.2561
return rhoalt
rhoCruise = rhoAlt(cruiseAltitude)
# print ('air density at cruise altitude, rho = ' +str(rhoCruise))
qAltitude = 0.5 * rhoCruise * (1.688 * cruiseSpeed) ** 2
# print('dynamic pressure at altitude = ' +str(qAltitude))
# Gagg and Ferrar model
def gagFerrar(bhp):
"takes in bhp and returns normalised bhp"
normBhp = bhp / (1.132 * (rhoCruise / rhoSL) - 0.132)
return normBhp
WS = np.arange(10, 30)
twTurn = qAltitude * ((cdMin / WS) + k * (gForce / qAltitude) ** 2 * (WS))
qROC = 0.5 * rhoSL * (V_ROC * 1.688) ** 2
Vv = ROC / 60
twROC = (Vv / (V_ROC * 1.688)) + (qROC * cdMin / WS) + (k * WS / qROC)
qVlof = 0.5 * rhoSL * (vLof * 1.688 / sqrt(2)) ** 2
twVlof = (
((vLof * 1.688) ** 2 / (2 * g * groundRun))
+ (qVlof * CDto / WS)
+ (groundFriction * (1 - (qVlof * CLto / WS)))
)
rhoCeiling = rhoAlt(serviceCeiling)
# print(rhoCeiling)
twCruise = qAltitude * cdMin * (1 / WS) + (k)
twCeiling = (1.667 / (np.sqrt((2 * WS / rhoCeiling) * sqrt(k / 3 * cdMin)))) + (
(k * cdMin / 3) * 4
)
plt.figure(1)
plt.subplot(121)
plt.plot(WS, twTurn, label="Rate of Turn")
plt.plot(WS, twROC, label="Rate of Climb")
plt.plot(WS, twVlof, label="Vlof")
plt.plot(WS, twCruise, label="Cruise")
plt.plot(WS, twCeiling, label="Ceiling")
plt.axvline(x=wsfromsizing)
plt.title(" Graph 1 \n HP/Weight ratio")
plt.legend()
# ax = plt.gca()
# ax.set_xticklabels([])
###NORMAlization
norm_twTurn = gagFerrar((grossWeight * twTurn * 1.688 * cruiseSpeed / (propEff * 550)))
test = grossWeight * twTurn * 1.688 * cruiseSpeed / (propEff * 550)
norm_twROC = gagFerrar((grossWeight * twROC * 1.688 * V_ROC / (propEff * 550)))
norm_twVlof = gagFerrar((grossWeight * twVlof * 1.688 * vLof / (propEff * 550)))
norm_twCruise = gagFerrar(
(grossWeight * twCruise * 1.688 * cruiseSpeed / (propEff * 550))
)
norm_twCeiling = gagFerrar(
(grossWeight * twCeiling * 1.688 * cruiseSpeed / (propEff * 550))
)
plt.subplot(122)
plt.plot(WS, norm_twTurn, label="Rate of Turn")
plt.plot(WS, norm_twROC, label="Rate of Climb")
plt.plot(WS, norm_twVlof, label="Vlof")
plt.plot(WS, norm_twCruise, label="Cruise")
plt.plot(WS, norm_twCeiling, label="Ceiling")
plt.title("Graph 2 \n Normalised BHP")
plt.legend()
plt.axvline(x=wsfromsizing)
plt.tight_layout()
if __name__ == "__main__":
plt.show()
def find_nearest(array, value: float) -> int:
idx = (np.abs(array - value)).argmin()
return idx
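# e.g. find_nearest(np.array([10.0, 20.0, 30.0]), 22.0) returns 1, the index of 20.0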
# print(find_nearest(ws, plotWS))
plotWS = read_from_db("WS")
myidx = find_nearest(WS, plotWS)
def point() -> float:
cruiseidx = norm_twCruise[myidx]
takeoffidx = norm_twVlof[myidx]
climbidx = norm_twROC[myidx]
turnidx = norm_twTurn[myidx]
ceilingidx = norm_twCeiling[myidx]
# print([cruiseidx,takeoffidx,climbidx,turnidx,ceilingidx])
# print (cruiseidx,"cruiseidx")
x = np.array([cruiseidx, takeoffidx, climbidx, turnidx, ceilingidx])
return x[np.argmax(x)]
finalBHP = point()
write_to_db("finalBHP", finalBHP)
print(finalBHP, "The Final normalised BHP")
# now switch back to figure 1 and make some changes
|
class Solution:
def lengthOfLongestSubstring(self, s):
unique = set()
left = 0
right = 0
maxi = 0
while right < len(s):
if s[right] not in unique:
unique.add(s[right])
right+=1
maxi = max(maxi, len(unique))
else:
unique.discard(s[left])
left += 1
return maxi
s = Solution()
print(s.lengthOfLongestSubstring("bbba"))
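# expected output: 2 (the longest substring of "bbba" without repeats is "ba")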
|
import logging
import os
import re
from subprocess import run, CalledProcessError # nosec
if __name__ == '__main__':
ignored_patterns = [
'setup.py',
'build',
'tools',
'projects',
'.history',
'torchreid/models',
'torchreid/data',
'torchreid/engine',
'torchreid/metrics',
'torchreid/optim',
'torchreid/utils',
'tests/conftest.py',
'tests/config.py',
'torchreid/integration/sc/model_wrappers/classification.py',
'tests/test_ote_training.py'
]
to_pylint = []
wd = os.path.abspath('.')
for root, dirnames, filenames in os.walk(wd):
for filename in filenames:
if filename.endswith('.py'):
full_path = os.path.join(root, filename)
rel_path = os.path.relpath(full_path, wd)
if all(not re.match(pattern, rel_path) for pattern in ignored_patterns):
to_pylint.append(rel_path)
run(['pylint'] + to_pylint, check=True)
|
import math, random
import pygame, time
import tkinter as tk
from src.Environment.variables import START1, START2, END1, END2, START_REWARD, BEST_REWARD
from src.Grid.node import Node
# Multi-agent Environment
# Get the start and end node for each agent and insert them to the grid
def start_end_pos(grid, width, total_rows):
start_node1, end_node1, start_node2, end_node2 = checkForPos(grid, total_rows)
for i in range(total_rows):
for j in range(total_rows):
if (i,j) == (start_node1.row, start_node1.column):
grid[i][j].set_state(START1)
if (i,j) == (end_node1.row, end_node1.column):
grid[i][j].set_state(END1)
if (i,j) == (start_node2.row, start_node2.column):
grid[i][j].set_state(START2)
if (i,j) == (end_node2.row, end_node2.column):
grid[i][j].set_state(END2)
return start_node1, start_node2, end_node1, end_node2
# Get the start and end positions for both agents
def checkForPos(grid, total_rows):
top = (0, random.randint(0, total_rows-1))
bottom = (total_rows-1, random.randint(0, total_rows-1))
left = (random.randint(0, total_rows-1), 0)
right = (random.randint(0, total_rows-1), total_rows-1)
locations = [top, bottom, left, right]
    # Get the starting and ending positions for the 2 agents
start1, end1, start2, end2 = pos_setUp(locations)
# Set the start and end node in case they are close the first time around
start_node1 = grid[start1[0]][start1[1]]
start_node2 = grid[start2[0]][start2[1]]
end_node1 = grid[end1[0]][end1[1]]
end_node2 = grid[end2[0]][end2[1]]
# Set the rewards of those two nodes
start_node1.reward = START_REWARD
start_node2.reward = START_REWARD
end_node1.reward = BEST_REWARD
end_node2.reward = BEST_REWARD
return start_node1, end_node1, start_node2, end_node2
# Give each start and end a certain node at the borders of the grid
def pos_setUp(location):
start_agent1 = random.choices(location, weights=[1,1,1,1], k=1)[0]
index = location.index(start_agent1)
location.pop(index)
end_agent1 = random.choices(location, weights=[1,1,1], k=1)[0]
index = location.index(end_agent1)
location.pop(index)
start_agent2 = random.choices(location, weights=[1,1], k=1)[0]
index = location.index(start_agent2)
location.pop(index)
end_agent2 = random.choices(location, weights=[1], k=1)[0]
index = location.index(end_agent2)
location.pop(index)
    return start_agent1, end_agent1, start_agent2, end_agent2
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
__all__ = ['SubscriptionArgs', 'Subscription']
@pulumi.input_type
class SubscriptionArgs:
def __init__(__self__, *,
namespace_name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
topic_name: pulumi.Input[str],
auto_delete_on_idle: Optional[pulumi.Input[str]] = None,
dead_lettering_on_filter_evaluation_exceptions: Optional[pulumi.Input[bool]] = None,
dead_lettering_on_message_expiration: Optional[pulumi.Input[bool]] = None,
default_message_time_to_live: Optional[pulumi.Input[str]] = None,
enable_batched_operations: Optional[pulumi.Input[bool]] = None,
entity_availability_status: Optional[pulumi.Input['EntityAvailabilityStatus']] = None,
is_read_only: Optional[pulumi.Input[bool]] = None,
location: Optional[pulumi.Input[str]] = None,
lock_duration: Optional[pulumi.Input[str]] = None,
max_delivery_count: Optional[pulumi.Input[int]] = None,
requires_session: Optional[pulumi.Input[bool]] = None,
status: Optional[pulumi.Input['EntityStatus']] = None,
subscription_name: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Subscription resource.
:param pulumi.Input[str] namespace_name: The namespace name
:param pulumi.Input[str] resource_group_name: Name of the Resource group within the Azure subscription.
:param pulumi.Input[str] topic_name: The topic name.
:param pulumi.Input[str] auto_delete_on_idle: TimeSpan idle interval after which the topic is automatically deleted. The minimum duration is 5 minutes.
:param pulumi.Input[bool] dead_lettering_on_filter_evaluation_exceptions: Value that indicates whether a subscription has dead letter support on filter evaluation exceptions.
:param pulumi.Input[bool] dead_lettering_on_message_expiration: Value that indicates whether a subscription has dead letter support when a message expires.
:param pulumi.Input[str] default_message_time_to_live: Default message time to live value. This is the duration after which the message expires, starting from when the message is sent to Service Bus. This is the default value used when TimeToLive is not set on a message itself.
:param pulumi.Input[bool] enable_batched_operations: Value that indicates whether server-side batched operations are enabled.
:param pulumi.Input['EntityAvailabilityStatus'] entity_availability_status: Entity availability status for the topic.
:param pulumi.Input[bool] is_read_only: Value that indicates whether the entity description is read-only.
:param pulumi.Input[str] location: Subscription data center location.
:param pulumi.Input[str] lock_duration: The lock duration time span for the subscription.
:param pulumi.Input[int] max_delivery_count: Number of maximum deliveries.
:param pulumi.Input[bool] requires_session: Value indicating if a subscription supports the concept of sessions.
:param pulumi.Input['EntityStatus'] status: Enumerates the possible values for the status of a messaging entity.
:param pulumi.Input[str] subscription_name: The subscription name.
:param pulumi.Input[str] type: Resource manager type of the resource.
"""
pulumi.set(__self__, "namespace_name", namespace_name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "topic_name", topic_name)
if auto_delete_on_idle is not None:
pulumi.set(__self__, "auto_delete_on_idle", auto_delete_on_idle)
if dead_lettering_on_filter_evaluation_exceptions is not None:
pulumi.set(__self__, "dead_lettering_on_filter_evaluation_exceptions", dead_lettering_on_filter_evaluation_exceptions)
if dead_lettering_on_message_expiration is not None:
pulumi.set(__self__, "dead_lettering_on_message_expiration", dead_lettering_on_message_expiration)
if default_message_time_to_live is not None:
pulumi.set(__self__, "default_message_time_to_live", default_message_time_to_live)
if enable_batched_operations is not None:
pulumi.set(__self__, "enable_batched_operations", enable_batched_operations)
if entity_availability_status is not None:
pulumi.set(__self__, "entity_availability_status", entity_availability_status)
if is_read_only is not None:
pulumi.set(__self__, "is_read_only", is_read_only)
if location is not None:
pulumi.set(__self__, "location", location)
if lock_duration is not None:
pulumi.set(__self__, "lock_duration", lock_duration)
if max_delivery_count is not None:
pulumi.set(__self__, "max_delivery_count", max_delivery_count)
if requires_session is not None:
pulumi.set(__self__, "requires_session", requires_session)
if status is not None:
pulumi.set(__self__, "status", status)
if subscription_name is not None:
pulumi.set(__self__, "subscription_name", subscription_name)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="namespaceName")
def namespace_name(self) -> pulumi.Input[str]:
"""
The namespace name
"""
return pulumi.get(self, "namespace_name")
@namespace_name.setter
def namespace_name(self, value: pulumi.Input[str]):
pulumi.set(self, "namespace_name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
Name of the Resource group within the Azure subscription.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="topicName")
def topic_name(self) -> pulumi.Input[str]:
"""
The topic name.
"""
return pulumi.get(self, "topic_name")
@topic_name.setter
def topic_name(self, value: pulumi.Input[str]):
pulumi.set(self, "topic_name", value)
@property
@pulumi.getter(name="autoDeleteOnIdle")
def auto_delete_on_idle(self) -> Optional[pulumi.Input[str]]:
"""
TimeSpan idle interval after which the topic is automatically deleted. The minimum duration is 5 minutes.
"""
return pulumi.get(self, "auto_delete_on_idle")
@auto_delete_on_idle.setter
def auto_delete_on_idle(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "auto_delete_on_idle", value)
@property
@pulumi.getter(name="deadLetteringOnFilterEvaluationExceptions")
def dead_lettering_on_filter_evaluation_exceptions(self) -> Optional[pulumi.Input[bool]]:
"""
Value that indicates whether a subscription has dead letter support on filter evaluation exceptions.
"""
return pulumi.get(self, "dead_lettering_on_filter_evaluation_exceptions")
@dead_lettering_on_filter_evaluation_exceptions.setter
def dead_lettering_on_filter_evaluation_exceptions(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "dead_lettering_on_filter_evaluation_exceptions", value)
@property
@pulumi.getter(name="deadLetteringOnMessageExpiration")
def dead_lettering_on_message_expiration(self) -> Optional[pulumi.Input[bool]]:
"""
Value that indicates whether a subscription has dead letter support when a message expires.
"""
return pulumi.get(self, "dead_lettering_on_message_expiration")
@dead_lettering_on_message_expiration.setter
def dead_lettering_on_message_expiration(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "dead_lettering_on_message_expiration", value)
@property
@pulumi.getter(name="defaultMessageTimeToLive")
def default_message_time_to_live(self) -> Optional[pulumi.Input[str]]:
"""
Default message time to live value. This is the duration after which the message expires, starting from when the message is sent to Service Bus. This is the default value used when TimeToLive is not set on a message itself.
"""
return pulumi.get(self, "default_message_time_to_live")
@default_message_time_to_live.setter
def default_message_time_to_live(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "default_message_time_to_live", value)
@property
@pulumi.getter(name="enableBatchedOperations")
def enable_batched_operations(self) -> Optional[pulumi.Input[bool]]:
"""
Value that indicates whether server-side batched operations are enabled.
"""
return pulumi.get(self, "enable_batched_operations")
@enable_batched_operations.setter
def enable_batched_operations(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_batched_operations", value)
@property
@pulumi.getter(name="entityAvailabilityStatus")
def entity_availability_status(self) -> Optional[pulumi.Input['EntityAvailabilityStatus']]:
"""
Entity availability status for the topic.
"""
return pulumi.get(self, "entity_availability_status")
@entity_availability_status.setter
def entity_availability_status(self, value: Optional[pulumi.Input['EntityAvailabilityStatus']]):
pulumi.set(self, "entity_availability_status", value)
@property
@pulumi.getter(name="isReadOnly")
def is_read_only(self) -> Optional[pulumi.Input[bool]]:
"""
Value that indicates whether the entity description is read-only.
"""
return pulumi.get(self, "is_read_only")
@is_read_only.setter
def is_read_only(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_read_only", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Subscription data center location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="lockDuration")
def lock_duration(self) -> Optional[pulumi.Input[str]]:
"""
The lock duration time span for the subscription.
"""
return pulumi.get(self, "lock_duration")
@lock_duration.setter
def lock_duration(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "lock_duration", value)
@property
@pulumi.getter(name="maxDeliveryCount")
def max_delivery_count(self) -> Optional[pulumi.Input[int]]:
"""
Number of maximum deliveries.
"""
return pulumi.get(self, "max_delivery_count")
@max_delivery_count.setter
def max_delivery_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_delivery_count", value)
@property
@pulumi.getter(name="requiresSession")
def requires_session(self) -> Optional[pulumi.Input[bool]]:
"""
Value indicating if a subscription supports the concept of sessions.
"""
return pulumi.get(self, "requires_session")
@requires_session.setter
def requires_session(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "requires_session", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input['EntityStatus']]:
"""
Enumerates the possible values for the status of a messaging entity.
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input['EntityStatus']]):
pulumi.set(self, "status", value)
@property
@pulumi.getter(name="subscriptionName")
def subscription_name(self) -> Optional[pulumi.Input[str]]:
"""
The subscription name.
"""
return pulumi.get(self, "subscription_name")
@subscription_name.setter
def subscription_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "subscription_name", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
"""
Resource manager type of the resource.
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
class Subscription(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
auto_delete_on_idle: Optional[pulumi.Input[str]] = None,
dead_lettering_on_filter_evaluation_exceptions: Optional[pulumi.Input[bool]] = None,
dead_lettering_on_message_expiration: Optional[pulumi.Input[bool]] = None,
default_message_time_to_live: Optional[pulumi.Input[str]] = None,
enable_batched_operations: Optional[pulumi.Input[bool]] = None,
entity_availability_status: Optional[pulumi.Input['EntityAvailabilityStatus']] = None,
is_read_only: Optional[pulumi.Input[bool]] = None,
location: Optional[pulumi.Input[str]] = None,
lock_duration: Optional[pulumi.Input[str]] = None,
max_delivery_count: Optional[pulumi.Input[int]] = None,
namespace_name: Optional[pulumi.Input[str]] = None,
requires_session: Optional[pulumi.Input[bool]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input['EntityStatus']] = None,
subscription_name: Optional[pulumi.Input[str]] = None,
topic_name: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Description of subscription resource.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] auto_delete_on_idle: TimeSpan idle interval after which the topic is automatically deleted. The minimum duration is 5 minutes.
:param pulumi.Input[bool] dead_lettering_on_filter_evaluation_exceptions: Value that indicates whether a subscription has dead letter support on filter evaluation exceptions.
:param pulumi.Input[bool] dead_lettering_on_message_expiration: Value that indicates whether a subscription has dead letter support when a message expires.
:param pulumi.Input[str] default_message_time_to_live: Default message time to live value. This is the duration after which the message expires, starting from when the message is sent to Service Bus. This is the default value used when TimeToLive is not set on a message itself.
:param pulumi.Input[bool] enable_batched_operations: Value that indicates whether server-side batched operations are enabled.
:param pulumi.Input['EntityAvailabilityStatus'] entity_availability_status: Entity availability status for the topic.
:param pulumi.Input[bool] is_read_only: Value that indicates whether the entity description is read-only.
:param pulumi.Input[str] location: Subscription data center location.
:param pulumi.Input[str] lock_duration: The lock duration time span for the subscription.
:param pulumi.Input[int] max_delivery_count: Number of maximum deliveries.
:param pulumi.Input[str] namespace_name: The namespace name
:param pulumi.Input[bool] requires_session: Value indicating if a subscription supports the concept of sessions.
:param pulumi.Input[str] resource_group_name: Name of the Resource group within the Azure subscription.
:param pulumi.Input['EntityStatus'] status: Enumerates the possible values for the status of a messaging entity.
:param pulumi.Input[str] subscription_name: The subscription name.
:param pulumi.Input[str] topic_name: The topic name.
:param pulumi.Input[str] type: Resource manager type of the resource.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: SubscriptionArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Description of subscription resource.
:param str resource_name: The name of the resource.
:param SubscriptionArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(SubscriptionArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
auto_delete_on_idle: Optional[pulumi.Input[str]] = None,
dead_lettering_on_filter_evaluation_exceptions: Optional[pulumi.Input[bool]] = None,
dead_lettering_on_message_expiration: Optional[pulumi.Input[bool]] = None,
default_message_time_to_live: Optional[pulumi.Input[str]] = None,
enable_batched_operations: Optional[pulumi.Input[bool]] = None,
entity_availability_status: Optional[pulumi.Input['EntityAvailabilityStatus']] = None,
is_read_only: Optional[pulumi.Input[bool]] = None,
location: Optional[pulumi.Input[str]] = None,
lock_duration: Optional[pulumi.Input[str]] = None,
max_delivery_count: Optional[pulumi.Input[int]] = None,
namespace_name: Optional[pulumi.Input[str]] = None,
requires_session: Optional[pulumi.Input[bool]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input['EntityStatus']] = None,
subscription_name: Optional[pulumi.Input[str]] = None,
topic_name: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = SubscriptionArgs.__new__(SubscriptionArgs)
__props__.__dict__["auto_delete_on_idle"] = auto_delete_on_idle
__props__.__dict__["dead_lettering_on_filter_evaluation_exceptions"] = dead_lettering_on_filter_evaluation_exceptions
__props__.__dict__["dead_lettering_on_message_expiration"] = dead_lettering_on_message_expiration
__props__.__dict__["default_message_time_to_live"] = default_message_time_to_live
__props__.__dict__["enable_batched_operations"] = enable_batched_operations
__props__.__dict__["entity_availability_status"] = entity_availability_status
__props__.__dict__["is_read_only"] = is_read_only
__props__.__dict__["location"] = location
__props__.__dict__["lock_duration"] = lock_duration
__props__.__dict__["max_delivery_count"] = max_delivery_count
if namespace_name is None and not opts.urn:
raise TypeError("Missing required property 'namespace_name'")
__props__.__dict__["namespace_name"] = namespace_name
__props__.__dict__["requires_session"] = requires_session
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["status"] = status
__props__.__dict__["subscription_name"] = subscription_name
if topic_name is None and not opts.urn:
raise TypeError("Missing required property 'topic_name'")
__props__.__dict__["topic_name"] = topic_name
__props__.__dict__["type"] = type
__props__.__dict__["accessed_at"] = None
__props__.__dict__["count_details"] = None
__props__.__dict__["created_at"] = None
__props__.__dict__["message_count"] = None
__props__.__dict__["name"] = None
__props__.__dict__["updated_at"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:servicebus/v20140901:Subscription"), pulumi.Alias(type_="azure-native:servicebus:Subscription"), pulumi.Alias(type_="azure-nextgen:servicebus:Subscription"), pulumi.Alias(type_="azure-native:servicebus/v20150801:Subscription"), pulumi.Alias(type_="azure-nextgen:servicebus/v20150801:Subscription"), pulumi.Alias(type_="azure-native:servicebus/v20170401:Subscription"), pulumi.Alias(type_="azure-nextgen:servicebus/v20170401:Subscription"), pulumi.Alias(type_="azure-native:servicebus/v20180101preview:Subscription"), pulumi.Alias(type_="azure-nextgen:servicebus/v20180101preview:Subscription"), pulumi.Alias(type_="azure-native:servicebus/v20210101preview:Subscription"), pulumi.Alias(type_="azure-nextgen:servicebus/v20210101preview:Subscription")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Subscription, __self__).__init__(
'azure-native:servicebus/v20140901:Subscription',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Subscription':
"""
Get an existing Subscription resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = SubscriptionArgs.__new__(SubscriptionArgs)
__props__.__dict__["accessed_at"] = None
__props__.__dict__["auto_delete_on_idle"] = None
__props__.__dict__["count_details"] = None
__props__.__dict__["created_at"] = None
__props__.__dict__["dead_lettering_on_filter_evaluation_exceptions"] = None
__props__.__dict__["dead_lettering_on_message_expiration"] = None
__props__.__dict__["default_message_time_to_live"] = None
__props__.__dict__["enable_batched_operations"] = None
__props__.__dict__["entity_availability_status"] = None
__props__.__dict__["is_read_only"] = None
__props__.__dict__["location"] = None
__props__.__dict__["lock_duration"] = None
__props__.__dict__["max_delivery_count"] = None
__props__.__dict__["message_count"] = None
__props__.__dict__["name"] = None
__props__.__dict__["requires_session"] = None
__props__.__dict__["status"] = None
__props__.__dict__["type"] = None
__props__.__dict__["updated_at"] = None
return Subscription(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="accessedAt")
def accessed_at(self) -> pulumi.Output[str]:
"""
Last time there was a receive request to this subscription.
"""
return pulumi.get(self, "accessed_at")
@property
@pulumi.getter(name="autoDeleteOnIdle")
def auto_delete_on_idle(self) -> pulumi.Output[Optional[str]]:
"""
TimeSpan idle interval after which the topic is automatically deleted. The minimum duration is 5 minutes.
"""
return pulumi.get(self, "auto_delete_on_idle")
@property
@pulumi.getter(name="countDetails")
def count_details(self) -> pulumi.Output['outputs.MessageCountDetailsResponse']:
"""
Message Count Details.
"""
return pulumi.get(self, "count_details")
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> pulumi.Output[str]:
"""
Exact time the message was created.
"""
return pulumi.get(self, "created_at")
@property
@pulumi.getter(name="deadLetteringOnFilterEvaluationExceptions")
def dead_lettering_on_filter_evaluation_exceptions(self) -> pulumi.Output[Optional[bool]]:
"""
Value that indicates whether a subscription has dead letter support on filter evaluation exceptions.
"""
return pulumi.get(self, "dead_lettering_on_filter_evaluation_exceptions")
@property
@pulumi.getter(name="deadLetteringOnMessageExpiration")
def dead_lettering_on_message_expiration(self) -> pulumi.Output[Optional[bool]]:
"""
Value that indicates whether a subscription has dead letter support when a message expires.
"""
return pulumi.get(self, "dead_lettering_on_message_expiration")
@property
@pulumi.getter(name="defaultMessageTimeToLive")
def default_message_time_to_live(self) -> pulumi.Output[Optional[str]]:
"""
Default message time to live value. This is the duration after which the message expires, starting from when the message is sent to Service Bus. This is the default value used when TimeToLive is not set on a message itself.
"""
return pulumi.get(self, "default_message_time_to_live")
@property
@pulumi.getter(name="enableBatchedOperations")
def enable_batched_operations(self) -> pulumi.Output[Optional[bool]]:
"""
Value that indicates whether server-side batched operations are enabled.
"""
return pulumi.get(self, "enable_batched_operations")
@property
@pulumi.getter(name="entityAvailabilityStatus")
def entity_availability_status(self) -> pulumi.Output[Optional[str]]:
"""
Entity availability status for the topic.
"""
return pulumi.get(self, "entity_availability_status")
@property
@pulumi.getter(name="isReadOnly")
def is_read_only(self) -> pulumi.Output[Optional[bool]]:
"""
Value that indicates whether the entity description is read-only.
"""
return pulumi.get(self, "is_read_only")
@property
@pulumi.getter
def location(self) -> pulumi.Output[Optional[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="lockDuration")
def lock_duration(self) -> pulumi.Output[Optional[str]]:
"""
The lock duration time span for the subscription.
"""
return pulumi.get(self, "lock_duration")
@property
@pulumi.getter(name="maxDeliveryCount")
def max_delivery_count(self) -> pulumi.Output[Optional[int]]:
"""
Number of maximum deliveries.
"""
return pulumi.get(self, "max_delivery_count")
@property
@pulumi.getter(name="messageCount")
def message_count(self) -> pulumi.Output[float]:
"""
Number of messages.
"""
return pulumi.get(self, "message_count")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="requiresSession")
def requires_session(self) -> pulumi.Output[Optional[bool]]:
"""
Value indicating if a subscription supports the concept of sessions.
"""
return pulumi.get(self, "requires_session")
@property
@pulumi.getter
def status(self) -> pulumi.Output[Optional[str]]:
"""
Enumerates the possible values for the status of a messaging entity.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="updatedAt")
def updated_at(self) -> pulumi.Output[str]:
"""
The exact time the message was updated.
"""
return pulumi.get(self, "updated_at")
|
from .reg.linear_regression import LinearRegression
from .reg.k_nearest_neighbour import KNNReg
from .classi.k_nearest_neighbour import KNNClassi
__all__ = ['LinearRegression', "KNNReg", 'KNNClassi']
|
from .token import Token, TokenType
from .tokenizer import Tokenizer
|
def exponentation(base_number, exponent):
# watch out: 0^0 is defined as 1 (as indicated by Google Calculator)
if exponent == 0:
return 1
else:
return base_number * exponentation(base_number, exponent - 1)
if __name__ == "__main__":
n_a_1 = 3
n_a_2 = 4
print("Input numbers:", n_a_1, n_a_2)
print("Result:", exponentation(n_a_1, n_a_2))
n_b_1 = 17
n_b_2 = 1
print("\nInput numbers:", n_b_1, n_b_2)
print("Result:", exponentation(n_b_1, n_b_2))
n_c_1 = 2
n_c_2 = 0
print("\nInput numbers:", n_c_1, n_c_2)
print("Result:", exponentation(n_c_1, n_c_2))
|
from test.helper import (
execute_add,
wait_for_process,
command_factory,
)
def test_kill_remove_resume(daemon_setup):
"""Old `done` and `failed` entries will be deleted."""
    # Add commands that fail and finish, plus one that keeps running and one
    # that stays queued
execute_add('failingstufftest')
execute_add('ls')
execute_add('ls')
execute_add('sleep 60')
execute_add('ls')
wait_for_process(2)
# Trigger the clear
command_factory('clear')()
# Check that 0,1,2 are missing, 3 is 'running' and 4 is 'queued'
status = command_factory('status')()
assert 0 not in status['data']
assert 1 not in status['data']
assert 2 not in status['data']
assert status['data'][3]['status'] == 'running'
assert status['data'][4]['status'] == 'queued'
|
def send_GET(sock: dict) -> bytes:
    # Unpack the dict values in insertion order: [path, host, connected socket].
    blob = list(sock.values())
    path, host, conn = blob[0], blob[1], blob[2]
    headers_ = ("GET /%s HTTP/1.1\r\nHost: %s\r\nAccept: */*\r\nConnection: close\r\nUser-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.71 Safari/537.36\r\n\r\n" % (path, host)).encode("utf-8")
    conn.send(headers_)
    # Read the response in fixed-size chunks and return the raw bytes.
    data = b"".join(conn.recv(12482) for _ in range(65))
    return data
|
# -*- coding: utf-8 -*-
"""@package scrape
@date Created on nov. 15 09:30 2017
@author samuel_r
"""
def get_translation(jp_text):
return
|
#!/usr/bin/env python
import io
import struct
import os
import re
from datetime import datetime, timedelta
import sys
from tqdm import tqdm
__author__ = 'petercable'
datere = re.compile('(\d{8}T\d{4}_UTC)')
binary_sync = '\xa3\x9d\x7a'
file_scan_depth = 256000
def lrc(data, seed=0):
for b in bytearray(data):
seed ^= b
return seed
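# Worked illustration (assumed input): lrc(b"\x01\x02\x03") == 1 ^ 2 ^ 3 == 0.
# A packet whose last byte is its own LRC therefore XORs down to 0, which is
# the validity check used in check_chunk_binary() below.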
def find_sensor(filename):
if '_' in filename:
return filename.split('_')[0]
def find_time_range(filename):
# initial implementation will assume 1 day per file
match = datere.search(filename)
if match:
dt = datetime.strptime(match.group(1), '%Y%m%dT%H%M_%Z')
return dt, dt + timedelta(1)
def check_chunk_ascii(chunk):
return 'OOI-TS' in chunk
def check_chunk_binary(chunk, fh):
# look for binary sync
sync_index = chunk.find(binary_sync)
if sync_index != -1:
# make sure we have enough bytes to read the packet size
if len(chunk) < sync_index + 4:
chunk += fh.read(4)
if len(chunk) >= sync_index + 4:
packet_size = struct.unpack_from('>H', chunk, sync_index+4)[0]
# make sure we have at least packet size bytes
if len(chunk) < sync_index + packet_size:
chunk += fh.read(packet_size)
if len(chunk) >= sync_index + packet_size:
# The LRC of the entire packet should be 0 if this is a valid packet
if lrc(chunk[sync_index:sync_index+packet_size]) == 0:
return True
return False
def find_type(path, f):
found_ascii = False
found_binary = False
ffw = False
with io.open(os.path.join(path, f), 'rb') as fh:
while not all((found_ascii, found_binary)):
chunk = fh.read(256)
if chunk == '':
break
if not found_ascii:
found_ascii = check_chunk_ascii(chunk)
if not found_binary:
found_binary = check_chunk_binary(chunk, fh)
# If we have scanned through the file to at least file_scan_depth
# then seek file_scan_depth from the end of file and search from
# there
if not ffw and fh.tell() >= file_scan_depth:
ffw = True
here = fh.tell()
fh.seek(-file_scan_depth, io.SEEK_END)
# go back where we were if the file happens to be smaller than
# file_scan_depth x 2
if fh.tell() < here:
fh.seek(here)
return found_ascii, found_binary
def walk_tree(root, sensor):
found = []
for path, dirs, files in os.walk(root):
files = [f for f in files if f.endswith('.dat') and 'DigiCmd' not in f]
if sensor is not None:
files = [f for f in files if sensor in f]
if files:
files.sort()
print('Processing %d files in %s: ' % (len(files), path))
for index in tqdm(xrange(len(files)), leave=True):
f = files[index]
sensor = find_sensor(f)
time_range = find_time_range(f)
# if there is a next file, grab the start time from it
if len(files) > index+1:
next_time_range = find_time_range(files[index+1])
time_range = time_range[0], next_time_range[0]
if time_range:
start, stop = time_range
record_type = find_type(path, f)
found.append((sensor, start, stop, record_type, os.path.join(path, f)))
return found
def analyze(items):
total_size = 0
items_start = None
items_stop = None
count = 0
for _, start, stop, _, path in items:
size = os.stat(path).st_size
if size == 0:
continue
if size < 1000:
# attempt to filter out "empty" files
valid = False
with open(path) as fh:
for line in fh:
if line.startswith('####'):
continue
if line.strip() == '':
continue
valid = True
if not valid:
continue
total_size += size
count += 1
if items_start is None:
items_start = start
else:
items_start = min(items_start, start)
if items_stop is None:
items_stop = stop
else:
items_stop = max(items_stop, stop)
return count, total_size, items_start, items_stop
def main():
root = sys.argv[1]
sensor = None
if len(sys.argv) > 2:
sensor = sys.argv[2]
found = walk_tree(root, sensor)
found.sort()
results = {}
for _ in found:
sensor, start, stop, record_type, path = _
if record_type == (False, False):
results.setdefault(sensor, {}).setdefault('chunky', []).append(_)
elif record_type == (True, False):
results.setdefault(sensor, {}).setdefault('ascii', []).append(_)
else:
results.setdefault(sensor, {}).setdefault('binary', []).append(_)
for sensor in results:
print sensor
print 'chunky', analyze(results[sensor].get('chunky', []))
print 'ascii', analyze(results[sensor].get('ascii', []))
print 'binary', analyze(results[sensor].get('binary', []))
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
class CMSApp(object):
name = None
urls = None
menus = []
app_name = None
|
import numpy as np
import pylot.utils
from pylot.perception.detection.utils import BoundingBox2D, BoundingBox3D, \
get_bounding_box_in_camera_view
VEHICLE_LABELS = {'car', 'bicycle', 'motorcycle', 'bus', 'truck', 'vehicle'}
class Obstacle(object):
"""Class used to store info about obstacles.
This class provides helper functions to detect obstacles and provide
bounding boxes for them.
Args:
bounding_box (:py:class:`.BoundingBox2D`): The bounding box of the
obstacle (can be 2D or 3D).
confidence (:obj:`float`): The confidence of the detection.
label (:obj:`str`): The label of the obstacle.
id (:obj:`int`): The identifier of the obstacle.
transform (:py:class:`~pylot.utils.Transform`, optional): Transform of
the obstacle in the world.
Attributes:
        bounding_box (:py:class:`~pylot.utils.BoundingBox2D`): Bounding box of
the obstacle (can be 2D or 3D).
confidence (:obj:`float`): The confidence of the detection.
label (:obj:`str`): The label of the obstacle.
id (:obj:`int`): The identifier of the obstacle.
transform (:py:class:`~pylot.utils.Transform`): Transform of the
obstacle.
"""
def __init__(self,
bounding_box,
confidence: float,
label: str,
id: int = -1,
transform: pylot.utils.Transform = None,
detailed_label: str = '',
bounding_box_2D: BoundingBox2D = None):
self.bounding_box = bounding_box
if isinstance(bounding_box, BoundingBox2D):
self.bounding_box_2D = bounding_box
else:
self.bounding_box_2D = bounding_box_2D
self.confidence = confidence
self.label = label
self.id = id
self.transform = transform
self.detailed_label = detailed_label
if label == 'vehicle':
self.segmentation_class = 10
elif label == 'person':
self.segmentation_class = 4
else:
self.segmentation_class = None
# Thresholds to be used for detection of the obstacle.
self.__segmentation_threshold = 0.20
self.__depth_threshold = 5
@classmethod
def from_simulator_actor(cls, actor):
"""Creates an Obstacle from a simulator actor.
Args:
actor: The actor to initialize the obstacle with.
Returns:
:py:class:`.Obstacle`: An obstacle instance.
"""
from carla import Vehicle, Walker
if not isinstance(actor, (Vehicle, Walker)):
raise ValueError("The actor should be of type Vehicle or "
"Walker to initialize the Obstacle class.")
        # We do not use the from_simulator* methods everywhere, in order to
        # reduce runtime.
# Convert the transform provided by the simulation to the Pylot class.
transform = pylot.utils.Transform.from_simulator_transform(
actor.get_transform())
# Convert the bounding box from the simulation to the Pylot one.
bounding_box = BoundingBox3D.from_simulator_bounding_box(
actor.bounding_box)
if isinstance(actor, Vehicle):
label = 'vehicle'
else:
label = 'person'
# Get the simulator actor from type_id (e.g. vehicle.ford.mustang).
detailed_label = actor.type_id
# TODO (Sukrit): Move from vehicles and people to separate classes
# for bicycles, motorcycles, cars and persons.
return cls(bounding_box, 1.0, label, actor.id, transform,
detailed_label)
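    # The line produced below follows the MOTChallenge (MOT16) text format:
    # frame, id, bb_left, bb_top, bb_width, bb_height, confidence, x, y, z,
    # where the trailing -1 values stand in for the unused 3D world coordinates.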
def as_mot16_str(self, timestamp):
if not self.bounding_box_2D:
raise ValueError(
'Obstacle {} does not have 2D bounding box'.format(self.id))
log_line = "{},{},{},{},{},{},{},{},{},{}\n".format(
timestamp, self.id, self.bounding_box_2D.x_min,
self.bounding_box_2D.y_min, self.bounding_box_2D.get_width(),
self.bounding_box_2D.get_height(), 1.0, -1, -1, -1)
return log_line
def _distance(self, other_transform: pylot.utils.Transform):
"""Computes the distance from the obstacle to the other transform.
The distance provides an estimate of the depth returned by the depth
camera sensor in the simulator. As a result, the distance is defined
as the displacement of the obstacle along either the X or the Y axis.
Args:
other_transform (:py:class:`~pylot.utils.Transform`): The other
transform.
Returns:
:obj:`float`: The distance (in metres) of the obstacle from the
transform.
"""
import numpy as np
if self.transform is None:
raise ValueError('Obstacle {} does not have a transform'.format(
self.id))
# Get the location of the vehicle and the obstacle as numpy arrays.
other_location = other_transform.location.as_numpy_array()
obstacle_location = self.transform.location.as_numpy_array()
# Calculate the vector from the vehicle to the obstacle.
# Scale it by the forward vector, and calculate the norm.
relative_vector = other_location - obstacle_location
distance = np.linalg.norm(
relative_vector * other_transform.forward_vector.as_numpy_array())
return distance
def draw_on_frame(self,
frame,
bbox_color_map,
ego_transform: pylot.utils.Transform = None,
text: str = None):
"""Annotate the image with the bounding box of the obstacle."""
if text is None:
text = '{}, {:.1f}'.format(self.label, self.confidence)
if self.id != -1:
text += ', id:{}'.format(self.id)
if ego_transform is not None and self.transform is not None:
text += ', {:.1f}m'.format(
ego_transform.location.distance(self.transform.location))
if self.label in bbox_color_map:
color = bbox_color_map[self.label]
else:
color = [255, 255, 255]
# Show bounding box.
if self.bounding_box_2D:
# Draw the 2D bounding box if available.
frame.draw_box(self.bounding_box_2D.get_min_point(),
self.bounding_box_2D.get_max_point(), color)
frame.draw_text(self.bounding_box_2D.get_min_point(), text, color)
elif isinstance(self.bounding_box, BoundingBox3D):
if self.bounding_box.corners is None:
raise ValueError(
'Obstacle {} does not have bbox corners'.format(self.id))
corners = self.bounding_box.to_camera_view(
None, frame.camera_setup.get_extrinsic_matrix(),
frame.camera_setup.get_intrinsic_matrix())
frame.draw_3d_box(corners, color)
else:
raise ValueError('Obstacle {} does not have bounding box'.format(
self.id))
def draw_trajectory_on_frame(self,
trajectory,
frame,
point_color,
draw_label: bool = False):
# Intrinsic and extrinsic matrix of the top down camera.
extrinsic_matrix = frame.camera_setup.get_extrinsic_matrix()
intrinsic_matrix = frame.camera_setup.get_intrinsic_matrix()
if isinstance(self.bounding_box, BoundingBox3D):
# Draw bounding boxes.
start_location = self.bounding_box.transform.location - \
self.bounding_box.extent
end_location = self.bounding_box.transform.location + \
self.bounding_box.extent
for transform in trajectory:
[start_transform,
end_transform] = transform.transform_locations(
[start_location, end_location])
start_point = start_transform.to_camera_view(
extrinsic_matrix, intrinsic_matrix)
end_point = end_transform.to_camera_view(
extrinsic_matrix, intrinsic_matrix)
if frame.in_frame(start_point) or frame.in_frame(end_point):
frame.draw_box(start_point, end_point, point_color)
else:
# Draw points.
for transform in trajectory:
screen_point = transform.location.to_camera_view(
extrinsic_matrix, intrinsic_matrix)
if frame.in_frame(screen_point):
# Draw trajectory on frame.
frame.draw_point(screen_point, point_color)
if draw_label and len(trajectory) > 0:
text = '{}, {}'.format(self.label, self.id)
screen_point = trajectory[-1].location.to_camera_view(
extrinsic_matrix, intrinsic_matrix)
frame.draw_text(screen_point, text, point_color)
def get_bounding_box_corners(self,
obstacle_transform,
obstacle_radius=None):
"""Gets the corners of the obstacle's bounding box.
Note:
            The bounding box is applied on the given obstacle transform, and not
on the default obstacle transform.
"""
# Use 3d bounding boxes if available, otherwise use default
if isinstance(self.bounding_box, BoundingBox3D):
start_location = (self.bounding_box.transform.location -
self.bounding_box.extent)
end_location = (self.bounding_box.transform.location +
self.bounding_box.extent)
[start_location,
end_location] = obstacle_transform.transform_locations(
[start_location, end_location])
else:
obstacle_radius_loc = pylot.utils.Location(obstacle_radius,
obstacle_radius)
start_location = obstacle_transform.location - obstacle_radius_loc
end_location = obstacle_transform.location + obstacle_radius_loc
return [
min(start_location.x, end_location.x),
min(start_location.y, end_location.y),
max(start_location.x, end_location.x),
max(start_location.y, end_location.y)
]
def get_in_log_format(self):
if not self.bounding_box_2D:
raise ValueError(
'Obstacle {} does not have 2D bounding box'.format(self.id))
min_point = self.bounding_box_2D.get_min_point()
max_point = self.bounding_box_2D.get_max_point()
return (self.label, self.detailed_label, self.id,
((min_point.x, min_point.y), (max_point.x, max_point.y)))
def is_animal(self):
return self.label in [
'cat', 'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra',
'giraffe'
]
def is_person(self):
return self.label == 'person'
def is_speed_limit(self):
return self.label in [
'speed limit 30', 'speed limit 60', 'speed limit 90'
]
def is_stop_sign(self):
return self.label == 'stop sign' or self.label == 'stop marking'
def is_traffic_light(self):
return self.label in [
'red traffic light', 'yellow traffic light', 'green traffic light',
'off traffic light'
]
def is_vehicle(self):
# Might want to include train.
return self.label in VEHICLE_LABELS
def populate_bounding_box_2D(self, depth_frame, segmented_frame):
"""Populates the 2D bounding box for the obstacle.
Heuristically uses the depth frame and segmentation frame to figure out
if the obstacle is in view of the camera or not.
Args:
depth_frame (:py:class:`~pylot.perception.depth_frame.DepthFrame`):
Depth frame used to compare the depth to the distance of the
obstacle from the sensor.
segmented_frame (:py:class:`~pylot.perception.segmentation.segmented_frame.SegmentedFrame`): # noqa: E501
Segmented frame used to refine the conversions.
Returns:
            :py:class:`~pylot.utils.BoundingBox2D`: An instance representing a
rectangle over the obstacle if the obstacle is deemed to be
visible, None otherwise.
"""
if self.bounding_box_2D:
return self.bounding_box_2D
# Convert the bounding box of the obstacle to the camera coordinates.
bb_coordinates = self.bounding_box.to_camera_view(
self.transform, depth_frame.camera_setup.get_extrinsic_matrix(),
depth_frame.camera_setup.get_intrinsic_matrix())
# Threshold the bounding box to be within the camera view.
bbox_2d = get_bounding_box_in_camera_view(
bb_coordinates, depth_frame.camera_setup.width,
depth_frame.camera_setup.height)
if not bbox_2d:
return None
# Crop the segmented and depth image to the given bounding box.
cropped_image = segmented_frame.as_numpy_array()[
bbox_2d.y_min:bbox_2d.y_max, bbox_2d.x_min:bbox_2d.x_max]
cropped_depth = depth_frame.as_numpy_array()[
bbox_2d.y_min:bbox_2d.y_max, bbox_2d.x_min:bbox_2d.x_max]
# If the size of the bounding box is greater than 0, ensure that the
# bounding box contains more than a threshold of pixels corresponding
# to the required segmentation class.
if cropped_image.size > 0:
masked_image = np.zeros_like(cropped_image)
masked_image[np.where(
cropped_image == self.segmentation_class)] = 1
seg_threshold = self.__segmentation_threshold * masked_image.size
if np.sum(masked_image) >= seg_threshold:
# The bounding box contains the required number of pixels that
# belong to the required class. Ensure that the depth of the
# obstacle is the depth in the image.
masked_depth = cropped_depth[np.where(masked_image == 1)]
mean_depth = np.mean(masked_depth) * 1000
depth = self._distance(
depth_frame.camera_setup.get_transform())
if abs(depth - mean_depth) <= self.__depth_threshold:
self.bounding_box_2D = bbox_2d
return bbox_2d
return None
def __repr__(self):
return self.__str__()
def __str__(self):
obstacle = 'Obstacle(id: {}, label: {}, confidence: {}, '\
'bbox: {})'.format(self.id, self.label, self.confidence,
self.bounding_box)
if self.transform:
return obstacle + ' at ' + str(self.transform)
else:
return obstacle
|
from flask import Flask, request, url_for
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_bcrypt import Bcrypt
from flask_wtf.csrf import CSRFProtect
from flask_login import LoginManager
from flask_mail import Mail
from datetime import datetime
from flask_login import UserMixin
import itsdangerous
from config import Config
app = Flask(__name__)
app._static_folder = 'static'
app.config['SECRET_KEY'] = '5791628bb0b13ce0c676df2de280ba245'
app.config.from_object(Config)
db = SQLAlchemy(app)
bcrypt = Bcrypt(app)
login_manager = LoginManager(app)
login_manager.login_view = 'login'
login_manager.login_message_category = 'info'
mail = Mail(app)
migrate = Migrate(app, db)
csrf = CSRFProtect(app)
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
followers = db.Table('followers',
db.Column('follower_id', db.Integer,
db.ForeignKey('user.id')),
db.Column('followed_id', db.Integer,
db.ForeignKey('user.id'))
)
class User(db.Model, UserMixin):
"""Модель пользователя. Представлены обязательные
для заполнения поля и поля для отображения в профиле.
"""
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(15), unique=True, nullable=False)
email = db.Column(db.String(), unique=True, nullable=False)
image_file = db.Column(db.String(), nullable=True, default='default.jpg')
password = db.Column(db.String(59), nullable=False)
posts = db.relationship('Post', backref='author', lazy=True)
comments = db.relationship('Comment', backref='author', lazy=True)
    # Last seen timestamp.
last_seen = db.Column(db.DateTime, default=datetime.utcnow)
    # Optional personal data shown in the profile.
    age = db.Column(db.Integer,
                    default='Not specified.', nullable=True)
    country = db.Column(db.String(59),
                        default='Not specified.', nullable=True)
    city = db.Column(db.String(59),
                     default='Not specified.', nullable=True)
    telegram = db.Column(db.String(59),
                         default='Not specified.', nullable=True)
git = db.Column(db.String(199), nullable=True)
    # Followers
followed = db.relationship(
'User', secondary=followers,
primaryjoin=(followers.c.follower_id == id),
secondaryjoin=(followers.c.followed_id == id),
backref='Подписчики',
lazy='dynamic')
likes = db.relationship('Like', backref='user', lazy=True)
messages = db.relationship('Message', backref='user', lazy=True)
def __repr__(self):
return self.username
def follow(self, user):
"""Добавления в избранные авторы."""
if not self.is_following(user):
self.followed.append(user)
def unfollow(self, user):
"""Удаление из избранных авторов."""
if self.is_following(user):
self.followed.remove(user)
def is_following(self, user):
"""Проверка, является ли подписчиком."""
return self.followed.filter(
followers.c.followed_id == user.id).count() > 0
def get_reset_token(self):
"""Получение токкена."""
auth_s = itsdangerous.Serializer(app.config['SECRET_KEY'])
return auth_s.dumps({'user_id': self.id})
@staticmethod
def verify_reset_token(token):
"""Подтверждение токена из письма,
отправленного на почту при смене пароля.
"""
auth_s = itsdangerous.Serializer(app.config['SECRET_KEY'])
try:
user_id = auth_s.loads(token)['user_id']
except:
return None
return User.query.get(user_id)
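# Hedged usage sketch (illustrative only; assumes an application context and two
# persisted users named 'alice' and 'bob'):
#
#   alice = User.query.filter_by(username='alice').first()
#   bob = User.query.filter_by(username='bob').first()
#   alice.follow(bob)
#   db.session.commit()
#   assert alice.is_following(bob)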
class Post(db.Model):
"""ДМодель отображения всех записей."""
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(100), nullable=False)
created_at = db.Column(
db.DateTime,
nullable=False,
default=datetime.utcnow
)
text = db.Column(db.Text, nullable=False)
user_id = db.Column(
db.Integer,
db.ForeignKey('user.id'),
nullable=False
)
comments = db.relationship('Comment', backref='posts', lazy=True)
likes = db.relationship('Like', backref='post', lazy=True)
def __repr__(self):
return f"Post '{self.title}', created '{self.created_at}')"
class Comment(db.Model):
"""Комментарии к посту."""
id = db.Column(db.Integer, primary_key=True)
body = db.Column(db.String(100), nullable=False)
timestamp = db.Column(db.DateTime, nullable=False,
default=datetime.utcnow)
post_id = db.Column(db.Integer, db.ForeignKey('post.id'), nullable=False)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
def __repr__(self):
return self.body[:20]
class Like(db.Model):
"""Лайки к посту."""
id = db.Column(db.Integer, primary_key=True)
author = db.Column(db.Integer, db.ForeignKey(
'user.id', ondelete="CASCADE"), nullable=False)
post_id = db.Column(db.Integer, db.ForeignKey(
'post.id', ondelete="CASCADE"), nullable=False)
class Message(db.Model):
"""Личные сообщения."""
id = db.Column(db.Integer, primary_key=True)
text = db.Column(db.String(199), nullable=False)
    # Who sent the message.
sender = db.Column(db.Integer, db.ForeignKey(
'user.id', ondelete="CASCADE"), nullable=False)
    # Who received the message.
getter = db.Column(db.Integer, db.ForeignKey(
'user.id', ondelete="CASCADE"), nullable=False)
timestamp = db.Column(db.DateTime, nullable=False,
default=datetime.utcnow)
|
import hashlib
import json
from typing import Any, Callable, List
from clvm import SExp
OpCallable = Callable[[Any, "ValStackType"], int]
ValStackType = List[SExp]
OpStackType = List[OpCallable]
def sha256tree(v):
pair = v.as_pair()
if pair:
left = sha256tree(pair[0])
right = sha256tree(pair[1])
s = b"\2" + left + right
else:
atom = v.as_atom()
s = b"\1" + atom
return hashlib.sha256(s).digest()
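# Note: sha256tree() implements the standard CLVM tree hash. A pair hashes as
# sha256(b"\x02" + hash(left) + hash(right)) and an atom hashes as
# sha256(b"\x01" + atom), so structurally identical programs share a hash.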
PRELUDE = '''<html>
<head>
<link rel="stylesheet"
href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css"
integrity="sha384-ggOyR0iXCbMQv3Xipma34MD+dH/1fQ784/j6cY/iJTQUOhcWr7x9JvoRxT2MZw1T"
crossorigin="anonymous">
<script
src="https://code.jquery.com/jquery-3.3.1.slim.min.js"
integrity="sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo"
crossorigin="anonymous"></script>
<script
src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.7/umd/popper.min.js"
integrity="sha384-UO2eT0CpHqdSJQ6hJty5KVphtPhzWj9WO1clHTMGa3JDZwrnQq4sF86dIHNDz0W1"
crossorigin="anonymous"></script>
<script
src="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js"
integrity="sha384-JjSmVgyd0p3pXB1rRibZUAYoIIy6OrQ6VrjIEaFf/nJGzIxFDsf4x0xIM+B07jRM"
crossorigin="anonymous"></script>
</head>
<body>
<div class="container">
'''
TRAILER = "</div></body></html>"
def dump_sexp(s, disassemble):
return '<span id="%s">%s</span>' % (id(s), disassemble(s))
def dump_invocation(form, rewrit_form, env, result, disassemble):
print('<hr><div class="invocation" id="%s">' % id(form))
print('<span class="form"><a name="id_%s">%s</a></span>' % (
id(form), dump_sexp(form, disassemble)))
print('<ul>')
if form != rewrit_form:
print(
'<li>Rewritten as:<span class="form">'
'<a name="id_%s">%s</a></span></li>' % (
id(rewrit_form), dump_sexp(rewrit_form, disassemble)))
for _, e in enumerate(env):
print('<li>x%d: <a href="#id_%s">%s</a></li>' % (
_, id(e), dump_sexp(e, disassemble)))
print('</ul>')
print('<span class="form">%s</span>' % dump_sexp(result, disassemble))
if form.listp() and len(form) > 1:
print('<ul>')
for _, arg in enumerate(form[1:]):
print('<li>arg %d: <a href="#id_%s">%s</a></li>' % (
_, id(arg), dump_sexp(arg, disassemble)))
print('</ul>')
print("</div>")
def trace_to_html(invocations, disassemble):
invocations = reversed(invocations)
print(PRELUDE)
id_set = set()
id_list = []
for form, rewrit_form, env, rv in invocations:
dump_invocation(form, rewrit_form, env, rv, disassemble)
the_id = id(form)
if the_id not in id_set:
id_set.add(the_id)
id_list.append(form)
print('<hr>')
for _ in id_list:
print('<div><a href="#id_%s">%s</a></div>' % (id(_), disassemble(_)))
print('<hr>')
print(TRAILER)
def build_symbol_dump(constants_lookup, run_program, path):
compiled_lookup = {}
for k, v in constants_lookup.items():
cost, v1 = run_program(v, v.null())
compiled_lookup[sha256tree(v1).hex()] = bytes(k).decode()
output = json.dumps(compiled_lookup)
with open(path, "w") as f:
f.write(output)
def text_trace(disassemble, form, symbol, env, result):
if symbol:
env = env.rest()
symbol = disassemble(env.to(symbol.encode()).cons(env))
else:
symbol = "%s [%s]" % (disassemble(form), disassemble(env))
print("%s => %s" % (symbol, result))
print("")
def table_trace(disassemble, form, symbol, env, result):
if form.listp():
sexp = form.first()
args = form.rest()
else:
sexp = form
args = form.null()
print("exp:", disassemble(sexp))
print("arg:", disassemble(args))
print("env:", disassemble(env))
print("val:", result)
print("bexp:", sexp.as_bin())
print("barg:", args.as_bin())
print("benv:", env.as_bin())
print("--")
def display_trace(trace, disassemble, symbol_table, display_fun):
for item in trace:
form, env, rv = item
if rv is None:
rv = "(didn't finish)"
else:
rv = disassemble(rv)
h = sha256tree(form).hex()
symbol = symbol_table.get(h) if symbol_table else symbol_table
display_fun(disassemble, form, symbol, env, rv)
def trace_to_text(trace, disassemble, symbol_table):
display_trace(trace, disassemble, symbol_table, text_trace)
def trace_to_table(trace, disassemble, symbol_table):
display_trace(trace, disassemble, symbol_table, table_trace)
def make_trace_pre_eval(log_entries, symbol_table=None):
def pre_eval_f(sexp, args):
sexp, args = [SExp.to(_) for _ in [sexp, args]]
if symbol_table:
h = sha256tree(sexp).hex()
if h not in symbol_table:
return None
log_entry = [sexp, args, None]
log_entries.append(log_entry)
def callback_f(r):
log_entry[-1] = SExp.to(r)
return callback_f
return pre_eval_f
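# Hedged usage sketch (illustrative; assumes a run_program callable that accepts
# a pre_eval_f hook, as the CLVM runtime does):
#
#   log_entries = []
#   pre_eval_f = make_trace_pre_eval(log_entries)
#   cost, result = run_program(program, args, pre_eval_f=pre_eval_f)
#   trace_to_text(log_entries, disassemble, symbol_table=None)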
|
from typing import List
from Yacht.Dice import Dice
from Yacht.PointTable import PointTable
from Yacht.PointType import PointType
class Player:
    point_table: PointTable
    dices: List[Dice]
    roll_count: int
    def __init__(self):
        self.point_table = PointTable()
        self.roll_count = 0
        # Create a fresh per-instance list so players do not share Dice objects.
        self.dices = [Dice() for _ in range(5)]
def round_start(self):
for dice in self.dices:
dice.reset()
dice.roll()
self.roll_count = 2
def roll(self) -> bool:
if self.roll_count > 0:
self.roll_count -= 1
for dice in self.dices:
dice.roll()
return True
else:
return False
def hold(self, number: int):
self.dices[number].set_hold()
def un_hold(self, number: int):
self.dices[number].unset_hold()
def set_point(self, point_type: PointType) -> bool:
return self.point_table.set_point(point_type, self.dices)
def get_point(self) -> int:
return self.point_table.get_total_point()
def is_point_setable(self, point_type: PointType) -> bool:
return self.point_table.is_point_setable(point_type)
def get_roll_count(self) -> int:
return self.roll_count
def get_dices(self) -> List[Dice]:
return self.dices
def get_point_table(self) -> PointTable:
return self.point_table
def print_point(self):
print("Total Point: ", self.point_table.get_total_point())
self.point_table.print_table()
def print_dice(self):
print("Roll Count: ", self.roll_count)
for dice in self.dices:
dice.print_dice()
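# Hedged usage sketch (illustrative only; PointType.CHOICE is an assumed member
# of the PointType enum):
#
#   player = Player()
#   player.round_start()            # roll all five dice; two re-rolls remain
#   player.hold(0)                  # keep the first die
#   player.roll()                   # re-roll the un-held dice
#   player.set_point(PointType.CHOICE)
#   player.print_point()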
|
"""Support for interface with an Samsung TV."""
import asyncio
from datetime import timedelta
import logging
import socket
import voluptuous as vol
from homeassistant.components.media_player import (
MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.components.media_player.const import (
MEDIA_TYPE_CHANNEL, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE,
SUPPORT_PLAY, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE, SUPPORT_TURN_OFF, SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_STEP)
from homeassistant.const import (
CONF_HOST, CONF_MAC, CONF_NAME, CONF_PORT, CONF_TIMEOUT, STATE_OFF,
STATE_ON)
import homeassistant.helpers.config_validation as cv
from homeassistant.util import dt as dt_util
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = 'Samsung TV Remote'
DEFAULT_PORT = 55000
DEFAULT_TIMEOUT = 1
KEY_PRESS_TIMEOUT = 1.2
KNOWN_DEVICES_KEY = 'samsungtv_known_devices'
SOURCES = {
'TV': 'KEY_DTV',
'HDMI': 'KEY_HDMI'
}
SUPPORT_SAMSUNGTV = SUPPORT_PAUSE | SUPPORT_VOLUME_STEP | \
SUPPORT_VOLUME_MUTE | SUPPORT_PREVIOUS_TRACK | SUPPORT_SELECT_SOURCE | \
SUPPORT_NEXT_TRACK | SUPPORT_TURN_OFF | SUPPORT_PLAY | SUPPORT_PLAY_MEDIA
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_MAC): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
})
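# Hedged configuration sketch (YAML, illustrative values only) matching the
# schema above; it would go in configuration.yaml:
#
#   media_player:
#     - platform: samsungtv
#       host: 192.168.1.50
#       port: 55000
#       name: Living Room TV
#       mac: aa:bb:cc:dd:ee:ff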
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Samsung TV platform."""
known_devices = hass.data.get(KNOWN_DEVICES_KEY)
if known_devices is None:
known_devices = set()
hass.data[KNOWN_DEVICES_KEY] = known_devices
uuid = None
# Is this a manual configuration?
if config.get(CONF_HOST) is not None:
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
name = config.get(CONF_NAME)
mac = config.get(CONF_MAC)
timeout = config.get(CONF_TIMEOUT)
elif discovery_info is not None:
tv_name = discovery_info.get('name')
model = discovery_info.get('model_name')
host = discovery_info.get('host')
name = "{} ({})".format(tv_name, model)
port = DEFAULT_PORT
timeout = DEFAULT_TIMEOUT
mac = None
udn = discovery_info.get('udn')
if udn and udn.startswith('uuid:'):
uuid = udn[len('uuid:'):]
else:
_LOGGER.warning("Cannot determine device")
return
# Only add a device once, so discovered devices do not override manual
# config.
ip_addr = socket.gethostbyname(host)
if ip_addr not in known_devices:
known_devices.add(ip_addr)
add_entities([SamsungTVDevice(host, port, name, timeout, mac, uuid)])
_LOGGER.info("Samsung TV %s:%d added as '%s'", host, port, name)
else:
_LOGGER.info("Ignoring duplicate Samsung TV %s:%d", host, port)
class SamsungTVDevice(MediaPlayerDevice):
"""Representation of a Samsung TV."""
def __init__(self, host, port, name, timeout, mac, uuid):
"""Initialize the Samsung device."""
from samsungctl import exceptions
from samsungctl import Remote
import wakeonlan
# Save a reference to the imported classes
self._exceptions_class = exceptions
self._remote_class = Remote
self._name = name
self._mac = mac
self._uuid = uuid
self._wol = wakeonlan
# Assume that the TV is not muted
self._muted = False
# Assume that the TV is in Play mode
self._playing = True
self._state = None
self._remote = None
# Mark the end of a shutdown command (need to wait 15 seconds before
# sending the next command to avoid turning the TV back ON).
self._end_of_power_off = None
# Generate a configuration for the Samsung library
self._config = {
'name': 'HomeAssistant',
'description': name,
'id': 'ha.component.samsung',
'port': port,
'host': host,
'timeout': timeout,
}
if self._config['port'] == 8001:
self._config['method'] = 'websocket'
else:
self._config['method'] = 'legacy'
def update(self):
"""Update state of device."""
self.send_key("KEY")
def get_remote(self):
"""Create or return a remote control instance."""
if self._remote is None:
# We need to create a new instance to reconnect.
self._remote = self._remote_class(self._config)
return self._remote
def send_key(self, key):
"""Send a key to the tv and handles exceptions."""
if self._power_off_in_progress() \
and key not in ('KEY_POWER', 'KEY_POWEROFF'):
_LOGGER.info("TV is powering off, not sending command: %s", key)
return
try:
# recreate connection if connection was dead
retry_count = 1
for _ in range(retry_count + 1):
try:
self.get_remote().control(key)
break
except (self._exceptions_class.ConnectionClosed,
BrokenPipeError):
                    # BrokenPipe can occur when commands are sent too fast
self._remote = None
self._state = STATE_ON
except (self._exceptions_class.UnhandledResponse,
self._exceptions_class.AccessDenied):
# We got a response so it's on.
self._state = STATE_ON
self._remote = None
_LOGGER.debug("Failed sending command %s", key, exc_info=True)
return
except OSError:
self._state = STATE_OFF
self._remote = None
if self._power_off_in_progress():
self._state = STATE_OFF
def _power_off_in_progress(self):
return self._end_of_power_off is not None and \
self._end_of_power_off > dt_util.utcnow()
@property
def unique_id(self) -> str:
"""Return the unique ID of the device."""
return self._uuid
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._muted
@property
def source_list(self):
"""List of available input sources."""
return list(SOURCES)
@property
def supported_features(self):
"""Flag media player features that are supported."""
if self._mac:
return SUPPORT_SAMSUNGTV | SUPPORT_TURN_ON
return SUPPORT_SAMSUNGTV
def turn_off(self):
"""Turn off media player."""
self._end_of_power_off = dt_util.utcnow() + timedelta(seconds=15)
if self._config['method'] == 'websocket':
self.send_key('KEY_POWER')
else:
self.send_key('KEY_POWEROFF')
# Force closing of remote session to provide instant UI feedback
try:
self.get_remote().close()
self._remote = None
except OSError:
_LOGGER.debug("Could not establish connection.")
def volume_up(self):
"""Volume up the media player."""
self.send_key('KEY_VOLUP')
def volume_down(self):
"""Volume down media player."""
self.send_key('KEY_VOLDOWN')
def mute_volume(self, mute):
"""Send mute command."""
self.send_key('KEY_MUTE')
def media_play_pause(self):
"""Simulate play pause media player."""
if self._playing:
self.media_pause()
else:
self.media_play()
def media_play(self):
"""Send play command."""
self._playing = True
self.send_key('KEY_PLAY')
def media_pause(self):
"""Send media pause command to media player."""
self._playing = False
self.send_key('KEY_PAUSE')
def media_next_track(self):
"""Send next track command."""
self.send_key('KEY_FF')
def media_previous_track(self):
"""Send the previous track command."""
self.send_key('KEY_REWIND')
async def async_play_media(self, media_type, media_id, **kwargs):
"""Support changing a channel."""
if media_type != MEDIA_TYPE_CHANNEL:
_LOGGER.error('Unsupported media type')
return
# media_id should only be a channel number
try:
cv.positive_int(media_id)
except vol.Invalid:
_LOGGER.error('Media ID must be positive integer')
return
for digit in media_id:
await self.hass.async_add_job(self.send_key, 'KEY_' + digit)
await asyncio.sleep(KEY_PRESS_TIMEOUT, self.hass.loop)
await self.hass.async_add_job(self.send_key, 'KEY_ENTER')
def turn_on(self):
"""Turn the media player on."""
if self._mac:
self._wol.send_magic_packet(self._mac)
else:
self.send_key('KEY_POWERON')
async def async_select_source(self, source):
"""Select input source."""
if source not in SOURCES:
_LOGGER.error('Unsupported source')
return
await self.hass.async_add_job(self.send_key, SOURCES[source])
|