max_stars_repo_path (string, 3-269 chars) | max_stars_repo_name (string, 4-119 chars) | max_stars_count (int64, 0-191k) | id (string, 1-7 chars) | content (string, 6-1.05M chars) | score (float64, 0.23-5.13) | int_score (int64, 0-5)
---|---|---|---|---|---|---
setup.py | SNR20db/meteostat2 | 0 | 12785551 | from setuptools import setup
from os import path
from codecs import open
here = path.abspath( path.dirname( __file__ ) )
with open( path.join( here, 'README.md' ), encoding='utf-8' ) as file :
long_description = file.read()
for line in open( path.join( 'meteostat', '__init__.py' ) ) :
if line.startswith( '__version__' ) :
exec( line )
break
setup(
name = 'meteostat2',
    version = __version__,
description = 'Meteostat alternative API for python',
long_description = long_description,
url = 'https://github.com/SNR20db/meteostat2',
license = 'MIT',
    classifiers = [
'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10'
],
keywords = 'meteo meteostat meteostat2 weather weatherAPI meteorology',
install_requires = [ 'requests' ],
entry_points = {
'console_scripts' : [
'meteostat2 = meteostat.__main__:main',
'meteo2 = meteostat.__main__:main'
]
},
py_modules = [ 'meteostat.meteostat2' , 'meteostat.__main__' ],
    tests_require = [
'pandas'
]
) | 1.765625 | 2 |
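A note on the setup.py above: the for-loop reads meteostat/__init__.py and exec's the line that defines __version__. A self-contained illustration of that idiom (the file content here is made up):

line = "__version__ = '1.2.3'\n"
namespace = {}
exec(line, namespace)
print(namespace['__version__'])  # 1.2.3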
tests/test_parameters.py | thomasjpfan/skvalid | 2 | 12785552 | import pytest
from numpy.random import RandomState
from skvalid.parameters import TypeOf
from skvalid.parameters import Enum
from skvalid.parameters import Union
from skvalid.parameters import Interval
from skvalid.parameters import Const
import typing
@pytest.mark.parametrize('type_of,value',
[(TypeOf(bool), True), (TypeOf(bool), False),
(TypeOf(typing.Callable), TypeOf.validate),
(TypeOf(float), 10.1), (TypeOf(int), 10),
(TypeOf(dict), {})])
def test_typeof_valid_values(type_of, value):
type_of.validate(value, "tol")
@pytest.mark.parametrize('type_of,value', [(TypeOf(bool), 'Hello world'),
(TypeOf(typing.Callable), True),
(TypeOf(str), 120),
(TypeOf(int), 10.1),
(TypeOf(dict), True)])
def test_typeof_invalid_values(type_of, value):
cur_type = type_of.types[0]
name = getattr(cur_type, "__name__", str(cur_type))
msg = 'tol: {} is not a {}'.format(value, name)
with pytest.raises(TypeError, match=msg):
type_of.validate(value, "tol")
def test_typeof_invalid_values_multiple():
msg = 'tol: 4.0 is not a RandomState or int'
with pytest.raises(TypeError, match=msg):
TypeOf(RandomState, int).validate(4.0, "tol")
@pytest.mark.parametrize('constant,value', [(Const(4), 4),
(Const('hehe'), 'hehe'),
(Const(3.1), 3.1),
(Const(True), True),
(Const(None), None)])
def test_constant_valid_values(constant, value):
# does not raise
constant.validate(value, "tol")
@pytest.mark.parametrize('constant,value', [(Const(4), 3),
(Const('hehe'), 'heh'),
(Const(3.1), 4.1),
(Const(True), False),
(Const(None), 4),
(Const(4), None)])
def test_constant_invalid_values(constant, value):
msg = 'tol: {} != {}'.format(value, constant.value)
with pytest.raises(ValueError, match=msg):
constant.validate(value, "tol")
@pytest.mark.parametrize('members, msg',
[([], 'members must have at least one item'),
((), 'members must have at least one item')])
def test_enum_invalid_members_init(members, msg):
with pytest.raises(ValueError, match=msg):
Enum(*members)
@pytest.mark.parametrize('enum, value',
[(Enum('a', 'b'), 'a'), (Enum('a', 'b'), 'b'),
(Enum('a', 'c', 'b'), 'c'),
(Enum('a', 1, None, 1.0, True), 'a'),
(Enum('a', 1, None, 1.0, True), 1),
(Enum('a', 1, None, 1.0, True), None),
(Enum('a', 1, None, 1.0, True), 1.0),
(Enum('a', 1, None, 1.0, True), True)])
def test_enum_values(enum, value):
# does not raise
enum.validate(value, "tol")
@pytest.mark.parametrize(
'enum, value, msg',
[(Enum('a', '5'), '3', r'3 is not in \[a, 5\]'),
(Enum('a', '3', '9'), '5', r'5 is not in \[a, 3, 9\]'),
(Enum('a', 1, None, 1.0,
True), 'bad', r'bad is not in \[a, 1, None, 1.0, True\]')])
def test_enum_invalid_values(enum, value, msg):
with pytest.raises(ValueError, match=msg):
enum.validate(value, "tol")
def test_enum_invalid_type_error():
enum, value, msg = Enum('hello', 'f'), 1, r'1 is not in \[hello, f\]'
with pytest.raises(ValueError, match=msg):
enum.validate(value, 'tol')
@pytest.mark.parametrize(
'params, msg',
[((), 'parameters must have at least one item'),
(('hello', 'world'), 'all parameters must be of type Parameter'),
((TypeOf(int), 3), 'all parameters must be of type Parameter'),
((None, Enum('hello')), 'all parameters must be of type Parameter')])
def test_union_invalid_params_init(params, msg):
with pytest.raises(ValueError, match=msg):
Union(*params)
@pytest.mark.parametrize('union, value, msg', [
(Union(TypeOf(int), Enum('hello', 'world')), None,
r'tol: None is not a int and is not in \[hello, world\]'),
(Union(TypeOf(int), Enum('hello', 'world')), 0.4, 'tol: 0.4 is not a int'),
])
def test_union_invalid_values(union, value, msg):
with pytest.raises(ValueError, match=msg):
union.validate(value, "tol")
@pytest.mark.parametrize('union, value', [
(Union(TypeOf(int), Enum('hello', 'world')), 'hello'),
(Union(TypeOf(int), Enum('hello', 'world')), 'world'),
(Union(TypeOf(int), Enum('hello', 'world')), 10),
(Union(TypeOf(int), Enum('hello', 'world'), Const(None)), None),
(Union(TypeOf(float), TypeOf(int)), 10),
(Union(TypeOf(float), TypeOf(int)), 10.3),
])
def test_union_valid_values(union, value):
# does not raise
union.validate(value, "tol")
def test_union_removes_tags():
union = Union(TypeOf(int, tags=['control']),
Enum('a', 'b', tags=['not good']),
tags=['deprecated'])
for params in union.params:
assert not params.tags
@pytest.mark.parametrize('lower, upper, msg',
[(None, None, 'lower or upper must be defined'),
(10, 1, 'lower must be strictly less than upper'),
(10, 10, 'lower must be strictly less than upper')])
def test_interval_error_init(lower, upper, msg):
with pytest.raises(ValueError, match=msg):
Interval(int, lower=lower, upper=upper)
@pytest.mark.parametrize('interval, value', [
(Interval(int, lower=None, upper=2), 1),
(Interval(int, lower=None, upper=2), 2),
(Interval(int, lower=-3, upper=None), 3),
(Interval(int, lower=-3, upper=None), -3),
(Interval(int, lower=-3, upper=2), 0),
(Interval(int, lower=-3, upper=2), -3),
(Interval(int, lower=-3, upper=2), 2),
(Interval(float, lower=None, upper=2), 1.0),
(Interval(float, lower=None, upper=2), 2.0),
(Interval(float, lower=-3, upper=None), 3.0),
(Interval(float, lower=-3, upper=None), -3.0),
(Interval(float, lower=-3, upper=2), 0.0),
(Interval(float, lower=-3, upper=2), -3.0),
(Interval(float, lower=-3, upper=2), 2.0),
])
def test_interval_valid_values(interval, value):
interval.validate(value, "tol")
@pytest.mark.parametrize('interval, value, msg', [
(Interval(int, lower=None, upper=2), 1.0, 'tol: 1.0 is not a int'),
(Interval(float, lower=None, upper=2), 1, 'tol: 1 is not a float'),
])
def test_interval_invalid_type(interval, value, msg):
with pytest.raises(TypeError, match=msg):
interval.validate(value, "tol")
@pytest.mark.parametrize('interval, value, msg', [
(Interval(int, lower=None, upper=2), 3, r'3 not in \(-inf, 2\]'),
(Interval(int, lower=None, upper=2,
upper_inclusive=False), 2, r'2 not in \(-inf, 2\)'),
(Interval(int, lower=-3, upper=None), -4, r'-4 not in \[-3, inf\)'),
(Interval(int, lower=-3, upper=None,
lower_inclusive=False), -3, r'-3 not in \(-3, inf\)'),
(Interval(int, lower=-3, upper=2), 3, r'3 not in \[-3, 2\]'),
(Interval(int, lower=-3, upper=2), -4, r'-4 not in \[-3, 2\]'),
(Interval(int, lower=-3, upper=2,
lower_inclusive=False), -3, r'-3 not in \(-3, 2\]'),
(Interval(int, lower=-3, upper=2,
upper_inclusive=False), 2, r'2 not in \[-3, 2\)'),
])
def test_interval_invalid_values(interval, value, msg):
with pytest.raises(ValueError, match=msg):
interval.validate(value, 'tol')
| 2.140625 | 2 |
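The tests above double as documentation for the skvalid parameter API; a minimal usage sketch built only from constructs that appear in the test cases:

from skvalid.parameters import TypeOf, Enum, Interval

TypeOf(int).validate(10, 'tol')                          # passes silently
Enum('a', 'b').validate('a', 'tol')                      # passes silently
Interval(float, lower=-3, upper=2).validate(0.0, 'tol')  # passes silently
# Interval(float, lower=-3, upper=2).validate(3.0, 'tol') raises ValueError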
hello.py | sokjc/BuildWeekGitDemo | 0 | 12785553 | <gh_stars>0
"""
Prints Hello World
"""
def say_hi(name):
return f'Hello {name}!'
def say_bye(name):
return f'Bye {name}'
if __name__ == '__main__':
    print(say_hi("World"))
    print(say_bye("World"))
| 2.703125 | 3 |
kwep.py | sumitgo/ram22 | 0 | 12785554 | import os
os.system("chmod 777 /content/xorta/Miners/ethminer/v0.11.0_Nvidia_Optimized/Linux/ethminer")
| 1.171875 | 1 |
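The one-liner above shells out to chmod; the same permission change can be made in-process with os.chmod, avoiding the subprocess (a sketch using the same path):

import os
import stat

path = '/content/xorta/Miners/ethminer/v0.11.0_Nvidia_Optimized/Linux/ethminer'
# 0o777: read/write/execute for owner, group and others, matching chmod 777
os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)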
wmt21-multi-low-res/euro_low_res_multiling/data/wiki/remove_short.py | ufal/bergamot | 0 | 12785555 | <gh_stars>0
import sys
for line in sys.stdin:
if len(line.split(' '))>6:
print(line.strip())
| 2.46875 | 2 |
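The filter above is meant to run in a pipeline (reading stdin, writing stdout) and keeps lines with more than six space-separated tokens; a quick check of the token count it tests:

line = 'a b c d e f g\n'
print(len(line.split(' ')))  # 7, so this line would be kept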
dragoncord-bot/test.py | Dragoncord-for-discord/dragoncord | 0 | 12785556 | @bot.command()
async def help1(ctx):
cpage = discord.Embed(
title = 'Команды бота | Версия бота: 2.5',
description = f"say - Повторяет что Вы говорите.\nban - Банит участника.\nmute - Мутит участника.\ninfobot - Тут инфы о боте и создателя.\nuser - Информация о участника.\navatar - Аватарка у участника.\ncat - Показывает фото кота.\nfox - Показывает фото лисы.\nmeme - Показывает мемы.\ndog - Показывает фото собаки.\nping - Пинг бота.\nben - Вы можете спросить вопрос у бена.\nkick - Кикнуть участника.\ndelspamchannels - Удалить каналы с одинаковым названием.\ndelspamroles - Удалить роли с одинаковым названием.\ncrash - Крашает авто сервер.", colour = discord.Colour.blue()
)
await ctx.send(
embed=cpage,
components = [Button(
label = "Поддержка",
style = ButtonStyle.URL,
disabled=False,
url="https://discord.gg/8k9wbbEMYT"
        ),
        Button(
label = "Поддержка",
style = ButtonStyle.URL,
disabled=False,
url="https://discord.gg/8k9wbbEMYT"
)
]
) | 2.375 | 2 |
denverapi/tools/__main__.py | xcodz-dot/denver | 4 | 12785557 | <reponame>xcodz-dot/denver
"""
Prints a list of all available tools
"""
import os
from denverapi import beautiful_cli
cli = beautiful_cli.new_cli()
def tool(path):
p = os.path.basename(path)
p = os.path.splitext(p)[0]
return p
if __name__ == "__main__":
directory = os.path.dirname(__file__)
cli.info("Available Tools")
for x in os.listdir(directory):
if not x.startswith("_") and x.endswith(".py"):
cli.info("\t", tool(x), sep="")
| 2.796875 | 3 |
alipay/aop/api/domain/MedicalHospitalDeptInfo.py | snowxmas/alipay-sdk-python-all | 213 | 12785558 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class MedicalHospitalDeptInfo(object):
def __init__(self):
self._code = None
self._location = None
self._name = None
self._parent_name = None
self._partner_code = None
@property
def code(self):
return self._code
@code.setter
def code(self, value):
self._code = value
@property
def location(self):
return self._location
@location.setter
def location(self, value):
self._location = value
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def parent_name(self):
return self._parent_name
@parent_name.setter
def parent_name(self, value):
self._parent_name = value
@property
def partner_code(self):
return self._partner_code
@partner_code.setter
def partner_code(self, value):
self._partner_code = value
def to_alipay_dict(self):
params = dict()
if self.code:
if hasattr(self.code, 'to_alipay_dict'):
params['code'] = self.code.to_alipay_dict()
else:
params['code'] = self.code
if self.location:
if hasattr(self.location, 'to_alipay_dict'):
params['location'] = self.location.to_alipay_dict()
else:
params['location'] = self.location
if self.name:
if hasattr(self.name, 'to_alipay_dict'):
params['name'] = self.name.to_alipay_dict()
else:
params['name'] = self.name
if self.parent_name:
if hasattr(self.parent_name, 'to_alipay_dict'):
params['parent_name'] = self.parent_name.to_alipay_dict()
else:
params['parent_name'] = self.parent_name
if self.partner_code:
if hasattr(self.partner_code, 'to_alipay_dict'):
params['partner_code'] = self.partner_code.to_alipay_dict()
else:
params['partner_code'] = self.partner_code
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = MedicalHospitalDeptInfo()
if 'code' in d:
o.code = d['code']
if 'location' in d:
o.location = d['location']
if 'name' in d:
o.name = d['name']
if 'parent_name' in d:
o.parent_name = d['parent_name']
if 'partner_code' in d:
o.partner_code = d['partner_code']
return o
| 2.21875 | 2 |
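The class above follows a plain property/serializer pattern; a round trip through the two converters looks like this (the field values are invented for illustration):

info = MedicalHospitalDeptInfo.from_alipay_dict(
    {'code': 'D001', 'name': 'Cardiology', 'parent_name': 'Internal Medicine'})
print(info.to_alipay_dict())
# {'code': 'D001', 'name': 'Cardiology', 'parent_name': 'Internal Medicine'}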
idaes/power_generation/costing/power_plant_costing.py | carldlaird/idaes-pse | 112 | 12785559 | <gh_stars>100-1000
#################################################################################
# The Institute for the Design of Advanced Energy Systems Integrated Platform
# Framework (IDAES IP) was produced under the DOE Institute for the
# Design of Advanced Energy Systems (IDAES), and is copyright (c) 2018-2021
# by the software owners: The Regents of the University of California, through
# Lawrence Berkeley National Laboratory, National Technology & Engineering
# Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia University
# Research Corporation, et al. All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and
# license information.
#################################################################################
"""
Power Plant costing library
This library leverages NETL costing capabilities. Two main methods have been
developed to calculate the capital cost of power generation plants:
1.- Fossil fueled power plants (from SCPC to IGCC) (get_PP_costing)
2.- supercritical CO2 power cycles (direct and indirect) (get_sCO2_unit_cost)
other methods:
* get_ASU_cost() to cost air separation units
* costing_initialization() to initialize costing blocks
* display_total_plant_costs() to display total plant cost
* display_bare_erected_costs() to display BEC costs
* build_flowsheet_cost_constraint() to display the total cost of the entire
flowsheet
* display_flowsheet_cost() to display flowsheet cost
* check_sCO2_costing_bounds() to display a warning if the costing model has
been used outside the range for which it was designed
"""
__author__ = "Costing Team (<NAME> and <NAME>)"
__version__ = "1.0.0"
from pyomo.environ import Param, Var, Block, Constraint, Expression, value, \
Expr_if
import idaes.core.util.scaling as iscale
from idaes.power_generation.costing.costing_dictionaries import \
BB_costing_exponents, BB_costing_params, sCO2_costing_params
from pyomo.util.calc_var_value import calculate_variable_from_constraint
# -----------------------------------------------------------------------------
# Power Plant Costing Library
# -----------------------------------------------------------------------------
def get_PP_costing(self, cost_accounts,
scaled_param, units, tech, ccs='B'):
'''
Power Plant Costing Method
This method relies on the capital cost scaling methodologies developed
by NETL. Report #DOE/NETL-341/013113
    Multiple vendor quotes have been used to determine the cost of several
    pieces of plant equipment (e.g. boiler, pumps, heat exchangers) and of
    other costs incurred during plant operation (e.g. solids handling).
Scaling approach uses one main equation:
SC = RC*(SP/RP)^Exp
where:
SC is the scaled cost
RC is the reference cost
SP is the scaled operational parameter
RP is the reference operational parameter
Exp is the scaling exponent
    The scaled cost is computed using reference values for different technologies.
Categories:
1 - Supercritical PC, air-fired, with and without CO2 capture,
Illinois No. 6 coal
2 - Subcritical PC, air-fired, with and without CO2 capture,
Illinois No. 6 coal
3 - Two-stage, slurry-feed, oxygen-blown gasifier with and without
CO2 capture, Illinois No. 6 coal
4 - Single-stage, slurry-feed, oxygen-blown gasifier with and without
CO2 capture, Illinois No. 6 coal
5 - Single-stage, dry-feed, oxygen-blown, up-flow gasifier with
and without CO2 capture, Illinois No. 6 coal
6 - Natural gas, air-fired, with and without CO2 capture
7 - Advanced Ultrasupercritical PC
This method computes the capital cost of units and main components of the
power plant, and requires a few arguments to build a constraint as part of
your main model.
Args:
    * self: A block or unit model where costing constraints can be added
    * cost_accounts: A list of accounts to be included in the total cost,
they should all use the same reference parameter
* scaled_param: the process parameter for the system(s) being costed
* units: the units of the scaled_param, used for verification
    * tech: int 1-7 representing the above categories
* ccs: 'A' or 'B' representing no CCS or CCS
The appropriate scaling parameters for various cost accounts can be found
in the QGESS on capital cost scaling (Report #DOE/NETL-341/013113).
The correct units for the reference parameters are found in the BBR4 COE
spreadsheet.
'''
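    # Worked example of the scaling equation above (numbers are illustrative,
    # not taken from the NETL report): with RC = 10 $MM, RP = 500, SP = 650 and
    # Exp = 0.7, SC = 10 * (650 / 500) ** 0.7 ~= 12.02 $MM. The CE-index ratio
    # applied in bare_erected_cost_rule below then adjusts for the dollar year.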
# ------------------------ Power Plant Cost ------------------------
# check to see if a costing block already exists
if hasattr(self, 'costing'):
raise AttributeError("{} already has an attribute costing. "
"Check that you are not calling get_costing"
" twice on the same model".format(self.name))
# create a costing Block
self.costing = Block()
self.costing.library = 'PP'
# find flowsheet block to create global costing parameters
try:
fs = self.flowsheet()
except AttributeError:
fs = self.parent_block()
# build flowsheet level parameters CE_index = year
if not hasattr(fs, 'costing'):
fs.get_costing(year='2018')
CE_index = fs.costing.CE_index
# define preloaded accounts
PC_preloaded_accounts = {'Coal Handling': ['1.1', '1.2',
'1.3', '1.4', '1.9a'],
'Sorbent Handling': ['1.5', '1.6',
'1.7', '1.8', '1.9b'],
'Coal Feed': ['2.1', '2.2', '2.9a'],
'Sorbent Feed': ['2.5', '2.6', '2.9b'],
'Feedwater System': ['3.1', '3.3'],
'PC Boiler': ['4.9'],
'Steam Turbine': ['8.1'],
'Condenser': ['8.3'],
'Cooling Tower': ['9.1'],
'Circulating Water System': ['9.2', '9.3',
'9.4', '9.6', '9.7'],
'Ash Handling': ['10.6', '10.7', '10.9']}
IGCC_preloaded_accounts = {'Coal Handling': ['1.1', '1.2',
'1.3', '1.4', '1.9'],
'Coal Feed': ['2.1', '2.2',
'2.3', '2.4', '2.9'],
'Feedwater System': ['3.1', '3.3'],
'Gasifier': ['4.1'],
'Syngas Cooler': ['4.2'],
'ASU': ['4.3a'],
'ASU Oxidant Compression': ['4.3b'],
'Combustion Turbine': ['6.1', '6.3'],
'Syngas Expander': ['6.2'],
'HRSG': ['7.1', '7.2'],
'Steam Turbine': ['8.1'],
'Condenser': ['8.3'],
'Cooling Tower': ['9.1'],
'Circulating Water System': ['9.2', '9.3',
'9.4', '9.6',
'9.7'],
'Slag Handling': ['10.1', '10.2',
'10.3', '10.6',
'10.7', '10.8',
'10.9']}
NGCC_preloaded_accounts = {'Feedwater System': ['3.1', '3.3'],
'Combustion Turbine': ['6.1', '6.3'],
'HRSG': ['7.1', '7.2'],
'Steam Turbine': ['8.1'],
'Condenser': ['8.3'],
'Cooling Tower': ['9.1'],
'Circulating Water System': ['9.2', '9.3',
'9.4', '9.6',
'9.7']}
AUSC_preloaded_accounts = {'PC Boiler': ['4.9'],
'Steam Turbine': ['8.1'],
'Steam Piping': ['8.4']}
# preloaded account handling
if type(cost_accounts) == str:
if tech in [1, 2]:
cost_accounts = PC_preloaded_accounts[cost_accounts]
elif tech in [3, 4, 5]:
cost_accounts = IGCC_preloaded_accounts[cost_accounts]
elif tech == 6:
cost_accounts = NGCC_preloaded_accounts[cost_accounts]
elif tech == 7:
cost_accounts = AUSC_preloaded_accounts[cost_accounts]
else:
AttributeError("{} technology not supported".format(self.name))
# check that all accounts use the same process parameter
param_check = None
for account in cost_accounts:
param = BB_costing_exponents[str(tech)][account]['Process Parameter']
if param_check is None:
param_check = param
elif param != param_check:
raise ValueError("{} cost accounts selected do not use "
" the same process parameter".format(self.name))
# check that the user passed the correct units
ref_units = BB_costing_params[str(tech)][ccs][cost_accounts[0]]['Units']
if units != ref_units:
raise ValueError('Account %s uses units of %s. '
'Units of %s were passed.'
% (cost_accounts[0], ref_units, units))
# construct dictionaries
account_names = {}
exponents = {}
reference_costs = {}
reference_params = {}
engineering_fees = {}
process_contingencies = {}
project_contingencies = {}
for account in cost_accounts:
account_names[account] = BB_costing_exponents[str(
tech)][account]['Account Name']
exponents[account] = float(
BB_costing_exponents[str(tech)][account]['Exponent'])
reference_costs[account] = BB_costing_params[str(
tech)][ccs][account]['BEC']
reference_params[account] = BB_costing_params[str(
tech)][ccs][account]['RP Value']
engineering_fees[account] = BB_costing_params[str(
tech)][ccs][account]['Eng Fee']
process_contingencies[account] = BB_costing_params[str(
tech)][ccs][account]['Process Contingency']
project_contingencies[account] = BB_costing_params[str(
tech)][ccs][account]['Project Contingency']
# Used by other functions for reporting results
self.costing.account_names = account_names
# define parameters
self.costing.exp = Param(cost_accounts,
mutable=True,
initialize=exponents,
doc='exponential parameter for account')
self.costing.ref_cost = Param(cost_accounts,
mutable=True,
initialize=reference_costs,
doc='reference cost for account')
self.costing.ref_param = Param(cost_accounts,
mutable=True,
initialize=reference_params,
doc='reference parameter for account')
self.costing.eng_fee = Param(cost_accounts,
mutable=True,
initialize=engineering_fees,
doc='engineering fee percentage')
self.costing.process_conting = Param(cost_accounts,
mutable=True,
initialize=process_contingencies,
doc='process contingency percentage')
self.costing.project_conting = Param(cost_accounts,
mutable=True,
initialize=project_contingencies,
doc='project contingency percentage')
# define variables
self.costing.bare_erected_cost = Var(cost_accounts,
initialize=reference_costs,
bounds=(0, 1e4),
doc='scaled bare erected cost in $MM')
self.costing.total_plant_cost = Var(cost_accounts,
initialize=reference_costs,
bounds=(0, 1e4),
doc='total plant cost in $MM')
# rule for scaling BEC
# reference cost is in 2018 dollars, 671.1 is CE index for 2018
def bare_erected_cost_rule(costing, i):
return (costing.bare_erected_cost[i]*1e3 ==
(CE_index/671.1)*costing.ref_cost[i] *
(scaled_param/costing.ref_param[i])**costing.exp[i])
self.costing.bare_erected_cost_eq = Constraint(
cost_accounts, rule=bare_erected_cost_rule)
# rule for calculating TPC
def total_plant_cost_rule(costing, i):
return (costing.total_plant_cost[i] == costing.bare_erected_cost[i] *
(1 + costing.eng_fee[i] + costing.process_conting[i]) *
(1 + costing.project_conting[i]))
self.costing.total_plant_cost_eq = Constraint(
cost_accounts, rule=total_plant_cost_rule)
# rule for sum of BEC
def BEC_sum_rule(costing):
return sum(costing.bare_erected_cost[i] for i in cost_accounts)
self.costing.bare_erected_cost_sum = Expression(rule=BEC_sum_rule)
# rule for sum of TPC
def TPC_sum_rule(costing):
return sum(costing.total_plant_cost[i] for i in cost_accounts)
self.costing.total_plant_cost_sum = Expression(rule=TPC_sum_rule)
# # add variable and constraint scaling
for i in cost_accounts:
iscale.set_scaling_factor(self.costing.bare_erected_cost[i], 1)
iscale.set_scaling_factor(self.costing.total_plant_cost[i], 1)
iscale.constraint_scaling_transform(self.
costing.bare_erected_cost_eq[i],
1e-3,
overwrite=False)
iscale.constraint_scaling_transform(self.
costing.total_plant_cost_eq[i],
1,
overwrite=False)
# -----------------------------------------------------------------------------
# Supercritical CO2 Costing Library
# -----------------------------------------------------------------------------
def get_sCO2_unit_cost(self, equipment, scaled_param, temp_C=None, n_equip=1):
'''
Args:
self - pyomo Block where constraints will be made
    equipment - the name of the sCO2 equipment to cost
    scaled_param - the scaling parameter (in appropriate units) for the
        selected equipment
    temp_C - the maximum temperature of the equipment. Not all types of
equipment use a temperature correction factor, so it is optional
n_equip - the number of pieces of equipment to cost
Cost is in M$
'''
# check to see if a costing block already exists
if hasattr(self, 'costing'):
raise AttributeError("{} already has an attribute costing. "
"Check that you are not calling get_costing"
" twice on the same model".format(self.name))
# create a costing Block
self.costing = Block()
self.costing.library = 'sCO2'
self.costing.equipment = equipment
# find flowsheet block to create global costing parameters
try:
fs = self.flowsheet()
except AttributeError:
fs = self.parent_block()
# build flowsheet level parameters CE_index = year
if not hasattr(fs, 'costing'):
fs.get_costing(year='2017')
CE_index = fs.costing.CE_index
param_dict = sCO2_costing_params[equipment]
# define parameters
self.costing.ref_cost = Param(mutable=True,
initialize=param_dict['a'],
doc='Reference cost')
self.costing.exp = Param(mutable=True,
initialize=param_dict['b'],
doc='Scaling exponent')
self.costing.c = Param(mutable=True,
initialize=param_dict['c'],
doc='coefficient for temperature correction')
self.costing.d = Param(mutable=True,
initialize=param_dict['d'],
doc='coefficient for temperature correction')
self.costing.material_cost = Param(mutable=True,
doc='material installation cost',
initialize=param_dict['Material Cost'])
self.costing.labor_cost = Param(mutable=True,
initialize=param_dict['Labor Cost'],
doc='labor installation cost')
# estimates for the percentages of TPC will be added later
self.costing.eng_fee = Param(mutable=True,
initialize=0,
doc='engineering fee percentage')
self.costing.process_conting = Param(mutable=True,
initialize=0,
doc='process contingency percentage')
self.costing.project_conting = Param(mutable=True,
initialize=0,
doc='project contingency percentage')
# define variables
# n_equip is left as a fixed variable to support MINLP optimization
self.costing.n_equip = Var(initialize=n_equip,
doc='number of pieces of equipment')
self.costing.n_equip.fix(n_equip)
self.costing.scaled_param = Var(initialize=scaled_param,
bounds=(0, 1e12),
doc='scaled parameter')
self.costing.temp_factor = Var(initialize=1,
bounds=(0.9, 100),
doc='temperature correction factor')
self.costing.equipment_cost = Var(initialize=self.costing.ref_cost,
bounds=(0, 1e4),
doc='equipment cost of sCO2 unit in $MM')
self.costing.bare_erected_cost = Var(initialize=self.costing.ref_cost,
bounds=(0, 1e4),
doc='bare erected cost of sCO2 unit'
'in $MM')
self.costing.total_plant_cost = Var(initialize=self.costing.ref_cost,
bounds=(0, 1e4),
doc='total plant cost of sCO2 unit'
'in $MM')
# divides the scaled parameter by the number of pieces of equipment
def scaled_param_rule(costing):
return costing.scaled_param*costing.n_equip == scaled_param
self.costing.scaled_param_eq = Constraint(rule=scaled_param_rule)
# check if equipment requires a temperature correction factor
if equipment in ['Axial turbine', 'Radial turbine', 'Coal-fired heater',
'Natural gas-fired heater', 'Recuperator']:
if temp_C is None:
raise ValueError('Temperature argument is '
'required to cost %s equipment' % equipment)
else:
self.costing.temperature = Var(initialize=500,
bounds=(0, 1e6),
doc='dummy var for temperature')
self.costing.temp_eq = Constraint(expr=(self.costing.temperature
== temp_C))
def temp_correction_rule(costing): # rule for temp correction
return (Expr_if(costing.temperature < 550,
1e-6*costing.temperature + 1,
1 + costing.c*(costing.temperature - 550)
+ costing.d*(costing.temperature - 550)**2) ==
costing.temp_factor)
self.costing.temp_correction_eq = Constraint(
rule=temp_correction_rule)
else:
self.costing.temp_factor.fix(1)
# rule for equipment cost
def equipment_cost_rule(costing):
return (costing.equipment_cost*1e6 ==
(CE_index/567.5) * costing.n_equip * costing.ref_cost *
(costing.scaled_param**costing.exp) * costing.temp_factor)
self.costing.equipment_cost_eq = Constraint(rule=equipment_cost_rule)
# rule for bare erected cost
def bare_erected_cost_rule(costing):
return (costing.bare_erected_cost == costing.equipment_cost *
(1 + costing.material_cost + costing.labor_cost))
self.costing.bare_erected_cost_eq = Constraint(rule=bare_erected_cost_rule)
# rule for calculating total plant cost
def total_plant_cost_rule(costing):
return (costing.total_plant_cost == costing.bare_erected_cost *
(1 + costing.eng_fee + costing.process_conting
+ costing.project_conting))
self.costing.total_plant_cost_eq = Constraint(rule=total_plant_cost_rule)
# add variable and constraint scaling
if equipment in ["Recuperator", "Direct air cooler"]:
iscale.set_scaling_factor(self.costing.scaled_param, 1e-5)
else:
iscale.set_scaling_factor(self.costing.scaled_param, 1)
iscale.set_scaling_factor(self.costing.equipment_cost, 1e3)
iscale.set_scaling_factor(self.costing.bare_erected_cost, 1e3)
iscale.set_scaling_factor(self.costing.total_plant_cost, 1e3)
iscale.constraint_scaling_transform(
self.costing.equipment_cost_eq, 1e-6, overwrite=False)
iscale.constraint_scaling_transform(
self.costing.bare_erected_cost_eq, 1e3, overwrite=False)
    iscale.constraint_scaling_transform(
        self.costing.total_plant_cost_eq, 1e3, overwrite=False)
# -----------------------------------------------------------------------------
# Air Separation Unit Costing Library
# -----------------------------------------------------------------------------
def get_ASU_cost(self, scaled_param):
# scaled parameter is O2 flowrate in TPD
params = {'Reference Cost': 3.26e6,
'Reference Parameter': 13078,
'Exponent': 0.7,
'Eng Fee': 0.097,
'Process': 0,
'Project': 0.110}
# check to see if a costing block already exists
if hasattr(self, 'costing'):
raise AttributeError("{} already has an attribute costing. "
"Check that you are not calling get_costing"
" twice on the same model".format(self.name))
# create a costing Block
self.costing = Block()
self.costing.library = 'ASU'
# find flowsheet block to create global costing parameters
try:
fs = self.flowsheet()
except AttributeError:
fs = self.parent_block()
# build flowsheet level parameters CE_index = year
if not hasattr(fs, 'costing'):
fs.get_costing(year='2017')
CE_index = fs.costing.CE_index
# define parameters
self.costing.ref_cost = Param(initialize=params['Reference Cost'],
mutable=True,
doc='ASU reference cost')
self.costing.ref_param = Param(initialize=params['Reference Parameter'],
mutable=True,
doc='ASU reference parameter value')
self.costing.exp = Param(initialize=params['Exponent'],
mutable=True,
doc='ASU scaling exponent')
self.costing.eng_fee = Param(mutable=True,
initialize=params['Eng Fee'],
doc='engineering fee percentage')
self.costing.process_conting = Param(mutable=True,
initialize=params['Process'],
doc='process contingency percentage')
self.costing.project_conting = Param(mutable=True,
initialize=params['Project'],
doc='project contingency percentage')
# define variables
self.costing.bare_erected_cost = Var(initialize=params['Reference Cost'],
bounds=(0, 1e4),
doc='scaled bare erected cost in $MM')
self.costing.total_plant_cost = Var(initialize=params['Reference Cost'],
bounds=(0, 1e4),
doc='total plant cost in $MM')
# rule for scaling BEC
# reference cost is in 2008 dollars, 566.2 is CE index for Nov 2008
def bare_erected_cost_rule(costing):
return (costing.bare_erected_cost*1e3 ==
(CE_index/566.2)*costing.ref_cost *
(scaled_param/costing.ref_param)**costing.exp)
self.costing.bare_erected_cost_eq = Constraint(rule=bare_erected_cost_rule)
# rule for calculating TPC
def total_plant_cost_rule(costing):
return (costing.total_plant_cost == costing.bare_erected_cost *
(1 + costing.eng_fee + costing.process_conting
+ costing.project_conting))
self.costing.total_plant_cost_eq = Constraint(rule=total_plant_cost_rule)
# add variable and constraint scaling
iscale.set_scaling_factor(self.costing.bare_erected_cost, 1)
iscale.set_scaling_factor(self.costing.total_plant_cost, 1)
iscale.constraint_scaling_transform(
self.costing.bare_erected_cost_eq, 1e-3, overwrite=False)
iscale.constraint_scaling_transform(
self.costing.total_plant_cost_eq, 1, overwrite=False)
# -----------------------------------------------------------------------------
# Costing Library Utility Functions
# -----------------------------------------------------------------------------
def costing_initialization(fs):
for o in fs.component_objects(descend_into=False):
# look for costing blocks
if hasattr(o, 'costing'):
if o.costing.library == 'sCO2':
if o.costing.equipment in ['Axial turbine',
'Radial turbine',
'Coal-fired heater',
'Natural gas-fired heater',
                                           'Recuperator']:
calculate_variable_from_constraint(o.costing.temperature,
o.costing.temp_eq)
calculate_variable_from_constraint(o.costing.temp_factor,
o.costing.
temp_correction_eq)
calculate_variable_from_constraint(o.costing.scaled_param,
o.costing.scaled_param_eq)
calculate_variable_from_constraint(o.costing.equipment_cost,
o.costing.equipment_cost_eq)
calculate_variable_from_constraint(o.costing.bare_erected_cost,
o.costing.
bare_erected_cost_eq)
calculate_variable_from_constraint(o.costing.total_plant_cost,
o.costing.
total_plant_cost_eq)
elif o.costing.library in ['PP', 'ASU']:
for key in o.costing.bare_erected_cost.keys():
calculate_variable_from_constraint(o.costing.
bare_erected_cost[key],
o.costing.
bare_erected_cost_eq[
key])
calculate_variable_from_constraint(o.costing.
total_plant_cost[key],
o.costing.
total_plant_cost_eq[
key])
def display_total_plant_costs(fs):
print('-----Total Plant Costs-----')
for o in fs.component_objects(descend_into=False):
# look for costing blocks
if hasattr(o, 'costing') and hasattr(o.costing, 'total_plant_cost'):
print('%s: $%.2f Million' % (value(o.name),
                                         value(o.costing.total_plant_cost)))
def display_bare_erected_costs(fs):
print('-----Bare Erected Costs-----')
for o in fs.component_objects(descend_into=False):
# look for costing blocks
if hasattr(o, 'costing') and hasattr(o.costing, 'bare_erected_cost'):
print('%s: $%.2f Million' % (value(o.name),
value(o.costing.bare_erected_cost)))
def display_equipment_costs(fs):
print('-----Equipment Costs-----')
for o in fs.component_objects(descend_into=False):
# look for costing blocks
if hasattr(o, 'costing') and hasattr(o.costing, 'equipment_cost'):
print('%s: $%.2f Million' % (value(o.name),
value(o.costing.equipment_cost)))
def build_flowsheet_cost_constraint(m):
total_cost_list = []
for o in m.fs.component_objects(descend_into=False):
# look for costing blocks
if hasattr(o, 'costing'):
for key in o.costing.total_plant_cost.keys():
total_cost_list.append(o.costing.total_plant_cost[key])
m.fs.flowsheet_cost = Var(initialize=0,
bounds=(0, 1e12),
doc='cost of entire process')
def flowsheet_cost_rule(fs):
return fs.flowsheet_cost == sum(total_cost_list)
m.fs.flowsheet_cost_eq = Constraint(rule=flowsheet_cost_rule)
def display_flowsheet_cost(m):
print('\n')
print('Total flowsheet cost: $%.3f Million' %
          value(m.fs.flowsheet_cost))
def check_sCO2_costing_bounds(fs):
    # iterate through the children of the flowsheet
for o in fs.component_objects(descend_into=False):
# look for costing blocks
if hasattr(o, 'costing'):
costing = o.costing
if costing.library == 'sCO2':
equipment = costing.equipment
lower_bound = sCO2_costing_params[equipment]['Lower Bound']
upper_bound = sCO2_costing_params[equipment]['Upper Bound']
if value(costing.scaled_param) < lower_bound:
print('''%s: The scaled parameter (%f) is below the lower
bound (%f).''' % (value(o.name),
value(costing.scaled_param),
lower_bound))
elif value(costing.scaled_param) > upper_bound:
print('''%s: The scaled parameter (%f) is above the upper
bound (%f).''' % (value(o.name),
value(costing.scaled_param),
upper_bound))
else:
print('''%s: The scaled parameter is within the bounds.'''
% value(o.name))
| 1.890625 | 2 |
WeatherStation/rtcds1307.py | flashypepo/myMicropython-Examples | 3 | 12785560 | # RTC clock with DS1307
#
# 2017-0225 various tests, since micropython 1.8.7 changes RTC interface
# Sources:
# ESP8266 - connection to RTC, I2C-connection with NodeMCU
# MicroPython class RTC: https://micropython.org/resources/docs/en/latest/wipy/library/machine.RTC.html
# class RTC is only for WiPy board, but also works in ESP8266
# interface/methods are changed, is more like Adafruit uRTC
# class uRTC of Adafruit is NOT used, but documentation is used!
# Adafruit uRTC: http://micropython-urtc.readthedocs.io/en/latest/urtc.html#ds1307
#
import machine
# create an RTC object
# 2017_0225 apparently no I2C address has to be given?? It works, however.
print("create RTC object...")
rtc = machine.RTC()
# initialise the RTC to a certain datetime
#
# Analyses:
# * according to Adafruit documentation datetime().
#
# * rtc.datetime(datetime): get or set the current time.
# The datetime is an 8-tuple of the format describing the time
# to be set:
# (year, month, day, weekday, hour, minute, second, millisecond)
#
# * If not specified, the method returns a tuple in the same format.
# * day of the week: an integer, where Monday is 0 and Sunday is 6.
# set RTC: (y, m, d, wd, h, m, s, ms)
# rtc.datetime((2017, 2, 25, 6, 22, 48, 15, 0))
# get datetime
print("current datetime: ", rtc.datetime())
# get help about rtc
print("\nprint help about RTC object...")
help(rtc)
# object <RTC> is of type RTC
# datetime -- <function>
# memory -- <function>
# alarm -- <function>
# alarm_left -- <function>
# irq -- <function>
# ALARM0 -- 0
print("\ncreate an alarm which triggers off in 10 seconds...")
import time
# set alarm in 10 seconds
rtc.alarm(0, 10000)
print("print time-left of alarm...")
# get time left to alarm
time.sleep(7.0)
print(rtc.alarm_left(0))
time.sleep(1.0)
print(rtc.alarm_left(0))
time.sleep(1.0)
print(rtc.alarm_left(0))
time.sleep(1.0)
print(rtc.alarm_left(0))
print("Alarm is on, no visible cues. How todo?")
print("\n===== end-of-RTC-test ===")
| 3.234375 | 3 |
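A common way to make the alarm observable on the ESP8266 port is to use it as a deep-sleep wake source; a sketch of that standard MicroPython pattern (not part of the script above):

import machine
rtc = machine.RTC()
rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
rtc.alarm(rtc.ALARM0, 10000)  # fire in 10 seconds
machine.deepsleep()           # the board resets when the alarm fires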
src/masonite/commands/__init__.py | cercos/masonite | 1,816 | 12785561 | <reponame>cercos/masonite<filename>src/masonite/commands/__init__.py
from .CommandCapsule import CommandCapsule
from .AuthCommand import AuthCommand
from .TinkerCommand import TinkerCommand
from .KeyCommand import KeyCommand
from .ServeCommand import ServeCommand
from .QueueWorkCommand import QueueWorkCommand
from .QueueRetryCommand import QueueRetryCommand
from .QueueTableCommand import QueueTableCommand
from .QueueFailedCommand import QueueFailedCommand
from .MakeControllerCommand import MakeControllerCommand
from .MakeJobCommand import MakeJobCommand
from .MakeMailableCommand import MakeMailableCommand
from .MakeProviderCommand import MakeProviderCommand
from .PublishPackageCommand import PublishPackageCommand
from .MakePolicyCommand import MakePolicyCommand
from .MakeTestCommand import MakeTestCommand
from .DownCommand import DownCommand
from .UpCommand import UpCommand
from .MakeCommandCommand import MakeCommandCommand
from .MakeViewCommand import MakeViewCommand
from .MakeMiddlewareCommand import MakeMiddlewareCommand
from .PresetCommand import PresetCommand
from .Command import Command
| 1.34375 | 1 |
app/utils/dbutils.py | saowu/FigureBed | 1 | 12785562 | <filename>app/utils/dbutils.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
' database connection pool module '
__author__ = 'saowu'
import pymysql
from DBUtils.PooledDB import PooledDB
class DBUtil(object):
__instance = None
def __init__(self, host, port, database, user, password, ):
        # database connection pool
self.pool = PooledDB(
creator=pymysql,
maxconnections=6,
mincached=2,
maxcached=5,
maxshared=3,
blocking=True,
maxusage=None,
setsession=[],
ping=0,
host=host,
port=int(port),
user=user,
password=password,
database=database,
charset='utf8'
)
        # this log line may print up to four times, which is fine since the class is a singleton
from .. import app
app.logger.warning('PooledDB init success...')
    # implement the singleton pattern
def __new__(cls, *args, **kwargs):
if cls.__instance is None:
cls.__instance = object.__new__(cls)
return cls.__instance
else:
return cls.__instance
def create_conn_cursor(self):
conn = self.pool.connection()
cursor = conn.cursor(pymysql.cursors.DictCursor)
return conn, cursor
def fetch_one(self, sql, args):
'''
        Query a single row.
:param sql:
:param args:
:return:
'''
conn, cursor = self.create_conn_cursor()
cursor.execute(sql, args)
result = cursor.fetchone()
cursor.close()
conn.close()
return result
def insert_many(self, sql, args):
'''
        Insert multiple rows.
:param sql:
:param args:
:return:
'''
conn, cursor = self.create_conn_cursor()
try:
result = cursor.executemany(sql, args)
conn.commit()
        except Exception:
            conn.rollback()
            result = None  # avoid returning an unbound name when the insert fails
cursor.close()
conn.close()
return result
def delete_one(self, sql, args):
'''
        Delete a single row.
:param sql:
:param args:
:return:
'''
conn, cursor = self.create_conn_cursor()
try:
result = cursor.execute(sql, args)
conn.commit()
        except Exception:
            conn.rollback()
            result = None  # avoid returning an unbound name when the delete fails
cursor.close()
conn.close()
return result
| 2.34375 | 2 |
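A usage sketch for the pooled helper above (the connection parameters and table name are placeholders, not real credentials):

db = DBUtil(host='127.0.0.1', port=3306, database='figurebed',
            user='root', password='secret')
row = db.fetch_one('SELECT id, name FROM images WHERE id = %s', (1,))
print(row)  # a dict such as {'id': 1, 'name': 'example.png'}, or None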
kanshin/data.py | basuke/kanshin-export | 0 | 12785563 | # -*- coding: utf-8 -*-
import boto3
from boto3.dynamodb.conditions import Key
TABLE_PREFIX = 'KanshinCom-'
USER_TABLE = TABLE_PREFIX + 'user'
KEYWORD_TABLE = TABLE_PREFIX + 'keyword'
CONNECTION_TABLE = TABLE_PREFIX + 'connection'
DIARY_TABLE = TABLE_PREFIX + 'diary'
dynamodb = boto3.resource('dynamodb', region_name='us-west-2')
user_table = dynamodb.Table(USER_TABLE)
keyword_table = dynamodb.Table(KEYWORD_TABLE)
connection_table = dynamodb.Table(CONNECTION_TABLE)
diary_table = dynamodb.Table(DIARY_TABLE)
s3 = boto3.resource('s3', region_name='ap-northeast-1')
storage_bucket = s3.Bucket('s.kanshin.link')
def fetch_user_diaries(user_id):
query_args = dict(
IndexName='byUser',
KeyConditionExpression=Key('user_id').eq(user_id),
ProjectionExpression='id'
)
for item in query(diary_table, **query_args):
yield(get_item(diary_table, item['id']))
def fetch_user_keywords(user_id):
query_args = dict(
IndexName='byUser',
KeyConditionExpression=Key('user_id').eq(user_id),
ProjectionExpression='id'
)
for item in query(keyword_table, **query_args):
yield(get_item(keyword_table, item['id']))
def fetch_user(user_id):
return get_item(user_table, user_id)
def fetch_connections(keyword_id):
query_args = dict(
        # connections are keyed on 'id' / 'other_id' (see save_connection below)
        KeyConditionExpression=Key('id').eq(keyword_id),
        ProjectionExpression='other_id, out_reason, in_reason'
)
return (
dict(
id=item['other_id'],
out_reason=item['out_reason'],
in_reason=item['in_reason']
)
for item in query(connection_table, **query_args)
)
def save_user(item):
item['id'] = int(item['id'])
save_item(user_table, item)
def save_keyword(item):
item['id'] = int(item['id'])
save_item(keyword_table, item)
def save_connection(id1, id2, out_reason=None, in_reason=None):
save_item(connection_table, dict(id=int(id1), other_id=int(id2), out_reason=out_reason, in_reason=in_reason), ['id', 'other_id'])
def save_diary(item):
item['id'] = int(item['id'])
save_item(diary_table, item)
def has_image(path):
obj = storage_bucket.Object(path)
try:
obj.metadata # test if obj exists
except:
return False
return True
def save_image(path, content_type, content):
obj = storage_bucket.Object(path)
obj.put(Body=content, ContentType=content_type, ACL='public-read')
# ----------------------
def get_item(table, id=None, **kwargs):
if id is not None:
kwargs['KeyConditionExpression'] = Key('id').eq(id)
result = table.query(**kwargs)
if result['Items'] and len(result['Items']) > 0:
return result['Items'][0]
else:
return None
def query(table, **kwargs):
startKey = None
while True:
if startKey:
kwargs['ExclusiveStartKey'] = startKey
elif 'ExclusiveStartKey' in kwargs:
del kwargs['ExclusiveStartKey']
result = table.query(**kwargs)
for item in result['Items']:
yield item
startKey = result.get('LastEvaluatedKey')
if not startKey:
break
def key_for(item, pk_keys):
return dict([(key, item[key]) for key in item if key in pk_keys])
def updates_for(item, pk_keys):
updates = {}
for key in item:
if key not in pk_keys:
value = item[key]
if value is None or value == '':
value = {'Action': 'DELETE'}
else:
value = {'Action': 'PUT', 'Value': value}
updates[key] = value
return updates
def save_item(table, item, pk_keys=['id']):
table.update_item(
Key=key_for(item, pk_keys),
AttributeUpdates=updates_for(item, pk_keys)
)
| 1.96875 | 2 |
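A sketch of how the save/fetch helpers above pair up (the IDs and reasons are invented for illustration):

save_connection(101, 202, out_reason='related topic', in_reason='mentioned by')
for conn in fetch_connections(101):
    print(conn['id'], conn['out_reason'], conn['in_reason'])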
python/Calculator.py | mysteriouspla/helloAlgorithm | 0 | 12785564 | # Welcome to pyhack calculator
num_1 = float(input("Enter the first number "))
num_2 = float(input("Enter the second number "))
print("Please select operation which you want to do: ")
operator = int(input("Type 1-Addition, 2-Subtraction, 3-Multiplication, 4-Division "))
if operator == 1:
a = num_1 + num_2
a = round(a,2)
print(a)
elif operator == 2:
b = num_1 - num_2
b = round(b,2)
print(b)
elif operator == 3:
c = num_1 * num_2
c = round(c,2)
print(c)
elif operator == 4:
d = num_1 / num_2
d = round(d,2)
print(d)
else:
print("Invalid Input")
| 4 | 4 |
app/configs/__init__.py | riszkymf/pricefinder_data_endpoint | 0 | 12785565 | <filename>app/configs/__init__.py
import os
from dotenv import load_dotenv
from app.libs import MetaFlaskEnv
APP_ROOT = os.path.join(os.path.dirname(__file__), '../..')
dotenv_path = os.path.join(APP_ROOT, '.env')
load_dotenv(dotenv_path=dotenv_path)
class Config(metaclass=MetaFlaskEnv):
ENV_PREFIX = "FLASK_" | 2.3125 | 2 |
prototype/converter.py | kloppstock/deep_cyber | 0 | 12785566 | import numpy as np
import sys
# convert any index to a 4 tuple
def unpackIndex(i, default):
a = b = c = d = default
if type(i) == int:
d = i
elif len(i) == 1:
d = i[0]
elif len(i) == 2:
c = i[0]
d = i[1]
elif len(i) == 3:
b = i[0]
c = i[1]
d = i[2]
else:
a = i[0]
b = i[1]
c = i[2]
d = i[3]
return (a, b, c, d)
def convert(path):
# load the file
arr = np.load(path + ".npy")
# open the output file
with open(("../cifar10/" + path + ".c").lower(), "w") as f:
# get dimensions
(a, b, c, d) = unpackIndex(arr.shape, 1)
arr = arr.reshape((a, b, c, d))
# write head
f.write('#include "../include/deep_cyber.h"\n')
f.write('\n')
f.write('const uint8_t ' + path.upper() + '_DATA[' + str(arr.view(np.uint8).flatten().shape[0]) + '] = {\n')
# write data
for ai in range(a):
for bi in range(b):
for ci in range(c):
for di in range(d):
elem_arr = np.zeros((1), dtype=np.float32)
elem_arr[0] = arr[ai, bi, ci, di]
elem = elem_arr.view(np.uint8).flatten()
e = elem.shape[0]
for ei in range(e):
if ai == a - 1 and bi == b - 1 and ci == c - 1 and di == d - 1 and ei == e - 1:
break
f.write('\t' + hex(elem[ei]) + ',\n')
# write tail
elem_arr = np.zeros((1), dtype=np.float32)
elem_arr[0] = arr.flatten()[-1]
elem = elem_arr.view(np.uint8).flatten()
e = elem.shape[0]
f.write('\t' + hex(elem[-1]) + '};\n')
f.write('\n')
f.write('Tensor ' + path.upper() + ' = {' + str(a) + ', ' + str(b) + ', ' + str(c) + ', ' + str(d) + ', (float*)' + path.upper() + '_DATA};\n')
convert("c1b")
convert("c1w")
convert("c2b")
convert("c2w")
convert("c3b")
convert("c3w")
convert("c4b")
convert("c4w")
convert("d1b")
convert("d1w")
convert("d2b")
convert("d2w")
| 2.40625 | 2 |
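The unpackIndex helper above left-pads any shape into a 4-tuple, filling the leading dimensions with the default value; a few illustrative calls:

print(unpackIndex(5, 1))             # (1, 1, 1, 5)
print(unpackIndex((3, 4), 1))        # (1, 1, 3, 4)
print(unpackIndex((2, 3, 4, 5), 1))  # (2, 3, 4, 5)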
tf/ensemble.py | zxlzr/atec_back | 0 | 12785567 | x1=[]
x2=[]
x3=[]
import sys
import numpy as np
f1 = open("light_gbm.txt")
for line in f1:
x1.append(float((line.strip().split('\t')[1])))
#print x1
f2 = open("simese_cnn.txt")
for line in f2:
x2.append(0.5 + 0.5*float((line.strip().split('\t')[1])))
#print x2
f3 = open("matchpyramid.txt")
for line in f3:
x3.append(float((line.strip().split('\t')[1])))
#print x3
x1=np.asarray(x1)
x2=np.asarray(x2)
x3=np.asarray(x3)
f=np.vstack((x1,x2))
f=np.vstack((f,x3))
y_pred=f[0]/3+f[1]/3+f[2]/3
#print pred.shape
#print pred
for i in range(len(y_pred)):
if y_pred[i]>0.31:
y_pred[i]=1
else:
y_pred[i]=0
output_file=sys.argv[1]
with open(output_file, 'w') as fo:
print("\nemsembling...\n")
lineno = 1
for pred in y_pred:
fo.write('{}\t{}\n'.format(lineno, int(pred)))
lineno += 1
| 2.4375 | 2 |
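The element-wise thresholding loop above can also be written as one vectorized NumPy expression with the same 0.31 cutoff:

y_pred = (y_pred > 0.31).astype(int)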
people/repositories.py | jordifierro/abidria-api | 93 | 12785568 | <reponame>jordifierro/abidria-api<filename>people/repositories.py<gh_stars>10-100
from abidria.exceptions import EntityDoesNotExistException
from .models import ORMPerson, ORMAuthToken, ORMConfirmationToken
from .entities import Person, AuthToken
class PersonRepo:
def get_person(self, id=None, username=None, email=None):
try:
if id is not None:
return self._decode_db_person(ORMPerson.objects.get(id=id))
elif username is not None:
return self._decode_db_person(ORMPerson.objects.get(username=username))
else:
return self._decode_db_person(ORMPerson.objects.get(email=email))
except ORMPerson.DoesNotExist:
raise EntityDoesNotExistException
def create_guest_person(self):
created_orm_person = ORMPerson.objects.create()
return self._decode_db_person(created_orm_person)
def update_person(self, person):
orm_person = ORMPerson.objects.get(id=person.id)
orm_person.is_registered = person.is_registered
orm_person.username = person.username
orm_person.email = person.email
orm_person.is_email_confirmed = person.is_email_confirmed
orm_person.save()
return self._decode_db_person(orm_person)
def _decode_db_person(self, db_person):
return Person(id=db_person.id, is_registered=db_person.is_registered,
username=db_person.username, email=db_person.email,
is_email_confirmed=db_person.is_email_confirmed)
class AuthTokenRepo:
def create_auth_token(self, person_id):
created_orm_auth_token = ORMAuthToken.objects.create(person_id=person_id)
return self._decode_db_auth_token(created_orm_auth_token)
def get_auth_token(self, access_token):
try:
orm_auth_token = ORMAuthToken.objects.get(access_token=access_token)
return self._decode_db_auth_token(orm_auth_token)
except ORMAuthToken.DoesNotExist:
raise EntityDoesNotExistException
def _decode_db_auth_token(self, db_auth_token):
return AuthToken(person_id=db_auth_token.person_id,
access_token=str(db_auth_token.access_token),
refresh_token=str(db_auth_token.refresh_token))
class ConfirmationTokenRepo:
def get_person_id(self, confirmation_token):
try:
return ORMConfirmationToken.objects.get(token=confirmation_token).person_id
except ORMConfirmationToken.DoesNotExist:
raise EntityDoesNotExistException
def create_confirmation_token(self, person_id):
created_orm_confirmation_token = ORMConfirmationToken.objects.create(person_id=person_id)
return str(created_orm_confirmation_token.token)
def delete_confirmation_tokens(self, person_id):
ORMConfirmationToken.objects.filter(person_id=person_id).delete()
return True
| 2.25 | 2 |
hello_world_mujoco_base.py | BolunDai0216/PyMuJoCoBase | 5 | 12785569 | from mujoco_base import MuJoCoBase
def main():
xml_path = "./xml/ball.xml"
mjb = MuJoCoBase(xml_path)
mjb.simulate()
if __name__ == "__main__":
main()
| 1.46875 | 1 |
tests/__init__.py | tdidechkin/rethinkdb-sessions | 5 | 12785570 | from django.conf import settings
settings.configure(
SESSION_ENGINE='rdb_session.main'
)
| 1.015625 | 1 |
species/plot/plot_spectrum.py | vandalt/species | 0 | 12785571 | """
Module with a function for plotting spectra.
"""
import os
import math
import warnings
import itertools
from typing import Optional, Union, Tuple, List
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from typeguard import typechecked
from matplotlib.ticker import AutoMinorLocator, MultipleLocator
from species.core import box, constants
from species.read import read_filter
from species.util import plot_util
@typechecked
def plot_spectrum(boxes: list,
filters: Optional[List[str]] = None,
residuals: Optional[box.ResidualsBox] = None,
plot_kwargs: Optional[List[Optional[dict]]] = None,
xlim: Optional[Tuple[float, float]] = None,
ylim: Optional[Tuple[float, float]] = None,
ylim_res: Optional[Tuple[float, float]] = None,
scale: Optional[Tuple[str, str]] = None,
title: Optional[str] = None,
offset: Optional[Tuple[float, float]] = None,
legend: Optional[Union[str, dict, Tuple[float, float],
List[Optional[Union[dict, str, Tuple[float, float]]]]]] = None,
figsize: Optional[Tuple[float, float]] = (10., 5.),
object_type: str = 'planet',
quantity: str = 'flux density',
output: str = 'spectrum.pdf'):
"""
Parameters
----------
boxes : list(species.core.box, )
Boxes with data.
filters : list(str, ), None
Filter IDs for which the transmission profile is plotted. Not plotted if set to None.
residuals : species.core.box.ResidualsBox, None
Box with residuals of a fit. Not plotted if set to None.
plot_kwargs : list(dict, ), None
List with dictionaries of keyword arguments for each box. For example, if the ``boxes``
are a ``ModelBox`` and ``ObjectBox``:
.. code-block:: python
plot_kwargs=[{'ls': '-', 'lw': 1., 'color': 'black'},
{'spectrum_1': {'marker': 'o', 'ms': 3., 'color': 'tab:brown', 'ls': 'none'},
'spectrum_2': {'marker': 'o', 'ms': 3., 'color': 'tab:blue', 'ls': 'none'},
'Paranal/SPHERE.IRDIS_D_H23_3': {'marker': 's', 'ms': 4., 'color': 'tab:cyan', 'ls': 'none'},
'Paranal/SPHERE.IRDIS_D_K12_1': [{'marker': 's', 'ms': 4., 'color': 'tab:orange', 'ls': 'none'},
{'marker': 's', 'ms': 4., 'color': 'tab:red', 'ls': 'none'}],
'Paranal/NACO.Lp': {'marker': 's', 'ms': 4., 'color': 'tab:green', 'ls': 'none'},
'Paranal/NACO.Mp': {'marker': 's', 'ms': 4., 'color': 'tab:green', 'ls': 'none'}}]
For an ``ObjectBox``, the dictionary contains items for the different spectrum and filter
names stored with :func:`~species.data.database.Database.add_object`. In case both
and ``ObjectBox`` and a ``SynphotBox`` are provided, then the latter can be set to ``None``
in order to use the same (but open) symbols as the data from the ``ObjectBox``. Note that
if a filter name is duplicated in an ``ObjectBox`` (Paranal/SPHERE.IRDIS_D_K12_1 in the
example) then a list with two dictionaries should be provided. Colors are automatically
chosen if ``plot_kwargs`` is set to ``None``.
xlim : tuple(float, float)
Limits of the wavelength axis.
ylim : tuple(float, float)
Limits of the flux axis.
ylim_res : tuple(float, float), None
Limits of the residuals axis. Automatically chosen (based on the minimum and maximum
residual value) if set to None.
scale : tuple(str, str), None
Scale of the x and y axes ('linear' or 'log'). The scale is set to ``('linear', 'linear')``
if set to ``None``.
title : str
Title.
offset : tuple(float, float)
Offset for the label of the x- and y-axis.
legend : str, tuple, dict, list(dict, dict), None
Location of the legend (str or tuple(float, float)) or a dictionary with the ``**kwargs``
of ``matplotlib.pyplot.legend``, for example ``{'loc': 'upper left', 'fontsize: 12.}``.
Alternatively, a list with two values can be provided to separate the model and data
handles in two legends. Each of these two elements can be set to ``None``. For example,
``[None, {'loc': 'upper left', 'fontsize: 12.}]``, if only the data points should be
included in a legend.
figsize : tuple(float, float)
Figure size.
object_type : str
Object type ('planet' or 'star'). With 'planet', the radius and mass are expressed in
Jupiter units. With 'star', the radius and mass are expressed in solar units.
quantity: str
The quantity of the y-axis ('flux density', 'flux', or 'magnitude').
output : str
Output filename.
Returns
-------
NoneType
None
"""
mpl.rcParams['font.serif'] = ['Bitstream Vera Serif']
mpl.rcParams['font.family'] = 'serif'
plt.rc('axes', edgecolor='black', linewidth=2.2)
plt.rcParams['axes.axisbelow'] = False
if plot_kwargs is None:
plot_kwargs = []
elif plot_kwargs is not None and len(boxes) != len(plot_kwargs):
raise ValueError(f'The number of \'boxes\' ({len(boxes)}) should be equal to the '
f'number of items in \'plot_kwargs\' ({len(plot_kwargs)}).')
if residuals is not None and filters is not None:
plt.figure(1, figsize=figsize)
gridsp = mpl.gridspec.GridSpec(3, 1, height_ratios=[1, 3, 1])
gridsp.update(wspace=0, hspace=0, left=0, right=1, bottom=0, top=1)
ax1 = plt.subplot(gridsp[1, 0])
ax2 = plt.subplot(gridsp[0, 0])
ax3 = plt.subplot(gridsp[2, 0])
elif residuals is not None:
plt.figure(1, figsize=figsize)
gridsp = mpl.gridspec.GridSpec(2, 1, height_ratios=[4, 1])
gridsp.update(wspace=0, hspace=0, left=0, right=1, bottom=0, top=1)
ax1 = plt.subplot(gridsp[0, 0])
ax2 = None
ax3 = plt.subplot(gridsp[1, 0])
elif filters is not None:
plt.figure(1, figsize=figsize)
gridsp = mpl.gridspec.GridSpec(2, 1, height_ratios=[1, 4])
gridsp.update(wspace=0, hspace=0, left=0, right=1, bottom=0, top=1)
ax1 = plt.subplot(gridsp[1, 0])
ax2 = plt.subplot(gridsp[0, 0])
ax3 = None
else:
plt.figure(1, figsize=figsize)
gridsp = mpl.gridspec.GridSpec(1, 1)
gridsp.update(wspace=0, hspace=0, left=0, right=1, bottom=0, top=1)
ax1 = plt.subplot(gridsp[0, 0])
ax2 = None
ax3 = None
if residuals is not None:
labelbottom = False
else:
labelbottom = True
if scale is None:
scale = ('linear', 'linear')
ax1.set_xscale(scale[0])
ax1.set_yscale(scale[1])
if filters is not None:
ax2.set_xscale(scale[0])
if residuals is not None:
ax3.set_xscale(scale[0])
ax1.tick_params(axis='both', which='major', colors='black', labelcolor='black',
direction='in', width=1, length=5, labelsize=12, top=True,
bottom=True, left=True, right=True, labelbottom=labelbottom)
ax1.tick_params(axis='both', which='minor', colors='black', labelcolor='black',
direction='in', width=1, length=3, labelsize=12, top=True,
bottom=True, left=True, right=True, labelbottom=labelbottom)
if filters is not None:
ax2.tick_params(axis='both', which='major', colors='black', labelcolor='black',
direction='in', width=1, length=5, labelsize=12, top=True,
bottom=True, left=True, right=True, labelbottom=False)
ax2.tick_params(axis='both', which='minor', colors='black', labelcolor='black',
direction='in', width=1, length=3, labelsize=12, top=True,
bottom=True, left=True, right=True, labelbottom=False)
if residuals is not None:
ax3.tick_params(axis='both', which='major', colors='black', labelcolor='black',
direction='in', width=1, length=5, labelsize=12, top=True,
bottom=True, left=True, right=True)
ax3.tick_params(axis='both', which='minor', colors='black', labelcolor='black',
direction='in', width=1, length=3, labelsize=12, top=True,
bottom=True, left=True, right=True)
if scale[0] == 'linear':
ax1.xaxis.set_minor_locator(AutoMinorLocator(5))
if scale[1] == 'linear':
ax1.yaxis.set_minor_locator(AutoMinorLocator(5))
# ax1.set_yticks([1e-5, 1e-4, 1e-3, 1e-2, 1e-1, 1e0])
# ax3.set_yticks([-2., 0., 2.])
if filters is not None and scale[0] == 'linear':
ax2.xaxis.set_minor_locator(AutoMinorLocator(5))
if residuals is not None and scale[0] == 'linear':
ax3.xaxis.set_minor_locator(AutoMinorLocator(5))
if residuals is not None and filters is not None:
ax1.set_xlabel('')
ax2.set_xlabel('')
ax3.set_xlabel('Wavelength (µm)', fontsize=13)
elif residuals is not None:
ax1.set_xlabel('')
ax3.set_xlabel('Wavelength (µm)', fontsize=11)
elif filters is not None:
ax1.set_xlabel('Wavelength (µm)', fontsize=13)
ax2.set_xlabel('')
else:
ax1.set_xlabel('Wavelength (µm)', fontsize=13)
if filters is not None:
ax2.set_ylabel('Transmission', fontsize=13)
if residuals is not None:
        if quantity in ('flux density', 'flux'):
            ax3.set_ylabel(r'$\Delta$$\mathregular{F}_\lambda$ ($\sigma$)', fontsize=11)
if xlim is None:
ax1.set_xlim(0.6, 6.)
else:
ax1.set_xlim(xlim[0], xlim[1])
if quantity == 'magnitude':
scaling = 1.
ax1.set_ylabel('Flux contrast (mag)', fontsize=13)
if ylim:
ax1.set_ylim(ylim[0], ylim[1])
else:
if ylim:
ax1.set_ylim(ylim[0], ylim[1])
ylim = ax1.get_ylim()
exponent = math.floor(math.log10(ylim[1]))
scaling = 10.**exponent
if quantity == 'flux density':
ylabel = r'$\mathregular{F}_\lambda$ (10$^{'+str(exponent)+r'}$ W m$^{-2}$ µm$^{-1}$)'
elif quantity == 'flux':
ylabel = r'$\lambda$$\mathregular{F}_\lambda$ (10$^{'+str(exponent)+r'}$ W m$^{-2}$)'
ax1.set_ylabel(ylabel, fontsize=11)
ax1.set_ylim(ylim[0]/scaling, ylim[1]/scaling)
if ylim[0] < 0.:
ax1.axhline(0.0, ls='--', lw=0.7, color='gray', dashes=(2, 4), zorder=0.5)
else:
if quantity == 'flux density':
ax1.set_ylabel(r'$\mathregular{F}_\lambda$ (W m$^{-2}$ µm$^{-1}$)', fontsize=11)
elif quantity == 'flux':
ax1.set_ylabel(r'$\lambda$$\mathregular{F}_\lambda$ (W m$^{-2}$)', fontsize=11)
scaling = 1.
xlim = ax1.get_xlim()
if filters is not None:
ax2.set_xlim(xlim[0], xlim[1])
ax2.set_ylim(0., 1.)
if residuals is not None:
ax3.set_xlim(xlim[0], xlim[1])
if offset is not None and residuals is not None and filters is not None:
ax3.get_xaxis().set_label_coords(0.5, offset[0])
ax1.get_yaxis().set_label_coords(offset[1], 0.5)
ax2.get_yaxis().set_label_coords(offset[1], 0.5)
ax3.get_yaxis().set_label_coords(offset[1], 0.5)
elif offset is not None and filters is not None:
ax1.get_xaxis().set_label_coords(0.5, offset[0])
ax1.get_yaxis().set_label_coords(offset[1], 0.5)
ax2.get_yaxis().set_label_coords(offset[1], 0.5)
elif offset is not None and residuals is not None:
ax3.get_xaxis().set_label_coords(0.5, offset[0])
ax1.get_yaxis().set_label_coords(offset[1], 0.5)
ax3.get_yaxis().set_label_coords(offset[1], 0.5)
elif offset is not None:
ax1.get_xaxis().set_label_coords(0.5, offset[0])
ax1.get_yaxis().set_label_coords(offset[1], 0.5)
else:
ax1.get_xaxis().set_label_coords(0.5, -0.12)
ax1.get_yaxis().set_label_coords(-0.1, 0.5)
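    # Loop over the boxes and plot each one according to its type (model or
    # spectrum, list of spectra, photometry, object data, synthetic photometry)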
for j, boxitem in enumerate(boxes):
flux_scaling = 1.
        if j >= len(plot_kwargs):
            plot_kwargs.append(None)
if isinstance(boxitem, (box.SpectrumBox, box.ModelBox)):
wavelength = boxitem.wavelength
flux = boxitem.flux
if isinstance(wavelength[0], (np.float32, np.float64)):
data = np.array(flux, dtype=np.float64)
masked = np.ma.array(data, mask=np.isnan(data))
if isinstance(boxitem, box.ModelBox):
param = boxitem.parameters
par_key, par_unit, par_label = plot_util.quantity_unit(
param=list(param.keys()), object_type=object_type)
label = ''
newline = False
for i, item in enumerate(par_key):
if item[:4] == 'teff':
value = f'{param[item]:.0f}'
elif item in ['logg', 'feh', 'fsed', 'lognorm_ext',
'powerlaw_ext', 'ism_ext']:
value = f'{param[item]:.1f}'
elif item in ['co']:
value = f'{param[item]:.2f}'
elif item[:6] == 'radius':
if object_type == 'planet':
value = f'{param[item]:.1f}'
# if item == 'radius_1':
# value = f'{param[item]:.0f}'
# else:
# value = f'{param[item]:.1f}'
elif object_type == 'star':
value = f'{param[item]*constants.R_JUP/constants.R_SUN:.1f}'
elif item == 'mass':
if object_type == 'planet':
value = f'{param[item]:.0f}'
elif object_type == 'star':
value = f'{param[item]*constants.M_JUP/constants.M_SUN:.1f}'
elif item == 'luminosity':
value = f'{np.log10(param[item]):.2f}'
else:
continue
# if len(label) > 80 and newline == False:
# label += '\n'
# newline = True
if par_unit[i] is None:
label += f'{par_label[i]} = {value}'
else:
label += f'{par_label[i]} = {value} {par_unit[i]}'
if i < len(par_key)-1:
label += ', '
else:
label = None
if plot_kwargs[j]:
kwargs_copy = plot_kwargs[j].copy()
if 'label' in kwargs_copy:
if kwargs_copy['label'] is None:
label = None
else:
label = kwargs_copy['label']
del kwargs_copy['label']
if quantity == 'flux':
flux_scaling = wavelength
ax1.plot(wavelength, flux_scaling*masked/scaling, zorder=2, label=label, **kwargs_copy)
else:
if quantity == 'flux':
flux_scaling = wavelength
ax1.plot(wavelength, flux_scaling*masked/scaling, lw=0.5, label=label, zorder=2)
            elif isinstance(wavelength[0], np.ndarray):
for i, item in enumerate(wavelength):
data = np.array(flux[i], dtype=np.float64)
masked = np.ma.array(data, mask=np.isnan(data))
if isinstance(boxitem.name[i], bytes):
label = boxitem.name[i].decode('utf-8')
else:
label = boxitem.name[i]
if quantity == 'flux':
flux_scaling = item
ax1.plot(item, flux_scaling*masked/scaling, lw=0.5, label=label)
elif isinstance(boxitem, list):
for i, item in enumerate(boxitem):
wavelength = item.wavelength
flux = item.flux
data = np.array(flux, dtype=np.float64)
masked = np.ma.array(data, mask=np.isnan(data))
if quantity == 'flux':
flux_scaling = wavelength
if plot_kwargs[j]:
ax1.plot(wavelength, flux_scaling*masked/scaling, zorder=1, **plot_kwargs[j])
else:
ax1.plot(wavelength, flux_scaling*masked/scaling, color='gray', lw=0.2, alpha=0.5, zorder=1)
elif isinstance(boxitem, box.PhotometryBox):
label_check = []
for i, item in enumerate(boxitem.wavelength):
transmission = read_filter.ReadFilter(boxitem.filter_name[i])
fwhm = transmission.filter_fwhm()
if quantity == 'flux':
flux_scaling = item
if plot_kwargs[j]:
if 'label' in plot_kwargs[j] and plot_kwargs[j]['label'] not in label_check:
label_check.append(plot_kwargs[j]['label'])
elif 'label' in plot_kwargs[j] and plot_kwargs[j]['label'] in label_check:
del plot_kwargs[j]['label']
if boxitem.flux[i][1] is None:
ax1.errorbar(item, flux_scaling*boxitem.flux[i][0]/scaling, xerr=fwhm/2.,
yerr=None, zorder=3, **plot_kwargs[j])
else:
ax1.errorbar(item, flux_scaling*boxitem.flux[i][0]/scaling, xerr=fwhm/2.,
yerr=flux_scaling*boxitem.flux[i][1]/scaling, zorder=3, **plot_kwargs[j])
else:
if boxitem.flux[i][1] is None:
ax1.errorbar(item, flux_scaling*boxitem.flux[i][0]/scaling, xerr=fwhm/2.,
yerr=None, marker='s', ms=6, color='black', zorder=3)
else:
ax1.errorbar(item, flux_scaling*boxitem.flux[i][0]/scaling, xerr=fwhm/2.,
yerr=flux_scaling*boxitem.flux[i][1]/scaling, marker='s', ms=6, color='black',
zorder=3)
elif isinstance(boxitem, box.ObjectBox):
if boxitem.spectrum is not None:
spec_list = []
wavel_list = []
for item in boxitem.spectrum:
spec_list.append(item)
wavel_list.append(boxitem.spectrum[item][0][0, 0])
sort_index = np.argsort(wavel_list)
spec_sort = []
for i in range(sort_index.size):
spec_sort.append(spec_list[sort_index[i]])
for key in spec_sort:
masked = np.ma.array(boxitem.spectrum[key][0],
mask=np.isnan(boxitem.spectrum[key][0]))
if quantity == 'flux':
flux_scaling = masked[:, 0]
if not plot_kwargs[j] or key not in plot_kwargs[j]:
plot_obj = ax1.errorbar(masked[:, 0], flux_scaling*masked[:, 1]/scaling,
yerr=flux_scaling*masked[:, 2]/scaling, ms=2, marker='s',
zorder=2.5, ls='none')
if plot_kwargs[j] is None:
plot_kwargs[j] = {}
plot_kwargs[j][key] = {'marker': 's', 'ms': 2., 'ls': 'none',
'color': plot_obj[0].get_color()}
else:
ax1.errorbar(masked[:, 0], flux_scaling*masked[:, 1]/scaling, yerr=flux_scaling*masked[:, 2]/scaling,
zorder=2.5, **plot_kwargs[j][key])
if boxitem.flux is not None:
filter_list = []
wavel_list = []
for item in boxitem.flux:
read_filt = read_filter.ReadFilter(item)
filter_list.append(item)
wavel_list.append(read_filt.mean_wavelength())
sort_index = np.argsort(wavel_list)
filter_sort = []
for i in range(sort_index.size):
filter_sort.append(filter_list[sort_index[i]])
for item in filter_sort:
transmission = read_filter.ReadFilter(item)
wavelength = transmission.mean_wavelength()
fwhm = transmission.filter_fwhm()
if not plot_kwargs[j] or item not in plot_kwargs[j]:
if not plot_kwargs[j]:
plot_kwargs[j] = {}
if quantity == 'flux':
flux_scaling = wavelength
if isinstance(boxitem.flux[item][0], np.ndarray):
for i in range(boxitem.flux[item].shape[1]):
plot_obj = ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item][0, i]/scaling, xerr=fwhm/2.,
yerr=flux_scaling*boxitem.flux[item][1, i]/scaling, marker='s', ms=5, zorder=3)
else:
plot_obj = ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item][0]/scaling, xerr=fwhm/2.,
yerr=flux_scaling*boxitem.flux[item][1]/scaling, marker='s', ms=5, zorder=3)
plot_kwargs[j][item] = {'marker': 's', 'ms': 5., 'color': plot_obj[0].get_color()}
else:
if quantity == 'flux':
flux_scaling = wavelength
if isinstance(boxitem.flux[item][0], np.ndarray):
if not isinstance(plot_kwargs[j][item], list):
                                raise ValueError(f'A list with {boxitem.flux[item].shape[1]} '
                                                 f'dictionaries is required because the filter '
                                                 f'{item} has {boxitem.flux[item].shape[1]} '
                                                 f'values.')
for i in range(boxitem.flux[item].shape[1]):
ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item][0, i]/scaling, xerr=fwhm/2.,
yerr=flux_scaling*boxitem.flux[item][1, i]/scaling, zorder=3, **plot_kwargs[j][item][i])
else:
if boxitem.flux[item][1] == 0.:
ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item][0]/scaling,
xerr=fwhm/2., yerr=0.5*flux_scaling*boxitem.flux[item][0]/scaling,
uplims=True, capsize=2., capthick=0., zorder=3, **plot_kwargs[j][item])
else:
ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item][0]/scaling,
xerr=fwhm/2., yerr=flux_scaling*boxitem.flux[item][1]/scaling,
zorder=3, **plot_kwargs[j][item])
elif isinstance(boxitem, box.SynphotBox):
for i, find_item in enumerate(boxes):
if isinstance(find_item, box.ObjectBox):
obj_index = i
break
for item in boxitem.flux:
transmission = read_filter.ReadFilter(item)
wavelength = transmission.mean_wavelength()
fwhm = transmission.filter_fwhm()
if quantity == 'flux':
flux_scaling = wavelength
if not plot_kwargs[obj_index] or item not in plot_kwargs[obj_index]:
ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item]/scaling, xerr=fwhm/2., yerr=None,
alpha=0.7, marker='s', ms=5, zorder=4, mfc='white')
else:
if isinstance(plot_kwargs[obj_index][item], list):
# In case of multiple photometry values for the same filter, use the
# plot_kwargs of the first data point
kwargs_copy = plot_kwargs[obj_index][item][0].copy()
if 'label' in kwargs_copy:
del kwargs_copy['label']
ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item]/scaling, xerr=fwhm/2., yerr=None,
zorder=4, mfc='white', **kwargs_copy)
else:
kwargs_copy = plot_kwargs[obj_index][item].copy()
if 'label' in kwargs_copy:
del kwargs_copy['label']
if 'mfc' in kwargs_copy:
del kwargs_copy['mfc']
ax1.errorbar(wavelength, flux_scaling*boxitem.flux[item]/scaling, xerr=fwhm/2., yerr=None,
zorder=4, mfc='white', **kwargs_copy)
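    # Plot the transmission profiles of the selected filters in the top panel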
if filters is not None:
for i, item in enumerate(filters):
transmission = read_filter.ReadFilter(item)
data = transmission.get_filter()
ax2.plot(data[:, 0], data[:, 1], '-', lw=0.7, color='black', zorder=1)
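    # Plot the residuals of the photometry and spectra in the bottom panel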
if residuals is not None:
for i, find_item in enumerate(boxes):
if isinstance(find_item, box.ObjectBox):
obj_index = i
break
res_max = 0.
if residuals.photometry is not None:
for item in residuals.photometry:
if not plot_kwargs[obj_index] or item not in plot_kwargs[obj_index]:
ax3.plot(residuals.photometry[item][0], residuals.photometry[item][1], marker='s',
ms=5, linestyle='none', zorder=2)
else:
if residuals.photometry[item].ndim == 1:
ax3.errorbar(residuals.photometry[item][0], residuals.photometry[item][1],
zorder=2, **plot_kwargs[obj_index][item])
elif residuals.photometry[item].ndim == 2:
for i in range(residuals.photometry[item].shape[1]):
if isinstance(plot_kwargs[obj_index][item], list):
ax3.errorbar(residuals.photometry[item][0, i],
residuals.photometry[item][1, i], zorder=2,
**plot_kwargs[obj_index][item][i])
else:
ax3.errorbar(residuals.photometry[item][0, i],
residuals.photometry[item][1, i], zorder=2,
**plot_kwargs[obj_index][item])
res_max = np.nanmax(np.abs(residuals.photometry[item][1]))
if residuals.spectrum is not None:
for key, value in residuals.spectrum.items():
if not plot_kwargs[obj_index] or key not in plot_kwargs[obj_index]:
ax3.errorbar(value[:, 0], value[:, 1], marker='o', ms=2, ls='none', zorder=1)
else:
ax3.errorbar(value[:, 0], value[:, 1], zorder=1, **plot_kwargs[obj_index][key])
max_tmp = np.nanmax(np.abs(value[:, 1]))
if max_tmp > res_max:
res_max = max_tmp
res_lim = math.ceil(1.1*res_max)
if res_lim > 10.:
res_lim = 5.
ax3.axhline(0., ls='--', lw=0.7, color='gray', dashes=(2, 4), zorder=0.5)
# ax3.axhline(-2.5, ls=':', lw=0.7, color='gray', dashes=(1, 4), zorder=0.5)
# ax3.axhline(2.5, ls=':', lw=0.7, color='gray', dashes=(1, 4), zorder=0.5)
if ylim_res is None:
ax3.set_ylim(-res_lim, res_lim)
else:
ax3.set_ylim(ylim_res[0], ylim_res[1])
if filters is not None:
ax2.set_ylim(0., 1.1)
print(f'Plotting spectrum: {output}...', end='', flush=True)
if title is not None:
if filters:
ax2.set_title(title, y=1.02, fontsize=13)
else:
ax1.set_title(title, y=1.02, fontsize=13)
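    # Create the legend(s), optionally separating model and data handles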
handles, labels = ax1.get_legend_handles_labels()
if handles and legend is not None:
if isinstance(legend, list):
model_handles = []
data_handles = []
model_labels = []
data_labels = []
for i, item in enumerate(handles):
if isinstance(item, mpl.lines.Line2D):
model_handles.append(item)
model_labels.append(labels[i])
elif isinstance(item, mpl.container.ErrorbarContainer):
data_handles.append(item)
data_labels.append(labels[i])
else:
warnings.warn(f'The object type {item} is not implemented for the legend.')
if legend[0] is not None:
if isinstance(legend[0], (str, tuple)):
leg_1 = ax1.legend(model_handles, model_labels, loc=legend[0], fontsize=10., frameon=False)
else:
leg_1 = ax1.legend(model_handles, model_labels, **legend[0])
else:
leg_1 = None
if legend[1] is not None:
if isinstance(legend[1], (str, tuple)):
leg_2 = ax1.legend(data_handles, data_labels, loc=legend[1], fontsize=8, frameon=False)
else:
leg_2 = ax1.legend(data_handles, data_labels, **legend[1])
if leg_1 is not None:
ax1.add_artist(leg_1)
elif isinstance(legend, (str, tuple)):
ax1.legend(loc=legend, fontsize=8, frameon=False)
else:
ax1.legend(**legend)
# filters = ['Paranal/SPHERE.ZIMPOL_N_Ha',
# 'MUSE/Hbeta',
# 'ALMA/855']
#
# filters = ['Paranal/SPHERE.IRDIS_B_Y',
# 'MKO/NSFCam.J',
# 'Paranal/SPHERE.IRDIS_D_H23_2',
# 'Paranal/SPHERE.IRDIS_D_H23_3',
# 'Paranal/SPHERE.IRDIS_D_K12_1',
# 'Paranal/SPHERE.IRDIS_D_K12_2',
# 'Paranal/NACO.Lp',
# 'Paranal/NACO.NB405',
# 'Paranal/NACO.Mp']
#
# for i, item in enumerate(filters):
# readfilter = read_filter.ReadFilter(item)
# filter_wavelength = readfilter.mean_wavelength()
# filter_width = readfilter.filter_fwhm()
#
# # if i == 5:
# # ax1.errorbar(filter_wavelength, 1.3e4, xerr=filter_width/2., color='dimgray', elinewidth=2.5, zorder=10)
# # else:
# # ax1.errorbar(filter_wavelength, 6e3, xerr=filter_width/2., color='dimgray', elinewidth=2.5, zorder=10)
#
# if i == 0:
# ax1.text(filter_wavelength, 1e-2, r'H$\alpha$', ha='center', va='center', fontsize=10, color='black')
# elif i == 1:
# ax1.text(filter_wavelength, 1e-2, r'H$\beta$', ha='center', va='center', fontsize=10, color='black')
# elif i == 2:
# ax1.text(filter_wavelength, 1e-2, 'ALMA\nband 7 rms', ha='center', va='center', fontsize=8, color='black')
#
# if i == 0:
# ax1.text(filter_wavelength, 1.4, 'Y', ha='center', va='center', fontsize=10, color='black')
# elif i == 1:
# ax1.text(filter_wavelength, 1.4, 'J', ha='center', va='center', fontsize=10, color='black')
# elif i == 2:
# ax1.text(filter_wavelength-0.04, 1.4, 'H2', ha='center', va='center', fontsize=10, color='black')
# elif i == 3:
# ax1.text(filter_wavelength+0.04, 1.4, 'H3', ha='center', va='center', fontsize=10, color='black')
# elif i == 4:
# ax1.text(filter_wavelength, 1.4, 'K1', ha='center', va='center', fontsize=10, color='black')
# elif i == 5:
# ax1.text(filter_wavelength, 1.4, 'K2', ha='center', va='center', fontsize=10, color='black')
# elif i == 6:
# ax1.text(filter_wavelength, 1.4, 'L$\'$', ha='center', va='center', fontsize=10, color='black')
# elif i == 7:
# ax1.text(filter_wavelength, 1.4, 'NB4.05', ha='center', va='center', fontsize=10, color='black')
# elif i == 8:
# ax1.text(filter_wavelength, 1.4, 'M$\'}$', ha='center', va='center', fontsize=10, color='black')
#
# ax1.text(1.26, 0.58, 'VLT/SPHERE', ha='center', va='center', fontsize=8., color='slateblue', rotation=43.)
# ax1.text(2.5, 1.28, 'VLT/SINFONI', ha='left', va='center', fontsize=8., color='darkgray')
plt.savefig(os.getcwd()+'/'+output, bbox_inches='tight')
plt.clf()
plt.close()
print(' [DONE]')
| 2.46875 | 2 |
Print All Links.py | fatih-iver-2016400264/Search-Engine-with-Python | 0 | 12785572 | <gh_stars>0
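# Assumed helper (not included in this file): returns the first URL that
# follows an '<a href=' tag in `page`, together with the index where the
# quoted URL ends, or (None, 0) when no link is left.
def get_next_target(page):
    start_link = page.find('<a href=')
    if start_link == -1:
        return None, 0
    start_quote = page.find('"', start_link)
    end_quote = page.find('"', start_quote + 1)
    url = page[start_quote + 1:end_quote]
    return url, end_quote
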
def print_all_links(page):
while True:
url, endpos = get_next_target(page)
if url:
print(url)
page = page[endpos:]
else:
break
| 3 | 3 |
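# Example usage (hypothetical HTML string):
# print_all_links('<a href="http://example.com">link</a>')
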
tests/unit/p2p/messages/errors.py | btclib-org/btclib_node | 4 | 12785573 | from btclib_node.p2p.messages import get_payload
from btclib_node.p2p.messages.errors import Notfound, Reject, RejectCode
def test_not_found():
msg = Notfound([(1, "00" * 32)])
msg_bytes = bytes.fromhex("00" * 4) + msg.serialize()
assert msg == Notfound.deserialize(get_payload(msg_bytes)[1])
def test_reject():
msg = Reject("tx", RejectCode(0x42), "", "00" * 32)
msg_bytes = bytes.fromhex("00" * 4) + msg.serialize()
assert msg == Reject.deserialize(get_payload(msg_bytes)[1])
| 2.40625 | 2 |
calvinextras/calvinsys/web/pushbullet/Pushbullet.py | gabrielcercel/calvin-base | 334 | 12785574 | <reponame>gabrielcercel/calvin-base<gh_stars>100-1000
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pushbullet
from calvin.runtime.south.async import threads, async
from calvin.utilities.calvinlogger import get_logger
from calvin.runtime.south.calvinsys import base_calvinsys_object
_log = get_logger(__name__)
class Pushbullet(base_calvinsys_object.BaseCalvinsysObject):
"""
Pushbullet - Post messages to pushbullet channel
Requires pushbullet.py (pip install pushbullet.py)
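
    A minimal usage sketch (hypothetical key and channel tag; within Calvin the
    runtime normally instantiates this object and calls init() itself):

        pb = Pushbullet()
        pb.init(api_key='<api-key>', channel_tag='my-channel')
        if pb.can_write():
            pb.write({'title': 'Hello', 'message': 'World'})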
"""
init_schema = {
"type": "object",
"properties": {
"api_key": {
"description": "API key, see https://www.pushbullet.com/account",
"type": "string"
},
"channel_tag": {
"description": "Pushbullet to post to, see http://www.pushbullet.com",
"type": "string"
}
},
"required": ["api_key", "channel_tag"],
"description": "Setup up api key and tag of channel to use for pushbullet messages"
}
can_write_schema = {
"description": "Returns True if data can be posted, otherwise False",
"type": "boolean"
}
write_schema = {
"description": "Post update to configured pushbullet channel",
"type": ["object", "string"],
"properties": {
"title": {"type": "string", "description": "title of message"},
"message": {"type": "string", "description": "message to post to channel"}
}
}
def init(self, api_key, channel_tag, title=None):
def init_pb():
try:
# pushbullet = pbullet.Pushbullet({"api_key": api_key})
pb_api = pushbullet.PushBullet(api_key)
ch = pb_api.get_channel(channel_tag)
return (pb_api, ch)
except Exception as e:
_log.error("Failed to initialize pushbullet: {}".format(e))
def done(pb_chan):
self.pushbullet, self.channel = pb_chan
self.busy = False
self.title = title
self.busy = True
in_progress = threads.defer_to_thread(init_pb)
in_progress.addCallback(done)
def can_write(self):
return not self.busy
def write(self, data):
def send():
try:
self.channel.push_note(title, message)
except Exception as e:
_log.error("Failed to send pushbullet: {}".format(e))
done()
def done(*args, **kwargs):
self.busy = False
if isinstance(data, basestring):
message = data
title = self.title
        else:
message = data.get("message")
title = data.get("title")
self.busy = True
in_progress = threads.defer_to_thread(send)
in_progress.addBoth(done)
def close(self):
del self.channel
self.channel = None
del self.pushbullet
self.pushbullet = None
| 1.789063 | 2 |
openpyscad/util.py | Samoxiaki/openpyscad | 0 | 12785575 | <reponame>Samoxiaki/openpyscad
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .base import BaseObject
from .boolean import Union
from .shapes_3d import *
from .transformations import *
__all__ = ['Workspace', 'PolyhedronBuilder']
class Workspace(BaseObject):
def __init__(self, position=[0,0,0], rotation=[0,0,0], color=None, visible=True):
"""
Create a new Workspace object.
Param processing order:
1. Visibility
2. Color
3. Rotation
4. Position
"""
super().__init__()
self._position = Workspace.__validation_check(position, "position")
self._rotation = Workspace.__validation_check(rotation, "rotation")
self._color = Workspace.__validation_check(color, "color")
self._visible = Workspace.__validation_check(visible, "boolean")
@staticmethod
def __validation_check(obj, obj_type="position"):
"""
Return parsed values from object if valid.
If not valid, raise an exception.
"""
if(obj_type == "position"):
if(obj and isinstance(obj, (list, tuple))):
position = [0,0,0]
for i in range(0, min(3, len(obj))):
try:
position[i] = float(obj[i])
except:
raise TypeError("Cannot parse value to a number: " + obj[i])
return position
else:
raise TypeError("Invalid position: " + obj)
elif(obj_type == "rotation"):
if(obj and isinstance(obj, (list, tuple))):
rotation = [0,0,0]
for i in range(0, min(3, len(obj))):
try:
rotation[i] = float(obj[i]) % 360
except:
raise TypeError("Cannot parse value to a number: " + obj[i])
return rotation
else:
raise TypeError("Invalid rotation: " + obj)
        elif(obj_type == "color"):
            # Assumed: validate_color() raises an exception for invalid values
            Colors.validate_color(obj)
            return obj
elif(obj_type == "boolean"):
if(obj):
return True
else:
return False
else:
raise TypeError("Invalid obj_type: " + obj_type)
def dumps(self, indent_level=0, fp=None):
dumped_obj = Union()
for child in self.children:
dumped_obj.append(child)
if(not self._visible):
dumped_obj = dumped_obj.disable()
if(self._color):
dumped_obj = dumped_obj.color(self._color)
dumped_obj = dumped_obj.rotate(self._rotation).translate(self._position)
return dumped_obj.dumps(indent_level=indent_level, fp=fp)
def translate(self, translation):
parsed_translation = Workspace.__validation_check(translation, "position")
for i in range(0,len(parsed_translation)):
self._position[i]+= parsed_translation[i]
return self.clone()
def move(self, new_position):
self._position = Workspace.__validation_check(new_position, "position")
return self.clone()
def rotate(self, rotation):
parsed_rotation = Workspace.__validation_check(rotation, "rotation")
for i in range(0,len(parsed_rotation)):
self._rotation[i]+= parsed_rotation[i]
return self.clone()
    def set_rotation(self, new_rotation):
self._rotation = Workspace.__validation_check(new_rotation, "rotation")
return self.clone()
def color(self, color):
self._color = Workspace.__validation_check(color, "color")
return self.clone()
def disable(self):
self._visible = False
def enable(self):
        self._visible = True
class PolyhedronBuilder:
def __init__(self):
self.points = []
self.faces = []
def __get_point_index(self, point):
for i in range(0, len(self.points)):
if(point == self.points[i]):
return i
index = len(self.points)
self.points.append(point)
return index
def add_face(self, points):
face = []
for p in points:
face.append(self.__get_point_index(p))
self.faces.append(face)
return face
def build(self):
return Polyhedron(points = self.points, faces = self.faces)
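
# Minimal usage sketch (hypothetical coordinates):
#
#     builder = PolyhedronBuilder()
#     builder.add_face([[0, 0, 0], [10, 0, 0], [0, 10, 0]])
#     builder.add_face([[0, 0, 0], [0, 10, 0], [0, 0, 10]])
#     scad_source = builder.build().dumps()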
| 2.484375 | 2 |
Meraki/get_network_client_traffic_history.py | insidus341/devnet | 0 | 12785576 | from meraki_sdk.meraki_sdk_client import MerakiSdkClient
from tools.api_key import key
from get_network_id import get_network_id
meraki = MerakiSdkClient(key)
# net_id = get_network_id()
net_id = 'L_594475150812909110'
client_id = 'k01816e'
clients_controller = meraki.clients
params = {}
params['network_id'] = net_id
params['client_id'] = client_id
client = clients_controller.get_network_client_traffic_history(params)
print(client) | 2.0625 | 2 |
modules/loss.py | ChenX17/aligntts | 0 | 12785577 | <filename>modules/loss.py
'''
Date: 2021-01-23 18:37:19
LastEditors: <NAME>(<EMAIL>)
LastEditTime: 2021-02-02 23:30:55
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
import hparams as hp
from utils.utils import get_mask_from_lengths
import math
class MDNLoss(nn.Module):
def __init__(self):
super(MDNLoss, self).__init__()
def forward(self, mu_sigma, melspec, text_lengths, mel_lengths):
# mu, sigma: B, L, F / melspec: B, F, T
B, L, _ = mu_sigma.size()
T = melspec.size(2)
x = melspec.transpose(1,2).unsqueeze(1) # B, 1, T, F
mu = torch.sigmoid(mu_sigma[:, :, :hp.n_mel_channels].unsqueeze(2)) # B, L, 1, F
log_sigma = mu_sigma[:, :, hp.n_mel_channels:].unsqueeze(2) # B, L, 1, F
exponential = -0.5*torch.sum((x-mu)*(x-mu)/log_sigma.exp()**2, dim=-1) # B, L, T
log_prob_matrix = exponential - (hp.n_mel_channels/2)*torch.log(torch.tensor(2*math.pi)) - 0.5 * log_sigma.sum(dim=-1)
log_alpha = mu_sigma.new_ones(B, L, T)*(-1e30)
        log_alpha[:, 0, 0] = log_prob_matrix[:, 0, 0]
# prob_matrix = torch.tensor(2*math.pi).exp()**(-0.5) * torch.exp(-0.5*torch.sum((x-mu)*(x-mu)/log_sigma.exp()**2, dim=-1))
# alpha = mu_sigma.new_ones(B, L, T)*(1e-30)
# alpha[:, 0, 0] = prob_matrix[:,0, 0]
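        # Forward algorithm in log space over monotonic alignments: at step t a
        # text token either stays on the same token or advances by one, so
        # log_alpha[l, t] = logsumexp(log_alpha[l, t-1], log_alpha[l-1, t-1])
        #                   + log_prob_matrix[l, t]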
for t in range(1, T):
prev_step = torch.cat([log_alpha[:, :, t-1:t], F.pad(log_alpha[:, :, t-1:t], (0,0,1,-1), value=-1e30)], dim=-1)
log_alpha[:, :, t] = torch.logsumexp(prev_step+1e-30, dim=-1)+log_prob_matrix[:, :, t]
# prev_step = torch.cat([alpha[:, :, t-1:t], F.pad(alpha[:, :, t-1:t], (0,0,1,-1), value=1e-30)], dim=-1)
# alpha[:, :, t] = torch.sum(prev_step+1e-30, dim=-1)*prob_matrix[:, :, t]
# scaler = torch.unsqueeze(1 / torch.sum(log_alpha[:, :, t], dim=1) + 1e-30, -1)
# log_alpha[:, :, t] = log_alpha[:, :, t] * scaler
alpha_last = log_alpha[torch.arange(B), text_lengths-1, mel_lengths-1]
# alpha_last = torch.log(alpha[torch.arange(B), text_lengths-1, mel_lengths-1])
mdn_loss = -alpha_last.mean()
return mdn_loss, log_prob_matrix
class MDNDNNLoss(nn.Module):
def __init__(self):
super(MDNDNNLoss, self).__init__()
def forward(self, probs, melspec, text_lengths, mel_lengths):
# mu, sigma: B, L, F / melspec: B, F, T
# B, L, _ = mu_sigma.size()
# probs: B, L, T
B, L, _ = probs.size()
T = melspec.size(2)
# x = melspec.transpose(1,2).unsqueeze(1) # B, 1, T, F
# mu = torch.sigmoid(mu_sigma[:, :, :hp.n_mel_channels].unsqueeze(2)) # B, L, 1, F
# log_sigma = mu_sigma[:, :, hp.n_mel_channels:].unsqueeze(2) # B, L, 1, F
# exponential = -0.5*torch.sum((x-mu)*(x-mu)/log_sigma.exp()**2, dim=-1) # B, L, T
log_prob_matrix = torch.log(probs+1e-30)
log_alpha = probs.new_ones(B, L, T)*(-1e30)
        log_alpha[:, 0, 0] = log_prob_matrix[:, 0, 0]
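        # Same log-space forward recursion as in MDNLoss, but starting from
        # precomputed alignment probabilities instead of Gaussian parameters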
for t in range(1, T):
prev_step = torch.cat([log_alpha[:, :, t-1:t], F.pad(log_alpha[:, :, t-1:t], (0,0,1,-1), value=-1e30)], dim=-1)
log_alpha[:, :, t] = torch.logsumexp(prev_step+1e-30, dim=-1)+log_prob_matrix[:, :, t]
# prev_step = torch.cat([alpha[:, :, t-1:t], F.pad(alpha[:, :, t-1:t], (0,0,1,-1), value=1e-30)], dim=-1)
# alpha[:, :, t] = torch.sum(prev_step+1e-30, dim=-1)*prob_matrix[:, :, t]
# scaler = torch.unsqueeze(1 / torch.sum(log_alpha[:, :, t], dim=1) + 1e-30, -1)
# log_alpha[:, :, t] = log_alpha[:, :, t] * scaler
alpha_last = log_alpha[torch.arange(B), text_lengths-1, mel_lengths-1]
# alpha_last = torch.log(alpha[torch.arange(B), text_lengths-1, mel_lengths-1])
mdn_loss = -alpha_last.mean()
return mdn_loss, log_prob_matrix | 2.46875 | 2 |
shift_classification/preprocessing.py | team8/outdoor-blind-navigation | 6 | 12785578 | import numpy as np
import cv2
def preprocess(img, side):
img = cv2.rotate(img, cv2.ROTATE_90_COUNTERCLOCKWISE)
img = cv2.transpose(img)
size_y, size_x, _ = img.shape
img_crop_size = (480, 480)
min_resize = max(img_crop_size[0] / size_x, img_crop_size[1] / size_y)
img = cv2.resize(img, (int(size_x * min_resize), int(size_y * min_resize))) # keeps the same aspect ratio
size_y, size_x, _ = img.shape
if side == 1:
# road is on the left so crop it there
img = img[(size_y - img_crop_size[1]):size_y, 0:img_crop_size[0]]
elif side == -1:
# road is on the right so crop it there
img = img[(size_y - img_crop_size[1]):size_y, (size_x - img_crop_size[0]):size_x]
else:
img = img[(size_y - img_crop_size[1]):size_y, int((size_x - img_crop_size[0]) / 2):int(size_x - (size_x - img_crop_size[0]) / 2)]
edges = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
edges = cv2.Canny(edges, 200, 300)
edges = cv2.GaussianBlur(edges, (3, 3), 0)
img = cv2.medianBlur(img, 5)
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
edges = cv2.dilate(edges, kernel)
# remove the blue red layer for smaller image size
b, g, r = cv2.split(img)
img = cv2.merge((b, edges, r))
cv2.imshow('test2', img)
cv2.waitKey(1)
img = img / 255
return img
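
# Minimal usage sketch (assuming a BGR frame read with OpenCV):
#
#     frame = cv2.imread('frame.jpg')
#     model_input = preprocess(frame, side=1)  # road on the left half
#     model_input.shape  # -> (480, 480, 3), values scaled to [0, 1]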
| 2.90625 | 3 |
make/help.py | abhishekgahlot/flexx | 1 | 12785579 | <reponame>abhishekgahlot/flexx<gh_stars>1-10
# License: consider this public domain
"""
Show the list of available commands, or details on a command.
* python make help - show list of commands
* python make help foo - show details on command "foo"
"""
import os
import sys
from make import THIS_DIR, NAME
def help(command=''):
if not command:
# List all commands
fnames = [fname for fname in os.listdir(THIS_DIR) if
os.path.isfile(os.path.join(THIS_DIR, fname)) and
fname.endswith('.py') and
fname.count('.') == 1 and
not fname.startswith('_')]
print('Developer tools for project %s\n' % NAME.capitalize())
print(' python make <command> [arg]\n')
for fname in sorted(fnames):
modname = fname[:-3]
doc = get_doc_for_file(fname)
summary = doc.split('\n', 1)[0] if doc else ''
print(modname.ljust(15) + ' ' + summary)
else:
# Give more detailed info on command
fname = command + '.py'
if not os.path.isfile(os.path.join(THIS_DIR, fname)):
sys.exit('Not a known command: %r' % command)
doc = get_doc_for_file(fname) or ''
print('\n%s - %s\n' % (command, doc))
def get_doc_for_file(fname):
""" Get the module docstring of the given file. Returns string with
quotes and whitespace stripped, and only LF newlines.
"""
# Read code
try:
code = open(os.path.join(THIS_DIR, fname), 'rt').read()
except Exception as err:
return 'Error: could not read %r: %s' % (fname, str(err))
# Search for closes multiline string
qsingle, qdouble = "'''", '"""'
ii = [(code.find(needle), needle) for needle in (qsingle, qdouble)]
ii = [(i, needle) for i, needle in ii if i >= 0]
ii.sort(key=lambda x: x[0])
# Find where it ends
if ii:
i1, needle = ii[0]
i2 = code.find(needle, i1+3)
if i2 > 0:
doc = code[i1:i2].strip('"\'').strip()
return doc.replace('\r\n', '\n').replace('\r', '\n')
| 2.859375 | 3 |
controller_setter.py | lijian2020/NDNAPP | 0 | 12785580 | #!/usr/bin/python3
#
# Copyright (C) 2019 Trinity College of Dublin, the University of Dublin.
# Copyright (c) 2019 <NAME>
# Author: <NAME> <<EMAIL>>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
'''This module provides specific functions and tests for the controller.'''
import sys
import time
import argparse
import traceback
import random
from pyndn import Name
from pyndn import Face
from pyndn.security import KeyChain
from oscommand import OSCommand
from ofmsg import OFMSG
from featurereq import FeatureReq
from packetout import PacketOutMsg
from facemod import FaceModMsg
from node_prefix_table import NodePrefixTable
class Controller_Setter(object):
def __init__(self):
pass
#self.keyChain = KeyChain()
#self.isDone = False
#self.ofmsg = OFMSG()
#self.nodeid = OSCommand.getnodeid()
#self.face = Face()
#self.featurereq = FeatureReq()
#self.helloreq_name_list = []
def run(self):
pass
def packetoutsender(self):
'''This section is used to send packetout msg if necessary'''
PacketOut_suffix = "all---all---/Ireland/Dublin/TCD/---2---0---3600---36000---0x0001---faceid255---0x0001"
PacketOutMsg().run(PacketOut_suffix)
def facemodsender(self):
'''This section is used to send facemod msg if necessary'''
facemod_suffix = "255---0x0001" # "faceid---Action"; Action ={create=0x0000, destroy=0x0001}
FaceModMsg().run(facemod_suffix) | 2.09375 | 2 |
mtp_noms_ops/apps/security/export.py | ministryofjustice/money-to-prisoners-noms-ops | 3 | 12785581 | import datetime
import re
from django.http import HttpResponse
from django.utils.dateparse import parse_datetime
from mtp_common.utils import format_currency
from openpyxl import Workbook
from security.models import credit_sources, disbursement_methods
from security.templatetags.security import (
format_card_number, format_sort_code,
format_resolution, format_disbursement_resolution,
list_prison_names,
)
from security.utils import EmailSet, NameSet
class ObjectListXlsxResponse(HttpResponse):
def __init__(self, object_list, object_type, attachment_name='export.xlsx', **kwargs):
kwargs.setdefault(
'content_type',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
)
super().__init__(**kwargs)
self['Content-Disposition'] = 'attachment; filename="%s"' % attachment_name
serialiser = ObjectListSerialiser.serialiser_for(object_type)
workbook = serialiser.make_workbook(object_list)
workbook.save(self)
class ObjectListSerialiser:
serialisers = {}
headers = []
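
    # Each subclass registers itself under its object type (via
    # __init_subclass__ below), so serialiser_for() can resolve a serialiser
    # from the object_type string at runtime.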
def __init_subclass__(cls, object_type):
cls.serialisers[object_type] = cls
@classmethod
def serialiser_for(cls, object_type):
try:
return cls.serialisers[object_type]()
except KeyError:
raise NotImplementedError(f'Cannot export {object_type}')
def make_workbook(self, object_list):
workbook = Workbook(write_only=True)
worksheet = workbook.create_sheet()
worksheet.append(self.headers)
for record in object_list:
serialised_record = self.serialise(record)
worksheet.append([
escape_formulae(serialised_record.get(field))
for field in self.headers
])
return workbook
def serialise(self, record):
raise NotImplementedError
class CreditListSerialiser(ObjectListSerialiser, object_type='credits'):
headers = [
'Internal ID',
'Date started', 'Date received', 'Date credited',
'Amount',
'Prisoner number', 'Prisoner name', 'Prison',
'Sender name', 'Payment method',
'Bank transfer sort code', 'Bank transfer account', 'Bank transfer roll number',
'Debit card number', 'Debit card expiry', 'Debit card billing address',
'Sender email', 'Sender IP address',
'Status',
'NOMIS transaction',
]
def serialise(self, record):
return {
'Internal ID': record['id'],
'Date started': record['started_at'],
'Date received': (
record['received_at'].strftime('%Y-%m-%d')
if record['source'] == 'bank_transfer' else record['received_at']
),
'Date credited': record['credited_at'],
'Amount': format_currency(record['amount']),
'Prisoner number': record['prisoner_number'],
'Prisoner name': record['prisoner_name'],
'Prison': record['prison_name'],
'Sender name': record['sender_name'],
'Payment method': str(credit_sources.get(record['source'], record['source'])),
'Bank transfer sort code': (
format_sort_code(record['sender_sort_code']) if record['sender_sort_code'] else None
),
'Bank transfer account': record['sender_account_number'],
'Bank transfer roll number': record['sender_roll_number'],
'Debit card number': (
f'{record["card_number_first_digits"] or "******"}******{record["card_number_last_digits"]}'
if record['card_number_last_digits']
else None
),
'Debit card expiry': record['card_expiry_date'],
'Debit card billing address': credit_address_for_export(record['billing_address']),
'Sender email': record['sender_email'],
'Sender IP address': record['ip_address'],
'Status': str(format_resolution(record['resolution'])),
'NOMIS transaction': record['nomis_transaction_id'],
}
class DisbursementListSerialiser(ObjectListSerialiser, object_type='disbursements'):
headers = [
'Internal ID',
'Date entered', 'Date confirmed', 'Date sent',
'Amount',
'Prisoner number', 'Prisoner name', 'Prison',
'Recipient name', 'Payment method',
'Bank transfer sort code', 'Bank transfer account', 'Bank transfer roll number',
'Recipient address', 'Recipient email',
'Status',
'NOMIS transaction', 'SOP invoice number',
]
def serialise(self, record):
last_action_dates = {
log_item['action']: parse_datetime(log_item['created'])
for log_item in record['log_set']
}
return {
'Internal ID': record['id'],
'Date entered': record['created'],
'Date confirmed': last_action_dates.get('confirmed', ''),
'Date sent': last_action_dates.get('sent', ''),
'Amount': format_currency(record['amount']),
'Prisoner number': record['prisoner_number'],
'Prisoner name': record['prisoner_name'],
'Prison': record['prison_name'],
'Recipient name': f'{record["recipient_first_name"]} {record["recipient_last_name"]}'.strip(),
'Payment method': str(disbursement_methods.get(record['method'], record['method'])),
'Bank transfer sort code': (
format_sort_code(record['sort_code']) if record['sort_code'] else ''
),
'Bank transfer account': record['account_number'],
'Bank transfer roll number': record['roll_number'],
'Recipient address': disbursement_address_for_export(record),
'Recipient email': record['recipient_email'],
'Status': str(format_disbursement_resolution(record['resolution'])),
'NOMIS transaction': record['nomis_transaction_id'],
'SOP invoice number': record['invoice_number'],
}
class SenderListSerialiser(ObjectListSerialiser, object_type='senders'):
headers = [
'Sender name', 'Payment method',
'Credits sent', 'Total amount sent',
'Prisoners sent to', 'Prisons sent to',
'Bank transfer sort code', 'Bank transfer account', 'Bank transfer roll number',
'Debit card number', 'Debit card expiry', 'Debit card postcode',
'Other cardholder names', 'Cardholder emails',
]
def serialise(self, record):
serialised_record = {
'Credits sent': record['credit_count'],
'Total amount sent': format_currency(record['credit_total']),
'Prisoners sent to': record['prisoner_count'],
'Prisons sent to': record['prison_count'],
}
if record.get('bank_transfer_details'):
bank_transfer = record['bank_transfer_details'][0]
return {
**serialised_record,
'Sender name': bank_transfer['sender_name'],
'Payment method': 'Bank transfer',
'Bank transfer sort code': format_sort_code(bank_transfer['sender_sort_code']),
'Bank transfer account': bank_transfer['sender_account_number'],
'Bank transfer roll number': bank_transfer['sender_roll_number'],
}
if record.get('debit_card_details'):
debit_card = record['debit_card_details'][0]
try:
sender_name = debit_card['cardholder_names'][0]
except IndexError:
sender_name = 'Unknown'
other_sender_names = NameSet(debit_card['cardholder_names'])
if sender_name in other_sender_names:
other_sender_names.remove(sender_name)
return {
**serialised_record,
'Sender name': sender_name,
'Payment method': 'Debit card',
'Debit card number': format_card_number(debit_card),
'Debit card expiry': debit_card['card_expiry_date'],
'Debit card postcode': debit_card['postcode'] or 'Unknown',
'Other cardholder names': ', '.join(other_sender_names),
'Cardholder emails': ', '.join(EmailSet(debit_card['sender_emails'])),
}
return {
**serialised_record,
'Sender name': '(Unknown)',
'Payment method': '(Unknown)',
}
class PrisonerListSerialiser(ObjectListSerialiser, object_type='prisoners'):
headers = [
'Prisoner number',
'Prisoner name',
'Date of birth',
'Credits received',
'Total amount received',
'Payment sources',
'Disbursements sent',
'Total amount sent',
'Recipients',
'Current prison',
'All known prisons',
'Names given by senders',
]
def serialise(self, record):
if record['current_prison']:
current_prison = record['current_prison']['name']
else:
current_prison = 'Not in a public prison'
provided_names = NameSet(record['provided_names'])
return {
'Prisoner number': record['prisoner_number'],
'Prisoner name': record['prisoner_name'],
'Date of birth': record['prisoner_dob'],
'Credits received': record['credit_count'],
'Total amount received': format_currency(record['credit_total']),
'Payment sources': record['sender_count'],
'Disbursements sent': record['disbursement_count'],
'Total amount sent': format_currency(record['disbursement_total']),
'Recipients': record['recipient_count'],
'Current prison': current_prison,
'All known prisons': list_prison_names(record['prisons']),
'Names given by senders': ', '.join(provided_names),
}
def escape_formulae(value):
"""
Escapes formulae (strings that start with =) to prevent
spreadsheet software vulnerabilities being exploited
:param value: the value being added to a CSV cell
"""
if isinstance(value, str) and value.startswith('='):
return "'" + value
if isinstance(value, datetime.datetime):
return value.strftime('%Y-%m-%d %H:%M:%S')
if isinstance(value, datetime.date):
return value.strftime('%Y-%m-%d')
return value
def credit_address_for_export(address):
if not address:
return ''
whitespace = re.compile(r'\s+')
keys = ('line1', 'line2', 'city', 'postcode', 'country')
lines = (whitespace.sub(' ', address[key]).strip() for key in keys if address.get(key))
return ', '.join(lines)
def disbursement_address_for_export(disbursement):
whitespace = re.compile(r'\s+')
keys = ('address_line1', 'address_line2', 'city', 'postcode', 'country')
lines = (whitespace.sub(' ', disbursement[key]).strip() for key in keys if disbursement.get(key))
return ', '.join(lines)
| 2.0625 | 2 |
src/scorers/result_scorer_pr_binary_factory.py | elangovana/large-scale-ptm-ppi | 1 | 12785582 | <gh_stars>1-10
from scorers.base_classification_scorer_factory import BaseClassificationScorerFactory
from scorers.result_scorer_pr_binary import ResultScorerPrBinary
class ResultScorerPrBinaryFactory(BaseClassificationScorerFactory):
"""
Factory for Pr Binary
"""
def get(self):
return ResultScorerPrBinary()
| 1.867188 | 2 |
python/p273.py | forewing/lc | 0 | 12785583 | <gh_stars>0
class Solution:
def __init__(self):
self.hundred = "Hundred"
self.split = ["INVALID", "Thousand", "Million", "Billion"]
self.tens = ["INVALID", "INVALID", "Twenty", "Thirty", "Forty", "Fifty", "Sixty", "Seventy", "Eighty", "Ninety"]
self.teens = ["Ten", "Eleven", "Twelve", "Thirteen", "Fourteen",
"Fifteen", "Sixteen", "Seventeen", "Eighteen", "Nineteen"]
self.ones = ["Zero", "One", "Two", "Three", "Four", "Five", "Six", "Seven", "Eight", "Nine"]
def get_100_999(self, num):
hundred = num // 100
remain = self.get_10_99(num % 100)
if hundred > 0:
return [self.ones[hundred], self.hundred] + remain
return remain
def get_10_99(self, num):
if num < 10:
return self.get_0_9(num)
elif num < 20:
return [self.teens[num % 10]]
else:
return [self.tens[num // 10]] + self.get_0_9(num % 10)
def get_0_9(self, num):
if num == 0:
return []
return [self.ones[num]]
def numberToWords(self, num: int) -> str:
if num == 0:
return self.ones[0]
result = []
for split in range(0, 4):
remain = num % 1000
num //= 1000
if remain:
result = self.get_100_999(remain) + ([self.split[split]] if split > 0 else []) + result
return " ".join(result)
| 3.296875 | 3 |
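# Example (hypothetical input): Solution().numberToWords(1234567)
# -> "One Million Two Hundred Thirty Four Thousand Five Hundred Sixty Seven"
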
src/owmpy/current/_classes.py | ernieIzde8ski/open_weather_mappy | 0 | 12785584 | from ..utils import Number as _Number
from ..utils import Units, _AutomaticClient
from ._classes import *
from .response import *
class CurrentWeatherAPIException(Exception):
pass
class CurrentWeather(_AutomaticClient):
BASE_URL = "https://api.openweathermap.org/data/2.5/weather"
async def get(
self, coords: tuple[_Number, _Number], units: Units = StandardUnits.STANDARD, lang: str | None = None
) -> CurrentWeatherStatus:
params = {"appid": self.appid, "lat": coords[0], "lon": coords[1], "units": units.api_name}
if lang:
params["lang"] = lang
async with self.client.get(self.BASE_URL, params=params) as resp:
resp = await resp.json()
if "cod" in resp and "message" in resp:
raise CurrentWeatherAPIException(resp["cod"], resp["message"])
if "rain" in resp:
keys: set[str] = set(resp["rain"])
for key in keys:
resp["rain"][f"_{key}"] = resp["rain"][key]
del resp["rain"][key]
return CurrentWeatherStatus(**resp, units=units)
| 2.765625 | 3 |
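# Usage sketch (assumptions: _AutomaticClient accepts the OpenWeatherMap API
# key as `appid` and manages the aiohttp session; key and coordinates are
# placeholders):
#
#     weather = CurrentWeather(appid='<api-key>')
#     status = await weather.get((53.35, -6.26))
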
Joels_Files/mathFunctions/signal_math.py | Hullimulli/EEGEyeNet | 0 | 12785585 | <gh_stars>0
from tqdm import tqdm
import os
import numpy as np
from config import config
def pca(inputSignals: np.ndarray, filename: str, directory: str):
"""
Calculates the principle components for the given samples. Saves the matrix as a numpy file.
@param inputSignals: 3d Tensor of the signals of which the principle components have to be calculated.
Shape has to be [#samples,#timestamps,#electrodes]
@type inputSignals: Numpy Array
@param filename: The name of the file which stores the eigenvectors.
@type filename: String
@param directory: Directory where the file has to be saved.
@type directory: String
"""
#Checks
if inputSignals.ndim != 3:
raise Exception("Need a 3 dimensional array as input.")
if not os.path.isdir(directory):
raise Exception("Directory does not exist.")
    eigenVectors = np.zeros([inputSignals.shape[1], inputSignals.shape[1], inputSignals.shape[2]])
for i in tqdm(range(inputSignals.shape[2])):
covMatrix = np.cov(np.transpose(inputSignals[:, :, i]))
if config['framework'] == 'tensorflow':
import tensorflow as tf
e, v = tf.linalg.eigh(covMatrix)
elif config['framework'] == 'pytorch':
import torch
e, v = torch.linalg.eigh(torch.from_numpy(covMatrix))
else:
print("No valid framework selected.")
return
del e
        eigenVectors[:, :, i] = v.numpy()[:, ::-1]
    np.save(os.path.join(directory, filename), eigenVectors)
def pcaDimReduction(inputSignals, file, dim=2, transformBackBool=True):
"""
Takes the signals and transforms them with the eigenvector matrix of the PCA. All values except the first n ones,
which correspond to the eigenvectors with the highest eigenvalues, are set to zero. Then the
signal is transformed back to its original space if desired.
@param inputSignals: 3d Tensor of the signal of which the principle components have to be calculated.
Shape has to be [#samples,#timestamps,#electrodes]
@type inputSignals: Numpy Array
@param file: Where the numpy file of the principle components is found.
@type file: String
@param dim: How many eigenvectors are kept.
@type dim: Integer
    @param transformBackBool: If True, the data is transformed back to its original space.
    @type transformBackBool: Bool
@return: Input transformed with PCA.
@rtype: [#samples,#timestamps/#dimensions,#electrodes] Numpy Array
"""
#Checks
if inputSignals.ndim != 3:
raise Exception("Need a 3 dimensional array as input.")
if not os.path.isfile(file):
raise Exception("Directory does not exist.")
v = np.load(file)
if inputSignals.shape[1] != v.shape[1] or inputSignals.shape[2] != v.shape[2]:
raise Exception("Invalid shapes.")
if config['framework'] == 'tensorflow':
import tensorflow as fr
z = np.transpose(fr.matmul(np.transpose(v), np.transpose(inputSignals)).numpy())
if transformBackBool:
z[:, dim:, :] = 0
returnValue = np.transpose(fr.matmul(np.swapaxes(np.transpose(v), 1, 2), np.transpose(z)).numpy())
return returnValue
else:
return z[:, :dim, :]
elif config['framework'] == 'pytorch':
import torch as fr
z = np.transpose(fr.matmul(fr.from_numpy(np.transpose(v)), fr.from_numpy(np.transpose(inputSignals))).numpy())
if transformBackBool:
z[:, dim:, :] = 0
returnValue = np.transpose(fr.matmul(fr.from_numpy(np.swapaxes(np.transpose(v), 1, 2)),
fr.from_numpy(np.transpose(z))).numpy())
return returnValue
else:
return z[:, :dim, :]
else:
raise Exception("No valid framework selected.") | 2.421875 | 2 |
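

# Minimal usage sketch (hypothetical shapes; assumes config['framework'] is set
# to 'tensorflow' or 'pytorch'; `pca` saves the eigenvectors, which
# `pcaDimReduction` then loads):
#
#     signals = np.random.randn(100, 500, 129)  # [samples, timestamps, electrodes]
#     pca(signals, filename='eigvecs.npy', directory='.')
#     reduced = pcaDimReduction(signals, file='./eigvecs.npy', dim=2)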
src/commands/actions/download_entity.py | jherrerotardon/spies | 0 | 12785586 | <reponame>jherrerotardon/spies
from pyframework.commands.action import Action
from pyframework.exceptions.custom_exceptions import ArgumentException
from ...triggers.entity_info_trigger import EntityInfoTrigger, AbstractTrigger
class DownloadEntity(Action):
"""Concrete action to download entities (restaurants) from place. """
_name = 'download.info.ready.action'
_entities_ids = []
"""Entities ids to be scrapped. """
    _endpoint_id: int
"""Endpoint to be downloaded. """
def set_up(self):
super(DownloadEntity, self).set_up()
if 'endpoint_id' not in self._payload:
raise ArgumentException('Endpoint ID is required.')
self._entities_ids = self._payload.get('restaurants_ids', [])
def _generate_tasks(self) -> list:
tasks = [{
'id': entity_id,
'guid': self._guid,
'entity_id': entity_id,
'endpoint_id': self._payload['endpoint_id'],
} for entity_id in self._entities_ids]
return tasks
def _get_trigger(self) -> AbstractTrigger:
return EntityInfoTrigger()
| 2.296875 | 2 |
cardboard/tests/test_phases.py | Julian/cardboard | 5 | 12785587 | <reponame>Julian/cardboard
import unittest
import mock
from cardboard import events, phases as p
from cardboard.tests.util import GameTestCase
class TestPhase(unittest.TestCase):
def test_init(self):
h = p.Phase("Foo", [1, 2, 3])
self.assertEqual(h.name, "Foo")
self.assertEqual(h.steps, [1, 2, 3])
def test_iter(self):
h = p.Phase("Foo", [1, 2, 3])
self.assertEqual(list(h), [1, 2, 3])
def test_getitem(self):
h = p.Phase("Foo", [1, 2, 3])
self.assertEqual(h[0], 1)
self.assertEqual(h[1], 2)
self.assertEqual(h[2], 3)
def test_len(self):
h = p.Phase("Foo", [1, 2, 3])
self.assertEqual(len(h), 3)
def test_repr_str(self):
h = p.Phase("foo_bar", [1, 2, 3])
self.assertEqual(repr(h), "<Phase: Foo Bar>")
self.assertEqual(str(h), "Foo Bar")
class TestPhaseMechanics(GameTestCase):
def test_untap(self):
"""
The untap step should perform the actions in :ref:`untap-step`.
"""
self.game.start()
own = [mock.Mock() for _ in range(4)]
not_own = [mock.Mock() for _ in range(4)]
# TODO: Just double check that this is how the final implementation is
phasing = mock.Mock()
phasing.description = "Phasing"
own[0].abilities = own[1].abilities = []
not_own[0].abilities = not_own[1].abilities = []
own[2].abilities = own[3].abilities = [phasing]
own[2].is_phased_in = False
not_own[2].abilities = not_own[3].abilities = [phasing]
not_own[2].is_phased_in = False
for n, o in zip(own, not_own):
n.types = o.types = {"Enchantment"}
for o in own:
o.controller = self.game.turn.active_player
self.game.battlefield.update(own, not_own)
p.untap(self.game)
# all phased-in permanents with phasing that the active player controls
# phase out, and all phased-out permanents controlled when they phased
# out phase in
self.assertTrue(own[2].phase_in.called)
self.assertTrue(own[3].phase_out.called)
self.assertFalse(not_own[2].phase_in.called)
self.assertFalse(not_own[3].phase_out.called)
# the active player determines which permanents he controls will untap
# Then he untaps them all simultaneously.
for o in own:
o.untap.assert_called_once_with()
for o in not_own:
self.assertFalse(o.untap.called)
self.assertTriggered([
{"event" : events.STEP_BEGAN, "phase" : "beginning",
"step" : "untap", "player" : self.game.turn.active_player},
{"event" : events.STEP_ENDED, "phase" : "beginning",
"step" : "untap", "player" : self.game.turn.active_player},
])
# XXX: Normally all untap. but effects can keep some from untapping.
def test_upkeep(self):
"""
The upkeep step should perform the actions in :ref:`upkeep-step`.
"""
self.game.start()
self.game.grant_priority = mock.Mock()
p.upkeep(self.game)
self.assertTrue(self.game.grant_priority.called)
self.assertTriggered([
{"event" : events.STEP_BEGAN, "phase" : "beginning",
"step" : "upkeep", "player" : self.game.turn.active_player},
{"event" : events.STEP_ENDED, "phase" : "beginning",
"step" : "upkeep", "player" : self.game.turn.active_player},
])
def test_draw(self):
"""
The draw step should perform the actions in :ref:`draw-step`.
"""
self.game.start()
self.game.turn.active_player.draw = mock.Mock()
self.game.grant_priority = mock.Mock()
p.draw(self.game)
self.assertTrue(self.game.turn.active_player.draw.called)
self.assertTrue(self.game.grant_priority.called)
self.assertTriggered([
{"event" : events.STEP_BEGAN, "phase" : "beginning",
"step" : "draw", "player" : self.game.turn.active_player},
{"event" : events.STEP_ENDED, "phase" : "beginning",
"step" : "draw", "player" : self.game.turn.active_player},
])
def test_main(self):
"""
The main phase should perform the actions in :ref:`main-phase`.
"""
self.game.start()
self.game.grant_priority = mock.Mock()
p.first_main.steps[0](self.game)
self.assertTrue(self.game.grant_priority.called)
self.assertTriggered([
{"event" : events.PHASE_BEGAN, "phase" : "first main",
"player" : self.game.turn.active_player},
{"event" : events.PHASE_ENDED, "phase" : "first main",
"player" : self.game.turn.active_player},
])
self.resetEvents()
self.game.grant_priority = mock.Mock()
p.second_main.steps[0](self.game)
self.assertTrue(self.game.grant_priority.called)
self.assertTriggered([
{"event" : events.PHASE_BEGAN, "phase" : "second main",
"player" : self.game.turn.active_player},
{"event" : events.PHASE_ENDED, "phase" : "second main",
"player" : self.game.turn.active_player},
])
def test_end(self):
"""
The end step should perform the actions in :ref:`end-step`.
"""
self.game.start()
self.game.grant_priority = mock.Mock()
p.end(self.game)
self.assertTrue(self.game.grant_priority.called)
self.assertTriggered([
{"event" : events.STEP_BEGAN, "phase" : "ending",
"step" : "end", "player" : self.game.turn.active_player},
{"event" : events.STEP_ENDED, "phase" : "ending",
"step" : "end", "player" : self.game.turn.active_player},
])
def test_cleanup(self):
"""
The cleanup step should perform the actions in :ref:`cleanup-step`.
"""
self.game.start()
player = self.game.turn.active_player
player.draw(3)
discard = list(player.hand)[:-7]
with player.user.select_cards.will_return(*discard):
p.cleanup(self.game)
for card in discard:
self.assertIn(card, player.graveyard)
# XXX: remove all damage
self.assertTriggered([
{"event" : events.STEP_BEGAN, "phase" : "ending",
"step" : "cleanup", "player" : self.game.turn.active_player},
{"event" : events.STEP_ENDED, "phase" : "ending",
"step" : "cleanup", "player" : self.game.turn.active_player},
])
| 3.125 | 3 |
starbot/configuration/config.py | onerandomusername/StarBot | 0 | 12785588 | <reponame>onerandomusername/StarBot
from typing import Any
from disnake import Permissions
from starbot.configuration.config_abc import ConfigABC
from starbot.configuration.definition import DEFINITION
from starbot.configuration.utils import get_dotted_path
class GuildConfig(ConfigABC):
"""
Represents one node inside the guild configuration.
The structure is defined in the configuration definition file.
Each node can be accessed using the dot notation.
"""
def __init__(self, guild_id: int, entries: dict[str, str], prefix: str = "") -> None:
self.guild_id = guild_id
self.entries = entries
self.prefix = prefix
def __getattr__(self, item: str) -> Any:
path = item if not self.prefix else f"{self.prefix}.{item}"
if not (definition := get_dotted_path(DEFINITION, path)):
raise AttributeError(f"The configuration entry '{path}' does not exist.")
# If this has a `type` attribute then we know it is an entry
if "type" in definition:
value = self.entries[path] if path in self.entries else definition["default"]
return self.convert_entry(value, definition)
# If not, we can just nest another config
else:
return GuildConfig(self.guild_id, self.entries, path)
def get(self, key: str) -> Any:
"""Get the value of a configuration entry."""
if not (definition := get_dotted_path(DEFINITION, key)):
raise KeyError(f"The configuration entry '{key}' does not exist.")
if key in self.entries:
return self.convert_entry(self.entries[key], definition)
else:
return self.convert_entry(definition["default"], definition)
def convert_entry(self, value: Any, definition: dict) -> Any:
"""Convert the string value to the correct type."""
if value is None:
return None
match definition["type"]:
case "role":
return int(value)
case "int":
return int(value, base=0)
case "bool":
return value.lower() in ["true", "t", "yes", "y", "1"]
case "discord_permission":
return Permissions(**{value: True})
case "choice":
if value not in definition["choices"]:
raise ValueError(f"The value '{value}' is not in the list of choices.")
return value
case "str":
return value
case _:
raise ValueError(f"Unknown type '{definition['type']}'.")
def __str__(self) -> str:
return f"<GuildConfig(guild_id={self.guild_id})>"
| 2.78125 | 3 |
birdseye/actions.py | emmair/BirdsEye | 0 | 12785589 |
import random
import itertools
import numpy as np
class Actions(object):
"""Common base class for action methods
Parameters
----------
action_space : tuple
Set of tuples defining combinations of actions
for all dimensions
"""
def __init__(self, action_space=None, verbose=False, **kwargs):
if action_space is None:
raise ValueError('Action space must be defined by action(s) (set)')
self.action_space = action_space
self.action_list = self.setup_action_list()
self.verbose = verbose
if verbose:
self.print_action_info()
def avail_actions(self):
"""Return set of available actions
"""
return self.action_space
def get_action_list(self):
"""Return ordered list of actions
"""
return self.action_list
def action_to_index(self, index=0):
"""Undefined action to index method:
Provided an index, return associated action
"""
raise NotImplementedError()
def index_to_action(self, action=None):
"""Undefined index to action method:
Provided an action, return associated index
"""
raise NotImplementedError()
def setup_action_list(self):
"""Define ordered list of actions
"""
return list(map(self.action_to_index, self.action_space))
def get_random_action(self):
"""Return random action and associated index
"""
random_action_index = random.choice(self.get_action_list())
return self.index_to_action(random_action_index), random_action_index
def print_action_info(self):
print("Available Actions:")
print(" ID, Values")
for ai in zip(self.get_action_list(), self.avail_actions()):
print(" {} {}".format(ai[0], ai[1]))
class SimpleActions(Actions):
"""SimpleActions for testing purposes
"""
def __init__(self):
self.del_theta = [-30, 0, 30]
self.del_r = [3,4]
simple_action_space = tuple(itertools.product(self.del_theta, self.del_r))
super().__init__(action_space=simple_action_space, verbose=False)
#returns index of action given an action
def action_to_index(self, action):
return self.action_space.index(action)
#returns action given an index
def index_to_action(self, a_idx):
return self.action_space[a_idx]
AVAIL_ACTIONS = {'simpleactions' : SimpleActions,
}
def get_action(action_name=''):
"""Convenience function for retrieving BirdsEye action methods
Parameters
----------
action_name : {'simpleactions'}
Name of action method.
Returns
-------
action_obj : Action class object
BirdsEye action method.
"""
action_name = action_name.lower()
if action_name in AVAIL_ACTIONS:
action_obj = AVAIL_ACTIONS[action_name]
return action_obj
else:
raise ValueError('Invalid action method name, {}, entered. Must be '
'in {}'.format(action_name, AVAIL_ACTIONS.keys()))
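# Example (sketch) using the bundled SimpleActions method:
#
#     action_cls = get_action('simpleactions')
#     actions = action_cls()
#     action, index = actions.get_random_action()
#     assert actions.index_to_action(index) == action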
| 2.984375 | 3 |
home.admin/BlitzTUI/blitztui/ui/qcode.py | PatrickScheich/raspiblitz | 1,908 | 12785590 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'designer/qcode.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_DialogShowQrCode(object):
def setupUi(self, DialogShowQrCode):
DialogShowQrCode.setObjectName("DialogShowQrCode")
DialogShowQrCode.resize(480, 320)
self.buttonBox = QtWidgets.QDialogButtonBox(DialogShowQrCode)
self.buttonBox.setGeometry(QtCore.QRect(326, 268, 150, 50))
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.buttonBox.sizePolicy().hasHeightForWidth())
self.buttonBox.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(24)
self.buttonBox.setFont(font)
self.buttonBox.setStyleSheet("background-color: lightgrey;\n"
"font: 24pt \"Arial\";")
self.buttonBox.setOrientation(QtCore.Qt.Vertical)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.top_right_logo = QtWidgets.QLabel(DialogShowQrCode)
self.top_right_logo.setGeometry(QtCore.QRect(430, 2, 40, 60))
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.top_right_logo.sizePolicy().hasHeightForWidth())
self.top_right_logo.setSizePolicy(sizePolicy)
self.top_right_logo.setText("")
self.top_right_logo.setPixmap(QtGui.QPixmap(":/RaspiBlitz/images/RaspiBlitz_Logo_Berry.png"))
self.top_right_logo.setScaledContents(True)
self.top_right_logo.setAlignment(QtCore.Qt.AlignCenter)
self.top_right_logo.setObjectName("top_right_logo")
self.frame = QtWidgets.QFrame(DialogShowQrCode)
self.frame.setGeometry(QtCore.QRect(0, 0, 320, 320))
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame.sizePolicy().hasHeightForWidth())
self.frame.setSizePolicy(sizePolicy)
self.frame.setStyleSheet("background-color: rgb(255, 255, 255);")
self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame.setObjectName("frame")
self.qcode = QtWidgets.QLabel(self.frame)
self.qcode.setGeometry(QtCore.QRect(1, 1, 318, 318))
self.qcode.setStyleSheet("background-color: white")
self.qcode.setText("")
self.qcode.setPixmap(QtGui.QPixmap(":/RaspiBlitz/images/RaspiBlitz_Logo_Stacked.png"))
self.qcode.setScaledContents(True)
self.qcode.setAlignment(QtCore.Qt.AlignCenter)
self.qcode.setObjectName("qcode")
self.label = QtWidgets.QLabel(DialogShowQrCode)
self.label.setGeometry(QtCore.QRect(330, 4, 88, 60))
self.label.setText("")
self.label.setPixmap(QtGui.QPixmap(":/RaspiBlitz/images/RaspiBlitz_Logo_Stacked.png"))
self.label.setScaledContents(True)
self.label.setAlignment(QtCore.Qt.AlignCenter)
self.label.setObjectName("label")
self.horizontalLayoutWidget = QtWidgets.QWidget(DialogShowQrCode)
self.horizontalLayoutWidget.setGeometry(QtCore.QRect(320, 70, 161, 191))
self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.horizontalLayoutWidget)
self.verticalLayout.setContentsMargins(6, 0, 6, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.line = QtWidgets.QFrame(self.horizontalLayoutWidget)
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.verticalLayout.addWidget(self.line)
self.memo_key = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
font.setBold(True)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(75)
self.memo_key.setFont(font)
self.memo_key.setScaledContents(False)
self.memo_key.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.memo_key.setWordWrap(True)
self.memo_key.setObjectName("memo_key")
self.verticalLayout.addWidget(self.memo_key)
self.memo_value = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
self.memo_value.setFont(font)
self.memo_value.setScaledContents(False)
self.memo_value.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.memo_value.setWordWrap(True)
self.memo_value.setObjectName("memo_value")
self.verticalLayout.addWidget(self.memo_value)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.status_key = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
font.setBold(True)
font.setUnderline(False)
font.setWeight(75)
self.status_key.setFont(font)
self.status_key.setScaledContents(False)
self.status_key.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.status_key.setWordWrap(True)
self.status_key.setObjectName("status_key")
self.horizontalLayout.addWidget(self.status_key)
self.status_value = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
self.status_value.setFont(font)
self.status_value.setScaledContents(False)
self.status_value.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.status_value.setWordWrap(True)
self.status_value.setObjectName("status_value")
self.horizontalLayout.addWidget(self.status_value)
self.verticalLayout.addLayout(self.horizontalLayout)
self.inv_amt_key = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.inv_amt_key.setFont(font)
self.inv_amt_key.setObjectName("inv_amt_key")
self.verticalLayout.addWidget(self.inv_amt_key)
self.inv_amt_value = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.inv_amt_value.setFont(font)
self.inv_amt_value.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.inv_amt_value.setObjectName("inv_amt_value")
self.verticalLayout.addWidget(self.inv_amt_value)
self.amt_paid_key = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.amt_paid_key.setFont(font)
self.amt_paid_key.setObjectName("amt_paid_key")
self.verticalLayout.addWidget(self.amt_paid_key)
self.amt_paid_value = QtWidgets.QLabel(self.horizontalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(11)
self.amt_paid_value.setFont(font)
self.amt_paid_value.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.amt_paid_value.setObjectName("amt_paid_value")
self.verticalLayout.addWidget(self.amt_paid_value)
self.spinner = QtWidgets.QWidget(DialogShowQrCode)
self.spinner.setGeometry(QtCore.QRect(440, 0, 40, 40))
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.spinner.sizePolicy().hasHeightForWidth())
self.spinner.setSizePolicy(sizePolicy)
self.spinner.setObjectName("spinner")
self.spinner.raise_()
self.buttonBox.raise_()
self.top_right_logo.raise_()
self.frame.raise_()
self.label.raise_()
self.horizontalLayoutWidget.raise_()
self.retranslateUi(DialogShowQrCode)
self.buttonBox.accepted.connect(DialogShowQrCode.accept)
QtCore.QMetaObject.connectSlotsByName(DialogShowQrCode)
def retranslateUi(self, DialogShowQrCode):
_translate = QtCore.QCoreApplication.translate
DialogShowQrCode.setWindowTitle(_translate("DialogShowQrCode", "Dialog"))
self.memo_key.setText(_translate("DialogShowQrCode", "Memo"))
self.memo_value.setText(_translate("DialogShowQrCode", "RB-Vivid-Badger"))
self.status_key.setText(_translate("DialogShowQrCode", "Status"))
self.status_value.setText(_translate("DialogShowQrCode", "Open/Paid"))
self.inv_amt_key.setText(_translate("DialogShowQrCode", "Invoice Amount"))
self.inv_amt_value.setText(_translate("DialogShowQrCode", "123456798"))
self.amt_paid_key.setText(_translate("DialogShowQrCode", "Amount Paid"))
self.amt_paid_value.setText(_translate("DialogShowQrCode", "N/A"))
from . import resources_rc
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
DialogShowQrCode = QtWidgets.QDialog()
ui = Ui_DialogShowQrCode()
ui.setupUi(DialogShowQrCode)
DialogShowQrCode.show()
sys.exit(app.exec_())
| 1.671875 | 2 |
CONSTANT.py | sotolatopiga/scrape-server | 0 | 12785591 | OUTPUT_PICKLE_FILENAME = "data.pickle" | 1.132813 | 1 |
Command/commands/set.py | hc-tec/redis-py | 1 | 12785592 |
from Client.interfaces import IClient
from Command.base import BaseCommand, CommandType
from Database.interfaces import IDatabase
from Timer.event import TimeoutEvent
from Timer.timestamp import Timestamp
from Conf.command import CMD_RES
class Set(BaseCommand):
args_order = ['key', 'value', 'expires_time']
min_args = 2
cmd_type = CommandType.CMD_WRITE
def handle(self, args, kwargs):
db: IDatabase = self.client.get_database()
expires_time = kwargs.get('expires_time')
if expires_time is None:
db.store(kwargs['key'], kwargs['value'])
else:
expires_time = int(expires_time)
db.store(kwargs['key'], kwargs['value'])
self.set_expires_timer(kwargs['key'], expires_time)
return CMD_RES.OK
def set_expires_timer(self, key, expires_time):
db: IDatabase = self.client.get_database()
timestamp = Timestamp(expires_time, 's')
db.store_expires(key, timestamp.get_time())
timeout_event = ExpiresKeyRemoveEvent(timestamp)
timeout_event.set_extra_data({
"client": self.client,
"expires_key": key
})
server = self.client.get_server()
reactor = server.get_loop()
reactor.create_timeout_event(timeout_event)
print('expire event build')
class ExpiresKeyRemoveEvent(TimeoutEvent):
def handle_event(self, reactor):
extra_data = self.extra_data
print('expire event activate')
client: IClient = extra_data['client']
db: IDatabase = client.get_database()
db.remove_expires(extra_data['expires_key'])
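# Dispatch sketch (hypothetical client plumbing): a request like
# "SET mykey hello 10" is mapped through args_order into kwargs
# {"key": "mykey", "value": "hello", "expires_time": "10"}, so handle()
# stores the value and schedules an ExpiresKeyRemoveEvent roughly 10s later.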
| 2.21875 | 2 |
env_wrapper.py | modanesh/integrated-gradient-pytorch | 0 | 12785593 | """
The Atari environment(env) wrapper. Some envs needed some configurations which you can find below.
"""
import gym
import numpy as np
from scipy.misc import imresize
class AtariWrapper():
def __init__(self, env):
self.env = env
self.observation_space = self.env.observation_space
self.reward_range = self.env.reward_range
self.metadata = self.env.metadata
self.spec = self.env.spec
def step(self, *args, **kwargs):
state, reward, done, info = self.env.step(*args, **kwargs)
info['org_obs'] = state
state = self.process_atari_image(state)
return state, reward, done, info
@property
def action_space(self):
return self.env.action_space
def close(self, *args, **kwargs):
return self.env.close(*args, **kwargs)
def render(self, mode='human', inspect=False, img=None):
if not inspect:
return self.env.render(mode)
else:
if mode == 'rgb_array':
return img
elif mode == 'human':
from gym.envs.classic_control import rendering
if self.env.env.viewer is None:
self.env.env.viewer = rendering.SimpleImageViewer()
self.env.env.viewer.imshow(img)
return self.env.env.viewer.isopen
def reset(self, inspect=False):
state = self.env.reset()
if inspect:
return self.process_atari_image(state), state
else:
return self.process_atari_image(state)
def seed(self, *args, **kwargs):
return self.env.seed(*args, **kwargs)
@staticmethod
def process_atari_image(img):
return imresize(img[5:195].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class Crop35And195(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@staticmethod
def process_atari_image(img):
return imresize(img[35:195].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class Crop15And195(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@staticmethod
def process_atari_image(img):
        return imresize(img[15:195].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class PongWrapper(Crop35And195):
def __init__(self, env):
Crop35And195.__init__(self, env)
def step(self, action):
if action > 2:
raise Exception('Unknown Action')
if action == 1:
action = 4
elif action == 2:
action = 5
state, reward, done, info = self.env.step(action)
info['org_obs'] = state
state = self.process_atari_image(state)
return state, reward, done, info
@property
def action_space(self):
return gym.spaces.discrete.Discrete(3)
class SpaceInvaderWrapper(Crop15And195):
def __init__(self, env):
Crop15And195.__init__(self, env)
@property
def action_space(self):
return gym.spaces.discrete.Discrete(4)
class EnduroWrapper(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@staticmethod
def process_atari_image(img):
return imresize(img[0:155, 10:].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class BeamRiderWrapper(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@staticmethod
def process_atari_image(img):
return imresize(img[30:180, 10:].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class FreewayWrapper(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@staticmethod
def process_atari_image(img):
return imresize(img[25:195, 10:].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class BoxingWrapper(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@staticmethod
def process_atari_image(img):
return imresize(img[15:180, 30:130].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class BreakoutWrapper(Crop35And195):
def __init__(self, env):
Crop35And195.__init__(self, env)
class QbertWrapper(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@staticmethod
def process_atari_image(img):
return imresize(img[30:190, 10:150].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class BowlingWrapper(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
@property
def action_space(self):
return gym.spaces.discrete.Discrete(4)
@staticmethod
def process_atari_image(img):
return imresize(img[105:172, :].mean(2), (80, 80)).astype(np.float32).reshape(1, 80, 80) / 255.0
class ElevatorActionWrapper(AtariWrapper):
def __init__(self, env):
AtariWrapper.__init__(self, env)
def atari_wrapper(env_name):
x = env_name.lower()
x = x.split('-')[0]
if x.__contains__('pong'):
env = PongWrapper(gym.make(env_name))
elif x.__contains__('spaceinvaders'):
env = SpaceInvaderWrapper(gym.make(env_name))
elif x.__contains__('enduro'):
env = EnduroWrapper(gym.make(env_name))
elif x.__contains__('beamrider'):
env = BeamRiderWrapper(gym.make(env_name))
elif x.__contains__('freeway'):
env = FreewayWrapper(gym.make(env_name))
elif x.__contains__('boxing'):
env = BoxingWrapper(gym.make(env_name))
elif x.__contains__('breakout'):
env = BreakoutWrapper(gym.make(env_name))
elif x.__contains__('qbert'):
env = QbertWrapper(gym.make(env_name))
elif x.__contains__('bowling'):
env = BowlingWrapper(gym.make(env_name))
elif x.__contains__('elevatoraction'):
env = ElevatorActionWrapper(gym.make(env_name))
else:
env = AtariWrapper(gym.make(env_name))
return env | 2.515625 | 3 |
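# Example (sketch): wrap a Gym Atari env by name; unknown names fall back to
# the generic AtariWrapper.
#
#     env = atari_wrapper('Pong-v0')   # -> PongWrapper with a 3-action space
#     state = env.reset()              # preprocessed (1, 80, 80) float32 frame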
cart/admin.py | arihant-001/django-cart | 0 | 12785594 |
from django.contrib import admin
from cart.models import Order, OrderItem, ShippingAddress
@admin.register(Order)
class OrderAdmin(admin.ModelAdmin):
pass
@admin.register(OrderItem)
class OrderItemAdmin(admin.ModelAdmin):
pass
@admin.register(ShippingAddress)
class ShippingAddressAdmin(admin.ModelAdmin):
pass
| 1.757813 | 2 |
IG-robot.py | abdallah34/IG-bot | 0 | 12785595 | from time import sleep
import requests
import json
class AbdullahCoder():
def __init__(self):
self.hosturl = "https://www.instagram.com/"
self.loginurl = "https://www.instagram.com/accounts/login/ajax/"
self.editurl = "https://www.instagram.com/accounts/web_change_profile_picture/"
self.editdata = {"Content-Disposition": "form-data", "name": "profile_pic","filename": "profilepic.jpg", "Content-Type": "image/jpeg"}
self.session = requests.Session()
self.session.headers = {'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36', 'Referer': self.hosturl}
self.headers = {"Host": "www.instagram.com","Accept": "*/*","Accept-Language": "en-US,en;q=0.5","Accept-Encoding": "gzip, deflate, br","Referer": "https://www.instagram.com/accounts/edit/","X-IG-App-ID": "936619743392459","X-Requested-With": "XMLHttpRequest","DNT": "1","Connection": "keep-alive",}
self.pcnm = int(input('Number of photos: '))
self.X = int(input('Sleep : '))
self.extension = input("photo extension [png,jpg] : ")
if self.AddCH() == False:
self.login()
self.Save_login()
while True:
n = 0
while n < self.pcnm:
self.change(n)
n+=1
def login(self):
username = str(input('Username : '))
password = input('Password : ')
resp = self.session.get(self.hosturl)
self.session.headers.update({'X-CSRFToken': resp.cookies['csrftoken']})
        login_data = {'username': username, 'enc_password': '<PASSWORD>:0:&:' + password}
login_resp = self.session.post(self.loginurl, data=login_data, allow_redirects=True)
if login_resp.json()['authenticated']:
print("Login successful")
self.session.headers.update({'X-CSRFToken': login_resp.cookies['csrftoken']})
else:
print("Login failed!")
self.login()
def Save_login(self):
with open('cookies.txt', 'w+') as f:
json.dump(self.session.cookies.get_dict(), f)
with open('headers.txt', 'w+') as f:
json.dump(self.session.headers, f)
def AddCH(self):
try:
with open('cookies.txt', 'r') as f:
self.session.cookies.update(json.load(f))
with open('headers.txt', 'r') as f:
self.session.headers = json.load(f)
except:
return False
def change(self,n):
try:
with open("imgs/photo"+str(n)+"."+self.extension, "rb") as resp:
f = resp.read()
p_pic = bytes(f)
p_pic_s = len(f)
self.session.headers.update({'Content-Length': str(p_pic_s)})
files = {'profile_pic': p_pic}
r = self.session.post(self.editurl, files=files, data=self.editdata)
if r.json()['changed_profile']:
print(f"Profile picture changed | photo{str(n)}")
else:
print(f"Something went wrong | photo{str(n)}")
sleep(self.X)
except Exception as e:
print(e)
sleep(10)
AbdullahCoder()
| 2.953125 | 3 |
config_wrangler/config_types/path_types.py | arcann/config_wrangler | 0 | 12785596 | import os
import shutil
from pathlib import Path
from pydantic import DirectoryPath, FilePath
from pydantic.validators import path_validator
__all__ = [
'FilePath',
'DirectoryPath',
'AutoCreateDirectoryPath',
'DirectoryFindUp',
'PathExpandUser',
'ExecutablePath',
]
class PathExpandUser(DirectoryPath):
@staticmethod
def _expand_user(path: Path):
path = path.expanduser()
return path
@classmethod
def __get_validators__(cls):
yield path_validator
yield cls._expand_user
        yield from super().__get_validators__()
class AutoCreateDirectoryPath(PathExpandUser):
    @staticmethod
    def _ensure_exists(path: Path):
        if not path.exists():
            os.makedirs(path)
        return path
    @classmethod
    def __get_validators__(cls):
        yield path_validator
        yield cls._expand_user
        yield cls._ensure_exists
        yield from super().__get_validators__()
class DirectoryFindUp(DirectoryPath):
@staticmethod
def __find_up(path: Path):
if path.exists():
return path
else:
start_dir = Path(os.getcwd())
for parent_dir in start_dir.parents:
parent_path = Path(parent_dir, path)
if parent_path.exists():
return parent_path
raise FileNotFoundError(f"{path} not found in {start_dir} or parents")
@classmethod
def __get_validators__(cls):
yield path_validator
yield cls.__find_up
yield cls.validate
class ExecutablePath(Path):
    @staticmethod
    def __find_in_system_path(path: Path):
        full_path = shutil.which(path)
        if full_path is None:
            raise FileNotFoundError(f"{path} not found")
        # Note: on Windows any existing file appears as executable
        elif not os.access(full_path, os.X_OK):
            raise ValueError(f"{path} found but is not executable")
        return Path(full_path)
@classmethod
def __get_validators__(cls):
yield path_validator
yield cls.__find_in_system_path
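# Usage sketch (assumes pydantic v1 field validation):
#
#     from pydantic import BaseModel
#
#     class Settings(BaseModel):
#         log_dir: AutoCreateDirectoryPath  # expanded, then created if missing
#         git_exe: ExecutablePath           # must resolve on PATH and be executable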
| 2.515625 | 3 |
test_shell.py | deepfield/ishell | 0 | 12785597 | import unittest
from ishell.console import Console
from ishell.command import Command
import io
from contextlib import redirect_stdout
class TestConsole(unittest.TestCase):
def test_console_creation(self):
"""Console must be created."""
c = Console()
assert isinstance(c, Console)
def test_console_has_prompt(self):
"""Console should have a default prompt string."""
c = Console()
assert c.prompt == "Prompt"
assert c.prompt_delim == ">"
class TestCommand(unittest.TestCase):
def test_command_creation(self):
"""Command must be created with name and default help message."""
cmd = Command('configure')
assert cmd.name == 'configure'
assert cmd.help == 'No help provided'
assert cmd.dynamic_args == False
def test_simple_completion(self):
"""Command must complete with only one option."""
cmd1 = Command('configure')
cmd2 = Command('terminal')
cmd1.addChild(cmd2)
candidates = cmd1.complete('', '', 0, run=False, full_line='configure ')
assert 'terminal ' == candidates
candidates = cmd1.complete('', '', 1, run=False, full_line='configure ')
assert None == candidates
def test_double_completion(self):
"""Command must complete with two options."""
cmd1 = Command('configure')
cmd2 = Command('terminal')
cmd3 = Command('interface')
cmd1.addChild(cmd2)
cmd1.addChild(cmd3)
# State 0 must print all commands followed by help message
# and return None as candidates
candidates = cmd1.complete('', '', 0, run=False, full_line='configure ')
assert None == candidates
candidates = cmd1.complete('', 'in', 0, run=False, full_line='configure in')
assert 'interface ' == candidates
candidates = cmd1.complete('', 't', 0, run=False, full_line='configure t')
assert 'terminal ' == candidates
def test_double_overlapping_completion(self):
"""Command must complete with two overlapping options."""
cmd1 = Command('configure')
cmd2 = Command('terminal')
cmd2.run = lambda l: "terminal output"
cmd3 = Command('terminal_1')
cmd3.run = lambda l: "terminal_1 output"
cmd1.addChild(cmd2)
cmd1.addChild(cmd3)
# State 0 must print all commands followed by help message
# and return None as candidates
candidates = cmd1.complete('', '', 0, run=False, full_line='configure ')
assert None == candidates
candidates = cmd1.complete('', 't', 0, run=False, full_line='configure t')
assert 'terminal ' == candidates
candidates = cmd1.complete('', 't', 1, run=False, full_line='configure t')
assert 'terminal_1 ' == candidates
# user pressing tab on ambiguous command
candidates = cmd1.complete(["terminal"], 'terminal', 0, run=False, full_line=None)
assert "terminal " == candidates
candidates = cmd1.complete(["terminal"], 'terminal', 1, run=False, full_line=None)
assert "terminal_1 " == candidates
output = cmd1.complete(["terminal"], 'configure terminal', 0, run=True, full_line='configure terminal')
assert 'terminal output' == output
output = cmd1.complete(["terminal_1"], 'configure terminal_1', 0, run=True, full_line='configure terminal_1')
assert 'terminal_1 output' == output
def test_double_overlapping_nested_completion(self):
"""Command must complete with two overlapping nested options."""
cmd1 = Command('configure')
cmd2 = Command('terminal')
cmd3 = Command('terminal_1')
cmd1.addChild(cmd2)
cmd1.addChild(cmd3)
cmd4 = Command("option")
cmd2.addChild(cmd4)
cmd3.addChild(cmd4)
cmd5 = Command("Aaaa")
cmd4.addChild(cmd5)
cmd6 = Command("B")
cmd4.addChild(cmd6)
# show help for A, B (NOT terminal terminal_1) tab complete
with io.StringIO() as buf, redirect_stdout(buf):
output = cmd1.complete(["terminal", "option"], '', 0, run=False, full_line=None)
assert output is None
help_output = buf.getvalue()
assert help_output == '\rPossible Completions:\n\r Aaaa No help provided\n\r B No help provided\n'
# show help for A, B (NOT terminal terminal_1) enter
with io.StringIO() as buf, redirect_stdout(buf):
output = cmd1.complete(["terminal", "option"], 'terminal option', 0, run=True, full_line='terminal option')
assert output is None
help_output = buf.getvalue()
assert help_output == '\nIncomplete Command: terminal option\n\nHelp:\nAaaa - No help provided\n B - No help provided\n\n'
def test_completion_with_buffer(self):
"""Command must complete correctly with buffer provided."""
cmd1 = Command('configure')
cmd2 = Command('terminal')
cmd1.addChild(cmd2)
candidates = cmd1.complete(['t'], 't', 0, run=False, full_line='configure ')
assert 'terminal ' == candidates
candidates = cmd1.complete(['t'], 't', 1, run=False, full_line='configure ')
assert None == candidates
def test_completion_with_dynamic_arg(self):
cmd1 = Command('show')
cmd2 = Command('call', dynamic_args=True)
cmd3 = Command('calls', dynamic_args=True)
cmd2.args = lambda: ['100', '101']
cmd3.args = lambda: ['continuous', 'raw']
cmd1.addChild(cmd2)
cmd1.addChild(cmd3)
candidates = cmd1.complete(['c'], '', 0, run=False, full_line='show calls')
self.assertEqual(None, candidates)
candidates = cmd1.complete(['c'], 'c', 0, run=False, full_line='show calls')
self.assertEqual('call ', candidates)
candidates = cmd1.complete(['c'], 'c', 1, run=False, full_line='show calls')
self.assertEqual('calls ', candidates)
candidates = cmd2.complete([''], '', 0, run=False, full_line='show calls')
self.assertEqual(None, candidates)
candidates = cmd2.complete([''], '1', 0, run=False, full_line='show calls')
self.assertEqual('100', candidates)
candidates = cmd2.complete([''], '1', 1, run=False, full_line='show calls')
self.assertEqual('101', candidates)
candidates = cmd3.complete([''], '', 0, run=False, full_line='show calls c')
self.assertEqual(None, candidates)
candidates = cmd3.complete([''], 'c', 0, run=False, full_line='show calls c')
self.assertEqual('continuous', candidates)
candidates = cmd3.complete([''], 'r', 0, run=False, full_line='show calls c')
self.assertEqual('raw', candidates)
candidates = cmd1.complete(['calls', 'c'], 'c', 0, run=False, full_line='show calls c')
self.assertEqual('continuous', candidates)
candidates = cmd2.complete(['1'], '1', 0, run=False, full_line='show calls c')
self.assertEqual('100', candidates)
candidates = cmd2.complete(['1'], '1', 1, run=False, full_line='show calls c')
self.assertEqual('101', candidates)
if __name__ == '__main__':
unittest.main()
| 3.265625 | 3 |
pints/tests/test_toy_fitzhugh_nagumo_model.py | lisaplag/pints | 0 | 12785598 | #!/usr/bin/env python3
#
# Tests if the Fitzhugh-Nagumo toy model runs.
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
import unittest
import pints
import pints.toy
import numpy as np
class TestFitzhughNagumoModel(unittest.TestCase):
"""
Tests if the Fitzhugh-Nagumo toy model runs.
"""
def test_run(self):
# Test basic properties
model = pints.toy.FitzhughNagumoModel()
self.assertEqual(model.n_parameters(), 3)
self.assertEqual(model.n_outputs(), 2)
# Test simulation
x = model.suggested_parameters()
times = model.suggested_times()
values = model.simulate(x, times)
self.assertEqual(values.shape, (len(times), 2))
# Simulation with sensitivities
values, dvalues_dp = model.simulateS1(x, times)
self.assertEqual(values.shape, (len(times), 2))
self.assertEqual(dvalues_dp.shape, (len(times), 2, 3))
# Test alternative starting position
model = pints.toy.FitzhughNagumoModel([0.1, 0.1])
values = model.simulate(x, times)
self.assertEqual(values.shape, (len(times), 2))
# Times can't be negative
times = [-1, 2, 3, 4]
self.assertRaises(ValueError, model.simulate, x, times)
# Initial value must have size 2
pints.toy.FitzhughNagumoModel([1, 1])
self.assertRaises(ValueError, pints.toy.FitzhughNagumoModel, [1])
def test_values(self):
# value-based tests of Fitzhugh-Nagumo model
parameters = [0.2, 0.4, 2.5]
y0 = [-2, 1.5]
times = np.linspace(0, 20, 201)
model = pints.toy.FitzhughNagumoModel(y0)
values = model.simulate(parameters, times)
self.assertAlmostEqual(values[200, 0], 1.675726, places=6)
self.assertAlmostEqual(values[200, 1], -0.226142, places=6)
def test_sensitivities(self):
# compares sensitivities against standards
model = pints.toy.FitzhughNagumoModel([2, 3])
parameters = [0.2, 0.7, 2.8]
# Test with initial point t=0 included in range
sols, sens = model.simulateS1(parameters, [0, 7, 12])
self.assertAlmostEqual(sens[1, 0, 2], 5.01378, 5)
self.assertAlmostEqual(sens[2, 1, 1], 0.82883, 4)
# Test without initial point in range
sols, sens = model.simulateS1(parameters, [7, 12])
self.assertAlmostEqual(sens[0, 0, 2], 5.01378, 5)
self.assertAlmostEqual(sens[1, 1, 1], 0.82883, 4)
# Test without any points in range
sols, sens = model.simulateS1(parameters, [])
self.assertEqual(sols.shape, (0, 2))
self.assertEqual(sens.shape, (0, 2, 3))
if __name__ == '__main__':
unittest.main()
| 3.078125 | 3 |
gen_asm_reljump.py | orsonteodoro/tsha | 0 | 12785599 | #!/usr/bin/python3
#
# Near Jump, Jump Table Generator for sha256b
#
# Copyright (c) 2021-2022 <NAME> <<EMAIL>>. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
answer = ""
L = []
def gen_insert_W_byte_sse2_table_sha256():
global answer
line = 2000
mmi = 3
jt_label = "insert_byte_sse2_jt\\@"
for bi in range(256):
L.append(line)
if bi <= 239:
block = \
".L" + str(line) + "\\@:" \
" insert_byte " + str(bi % 16) + ",\\c,xmm" + str(int(bi / 16)) + ",xmm15\n" \
" jmp .Llast\\@\n"
line += 1
answer = answer + block
if bi >= 240:
if bi % 8 == 0:
mmi += 1
block = \
".L" + str(line) + "\\@:" \
" insert_byte_alt " + str(bi % 8) +",\\c,mm" + str(mmi) + ",\\gpr0q,\\gpr0l,\\gpr1q\n" \
" jmp .Llast\\@\n"
line += 1
answer = answer + block
print("/* Insertion jump table for SSE2 */")
print(".section .rodata")
print(".align 4")
print(jt_label + ":")
for e in L:
print(".long .L" + str(e) + "\\@-" + jt_label)
print(".long .L" + str(line) + "\\@-" + jt_label)
print(".text")
answer = answer.replace("last",str(line))
print(answer)
print(".L" + str(line) + "\\@:")
def gen_insert_W_byte_sse4_1_table_sha512_256():
global answer
line = 2500
jt_label = "insert_byte_sse4_1_jt\\@"
for bi in range(256):
L.append(line)
block = \
".L" + str(line) + "\\@:" \
" insert_byte " + str(bi % 16) + ",\\c,xmm" + str(int(bi / 16)) + "\n" \
" jmp .Llast\\@\n"
line += 1
answer = answer + block
print("/* Insertion jump table for SSE4.1 */")
print(".section .rodata")
print(".align 4")
print(jt_label + ":")
for e in L:
print(".long .L" + str(e) + "\\@-" + jt_label)
print(".long .L" + str(line) + "\\@-" + jt_label)
print(".text")
answer = answer.replace("last",str(line))
print(answer)
print(".L" + str(line) + "\\@:")
def gen_insert_W_byte_sse2_table_sha512_256():
global answer
line = 2000
mmi = 3
jt_label = "insert_byte_sse2_jt\\@"
for bi in range(256):
L.append(line)
if bi <= 239:
block = \
".L" + str(line) + "\\@:" \
" insert_byte " + str(bi % 16) + ",\\c,xmm" + str(int(bi / 16)) + ",xmm15\n" \
" jmp .Llast\\@\n"
line += 1
answer = answer + block
if bi >= 240:
if bi % 8 == 0:
mmi += 1
block = \
".L" + str(line) + "\\@:" \
" insert_byte_alt " + str(bi % 8) +",\\c,mm" + str(mmi) + ",\\gpr0q,\\gpr0l,\\gpr1q\n" \
" jmp .Llast\\@\n"
line += 1
answer = answer + block
print("/* Insertion jump table for SSE2 */")
print(".section .rodata")
print(".align 4")
print(jt_label + ":")
for e in L:
print(".long .L" + str(e) + "\\@-" + jt_label)
print(".long .L" + str(line) + "\\@-" + jt_label)
print(".text")
answer = answer.replace("last",str(line))
print(answer)
print(".L" + str(line) + "\\@:")
def gen_insert_W_byte_sse4_1_table_sha256():
global answer
line = 2500
jt_label = "insert_byte_sse4_1_jt\\@"
for bi in range(256):
L.append(line)
block = \
".L" + str(line) + "\\@:" \
" insert_byte " + str(bi % 16) + ",\\c,xmm" + str(int(bi / 16)) + "\n" \
" jmp .Llast\\@\n"
line += 1
answer = answer + block
print("/* Insertion jump table for SSE4.1 */")
print(".section .rodata")
print(".align 4")
print(jt_label + ":")
for e in L:
print(".long .L" + str(e) + "\\@-" + jt_label)
print(".long .L" + str(line) + "\\@-" + jt_label)
print(".text")
answer = answer.replace("last",str(line))
print(answer)
print(".L" + str(line) + "\\@:")
def gen_get_w_sse4_1_table_sha256():
global answer
line = 3000
jt_label = "get_w_sse4_1_jt\\@"
for wi in range(64):
L.append(line)
block = \
".L" + str(line) + "\\@:" \
" _get_w \\w," + str(wi % 4) + ",xmm" + str(int(wi / 4)) + "\n" \
" jmp .Llast\\@\n"
line += 1
answer = answer + block
print("/* Get wi jump table for SSE4.1 */")
print(".section .rodata")
print(".align 4")
print(jt_label + ":")
for e in L:
print(".long .L" + str(e) + "\\@-" + jt_label)
print(".long .L" + str(line) + "\\@-" + jt_label)
print(".text")
answer = answer.replace("last",str(line))
print(answer)
print(".L" + str(line) + "\\@:")
def gen_set_w_sse4_1_table_sha256():
global answer
line = 3500
jt_label = "set_w_sse4_1_jt\\@"
for wi in range(64):
L.append(line)
block = \
".L" + str(line) + "\\@:" \
" _set_w \\w," + str(wi % 4) + ",xmm" + str(int(wi / 4)) + "\n" \
" jmp .Llast\\@\n"
line += 1
answer = answer + block
print("/* Set wi jump table for SSE4.1 */")
print(".section .rodata")
print(".align 4")
print(jt_label + ":")
for e in L:
print(".long .L" + str(e) + "\\@-" + jt_label)
print(".long .L" + str(line) + "\\@-" + jt_label)
print(".text")
answer = answer.replace("last",str(line))
print(answer)
print(".L" + str(line) + "\\@:")
def gen_get_w_sse2_table_sha256():
global answer
line = 4000
mmi = 3
jt_label = "get_w_sse2_jt\\@"
for wi in range(64):
block = ""
L.append(line)
if wi <= 59:
block = \
".L" + str(line) + "\\@:" \
" _get_w \\w," + str((wi % 4)*4) + ",xmm" + str(int(wi / 4)) + ",xmm15\n" \
" jmp .Llast\\@\n"
if wi >= 60:
parity = wi % 2
if parity == 0:
mmi += 1
block = \
".L" + str(line) + "\\@:" \
" _get_w_alt_c0 \\w,mm" + str(mmi) + "\n" \
" jmp .Llast\\@\n"
else:
block = \
".L" + str(line) + "\\@:" \
" _get_w_alt_c1 \\w,mm" + str(mmi) + ",\\gpr0q,\\gpr0l\n" \
" jmp .Llast\\@\n"
answer = answer + block
line += 1
print("/* Get wi jump table for SSE2 */")
print(".section .rodata")
print(".align 4")
print(jt_label + ":")
for e in L:
print(".long .L" + str(e) + "\\@-" + jt_label)
print(".long .L" + str(line) + "\\@-" + jt_label)
print(".text")
answer = answer.replace("last",str(line))
print(answer)
print(".L" + str(line) + "\\@:")
def gen_set_w_sse2_table_sha256():
global answer
line = 4500
mmi = 3
jt_label = "set_w_sse2_jt\\@"
for wi in range(64):
block = ""
L.append(line)
if wi <= 59:
block = \
".L" + str(line) + "\\@:" \
" _set_w \\w," + str((wi % 4)*4) + ",xmm" + str(int(wi / 4)) + ",xmm15,\\gpr0l\n" \
" jmp .Llast\\@\n"
if wi >= 60:
parity = wi % 2
if parity == 0:
mmi += 1
block = \
".L" + str(line) + "\\@:" \
" _set_w_alt \\w,0,mm" + str(mmi) + ",\\gpr0q,\\gpr0l,\\gpr1q\n" \
" jmp .Llast\\@\n"
else:
block = \
".L" + str(line) + "\\@:" \
" _set_w_alt \\w,32,mm" + str(mmi) + ",\\gpr0q,\\gpr0l,\\gpr1q\n" \
" jmp .Llast\\@\n"
answer = answer + block
line += 1
print("/* Set wi jump table for SSE2 */")
print(".section .rodata")
print(".align 4")
print(jt_label + ":")
for e in L:
print(".long .L" + str(e) + "\\@-" + jt_label)
print(".long .L" + str(line) + "\\@-" + jt_label)
print(".text")
answer = answer.replace("last",str(line))
print(answer)
print(".L" + str(line) + "\\@:")
###############################################################################################
def gen_get_w_sse4_1_table_sha512_256():
global answer
line = 3000
jt_label = "get_w_sse4_1_jt\\@"
for wi in range(64):
L.append(line)
block = \
".L" + str(line) + "\\@:" \
" _get_w \\w," + str(wi % 4) + ",xmm" + str(int(wi / 4)) + "\n" \
" jmp .Llast\\@\n"
line += 1
answer = answer + block
print("/* Get wi jump table for SSE4.1 */")
print(".section .rodata")
print(".align 4")
print(jt_label + ":")
for e in L:
print(".long .L" + str(e) + "\\@-" + jt_label)
print(".long .L" + str(line) + "\\@-" + jt_label)
print(".text")
answer = answer.replace("last",str(line))
print(answer)
print(".L" + str(line) + "\\@:")
def gen_set_w_sse4_1_table_sha512_256():
global answer
line = 3500
jt_label = "set_w_sse4_1_jt\\@"
for wi in range(64):
L.append(line)
block = \
".L" + str(line) + "\\@:" \
" _set_w \\w," + str(wi % 4) + ",xmm" + str(int(wi / 4)) + "\n" \
" jmp .Llast\\@\n"
line += 1
answer = answer + block
print("/* Set wi jump table for SSE4.1 */")
print(".section .rodata")
print(".align 4")
print(jt_label + ":")
for e in L:
print(".long .L" + str(e) + "\\@-" + jt_label)
print(".long .L" + str(line) + "\\@-" + jt_label)
print(".text")
answer = answer.replace("last",str(line))
print(answer)
print(".L" + str(line) + "\\@:")
def gen_get_w_sse2_table_sha512_256():
global answer
line = 4000
mmi = 3
jt_label = "get_w_sse2_jt\\@"
for wi in range(64):
block = ""
L.append(line)
if wi <= 59:
block = \
".L" + str(line) + "\\@:" \
" _get_w \\w," + str((wi % 4)*4) + ",xmm" + str(int(wi / 4)) + ",xmm15\n" \
" jmp .Llast\\@\n"
if wi >= 60:
parity = wi % 2
if parity == 0:
mmi += 1
block = \
".L" + str(line) + "\\@:" \
" _get_w_alt_c0 \\w,mm" + str(mmi) + "\n" \
" jmp .Llast\\@\n"
else:
block = \
".L" + str(line) + "\\@:" \
" _get_w_alt_c1 \\w,mm" + str(mmi) + ",\\gpr0q,\\gpr0l\n" \
" jmp .Llast\\@\n"
answer = answer + block
line += 1
print("/* Get wi jump table for SSE2 */")
print(".section .rodata")
print(".align 4")
print(jt_label + ":")
for e in L:
print(".long .L" + str(e) + "\\@-" + jt_label)
print(".long .L" + str(line) + "\\@-" + jt_label)
print(".text")
answer = answer.replace("last",str(line))
print(answer)
print(".L" + str(line) + "\\@:")
def gen_set_w_sse2_table_sha512_256():
global answer
line = 4500
mmi = 3
jt_label = "set_w_sse2_jt\\@"
for wi in range(64):
block = ""
L.append(line)
if wi <= 59:
block = \
".L" + str(line) + "\\@:" \
" _set_w \\w," + str((wi % 4)*4) + ",xmm" + str(int(wi / 4)) + ",xmm15,\\gpr0l\n" \
" jmp .Llast\\@\n"
if wi >= 60:
parity = wi % 2
if parity == 0:
mmi += 1
block = \
".L" + str(line) + "\\@:" \
" _set_w_alt \\w,0,mm" + str(mmi) + ",\\gpr0q,\\gpr0l,\\gpr1q\n" \
" jmp .Llast\\@\n"
else:
block = \
".L" + str(line) + "\\@:" \
" _set_w_alt \\w,32,mm" + str(mmi) + ",\\gpr0q,\\gpr0l,\\gpr1q\n" \
" jmp .Llast\\@\n"
answer = answer + block
line += 1
print("/* Set wi jump table for SSE2 */")
print(".section .rodata")
print(".align 4")
print(jt_label + ":")
for e in L:
print(".long .L" + str(e) + "\\@-" + jt_label)
print(".long .L" + str(line) + "\\@-" + jt_label)
print(".text")
answer = answer.replace("last",str(line))
print(answer)
print(".L" + str(line) + "\\@:")
def main():
print("---")
# gen_insert_W_byte_sse4_1_table_sha256()
print("---")
# gen_insert_W_byte_sse2_table_sha256()
print("---")
# gen_insert_W_byte_sse4_1_table_sha512_256()
print("---")
gen_insert_W_byte_sse2_table_sha512_256()
print("---")
# gen_get_w_sse4_1_table_sha256()
print("---")
# gen_set_w_sse4_1_table_sha256()
print("---")
# gen_get_w_sse2_table_sha256()
print("---")
# gen_set_w_sse2_table_sha256()
print("---")
# gen_get_w_sse4_1_table_sha512_256()
print("---")
# gen_set_w_sse4_1_table_sha512_256()
print("---")
# gen_get_w_sse2_table_sha512_256()
print("---")
# gen_set_w_sse2_table_sha512_256()
if __name__ == "__main__":
main()
| 2.5 | 2 |
evaluation/matthews_corrcoef_evaluation.py | AghilasSini/AT-Annotator | 0 | 12785600 | from sklearn.metrics import matthews_corrcoef
y_true = [+1, +1, +1, -1]
y_pred = [+1, -1, +1, +1]
print(matthews_corrcoef(y_true, y_pred))  # prints -0.33 (i.e. -1/3) for this toy example
| 1.8125 | 2 |
recipes/Python/473781_mthreadpy_version_2/recipe-473781.py | tdiprima/code | 2,023 | 12785601 |
# #include <windows.h>
import thread
# #include <math.h>
import math
# #include <stdio.h>
import sys
# #include <stdlib.h>
import time
# static int runFlag = TRUE;
runFlag = True
# void main(int argc, char *argv[]) {
def main(argc, argv):
global runFlag
# unsigned int runTime
# PYTHON: NO CODE
# SYSTEMTIME now;
# PYTHON: NO CODE
# WORD stopTimeMinute, stopTimeSecond;
# PYTHON: NO CODE
# // Get command line argument, N
try:
N = abs(int(argv[1]))
except:
sys.exit(1)
# // Get the time the threads should run, runtime
try:
runTime = abs(int(argv[2]))
except:
sys.exit(1)
# // Calculate time to halt (learn better ways to do this later)
# GetSystemTime(&now);
now = time.localtime()
# printf("mthread: Suite starting at system time
# %d:%d:%d\n", now.wHour, now.wMinute, now.wSecond);
sys.stdout.write('mthread: Suite starting at system time %d:%d:%d\n' \
% (now.tm_hour, now.tm_min, now.tm_sec))
# stopTimeSecond = (now.wSecond + (WORD) runTime) % 60;
stopTimeSecond = (now.tm_sec + runTime) % 60
# stopTimeMinute = now.wMinute + (now.wSecond +
# (WORD) runTime) / 60;
stopTimeMinute = now.tm_min + (now.tm_sec + runTime) / 60
# // For 1 to N
# for (i = 0; i < N; i++) {
for i in range(N):
# // Create a new thread to execute simulated word
thread.start_new_thread(threadWork, ())
# Sleep(100); // Let newly created thread run
time.sleep(0.1)
# }
# PYTHON: NO CODE
# // Cycle while children work ...
# while (runFlag) {
while runFlag:
# GetSystemTime(&now);
now = time.localtime()
# if ((now.wMinute >= stopTimeMinute)
# &&
# (now.wSecond >= stopTimeSecond)
# )
if now.tm_min >= stopTimeMinute \
and now.tm_sec >= stopTimeSecond:
# runFlag = FALSE;
runFlag = False
# Sleep(1000);
time.sleep(1)
# }
# PYTHON: NO CODE
# Sleep(5000);
time.sleep(5)
# }
# PYTHON: NO CODE
# // The code executed by each worker thread (simulated work)
# DWORD WINAPI threadWork(LPVOID threadNo) {
def threadWork():
threadNo = thread.get_ident()
# // Local variables
# double y;
# PYTHON: NO CODE
# const double x = 3.14159;
x = 3.14159
# const double e = 2.7183;
e = 2.7183
# int i;
# PYTHON: NO CODE
# const int napTime = 1000; // in milliseconds
napTime = 1000
# const int busyTime = 40000;
busyTime = 40000
# DWORD result = 0;
result = 0
# // Create load
# while (runFlag) {
while runFlag:
# // Parameterized processor burst phase
# for (i = 0; i < busyTime; i++)
for i in range(busyTime):
# y = pow(x, e);
y = math.pow(x, e)
# // Parameterized sleep phase
# Sleep(napTime);
time.sleep(napTime / 1000.0)
# // Write message to stdout
sys.stdout.write('Thread %s just woke up.\n' % threadNo)
# }
# PYTHON: NO CODE
# // Terminating
# return result;
return result
# }
# PYTHON: NO CODE
if __name__ == '__main__':
main(len(sys.argv), sys.argv)
| 3.109375 | 3 |
main.py | HD13sel/Second_Me | 0 | 12785602 | from chatterbot import ChatBot
from chatterbot.trainers import ListTrainer
from spacy.cli import download
download('en_core_web_sm')
class ENGSM:
ISO_639_1 = 'en_core_web_sm'
chatbot = ChatBot('Botencio', tagger_language=ENGSM)
conversa = [
'Olá',
'Eai',
'Como você está?',
'Estou bem, e você?',
'Estou bem também',
'Então, alguma novidade ai?',
'Sim, aprendendo várias coisas',
'Legal, tipo o que?',
'Hoje to aprendendo sobre chatbots.',
'Interessante!',
'Bastante, e você aprendendo algo?',
'Várias coisas também, mas são coisas relativas',
'Bacana, importante é manter o aprendizado constante né!',
'Isso mesmo.',
'Então é isso',
'Tamo ai'
]
trainer = ListTrainer(chatbot)
trainer.train(conversa)
# once trained, the bot can answer similar prompts, e.g. print(chatbot.get_response('Olá'))
| 2.71875 | 3 |
example_libs.py | LindsayYoung/Python-class-intro | 1 | 12785603 |
# get user input and set variables
number = raw_input("give me a whole number ")
size = raw_input("give me a size ")
noun = raw_input("give me a noun, please ")
adjective = raw_input("give me an adverb ")
# create the test by passing variables into strings
beginning = "You won't believe why I am late to work today! I woke up at my usual time- %s o'clock." % (number)
middle = "That is when things got weird. I saw a very %s %s, of course this would make me late." % (size, noun)
end = "But, now that I have %s coding skills. I will never be late again." % (adjective)
# print the variables that have the formatted text
print(beginning)
print(middle)
print(end)
| 4.15625 | 4 |
async_rx/observable/rx_map.py | geronimo-iia/async-rx | 4 | 12785604 | from inspect import iscoroutinefunction
from typing import Any, Callable, Optional
from ..protocol import Observable, Observer, Subscription, rx_observer_from
from .rx_create import rx_create
__all__ = ["rx_map"]
def rx_map(
observable: Observable, transform: Callable, expand_arg_parameters: Optional[bool] = False, expand_kwarg_parameters: Optional[bool] = False
) -> Observable:
"""Map operator.
Map operator modifies an Observable<A> into Observable<B> given a function with the type A->B.
For example, if we take the function x => 10 ∗ x and a list of 1,2,3. The result is 10,20,30, see figure 4.
Note that this function did not change the type of the Observable but did change the values.
Args:
observable (Observable): an observable instance
transform (Callable): transform function (sync or async)
expand_arg_parameters (Optional[bool]): if true each item will be expanded as args before call transform
(implique expand_kwarg_parameters = False).
expand_kwarg_parameters (Optional[bool]): if true each item will be expanded as kwargs before call transform.
Returns:
(Observable): observable instance
"""
_is_awaitable = iscoroutinefunction(transform)
async def _subscribe(an_observer: Observer) -> Subscription:
async def _on_next(item: Any):
nonlocal _is_awaitable
if expand_kwarg_parameters:
_next_item = await transform(**item) if _is_awaitable else transform(**item)
elif expand_arg_parameters:
_next_item = await transform(*item) if _is_awaitable else transform(*item)
else:
_next_item = await transform(item) if _is_awaitable else transform(item)
await an_observer.on_next(_next_item)
return await observable.subscribe(rx_observer_from(observer=an_observer, on_next=_on_next))
return rx_create(subscribe=_subscribe)
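# Usage sketch (assumes an Observable producing 1, 2, 3 built elsewhere with
# this package, e.g. via rx_create):
#
#     tens = rx_map(numbers, lambda x: 10 * x)
#     await tens.subscribe(an_observer)  # the observer receives 10, 20, 30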
| 2.8125 | 3 |
examples/zmq_trigger_experiment.py | mark-dawn/stytra | 0 | 12785605 |
from stytra import Stytra, Protocol
from stytra.stimulation.stimuli.visual import Pause, FullFieldVisualStimulus
from stytra.triggering import ZmqTrigger
class FlashProtocol(Protocol):
name = "flash protocol"
def __init__(self):
super().__init__()
self.add_params(period_sec=5., flash_duration=2.)
def get_stim_sequence(self):
stimuli = [
Pause(duration=self.params["period_sec"] - self.params["flash_duration"]),
FullFieldVisualStimulus(
duration=self.params["flash_duration"], color=(255, 255, 255)
),
]
return stimuli
if __name__ == "__main__":
# trigger = Crappy2PTrigger(r'C:\Users\lpetrucco\Desktop\dummydir')
# trigger.start()
trigger = ZmqTrigger(port="5555")
st = Stytra(
protocols=[FlashProtocol],
trigger=trigger,
directory=r"C:\Users\portugueslab\Desktop\metadata",
)
# trigger.terminate_event.set()
# print('terminating')
# trigger.join()
| 2.21875 | 2 |
python/testData/resolve/multiFile/dunderAllDynamicallyBuiltInHelperFunction/pkg/submod.py | Sajaki/intellij-community | 2 | 12785606 | __all__ = ['bar']
bar = 'bar'
| 1.21875 | 1 |
server/opendp_apps/communication/email_client.py | opendifferentialprivacy/opendp-ux | 6 | 12785607 | import os
from sendgrid import sendgrid, Email, Content, Mail, To
from django.conf import settings
class SendGridAPIError(Exception):
pass
class EmailClient(object):
def __init__(self, from_email=None, api_key=None):
self.from_email = from_email if from_email else settings.DEFAULT_FROM_EMAIL
if not api_key:
try:
# Can't just use .get() here because the key may be an empty string, which would be returned
self.api_key = os.environ['SENDGRID_API_KEY'] \
if os.environ.get('SENDGRID_API_KEY') \
else 'sendgrid-api-key-not-set'
except KeyError:
raise SendGridAPIError("SENDGRID_API_KEY must be passed as an argument or"
" set as an environment variable")
else:
self.api_key = api_key
self.sendgrid_client = sendgrid.SendGridAPIClient(self.api_key)
def send(self, to_email=None, subject=None, content=None, content_type=None):
from_email = Email(self.from_email)
to_email = To(to_email)
content = Content(content_type, content)
mail = Mail(from_email, to_email, subject, content)
return self.sendgrid_client.client.mail.send.post(request_body=mail.get())
if __name__ == '__main__':
# apikey_message = "Current API Key: " + os.environ.get('SENDGRID_API_KEY')
# print("-"*(len(apikey_message)+1))
# print("Current API Key: ", os.environ.get('SENDGRID_API_KEY'))
c = EmailClient()
print("From Email: ", c.from_email)
# print("-"*(len(apikey_message)+1))
result = c.send(to_email='<EMAIL>', subject='test test',
content='hi', content_type='text/plain')
print("Message Sent")
print("Status Code: ", result.status_code)
# print("-"*(len(apikey_message)+1))
| 2.328125 | 2 |
code/TkGui.py | briansune/python-smith-chart-antenna-matching | 1 | 12785608 | import skrf
import tkinter as tk
from matplotlib.figure import Figure
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
import numpy as np
import CircuitFig
from PIL import ImageTk, Image, ImageDraw
import io
import MatchCal
l2z = lambda l: l[0] + 1j * l[1]
s4cmp = lambda sf: 'nH' if sf == 'l' else 'pF'
def ld4img2gui(label: tk.Label,
color: str, stage: int, sh_se: bool,
cmp_l: list, cmp_v: list, z_val: str = '50+0j',
valid: bool = True):
cr_cfg = CircuitFig.CircuitFig(color, stage, sh_se, cmp_l, cmp_v, z_val)
image = Image.open(io.BytesIO(cr_cfg.image_data)).resize((300, 180), Image.ANTIALIAS)
im = Image.new('RGBA', (300, 180), (255, 255, 255, 255))
draw = ImageDraw.Draw(im)
im.paste(image, (0, 0))
if not valid:
draw.line((0, 0, 300, 180), fill=(255, 0, 0, 255), width=5)
draw.line((0, 180, 300, 0), fill=(255, 0, 0, 255), width=5)
label.image = ImageTk.PhotoImage(im)
label.configure(image=label.image)
class TkGui:
def __init__(self, master):
self.master = master
self.top_frame = tk.Frame(self.master)
self.top_frame.pack(side=tk.LEFT)
self.right_frame = tk.Frame(self.master)
self.right_frame.pack(side=tk.LEFT, fill=tk.BOTH)
self.upper_sch_f = tk.Frame(self.right_frame)
self.upper_sch_f.grid(row=0, padx=(0, 5), pady=(5, 0), sticky="nsew")
self.lower_ety_f = tk.Frame(self.right_frame)
self.lower_ety_f.grid(row=1, padx=(0, 5), pady=(0, 5), sticky="nsew")
self.fig = Figure(figsize=(5, 6), dpi=100)
self.fig_cvs = FigureCanvasTkAgg(self.fig, master=self.top_frame)
self.ax: Figure = self.fig.gca()
self.fig_cvs.get_tk_widget().pack(side=tk.LEFT, padx=5, pady=5)
try:
with open('ring slot.s1p', 'r'):
pass
except IOError:
with open('ring slot.s1p', 'a+') as wf:
wf.write("""!Created with skrf (http://scikit-rf.org).
# GHz S RI R 50.0
!freq ReS11 ImS11
75.0 -0.503723180993 0.457844804761""")
self.my_slot = skrf.Network('ring slot.s1p')
self.to_match_z = [50, 0]
self.ser_match_z = [50, 0]
self.shu_match_z = [50, 0]
self.shu_ser_match_z_a = [50, 0]
self.shu_ser_match_z_b = [50, 0]
self.ser_shu_match_z_a = [50, 0]
self.ser_shu_match_z_b = [50, 0]
self.plt_z0 = 50 + 0j
self.plt_freq = 2.45e9
self.up2chart()
self.lb1 = tk.Label(self.upper_sch_f, relief="ridge")
self.lb1_tit = tk.Label(
self.upper_sch_f, text='Shunt Matching', relief="raised").grid(
row=0, column=0, sticky="nsew")
self.lb1.grid(row=1, column=0)
self.lb2 = tk.Label(self.upper_sch_f, relief="ridge")
self.lb2_tit = tk.Label(
self.upper_sch_f, text='Series Matching', relief="raised").grid(
row=0, column=1, sticky="nsew")
self.lb2.grid(row=1, column=1)
self.lb3 = tk.Label(self.upper_sch_f, relief="ridge")
self.lb3_tit = tk.Label(
self.upper_sch_f, text='Shunt-Series Matching', relief="raised").grid(
row=2, column=0, sticky="nsew")
self.lb3.grid(row=3, column=0)
self.lb4 = tk.Label(self.upper_sch_f, relief="ridge")
self.lb4_tit = tk.Label(
self.upper_sch_f, text='Shunt-Series Matching', relief="raised").grid(
row=2, column=1, sticky="nsew")
self.lb4.grid(row=3, column=1)
self.lb5 = tk.Label(self.upper_sch_f, relief="ridge")
self.lb5_tit = tk.Label(
self.upper_sch_f, text='Series-Shunt Matching', relief="raised").grid(
row=4, column=0, sticky="nsew")
self.lb5.grid(row=5, column=0)
self.lb6 = tk.Label(self.upper_sch_f, relief="ridge")
self.lb6_tit = tk.Label(
self.upper_sch_f, text='Series-Shunt Matching', relief="raised").grid(
row=4, column=1, sticky="nsew")
self.lb6.grid(row=5, column=1)
ld4img2gui(self.lb1, 'b', 1, False, ['c', 'l', 'c'], ['NC', 'SHORT', ''])
ld4img2gui(self.lb2, 'y', 1, True, ['c', 'l', 'c'], ['', 'SHORT', ''])
ld4img2gui(self.lb3, 'g', 2, False, ['c', 'l', 'c'], ['NC', 'SHORT', ''])
ld4img2gui(self.lb4, 'purple', 2, False, ['c', 'l', 'c'], ['NC', 'SHORT', ''])
ld4img2gui(self.lb5, 'orange', 2, True, ['c', 'l', 'c'], ['', 'SHORT', 'NC'])
ld4img2gui(self.lb6, 'brown', 2, True, ['c', 'l', 'c'], ['', 'SHORT', 'NC'])
###################################################################
self.to_match_r = tk.StringVar(value=str(self.to_match_z[0]))
self.to_match_i = tk.StringVar(value=str(self.to_match_z[1]))
self.ety_lb1 = tk.Label(self.lower_ety_f, text='To Match Complex Value')
self.ety_lb1.pack(side=tk.TOP)
self.ety_lb1b = tk.Label(self.lower_ety_f, text='Z = ')
self.ety_lb1b.pack(side=tk.LEFT)
self.ety1_r = tk.Entry(self.lower_ety_f, textvariable=self.to_match_r)
self.ety1_r.pack(side=tk.LEFT)
self.ety_lb1c = tk.Label(self.lower_ety_f, text=' + ')
self.ety_lb1c.pack(side=tk.LEFT)
self.ety1_i = tk.Entry(self.lower_ety_f, textvariable=self.to_match_i)
self.ety1_i.pack(side=tk.LEFT)
self.ety_lb1c = tk.Label(self.lower_ety_f, text='j')
self.ety_lb1c.pack(side=tk.LEFT)
self.enter = tk.Button(self.lower_ety_f, text="Start Auto Solver",
command=self.ld2chart)
self.enter.pack(side=tk.LEFT)
def ld2chart(self):
self.to_match_z = [float(self.ety1_r.get()), float(self.ety1_i.get())]
tmp_cal = MatchCal.MatchCal()
tmp_cal.tar_freq = self.plt_freq
to_mat = float(self.ety1_r.get()) + 1j * float(self.ety1_i.get())
tmp_cal.shu_0_sol(to_mat)
disp_str = f'{tmp_cal.shu:.2f} {s4cmp(tmp_cal.shu_t)}' if tmp_cal.shu else 'NC'
ld4img2gui(self.lb1, 'b', 1, False, [tmp_cal.shu_t, 'l', 'c'],
[disp_str, 'SHORT', ''],
f'{int(tmp_cal.tmp_z.real)}+{int(tmp_cal.tmp_z.imag)}j',
tmp_cal.sol_valid)
self.ser_match_z = [tmp_cal.tmp_z.real, tmp_cal.tmp_z.imag]
tmp_cal.ser_0_sol(to_mat)
disp_str = f'{tmp_cal.ser:.2f} {s4cmp(tmp_cal.ser_t)}' if tmp_cal.ser else 'SHORT'
ld4img2gui(self.lb2, 'y', 1, True, ['c', tmp_cal.ser_t, 'c'],
['', disp_str, ''],
f'{int(tmp_cal.tmp_z.real)}+{int(tmp_cal.tmp_z.imag)}j',
tmp_cal.sol_valid)
self.shu_match_z = [tmp_cal.tmp_z.real, tmp_cal.tmp_z.imag]
tmp_cal.sol_2stage(to_mat, True)
disp_str1 = f'{tmp_cal.ser:.2f} {s4cmp(tmp_cal.ser_t)}' if tmp_cal.ser else 'SHORT'
disp_str2 = f'{tmp_cal.shu:.2f} {s4cmp(tmp_cal.shu_t)}' if tmp_cal.shu else 'NC'
ld4img2gui(self.lb3, 'g', 2, False, [tmp_cal.shu_t, tmp_cal.ser_t, 'c'],
[disp_str2, disp_str1, ''],
f'{int(tmp_cal.tmp_z.real)}+{int(tmp_cal.tmp_z.imag)}j',
tmp_cal.sol_valid)
self.shu_ser_match_z_a = [tmp_cal.tmp_z.real, tmp_cal.tmp_z.imag]
tmp_cal.sol_2stage(to_mat, True, True)
disp_str1 = f'{tmp_cal.ser:.2f} {s4cmp(tmp_cal.ser_t)}' if tmp_cal.ser else 'SHORT'
disp_str2 = f'{tmp_cal.shu:.2f} {s4cmp(tmp_cal.shu_t)}' if tmp_cal.shu else 'NC'
ld4img2gui(self.lb4, 'purple', 2, False, [tmp_cal.shu_t, tmp_cal.ser_t, 'c'],
[disp_str2, disp_str1, ''],
f'{int(tmp_cal.tmp_z.real)}+{int(tmp_cal.tmp_z.imag)}j',
tmp_cal.sol_valid)
self.shu_ser_match_z_b = [tmp_cal.tmp_z.real, tmp_cal.tmp_z.imag]
tmp_cal.sol_2stage(to_mat)
disp_str1 = f'{tmp_cal.ser:.2f} {s4cmp(tmp_cal.ser_t)}' if tmp_cal.ser else 'SHORT'
disp_str2 = f'{tmp_cal.shu:.2f} {s4cmp(tmp_cal.shu_t)}' if tmp_cal.shu else 'NC'
ld4img2gui(self.lb5, 'orange', 2, True, ['c', tmp_cal.ser_t, tmp_cal.shu_t],
['', disp_str1, disp_str2],
f'{int(tmp_cal.tmp_z.real)}+{int(tmp_cal.tmp_z.imag)}j',
tmp_cal.sol_valid)
self.ser_shu_match_z_a = [tmp_cal.tmp_z.real, tmp_cal.tmp_z.imag]
tmp_cal.sol_2stage(to_mat, ans_sel=True)
disp_str1 = f'{tmp_cal.ser:.2f} {s4cmp(tmp_cal.ser_t)}' if tmp_cal.ser else 'SHORT'
disp_str2 = f'{tmp_cal.shu:.2f} {s4cmp(tmp_cal.shu_t)}' if tmp_cal.shu else 'NC'
ld4img2gui(self.lb6, 'brown', 2, True, ['c', tmp_cal.ser_t, tmp_cal.shu_t],
['', disp_str1, disp_str2],
f'{int(tmp_cal.tmp_z.real)}+{int(tmp_cal.tmp_z.imag)}j',
tmp_cal.sol_valid)
self.ser_shu_match_z_b = [tmp_cal.tmp_z.real, tmp_cal.tmp_z.imag]
self.up2chart()
def up2chart(self):
self.ax.clear()
self.fig2gui(np.array([[[l2z(self.to_match_z)]]]), 'To Match', 'r', 's')
self.fig2gui(np.array([[[l2z(self.ser_match_z)]]]), 'After Match', 'b', 'o')
self.fig2gui(np.array([[[l2z(self.shu_match_z)]]]), 'After Match', 'y', 'o')
self.fig2gui(np.array([[[l2z(self.shu_ser_match_z_a)]]]), 'After Match', 'g', 'o')
self.fig2gui(np.array([[[l2z(self.shu_ser_match_z_b)]]]), 'After Match', 'purple', 'o')
self.fig2gui(np.array([[[l2z(self.ser_shu_match_z_a)]]]), 'After Match', 'orange', 'o')
self.fig2gui(np.array([[[l2z(self.ser_shu_match_z_b)]]]), 'After Match', 'brown', 'o')
    def fig2gui(self, plt_data: np.ndarray,
label: str = '', color: str = 'r', mark: str = 's',
plt_sel: bool = False) -> None:
self.my_slot.frequency = self.plt_freq
self.my_slot.z0 = self.plt_z0
self.my_slot.z = plt_data
if plt_sel:
self.my_slot.plot_s_db(ax=self.ax)
else:
self.my_slot.plot_s_smith(ax=self.ax, draw_labels=True, show_legend=False,
label=label, color=color, chart_type='zy', marker=mark)
self.ax.legend(bbox_to_anchor=(0.5, 1.05), loc='lower center', ncol=3,
fancybox=True, shadow=True)
self.fig_cvs.draw()
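# Launch sketch (illustrative): TkGui only populates the `master` container it
# is given, so a minimal entry point just hands it a Tk root window. The module
# does not ship its own __main__ guard; this wiring is an assumption.
if __name__ == '__main__':
    root = tk.Tk()
    root.title('Smith Chart Matching')
    TkGui(root)
    root.mainloop()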
| 2.1875 | 2 |
target_history_findings.py | Probely/API_Scripts | 7 | 12785609 | #!/usr/bin/env python
"""
Create an overview file of the target finding history
This example is for python 3.5
"""
import argparse
import csv
import getpass
from collections import OrderedDict
import requests
from urllib.parse import urljoin
api_base_url = "https://api.probely.com"
auth_endpoint = urljoin(api_base_url, "auth-obtain/")
target_detail_endpoint = urljoin(api_base_url, "targets/{target}/")
scan_list_endpoint = urljoin(api_base_url, "targets/{target}/scans/")
finding_list_endpoint = urljoin(api_base_url, "targets/{target}/findings/")
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("target", help="Target id")
parser.add_argument("output", help="Output file")
parser.add_argument('--limit',
help='Limit number of assessments (default: 5)',
default=5,
type=int)
args = parser.parse_args()
username = input("Username: ")
    password = getpass.getpass()
# Get login token
response = requests.post(auth_endpoint,
data={'username': username, 'password': password})
response.raise_for_status()
token = response.json()['token']
headers = {'Authorization': "JWT {}".format(token)}
# Scans
response = requests.get(
scan_list_endpoint.format(target=args.target),
headers=headers,
params={'ordering': '-started', 'length': args.limit}
)
response.raise_for_status()
scans = response.json()['results']
extra = response.json()
extra.pop('results')
# Findings
response = requests.get(
finding_list_endpoint.format(target=args.target),
headers=headers,
params={'length': 100}
)
response.raise_for_status()
page_total = response.json()['page_total']
findings = response.json()['results']
extra = response.json()
extra.pop('results')
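    # NOTE: only the first page of findings (length=100) is fetched above;
    # 'page_total' is available if paging through all results is needed.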
scan_map = OrderedDict(((scan['id'], idx)
for idx, scan in enumerate(scans)))
scan_dict = OrderedDict(((scan['id'], False) for scan in scans))
with open(args.output, 'w', newline='') as handler:
fieldnames = ['finding_id', 'ovi'] + list(scan_map.keys())
writer = csv.DictWriter(handler, fieldnames=fieldnames)
writer.writeheader()
for finding in findings:
present = scan_dict.copy()
for scan_id in finding['scans']:
if scan_id in present:
present[scan_id] = True
row = OrderedDict(
[('finding_id', finding['id']),
('ovi', finding['definition']['name'])]
+ list(present.items())
)
writer.writerow(row)
| 2.84375 | 3 |
podcomm/radio.py | badgerpapa/omnipy | 0 | 12785610 | <reponame>badgerpapa/omnipy<filename>podcomm/radio.py
import threading
from .exceptions import ProtocolError, RileyLinkError, TransmissionOutOfSyncError
from podcomm import crc
from podcomm.rileylink import RileyLink
from .message import Message, MessageState
from .packet import Packet
from .definitions import *
class Radio:
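    '''
    Drives the packet-level radio conversation with the pod over a RileyLink:
    splits messages into packets, handles ACK/CON continuation packets and
    keeps the message and packet sequence numbers in sync.
    '''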
def __init__(self, msg_sequence=0, pkt_sequence=0):
self.stopRadioEvent = threading.Event()
self.messageSequence = msg_sequence
self.packetSequence = pkt_sequence
self.lastPacketReceived = None
self.logger = getLogger()
self.rileyLink = RileyLink()
self.last_packet_received = None
def send_request_get_response(self, message, stay_connected=True):
try:
return self._send_request_get_response(message, stay_connected)
except TransmissionOutOfSyncError:
raise
except Exception:
self.rileyLink.disconnect(ignore_errors=True)
raise
def disconnect(self):
try:
self.rileyLink.disconnect(ignore_errors=True)
except Exception as e:
self.logger.warning("Error while disconnecting %s" % str(e))
def _send_request_get_response(self, message, stay_connected=True):
try:
return self._send_request(message)
except TransmissionOutOfSyncError:
self.logger.warning("Transmission out of sync, radio needs resyncing")
raise
finally:
if not stay_connected:
self.rileyLink.disconnect()
def _send_request(self, message):
message.setSequence(self.messageSequence)
self.logger.debug("SENDING MSG: %s" % message)
packets = message.getPackets()
received = None
packet_index = 1
packet_count = len(packets)
for packet in packets:
if packet_index == packet_count:
expected_type = "POD"
else:
expected_type = "ACK"
received = self._exchange_packets(packet, expected_type)
if received is None:
raise ProtocolError("Timeout reached waiting for a response.")
if received.type != expected_type:
raise ProtocolError("Invalid response received. Expected type %s, received %s"
% (expected_type, received.type))
packet_index += 1
pod_response = Message.fromPacket(received)
while pod_response.state == MessageState.Incomplete:
ack_packet = Packet.Ack(message.address, False)
received = self._exchange_packets(ack_packet, "CON")
if received is None:
raise ProtocolError("Timeout reached waiting for a response.")
if received.type != "CON":
raise ProtocolError("Invalid response received. Expected type CON, received %s" % received.type)
pod_response.addConPacket(received)
if pod_response.state == MessageState.Invalid:
raise ProtocolError("Received message is not valid")
self.logger.debug("RECEIVED MSG: %s" % pod_response)
self.logger.debug("Sending end of conversation")
ack_packet = Packet.Ack(message.address, True)
self._send_packet(ack_packet)
self.logger.debug("Conversation ended")
self.messageSequence = (pod_response.sequence + 1) % 16
return pod_response
def _exchange_packets(self, packet_to_send, expected_type):
packet_to_send.setSequence(self.packetSequence)
expected_sequence = (self.packetSequence + 1) % 32
expected_address = packet_to_send.address
send_retries = 3
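        # Note: only PDM sends consume a retry below; ACK/CON resends keep
        # looping until a response arrives or the radio raises.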
while send_retries > 0:
try:
self.logger.debug("SENDING PACKET EXP RESPONSE: %s" % packet_to_send)
data = packet_to_send.data
data += bytes([crc.crc8(data)])
if packet_to_send.type == "PDM":
send_retries -= 1
received = self.rileyLink.send_and_receive_packet(data, 0, 300, 300, 10, 80)
else:
received = self.rileyLink.send_and_receive_packet(data, 0, 20, 300, 10, 20)
if received is None:
self.logger.debug("Received nothing")
continue
p = self._get_packet(received)
if p is None:
self.logger.debug("Received illegal packet")
continue
if p.address != expected_address:
self.logger.debug("Received packet for a different address")
continue
if p.type != expected_type or p.sequence != expected_sequence:
if self.last_packet_received is not None:
if p.type == self.last_packet_received.type and \
p.sequence == self.last_packet_received.sequence:
self.logger.debug("Received previous response")
continue
self.logger.debug("Resynchronization requested")
self.packetSequence = (p.sequence + 1) % 32
self.messageSequence = 0
raise TransmissionOutOfSyncError()
self.packetSequence = (self.packetSequence + 2) % 32
self.last_packet_received = p
self.logger.debug("SEND AND RECEIVE complete")
return p
except RileyLinkError as rle:
raise ProtocolError("Radio error during send and receive") from rle
else:
raise ProtocolError("Exceeded retry count while send and receive")
def _send_packet(self, packetToSend):
packetToSend.setSequence(self.packetSequence)
try:
data = packetToSend.data
data += bytes([crc.crc8(data)])
while True:
self.logger.debug("SENDING FINAL PACKET: %s" % packetToSend)
received = self.rileyLink.send_and_receive_packet(data, 0, 20, 1000, 2, 40)
if received is None:
received = self.rileyLink.get_packet(2.5)
if received is None:
self.logger.debug("Silence has fallen")
break
p = self._get_packet(received)
if p is None:
self.logger.debug("Received illegal packet")
continue
if p.address != packetToSend.address:
self.logger.debug("Received packet for a different address")
continue
if self.last_packet_received is not None:
if p.type == self.last_packet_received.type and \
p.sequence == self.last_packet_received.sequence:
self.logger.debug("Received previous response")
continue
self.logger.warning("Resynchronization requested")
self.packetSequence = (self.packetSequence + 1) % 32
self.messageSequence = 0
raise TransmissionOutOfSyncError()
self.packetSequence = (self.packetSequence + 1) % 32
self.logger.debug("SEND FINAL complete")
except RileyLinkError as rle:
raise ProtocolError("Radio error during sending") from rle
@staticmethod
def _get_packet(data):
p = None
if data is not None and len(data) > 2:
calc = crc.crc8(data[2:-1])
if data[-1] == calc:
try:
p = Packet.from_data(data[2:-1])
getLogger().debug("RECEIVED PACKET: %s" % p)
except ProtocolError as pe:
getLogger().warning("Crc match on an invalid packet, error: %s" % pe)
return p
| 2.4375 | 2 |
project/forms/admin_unit_member.py | DanielGrams/gsevp | 1 | 12785611 | <gh_stars>1-10
from flask_babelex import lazy_gettext
from flask_wtf import FlaskForm
from wtforms import SubmitField
from wtforms.fields.html5 import EmailField
from wtforms.validators import DataRequired
from project.forms.widgets import MultiCheckboxField
class InviteAdminUnitMemberForm(FlaskForm):
email = EmailField(lazy_gettext("Email"), validators=[DataRequired()])
roles = MultiCheckboxField(lazy_gettext("Roles"))
submit = SubmitField(lazy_gettext("Invite"))
class NegotiateAdminUnitMemberInvitationForm(FlaskForm):
accept = SubmitField(lazy_gettext("Accept"))
decline = SubmitField(lazy_gettext("Decline"))
class DeleteAdminUnitInvitationForm(FlaskForm):
submit = SubmitField(lazy_gettext("Delete invitation"))
email = EmailField(lazy_gettext("Email"), validators=[DataRequired()])
class DeleteAdminUnitMemberForm(FlaskForm):
submit = SubmitField(lazy_gettext("Delete member"))
email = EmailField(lazy_gettext("Email"), validators=[DataRequired()])
class UpdateAdminUnitMemberForm(FlaskForm):
roles = MultiCheckboxField(lazy_gettext("Roles"))
submit = SubmitField(lazy_gettext("Update member"))
| 2.1875 | 2 |
students/K33402/Shuginin_Yurii/practical_works/simple_django_web_project/cars/migrations/0002_auto_20220107_2119.py | emina13/ITMO_ICT_WebDevelopment_2021-2022 | 0 | 12785612 | <reponame>emina13/ITMO_ICT_WebDevelopment_2021-2022
# Generated by Django 3.2 on 2022-01-07 21:19
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cars', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='carowneruser',
name='birthday',
field=models.DateField(blank=True, null=True),
),
migrations.AlterField(
model_name='ownership',
name='end_date',
field=models.DateField(blank=True, null=True),
),
]
| 1.679688 | 2 |
pywinusb/__main__.py | sam-aldis/pyWinUSB | 92 | 12785613 | <filename>pywinusb/__main__.py<gh_stars>10-100
import os, sys
from gi.repository import Gtk, GObject, Gdk
from pywinusb.window import AppWindow
def check_root_access():
""" Sprwdzenie czy skrypt jest instalowany pod rootem
:return: True jeśli root
"""
return not os.geteuid()
def main():
if not check_root_access():
sys.exit("\nOnly root can run this script :(\n")
win = AppWindow()
win.show_all()
Gdk.threads_enter()
GObject.threads_init()
Gtk.main()
Gdk.threads_leave()
if __name__ == '__main__':
sys.exit(main() or 0) | 1.953125 | 2 |
terraform/environments/core-logging/lambda/index.py | cherrymu/modernisation-platform | 1 | 12785614 | <filename>terraform/environments/core-logging/lambda/index.py<gh_stars>1-10
import boto3
import base64
from botocore.exceptions import ClientError
import json
athena_client = boto3.client('athena', region_name='eu-west-2')
sts_client = boto3.client('sts')
ssm_client = boto3.client('ssm', region_name='eu-west-2')
query_location='s3://athena-cloudtrail-query'
athena_db="mod_cloudtrail_logs"
athena_table="cloudtrail_logs"
#Get account numbers from the modernisation platform account
def get_accounts():
# Get environment management secret ARN
parameter_name = "environment_management_arn"
    try:
        get_parameter_response = ssm_client.get_parameter(
            Name=parameter_name,
            WithDecryption=True
        )
        secret_arn = get_parameter_response['Parameter']['Value']
    except ClientError as e:
        print("Unexpected error: %s" % e)
        # Without the parameter there is no secret ARN to continue with.
        raise
# get account numbers from environment management secret
region_name = "eu-west-2"
client = boto3.client(
service_name='secretsmanager',
region_name=region_name,
)
    try:
        get_secret_value_response = client.get_secret_value(
            SecretId=secret_arn
        )
    except ClientError as e:
        if e.response['Error']['Code'] == 'DecryptionFailureException':
            # Secrets Manager can't decrypt the protected secret text using the provided KMS key.
            # Deal with the exception here, and/or rethrow at your discretion.
            raise e
        elif e.response['Error']['Code'] == 'InternalServiceErrorException':
            # An error occurred on the server side.
            # Deal with the exception here, and/or rethrow at your discretion.
            raise e
        elif e.response['Error']['Code'] == 'InvalidParameterException':
            # You provided an invalid value for a parameter.
            # Deal with the exception here, and/or rethrow at your discretion.
            raise e
        elif e.response['Error']['Code'] == 'InvalidRequestException':
            # You provided a parameter value that is not valid for the current state of the resource.
            # Deal with the exception here, and/or rethrow at your discretion.
            raise e
        elif e.response['Error']['Code'] == 'ResourceNotFoundException':
            # We can't find the resource that you asked for.
            # Deal with the exception here, and/or rethrow at your discretion.
            raise e
        else:
            # Unrecognised error code: nothing to recover, so re-raise.
            raise e
    # Success path. The secret was decrypted using the associated KMS CMK.
    # Depending on whether the secret is a string or binary, one of these fields will be populated.
    if 'SecretString' in get_secret_value_response:
        # The caller reads ['SecretString'] from the full response, so return
        # the whole response serialised to plain types.
        res = json.loads(json.dumps(get_secret_value_response, default=str))
        return res
    else:
        decoded_binary_secret = base64.b64decode(get_secret_value_response['SecretBinary'])
        return decoded_binary_secret
#Create AWS Athena database
def create_athena_database():
print("[+] Creating Athena database " + athena_db)
try:
response = athena_client.start_query_execution(
QueryString='CREATE DATABASE IF NOT EXISTS %s' % athena_db,
ResultConfiguration={'OutputLocation': query_location})
except ClientError as e:
print("Unexpected error: %s" % e)
#Drop Athena table
def drop_athena_table():
print("[+] Dropping Athena table " + athena_table)
try:
response = athena_client.start_query_execution(
QueryString='DROP TABLE %s' % athena_table,
QueryExecutionContext={
'Database': athena_db
},
ResultConfiguration={'OutputLocation': query_location})
except ClientError as e:
print("[!] Could not drop Athena table")
print("Unexpected error: %s" % e)
#Create Athena table and add updated AWS accounts to Athena partition
def create_athena_table(list_accounts):
print("[+] Updating Athena table " + athena_table)
print("[+] Adding Mod accounts to Athena query: ")
print(list_accounts)
query = """
CREATE EXTERNAL TABLE cloudtrail_logs(
eventVersion STRING,
userIdentity STRUCT<
type: STRING,
principalId: STRING,
arn: STRING,
accountId: STRING,
invokedBy: STRING,
accessKeyId: STRING,
userName: STRING,
sessionContext: STRUCT<
attributes: STRUCT<
mfaAuthenticated: STRING,
creationDate: STRING>,
sessionIssuer: STRUCT<
type: STRING,
principalId: STRING,
arn: STRING,
accountId: STRING,
userName: STRING>>>,
eventTime STRING,
eventSource STRING,
eventName STRING,
awsRegion STRING,
sourceIpAddress STRING,
userAgent STRING,
errorCode STRING,
errorMessage STRING,
requestParameters STRING,
responseElements STRING,
additionalEventData STRING,
requestId STRING,
eventId STRING,
readOnly STRING,
resources ARRAY<STRUCT<
arn: STRING,
accountId: STRING,
type: STRING>>,
eventType STRING,
apiVersion STRING,
recipientAccountId STRING,
serviceEventDetails STRING,
sharedEventID STRING,
vpcEndpointId STRING
)
PARTITIONED BY (
`account` string, `region` string, `timestamp` string)
ROW FORMAT SERDE 'com.amazon.emr.hive.serde.CloudTrailSerde'
STORED AS INPUTFORMAT 'com.amazon.emr.cloudtrail.CloudTrailInputFormat'
OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION
's3://modernisation-platform-logs-cloudtrail/AWSLogs'
TBLPROPERTIES (
'projection.enabled'='true',
'projection.account.type' = 'enum',
'projection.account.values' = '%s',
'projection.region.type' = 'enum',
'projection.region.values' = 'eu-west-1, eu-west-2',
'projection.timestamp.format'='yyyy/MM/dd',
'projection.timestamp.interval'='1',
'projection.timestamp.interval.unit'='DAYS',
'projection.timestamp.range'='2020/01/01,NOW',
'projection.timestamp.type'='date',
'storage.location.template'='s3://modernisation-platform-logs-cloudtrail/AWSLogs/${account}/CloudTrail/${region}/${timestamp}')
""" % list_accounts
try:
response = athena_client.start_query_execution(
QueryString=query,
QueryExecutionContext={
'Database': athena_db
},
ResultConfiguration={
'OutputLocation': query_location,
}
)
print(response)
return response
except ClientError as e:
print("[-] Could not create Athena table")
print("Unexpected error: %s" % e)
def lambda_handler(event, context):
try:
#Get list of accounts
list_accounts = []
account = get_accounts()
        data_object = json.loads(account['SecretString'])
        #Get AWS account values and add them to a list
        for value in data_object['account_ids'].values():
            list_accounts.append(value)
#create athena database if it doesn't exist
create_athena_database()
drop_athena_table()
#Update Athena table and pass AWS account list as a parameter
create_athena_table(','.join(map(str, list_accounts)))
except ClientError as e:
print("Unexpected error: %s" % e)
| 2.0625 | 2 |
terrascript/alicloud/r.py | vutsalsinghal/python-terrascript | 0 | 12785615 | # terrascript/alicloud/r.py
import terrascript
class alicloud_instance(terrascript.Resource):
pass
class alicloud_ram_role_attachment(terrascript.Resource):
pass
class alicloud_disk(terrascript.Resource):
pass
class alicloud_disk_attachment(terrascript.Resource):
pass
class alicloud_network_interface(terrascript.Resource):
pass
class alicloud_network_interface_attachment(terrascript.Resource):
pass
class alicloud_snapshot(terrascript.Resource):
pass
class alicloud_snapshot_policy(terrascript.Resource):
pass
class alicloud_launch_template(terrascript.Resource):
pass
class alicloud_security_group(terrascript.Resource):
pass
class alicloud_security_group_rule(terrascript.Resource):
pass
class alicloud_db_database(terrascript.Resource):
pass
class alicloud_db_account(terrascript.Resource):
pass
class alicloud_db_account_privilege(terrascript.Resource):
pass
class alicloud_db_backup_policy(terrascript.Resource):
pass
class alicloud_db_connection(terrascript.Resource):
pass
class alicloud_db_read_write_splitting_connection(terrascript.Resource):
pass
class alicloud_db_instance(terrascript.Resource):
pass
class alicloud_mongodb_instance(terrascript.Resource):
pass
class alicloud_mongodb_sharding_instance(terrascript.Resource):
pass
class alicloud_gpdb_instance(terrascript.Resource):
pass
class alicloud_gpdb_connection(terrascript.Resource):
pass
class alicloud_db_readonly_instance(terrascript.Resource):
pass
class alicloud_ess_scaling_group(terrascript.Resource):
pass
class alicloud_ess_scaling_configuration(terrascript.Resource):
pass
class alicloud_ess_scaling_rule(terrascript.Resource):
pass
class alicloud_ess_schedule(terrascript.Resource):
pass
class alicloud_ess_scheduled_task(terrascript.Resource):
pass
class alicloud_ess_attachment(terrascript.Resource):
pass
class alicloud_ess_lifecycle_hook(terrascript.Resource):
pass
class alicloud_ess_notification(terrascript.Resource):
pass
class alicloud_ess_alarm(terrascript.Resource):
pass
class alicloud_ess_scalinggroup_vserver_groups(terrascript.Resource):
pass
class alicloud_vpc(terrascript.Resource):
pass
class alicloud_nat_gateway(terrascript.Resource):
pass
class alicloud_nas_file_system(terrascript.Resource):
pass
class alicloud_nas_mount_target(terrascript.Resource):
pass
class alicloud_nas_access_group(terrascript.Resource):
pass
class alicloud_nas_access_rule(terrascript.Resource):
pass
class alicloud_subnet(terrascript.Resource):
pass
class alicloud_vswitch(terrascript.Resource):
pass
class alicloud_route_entry(terrascript.Resource):
pass
class alicloud_route_table(terrascript.Resource):
pass
class alicloud_route_table_attachment(terrascript.Resource):
pass
class alicloud_snat_entry(terrascript.Resource):
pass
class alicloud_forward_entry(terrascript.Resource):
pass
class alicloud_eip(terrascript.Resource):
pass
class alicloud_eip_association(terrascript.Resource):
pass
class alicloud_slb(terrascript.Resource):
pass
class alicloud_slb_listener(terrascript.Resource):
pass
class alicloud_slb_attachment(terrascript.Resource):
pass
class alicloud_slb_backend_server(terrascript.Resource):
pass
class alicloud_slb_server_group(terrascript.Resource):
pass
class alicloud_slb_master_slave_server_group(terrascript.Resource):
pass
class alicloud_slb_rule(terrascript.Resource):
pass
class alicloud_slb_acl(terrascript.Resource):
pass
class alicloud_slb_ca_certificate(terrascript.Resource):
pass
class alicloud_slb_server_certificate(terrascript.Resource):
pass
class alicloud_oss_bucket(terrascript.Resource):
pass
class alicloud_oss_bucket_object(terrascript.Resource):
pass
class alicloud_ons_instance(terrascript.Resource):
pass
class alicloud_ons_topic(terrascript.Resource):
pass
class alicloud_ons_group(terrascript.Resource):
pass
class alicloud_dns_record(terrascript.Resource):
pass
class alicloud_dns(terrascript.Resource):
pass
class alicloud_dns_group(terrascript.Resource):
pass
class alicloud_key_pair(terrascript.Resource):
pass
class alicloud_key_pair_attachment(terrascript.Resource):
pass
class alicloud_kms_key(terrascript.Resource):
pass
class alicloud_ram_user(terrascript.Resource):
pass
class alicloud_ram_account_password_policy(terrascript.Resource):
pass
class alicloud_ram_access_key(terrascript.Resource):
pass
class alicloud_ram_login_profile(terrascript.Resource):
pass
class alicloud_ram_group(terrascript.Resource):
pass
class alicloud_ram_role(terrascript.Resource):
pass
class alicloud_ram_policy(terrascript.Resource):
pass
class alicloud_ram_alias(terrascript.Resource):
pass
class alicloud_ram_account_alias(terrascript.Resource):
pass
class alicloud_ram_group_membership(terrascript.Resource):
pass
class alicloud_ram_user_policy_attachment(terrascript.Resource):
pass
class alicloud_ram_role_policy_attachment(terrascript.Resource):
pass
class alicloud_ram_group_policy_attachment(terrascript.Resource):
pass
class alicloud_container_cluster(terrascript.Resource):
pass
class alicloud_cs_application(terrascript.Resource):
pass
class alicloud_cs_swarm(terrascript.Resource):
pass
class alicloud_cs_kubernetes(terrascript.Resource):
pass
class alicloud_cs_managed_kubernetes(terrascript.Resource):
pass
class alicloud_cr_namespace(terrascript.Resource):
pass
class alicloud_cr_repo(terrascript.Resource):
pass
class alicloud_cdn_domain(terrascript.Resource):
pass
class alicloud_cdn_domain_new(terrascript.Resource):
pass
class alicloud_cdn_domain_config(terrascript.Resource):
pass
class alicloud_router_interface(terrascript.Resource):
pass
class alicloud_router_interface_connection(terrascript.Resource):
pass
class alicloud_ots_table(terrascript.Resource):
pass
class alicloud_ots_instance(terrascript.Resource):
pass
class alicloud_ots_instance_attachment(terrascript.Resource):
pass
class alicloud_cms_alarm(terrascript.Resource):
pass
class alicloud_pvtz_zone(terrascript.Resource):
pass
class alicloud_pvtz_zone_attachment(terrascript.Resource):
pass
class alicloud_pvtz_zone_record(terrascript.Resource):
pass
class alicloud_log_project(terrascript.Resource):
pass
class alicloud_log_store(terrascript.Resource):
pass
class alicloud_log_store_index(terrascript.Resource):
pass
class alicloud_log_machine_group(terrascript.Resource):
pass
class alicloud_logtail_config(terrascript.Resource):
pass
class alicloud_logtail_attachment(terrascript.Resource):
pass
class alicloud_fc_service(terrascript.Resource):
pass
class alicloud_fc_function(terrascript.Resource):
pass
class alicloud_fc_trigger(terrascript.Resource):
pass
class alicloud_vpn_gateway(terrascript.Resource):
pass
class alicloud_vpn_customer_gateway(terrascript.Resource):
pass
class alicloud_vpn_connection(terrascript.Resource):
pass
class alicloud_ssl_vpn_server(terrascript.Resource):
pass
class alicloud_ssl_vpn_client_cert(terrascript.Resource):
pass
class alicloud_cen_instance(terrascript.Resource):
pass
class alicloud_cen_instance_attachment(terrascript.Resource):
pass
class alicloud_cen_bandwidth_package(terrascript.Resource):
pass
class alicloud_cen_bandwidth_package_attachment(terrascript.Resource):
pass
class alicloud_cen_bandwidth_limit(terrascript.Resource):
pass
class alicloud_cen_route_entry(terrascript.Resource):
pass
class alicloud_cen_instance_grant(terrascript.Resource):
pass
class alicloud_kvstore_instance(terrascript.Resource):
pass
class alicloud_kvstore_backup_policy(terrascript.Resource):
pass
class alicloud_datahub_project(terrascript.Resource):
pass
class alicloud_datahub_subscription(terrascript.Resource):
pass
class alicloud_datahub_topic(terrascript.Resource):
pass
class alicloud_mns_queue(terrascript.Resource):
pass
class alicloud_mns_topic(terrascript.Resource):
pass
class alicloud_havip(terrascript.Resource):
pass
class alicloud_mns_topic_subscription(terrascript.Resource):
pass
class alicloud_havip_attachment(terrascript.Resource):
pass
class alicloud_api_gateway_api(terrascript.Resource):
pass
class alicloud_api_gateway_group(terrascript.Resource):
pass
class alicloud_api_gateway_app(terrascript.Resource):
pass
class alicloud_api_gateway_app_attachment(terrascript.Resource):
pass
class alicloud_api_gateway_vpc_access(terrascript.Resource):
pass
class alicloud_common_bandwidth_package(terrascript.Resource):
pass
class alicloud_common_bandwidth_package_attachment(terrascript.Resource):
pass
class alicloud_drds_instance(terrascript.Resource):
pass
class alicloud_elasticsearch_instance(terrascript.Resource):
pass
class alicloud_actiontrail(terrascript.Resource):
pass
class alicloud_cas_certificate(terrascript.Resource):
pass
class alicloud_ddoscoo_instance(terrascript.Resource):
pass
class alicloud_network_acl(terrascript.Resource):
pass
class alicloud_network_acl_attachment(terrascript.Resource):
pass
class alicloud_network_acl_entries(terrascript.Resource):
pass
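# Usage sketch (illustrative; names assumed from the terrascript API): resource
# classes are added to a Terrascript config and emitted as Terraform JSON, e.g.
#
#   ts = terrascript.Terrascript()
#   ts += alicloud_vpc('main', name='main', cidr_block='10.0.0.0/8')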
| 1.65625 | 2 |
platalea/text_image.py | gchrupala/platalea | 1 | 12785616 | <filename>platalea/text_image.py
from collections import Counter
import json
import logging
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from platalea.basic import cyclic_scheduler
import platalea.dataset as D
from platalea.encoders import TextEncoder, ImageEncoder
import platalea.loss
import platalea.score
import platalea.config
_device = platalea.config.device()
class TextImage(nn.Module):
def __init__(self, config):
super(TextImage, self).__init__()
self.config = config
# Components can be pre-instantiated or configured through a dictionary
if isinstance(config['TextEncoder'], nn.Module):
self.TextEncoder = config['TextEncoder']
else:
self.TextEncoder = TextEncoder(config['TextEncoder'])
if isinstance(config['ImageEncoder'], nn.Module):
self.ImageEncoder = config['ImageEncoder']
else:
self.ImageEncoder = ImageEncoder(config['ImageEncoder'])
def cost(self, item):
text_enc = self.TextEncoder(item['text'], item['text_len'])
image_enc = self.ImageEncoder(item['image'])
scores = platalea.loss.cosine_matrix(text_enc, image_enc)
loss = platalea.loss.contrastive(scores,
margin=self.config['margin_size'])
return loss
def embed_image(self, images):
image = torch.utils.data.DataLoader(dataset=images, batch_size=32,
shuffle=False,
collate_fn=D.batch_image)
image_e = []
for i in image:
image_e.append(self.ImageEncoder(i.to(_device)).detach().cpu().numpy())
image_e = np.concatenate(image_e)
return image_e
def embed_text(self, texts):
texts = [D.Flickr8KData.caption2tensor(t) for t in texts]
text = torch.utils.data.DataLoader(dataset=texts, batch_size=32,
shuffle=False,
collate_fn=D.batch_text)
text_e = []
for t, l in text:
text_e.append(self.TextEncoder(t.to(_device),
l.to(_device)).detach().cpu().numpy())
text_e = np.concatenate(text_e)
return text_e
def experiment(net, data, config):
def val_loss():
net.eval()
result = []
for item in data['val']:
item = {key: value.to(_device) for key, value in item.items()}
result.append(net.cost(item).item())
net.train()
return torch.tensor(result).mean()
net.to(_device)
net.train()
    optimizer = optim.Adam(net.parameters(), lr=1)  # base lr=1; the cyclic scheduler below rescales it every step
scheduler = cyclic_scheduler(optimizer, len(data['train']),
max_lr=config['max_lr'], min_lr=1e-6)
optimizer.zero_grad()
with open("result.json", "w") as out:
for epoch in range(1, config['epochs']+1):
cost = Counter()
for j, item in enumerate(data['train'], start=1):
item = {key: value.to(_device) for key, value in item.items()}
loss = net.cost(item)
optimizer.zero_grad()
loss.backward()
optimizer.step()
scheduler.step()
cost += Counter({'cost': loss.item(), 'N': 1})
if j % 100 == 0:
logging.info("train {} {} {}".format(
epoch, j, cost['cost']/cost['N']))
if j % 400 == 0:
logging.info("valid {} {} {}".format(epoch, j, val_loss()))
result = platalea.score.score_text_image(net, data['val'].dataset)
result['epoch'] = epoch
json.dump(result, out)
print('', file=out, flush=True)
logging.info("Saving model in net.{}.pt".format(epoch))
torch.save(net, "net.{}.pt".format(epoch))
def get_default_config():
return dict(
TextEncoder=dict(
emb=dict(num_embeddings=D.Flickr8KData.vocabulary_size(),
embedding_dim=128),
rnn=dict(input_size=128, hidden_size=1024, num_layers=2,
bidirectional=True, dropout=0),
att=dict(in_size=1024 * 2, hidden_size=128)),
ImageEncoder=dict(
linear=dict(in_size=1024 * 2, out_size=1024 * 2),
norm=True),
margin_size=0.2)
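# Wiring sketch (illustrative): `data` is expected to map 'train' and 'val' to
# DataLoaders over platalea batches (see platalea.dataset), e.g.
#
#   net = TextImage(get_default_config())
#   experiment(net, data, dict(epochs=32, max_lr=2e-4))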
| 2.296875 | 2 |
deepq/asyn_sec/four_robots_asyn_test.py | longhuang318/RL-and-Robot | 28 | 12785617 | from multiprocessing import Process, Queue, Pipe
from baselines import deepq
import gym
from deepq.asyn_sec.actor_interact_env import actor_inter
from deepq.asyn_sec.simple_multi_agent import learn
# from deepq.asyn_trainer_actor.new_models import mlp
from deepq.models import mlp
# from p2os_test.src.seventh_edition_gpw import set_gpw_num
from p2os_test.src.sixth_edition_gpw import set_gpw_num
from baselines.common import set_global_seeds
def trainer(in_actor_deque, in_action_pipes):
    # A termination condition for the learn() process can be configured here
env = gym.make("GpwTrainer-v0")
# env = gym.make("MountainCar-v0")
# model = deepq.models.mlp([64])
model = mlp([256, 256, 128], layer_norm=True)
act = learn(
env,
actor_deque=in_actor_deque,
action_pipes=in_action_pipes,
q_func=model,
lr=1e-4, # 1e-3
max_timesteps=5000000,
buffer_size=1000000, # 300000
exploration_fraction=0.30,
exploration_final_eps=0.05, # 0.02
train_freq=1,
batch_size=32,
        print_freq=30000,  # print_freq here is step-based; on the actor side it is episode-based
# checkpoint_freq=10000,
# checkpoint_path=None,
learning_starts=1000,
gamma=1.0,
target_network_update_freq=500,
prioritized_replay=True,
prioritized_replay_alpha=0.6,
# prioritized_replay_beta0=0.4,
# param_noise=True,
)
print("All end")
print("Saving model")
act.save("asyn_rob_model.pkl")
env.close()
def actor(in_ac_num, in_actor_deque, in_action_pipes):
set_global_seeds(0)
    set_gpw_num(in_ac_num)  # set the environment number
acenv = gym.make("SixthRobGpw-v0")
# acenv = gym.make("MountainCar-v0")
actor_inter(env=acenv, ac_num=in_ac_num, actor_deque=in_actor_deque,
action_pipes=in_action_pipes, print_freq=20)
acenv.close()
def main():
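    # Communication topology: a single shared Queue carries experience from the
    # four actor processes to the trainer; one one-way Pipe per actor carries
    # actions back from the trainer.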
pipes = [Pipe(duplex=False) for x in range(0, 4)]
pipes_conn1 = [pipes[i][1] for i in range(0, 4)]
actor_inf_queue = Queue(maxsize=5)
train_process = Process(target=trainer, args=(actor_inf_queue, pipes_conn1))
actor_process_01 = Process(target=actor, args=(1, actor_inf_queue, pipes[0][0]))
actor_process_02 = Process(target=actor, args=(2, actor_inf_queue, pipes[1][0]))
actor_process_03 = Process(target=actor, args=(3, actor_inf_queue, pipes[2][0]))
actor_process_04 = Process(target=actor, args=(4, actor_inf_queue, pipes[3][0]))
# actor_process_05 = Process(target=actor, args=(lock1, net_list, mem_queue, total_step, update_flag, 5))
# actor_process_06 = Process(target=actor, args=(lock1, net_list, mem_queue, total_step, update_flag, 6))
# actor_process_07 = Process(target=actor, args=(lock1, net_list, mem_queue, total_step, update_flag, 7))
# actor_process_08 = Process(target=actor, args=(lock1, net_list, mem_queue, total_step, update_flag, 8))
train_process.start()
actor_process_01.start()
actor_process_02.start()
actor_process_03.start()
actor_process_04.start()
# actor_process_05.start()
# actor_process_06.start()
# actor_process_07.start()
# actor_process_08.start()
train_process.join()
actor_process_01.join()
actor_process_02.join()
actor_process_03.join()
actor_process_04.join()
# actor_process_05.join()
# actor_process_06.join()
# actor_process_07.join()
# actor_process_08.join()
if __name__ == '__main__':
main()
| 2.421875 | 2 |
wagtail_simple_gallery/migrations/0001_initial.py | MorezMartin/wagtail-simple-gallery | 41 | 12785618 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-10-07 12:48
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.core.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtailcore', '0028_merge'),
]
operations = [
migrations.CreateModel(
name='SimpleGalleryIndex',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('intro_title', models.CharField(blank=True, help_text='Optional H1 title for the gallery page.', max_length=250)),
('intro_text', wagtail.core.fields.RichTextField(blank=True, help_text='Optional text to go with the intro text.')),
('images_per_page', models.IntegerField(default=8, help_text='How many images there should be on one page.')),
('use_lightbox', models.BooleanField(default=True, help_text='Use lightbox to view larger images when clicking the thumbnail.')),
('collection', models.ForeignKey(help_text='Show images in this collection in the gallery view.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Collection')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
]
| 1.75 | 2 |
torchbenchmark/e2e_models/hf_bert/__init__.py | LaudateCorpus1/benchmark | 0 | 12785619 | <reponame>LaudateCorpus1/benchmark
import torch
import math
import os
from pathlib import Path
from torch.utils.data import DataLoader
from torchbenchmark.util.e2emodel import E2EBenchmarkModel
from torchbenchmark.tasks import NLP
from datasets import load_metric
from accelerate import Accelerator
from transformers import (
AdamW,
AutoConfig,
AutoModelForSequenceClassification,
AutoTokenizer,
DataCollatorWithPadding,
default_data_collator,
get_scheduler,
)
from typing import Optional
from torchbenchmark.util.framework.transformers.text_classification.dataset import prep_dataset, preprocess_dataset, prep_labels
from torchbenchmark.util.framework.transformers.text_classification.args import parse_args, parse_torchbench_args
# setup environment variable
CURRENT_DIR = Path(os.path.dirname(os.path.realpath(__file__)))
class Model(E2EBenchmarkModel):
task = NLP.LANGUAGE_MODELING
DEFAULT_TRAIN_BSIZE: int = 32
DEFAULT_EVAL_BSIZE: int = 1
def __init__(self, test, batch_size=None, extra_args=[]):
super().__init__(test=test, batch_size=batch_size, extra_args=extra_args)
# TODO: currently only support 1 GPU device
self.device = "cuda"
self.device_num = 1
# Parse the extra arguments
self.tb_args = parse_torchbench_args(extra_args)
torch.manual_seed(1337)
torch.backends.cudnn.deterministic = False
torch.backends.cudnn.benchmark = True
# Parameters
model_name = "bert-base-cased"
max_seq_length = "128"
learning_rate = "2e-5"
num_train_epochs = "3"
# this benchmark runs on a single GPU
cuda_visible_devices = "0"
output_dir = os.path.join(CURRENT_DIR, ".output")
os.environ["CUDA_VISIBLE_DEVICES"] = cuda_visible_devices
in_arg = ["--model_name_or_path", model_name, "--task_name", self.tb_args.task_name,
"--max_length", max_seq_length,
"--per_device_train_batch_size", str(self.batch_size),
"--per_device_eval_batch_size", str(self.batch_size),
"--learning_rate", learning_rate,
"--num_train_epochs", num_train_epochs,
"--output_dir", output_dir]
hf_args = parse_args(in_arg)
# setup other members
self.prep(hf_args)
if test == "train":
self.num_examples = len(self.train_dataloader) * self.batch_size
elif test == "eval":
self.num_examples = len(self.eval_dataloader) * self.batch_size
def prep(self, hf_args):
# Initialize the accelerator. We will let the accelerator handle device placement for us in this example.
accelerator = Accelerator(fp16=(self.tb_args.fp16 == "amp"))
accelerator.wait_for_everyone()
raw_datasets = prep_dataset(hf_args)
num_labels, label_list, is_regression = prep_labels(hf_args, raw_datasets)
# Load pretrained model and tokenizer
#
# In distributed training, the .from_pretrained methods guarantee that only one local process can concurrently
# download model & vocab.
config = AutoConfig.from_pretrained(hf_args.model_name_or_path, num_labels=num_labels, finetuning_task=hf_args.task_name)
tokenizer = AutoTokenizer.from_pretrained(hf_args.model_name_or_path, use_fast=not hf_args.use_slow_tokenizer)
model = AutoModelForSequenceClassification.from_pretrained(
hf_args.model_name_or_path,
from_tf=bool(".ckpt" in hf_args.model_name_or_path),
config=config,)
train_dataset, eval_dataset, self.mnli_eval_dataset = preprocess_dataset(hf_args, config, model, \
tokenizer, raw_datasets, num_labels, label_list, is_regression, accelerator)
# DataLoaders creation:
if hf_args.pad_to_max_length:
            # If padding was already done to max length, we use the default data collator that will just convert everything
# to tensors.
self.data_collator = default_data_collator
else:
# Otherwise, `DataCollatorWithPadding` will apply dynamic padding for us (by padding to the maximum length of
# the samples passed). When using mixed precision, we add `pad_to_multiple_of=8` to pad all tensors to multiple
# of 8s, which will enable the use of Tensor Cores on NVIDIA hardware with compute capability >= 7.5 (Volta).
self.data_collator = DataCollatorWithPadding(tokenizer, pad_to_multiple_of=(8 if accelerator.use_fp16 else None))
train_dataloader = DataLoader(
train_dataset, shuffle=True, collate_fn=self.data_collator, batch_size=hf_args.per_device_train_batch_size)
eval_dataloader = DataLoader(eval_dataset, collate_fn=self.data_collator, batch_size=hf_args.per_device_eval_batch_size)
# Optimizer
# Split weights in two groups, one with weight decay and the other not.
no_decay = ["bias", "LayerNorm.weight"]
optimizer_grouped_parameters = [
{
"params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],
"weight_decay": hf_args.weight_decay,
},
{
"params": [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)],
"weight_decay": 0.0,
},
]
optimizer = AdamW(optimizer_grouped_parameters, lr=hf_args.learning_rate)
# Prepare everything with our `accelerator`.
model, optimizer, train_dataloader, eval_dataloader = accelerator.prepare(
model, optimizer, train_dataloader, eval_dataloader
)
        # Note -> the training dataloader needs to be prepared before we grab its length below (because its length will be
# shorter in multiprocess)
# Scheduler and math around the number of training steps.
num_update_steps_per_epoch = math.ceil(len(train_dataloader) / hf_args.gradient_accumulation_steps)
if hf_args.max_train_steps is None:
hf_args.max_train_steps = hf_args.num_train_epochs * num_update_steps_per_epoch
else:
hf_args.num_train_epochs = math.ceil(hf_args.max_train_steps / num_update_steps_per_epoch)
lr_scheduler = get_scheduler(
name=hf_args.lr_scheduler_type,
optimizer=optimizer,
num_warmup_steps=hf_args.num_warmup_steps,
num_training_steps=hf_args.max_train_steps,
)
        # Set up metrics
# Get the metric function
if hf_args.task_name is not None:
self.metric = load_metric("glue", hf_args.task_name)
else:
self.metric = load_metric("accuracy")
# Setup class members
self.hf_args = hf_args
self.is_regression = is_regression
self.model = model
self.optimizer = optimizer
self.train_dataloader = train_dataloader
self.eval_dataloader = eval_dataloader
self.lr_scheduler = lr_scheduler
self.accelerator = accelerator
def train(self) -> Optional[dict]:
completed_steps = 0
eval_metric = None
for _epoch in range(self.hf_args.num_train_epochs):
self.model.train()
for step, batch in enumerate(self.train_dataloader):
outputs = self.model(**batch)
loss = outputs.loss
loss = loss / self.hf_args.gradient_accumulation_steps
self.accelerator.backward(loss)
if step % self.hf_args.gradient_accumulation_steps == 0 or step == len(self.train_dataloader) - 1:
self.optimizer.step()
self.lr_scheduler.step()
self.optimizer.zero_grad()
completed_steps += 1
if completed_steps >= self.hf_args.max_train_steps:
break
if self.tb_args.validate_in_train:
self.model.eval()
for step, batch in enumerate(self.eval_dataloader):
outputs = self.model(**batch)
predictions = outputs.logits.argmax(dim=-1) if not self.is_regression else outputs.logits.squeeze()
self.metric.add_batch(
predictions=self.accelerator.gather(predictions),
references=self.accelerator.gather(batch["labels"]),
)
eval_metric = self.metric.compute()
if self.tb_args.validate_in_train:
if self.hf_args.task_name == "mnli":
# Final evaluation on mismatched validation set
eval_dataset = self.mnli_eval_dataset
eval_dataloader = DataLoader(
eval_dataset, collate_fn=self.data_collator, batch_size=self.hf_args.per_device_eval_batch_size
)
eval_dataloader = self.accelerator.prepare(eval_dataloader)
self.model.eval()
for step, batch in enumerate(eval_dataloader):
outputs = self.model(**batch)
predictions = outputs.logits.argmax(dim=-1)
self.metric.add_batch(
predictions=self.accelerator.gather(predictions),
references=self.accelerator.gather(batch["labels"]),
)
eval_metric = self.metric.compute()
return eval_metric
def eval(self) -> Optional[dict]:
self.model.eval()
for _step, batch in enumerate(self.eval_dataloader):
outputs = self.model(**batch)
predictions = outputs.logits.argmax(dim=-1) if not self.is_regression else outputs.logits.squeeze()
self.metric.add_batch(
predictions=self.accelerator.gather(predictions),
references=self.accelerator.gather(batch["labels"]),
)
eval_metric = self.metric.compute()
return eval_metric | 2.203125 | 2 |
ch9-reading-and-writing-files/madlibs.py | aojrzynski/BK-automate-boring-stuff-python-projects | 0 | 12785620 | <filename>ch9-reading-and-writing-files/madlibs.py
#! python3
# madlibs.py - reads in text files and lets the user add their own text anywhere the word
# ADJECTIVE, NOUN, ADVERB, or VERB appears in the text file.
#
# NOTES: Needs a story.txt file to be in the same location as this program.
import re
import pyinputplus as pyip
from pathlib import Path
# Open the file, read its content, and print it.
storyFile = open('story.txt')
content = storyFile.read()
storyFile.close()
print('This is the text you can work with:')
print('"' + content + '"')
# Ask the user to replace strings within the file.
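# Tokenise into words (keeping apostrophes) and standalone punctuation marks.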
content = re.findall(r"[\w']+|[.,!?;]", content)
for i, word in enumerate(content):
if word.lower() == 'adjective':
content[i] = pyip.inputStr(prompt='Enter an adjective:')
elif word.lower() == 'noun':
content[i] = pyip.inputStr(prompt='Enter a noun:')
elif word.lower() == 'adverb':
content[i] = pyip.inputStr(prompt='Enter an adverb:')
elif word.lower() == 'verb':
content[i] = pyip.inputStr(prompt='Enter a verb:')
# Find the ends of sentences and modify the list so there is no space between a word and its dot.
# Example: 'Hello World .' is bad. 'Hello World.' is good.
pattern = re.compile(r'\w+\.')
for i in range(len(content) - 1):
    if pattern.match(content[i] + content[i + 1]):
        content[i] = content[i] + content[i + 1]
        content[i + 1] = '--$$**DELETE**$$--'
# Filter the markers out in one pass; deleting by index while iterating
# shifts the remaining items and can skip entries or raise IndexError.
content = [word for word in content if word != '--$$**DELETE**$$--']
content = ' '.join(content)
# Write the new string to the file and print.
storyFile = open('story.txt', 'w')
storyFile.write(content)
storyFile.close()
print('The result:')
print(content)
| 4.15625 | 4 |
lib/button.py | bopopescu/ros | 0 | 12785621 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 by <NAME>. All rights reserved. This file is part of
# the Robot OS project and is released under the "Apache Licence, Version 2.0".
# Please see the LICENSE file included as part of this package.
#
# author: <NAME>
# created: 2020-01-18
# modified: 2020-03-26
#
import time, threading
from colorama import init, Fore, Style
init()
from lib.abstract_task import AbstractTask
from lib.event import Event
from lib.message import Message
try:
from gpiozero import Button as GpioButton
print('import :' + Fore.BLACK + ' INFO : successfully imported gpiozero Button.' + Style.RESET_ALL)
except ImportError:
print('import :' + Fore.RED + ' ERROR : failed to import gpiozero Button, using mock...' + Style.RESET_ALL)
from .mock_gpiozero import Button as GpioButton
ms = 50 / 1000 # 50ms loop delay
# ..............................................................................
class Button(AbstractTask):
'''
Button Task: reacts to pressing the red button.
Usage:
       button = Button(config, queue, mutex)
button.start()
value = button.get()
'''
button_priority = 6
def __init__(self, config, queue, mutex):
'''
Parameters:
config: the YAML-based application configuration
queue: the message queue to receive messages from this task
mutex: vs godzilla
'''
super().__init__("button", queue, None, Button.button_priority, mutex)
if config is None:
raise ValueError('no configuration provided.')
self._queue = queue
_config = config['ros'].get('button')
_pin = _config.get('pin')
self._toggle = _config.get('toggle') # if true, the value toggles when the button is pushed rather than acting as a momentary button
self._log.info('initialising button on pin {:d}; toggle={}'.format(_pin, self._toggle))
self._queue = queue
self._button = GpioButton(_pin)
self._value = False
self._log.debug('ready.')
# ......................................................
def run(self):
super(AbstractTask, self).run()
self.enable()
if self._toggle:
self._button.when_released = self.toggle_state
else:
self.polling = threading.Thread(target=Button.poll, args=[self,])
self.polling.start()
# ......................................................
def toggle_state(self):
self._value = not self._value
self._log.info("button toggle: {}".format(self._value))
_message = Message(Event.BUTTON)
_message.set_value(self._value)
if self._queue:
self._queue.add(_message)
# ......................................................
def poll(self):
while self.is_enabled():
if not self._toggle:
self._value = self._button.is_pressed
self._log.debug('button poll.')
time.sleep(ms)
# ..........................................................................
def enable(self):
super().enable()
# ..........................................................................
def disable(self):
super().disable()
# ......................................................
def get(self):
return self._value
# ......................................................
def close(self):
super().close()
| 2.234375 | 2 |
pysynphot/test/utils.py | axru5812/pysynphot | 0 | 12785622 | <gh_stars>0
import pytest
# This is to mark tests that require access to CDBS
try:
use_cdbs = pytest.mark.skipif(not pytest.config.getoption('--cdbs'),
reason='need --cdbs option to run')
except AttributeError: # Not using pytest
use_cdbs = pytest.mark.skipif(True, reason='need --cdbs option to run')
| 2.109375 | 2 |
generation/calibrations.py | guodashun/art-force | 0 | 12785623 | import math
import numpy as np
## Real Data:
# %% Kinect Color Camera
color_cam_matrix = np.array([ 1.0526303338534365e+03, 0., 9.3528526085572480e+02, 0., 1.0534191001014469e+03, 5.2225718970556716e+02, 0., 0., 1. ]).reshape(3,3)
color_distortion_coeffs = np.array([ 4.5467150011699140e-02, -7.4470107942918126e-02, -6.1697129558609537e-03, -2.5667037404509380e-03, -1.4503959457133547e-02 ]).reshape(1,5)
color_rotation = np.eye(3)
color_projection = np.array([ 1.0526303338534365e+03, 0., 9.3528526085572480e+02, 0., 0., 1.0534191001014469e+03, 5.2225718970556716e+02, 0., 0., 0., 1., 0., 0., 0., 0., 1. ]).reshape(4,4)
# %% Kinect IR Camera
ir_cam_matrix = np.array([ 3.5706872738709285e+02, 0., 2.5037220752105404e+02, 0., 3.5700920458183873e+02, 2.0803230739018434e+02, 0., 0., 1. ]).reshape(3,3)
ir_distortion_coeffs = np.array([ 5.5998048975189132e-02, -2.5691440815038830e-01, -5.3889184410447575e-03, -1.6922667364749613e-03, 1.9674519800098919e-01 ]).reshape(1,5)
ir_rotation = np.eye(3)
ir_projection = np.array([ 3.5706872738709285e+02, 0., 2.5037220752105404e+02, 0., 0., 3.5700920458183873e+02, 2.0803230739018434e+02, 0., 0., 0., 1., 0., 0., 0., 0., 1. ]).reshape(4,4)
depthShift = -2.7989551644219979e+01
# %% Pose Calibration between depth and color
rotation = np.array([ 9.9997222955499243e-01, -7.4399336788120839e-03, 4.3301925190808763e-04, 7.4347723554060875e-03, 9.9991294780487039e-01, 1.0900503300210780e-02, -5.1408057825089366e-04, -1.0896981188819882e-02, 9.9994049399058227e-01 ]).reshape(3,3)
translation = np.array([ -5.2291985456630448e-02, -1.9227292627499695e-04, 1.7173350151375650e-03 ]).reshape(3,1)
essential = np.array([ -1.2669151118394222e-05, -1.7150903228939863e-03, -2.1098130088050980e-04, 1.6904050298585356e-03, -5.8260164046387006e-04, 5.2289617408374921e-02, -1.9651142111198186e-04, -5.2288863822328481e-02, -5.6992570216587654e-04 ]).reshape(3,3)
fundamental = np.array([ -8.8142664830290771e-09, -1.1934330447023842e-06, 1.9806702972926870e-04, 1.1751792885051283e-06, -4.0509553642475600e-07, 1.2770218257581496e-02, -7.4941574482561516e-04, -3.6972004067303506e-02, 1. ]).reshape(3,3)
# %% Color Params
color_height = 1080
color_width = 1920
color_fov_x = 360 / math.pi * math.atan2(color_width, 2 * color_cam_matrix[0,0])
color_fov_y = 360 / math.pi * math.atan2(color_height, 2 * color_cam_matrix[1,1] )
color_fx = color_cam_matrix[0,0]
color_fy = color_cam_matrix[1,1]
color_cx = color_cam_matrix[0,2]
color_cy = color_cam_matrix[1,2]
color_fx
color_fy
color_fov_x
color_fov_y
# %% IR Field of View, Width, Height computation
ir_width = 512
ir_height = 424
ir_aspect = ir_width / ir_height
depth_fov_x = 360 / math.pi * math.atan2(ir_width, 2 * ir_cam_matrix[0,0])
depth_fov_y = 360 / math.pi * math.atan2(ir_height, 2 * ir_cam_matrix[1,1])
ir_fx = ir_cam_matrix[0,0]
ir_fy = ir_cam_matrix[1,1]
ir_cx = ir_cam_matrix[0,2]
ir_cy = ir_cam_matrix[1,2]
## transform into camera frame. useful for reconstruction!
T_magic_to_cam = np.array([ [0. ,-1. , 0. , 0. ],
[0. , 0. ,-1. , 0. ],
[1. , 0. , 0. , 0. ],
[0. , 0. , 0. , 1.0]])
## Simulation Camera Params
# %%
znear = 0.1
zfar = 12
sim_width = 192
sim_height = 108
# sim_width = 720 * 4
# sim_height = 405 * 4
old_sim_fovy = 60 * math.pi / 180
old_sim_fovx = 2 * math.atan(math.tan(old_sim_fovy / 2) * sim_width / sim_height)
old_sim_fovy * 180 / math.pi
old_sim_fovx * 180 / math.pi
old_sim_focal_y = (sim_height / 2) / math.tan(old_sim_fovy / 2)
old_sim_focal_x = (sim_width / 2 ) / math.tan(old_sim_fovx / 2)
old_sim_proj_matrix = np.array([[old_sim_focal_x, 0, sim_width / 2],
[0, old_sim_focal_y, sim_height / 2],
[0, 0, 1]])
# new sim cam Params, using color fov_y
sim_focal_y = (sim_height / 2) / math.tan(color_fov_y * math.pi / 180.0 / 2)
sim_focal_x = sim_focal_y
sim_proj_matrix = np.array([[sim_focal_x, 0, sim_width / 2],
[0, sim_focal_y, sim_height / 2],
[0, 0, 1]])
# checking that these are reasonable
color_fov_x = 360 / math.pi * math.atan2(color_width, 2 * color_cam_matrix[0,0])
color_fov_y = 360 / math.pi * math.atan2(color_height, 2 * color_cam_matrix[1,1] )
color_fov_x
color_fov_y
test_sim_fov_y = 360 / math.pi * math.atan2(sim_height, 2 * sim_proj_matrix[1,1] )
test_sim_fov_x = 360 / math.pi * math.atan2(sim_width, 2 * sim_proj_matrix[0,0] )
# fake real sim cam Params (ie, size is the full 1920 x 1080)
fake_focal_y = (color_height / 2) / math.tan(color_fov_y * math.pi / 180.0 / 2)
fake_focal_x = (color_width / 2) / math.tan(color_fov_x * math.pi / 180.0 / 2)
fake_proj_matrix = np.array([[fake_focal_x, 0, color_width / 2],
[0, fake_focal_y, color_height / 2],
[0, 0, 1]])
if __name__ == '__main__':
np.set_printoptions(suppress=True)
print(' \n simulated cam matrix: \n\t', str(np.round(fake_proj_matrix,0)).replace('\n', '\n\t'))
print(' \n real cam matrix: \n\t', str(np.round(color_cam_matrix,0)).replace('\n', '\n\t'))
print(' \n ')
print(color_fov_y)
| 2.046875 | 2 |
rnn_enhancement/unitary_linear.py | nicolas-ivanov/Seq2Seq_Upgrade_TensorFlow | 65 | 12785624 | <gh_stars>10-100
"""Linear Algebraic Functions for Unitary Matrices.
These equations come from http://arxiv.org/pdf/1511.06464v2.pdf
This paper is constantly referenced throughout this library"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
print('Unitary Linear has been imported')
#ORIGINAL FUNCTION
def times_diag(input, n_hidden, diag): #convert this to tensorflow....
input_re = input[:, :n_hidden]
input_im = input[:, n_hidden:]
    Re = T.nlinalg.AllocDiag()(T.cos(diag)) #I think this will allocate a square matrix
Im = T.nlinalg.AllocDiag()(T.sin(diag))
input_re_times_Re = T.dot(input_re, Re)
input_re_times_Im = T.dot(input_re, Im)
input_im_times_Re = T.dot(input_im, Re)
input_im_times_Im = T.dot(input_im, Im)
return T.concatenate([input_re_times_Re - input_im_times_Im,
input_re_times_Im + input_im_times_Re], axis=1)
'''Note, the equation for this comes from Section 3 of the complex paper, first bullet point:
D_{j,j} = e^{i w_j}
To accomplish this, they use Euler's formula instead of the exponent:
e^{i x} = cos(x) + i sin(x)'''
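# Editor's sketch (not part of the original file): a NumPy check that the
# real/imaginary block arithmetic used in times_diag matches multiplication by
# the diagonal matrix D with D[j, j] = e^{i * w_j}. Assumes numpy is available.
def _check_times_diag_numpy():
    import numpy as np
    rng = np.random.RandomState(0)
    w = rng.randn(4)                              # phases w_j
    z = rng.randn(3, 4) + 1j * rng.randn(3, 4)    # complex "hidden state"
    expected = z * np.exp(1j * w)                 # multiply by diag(e^{i w})
    # Euler's formula in block form, as in the functions above and below:
    re = z.real * np.cos(w) - z.imag * np.sin(w)
    im = z.real * np.sin(w) + z.imag * np.cos(w)
    assert np.allclose(expected.real, re) and np.allclose(expected.imag, im)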
#CONVERTED TENSORFLOW
def times_diag_tf(input_matrix, n_hidden, diag):
input_re = input_matrix[:, :n_hidden] #okay so the first left half of the matrix is real numbers
input_im = input_matrix[:, n_hidden:] #the right half is the imaginary numbers that correspond
Re = tf.diag(tf.cos(diag))
Im = tf.diag(tf.sin(diag))
input_re_times_Re = tf.matmul(input_re, Re) #matmul is the equivalent of dot
input_re_times_Im = tf.matmul(input_re, Im)
input_im_times_Re = tf.matmul(input_im, Re)
input_im_times_Im = tf.matmul(input_im, Im)
return tf.concat(1, [input_re_times_Re - input_im_times_Im,
input_re_times_Im + input_im_times_Re]) #this will combine two matrixes
#nick, this concatenate at the end is the equation number 7 i think....
#in the future, see if these can be done in one step and skip the concatenation
'''-----------------------------------NEXT FUNCTION TO WORK ON------------------'''
#ORIGINAL FUNCTION
def times_reflection(input, n_hidden, reflection):
input_re = input[:, :n_hidden]
input_im = input[:, n_hidden:]
reflect_re = reflection[:n_hidden]
reflect_im = reflection[n_hidden:]
vstarv = (reflect_re**2 + reflect_im**2).sum()
input_re_reflect = input_re - 2 / vstarv * (T.outer(T.dot(input_re, reflect_re), reflect_re)
+ T.outer(T.dot(input_re, reflect_im), reflect_im)
- T.outer(T.dot(input_im, reflect_im), reflect_re)
+ T.outer(T.dot(input_im, reflect_re), reflect_im))
input_im_reflect = input_im - 2 / vstarv * (T.outer(T.dot(input_im, reflect_re), reflect_re)
+ T.outer(T.dot(input_im, reflect_im), reflect_im)
+ T.outer(T.dot(input_re, reflect_im), reflect_re)
- T.outer(T.dot(input_re, reflect_re), reflect_im))
return T.concatenate([input_re_reflect, input_im_reflect], axis=1)
#TF CONVERTED
'''Note, the equation for this comes from Section 3, second bullet point
R = I - (2vv*/||v||2)'''
def times_reflection_tf(input, n_hidden, reflection):
    input_re = input[:, :n_hidden]
    input_im = input[:, n_hidden:]
    reflect_re = reflection[:n_hidden]
    reflect_im = reflection[n_hidden:]
    # ||v||^2 of the complex reflection vector v = reflect_re + i*reflect_im;
    # this scalar is what makes R = I - 2vv*/||v||^2 unitary
    vstarv = tf.reduce_sum(tf.square(reflect_re) + tf.square(reflect_im))
    def _dot(m, v):
        # (batch, n_hidden) . (n_hidden,) -> (batch,), the TF analogue of T.dot here
        return tf.reduce_sum(m * v, 1)
    def _outer(u, v):
        # (batch,) outer (n_hidden,) -> (batch, n_hidden), the TF analogue of T.outer
        return tf.expand_dims(u, 1) * tf.expand_dims(v, 0)
    input_re_reflect = input_re - 2 / vstarv * (_outer(_dot(input_re, reflect_re), reflect_re)
                                                + _outer(_dot(input_re, reflect_im), reflect_im)
                                                - _outer(_dot(input_im, reflect_im), reflect_re)
                                                + _outer(_dot(input_im, reflect_re), reflect_im))
    input_im_reflect = input_im - 2 / vstarv * (_outer(_dot(input_im, reflect_re), reflect_re)
                                                + _outer(_dot(input_im, reflect_im), reflect_im)
                                                + _outer(_dot(input_re, reflect_im), reflect_re)
                                                - _outer(_dot(input_re, reflect_re), reflect_im))
    return tf.concat(1, [input_re_reflect, input_im_reflect])
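# Editor's sketch (not part of the original file): a NumPy check that the
# complex Householder reflection R = I - 2 v v* / ||v||^2 referenced above is
# unitary, i.e. R R* = I. Assumes numpy is available.
def _check_reflection_unitary():
    import numpy as np
    rng = np.random.RandomState(0)
    v = rng.randn(5) + 1j * rng.randn(5)
    R = np.eye(5) - 2.0 * np.outer(v, v.conj()) / np.vdot(v, v).real
    assert np.allclose(np.dot(R, R.conj().T), np.eye(5))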
#ORIGINAL FUNCTION
def vec_permutation(input, n_hidden, index_permute):
re = input[:, :n_hidden]
im = input[:, n_hidden:]
re_permute = re[:, index_permute]
im_permute = im[:, index_permute]
return T.concatenate([re_permute, im_permute], axis=1)
'''Section 3, bullet 3 --
Pi, a fixed random index permutation matrix.
A permutation matrix consists of ones and zeros.
http://mathworld.wolfram.com/PermutationMatrix.html
'''
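# Editor's sketch (not part of the original file): fancy-indexing a matrix's
# columns is equivalent to right-multiplying by a permutation matrix Pi, which
# is what vec_permutation relies on. Assumes numpy is available.
def _check_permutation_equivalence():
    import numpy as np
    rng = np.random.RandomState(0)
    x = rng.randn(3, 4)
    perm = rng.permutation(4)
    Pi = np.eye(4)[:, perm]     # permutation matrix built from the index list
    assert np.allclose(x[:, perm], np.dot(x, Pi))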
#TF CONVERTED FUNCTION
def vec_permutation_tf(input, n_hidden, index_permute): #I don't get this...why do we do this?
re = input[:, :n_hidden]
im = input[:, n_hidden:]
    re_permute = re[:, index_permute] #this part means you keep the batch size and choose one index to permute?
im_permute = im[:, index_permute]
return tf.concat(1, [re_permute, im_permute])
def unitary_linear(args, output_size, bias, bias_start=0.0, scope=None):
"""Linear map: sum_i(args[i] * W[i]), where W[i] is a variable.
Args:
args: a 2D Tensor or a list of 2D, batch x n, Tensors.
output_size: int, second dimension of W[i].
bias: boolean, whether to add a bias term or not.
bias_start: starting value to initialize the bias; 0 by default.
scope: VariableScope for the created subgraph; defaults to "Linear".
Returns:
A 2D Tensor with shape [batch x output_size] equal to
sum_i(args[i] * W[i]), where W[i]s are newly created matrices.
Raises:
ValueError: if some of the arguments has unspecified or wrong shape.
"""
assert args
if not isinstance(args, (list, tuple)):
args = [args]
# Calculate the total size of arguments on dimension 1.
total_arg_size = 0
shapes = [a.get_shape().as_list() for a in args]
for shape in shapes:
if len(shape) != 2:
raise ValueError("Linear is expecting 2D arguments: %s" % str(shapes))
if not shape[1]:
raise ValueError("Linear expects shape[1] of arguments: %s" % str(shapes))
else:
total_arg_size += shape[1]
# Now the computation.
with tf.variable_scope(scope or "Unitary_Linear"):
matrix = tf.get_variable("Unitary_Matrix", [total_arg_size, output_size],
initializer = unitary_initializer())
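        # NOTE (editor): unitary_initializer() is not defined in this file and is
        # assumed to be provided elsewhere in the package.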
if len(args) == 1:
res = tf.matmul(args[0], matrix)
else:
res = tf.matmul(tf.concat(1, args), matrix)
if not bias:
return res
bias_term = tf.get_variable("Unitary_Bias", [output_size],
initializer=tf.constant_initializer(bias_start))
return res + bias_term
| 2.578125 | 3 |
tests/scheduler/schemas/examples/test_utils.py | quantify-os/quantify-scheduler | 1 | 12785625 | # pylint: disable=missing-module-docstring
# pylint: disable=missing-class-docstring
# pylint: disable=missing-function-docstring
import pytest
from quantify_scheduler.schemas.examples import utils
@pytest.mark.parametrize(
"filename",
[
"qblox_test_mapping.json",
"transmon_test_config.json",
"zhinst_test_mapping.json",
],
)
def test_load_json_example_scheme(filename: str):
utils.load_json_example_scheme(filename)
| 1.90625 | 2 |
eval.py | cbschaff/nlimb | 12 | 12785626 | from deeplearning import tf_util as U
from init import make_env_fn, make_model_fn
from collections import namedtuple
import os, argparse, json
import numpy as np
def eval_robot(args, env, pi):
rewards = []
lengths = []
for j in range(args.nepisodes):
rewards.append(0)
lengths.append(0)
done = False
ob = env.reset()
while not done:
ac = pi.actor.mode(ob[None])[0]
ob, rew, done, _ = env.step(ac)
rewards[-1] += rew
lengths[-1] += 1
return np.mean(lengths), np.mean(rewards)
def main(args):
U.reset()
with open(os.path.join(args.logdir, 'hyps.json'), 'r') as f:
hyps = json.load(f)
train_args = namedtuple('Args', hyps.keys())(**hyps)
env_fn = make_env_fn(train_args)
model_fn = make_model_fn(train_args)
env = env_fn(0)
model = model_fn(env)
model.build('model', 1, 1)
model.sampler.build('model', 1, 1)
sess = U.make_session()
sess.__enter__()
U.initialize()
t = U.Experiment(args.logdir).load(args.ckpt)
ls = []
rs = []
for i in range(args.samples):
env.update_robot(model.sampler.sample(args.stochastic)[0])
l,r = eval_robot(args, env, model)
ls.append(l)
rs.append(r)
if not args.stochastic:
break
os.makedirs(os.path.join(args.logdir, 'eval'), exist_ok=True)
with open(os.path.join(args.logdir, 'eval', '{}.json'.format(t)), 'w') as f:
json.dump({'l':ls, 'r':rs}, f)
sess.__exit__(None, None, None)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Evaluate a Checkpoint')
parser.add_argument('logdir', type=str, help='log directory')
parser.add_argument('-t', '--ckpt', type=int, default=None, help='which checkpoint file to use')
parser.add_argument('-n', '--nepisodes', type=int, default=1, help='n episodes to show')
parser.add_argument('-s', '--samples', type=int, default=1, help='# of robots to sample')
parser.add_argument('--stochastic', type=bool, default=True, help='If false, eval the mode of the robot distribution')
main(parser.parse_args())
| 2.40625 | 2 |
bootimgpack/pack_bootimg.py | hchyhchyxh/tools | 45 | 12785627 | #!/usr/bin/python
# Copyright 2015 duanqz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Usage: pack_bootimg.py BOOT_IMG_DIR [OUTPUT_IMG]
- BOOT_IMG_DIR : the directory of boot image files.
- OUTPUT_IMG : the output image after pack. If not present, BOOT_IMG_DIR.img will be used
"""
__author__ = '<EMAIL>'
from internal import bootimg
import sys
import traceback
if __name__ == '__main__':
argc = len(sys.argv)
if argc <= 1:
print __doc__
exit(1)
if argc > 1:
boot_dir = sys.argv[1]
output = boot_dir + ".img"
if argc > 2:
output = sys.argv[2]
try:
bootimg.pack(boot_dir, output)
except ValueError as ve:
traceback.print_exc()
# See help.xml ERR_PACK_BOOTIMG_FAILED
sys.exit(154)
| 2.390625 | 2 |
algorithms/kth-smallest-element-in-a-bst.py | Chronoviser/leetcode-1 | 41 | 12785628 | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def traversal(self, root):
if root:
yield from self.traversal(root.left)
yield root.val
yield from self.traversal(root.right)
def kthSmallest(self, root, k):
"""
:type root: TreeNode
:type k: int
:rtype: int
"""
nums = list(self.traversal(root))
return nums[k - 1]
| 3.75 | 4 |
module/active/csp_info.py | b1ackc4t/getdomain | 0 | 12785629 | <gh_stars>0
from requests import get, exceptions
from tldextract import extract
import asyncio
import aiohttp
class CSPInfo(object):
"""
    Collect subdomains using the CSP (Content-Security-Policy) header
"""
def __init__(self, url):
"""
        :param apex_domain: the apex (registered) domain referenced in the CSP header
        :param ip: IP address of the domain
        :param count: number of subdomains found for the domain
        :param status: whether the domain is reachable
        :param url: the URL to probe
"""
self.apex_domain = ""
self.ip = ""
self.count = ""
self.status = True
self.url = url
self.csp_header = ''
self.sub_domains = set()
# def create_url(self):
# """
    #     Build a URL from the domain and set the status code
# """
# url_append = ["http://", "https://"]
# for ua in url_append:
# url_test = ua + self.domain
# r = get(url_test)
# if r.status_code == 200:
# self.url = url_test
# self.status = False
async def get_csp_header(self):
"""
        Fetch the CSP header of the URL
"""
try:
async with aiohttp.request('HEAD', url=self.url, headers={'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.9 Safari/537.36'}) as r:
await r.text()
        except aiohttp.ClientError as e:  # aiohttp raises its own errors, not requests'
            print(e)
            self.status = False
            return
if 'Content-Security-Policy' in r.headers:
csp_header = r.headers['Content-Security-Policy']
self.csp_header = csp_header
self.get_sub_domains()
elif 'Content-Security-Policy-report-only' in r.headers:
csp_header = r.headers['Content-Security-Policy-report-only']
self.csp_header = csp_header
self.get_sub_domains()
else:
self.status = False
def get_sub_domains(self):
"""
        Pull the raw URL list out of the CSP header,
        clean it and extract the subdomains,
        storing them in self.sub_domains
"""
csp_sub_domains = []
csp_header_list = self.csp_header.split(" ")
for line in csp_header_list:
if "." in line:
line = line.replace(';', '')
csp_sub_domains.append(line)
else:
pass
        # clean up the raw entries
# print(csp_sub_domains)
domain_ext = extract(self.url)
for csp_url in csp_sub_domains:
ext = extract(csp_url)
if ext[0] not in ['*', ''] and ext[1] == domain_ext[1] and ext[2] == domain_ext[2]:
self.sub_domains.add('.'.join(ext))
# print(self.sub_domains)
async def main(url):
"""
    Public entry point.
    :param url: the URL whose CSP header should be inspected
    :return: the set of subdomains that were collected
"""
    # build the helper object
csp_info = CSPInfo(url)
    # fetch the CSP header of the target URL
await asyncio.ensure_future(csp_info.get_csp_header())
    # the subdomains were extracted as a side effect
# print(csp_info.sub_domains)
return csp_info.sub_domains
# DNS resolution
# if resolve:
# domains = resolve_domains(domains)
# check whois information
# if check_whois:
# domains = check_whois_domains(domains)
if __name__ == '__main__':
# print(main("http://flipkart.com"))
loop = asyncio.get_event_loop()
    loop.run_until_complete(main("http://flipkart.com")) # run a single task
| 2.796875 | 3 |
Themis2.0/grid.py | austinatchley/Themis | 88 | 12785630 | <filename>Themis2.0/grid.py
import sys
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtCore import *
import xml.etree.ElementTree as ET
import themis2
class App(QDialog):
def __init__(self):
super().__init__()
self.title = 'Themis 2.0'
self.left = 100
self.top = 100
self.width = 800
self.height = 1000
self.tree = None
self.initUI()
def initUI(self):
self.setWindowTitle(self.title)
self.setGeometry(self.left, self.top, self.width, self.height)
self.createThemisGrid()
windowLayout = QVBoxLayout()
windowLayout.addWidget(self.horizontalGroupBox4)
windowLayout.addWidget(self.horizontalGroupBox)
windowLayout.addWidget(self.horizontalGroupBox2)
windowLayout.addWidget(self.horizontalGroupBox3)
self.setLayout(windowLayout)
self.show()
def createThemisGrid(self):
self.horizontalGroupBox4 = QGroupBox()
layout4 = QGridLayout()
command_box_label = QLabel("Command:")
self.command_box = QLineEdit(self)
seed_box_label = QLabel("Random seed:")
self.seed_box = QLineEdit(self)
max_box_label = QLabel("Max Samples:")
self.max_box = QLineEdit(self)
min_box_label = QLabel("Min Samples:")
self.min_box = QLineEdit(self)
layout4.addWidget(command_box_label,1, 0)
layout4.addWidget(self.command_box,1, 1)
layout4.addWidget(seed_box_label,2,0)
layout4.addWidget(self.seed_box,2,1)
layout4.addWidget(max_box_label,3,0)
layout4.addWidget(self.max_box,3,1)
layout4.addWidget(min_box_label,4,0)
layout4.addWidget(self.min_box,4,1)
self.horizontalGroupBox = QGroupBox("Inputs")
layout = QGridLayout()
layout.setSpacing(5)
self.createInputsTable()
load_button = QPushButton('Load...')
load_button.clicked.connect(self.handleLoadButton)
save_button = QPushButton('Save...')
save_button.clicked.connect(self.handleSaveButton)
add_button = QPushButton('Add Input...')
add_button.clicked.connect(self.handleAddButton)
self.dialog = EditInputWindow()
layout.addWidget(self.inputs_table,0,1, 3, 4)
layout.addWidget(load_button,5, 1)
layout.addWidget(save_button,5, 2)
layout.addWidget(add_button,5,3)
self.horizontalGroupBox2 = QGroupBox("Tests")
layout2 = QGridLayout()
self.createTestsTable()
add_test_button = QPushButton("Add Test...")
layout2.addWidget(self.tests_table, 5, 4, 4, 4)
layout2.addWidget(add_test_button, 9, 4)
self.horizontalGroupBox3 = QGroupBox("")
layout3 = QGridLayout()
run_button = QPushButton("Run")
# run themis
run_button.clicked.connect(self.runThemis)
self.results_box = QTextEdit()
self.results_box.setReadOnly(True)
layout3.addWidget(run_button,1, 1)
layout3.addWidget(self.results_box, 2, 1, 5, 5)
self.horizontalGroupBox.setLayout(layout)
self.horizontalGroupBox2.setLayout(layout2)
self.horizontalGroupBox3.setLayout(layout3)
self.horizontalGroupBox4.setLayout(layout4)
def runThemis(self):
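        # NOTE (editor): self.tester is not created anywhere in this file; it is
        # assumed to be provided elsewhere (the themis2 module is imported above).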
self.tester.run()
        self.results_box.setText(self.getTesterOutput())
def getTesterOutput(self):
results = self.tester.output
return results
def handleAddButton(self):
self.dialog.setModal(True)
self.dialog.show()
def handleLoadButton(self):
dialog = QFileDialog()
filename = dialog.getOpenFileName(self, "Open File", "/home")
if filename[0]:
self.file = open(filename[0], 'r')
# add themis instance with loaded file
self.processSettingsFiles()
def processSettingsFiles(self):
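        # Editor's note (inferred from the parsing below): the settings XML is
        # assumed to look roughly like
        #   <root>
        #     <command>...</command><seed>...</seed>
        #     <max_samples>...</max_samples><min_samples>...</min_samples>
        #     <input><name>...</name><type>categorical</type><value>...</value></input>
        #     <test><function>...</function><conf>...</conf><margin>...</margin></test>
        #   </root>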
self.tree = ET.parse(self.file)
root = self.tree.getroot()
run_command = root.find('command').text
self.command_box.setText(run_command)
seed = root.find('seed').text
self.seed_box.setText(seed)
max_samples = root.find('max_samples').text
self.max_box.setText(max_samples)
min_samples = root.find('min_samples').text
self.min_box.setText(min_samples)
# column 1 = Input Name
# column 2 = Input Type
# column 3 = Values
#for categorical values
self.inputs = []
ctr = 0
for run_input in root.iter('input'):
name = run_input.find('name').text
print(name)
            self.inputs.append(name)
            self.setCellValue(name, ctr, 1)  # column 1 = Input Name (see the mapping comment above)
categoricalFlag = False
for j in run_input.iter('type'):
if j.text == "categorical":
categoricalFlag = True
values = []
if(categoricalFlag is True):
for i in run_input.iter('value'):
values.append(i.text)
else:
for lbound in run_input.iter('lowerbound'):
values.append(lbound.text)
for ubound in run_input.iter('upperbound'):
values.append(ubound.text)
if (len(values) != 0):
self.setCellValue(values.__str__(), ctr, 3)
ctr += 1
index = 0
for run_test in root.iter('test'):
function = ""
configuration = ""
margin = ""
for func in run_test.iter("function"):
function = func.text
for config in run_test.iter("conf"):
configuration = config.text
for marg in run_test.iter("margin"):
margin = marg.text
print(function)
print(configuration)
print(margin)
print("Got all the values")
self.setTestTableValue(function,index,1)
self.setTestTableValue(configuration, index, 2)
self.setTestTableValue(margin, index, 3)
index += 1
def handleSaveButton(self):
self.tree.write("settings")
def createInputsTable(self):
self.inputs_table = QTableWidget()
self.inputs_table.setRowCount(10)
self.inputs_table.setColumnCount(4)
self.inputs_table.setHorizontalHeaderLabels(["", "Input Name", "Input Type", "Values"])
# pass in row to create buttons on that row
for i in range(self.inputs_table.rowCount()):
self.createEditButtons(self.inputs_table, i)
self.inputs_table.horizontalHeader().setStretchLastSection(True)
self.inputs_table.resizeRowsToContents()
self.inputs_table.resizeColumnsToContents()
self.inputs_table.setEditTriggers(QAbstractItemView.NoEditTriggers)
self.inputs_table.verticalHeader().setVisible(False)
def createTestsTable(self):
self.tests_table = QTableWidget()
self.tests_table.setRowCount(10)
self.tests_table.setColumnCount(5)
self.tests_table.setHorizontalHeaderLabels(["", "Name", "Confidence", "Margin", "Notes"])
for i in range(self.tests_table.rowCount()):
self.createEditButtons(self.tests_table,i)
self.tests_table.horizontalHeader().setStretchLastSection(True)
self.tests_table.resizeRowsToContents()
self.tests_table.resizeColumnsToContents()
self.tests_table.setEditTriggers(QAbstractItemView.NoEditTriggers)
self.tests_table.verticalHeader().setVisible(False)
def createEditButtons(self, table, row):
layout = QHBoxLayout()
layout.setContentsMargins(2,2,2,2)
layout.setSpacing(10)
delete_btn = QPushButton(table)
delete_btn.setText("Delete")
delete_btn.adjustSize()
layout.addWidget(delete_btn)
edit_btn = QPushButton(table)
edit_btn.setText("Edit...")
layout.addWidget(edit_btn)
cellWidget = QWidget()
cellWidget.setLayout(layout)
table.setCellWidget(row,0,cellWidget)
def setCellValue(self, value, row, column):
new_input = QTableWidgetItem()
new_input.setText(value)
self.inputs_table.setItem(row,column,new_input)
def setTestTableValue(self, value, row, column):
new_input = QTableWidgetItem()
new_input.setText(value)
self.tests_table.setItem(row,column,new_input)
class EditInputWindow(QDialog):
def __init__(self):
super().__init__()
self.title = 'Add or Edit Inputs'
self.left = 100
self.top = 100
self.width = 500
self.height = 300
self.initUI()
def initUI(self):
self.setWindowTitle(self.title)
self.setGeometry(self.left, self.top, self.width, self.height)
self.createGrid()
windowLayout = QVBoxLayout()
windowLayout.addWidget(self.horizontalGroupBox)
self.setLayout(windowLayout)
## self.show()
def createGrid(self):
self.horizontalGroupBox = QGroupBox("")
layout = QGridLayout()
name_label = QLabel("Input name: ")
self.name_box = QLineEdit(self)
layout.addWidget(name_label, 1, 1)
layout.addWidget(self.name_box, 1, 2)
type_label = QLabel("Input type: ")
self.types = QComboBox()
self.types.addItem("Categorical")
self.types.addItem("Continuous Int")
layout.addWidget(type_label, 2, 1)
layout.addWidget(self.types, 2, 2)
self.values_label = QLabel("Values (separated by commas): ")
self.values_box = QLineEdit(self)
layout.addWidget(self.values_label, 3, 1)
layout.addWidget(self.values_box, 3, 2)
self.types.currentIndexChanged.connect(self.selectionChange)
self.add_button = QPushButton("Add")
layout.addWidget(self.add_button, 4, 1)
self.done_button = QPushButton("Done")
layout.addWidget(self.done_button, 4, 4)
self.horizontalGroupBox.setLayout(layout)
def selectionChange(self):
if self.types.currentText() == "Continuous Int":
self.values_label.setText("Enter range (e.g. 1-10) : ")
else:
self.values_label.setText("Values (separated by commas): ")
if __name__ == '__main__':
app = QApplication(sys.argv)
ex = App()
sys.exit(app.exec_())
| 2.578125 | 3 |
albow/input/Field.py | hasii2011/albow-python-3 | 6 | 12785631 |
import logging
from albow.widgets.Control import Control
from albow.input.TextEditor import TextEditor
class Field(Control, TextEditor):
"""
Field is an abstract base class for controls that edit a value with a textual representation. It provides
facilities for
- Converting between the text and internal representations of the value,
- For specifying minimum and maximum allowed values, and
- Controlling whether the value is allowed to be empty and what representation to use for an empty value.
A Field can be in two states, _editing_ and _non-editing_. In the non-editing state, the control displays
the value to which it is linked via its `ref` attribute. When the user focuses the control and begins typing,
it switches to the editing state. In this state, the text may be edited but the associated value is not yet
updated. When the `Return`, `Enter` or `Tab key` is pressed, or a mouse click occurs anywhere outside the field,
the value is updated and the control returns to the non-editing state. Updating of the value can also be
forced by calling the `commit()` method.
"""
DEFAULT_WIDTH = 100
empty = NotImplemented
"""
Internal value to use when the field is empty. If set to NotImplemented, the user is not allowed to enter
an empty value.
"""
format = "%s"
"""
Format string to use when converting the internal representation to text. See also format_value() below.
"""
min: int = None
"""
Minimum allowable value. If `None`, no minimum value will be enforced.
"""
max: int = None
"""
Maximum allowable value. If `None`, no maximum value will be enforced.
"""
type = None
"""
A function for converting from text to the internal representation. Typically a type object, but
can be any callable object.
"""
editing: bool = None
"""
_Read only_. A boolean which is true when the control is in the editing state.
"""
insertion_point = None
def __init__(self, width=None, **kwds):
"""
Args:
width: The width may be an integer or a string, as for TextEditor. If no width is specified, but a
value for min and/or max is specified at construction time, the width will be determined from
the min or max value. If no other way of determining the width is available, it defaults to 100.
**kwds:
"""
self.logger = logging.getLogger(__name__)
if 'format' in kwds:
self.format = kwds.pop('format')
if 'empty' in kwds:
self.empty = kwds.pop('empty')
self.editing = False
predictedWidth = self._predictWidth(kwds, width)
TextEditor.__init__(self, width=predictedWidth, **kwds)
def _predictWidth(self, kwds, theWidth):
minimum = self.predict_attr(kwds, 'min')
maximum = self.predict_attr(kwds, 'max')
predictedWidth = theWidth
if theWidth is None:
w1 = 0
w2 = 0
if minimum is not None:
w1 = minimum
if maximum is not None:
w2 = maximum
if w1 > w2:
predictedWidth = w1
else:
predictedWidth = w2
if predictedWidth == 0 and theWidth is None:
predictedWidth = Field.DEFAULT_WIDTH
self.logger.debug(f"predictedWidth: {predictedWidth}")
return predictedWidth
def format_value(self, theValueToFormat):
"""
This method is called to format the value for display. By default it uses the format string specified by
the format attribute. You can override this method to format the value in a different way.
Args:
theValueToFormat: The value
Returns: The formatted value
"""
if theValueToFormat == self.empty:
return ""
else:
return self.format % theValueToFormat
def get_text(self):
if self.editing:
return self._text
else:
return self.format_value(self.value)
def set_text(self, theNewText):
self.editing = True
self._text = theNewText
def enter_action(self):
if self.editing:
self.commit()
return 'pass'
def escape_action(self):
if self.editing:
self.editing = False
self.insertion_point = None
else:
return 'pass'
def attention_lost(self):
self.commit()
def commit(self):
"""
When in the editing state, causes the control's value to be updated and places the control
in the non-editing state.
"""
if self.editing:
text = self._text
if text:
try:
value = self.type(text)
except ValueError:
return
if self.min is not None:
value = max(self.min, value)
if self.max is not None:
value = min(self.max, value)
else:
value = self.empty
if value is NotImplemented:
return
self.value = value
self.editing = False
self.insertion_point = None
else:
self.insertion_point = None
| 3.59375 | 4 |
tests/find_func_r2.py | CAFA1/angrop | 0 | 12785632 | import os
import subprocess
import r2pipe
import sys
#return file name
work_dir='/home/l/Downloads/test/'
def get_file_name(file_dir):
file_elf=[]
for root,dirs,files in os.walk(file_dir):
for file in files:
out_bytes=subprocess.check_output(['file',os.path.join(root,file)])
if(out_bytes.find('ELF')!=-1):
#print out_bytes
file_elf.append(out_bytes.split(':')[0])
return file_elf
#return file name
def get_file_name_strings(file_dir):
#system
string_interesting='"evil|system|read|recv|popen|hack|exec|setuid|http|send|write"'
file_elf=[]
i=0
for root,dirs,files in os.walk(file_dir):
for file in files:
this_file=os.path.join(root,file)
out_bytes=subprocess.check_output(['file',os.path.join(root,file)])
#print 'file output:\n'+out_bytes
if(out_bytes.find('ELF')!=-1 and out_bytes.find('LSB relocatable')==-1):
try:
out_bytes1=subprocess.check_output('strings '+os.path.join(root,file)+' |egrep '+string_interesting,shell=True)
print 'string output: '+out_bytes1
if(out_bytes1!=''):
print 'find file : '+this_file+' !!!!!!' + ' '+str(i)
file_elf.append(this_file)
i=i+1
except:
pass
return file_elf
#return the file name which has the func
def get_func_elf(file_name_list,func_name):
file_elf_func=[]
for file_tmp in file_name_list:
r2 = r2pipe.open(file_tmp)
#axt find reference
read_str = r2.cmd("aaa;afl |grep "+func_name)
print read_str
if(read_str!=''):
file_elf_func.append(file_tmp)
print file_tmp
return file_elf_func
if __name__ == '__main__':
#main()
work_dir='/home/l/Downloads/test/'
if(len(sys.argv)!=2):
print "python find_func_r2.py dir"
exit()
dir1= sys.argv[1]
dir2=work_dir+dir1
#string1 = sys.argv[2]
files_name=get_file_name_strings(dir2)
for i in range(len(files_name)):
print i,files_name[i]
optinstr='which file do you want to test[0-'+str(len(files_name)-1)+']: '
input_file_int = input(optinstr)
test_file=files_name[input_file_int]
cp_cmd = 'python test_cp.py '+test_file+' '+dir1
os.system(cp_cmd)
print 'find_func_r2.py ok' | 2.84375 | 3 |
lesson2_netmiko/ex6f.py | anejolazaro70/python_july19 | 0 | 12785633 | #!/usr/bin/python
from datetime import datetime
from netmiko import ConnectHandler
from pprint import pprint
from getpass import getpass
import time
password = getpass()
device={"host": "cisco4",
"username": "user",
"password": password,
'secret': password,
"device_type": "cisco_ios",
"session_log": "cisco4_6f.txt"}
t1=datetime.now()
ssh_con=ConnectHandler(**device)
ssh_con.enable()
prompt=ssh_con.find_prompt()
print(prompt)
#ssh_con.disconnect()
t2=datetime.now()
t3=t2-t1
print("\nINICIO: ", t1)
print('\nFIN: ', t2)
print('\nDuracion ejecucion comando: ', t3)
| 2.5 | 2 |
venv/Lib/site-packages/PySide2/examples/installer_test/hello.py | TEDxVienna/continuum | 0 | 12785634 | # This Python file uses the following encoding: utf-8
# It has been edited by fix-complaints.py .
#############################################################################
##
## Copyright (C) 2019 The Qt Company Ltd.
## Contact: https://www.qt.io/licensing/
##
## This file is part of Qt for Python.
##
## $QT_BEGIN_LICENSE:LGPL$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms
## and conditions see https://www.qt.io/terms-conditions. For further
## information use the contact form at https://www.qt.io/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 3 as published by the Free Software
## Foundation and appearing in the file LICENSE.LGPL3 included in the
## packaging of this file. Please review the following information to
## ensure the GNU Lesser General Public License version 3 requirements
## will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 2.0 or (at your option) the GNU General
## Public license version 3 or any later version approved by the KDE Free
## Qt Foundation. The licenses are as published by the Free Software
## Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
## included in the packaging of this file. Please review the following
## information to ensure the GNU General Public License requirements will
## be met: https://www.gnu.org/licenses/gpl-2.0.html and
## https://www.gnu.org/licenses/gpl-3.0.html.
##
## $QT_END_LICENSE$
##
#############################################################################
"""
hello.py
--------
This simple script shows a label with changing "Hello World" messages.
It can be used directly as a script, but we use it also to automatically
test PyInstaller. See testing/wheel_tester.py .
When used with PyInstaller, it automatically stops its execution after
2 seconds.
"""
from __future__ import print_function
import sys
import random
import platform
import time
from PySide2.QtWidgets import (QApplication, QLabel, QPushButton,
QVBoxLayout, QWidget)
from PySide2.QtCore import Slot, Qt, QTimer
class MyWidget(QWidget):
def __init__(self):
QWidget.__init__(self)
self.hello = ["<NAME>", "你好,世界", "<NAME>",
"<NAME>", "Привет мир"]
self.button = QPushButton("Click me!")
self.text = QLabel("Hello World embedded={}".format(sys.pyside_uses_embedding))
self.text.setAlignment(Qt.AlignCenter)
self.layout = QVBoxLayout()
self.layout.addWidget(self.text)
self.layout.addWidget(self.button)
self.setLayout(self.layout)
# Connecting the signal
self.button.clicked.connect(self.magic)
@Slot()
def magic(self):
self.text.setText(random.choice(self.hello))
if __name__ == "__main__":
print("Start of hello.py ", time.ctime())
print(" sys.version = {}".format(sys.version.splitlines()[0]))
print(" platform.platform() = {}".format(platform.platform()))
app = QApplication()
widget = MyWidget()
widget.resize(800, 600)
widget.show()
if sys.pyside_uses_embedding:
milliseconds = 2 * 1000 # run 2 second
QTimer.singleShot(milliseconds, app.quit)
retcode = app.exec_()
print("End of hello.py ", time.ctime())
sys.exit(retcode)
| 1.429688 | 1 |
dbus_async/__init__.py | hugosenari/dbus_async | 1 | 12785635 | __author__ = '<NAME>'
__email__ = '<EMAIL>'
__version__ = '0.1.0'
from .core import Session, System, Bus
from .client import Object
from .service import Service | 1.195313 | 1 |
supports/pyload/src/pyload/plugins/accounts/DepositfilesCom.py | LuckyNicky/pycrawler | 1 | 12785636 | # -*- coding: utf-8 -*-
import re
import time
from ..base.account import BaseAccount
class DepositfilesCom(BaseAccount):
__name__ = "DepositfilesCom"
__type__ = "account"
__version__ = "0.39"
__status__ = "testing"
__pyload_version__ = "0.5"
__description__ = """Depositfiles.com account plugin"""
__license__ = "GPLv3"
__authors__ = [
("mkaay", "<EMAIL>"),
("stickell", "<EMAIL>"),
("<NAME>", "<EMAIL>"),
]
def grab_info(self, user, password, data):
html = self.load("https://dfiles.eu/de/gold/")
validuntil = re.search(
r"Sie haben Gold Zugang bis: <b>(.*?)</b></div>", html
).group(1)
validuntil = time.mktime(time.strptime(validuntil, "%Y-%m-%d %H:%M:%S"))
return {"validuntil": validuntil, "trafficleft": -1}
def signin(self, user, password, data):
html = self.load(
"https://dfiles.eu/de/login.php",
get={"return": "/de/gold/payment.php"},
post={"login": user, "password": password},
)
if (
r'<div class="error_message">Sie haben eine falsche Benutzername-Passwort-Kombination verwendet.</div>'
in html
):
self.fail_login()
| 2.4375 | 2 |
CONTENT/PYTHON/LEETCODE/155_min_stack/min_stack.py | impastasyndrome/DS-ALGO-OFFICIAL | 13 | 12785637 | <filename>CONTENT/PYTHON/LEETCODE/155_min_stack/min_stack.py<gh_stars>10-100
class MinStack:
# initialize your data structure here.
def __init__(self):
self.nums = []
self.mins = []
# @param x, an integer
# @return nothing
def push(self, x):
self.nums.append(x)
if self.mins:
if x < self.nums[self.mins[-1]]:
self.mins.append(len(self.nums) - 1)
else:
self.mins.append(self.mins[-1])
else:
self.mins.append(0)
# @return nothing
def pop(self):
if self.nums:
del self.nums[-1]
del self.mins[-1]
# @return an integer
def top(self):
return self.nums[-1]
# @return an integer
def getMin(self):
return self.nums[self.mins[-1]]
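# Editor's usage sketch (not part of the original solution):
#   s = MinStack()
#   s.push(3); s.push(1); s.push(2)
#   s.getMin()  # -> 1
#   s.pop()     # removes 2
#   s.getMin()  # -> 1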
| 3.390625 | 3 |
tests/data/e2e_latency_replacements.py | FrNecas/requre | 4 | 12785638 | # Copyright Contributors to the Packit project.
# SPDX-License-Identifier: MIT
from requre.import_system import UpgradeImportSystem
from requre.simple_object import Simple
FILTERS = UpgradeImportSystem().decorate("time.sleep", Simple.decorator_plain())
| 1.492188 | 1 |
lab/lab3/dist/autograder/tests/q4e.py | ds-modules/PS-88-21-DEV | 0 | 12785639 | <reponame>ds-modules/PS-88-21-DEV
test = { 'name': 'q4e',
'points': 1,
'suites': [ { 'cases': [ {'code': '>>> 0 <= voter_2_pivotal_prob <= .25\nTrue', 'hidden': False, 'locked': False},
{'code': '>>> voter_2_pivotal_prob == sum(grouptrials.column("overall piv 2"))/ntrials\nTrue', 'hidden': True, 'locked': False},
{'code': '>>> 0 <= voter_3_pivotal_prob <= .25\nTrue', 'hidden': False, 'locked': False},
{'code': '>>> voter_3_pivotal_prob == sum(grouptrials.column("overall piv 3"))/ntrials\nTrue', 'hidden': True, 'locked': False}],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'}]}
| 1.585938 | 2 |
news_buddy/alembic/versions/42b486977799_added_tags_table.py | izacus/newsbuddy | 0 | 12785640 | """Added tags table
Revision ID: 42b486977799
Revises: <PASSWORD>
Create Date: 2014-02-05 23:57:37.029556
"""
# revision identifiers, used by Alembic.
revision = '42b486977799'
down_revision = '<PASSWORD>'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table('tags',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('tag_name', sa.UnicodeText, nullable=False),
sa.Column('tag_type', sa.Enum('PERSON', 'LOCATION', 'OTHER', name="tag_types")))
op.create_table('news_tags',
sa.Column('tag_id', sa.Integer, sa.ForeignKey('tags.id', ondelete="CASCADE")),
sa.Column('news_id', sa.String, sa.ForeignKey('news.id', ondelete="CASCADE")))
op.create_index('tags_name', 'tags', ['tag_name'])
def downgrade():
op.drop_index('tags_name')
op.drop_table('news_tags')
op.drop_table('tags')
| 1.59375 | 2 |
Python/54_SpiralMatrix.py | comicxmz001/LeetCode | 2 | 12785641 | class Solution(object):
def spiralOrder(self, matrix):
"""
:type matrix: List[List[int]]
:rtype: List[int]
"""
result = []
row = len(matrix)
if row == 0:
return matrix
if row == 1:
result = matrix[0]
return result
col = len(matrix[0])
direction ={'LEFT':0,'RIGHT':1, 'DOWN':2, 'UP':3}
margin = {'left':0,'right':col-1, 'top':0, 'bottom':row-1}
mSize = row*col
d = direction['RIGHT'] #direction
curRow = 0 #row cursor
curCol = 0 #col cursor
while mSize > 0:
result.append(matrix[curRow][curCol])
mSize -=1
if d == direction['RIGHT']:
if curCol == margin['right']:
d = direction['DOWN']
margin['top'] +=1
curRow += 1
continue
curCol +=1
continue
if d == direction['LEFT']:
if curCol == margin['left']:
d = direction['UP']
margin['bottom'] -= 1
curRow -=1
continue
curCol -=1
continue
if d == direction['DOWN']:
if curRow == margin['bottom']:
d = direction['LEFT']
margin['right'] -= 1
curCol -= 1
continue
curRow +=1
continue
if d == direction['UP']:
if curRow == margin['top']:
d = direction['RIGHT']
margin['left'] += 1
curCol += 1
continue
curRow -=1
continue
return result
matrix = [[1,2,3,4,5],[6,7,8,9,10],[11,12,13,14,15],[16,17,18,19,20],[21,22,23,24,25]]
#print matrix
foo = Solution()
print foo.spiralOrder(matrix) | 3.3125 | 3 |
bot.py | diogoscf/telegram-birthday-bot | 11 | 12785642 | """
Bot that wishes happy birthday
"""
import datetime
import json
import logging
import math
import os
import requests
import sys
from dotenv import load_dotenv
from telegram import Update
from telegram.ext import Updater, CommandHandler, CallbackContext
# Load .env file
load_dotenv()
TOKEN = os.getenv("TOKEN")
# Get ordinal function
ordinal = lambda n: "%d%s" % (
n,
"tsnrhtdd"[(math.floor(n / 10) % 10 != 1) * (n % 10 < 4) * n % 10 :: 4],
)
# Enabling logging
logging.basicConfig(
level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger()
def start_handler(update: Update, context: CallbackContext):
"""Handles the /start command"""
logger.info("User {} started bot".format(update.effective_user["id"]))
update.message.reply_text(
"Hello there! You have succesfully initiated the birthday wishing bot"
)
context.job_queue.run_once(
wishHB, 0, context={"context": update.message.chat_id, "first": True}
)
    nextHour = (datetime.datetime.utcnow().hour + 1) % 24  # wrap past midnight so datetime.time() stays valid
context.job_queue.run_repeating(
wishHB,
900,
context={"context": update.message.chat_id, "first": False},
first=datetime.time(nextHour),
) # Timezones can have offsets of 15 minutes and 15min = 900s
def wishHB(context: CallbackContext):
"""Wishes happy birthday"""
bdays = getBdays()
job = context.job
now = datetime.datetime.utcnow()
logger.info("RUN")
for p in bdays:
month = [p["utc_dob"].month, now.month]
day = [p["utc_dob"].day, now.day]
hour = [p["utc_dob"].hour, now.hour]
minute = [p["utc_dob"].minute, now.minute]
checkArr = [month, day, hour, minute]
if job.context["first"]:
there = now + p["delta"]
if there.day == p["dob"].day:
checkArr = [checkArr[0],]
if any(l[0] != l[1] for l in checkArr):
continue
age = now.year - p["utc_dob"].year
logger.info(
"Found birthday for {}! Wishing...".format(
p["username"] if len(p["username"]) else p["name"]
)
)
context.bot.send_message(
job.context["context"],
"Happy {} birthday {}!".format(
ordinal(age), p["username"] if len(p["username"]) else p["name"]
),
)
def getBdays():
"""Parses the birthdays.json file"""
# data = requests.get(
# "https://raw.githubusercontent.com/diogoscf/telegram-birthday-bot/master/birthdays.json"
# ).json()
data = json.load(open("birthdays.json", "r", encoding="utf-8"))
output = []
for p in data:
diff = [int(x) for x in p["tz"].replace("UTC", "").split(":")]
delta = datetime.timedelta(hours=diff[0], minutes=diff[1])
output.append(
{
"name": p["name"],
"dob": datetime.datetime.strptime(p["dob"], "%d.%m.%Y"),
"utc_dob": datetime.datetime.strptime(p["dob"], "%d.%m.%Y") - delta,
"username": p["username"],
"delta": delta,
}
)
return output
if __name__ == "__main__":
logger.info("Starting script")
updater = Updater(TOKEN, use_context=True)
updater.dispatcher.add_handler(CommandHandler("start", start_handler))
# updater.dispatcher.add_handler(CommandHandler('stop', Stop_timer, pass_job_queue=True))
updater.start_polling()
| 2.78125 | 3 |
DjangoPractice/DjangoPractice/serializers.py | UVA-DSI-2019-Capstones/ARL | 1 | 12785643 | from django.contrib.auth.models import User
from .models import TraineeResponseModel, MediaModel
from rest_framework import serializers
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'first_name', 'last_name', 'email')
class TraineeSerializer(serializers.ModelSerializer):
class Meta:
model = TraineeResponseModel
fields = ('id', 'avatar_prompt_id', 'identifier',
'response_text', 'response_score', 'response_feedback', 'comment')
class MediaFileSerializer(serializers.ModelSerializer):
    class Meta:
model = MediaModel
fields = ('file', 'identifier') | 2.09375 | 2 |
rpbp/reference_preprocessing/label_orfs.py | HeyLifeHD/rp-bp | 6 | 12785644 | <gh_stars>1-10
#! /usr/bin/env python3
"""This script labels the ORFs based on their exon
transcript structure with respect to annotated coding sequences
"""
import argparse
import logging
import pbio.misc.logging_utils as logging_utils
import pbio.utils.bed_utils as bed_utils
from rpbp.defaults import default_num_cpus
logger = logging.getLogger(__name__)
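# Editor's illustrative invocation (file names are hypothetical):
#   python label_orfs.py annotated_transcripts.bed extracted_orfs.bed labels.bed.gz \
#       --num-cpus 4 --label-prefix novel_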
def main():
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description='''Label the ORFs based on their transcript
exon structure wrt the annotated transcripts.''')
parser.add_argument('annotated_transcripts', help='''The annotated transcripts for the genome
in BED12+ format.''')
parser.add_argument('extracted_orfs', help='''The ORFs extracted from the transcripts
in BED12+ format.''')
parser.add_argument('out', help='''The output (BED12+.gz) file.''')
parser.add_argument('-e', '--annotated-exons', help='''The annotated transcript
exons can be passed with this option. If they are not given, they will be
split from the annotated transcripts.''', default=None)
parser.add_argument('-o', '--orf-exons', help='''The exon blocks for the ORFs, in BED6+ format,
obtained from "split-bed12-blocks". If they are not given, they will be split from the
extracted ORFs.''', default=None)
parser.add_argument('-n', '--nonoverlapping-label', help='''If this option is given,
then the ORFs which do not overlap the annotated transcripts at all will be given this label.
        By default, remaining out-of-frame overlapping ORFs are assigned the "overlap" label.
If not given, the ORFs outside of annotated regions are labeled as "suspect".''',
default=None)
parser.add_argument('-l', '--label-prefix', help='''This string is prepended to all labels
assigned to ORFs, e.g. to indicate ORFs from a de novo assembly (Rp-Bp assigns the label
"novel" to these, however the string is not prepended to "canonical ORFs").''',
default='')
parser.add_argument('-f', '--filter', help='''If this flag is given, then ORFs
which are completely covered by an annotated transcript are discarded. Use to filter
uninteresting ORFs from a de novo assembly.''', action='store_true')
parser.add_argument('-p', '--num-cpus', help='''The number of CPUs to use to perform
BED operations.''', type=int, default=default_num_cpus)
logging_utils.add_logging_options(parser)
args = parser.parse_args()
logging_utils.update_logging(args)
msg = "Reading annotated transcripts"
logger.info(msg)
annotated_transcripts = bed_utils.read_bed(args.annotated_transcripts)
# get the annotated transcript exons
if args.annotated_exons is None:
msg = "Splitting the annotated transcripts into exon blocks"
logger.info(msg)
annotated_exons = bed_utils.split_bed12(annotated_transcripts,
num_cpus=args.num_cpus,
progress_bar=True)
else:
msg = "Reading the annotated transcript exons"
logger.info(msg)
annotated_exons = bed_utils.read_bed(args.annotated_exons)
msg = "Reading extracted ORFs"
logger.info(msg)
extracted_orfs = bed_utils.read_bed(args.extracted_orfs)
if args.orf_exons is None:
msg = "Splitting the extracted ORFs into exon blocks"
logger.info(msg)
extracted_orf_exons = bed_utils.split_bed12(extracted_orfs,
num_cpus=args.num_cpus,
progress_bar=True)
else:
msg = "Reading the extracted ORFs exons"
logger.info(msg)
extracted_orf_exons = bed_utils.read_bed(args.orf_exons)
msg = "Found {} extracted ORFs with {} exons".format(len(extracted_orfs),
len(extracted_orf_exons))
logger.debug(msg)
# filter out the ORFs that are entirely within annotated transcripts
if args.filter:
msg = "Removing ORFs which are completely covered by the annotated transcripts"
logger.info(msg)
nonoverlapping_ids = bed_utils.subtract_bed(extracted_orf_exons,
annotated_exons,
min_a_overlap=1)
m_unfiltered = extracted_orfs['id'].isin(nonoverlapping_ids)
extracted_orfs = extracted_orfs[m_unfiltered]
# discard the unnecessary exons
m_unfiltered = extracted_orf_exons['id'].isin(nonoverlapping_ids)
extracted_orf_exons = extracted_orf_exons[m_unfiltered]
msg = "After filtering, {} extracted ORFs remain".format(len(extracted_orfs))
logger.info(msg)
# annotate and remove the ORFs which do not at all overlap the annotations
if args.nonoverlapping_label is not None:
nonoverlapping_ids = bed_utils.subtract_bed(extracted_orfs,
annotated_transcripts,
exons_a=extracted_orf_exons,
exons_b=annotated_exons)
m_nonoverlapping = extracted_orf_exons['id'].isin(nonoverlapping_ids)
extracted_orf_exons = extracted_orf_exons[~m_nonoverlapping]
m_nonoverlapping = extracted_orfs['id'].isin(nonoverlapping_ids)
extracted_orfs.loc[m_nonoverlapping, 'orf_type'] = args.nonoverlapping_label
msg = ("Found {} ORFs completely non-overlapping annotated transcripts".
format(len(nonoverlapping_ids)))
logger.info(msg)
msg = "Removing the annotated UTRs from the transcripts"
logger.info(msg)
canonical_orfs = bed_utils.retain_all_thick_only(annotated_transcripts,
num_cpus=args.num_cpus)
msg = "Splitting the canonical ORFs into exons"
logger.info(msg)
canonical_orf_exons = bed_utils.split_bed12(canonical_orfs,
num_cpus=args.num_cpus,
progress_bar=True)
msg = "Extracting annotated 5' leader regions"
logger.info(msg)
five_prime_regions = bed_utils.retain_all_five_prime_of_thick(
annotated_transcripts, num_cpus=args.num_cpus)
if len(five_prime_regions) == 0:
msg = "No annotated 5' leader regions were found"
logger.warning(msg)
msg = "Splitting the 5' leaders into exons"
logger.info(msg)
five_prime_exons = bed_utils.split_bed12(five_prime_regions,
num_cpus=args.num_cpus,
progress_bar=True)
msg = "Extracting annotated 3' trailer regions"
logger.info(msg)
three_prime_regions = bed_utils.retain_all_three_prime_of_thick(
annotated_transcripts, num_cpus=args.num_cpus)
if len(three_prime_regions) == 0:
msg = "No annotated 3' trailer regions were found"
logger.warning(msg)
msg = "Splitting the 3' trailers into exons"
logger.info(msg)
three_prime_exons = bed_utils.split_bed12(three_prime_regions,
num_cpus=args.num_cpus,
progress_bar=True)
msg = "Splitting non-coding transcripts into exons"
logger.info(msg)
m_no_thick_start = annotated_transcripts['thick_start'] == -1
m_no_thick_end = annotated_transcripts['thick_end'] == -1
m_no_thick = m_no_thick_start & m_no_thick_end
noncoding_transcripts = annotated_transcripts[m_no_thick]
noncoding_exons = bed_utils.split_bed12(noncoding_transcripts,
num_cpus=args.num_cpus,
progress_bar=True)
# First, remove all in-frame (canonical, canonical variants), and also within and oof ORFs
msg = "Marking canonical and extracted ORFs with the same stop codon"
logger.info(msg)
# first, add the "true" ORF end
m_reverse_canonical = canonical_orfs['strand'] == '-'
canonical_orfs['orf_end'] = canonical_orfs['end']
canonical_orfs.loc[m_reverse_canonical, 'orf_end'] = canonical_orfs.loc[m_reverse_canonical, 'start']
m_reverse_extracted = extracted_orfs['strand'] == '-'
extracted_orfs['orf_end'] = extracted_orfs['end']
extracted_orfs.loc[m_reverse_extracted, 'orf_end'] = extracted_orfs.loc[m_reverse_extracted, 'start']
# then, find extracted ORFs with the same "orf_end" (and seqname, strand) as canonical ORFs
merge_fields = ['seqname', 'strand', 'orf_end']
canonical_extracted_orf_ends = canonical_orfs.merge(extracted_orfs,
on=merge_fields,
suffixes=['_canonical', '_extracted'])
# finally, pull this into a set
zip_it = zip(canonical_extracted_orf_ends['id_canonical'],
canonical_extracted_orf_ends['id_extracted'])
canonical_extracted_matching_ends = {(c, a) for c, a in zip_it}
msg = "Finding ORFs which exactly overlap the canonical ORFs"
logger.info(msg)
exact_matches = bed_utils.get_bed_overlaps(canonical_orf_exons,
extracted_orf_exons,
min_a_overlap=1,
min_b_overlap=1)
exact_match_orf_ids = {m.b_info for m in exact_matches}
m_exact_orf_matches = extracted_orf_exons['id'].isin(exact_match_orf_ids)
extracted_orf_exons = extracted_orf_exons[~m_exact_orf_matches]
m_canonical = extracted_orfs['id'].isin(exact_match_orf_ids)
label = 'canonical'
extracted_orfs.loc[m_canonical, 'orf_type'] = label
msg = "Found {} canonical ORFs".format(len(exact_match_orf_ids))
logger.info(msg)
msg = "Finding truncated canonical ORFs"
logger.info(msg)
truncated_matches = bed_utils.get_bed_overlaps(canonical_orf_exons,
extracted_orf_exons,
min_b_overlap=1)
truncated_match_ids = {m.b_info for m in truncated_matches
if (m.a_info, m.b_info) in canonical_extracted_matching_ends}
m_truncated_matches = extracted_orf_exons['id'].isin(truncated_match_ids)
extracted_orf_exons = extracted_orf_exons[~m_truncated_matches]
m_canonical_truncated = extracted_orfs['id'].isin(truncated_match_ids)
msg = "Finding extended canonical ORFs"
logger.info(msg)
extended_matches = bed_utils.get_bed_overlaps(canonical_orf_exons,
extracted_orf_exons,
min_a_overlap=1)
# For standard assembly, we also need to make sure that
# all extended matches are fully contained within the
# transcript structure (i.e start upstream but otherwise
# have the same structure).
if args.nonoverlapping_label is None:
transcript_matches = bed_utils.get_bed_overlaps(annotated_exons,
extracted_orf_exons,
min_b_overlap=1)
transcript_match_pairs = {(m.a_info, m.b_info) for m in transcript_matches}
extended_match_ids = {m.b_info for m in extended_matches
if (m.a_info, m.b_info) in transcript_match_pairs
and (m.a_info, m.b_info) in canonical_extracted_matching_ends}
else:
extended_match_ids = {m.b_info for m in extended_matches
if (m.a_info, m.b_info) in canonical_extracted_matching_ends}
m_extended_matches = extracted_orf_exons['id'].isin(extended_match_ids)
extracted_orf_exons = extracted_orf_exons[~m_extended_matches]
m_canonical_extended = extracted_orfs['id'].isin(extended_match_ids)
m_canonical_variants = m_canonical_truncated | m_canonical_extended
label = "{}canonical_variant".format(args.label_prefix)
extracted_orfs.loc[m_canonical_variants, 'orf_type'] = label
msg = "Found {} canonical_variant ORFs".\
format(len(extended_match_ids | truncated_match_ids))
logger.info(msg)
msg = ("Finding within canonical ORFs that do not share an "
"annotated stop codon with a canonical ORF (e.g. in "
"frame stop, out-of-frame)")
logger.info(msg)
within_ids = {m.b_info for m in truncated_matches
if m.b_info not in truncated_match_ids}
m_within_matches = extracted_orf_exons['id'].isin(within_ids)
extracted_orf_exons = extracted_orf_exons[~m_within_matches]
m_within = extracted_orfs['id'].isin(within_ids)
label = "{}within".format(args.label_prefix)
extracted_orfs.loc[m_within, 'orf_type'] = label
msg = "Found {} within ORFs".format(len(within_ids))
logger.info(msg)
# find all overlapping ORFs
msg = "Finding all UTR overlap matches"
logger.info(msg)
out_of_frame_matches = bed_utils.get_bed_overlaps(canonical_orf_exons,
extracted_orf_exons)
leader_matches = bed_utils.get_bed_overlaps(five_prime_exons,
extracted_orf_exons)
trailer_matches = bed_utils.get_bed_overlaps(three_prime_exons,
extracted_orf_exons)
msg = ("Labeling ORFs which have (out-of-frame) overlaps with both a "
"canonical ORF and annotated leaders or trailers")
logger.info(msg)
# We need to choose how to ensure that up-/downstream overlaps are unique.
# Where an ORF overlaps both the 5'UTR and the 3'UTR of different same
# sense overlapping transcripts, it is assigned by default to the downstream overlap.
# For de novo, everything is labeled as overlap.
leader_match_pairs = {(m.a_info, m.b_info) for m in leader_matches}
trailer_match_pairs = {(m.a_info, m.b_info) for m in trailer_matches}
if args.nonoverlapping_label is None:
# For standard assembly, we also need to make sure that
# all overlap matches are fully contained within the
# transcript structure.
transcript_matches = bed_utils.get_bed_overlaps(annotated_exons,
extracted_orf_exons,
min_b_overlap=1)
transcript_match_pairs = {(m.a_info, m.b_info) for m in transcript_matches}
leader_overlap_pairs = {(m.a_info, m.b_info) for m in out_of_frame_matches
if (m.a_info, m.b_info) in leader_match_pairs
and (m.a_info, m.b_info) not in trailer_match_pairs
and (m.a_info, m.b_info) in transcript_match_pairs}
trailer_overlap_pairs = {(m.a_info, m.b_info) for m in out_of_frame_matches
if (m.a_info, m.b_info) in trailer_match_pairs
and (m.a_info, m.b_info) not in leader_match_pairs
and (m.a_info, m.b_info) in transcript_match_pairs}
# We do not assign preference where the ORF overlaps both sides
# of the coding sequence on the same transcript, any ORF
# satisfying both will be labeled simply as overlap.
overlap_ids = {m.b_info for m in out_of_frame_matches
if (m.a_info, m.b_info) in leader_match_pairs
and (m.a_info, m.b_info) in trailer_match_pairs
and (m.a_info, m.b_info) in transcript_match_pairs}
trailer_overlap_ids = {pair[1] for pair in trailer_overlap_pairs
if pair[1] not in overlap_ids}
leader_overlap_ids = {pair[1] for pair in leader_overlap_pairs
if pair[1] not in trailer_overlap_ids
and pair[1] not in overlap_ids}
m_overlap_matches = extracted_orf_exons['id'].isin(overlap_ids)
extracted_orf_exons = extracted_orf_exons[~m_overlap_matches]
m_leader_overlap_matches = extracted_orf_exons['id'].isin(leader_overlap_ids)
extracted_orf_exons = extracted_orf_exons[~m_leader_overlap_matches]
m_five_prime_overlap = extracted_orfs['id'].isin(leader_overlap_ids)
label = "{}five_prime_overlap".format(args.label_prefix)
extracted_orfs.loc[m_five_prime_overlap, 'orf_type'] = label
m_trailer_overlap_matches = extracted_orf_exons['id'].isin(trailer_overlap_ids)
extracted_orf_exons = extracted_orf_exons[~m_trailer_overlap_matches]
m_three_prime_overlap = extracted_orfs['id'].isin(trailer_overlap_ids)
label = "{}three_prime_overlap".format(args.label_prefix)
extracted_orfs.loc[m_three_prime_overlap, 'orf_type'] = label
msg = "Found {} five_prime_overlap ORFs".format(len(leader_overlap_ids))
logger.info(msg)
msg = "Found {} three_prime_overlap ORFs".format(len(trailer_overlap_ids))
logger.info(msg)
else:
overlap_ids = {m.b_info for m in out_of_frame_matches}
overlap_ids |= {m.b_info for m in leader_matches}
overlap_ids |= {m.b_info for m in trailer_matches}
m_overlap_matches = extracted_orf_exons['id'].isin(overlap_ids)
extracted_orf_exons = extracted_orf_exons[~m_overlap_matches]
m_overlap = extracted_orfs['id'].isin(overlap_ids)
label = "{}overlap".format(args.label_prefix)
extracted_orfs.loc[m_overlap, 'orf_type'] = label
msg = "Found {} overlap ORFs".format(len(overlap_ids))
logger.info(msg)
msg = "Finding ORFs completely within 5' or 3' leaders"
logger.info(msg)
leader_matches = bed_utils.get_bed_overlaps(five_prime_exons,
extracted_orf_exons,
min_b_overlap=1)
leader_ids = {m.b_info for m in leader_matches}
m_leader_matches = extracted_orf_exons['id'].isin(leader_ids)
extracted_orf_exons = extracted_orf_exons[~m_leader_matches]
m_five_prime = extracted_orfs['id'].isin(leader_ids)
label = "{}five_prime".format(args.label_prefix)
extracted_orfs.loc[m_five_prime, 'orf_type'] = label
msg = "Found {} five_prime ORFs".format(len(leader_ids))
logger.info(msg)
trailer_matches = bed_utils.get_bed_overlaps(three_prime_exons,
extracted_orf_exons,
min_b_overlap=1)
trailer_ids = {m.b_info for m in trailer_matches}
m_trailer_matches = extracted_orf_exons['id'].isin(trailer_ids)
extracted_orf_exons = extracted_orf_exons[~m_trailer_matches]
m_three_prime = extracted_orfs['id'].isin(trailer_ids)
label = "{}three_prime".format(args.label_prefix)
extracted_orfs.loc[m_three_prime, 'orf_type'] = label
msg = "Found {} three_prime ORFs".format(len(trailer_ids))
logger.info(msg)
msg = "Finding ORFs completely within annotated, non-coding transcripts"
logger.info(msg)
noncoding_matches = bed_utils.get_bed_overlaps(noncoding_exons,
extracted_orf_exons,
min_b_overlap=1)
noncoding_ids = {m.b_info for m in noncoding_matches}
m_noncoding_matches = extracted_orf_exons['id'].isin(noncoding_ids)
extracted_orf_exons = extracted_orf_exons[~m_noncoding_matches]
m_noncoding = extracted_orfs['id'].isin(noncoding_ids)
label = "{}noncoding".format(args.label_prefix)
extracted_orfs.loc[m_noncoding, 'orf_type'] = label
msg = "Found {} noncoding ORFs".format(len(noncoding_ids))
logger.info(msg)
# all of the remaining ORFs fall into the "suspect" category
    suspect_ids = set(extracted_orf_exons['id'])
m_suspect = extracted_orfs['id'].isin(suspect_ids)
label = "{}suspect".format(args.label_prefix)
extracted_orfs.loc[m_suspect, 'orf_type'] = label
n_suspect_ids = len(suspect_ids)
msg = "Remaining {} ORFs labeled as suspect".format(n_suspect_ids)
logger.info(msg)
m_no_orf_type = extracted_orfs['orf_type'].isnull()
msg = "Found {} unlabeled ORFs".format(sum(m_no_orf_type))
logger.info(msg)
msg = "Writing ORFs with labels to disk"
logger.info(msg)
extracted_orfs = bed_utils.sort(extracted_orfs)
msg = ("The ORF labels will be written to {} in the next major release.".
format(args.out))
logger.warning(msg)
additional_columns = ['orf_num', 'orf_len', 'orf_type']
fields = bed_utils.bed12_field_names + additional_columns
orfs_genomic = extracted_orfs[fields]
bed_utils.write_bed(orfs_genomic, args.extracted_orfs)
label_columns = ['id', 'duplicates', 'orf_type']
extracted_orfs = extracted_orfs[label_columns]
bed_utils.write_bed(extracted_orfs, args.out)
if __name__ == '__main__':
main()
| 2.953125 | 3 |
assets/python/flaskserver.py | dnoneill/annotate-theme | 0 | 12785645 | from flask import Flask, jsonify
from flask import request, render_template
from flask_cors import CORS
import json, os, glob, requests
import base64
from settings import *
from bs4 import BeautifulSoup
import yaml
import re
import string, random
import uuid
app = Flask(__name__)
CORS(app)
annotations = []
@app.route('/create_annotations/', methods=['POST'])
def create_anno():
response = json.loads(request.data)
data_object = response['json']
list_file_path = get_list_filepath(data_object)
uniqid = str(uuid.uuid1())
data_object['@id'] = "{}{}.json".format(origin_url, uniqid)
cleanobject = cleananno(data_object)
updatelistdata(list_file_path, cleanobject)
file_path = os.path.join(filepath, uniqid) + '.json'
writeannos(file_path, cleanobject)
return jsonify(data_object), 201
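# create_anno flow: a fresh uuid becomes both the annotation @id and its
# filename; the cleaned annotation is appended to the per-canvas list file
# first, then written to its own JSON file under filepath.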
@app.route('/update_annotations/', methods=['POST'])
def update_anno():
response = json.loads(request.data)
data_object = response['json']
id = cleanid(data_object['@id'])
origin_url_id = "{}{}".format(origin_url, id)
data_object['@id'] = origin_url_id if data_object['@id'] != origin_url_id else data_object['@id']
cleanobject = cleananno(data_object)
file_path = os.path.join(filepath, id)
list_file_path = get_list_filepath(cleanobject)
writeannos(file_path, cleanobject)
newlist = updatelistdata(list_file_path, cleanobject)
return jsonify(data_object), 201
@app.route('/delete_annotations/', methods=['DELETE', 'POST'])
def delete_anno():
response = json.loads(request.data)
id = cleanid(response['id'])
deletefiles = [os.path.join(filepath, id), os.path.join(search_filepath, id).replace('.json', '.md')]
list_file_path = get_list_filepath(str(response['listuri']))
listlength = updatelistdata(list_file_path, {'@id': response['id'], 'delete': True})
if listlength <= 0:
deletefiles.append(list_file_path)
delete_annos(deletefiles)
return jsonify({"File Removed": True}), 201
@app.route('/write_annotation/', methods=['POST'])
def write_annotation():
data = json.loads(request.data)
json_data = data['json']
file = filepath if data['type'] == 'annotation' else '_ranges'
filename = os.path.join(file, data['filename'])
for id in data['deleteids']:
fileid = cleanid(id)
deletefiles = [os.path.join(filepath, fileid), os.path.join(search_filepath, fileid).replace('.json', '.md')]
delete_annos(deletefiles)
if 'list' in json_data['@type'].lower() or 'page' in json_data['@type'].lower():
for anno in json_data['resources']:
id = cleanid(anno['@id'])
single_filename = os.path.join(file, id)
writeannos(single_filename, anno)
writeannos(filename, json_data)
return jsonify({"Annotations Written": True}), 201
def cleananno(data_object):
field = 'resource' if 'resource' in data_object.keys() else 'body'
charfield = 'chars' if 'resource' in data_object.keys() else 'value'
if field in data_object.keys():
for item in data_object[field]:
replace = re.finditer(r'<iiif-(.*?)><\/iiif-(.*?)>', item[charfield])
for rep in replace:
                replacestring = rep.group().replace("<", "&lt;").replace(">", "&gt;").replace("&quot;", '"')  # entity names reconstructed; they had been decoded during extraction
item[charfield] = item[charfield].replace(rep.group(), replacestring)
return data_object
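# Example of what cleananno guards against (hypothetical input): a body value
# such as '<iiif-annotation annotationurl="..."></iiif-annotation>' has its
# angle brackets escaped so the custom tag is stored as literal text rather
# than being rendered as HTML when the annotation is displayed again.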
def cleanid(id):
return id.split('/')[-1].replace('.json', '') + '.json'
def delete_annos(annolist):
for anno in annolist:
if github_repo == "":
os.remove(anno)
else:
existing = github_get_existing(anno)
if 'sha' in existing:
data = createdatadict(anno, 'delete', existing['sha'])
payload = {'ref': github_branch}
requests.delete("{}/{}".format(github_url, anno), headers={'Authorization': 'token {}'.format(github_token)}, data=json.dumps(data), params=payload)
def get_list_filepath(data_object):
if type(data_object) == str:
targetid = data_object
elif 'on' in data_object.keys():
targetid = data_object['on'][0]['full']
else:
targetid = data_object['target']['id']
regex = re.compile('[0-9]')
numbitems = [item for item in targetid.split('/') if bool(regex.search(item)) and len(item) > 2 and ':5555' not in item]
targetid = '-'.join(numbitems) if len(numbitems) > 0 else targetid
targetid = targetid.split("#xywh")[0]
listid = targetid.split('/')[-1].replace("_", "-").replace(":", "").replace(".json", "").replace(".", "").lower()
listfilename = "{}-list.json".format(listid)
list_file_path = os.path.join(filepath, listfilename)
return list_file_path
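# Example (hypothetical target URI): for
#   https://example.org/iiif/book1/canvas12.json#xywh=0,0,100,100
# the digit-bearing segments ("book1", "canvas12.json#xywh=...") are joined,
# the "#xywh" suffix and punctuation are stripped, and the result resolves to
# "<filepath>/book1-canvas12-list.json" -- one shared list file per canvas.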
def github_get_existing(filename):
full_url = github_url + "/{}".format(filename)
payload = {'ref': github_branch}
existing = requests.get(full_url, headers={'Authorization': 'token {}'.format(github_token)}, params=payload).json()
return existing
def get_list_data(filepath):
if github_repo == "":
if os.path.exists(filepath):
filecontents = open(filepath).read()
jsoncontent = json.loads(filecontents.split("---")[-1].strip())
return jsoncontent
else:
return False
else:
existing = github_get_existing(filepath)
if 'content' in existing.keys():
content = base64.b64decode(existing['content']).split("---")[-1].strip()
jsoncontent = json.loads(content)
return jsoncontent
else:
return False
def updatelistdata(list_file_path, newannotation):
listdata = get_list_data(list_file_path)
newannoid = newannotation['@id'].split('/')[-1]
if listdata:
listindex = [i for i, res in enumerate(listdata['resources']) if res['@id'].split('/')[-1] == newannoid ]
listindex = listindex[0] if len(listindex) > 0 else None
if 'delete' in newannotation.keys() and listindex != None:
del listdata['resources'][listindex]
elif listindex != None:
listdata['resources'][listindex] = newannotation
else:
listdata['resources'].append(newannotation)
listdata = updatelistdate(newannotation, listdata)
elif 'delete' not in newannotation.keys():
listdata = create_list([newannotation], newannotation['@context'], newannoid)
listdata = updatelistdate(newannotation, listdata, True)
if listdata:
writeannos(list_file_path, listdata)
length = len(listdata['resources']) if listdata else 1
return length
def updatelistdate(singleanno, annolist, created=False):
if created and 'created' in singleanno.keys():
annolist['created'] = singleanno['created']
elif 'created' in singleanno.keys():
annolist['modified'] = singleanno['created']
if created and 'oa:annotatedAt' in singleanno.keys():
annolist['oa:annotatedAt'] = singleanno['oa:annotatedAt']
elif 'oa:annotatedAt' in singleanno.keys():
annolist['oa:serializedAt'] = singleanno['oa:annotatedAt']
if 'modified' in singleanno.keys():
annolist['modified'] = singleanno['modified']
if 'oa:serializedAt' in singleanno.keys():
annolist['oa:serializedAt'] = singleanno['oa:serializedAt']
return annolist
def writeannos(file_path, data_object):
if 'list' not in file_path and 'ranges' not in file_path:
get_search(data_object, file_path)
if github_repo == '':
writetofile(file_path, data_object)
else:
writetogithub(file_path, data_object)
def create_list(annotation, context, id):
if 'w3.org' in context:
formated_annotation = {"@context":"http://www.w3.org/ns/anno.jsonld",
"@type": "AnnotationPage", "id": "%s%s-list.json"% (origin_url, id), "resources": annotation}
else:
formated_annotation = {"@context":"http://iiif.io/api/presentation/2/context.json",
"@type": "sc:AnnotationList", "@id": "%s%s-list.json"% (origin_url, id), "resources": annotation }
return formated_annotation
def writetogithub(filename, annotation, yaml=False):
full_url = github_url + "/{}".format(filename)
sha = ''
existing = github_get_existing(filename)
if 'sha' in existing.keys():
sha = existing['sha']
anno_text = annotation if yaml else "---\nlayout: null\n---\n" + json.dumps(annotation)
data = createdatadict(filename, anno_text, sha)
response = requests.put(full_url, data=json.dumps(data), headers={'Authorization': 'token {}'.format(github_token), 'charset': 'utf-8'})
def createdatadict(filename, text, sha):
writeordelete = "write" if text != 'delete' else "delete"
message = "{} {}".format(writeordelete, filename)
data = {"message":message, "content": base64.b64encode(text), "branch": github_branch }
if sha != '':
data['sha'] = sha
return data
def writetofile(filename, annotation, yaml=False):
anno_text = annotation if yaml else "---\nlayout: null\n---\n" + json.dumps(annotation)
with open(filename, 'w') as outfile:
outfile.write(anno_text)
def get_search(anno, filename):
imagescr = '<iiif-annotation annotationurl="{}" styling="image_only:true"></iiif-annotation>'.format(anno['@id'])
listname = get_list_filepath(anno).split('/')[-1]
annodata_data = {'tags': [], 'layout': 'searchview', 'listname': listname, 'content': [], 'imagescr': imagescr, 'datecreated':'', 'datemodified': ''}
if 'oa:annotatedAt' in anno.keys():
annodata_data['datecreated'] = encodedecode(anno['oa:annotatedAt'])
if 'created' in anno.keys():
annodata_data['datecreated'] = encodedecode(anno['created'])
if 'oa:serializedAt' in anno.keys():
annodata_data['datemodified'] = encodedecode(anno['oa:serializedAt'])
if 'modified' in anno.keys():
annodata_data['datemodified'] = encodedecode(anno['modified'])
annodata_filename = os.path.join(search_filepath, filename.split('/')[-1].replace('.json', '')) + '.md'
textdata = anno['resource'] if 'resource' in anno.keys() else anno['body']
textdata = textdata if type(textdata) == list else [textdata]
for resource in textdata:
chars = BeautifulSoup(resource['chars'], 'html.parser').get_text() if 'chars' in resource.keys() else ''
chars = encodedecode(chars)
if chars and 'tag' in resource['@type'].lower():
annodata_data['tags'].append(chars)
elif 'purpose' in resource.keys() and 'tag' in resource['purpose']:
tags_data = chars if chars else resource['value']
annodata_data['tags'].append(encodedecode(tags_data))
elif chars:
annodata_data['content'].append(chars)
elif 'items' in resource.keys():
field = 'value' if 'value' in resource['items'][0].keys() else 'chars'
fieldvalues = " ".join([encodedecode(item[field]) for item in resource['items']])
annodata_data['content'].append(fieldvalues)
elif 'value' in resource:
annodata_data['content'].append(encodedecode(resource['value']))
contentvalue = annodata_data.pop('content')
try:
content = '\n'.join(contentvalue)
except:
decodedvalue = [item.decode("utf-8") for item in contentvalue]
content = '\n'.join(decodedvalue)
annodata_yaml = "---\n{}---\n{}".format(yaml.dump(annodata_data), content)
if github_repo == '':
writetofile(annodata_filename, annodata_yaml, True)
else:
writetogithub(annodata_filename, annodata_yaml, True)
def encodedecode(chars):
if type(chars) == str:
return chars
else:
return chars.encode('utf8')
if __name__ == "__main__":
app.run()
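# Running locally (a sketch): `python flaskserver.py` starts Flask's dev
# server; filepath, search_filepath, origin_url and the github_* values are
# all expected to be defined in settings.py (star-imported above). When
# github_repo is empty, everything is written to the local filesystem instead
# of the GitHub contents API.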
| 2.328125 | 2 |
tests/test_vectors/did_doc/did_doc_bob.py | alex-polosky/didcomm-python | 8 | 12785646 | from authlib.common.encoding import json_dumps
from didcomm.common.types import (
VerificationMethodType,
VerificationMaterial,
VerificationMaterialFormat,
)
from didcomm.did_doc.did_doc import VerificationMethod, DIDDoc, DIDCommService
from didcomm.protocols.routing.forward import (
PROFILE_DIDCOMM_V2,
PROFILE_DIDCOMM_AIP2_ENV_RFC587,
)
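# Test vectors: Bob's key-agreement verification methods (JWK format) for
# X25519 and the NIST curves P-256/P-384/P-521, followed by two DIDDoc
# fixtures -- one limited to keys with known secrets, and one that also lists
# "not-secrets" keys plus a DIDComm service endpoint with routing keys.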
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_1 = VerificationMethod(
id="did:example:bob#key-x25519-1",
controller="did:example:bob#key-x25519-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "OKP",
"crv": "X25519",
"x": "GDTrI66K0pFfO54tlCSvfjjNapIs44dzpneBgyx0S3E",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_2 = VerificationMethod(
id="did:example:bob#key-x25519-2",
controller="did:example:bob#key-x25519-2",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "OKP",
"crv": "X25519",
"x": "UT9S3F5ep16KSNBBShU2wh3qSfqYjlasZimn0mB8_VM",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_3 = VerificationMethod(
id="did:example:bob#key-x25519-3",
controller="did:example:bob#key-x25519-3",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "OKP",
"crv": "X25519",
"x": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_NOT_IN_SECRETS_1 = VerificationMethod(
id="did:example:bob#key-x25519-not-secrets-1",
controller="did:example:bob#key-x25519-not-secrets-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "OKP",
"crv": "X25519",
"x": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_1 = VerificationMethod(
id="did:example:bob#key-p256-1",
controller="did:example:bob#key-p256-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-256",
"x": "<KEY>",
"y": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_2 = VerificationMethod(
id="did:example:bob#key-p256-2",
controller="did:example:bob#key-p256-2",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-256",
"x": "<KEY>",
"y": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_NOT_IN_SECRETS_1 = VerificationMethod(
id="did:example:bob#key-p256-not-secrets-1",
controller="did:example:bob#key-p256-not-secrets-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-256",
"x": "<KEY>",
"y": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_1 = VerificationMethod(
id="did:example:bob#key-p384-1",
controller="did:example:bob#key-p384-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-384",
"x": "MvnE_OwKoTcJVfHyTX-DLSRhhNwlu5LNoQ5UWD9Jmgtdxp_kpjsMuTTBnxg5RF_Y",
"y": "X_3HJBcKFQEG35PZbEOBn8u9_z8V1F9V1Kv-Vh0aSzmH-y9aOuDJUE3D4Hvmi5l7",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_2 = VerificationMethod(
id="did:example:bob#key-p384-2",
controller="did:example:bob#key-p384-2",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-384",
"x": "<KEY>",
"y": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_NOT_IN_SECRETS_1 = VerificationMethod(
id="did:example:bob#key-p384-not-secrets-1",
controller="did:example:bob#key-p384-not-secrets-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-384",
"x": "2x3HOTvR8e-Tu6U4UqMd1wUWsNXMD0RgIunZTMcZsS-zWOwDgsrhYVHmv3k_DjV3",
"y": "W9LLaBjlWYcXUxOf6ECSfcXKaC3-K9z4hCoP0PS87Q_4ExMgIwxVCXUEB6nf0GDd",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_1 = VerificationMethod(
id="did:example:bob#key-p521-1",
controller="did:example:bob#key-p521-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-521",
"x": "<KEY>",
"y": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_2 = VerificationMethod(
id="did:example:bob#key-p521-2",
controller="did:example:bob#key-p521-2",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-521",
"x": "<KEY>",
"y": "<KEY>",
}
),
),
)
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_NOT_IN_SECRETS_1 = VerificationMethod(
id="did:example:bob#key-p521-not-secrets-1",
controller="did:example:bob#key-p521-not-secrets-1",
type=VerificationMethodType.JSON_WEB_KEY_2020,
verification_material=VerificationMaterial(
format=VerificationMaterialFormat.JWK,
value=json_dumps(
{
"kty": "EC",
"crv": "P-521",
"x": "<KEY>",
"y": "<KEY>",
}
),
),
)
DID_DOC_BOB_SPEC_TEST_VECTORS = DIDDoc(
did="did:example:bob",
authentication_kids=[],
key_agreement_kids=[
"did:example:bob#key-x25519-1",
"did:example:bob#key-x25519-2",
"did:example:bob#key-x25519-3",
"did:example:bob#key-p256-1",
"did:example:bob#key-p256-2",
"did:example:bob#key-p384-1",
"did:example:bob#key-p384-2",
"did:example:bob#key-p521-1",
"did:example:bob#key-p521-2",
],
didcomm_services=[],
verification_methods=[
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_2,
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_3,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_2,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_2,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_2,
],
)
DID_DOC_BOB_WITH_NO_SECRETS = DIDDoc(
did="did:example:bob",
authentication_kids=[],
key_agreement_kids=[
"did:example:bob#key-x25519-1",
"did:example:bob#key-x25519-2",
"did:example:bob#key-x25519-3",
"did:example:bob#key-x25519-not-secrets-1",
"did:example:bob#key-p256-1",
"did:example:bob#key-p256-2",
"did:example:bob#key-p256-not-secrets-1",
"did:example:bob#key-p384-1",
"did:example:bob#key-p384-2",
"did:example:bob#key-p384-not-secrets-1",
"did:example:bob#key-p521-1",
"did:example:bob#key-p521-2",
"did:example:bob#key-p521-not-secrets-1",
],
didcomm_services=[
DIDCommService(
id="did:example:123456789abcdefghi#didcomm-1",
service_endpoint="http://example.com/path",
accept=[PROFILE_DIDCOMM_V2, PROFILE_DIDCOMM_AIP2_ENV_RFC587],
routing_keys=["did:example:mediator1#key-x25519-1"],
)
],
verification_methods=[
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_2,
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_3,
BOB_VERIFICATION_METHOD_KEY_AGREEM_X25519_NOT_IN_SECRETS_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_2,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P256_NOT_IN_SECRETS_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_2,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P384_NOT_IN_SECRETS_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_1,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_2,
BOB_VERIFICATION_METHOD_KEY_AGREEM_P521_NOT_IN_SECRETS_1,
],
)
| 2.25 | 2 |
tasks/code90/code/code90.py | internetwache/Internetwache-CTF-2016 | 83 | 12785647 | #!/usr/bin/env python2
import socket
import threading
import time
import SocketServer
import random
import tree
HOST = "0.0.0.0"
PORT = 11491
WELCOME_MSG = "I'm lost in a forest. Can you invert the path?\n"
ERROR_MSG = "Ooops, something went wrong here. Please check your input!\n"
CORRECT_MSG = "Yay, that's right!\n"
WRONG_MSG = "Nope, that's not the right solution. Try again later!\n"
FLAG = "IW{10000101010101TR33}\n"
MAX_TO_SOLVE = 50
class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler):
def handle(self):
try:
self.request.sendall(WELCOME_MSG)
num_solved = 0
for level in range(1,MAX_TO_SOLVE+1):
eq, res = self.rand_tree(level)
self.request.sendall("Level {}.: {}\n".format(str(level), eq))
try:
answer = self.request.recv(1024)
answer = str(self.decode(answer.strip()))
except:
self.request.sendall(ERROR_MSG)
return
if answer == res:
num_solved += 1
self.request.sendall(CORRECT_MSG)
else:
self.request.sendall(WRONG_MSG)
return
if num_solved == MAX_TO_SOLVE:
self.request.sendall(FLAG)
except:
return
def rand_tree(self, level):
num_range = [2,20*level]
nums = [random.randint(num_range[0], num_range[1]) for i in range(2*level)]
t = tree.BST()
for num in nums:
t.insertVal(num)
tchal = t.serialize(t.root)
t.invert(t.root)
tsol = t.serialize(t.root)
return self.encode(tchal), str(tsol)
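	# rand_tree sketch (assuming tree.BST is a binary search tree whose
	# invert() mirrors every node's left/right children): the client sees the
	# pre-inversion serialization and must answer with the serialization of
	# the mirrored tree; difficulty scales with `level` via more, larger values.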
def encode(self, l):
return l
def decode(self, answer):
return str(answer)
class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
pass
if __name__ == "__main__":
server = ThreadedTCPServer((HOST, PORT), ThreadedTCPRequestHandler)
ip, port = server.server_address
server_thread = threading.Thread(target=server.serve_forever)
server_thread.daemon = False
server_thread.start()
while True:
try:
time.sleep(1)
except:
break
server.shutdown()
	server.server_close()
| 2.703125 | 3 |
datascience/numpy/from_function.py | janbodnar/Python-Course | 13 | 12785648 | #!/usr/bin/python
import numpy as np
# Construct an array by executing a function over each coordinate.
def f(x, y):
return 2*x + y + 1
a = np.fromfunction(f, (5, 4), dtype=int)
print(a)
# anonymous function
b = np.fromfunction(lambda x, y: 2*x + y, (2, 2))
print(b)
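# Expected output, for reference (exact formatting varies by NumPy version):
# a -> [[ 1  2  3  4]
#       [ 3  4  5  6]
#       [ 5  6  7  8]
#       [ 7  8  9 10]
#       [ 9 10 11 12]]
# b -> [[0. 1.]
#       [2. 3.]]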
| 3.890625 | 4 |
indicators/management/commands/qa_program_widgets/qa_widgets.py | mercycorps/toladata | 0 | 12785649 | import math
import random
import json
import os
from copy import deepcopy
from datetime import date, timedelta
from itertools import cycle
from dateutil.relativedelta import relativedelta
from django.contrib.auth.models import User
from django.utils import timezone
from indicators.models import (
Indicator,
IndicatorType,
Result,
PeriodicTarget,
Level,
DisaggregationType,
DisaggregatedValue,
LevelTier,
)
from workflow.models import Program, Country, Organization, TolaUser, SiteProfile, Sector
from indicators.views.views_indicators import generate_periodic_targets
class ProgramFactory:
module_location = os.path.dirname(__file__)
with open(os.path.join(module_location, 'sample_levels.json'), 'r') as fh:
sample_levels = json.loads(fh.read())
def __init__(self, country):
self.country = country
self.org = Organization.mercy_corps()
self.default_start_date = (date.today() + relativedelta(months=-18)).replace(day=1)
self.default_end_date = (self.default_start_date + relativedelta(months=+32)).replace(day=1) - timedelta(days=1)
def create_program(
self, name, start_date=False, end_date=False, post_satsuma=True, multi_country=False, create_levels=True):
if not start_date:
start_date = self.default_start_date
if not end_date:
end_date = self.default_end_date
program = Program.objects.create(**{
'name': name,
'reporting_period_start': start_date,
'reporting_period_end': end_date,
'funding_status': 'Funded',
'gaitid': 'fake_gait_id_{}'.format(random.randint(1, 9999)),
'_using_results_framework': Program.RF_ALWAYS if post_satsuma else Program.NOT_MIGRATED,
})
program.country.add(self.country)
if multi_country:
country2 = Country.objects.get(country="United States")
program.country.add(country2)
if create_levels:
self.create_levels(program, deepcopy(self.sample_levels))
return program
@staticmethod
def create_levels(program, level_template):
level_data = deepcopy(level_template)
tier_labels = LevelTier.get_templates()['mc_standard']['tiers']
for i, tier in enumerate(tier_labels):
t = LevelTier(name=tier, tier_depth=i + 1, program=program)
t.save()
level_map = {}
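        # The fixture pks in sample_levels.json are not reused as database ids;
        # level_map maps each fixture pk to its saved Level so that children
        # appearing later in the loop can resolve their parent instance.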
for level_fix in level_data:
parent = None
if 'parent_id' in level_fix['fields']:
parent = level_map[level_fix['fields'].pop('parent_id')]
level = Level(**level_fix['fields'])
level.parent = parent
level.program = program
level.save()
level_map[level_fix['pk']] = level
class IndicatorFactory:
standard_params_base = []
for freq in Indicator.TARGET_FREQUENCIES:
for uom_type in (Indicator.NUMBER, Indicator.PERCENTAGE):
for is_cumulative in (True, False):
for direction in (Indicator.DIRECTION_OF_CHANGE_POSITIVE, Indicator.DIRECTION_OF_CHANGE_NEGATIVE):
# Don't create indicators that are LoP|cumulative or percent|non-cumulative
# since we don't support those combinations
if (freq[0] == Indicator.LOP and is_cumulative) or \
(uom_type == Indicator.PERCENTAGE and not is_cumulative):
continue
standard_params_base.append({
'freq': freq[0], 'uom_type': uom_type, 'is_cumulative': is_cumulative,
'direction': direction, 'null_level': None})
null_supplements_params = [
{'freq': Indicator.ANNUAL, 'uom_type': Indicator.NUMBER, 'is_cumulative': False,
'direction': Indicator.DIRECTION_OF_CHANGE_POSITIVE, 'null_level': 'targets'},
{'freq': Indicator.QUARTERLY, 'uom_type': Indicator.PERCENTAGE, 'is_cumulative': True,
'direction': Indicator.DIRECTION_OF_CHANGE_NONE, 'null_level': 'results'},
{'freq': Indicator.LOP, 'uom_type': Indicator.NUMBER, 'is_cumulative': False,
'direction': Indicator.DIRECTION_OF_CHANGE_NONE, 'null_level': 'results'},
{'freq': Indicator.EVENT, 'uom_type': Indicator.PERCENTAGE, 'is_cumulative': True,
'direction': Indicator.DIRECTION_OF_CHANGE_NEGATIVE, 'null_level': 'evidence'},
{'freq': Indicator.MID_END, 'uom_type': Indicator.NUMBER, 'is_cumulative': False,
'direction': Indicator.DIRECTION_OF_CHANGE_POSITIVE, 'null_level': 'evidence'},
]
frequency_labels = {
Indicator.LOP: 'LoP only',
Indicator.MID_END: 'Midline and endline',
Indicator.EVENT: 'Event',
Indicator.ANNUAL: 'Annual',
Indicator.SEMI_ANNUAL: 'Semi-annual',
Indicator.TRI_ANNUAL: 'Tri-annual',
Indicator.QUARTERLY: 'Quarterly',
Indicator.MONTHLY: 'Monthly',
}
uom_labels = {
Indicator.NUMBER: 'Number (#)',
Indicator.PERCENTAGE: "Percentage (%)",
}
direction_labels = {
Indicator.DIRECTION_OF_CHANGE_NONE: "Direction of change NA",
Indicator.DIRECTION_OF_CHANGE_POSITIVE: "Increase (+)",
Indicator.DIRECTION_OF_CHANGE_NEGATIVE: "Decrease (-)",
}
def __init__(self, program, country):
self.program = program
self.country = country
self.sadd_disagg_obj = DisaggregationType.objects.get(
pk=109, disaggregation_type="Sex and Age Disaggregated Data (SADD)")
self.sadd_disagg_labels = self.sadd_disagg_obj.disaggregationlabel_set.all()
def create_standard_indicators(self, **kwargs):
passed_apply_skips = kwargs.pop('apply_skips', None)
        # note: the original `passed_apply_skips or True` always evaluated True,
        # silently ignoring an explicit apply_skips=False
        apply_skips_main = passed_apply_skips if passed_apply_skips is not None else True
        apply_skips_supplement = passed_apply_skips if passed_apply_skips is not None else False
indicator_ids = self.create_indicators(self.standard_params_base, apply_skips=apply_skips_main, **kwargs)
indicator_ids.extend(self.create_indicators(
self.null_supplements_params, apply_skips=apply_skips_supplement, **kwargs))
return indicator_ids
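    # create_standard_indicators covers the full frequency/UOM/cumulative/
    # direction matrix plus the null-supplement set above, which deliberately
    # omits targets, results, or evidence so the QA data exercises every
    # empty-state the program pages can show.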
def create_indicators(
self, param_sets, indicator_suffix='', apply_skips=True, apply_rf_skips=False,
personal_indicator=False, indicatorless_levels=None):
indicatorless_levels = [] if not indicatorless_levels else indicatorless_levels
indicator_ids = []
old_levels = list(Indicator.objects.filter(old_level__isnull=False).order_by('old_level')
.distinct().values_list('old_level', flat=True))
old_levels.append(None)
old_level_cycle = cycle(old_levels)
rf_levels = list(Level.objects.filter(program__id=self.program.id).exclude(id__in=indicatorless_levels))
if apply_rf_skips:
rf_levels.append(None)
rf_level_cycle = cycle(rf_levels)
indicator_types = list(IndicatorType.objects.all())
if apply_skips:
indicator_types.append(None)
type_cycle = cycle(indicator_types)
sectors = list(Sector.objects.all()[:5])
if apply_skips:
sectors.append(None)
sector_cycle = cycle(sectors)
sites = list(SiteProfile.objects.filter(country__country="Tolaland"))
if apply_skips:
sites.append(None)
site_cycle = cycle(sites)
result_skip_cycle = cycle([False, False, False, False, True, False, False])
extra_result_cycle = cycle([True, False, False, True, False, False, False])
evidence_skip_cycle = cycle([False, False, True, False, False, False, False])
# Determines how many country disaggs an indicator will have assigned to it
country_disagg_cycle = cycle([0, 1, 2])
        # Determines whether the country level SADD disagg will be assigned to an indicator
sadd_disagg_cycle = cycle([True, True, True, False])
        # Regardless of what disaggs an indicator has assigned, this controls how many disaggs actually get
        # used by a result. That way, there are potentially some results that don't have disagg values
        # even though the indicator has been assigned a particular disagg type. 'one' and 'two'
        # indicate that one or two disagg types should be used but not the SADD type.
result_disagg_cycle = cycle(['sadd', 'one', 'two', 'none', 'all', 'all', 'all', 'none'])
for n, params in enumerate(param_sets):
if params['is_cumulative']:
cumulative_text = 'Cumulative'
else:
cumulative_text = 'Non-cumulative'
indicator_disagg_count = next(country_disagg_cycle)
sadd_disagg_flag = next(sadd_disagg_cycle)
result_disagg_type = next(result_disagg_cycle)
indicator_name_list = [
self.frequency_labels[params['freq']],
self.uom_labels[params['uom_type']],
cumulative_text,
self.direction_labels[params['direction']],
f"Disagg type - SADD:{sadd_disagg_flag}, Country:{indicator_disagg_count}",
]
if params['null_level']:
indicator_name_list.append(f"| No {params['null_level']}")
else:
                result_text_list = []
                if sadd_disagg_flag:
                    result_text_list.append(f"SADD:{result_disagg_type in ('all', 'sadd')}")
                if indicator_disagg_count > 0:
                    result_text_list.append(f"Country:{result_disagg_type in ('one', 'two', 'all')}")
if len(result_text_list) > 0:
result_text = ", ".join(result_text_list)
else:
result_text = "None"
indicator_name_list.append(
f"Disaggs applied - {result_text}")
if indicator_suffix:
indicator_name_list.append(indicator_suffix)
indicator_name = ' | '.join(indicator_name_list)
frequency = params['freq']
if params['null_level'] == 'targets':
frequency = None
indicator = Indicator(
name=indicator_name,
is_cumulative=params['is_cumulative'],
target_frequency=frequency,
unit_of_measure='This is a UOM',
baseline=0,
unit_of_measure_type=params['uom_type'],
direction_of_change=params['direction'],
program=self.program,
old_level=None if self.program.results_framework else next(old_level_cycle),
level=next(rf_level_cycle),
sector=None if not personal_indicator else next(sector_cycle),
baseline_na=False,
definition="",
means_of_verification="",
data_collection_method="",
method_of_analysis=""
)
indicator.save()
country_assigned_disagg_labelsets = []
for disagg in self.country.disaggregationtype_set.order_by('?').all()[:indicator_disagg_count]:
indicator.disaggregation.add(disagg)
country_assigned_disagg_labelsets.append(list(disagg.disaggregationlabel_set.all()))
if sadd_disagg_flag:
indicator.disaggregation.add(self.sadd_disagg_obj)
i_type = next(type_cycle)
if personal_indicator and i_type:
indicator.indicator_type.add(i_type)
indicator.save()
indicator_ids.append(indicator.id)
if params['null_level'] == 'targets':
indicator.lop_target = 100
indicator.save()
continue
self.make_targets(self.program, indicator)
periodic_targets = PeriodicTarget.objects.filter(indicator__id=indicator.id)
incrementors = self.calc_target_and_achieved_base(
params['uom_type'], params['direction'], params['is_cumulative'], len(periodic_targets))
lop_target = 0
for i, pt in enumerate(periodic_targets):
pt.target = incrementors['target_start'] + incrementors['target_increment'] * i
pt.save()
if params['is_cumulative']:
lop_target = pt.target
else:
lop_target += pt.target
indicator.lop_target = lop_target
indicator.save()
result_factory = ResultFactory(
indicator, self.program, country_assigned_disagg_labelsets, self.sadd_disagg_labels,
result_disagg_type, params['uom_type'], params['null_level'], site_cycle, personal_indicator,
apply_skips)
result_factory.make_results(
periodic_targets, incrementors, evidence_skip_cycle, result_skip_cycle, extra_result_cycle)
return indicator_ids
@staticmethod
def make_targets(program, indicator):
if indicator.target_frequency == Indicator.LOP:
PeriodicTarget.objects.create(**{
'indicator': indicator,
'customsort': 1,
'edit_date': timezone.now(),
'period': 'LOP target',
})
return
elif indicator.target_frequency == Indicator.EVENT:
for i in range(3):
PeriodicTarget.objects.create(**{
'indicator': indicator,
'customsort': i,
'edit_date': timezone.now(),
'period': 'Event {}'.format(i + 1),
})
return
target_generator = PeriodicTarget.generate_for_frequency(indicator.target_frequency)
num_periods = len([p for p in target_generator(program.reporting_period_start, program.reporting_period_end)])
targets_json = generate_periodic_targets(
tf=indicator.target_frequency, start_date=program.reporting_period_start, numTargets=num_periods)
for i, pt in enumerate(targets_json):
if indicator.target_frequency in [Indicator.LOP, Indicator.MID_END]:
PeriodicTarget.objects.create(**{
'indicator': indicator,
'customsort': i,
'edit_date': timezone.now(),
'period': 'Period {}'.format(i + 1),
})
else:
PeriodicTarget.objects.create(**{
'indicator': indicator,
'customsort': i,
'edit_date': timezone.now(),
'period': 'Period {}'.format(i + 1),
'start_date': pt['start_date'],
'end_date': pt['end_date'],
})
@staticmethod
def calc_target_and_achieved_base(uom_type, direction, is_cumulative, pt_count):
if uom_type == Indicator.NUMBER:
if direction == Indicator.DIRECTION_OF_CHANGE_POSITIVE:
if is_cumulative:
target_start = 100
target_increment = target_start
achieved_start = 90
achieved_increment = int(achieved_start * 1.1)
else:
target_start = 100
target_increment = target_start
achieved_start = 90
achieved_increment = int(achieved_start * 1.1)
else:
if is_cumulative:
target_start = 500
target_increment = -int(math.floor((target_start / pt_count) / 10) * 10)
achieved_start = 400
achieved_increment = target_increment + 2
else:
target_start = 500
target_increment = -int(math.floor((target_start / pt_count) / 10) * 10)
achieved_start = 400
achieved_increment = target_increment * .8
else:
if direction == Indicator.DIRECTION_OF_CHANGE_POSITIVE:
# Don't need to check non-cumulative because we don't really handle it
target_start = 10
target_increment = 3
achieved_start = 7
achieved_increment = 4
else:
# Don't need to check non-cumulative because we don't really handle it
target_start = 90
target_increment = max(-math.floor(target_start / pt_count), -2)
achieved_start = 95
achieved_increment = target_increment - 1
return {
"target_start": target_start, "target_increment": target_increment,
"achieved_start": achieved_start, "achieved_increment": achieved_increment}
class ResultFactory:
def __init__(
self, indicator, program, country_assigned_disagg_labelsets, sadd_disagg_labels, result_disagg,
uom_type, null_level, site_cycle, personal_indicator, apply_skips):
self.program = program
self.indicator = indicator
self.sadd_disagg_labels = sadd_disagg_labels
self.indicator_disagg_labelsets = country_assigned_disagg_labelsets
self.result_disagg = result_disagg
self.uom_type = uom_type
self.null_level = null_level
self.site_cycle = site_cycle
self.personal_indicator = personal_indicator
self.apply_skips = apply_skips
def make_results(self, periodic_targets, incrementors, evidence_skip_cycle, result_skip_cycle, extra_result_cycle):
day_offset = timedelta(days=2)
for i, pt in enumerate(periodic_targets):
# Users shouldn't put in results with a date in the future, so neither should we.
if pt.start_date and date.today() < pt.start_date + day_offset:
continue
# Skip creating a result if the null_level is result or if
# the number of results has reached the arbitrary skip point.
result_skip = next(result_skip_cycle)
extra_result = next(extra_result_cycle)
if (self.apply_skips and result_skip) or self.null_level == 'results':
continue
achieved_value = incrementors['achieved_start'] + (incrementors['achieved_increment'] * i)
results_to_create = 1
if self.apply_skips and extra_result:
results_to_create = 2
if self.uom_type == Indicator.NUMBER:
achieved_value = int(achieved_value * .4)
else:
achieved_value = int(achieved_value * .9)
# Now create the Results and their related Records
if pt.start_date:
date_collected = pt.start_date + day_offset
else:
date_collected = date.today()
for c in range(results_to_create):
rs = Result(
periodic_target=pt,
indicator=self.indicator,
program=self.program,
achieved=achieved_value,
date_collected=date_collected)
rs.save()
if self.result_disagg != 'none':
self.disaggregate_result(rs, self.result_disagg, self.indicator)
date_collected = date_collected + day_offset
if self.uom_type == Indicator.NUMBER:
achieved_value = int(achieved_value * 1.5)
else:
achieved_value = int(achieved_value * 1.15)
if self.null_level == 'evidence':
continue
# evidence_skip = next(evidence_skip_cycle)
if self.apply_skips and next(evidence_skip_cycle):
continue
rs.record_name = 'Evidence for result id {}'.format(rs.id)
rs.evidence_url = 'https://www.pinterest.ca/search/pins/?q=cute%20animals'
r_site = next(self.site_cycle)
# TODO: remove personal indicator?
if self.personal_indicator and r_site:
rs.site.add(r_site)
rs.save()
def disaggregate_result(self, result, result_disagg_type, indicator):
label_sets = []
if result_disagg_type == 'sadd':
label_sets.append(self.sadd_disagg_labels)
elif result_disagg_type == 'one' and len(self.indicator_disagg_labelsets) > 0:
try:
label_sets.append(random.choice(self.indicator_disagg_labelsets))
except ValueError:
pass
elif result_disagg_type == 'two' and indicator.disaggregation.all().count() > 1:
try:
label_sets.extend(random.sample(self.indicator_disagg_labelsets, k=2))
except ValueError:
label_sets.extend(self.indicator_disagg_labelsets)
elif result_disagg_type == 'all':
label_sets.append(self.sadd_disagg_labels)
label_sets.extend(self.indicator_disagg_labelsets)
if len(label_sets) < 1:
return
for label_set in label_sets:
# Calculate how many of the labels we will use (k) and then randomly select that number of label indexes
k = random.randrange(1, len(label_set) + 1)
label_indexes = random.sample(list(range(len(label_set))), k)
values = self.make_random_disagg_values(result.achieved, len(label_indexes))
value_objects = []
for label_index, value in zip(label_indexes, values):
label = label_set[label_index]
value_objects.append(DisaggregatedValue(category=label, value=value, result=result))
DisaggregatedValue.objects.bulk_create(value_objects)
@staticmethod
def make_random_disagg_values(aggregate_value, total_slot_count):
filled = []
for slot_index in range(total_slot_count):
slots_available_count = total_slot_count - len(filled)
max_value = aggregate_value - sum(filled) - slots_available_count + 1
if max_value <= 1:
filled.extend([1] * slots_available_count)
break
elif slot_index == total_slot_count - 1:
filled.append(aggregate_value - sum(filled))
else:
filled.append(random.randrange(0, max_value))
if sum(filled) < aggregate_value:
filled[0] += aggregate_value - sum(filled)
if sum(filled) > aggregate_value:
reduction_amount = sum(filled) - aggregate_value
while reduction_amount > 0:
i = filled.index(max(filled))
if filled[i] >= reduction_amount:
filled[i] -= reduction_amount
reduction_amount = 0
else:
reduction_amount -= filled[i]
filled[i] = 0
if sum(filled) != aggregate_value:
raise NotImplementedError('You wrote a bad algorithm')
random.shuffle(filled)
return filled
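        # Invariant sketch: for aggregate_value=10 and total_slot_count=3 the
        # result is always 3 non-negative integers summing to exactly 10
        # (e.g. [4, 0, 6]); the two rebalancing passes above repair any under-
        # or overshoot from the random draws before the final shuffle.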
class Cleaner:
@classmethod
def clean(cls, *args):
if 'clean_all' in args:
cls.clean_programs()
cls.clean_tolaland()
cls.clean_test_users()
else:
if 'clean_tolaland' in args:
cls.clean_tolaland()
if 'clean_programs' in args:
cls.clean_programs()
if 'clean_test_users' in args:
cls.clean_test_users()
@staticmethod
def clean_test_users():
auth_users = User.objects.filter(username__in=user_profiles.keys())
tola_users = TolaUser.objects.filter(user__in=auth_users)
message = f"{auth_users.count()} Auth Users and {tola_users.count()} Tola Users deleted"
tola_users.delete()
auth_users.delete()
print(message)
@staticmethod
def clean_tolaland():
try:
country = Country.objects.get(country='Tolaland')
print("Deleting country: {}".format(country))
disaggregations = DisaggregationType.objects.filter(country=country)
disaggregations.delete()
country.delete()
except Country.DoesNotExist:
pass
@staticmethod
def clean_programs():
programs = Program.objects.filter(name__icontains='QA program -')
if programs.count() > 0:
print("Delete these programs?\n{}".format('\n'.join(p.name for p in programs)))
confirm = input('[yes/no]: ')
if confirm == 'yes':
for program in programs:
print('Deleting program: {}'.format(program))
for indicator in program.indicator_set.all():
indicator.delete()
program.delete()
else:
print('\nPrograms not deleted')
standard_countries = ['Afghanistan', 'Haiti', 'Jordan', 'Tolaland', 'United States']
TEST_ORG, created = Organization.objects.get_or_create(name='Test')
MC_ORG = Organization.objects.get(name='Mercy Corps')
user_profiles = {
'mc-low': {
'first_last': ['mc-low-first', 'mc-low-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'low',
'home_country': 'United States',
'org': MC_ORG,
},
'mc-medium': {
'first_last': ['mc-med-first', 'mc-med-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'medium',
'home_country': 'United States',
'org': MC_ORG,
},
'mc-high': {
'first_last': ['mc-high-first', 'mc-high-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'high',
'home_country': 'United States',
'org': MC_ORG,
},
'mc-basicadmin': {
'first_last': ['mc-basicadmin-first', 'mc-basicadmin-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'high',
'home_country': 'United States',
'org': MC_ORG,
'admin': 'all'
},
'gmail-low': {
'first_last': ['gmail-low-first', 'gmail-low-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'low',
'home_country': None,
'org': TEST_ORG,
},
'gmail-medium': {
'first_last': ['gmail-med-first', 'gmail-med-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'medium',
'home_country': None,
'org': TEST_ORG,
},
'gmail-high': {
'first_last': ['gmail-high-first', 'gmail-high-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'high',
'home_country': None,
'org': TEST_ORG,
},
'external-low': {
'first_last': ['external-low-first', 'external-low-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'low',
'home_country': None,
'org': TEST_ORG,
},
'external-medium': {
'first_last': ['external-med-first', 'external-med-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'medium',
'home_country': None,
'org': TEST_ORG,
},
'external-high': {
'first_last': ['external-high-first', 'external-high-last'],
'email': '<EMAIL>',
'accessible_countries': standard_countries,
'permission_level': 'high',
'home_country': None,
'org': TEST_ORG,
},
'demo1': {
'first_last': ['demo', 'one'],
'email': '<EMAIL>',
'accessible_countries': ['Ethiopia'],
'permission_level': 'low',
'home_country': 'Ethiopia',
'org': MC_ORG,
},
'demo2': {
'first_last': ['demo', 'two'],
'email': '<EMAIL>',
'accessible_countries': [],
'permission_level': 'medium',
'home_country': None,
'org': TEST_ORG,
'program_access': [('Ethiopia', 'Collaboration in Cross-Border Areas', 'medium')]
},
'demo3': {
'first_last': ['demo', 'three'],
'email': '<EMAIL>',
'accessible_countries': [],
'permission_level': 'high',
'home_country': None,
'org': TEST_ORG,
'program_access': [('Ethiopia', 'Collaboration in Cross-Border Areas', 'high')]
},
}
| 1.960938 | 2 |
src/handlers/feedback.py | ngshiheng/burplist-frontend | 4 | 12785650 | __all__ = ['feedback']
from pywebio.output import put_html
from pywebio.platform import seo
from pywebio.platform.page import config
from pywebio.session import run_js
from src.settings import SEO_DESCRIPTION, SEO_TITLE
from src.utils.constants import GA_JS_CODE, GA_JS_FILE
from src.utils.contents.index import FOOTER, HEADER, LANDING_PAGE_HEADING
@seo(SEO_TITLE, SEO_DESCRIPTION)
@config(theme="minty", js_file=[GA_JS_FILE], js_code=GA_JS_CODE)
def feedback() -> None:
run_js(HEADER)
run_js(FOOTER)
put_html(LANDING_PAGE_HEADING)
put_html(r"""
<iframe src="https://tally.so/embed/wdxRDw?hideTitle=1&alignLeft=1" width="100%" height="1050" frameborder="0" marginheight="0" marginwidth="0" title="Feedback"></iframe>
""")
| 1.914063 | 2 |